Example #1
File: plot.py Project: jsobeck/apogee
 def input_wrapper(*args,**kwargs):
     if len(args) >= 2 and isinstance(args[0],(list,numpy.ndarray)) \
             and isinstance(args[1],(list,numpy.ndarray)):
         # wavelength, spectrum
         return func(args[0],args[1],*args[2:],**kwargs)
     elif len(args) >= 1 and isinstance(args[0],(list,numpy.ndarray)):
         # spectrum on standard re-sampled wavelength grid
         lam=apStarWavegrid()
         apStarBlu_lo,apStarBlu_hi,apStarGre_lo,apStarGre_hi,apStarRed_lo,apStarRed_hi = _apStarPixelLimits(dr=None)    
         aspcapBlu_start,aspcapGre_start,aspcapRed_start,aspcapTotal = _aspcapPixelLimits(dr=None)
         if len(args[0]) == aspcapTotal: # Input is on ASPCAP grid
             spec= numpy.zeros(len(lam))
             spec[apStarBlu_lo:apStarBlu_hi]= args[0][:aspcapGre_start]
             spec[apStarGre_lo:apStarGre_hi]= args[0][aspcapGre_start:aspcapRed_start]
             spec[apStarRed_lo:apStarRed_hi]= args[0][aspcapRed_start:]
         else:
             spec= args[0]
         return func(lam,spec,*args[1:],**kwargs)
     elif isinstance(args[0],(int,numpy.short,str)) \
             and isinstance(args[1],str):
         # location ID and APOGEE ID (loc ID can be string for 1m sample)
         if kwargs.get('apStar',False):
             spec, hdr= apread.apStar(args[0],args[1],header=True,
                                      ext=kwargs.pop('ext',1))
             spec= spec[numpy.amin([kwargs.pop('apStarIndx',1),
                                    len(spec)-1])]
         else: #aspcapStar
             spec, hdr= apread.aspcapStar(args[0],args[1],header=True,
                                          ext=kwargs.pop('ext',1))
         lam= 10.**numpy.arange(hdr['CRVAL1'],
                                hdr['CRVAL1']+len(spec)*hdr['CDELT1'],
                                hdr['CDELT1'])
         return func(lam,spec,*args[2:],**kwargs)
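The `func` used inside `input_wrapper` is a closure variable, so this wrapper is presumably the inner function of a decorator in apogee.spec.plot. A minimal sketch of that pattern, with the decorator name and the decorated plotting routine invented for illustration (only the wavelength+spectrum branch is reproduced):

import numpy

def spec_plot_input_decorator(func):
    # Hypothetical decorator: dispatches on argument types as input_wrapper does above.
    def input_wrapper(*args, **kwargs):
        if len(args) >= 2 and isinstance(args[0], (list, numpy.ndarray)) \
                and isinstance(args[1], (list, numpy.ndarray)):
            # wavelength, spectrum passed explicitly
            return func(args[0], args[1], *args[2:], **kwargs)
        raise TypeError('this sketch only handles the wavelength+spectrum form')
    return input_wrapper

@spec_plot_input_decorator
def plot_spectrum(lam, spec, **kwargs):
    # Placeholder standing in for the real decorated plotting routine.
    print('plotting %d pixels' % len(spec))

plot_spectrum(numpy.linspace(15100., 17000., 8575), numpy.ones(8575))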
Example #2
File: plot.py Project: jobovy/apogee
 def input_wrapper(*args,**kwargs):
     if len(args) >= 2 and isinstance(args[0],(list,numpy.ndarray)) \
             and isinstance(args[1],(list,numpy.ndarray)):
         # wavelength, spectrum
         return func(args[0],args[1],*args[2:],**kwargs)
     elif len(args) >= 1 and isinstance(args[0],(list,numpy.ndarray)):
         # spectrum on standard re-sampled wavelength grid
         lam=apStarWavegrid()
         apStarBlu_lo,apStarBlu_hi,apStarGre_lo,apStarGre_hi,apStarRed_lo,apStarRed_hi = _apStarPixelLimits(dr=None)    
         aspcapBlu_start,aspcapGre_start,aspcapRed_start,aspcapTotal = _aspcapPixelLimits(dr=None)
         if len(args[0]) == aspcapTotal: # Input is on ASPCAP grid
             spec= numpy.zeros(len(lam))
             spec[apStarBlu_lo:apStarBlu_hi]= args[0][:aspcapGre_start]
             spec[apStarGre_lo:apStarGre_hi]= args[0][aspcapGre_start:aspcapRed_start]
             spec[apStarRed_lo:apStarRed_hi]= args[0][aspcapRed_start:]
         else:
             spec= args[0]
         return func(lam,spec,*args[1:],**kwargs)
     elif isinstance(args[0],(int,numpy.short,str)) \
             and isinstance(args[1],str):
         # location ID and APOGEE ID (loc ID can be string for 1m sample)
         if kwargs.get('apStar',False):
             spec, hdr= apread.apStar(args[0],args[1],header=True,
                                      ext=kwargs.pop('ext',1))
             spec= spec[numpy.amin([kwargs.pop('apStarIndx',1),
                                    len(spec)-1])]
         else: #aspcapStar
             spec, hdr= apread.aspcapStar(args[0],args[1],header=True,
                                          ext=kwargs.pop('ext',1))
         lam= 10.**numpy.arange(hdr['CRVAL1'],
                                hdr['CRVAL1']+len(spec)*hdr['CDELT1'],
                                hdr['CDELT1'])
         return func(lam,spec,*args[2:],**kwargs)
Example #3
 def input_wrapper(*args,**kwargs):
     if len(args) >= 2 and isinstance(args[0],(list,numpy.ndarray)) \
             and isinstance(args[1],(list,numpy.ndarray)):
         # wavelength, spectrum
         return func(args[0],args[1],*args[2:],**kwargs)
     elif len(args) >= 1 and isinstance(args[0],(list,numpy.ndarray)):
         # spectrum on standard re-sampled wavelength grid
         lam=apStarWavegrid()
         if len(args[0]) == 7214: # Input is on ASPCAP grid
             spec= numpy.zeros(len(lam))
             spec[322:3242]= args[0][:2920]
             spec[3648:6048]= args[0][2920:5320]
             spec[6412:8306]= args[0][5320:]
         else:
             spec= args[0]
         return func(lam,spec,*args[1:],**kwargs)
     elif isinstance(args[0],(int,numpy.short,str)) \
             and isinstance(args[1],str):
         # location ID and APOGEE ID (loc ID can be string for 1m sample)
         if kwargs.get('apStar',False):
             spec, hdr= apread.apStar(args[0],args[1],header=True,
                                      ext=kwargs.pop('ext',1))
             spec= spec[numpy.amin([kwargs.pop('apStarIndx',1),
                                    len(spec)-1])]
         else: #aspcapStar
             spec, hdr= apread.aspcapStar(args[0],args[1],header=True,
                                          ext=kwargs.pop('ext',1))
         lam= 10.**numpy.arange(hdr['CRVAL1'],
                                hdr['CRVAL1']+len(spec)*hdr['CDELT1'],
                                hdr['CDELT1'])
         return func(lam,spec,*args[2:],**kwargs)
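The hard-coded slices above place the three detector chips of a 7214-pixel ASPCAP-grid spectrum onto the longer apStar wavelength grid. A small helper that performs just that mapping, as a sketch (the 8575-pixel length is assumed to match what apStarWavegrid() returns):

import numpy

def aspcap_to_apstar(aspcap_spec, n_apstar=8575):
    # Map an ASPCAP-grid spectrum onto the apStar grid using the chip limits
    # hard-coded in the example above; pixels between chips stay zero.
    aspcap_spec = numpy.asarray(aspcap_spec)
    if len(aspcap_spec) != 7214:
        raise ValueError('expected a 7214-pixel ASPCAP-grid spectrum')
    spec = numpy.zeros(n_apstar)
    spec[322:3242] = aspcap_spec[:2920]       # blue chip
    spec[3648:6048] = aspcap_spec[2920:5320]  # green chip
    spec[6412:8306] = aspcap_spec[5320:]      # red chip
    return spec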
Example #4
def runFinder(ranger):
    interestingTargets = []
    skippedTargets = []
    locationID = 4590
    apogeeID = "2M00050265+0116236"
    interestingTarget = False

    badheader, header = apread.apStar(locationID, apogeeID, ext=0, dr="13", header=True)
    skippedTargets.append([locationID, apogeeID])

    nvisits = header["NVISITS"]
    """sys.stdout.write("\r{0}\t\t\t".format('Target ' + str(i + 1) + '/' + str(targetCount)))
	sys.stdout.flush()"""
    positions = []
    for visit in range(1, nvisits + 1):
        data = apread.apStar(locationID, apogeeID, ext=9, header=False, dr="13")
        if nvisits != 1:
            ccf = data["CCF"][0][1 + visit]
        else:
            ccf = data["CCF"][0]

        pos = getMaxPositions(ccf, ranger)
        r = calcR(ccf)
        if (str(pos[0]) != "none") and ((str(pos[1]) != "none")):
            interestingTarget = True
            # r = calcR(ccf, pos[0], pos[1])
        """elif r < 1.0:
			interestingTarget = True"""

        positions.append([pos[0], pos[1], r])

        # reportPositions(locationID, apogeeID, ranger, positions)
    if interestingTarget == True:
        interestingTargets.append([locationID, apogeeID])
        interestingTarget = False

    reportTargets(interestingTargets, ranger, "interestingTargets")
    reportTargets(skippedTargets, ranger, "skippedTargets")
    del interestingTargets[:]
    del skippedTargets[:]
Example #5
	def constructParams(self):
		'''
		Constructs the parameter given the data table provided in HDU9 of the targets apStar file.
		'''
		data = apread.apStar(self.locationID, self.apogeeID, ext=9, header=False)
		self.modelParamA.constructParams(data)
		self.modelParamB.constructParams(data)
		self.maxTeffA = self.modelParamA.teff + 100.
		self.minTeffA = self.modelParamA.teff - 100.
		self.teffStepA = 50.
		self.maxTeffB = self.maxTeffA + 100.
		self.minTeffB = self.minTeffA - 100.
		self.teffStepB = self.teffStepA
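constructParams sets the component-A Teff grid to span ±100 K around the fitted value in 50 K steps (and a wider window for component B). A quick illustration of the range that produces, using an arbitrary example value of 4800 K and the same np.arange call used later in Example #24 to build its grid:

import numpy as np

teff_a = 4800.                                    # arbitrary example value
range_teff_a = np.arange(teff_a - 100., teff_a + 100., 50.)
print(range_teff_a)  # [4700. 4750. 4800. 4850.] -- np.arange excludes the upper bound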
Example #6
def grid(passCount, gridParams, minimizedVisitParams):
	'''
	The binary model fitting grid. This function will fit the targets of the following parameters:
	 	1) Teff of component A
	 	2) Teff of component B
	 	3) Flux Ratio of component B
	 	4) Relative Heliocentric Velocity of Component A
	 	5) Relative Heliocentric Velocity of Component B

	After chi2 minimization over the above parameters, the parameters that give the minimized chi2 value are written into
	lists/chi2.lis. The other parameters that were tested on the grid and their corresponding chi2 values can be found
	in lists/chi2/FIELD_ID/2M_ID.lis.

	:param passCount: [in] The maximum number of passes the grid will make
	:param gridParams: [in/out] The list of GridParams that contain the targets' fitting data (built in runGrid)
	:param minimizedVisitParams: [out] All the visits with the minimized chi2 parameters
	'''
	targetCount = len(gridParams)
	tpass = Timer()
	tpassSum = 0.0
	for j in range(passCount):
		tpass.start()
		print('-------------PASS ' + str(j+1) + '/' + str(passCount) + '-------------')
		ttarget = Timer()
		ttargetSum = 0.0
		for i in range(targetCount):
			locationID = gridParams[i].locationID
			apogeeID = gridParams[i].apogeeID
			badheader, header = apread.apStar(locationID, apogeeID, ext=0, header=True)
			nvisits = header['NVISITS']
			print('Fitting: ' + locationID + ', ' + apogeeID + ', nvisits: ' + str(nvisits))
			print('On target: ' + str(i+1) + '/' + str(targetCount))
			ttarget.start()

			gridParams[i], minimizedVisitParams[i] = bg.targetGrid(gridParams[i], minimizedVisitParams[i], plot=False)

			temp = ttarget.end()
			ttargetSum+= temp
			print('Target run time: ' + str(round(temp, 2)) + str('s'))
		temp = tpass.end()
		tpassSum+= temp
		print('Pass run time: ' + str(round(temp, 2)) + str('s'))
		print('Average target run time: ' + str(round(ttargetSum/targetCount, 2)) + str('s'))
	print('Average pass run time: ' + str(round(tpassSum/passCount, 2)) + str('s'))
Example #7
def get_spectra_ap(data,ext = 1,indx = None):
    """
    Returns apStar spectra and header information for each object specified in data 
    
    data:    labels for a subset of the APOGEE survey
    """
    specs = np.zeros((len(data),7214),dtype=np.int16)
    hdrs = {}
    goodind = []
    badind = []
    for i in range(len(data)):
        try:
            specs[i] = apread.apStar(data['LOCATION_ID'][i],data['APOGEE_ID'][i],ext = ext, header = False, aspcapWavegrid=True)[indx]
            goodind.append(i)
        except IOError as e:
            badind.append(i)
            print(i, data['CLUSTER'][i], ' File missing')
            continue
    if not badind:
        return specs
    return specs, (np.array(goodind),)
Example #8
def getRVs(locationID, apogeeID, visit):
	'''
	Returns the velocities of the binary components.

	:param locationID: The location ID of the binary.
	:param apogeeID: The apogee ID of the binary.
	:param visit: The visit we are using to test against.
	:return: The velocities of the individual binary components in the system.
	'''
	# Directory that holds Martin's data (deltaV's)
	martin_data = '/Volumes/CoveyData-1/APOGEE_Spectra/Martin/Data/Highly_Likely/rv_tables/'
	
	# Get the Julian Dates, velocity of components A and B (km/s), and residual velocities (km/s)
	# TODO: just get the line we want... no need to load the whole file. line = visit
	jDates, velA, velB, residual = np.loadtxt(martin_data + str(locationID) + '_' + apogeeID + '_rvs.tbl', skiprows=1, unpack=True)

	# Get the master HDU of the binary
	badheader, header = apread.apStar(locationID, apogeeID, ext=0, header=True)

	
	row = -1
	# Check if there is only one visit
	if (header['NVISITS'] == 1):
		return [ velA - velB, velB - velA ]
	# Find the correct row from the rvs table
	else:
		try:
			for i in range(header['NVISITS']):
				if (int(header['JD' + str(visit)] * 10) == int(jDates[i] * 10)):
					row = i
		except IndexError:
			print('WARNING: rvs table for ' + str(locationID) + ', ' + apogeeID + ' may not have the same visit count.')
			pass
	
	if(row == -1):
		raise Exception('ERROR: visit not found. Check rvs tables and master HDU of ' + str(locationID) + '_' + apogeeID)

	return [ velA[row], velB[row] ]
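The loop above matches a visit to its row in the rv table by comparing Julian Dates truncated to 0.1-day precision. An alternative sketch that matches on the nearest JD within a tolerance (match_visit_row is a hypothetical helper, not part of the original code):

import numpy as np

def match_visit_row(jd_visit, j_dates, tol=0.1):
    # Index of the rv-table row whose JD is nearest jd_visit, or -1 if no row
    # lies within tol days; an alternative to the int(JD * 10) comparison above.
    diffs = np.abs(np.asarray(j_dates, dtype=float) - jd_visit)
    row = int(np.argmin(diffs))
    return row if diffs[row] < tol else -1

print(match_visit_row(2457000.45, [2457000.05, 2457000.44, 2457001.02]))  # 1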
Example #9
 def input_wrapper(*args, **kwargs):
     if len(args) >= 2 and isinstance(args[0],(list,numpy.ndarray)) \
             and isinstance(args[1],(list,numpy.ndarray)):
         # wavelength, spectrum
         return func(args[0], args[1], *args[2:], **kwargs)
     elif len(args) >= 1 and isinstance(args[0], (list, numpy.ndarray)):
         # spectrum on standard re-sampled wavelength grid
         lam = apStarWavegrid()
         if len(args[0]) == 7214:  # Input is on ASPCAP grid
             spec = numpy.zeros(len(lam))
             spec[322:3242] = args[0][:2920]
             spec[3648:6048] = args[0][2920:5320]
             spec[6412:8306] = args[0][5320:]
         else:
             spec = args[0]
         return func(lam, spec, *args[1:], **kwargs)
     elif isinstance(args[0],(int,numpy.short,str)) \
             and isinstance(args[1],str):
         # location ID and APOGEE ID (loc ID can be string for 1m sample)
         if kwargs.get('apStar', False):
             spec, hdr = apread.apStar(args[0],
                                       args[1],
                                       header=True,
                                       ext=kwargs.pop('ext', 1))
             spec = spec[numpy.amin(
                 [kwargs.pop('apStarIndx', 1),
                  len(spec) - 1])]
         else:  #aspcapStar
             spec, hdr = apread.aspcapStar(args[0],
                                           args[1],
                                           header=True,
                                           ext=kwargs.pop('ext', 1))
         lam = 10.**numpy.arange(hdr['CRVAL1'],
                                 hdr['CRVAL1'] + len(spec) * hdr['CDELT1'],
                                 hdr['CDELT1'])
         return func(lam, spec, *args[2:], **kwargs)
Example #10
def get_spectra(name, red_clump, location):
	"""Return cluster data, spectra, spectral errors, photometric Teffs, and bitmask from APOGEE.
	
	If the data file for the specified cluster already exists locally, 
	import the data from the file (cluster data, spectra, spectral errors, bitmask).
	If the data file does not exist, obtain the APOGEE spectra from a specified cluster 
	from the allStar catalogue, replacing ASPCAP abundances with astroNN abundances.
	
	Parameters
	----------
	name : str
		Name of desired cluster (i.e. 'NGC 2682') 
	red_clump : str
		If the red clump stars in rcsample are to be removed, set to 'True'.  If all stars are to be used,
		set to 'False'.
	location : str
		If running locally, set to 'personal'.  If running on the server, set to 'server'.
	
	Returns
	-------
	apogee_cluster_data (all stars) or apogee_cluster_data_final (red clumps removed) : structured array
		All cluster data from APOGEE
	spectra_50 (all stars) or spectra_final (red clumps removed) : tuple
		Array of floats representing the cleaned-up fluxes in the APOGEE spectra with red clump stars removed
	spectra_err_50 (all stars) or spectra_err_final (red clumps removed) : tuple
		Array of floats representing the cleaned-up spectral errors from the APOGEE spectra with red clump stars 
		removed
	good_T (all stars) or T_final (red clumps removed) : tuple
		Array of floats representing the effective temperatures of the stars in the cluster
		between 4000K and 5000K
	full_bitmask (all stars) or bitmask_final (red clumps removed) : tuple
		Array of ints (1 or 0), cleaned in the same way as the spectra, representing the bad pixels 
		in the APOGEE_PIXMASK bitmask
	"""
	
	#Path, strip spaces in cluster name
	if location == 'personal':
		path = '/Users/chloecheng/Personal/' + str(name).replace(' ', '') + '.hdf5'
	elif location == 'server':
		path = '/geir_data/scr/ccheng/AST425/Personal/' + str(name).replace(' ', '') + '.hdf5' 
		
	#If the data file for this cluster exists, save the data to variables
	if glob.glob(path):
		if red_clump == 'False':
			file = h5py.File(path, 'r')
			apogee_cluster_data = file['apogee_cluster_data'][()]
			spectra_50 = file['spectra'][()]
			spectra_err_50 = file['spectra_errs'][()]
			good_T = file['T'][()]
			full_bitmask = file['bitmask'][()]
			file.close()
			print(name, ' complete.')
			return apogee_cluster_data, spectra_50, spectra_err_50, good_T, full_bitmask
		
		elif red_clump == 'True':
			file = h5py.File(path, 'r')
			apogee_cluster_data_final = file['apogee_cluster_data'][()]
			spectra_final = file['spectra'][()]
			spectra_err_final = file['spectra_errs'][()]
			T_final = file['T'][()]
			bitmask_final = file['bitmask'][()]
			file.close()
			print(name, ' complete.')
			return apogee_cluster_data_final, spectra_final, spectra_err_final, T_final, bitmask_final
		
	#If the file does not exist, get the data from APOGEE
	else:
		#Get red clump stars from rcsample
		rc_data = rcsample(dr='14')
		rc_stars = []
		for i in range(len(rc_data)):
			#rc_stars.append(rc_data[i][2]) - REMOVE IN FINAL VERSION
			rc_stars.append(rc_data[i][2].decode('UTF-8'))
		rc_stars = np.array(rc_stars)
	
		#Read in APOGEE catalogue data, removing duplicated stars and replacing ASPCAP with astroNN abundances
		apogee_cat = apread.allStar(use_astroNN_abundances=True)
		unique_apoids,unique_inds = np.unique(apogee_cat['APOGEE_ID'],return_index=True)
		apogee_cat = apogee_cat[unique_inds]
		
		#Read in overall cluster information
		cls = afits.open('occam_cluster-DR14.fits')
		cls = cls[1].data
		
		#Read in information about cluster members
		members = afits.open('occam_member-DR14.fits')
		members = members[1].data
		
		#Select all members of a given cluster
		cluster_members = (members['CLUSTER']==name) & (members['MEMBER_FLAG']=='GM') #second part of the mask indicates to only use giant stars
		member_list = members[cluster_members]
		
		#Find APOGEE entries for that cluster
		#numpy.in1d finds the 1D intersection between two lists. 
		#In this case we're matching using the unique APOGEE ID assigned to each star
		#The indices given by numpy.in1d are for the first argument, so in this case the apogee catalogue
		cluster_inds = np.in1d((apogee_cat['APOGEE_ID']).astype('U100'),member_list['APOGEE_ID'])
		apogee_cluster_data = apogee_cat[cluster_inds]
		T = photometric_Teff(apogee_cluster_data)
		
		#Mark red clump stars in the members of the cluster as NaNs
		cluster_stars = member_list['APOGEE_ID']
		cluster_marked = np.copy(cluster_stars)
		for i in range(len(cluster_stars)):
			for j in range(len(rc_stars)):
				if cluster_stars[i] == rc_stars[j]:
					cluster_marked[i] = np.nan
		
		#Get spectra, spectral errors, and bitmask for each star - apStar
		#We can use the APOGEE package to read each star's spectrum
		#We'll read in the ASPCAP spectra, which have combined all of the visits for each star and removed the spaces between the spectra
		number_of_members = len(member_list)
		spectra = np.zeros((number_of_members, 7514))
		spectra_errs = np.zeros((number_of_members, 7514))
		bitmask = np.zeros((number_of_members, 7514))
		for s,star in enumerate(apogee_cluster_data):
			spectra[s] = apread.aspcapStar(star['LOCATION_ID'],star['APOGEE_ID'],ext=1,header=False,dr='14',aspcapWavegrid=True)
			spectra_errs[s] = apread.aspcapStar(star['LOCATION_ID'],star['APOGEE_ID'],ext=2,header=False,dr='14',aspcapWavegrid=True)
			bitmask[s] = apread.apStar(star['LOCATION_ID'],star['APOGEE_ID'],ext=3,header=False,dr='14', aspcapWavegrid=True)[1]
		
		#Set all entries in bitmask to integers	
		bitmask = bitmask.astype(int)
		bitmask_flip = np.zeros_like(bitmask)
		for i in range(len(spectra)):
			for j in range(7514):
				if bitmask[i][j] == 0:
					bitmask_flip[i][j] = 1
				else:
					bitmask_flip[i][j] = 0
					
		#Remove empty spectra
		full_spectra = []
		full_spectra_errs = []
		full_bitmask = []
		full_T = [] 
		full_stars = [] 
		for i in range(len(spectra)):
			if any(spectra[i,:] != 0):
				full_spectra.append(spectra[i])
				full_spectra_errs.append(spectra_errs[i])
				full_bitmask.append(bitmask_flip[i])
				full_T.append(T[i]) 
				full_stars.append(i) 
		full_spectra = np.array(full_spectra)
		full_spectra_errs = np.array(full_spectra_errs)
		full_bitmask = np.array(full_bitmask)
		full_T = np.array(full_T) 
		full_stars = np.array(full_stars) 
		full_marked_stars = cluster_marked[full_stars] 
		
		#Create array of NaNs to replace flagged values in spectra
		masked_spectra = np.empty_like(full_spectra)
		masked_spectra_errs = np.empty_like(full_spectra_errs)
		masked_spectra[:] = np.nan
		masked_spectra_errs[:] = np.nan
		
		#Mask the spectra
		for i in range(len(full_spectra)):
			for j in range(7514):
				if full_bitmask[i][j] != 0:
					masked_spectra[i][j] = full_spectra[i][j]
					masked_spectra_errs[i][j] = full_spectra_errs[i][j]
					
		#Cut stars that are outside of the temperature limits 
		good_T_inds = (full_T > 4000) & (full_T < 5000)
		final_spectra = masked_spectra[good_T_inds]
		final_spectra_errs = masked_spectra_errs[good_T_inds]
		good_T = full_T[good_T_inds]
		apogee_cluster_data = apogee_cluster_data[good_T_inds]
		full_bitmask = full_bitmask[good_T_inds]
		final_stars = full_marked_stars[good_T_inds] 
		rgs = (final_stars != 'nan') #Get indices for final red giant stars to be used
		
		#Want an SNR of 200 so set those errors that have a larger SNR to have an SNR of 200
		spectra_err_200 = np.zeros_like(final_spectra_errs)
		for i in range(len(final_spectra)):
			for j in range(7514):
				if final_spectra[i][j]/final_spectra_errs[i][j] <= 200:
					spectra_err_200[i][j] = final_spectra_errs[i][j]
				else:
					spectra_err_200[i][j] = final_spectra[i][j]/200
					
		#Cut errors with SNR of less than 50
		spectra_50 = np.copy(final_spectra)
		spectra_err_50 = np.copy(spectra_err_200)
		
		for i in range(len(final_spectra)):
			for j in range(7514):
				if final_spectra[i][j]/spectra_err_200[i][j] <= 50:
					spectra_50[i][j] = np.nan
					spectra_err_50[i][j] = np.nan
		
		#Cut red clumps
		logg = apogee_cluster_data['LOGG']
		apogee_cluster_data_final = apogee_cluster_data[rgs]
		spectra_final = spectra_50[rgs]
		spectra_err_final = spectra_err_50[rgs]
		T_final = good_T[rgs]
		bitmask_final = full_bitmask[rgs]
		
		if red_clump == 'False':
			#Write to file
			file = h5py.File(path, 'w')
			file['apogee_cluster_data'] = apogee_cluster_data
			file['spectra'] = spectra_50
			file['spectra_errs'] = spectra_err_50
			file['T'] = good_T
			file['bitmask'] = full_bitmask
			file.close()
			print(name, 'complete')
			
			return apogee_cluster_data, spectra_50, spectra_err_50, good_T, full_bitmask
		
		elif red_clump == 'True':
			#Write to file 
			file = h5py.File(path, 'w')
			file['apogee_cluster_data'] = apogee_cluster_data_final
			file['spectra'] = spectra_final
			file['spectra_errs'] = spectra_err_final
			file['T'] = T_final
			file['bitmask'] = bitmask_final
			file.close()
			print(name, 'complete')
			
			return apogee_cluster_data_final, spectra_final, spectra_err_final, T_final, bitmask_final
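The nested per-pixel loops above first cap the signal-to-noise at 200 (replacing the error where SNR exceeds 200) and then blank pixels whose SNR is at or below 50. The same two steps written with vectorized NumPy, as a sketch under the assumption that the inputs are float arrays of equal shape:

import numpy as np

def cap_and_cut_snr(spec, err, snr_cap=200., snr_floor=50.):
    # Cap per-pixel SNR at snr_cap, then set pixels at or below snr_floor to NaN.
    err_capped = np.where(spec / err > snr_cap, spec / snr_cap, err)
    low = spec / err_capped <= snr_floor
    spec_out = np.where(low, np.nan, spec)
    err_out = np.where(low, np.nan, err_capped)
    return spec_out, err_out

# spectra_50, spectra_err_50 = cap_and_cut_snr(final_spectra, final_spectra_errs)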
Example #11
#filename = 'lists/binaries2.dat'
filename = '/Volumes/CoveyData-1/APOGEE_Spectra/APOGEE2_DR13/Bisector/BinaryFinder4/kevin_candidate_list.csv'
locationIDs, apogeeIDs = np.loadtxt(filename, unpack=True, delimiter=',', dtype=str)
targetCount = len(locationIDs)
print(targetCount, 'targets')

locationIDs, apogeeIDs = bf.removeSingle(locationIDs, apogeeIDs, 'kevin_candidate_list')
targetCount = len(locationIDs)
print(targetCount, 'targets')

plt.rcParams["figure.figsize"] = [20.0, 15.0]
for i in range(targetCount):
	locationID = locationIDs[i]
	apogeeID = apogeeIDs[i]
	print(locationID, apogeeID)
	badheader, header = apread.apStar(locationID, apogeeID, ext=0, dr='13', header=True)
	data = apread.apStar(locationID, apogeeID, ext=9, header=False, dr='13')

	nvisits = header['NVISITS']
	for visit in range(0, nvisits):
		if (nvisits != 1):
			ccf = data['CCF'][0][2 + visit]
		else:
			ccf = data['CCF'][0]
		
		plt.plot(ccf + visit,label= 'Visit: '+str(1+visit))
		
		#axes = plt.gca()
		#axes.set_xlim([100,300])
		
		plt.xlabel('CCF Lag',fontsize=15)
Example #12
def recordTargets(locationIDs, apogeeIDs):
	'''
	With the given field and 2M IDs, record all the BFData
	:param locationIDs: Field IDs
	:param apogeeIDs: 2M IDs
	'''
	interestingTargetsr = []
	interestingTargetsDualPeak = []
	skippedTargets = []
	
	targetCount = len(locationIDs)
	for i in range(targetCount):
		locationID = locationIDs[i]
		apogeeID = apogeeIDs[i]

		# Get fits files
		try:
			badheader, header = apread.apStar(locationID, apogeeID, ext=0, dr='13', header=True)
			data = apread.apStar(locationID, apogeeID, ext=9, header=False, dr='13')
		except IOError:
			skippedTargets.append([locationID, apogeeID])
			continue
		
		# Calculate r and test for second peak
		nvisits = header['NVISITS']

		positions = []
		rRecorded = False
		dpRecorded = False
		for visit in range(0, nvisits):
			if (nvisits != 1):
				ccf = data['CCF'][0][2 + visit]
				snr = header['SNRVIS' + str(1+visit)]
			else:
				ccf = data['CCF'][0]
				snr = header['SNRVIS1']
			max1, max2, peakhDiff = getMaxPositions(ccf)
			
			# Calculate r values
			r = []

			# Calculate r by reflecting about the highest peak
			ccfCount = len(ccf)
			if not np.isnan(max2):
				peakLoc = max(max1, max2)
			else:
				peakLoc = max1

			try:
				if (ccfCount > peakLoc*2):
					r.append(calcR(ccf, pos2=peakLoc*2, peakLoc=peakLoc))
				else:
					r.append(calcR(ccf, pos1=2*peakLoc-ccfCount+1, pos2=ccfCount-1, peakLoc=peakLoc))
			except:
				r.append(np.nan)
				print(locationID, apogeeID)
			
			# calculate r by reflecting about the center (201)
			for cut in range(20):
				r.append(calcR(ccf, pos1=cut*10+1, pos2=(401 - (cut * 10)), peakLoc=201))
			
			if (r[0] < 7.0) and (rRecorded is False):
				rRecorded = True
				interestingTargetsr.append([locationID, apogeeID])

			if (np.isnan(max2) == False) and (dpRecorded is False):
				dpRecorded = True
				interestingTargetsDualPeak.append([locationID, apogeeID])

			positions.append([snr, max1, max2, peakhDiff, r])
		
		recordBFData(locationID, apogeeID, positions)

	recordTargetsCSV(interestingTargetsr, 'interestingTargetsr')
	recordTargetsCSV(interestingTargetsDualPeak, 'interestingTargetsDualPeak')
	recordTargetsCSV(skippedTargets, 'skippedTargets')
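When reflecting the CCF about its highest peak, the branch above chooses the widest index window that stays symmetric about peakLoc before handing it to calcR (whose implementation is not shown). That bound selection isolated as a sketch; pos1 defaulting to 0 in the first branch is an assumption:

def reflection_window(ccf_len, peak_loc):
    # (pos1, pos2) of the widest window symmetric about peak_loc that fits
    # inside a CCF of length ccf_len, mirroring the if/else used above.
    if ccf_len > peak_loc * 2:
        return 0, peak_loc * 2
    return 2 * peak_loc - ccf_len + 1, ccf_len - 1

print(reflection_window(402, 150))  # (0, 300)
print(reflection_window(402, 350))  # (299, 401)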
Example #13
        'CCF_372', 'CCF_373', 'CCF_374', 'CCF_375', 'CCF_376', 'CCF_377',
        'CCF_378', 'CCF_379', 'CCF_380', 'CCF_381', 'CCF_382', 'CCF_383',
        'CCF_384', 'CCF_385', 'CCF_386', 'CCF_387', 'CCF_388', 'CCF_389',
        'CCF_390', 'CCF_391', 'CCF_392', 'CCF_393', 'CCF_394', 'CCF_395',
        'CCF_396', 'CCF_397', 'CCF_398', 'CCF_399', 'CCF_400', 'CCF_401'
    ]

    writer = csv.DictWriter(output, delimiter=',', fieldnames=names)
    writer.writeheader()

    for i in range(len(locationIDs)):
        locationID = locationIDs[i]
        apogeeID = apogeeIDs[i]

        if locationID != 1:
            header = apread.apStar(locationID, apogeeID, ext=0, header=True)
            Data = apread.apStar(locationID, apogeeID, ext=9, header=False)
            nvisits = header[1]['NVISITS']

            for visit in range(0, nvisits):
                snr = header[1]['SNRVIS' + str(visit + 1)]
                if (nvisits != 1):
                    CCF = Data['CCF'][0][2 + visit]
                else:
                    CCF = Data['CCF'][0]
                writer.writerow({
                    'Location_ID': locationID,
                    'Apogee_ID': apogeeID,
                    'SNR': snr,
                    'CCF_1': CCF[j],
                    'CCF_2': CCF[j + 1],
Example #14
running = True
visitSum = 0.0
while running:
	for i in range(3):
			if procs[i].is_alive() == False:
				badheader, header = apread.apStar(locationIDs[i], apogeeIDs[i], ext=0, header=True, dr='13')
				nvisits = header['NVISITS']
				visitSum+= timers[i].end() / nvisits
	if procs[0].is_alive() == False and procs[1].is_alive() == False and procs[2].is_alive() == False:
		running = False
	time.sleep(2)'''
timer = Timer()
visitSum = 0.0
for i in range(targetCount):
	badheader, header = apread.apStar(locationIDs[i], apogeeIDs[i], ext=0, header=True, dr='13')
	nvisits = header['NVISITS']
	timer.start()
	runTarget(targets[i])
	visitSum+= timer.end() / nvisits
	print(visitSum)
print('avg visit time:', visitSum/targetCount)

'''
done=4
print('------------Target ' + str(done + 1) + '/' + str(targetCount) + ' ------------')
while targetQueue.empty() == False:
	# runTarget(targetQueue.get_nowait())
	for i in range(4):
		if procs[i].is_alive() == False:
			del(procs[i])
Example #15
def runTarget(gridParam):
	locationID = gridParam.locationID
	apogeeID = gridParam.apogeeID

	badheader, header = apread.apStar(locationID, apogeeID, ext=0, header=True, dr='13')
	specs = apread.apStar(locationID, apogeeID, ext=1, header=False, dr='13')
	specerrs = apread.apStar(locationID, apogeeID, ext=2, header=False, dr='13')
	nvisits = header['NVISITS']
	gridParamVists = []
	for visit in range(1, nvisits + 1):
		print('Visit ' + str(visit) + '/' + str(nvisits))
		if nvisits == 1:
			spec = specs
			specerr = specerrs
		else:
			spec = specs[1 + visit]
			specerr = specerrs[1 + visit]
		
		aspec= np.reshape(spec,(1, len(spec)))
		aspecerr= np.reshape(specerr,(1, len(specerr)))
		cont= spec / continuum.fit(aspec, aspecerr, type='aspcap')[0]
		conterr = specerr / continuum.fit(aspec, aspecerr, type='aspcap')[0]
		
		gridParam = GridParam(locationID, apogeeID)
		gridParam.constructParams()
		gridParam.spec = bm.shiftFlux(cont, header['VHELIO' + str(visit)])
		gridParam.specErr = bm.shiftFlux(conterr, header['VHELIO' + str(visit)])
		gridParam.getRVs(visit)
		gridParam.visit = visit

		nSteps = 200
		sampler = MCMC(gridParam, nSteps=nSteps)
		circular_samples = sampler.chain[:, :, :].reshape((-1, 5))
		results = np.asarray(list(map(lambda v: (v[1], v[2]-v[1], v[1]-v[0]),
							zip(*np.percentile(circular_samples, [16, 50, 84], axis=0)))))
		
		fig, ax = plt.subplots(5, 1, sharex='col')
		for i in range(5):
			for j in range(len(sampler.chain[:, 0, i])):
				ax[i].plot(np.linspace(0, nSteps, num=nSteps), sampler.chain[j, :, i], 'k', alpha=0.2)
			ax[i].plot(np.linspace(0, nSteps, num=nSteps) , np.ones(nSteps)*results[i][0], 'b', lw=2)
		fig.set_figheight(20)
		fig.set_figwidth(15)
		if not os.path.exists('plots/walker/' + str(locationID) + '/' + str(apogeeID) + '/'):
			os.makedirs('plots/walker/' + str(locationID) + '/' + str(apogeeID) + '/')
		plt.savefig('plots/walker/' + str(locationID) + '/' + str(apogeeID) + '/' + str(visit) + '.png')

		plt.close('all')
		gridParam.modelParamA.teff = results[0][0]
		gridParam.modelParamB.teff = results[1][0]
		gridParam.modelParamB.fluxRatio = results[2][0]
		gridParam.modelParamA.rv = results[3][0]
		gridParam.modelParamB.rv = results[4][0]

		gridParam.chi2 = -1.0 * fitModel(None, gridParam, plot=True)
		gridParamVists.append(gridParam)
	

	if not os.path.exists('lists/chi2/' + str(locationID) + '/'):
		os.makedirs('lists/chi2/' + str(locationID) + '/')
	filename = 'lists/chi2/' + str(locationID) + '/' + str(apogeeID) + '.tbl'
	writeGridToFile(gridParamVists, filename=filename)
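The lambda/zip construction above reduces the flattened MCMC chain to, for each of the five parameters, the median value and its upper and lower 1-sigma offsets from the 16th, 50th, and 84th percentiles. An equivalent, more readable sketch:

import numpy as np

def summarize_chain(flat_samples):
    # Per-parameter (median, +1sigma, -1sigma) from the 16/50/84 percentiles
    # of a flattened (n_samples, n_params) chain.
    p16, p50, p84 = np.percentile(flat_samples, [16, 50, 84], axis=0)
    return np.column_stack([p50, p84 - p50, p50 - p16])

# results = summarize_chain(sampler.chain.reshape((-1, 5)))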
Example #16
			binModel, peak[i][j], residuals[i][j] = mg.binaryModelGen(locationID, apogeeID, params, visit, plot=True);
	
	# Get the max peak value
	peakMax = np.argmax(peak)

	# Create fit params array to return
	fitParams = np.full((6, 2), 0.)
	max1 = int(peakMax / len(rangeTeff))
	max2 = int(peakMax % len(rangeTeff))

	fitParams = [	[rangeTeff[max1], rangeTeff[max2]],
					[logg, logg],
					[metals, metals],
					[am, am],
					[nm, nm],
					[cm, cm] ]

	binPlot.plotTeffGrid(locationID, apogeeID, visit, rangeTeff, peak, 'CCF');
	binPlot.plotTeffGrid(locationID, apogeeID, visit, rangeTeff, residuals, 'Residual');
	return fitParams


locationIDs, apogeeIDs = np.loadtxt('binaries.dat', unpack=True, delimiter=',', dtype=str)
for i in range(len(locationIDs)):
	print('Fitting: ' + locationIDs[i] + ', ' + apogeeIDs[i])
	badheader, header = apread.apStar(int(locationIDs[i]), apogeeIDs[i], ext=0, header=True)

	for visit in range(header['NVISITS']):
		print('---------------VISIT ' + str(visit + 1) + '---------------')
		printParams(binaryGridFit(int(locationIDs[i]), apogeeIDs[i], params, visit + 1, rangeTeff))

Example #17
import apogee.tools.read as apread
import apogee.spec.plot as splot
import matplotlib.pyplot as plt
import numpy as np
from scipy.stats.stats import pearsonr

teff1 = 5000.
teff2 = 5250.
logg = 4.7
metals = am = nm = cm = 0.

locationID = 4611
apogeeID = '2M05350392-0529033'
spec, hdr= apread.apStar(locationID,apogeeID,ext=1)

# mspec1= ferre.interpolate(teff1,logg,metals,am,nm,cm)
# mspec2= ferre.interpolate(teff2,logg,metals,am,nm,cm)
# print(mspec1.shape)
# print(pearsonr(mspec1, mspec2))
spec[0][spec[0] <= 0.] = np.nan

nan_vals = [i for i in range(len(spec[0])) if np.isnan(spec[0][i])]
nan_ranges = [(nan_vals[i] + 1, nan_vals[i+1]) for i in range(len(nan_vals) - 1) if nan_vals[i+1]!=nan_vals[i]+1]


print(nan_ranges)
# print(spec[0][0:nan_ranges[0][0]])

'''ms1 = np.array(mspec1[np.isnan(mspec1) == False])
Example #18
def get_spectra(name, red_clump, location):
    """Return cluster data, spectra, spectral errors, photometric Teffs, and bitmask from APOGEE.
	
	If the data file for the specified cluster already exists locally, 
	import the data from the file (cluster data, spectra, spectral errors, bitmask).
	If the data file does not exist, obtain the APOGEE spectra from a specified cluster 
	from the allStar catalogue, replacing ASPCAP abundances with astroNN abundances.
	
	Parameters
	----------
	name : str
		Name of desired cluster (i.e. 'PJ_26') 
	red_clump : str
		If the red clump stars in rcsample are to be removed, set to 'True'.  If all stars are to be used,
		set to 'False'.
	location : str
		If running locally, set to 'personal'.  If running on the server, set to 'server'.
	
	Returns
	-------
	cluster_data_full (all stars) or cluster_data (red clumps removed) : structured array
		All cluster data from APOGEE
	cluster_spectra_full (all stars) or cluster_spectra (red clumps removed) : tuple
		Array of floats representing the cleaned-up fluxes in the APOGEE spectra with red clump stars removed
	cluster_spectra_errs_full (all stars) or cluster_spectra_errs (red clumps removed) : tuple
		Array of floats representing the cleaned-up spectral errors from the APOGEE spectra with red clump stars 
		removed
	cluster_T_full (all stars) or cluster_T (red clumps removed) : tuple
		Array of floats representing the effective temperatures of the stars in the cluster
		between 4000K and 5000K
	full_bitmask (all stars) or bitmask_final (red clumps removed) : tuple
		Array of ints (1 or 0), cleaned in the same way as the spectra, representing the bad pixels 
		in the APOGEE_PIXMASK bitmask
	"""

    if location == 'personal':
        path = '/Users/chloecheng/Personal/' + str(name) + '.hdf5'
    elif location == 'server':
        path = '/geir_data/scr/ccheng/AST425/Personal/' + str(name) + '.hdf5'

    #If the data file for this cluster exists, save the data to variables
    if glob.glob(path):
        if red_clump == 'False':
            file = h5py.File(path, 'r')
            cluster_data_full = file['apogee_cluster_data'][()]
            cluster_spectra_full = file['spectra'][()]
            cluster_spectra_errs_full = file['spectra_errs'][()]
            cluster_T_full = file['T'][()]
            full_bitmask = file['bitmask'][()]
            file.close()
            print(name, ' complete.')
            return cluster_data_full, cluster_spectra_full, cluster_spectra_errs_full, cluster_T_full, full_bitmask

        elif red_clump == 'True':
            file = h5py.File(path, 'r')
            cluster_data = file['apogee_cluster_data'][()]
            cluster_spectra = file['spectra'][()]
            cluster_spectra_errs = file['spectra_errs'][()]
            cluster_T = file['T'][()]
            bitmask_final = file['bitmask'][()]
            file.close()
            print(name, ' complete.')
            return cluster_data, cluster_spectra, cluster_spectra_errs, cluster_T, bitmask_final

    #If the file does not exist
    else:
        #Get red clump stars from rcsample
        rc_data = rcsample(dr='14')
        rc_stars = []
        for i in range(len(rc_data)):
            if location == 'personal':
                rc_stars.append(rc_data[i][2])
            elif location == 'server':
                rc_stars.append(rc_data[i][2].decode('UTF-8'))
        rc_stars = np.array(rc_stars)

        #Read in PJ catalogue data
        if location == 'personal':
            apogee_cluster_data = np.load(
                '/Users/chloecheng/Personal/published_clusters.npy')
        elif location == 'server':
            apogee_cluster_data = np.load(
                '/geir_data/scr/ccheng/AST425/Personal/published_clusters.npy')

        #Get temperatures
        #T = photometric_Teff(apogee_cluster_data)
        T = apogee_cluster_data['TEFF']

        #Get spectra for each star
        number_of_members = 360
        spectra = np.zeros((number_of_members, 7514))
        spectra_errs = np.zeros((number_of_members, 7514))
        bitmask = np.zeros((number_of_members, 7514))
        missing_spectra = []
        stars = []
        for s, star in enumerate(apogee_cluster_data):
            loc = star['FIELD'].decode('utf-8')
            apo = star['APOGEE_ID'].decode('utf-8')
            stars.append(apo)
            try:
                spectra[s] = apread.aspcapStar(
                    loc,
                    apo,
                    ext=1,
                    header=False,
                    dr='16',
                    aspcapWavegrid=True,
                    telescope=star['TELESCOPE'].decode('utf-8'))
                spectra_errs[s] = apread.aspcapStar(
                    loc,
                    apo,
                    ext=2,
                    header=False,
                    dr='16',
                    aspcapWavegrid=True,
                    telescope=star['TELESCOPE'].decode('utf-8'))
                bitmask[s] = apread.apStar(
                    loc,
                    apo,
                    ext=3,
                    header=False,
                    dr='16',
                    aspcapWavegrid=True,
                    telescope=star['TELESCOPE'].decode('utf-8'))[1]
            #If the spectrum is missing, set bitmask to value that will be removed
            except OSError:
                bitmask[s] = -1.0
                missing_spectra.append(s)
                print('missing ', star['APOGEE_ID'].decode("utf-8"))

        #Mark red clump stars
        PJ_stars = np.array(stars)
        PJ_marked = np.copy(PJ_stars)
        for i in range(len(PJ_stars)):
            for j in range(len(rc_stars)):
                if PJ_stars[i] == rc_stars[j]:
                    PJ_marked[i] = np.nan

        #Set all entries in bitmask to integers
        bitmask = bitmask.astype(int)
        bitmask_flip = np.zeros_like(bitmask)
        for i in range(len(spectra)):
            for j in range(7514):
                if bitmask[i][j] == 0:
                    bitmask_flip[i][j] = 1
                else:
                    bitmask_flip[i][j] = 0

        #Remove empty spectra
        full_spectra = []
        full_spectra_errs = []
        full_bitmask = []
        full_stars = []
        full_T = []
        for i in range(len(spectra)):
            if any(spectra[i, :] != 0):
                full_spectra.append(spectra[i])
                full_spectra_errs.append(spectra_errs[i])
                full_bitmask.append(bitmask_flip[i])
                full_stars.append(i)
                full_T.append(T[i])
        full_spectra = np.array(full_spectra)
        full_spectra_errs = np.array(full_spectra_errs)
        full_bitmask = np.array(full_bitmask)
        full_stars = np.array(full_stars)
        full_T = np.array(full_T)
        full_marked_stars = PJ_marked[full_stars]

        #Create array of nans to replace flagged values in spectra
        masked_spectra = np.empty_like(full_spectra)
        masked_spectra_errs = np.empty_like(full_spectra_errs)
        masked_spectra[:] = np.nan
        masked_spectra_errs[:] = np.nan

        #Mask the spectra
        for i in range(len(full_spectra)):
            for j in range(7514):
                if full_bitmask[i][j] != 0:
                    masked_spectra[i][j] = full_spectra[i][j]
                    masked_spectra_errs[i][j] = full_spectra_errs[i][j]

        #Cut stars that are outside of the temperature limits
        good_T_inds = (full_T > 4000) & (full_T < 5000)
        final_spectra = masked_spectra[good_T_inds]
        final_spectra_errs = masked_spectra_errs[good_T_inds]
        good_T = full_T[good_T_inds]
        apogee_cluster_data = apogee_cluster_data[good_T_inds]
        full_bitmask = full_bitmask[good_T_inds]
        final_stars = full_marked_stars[good_T_inds]  #ADDED
        rgs = (final_stars != 'nan')  #ADDED

        #Want an SNR of 200 so set those errors that have a larger SNR to have an SNR of 200
        spectra_err_200 = np.zeros_like(final_spectra_errs)
        for i in range(len(final_spectra)):
            for j in range(7514):
                if final_spectra[i][j] / final_spectra_errs[i][j] <= 200:
                    spectra_err_200[i][j] = final_spectra_errs[i][j]
                else:
                    spectra_err_200[i][j] = final_spectra[i][j] / 200

        #Cut errors with SNR of less than 50
        spectra_50 = np.copy(final_spectra)
        spectra_err_50 = np.copy(spectra_err_200)
        for i in range(len(final_spectra)):
            for j in range(7514):
                if final_spectra[i][j] / spectra_err_200[i][j] <= 50:
                    spectra_50[i][j] = np.nan
                    spectra_err_50[i][j] = np.nan

        #Separate out individual clusters
        cluster_ids = apogee_cluster_data['CLUSTER_ID']
        PJ_26 = []
        PJ_95 = []
        PJ_471 = []
        PJ_162 = []
        PJ_398 = []
        PJ_151 = []
        PJ_230 = []
        PJ_939 = []
        PJ_262 = []
        PJ_289 = []
        PJ_359 = []
        PJ_396 = []
        PJ_899 = []
        PJ_189 = []
        PJ_574 = []
        PJ_641 = []
        PJ_679 = []
        PJ_1976 = []
        PJ_88 = []
        PJ_1349 = []
        PJ_1811 = []

        for i in range(len(apogee_cluster_data)):
            if cluster_ids[i] == 26:
                PJ_26.append(i)
            elif cluster_ids[i] == 95:
                PJ_95.append(i)
            elif cluster_ids[i] == 471:
                PJ_471.append(i)
            elif cluster_ids[i] == 162:
                PJ_162.append(i)
            elif cluster_ids[i] == 398:
                PJ_398.append(i)
            elif cluster_ids[i] == 151:
                PJ_151.append(i)
            elif cluster_ids[i] == 230:
                PJ_230.append(i)
            elif cluster_ids[i] == 939:
                PJ_939.append(i)
            elif cluster_ids[i] == 262:
                PJ_262.append(i)
            elif cluster_ids[i] == 289:
                PJ_289.append(i)
            elif cluster_ids[i] == 359:
                PJ_359.append(i)
            elif cluster_ids[i] == 396:
                PJ_396.append(i)
            elif cluster_ids[i] == 899:
                PJ_899.append(i)
            elif cluster_ids[i] == 189:
                PJ_189.append(i)
            elif cluster_ids[i] == 574:
                PJ_574.append(i)
            elif cluster_ids[i] == 641:
                PJ_641.append(i)
            elif cluster_ids[i] == 679:
                PJ_679.append(i)
            elif cluster_ids[i] == 1976:
                PJ_1976.append(i)
            elif cluster_ids[i] == 88:
                PJ_88.append(i)
            elif cluster_ids[i] == 1349:
                PJ_1349.append(i)
            elif cluster_ids[i] == 1811:
                PJ_1811.append(i)

        cluster_dict = {
            'PJ_26': PJ_26,
            'PJ_95': PJ_95,
            'PJ_471': PJ_471,
            'PJ_162': PJ_162,
            'PJ_398': PJ_398,
            'PJ_151': PJ_151,
            'PJ_230': PJ_230,
            'PJ_939': PJ_939,
            'PJ_262': PJ_262,
            'PJ_289': PJ_289,
            'PJ_359': PJ_359,
            'PJ_396': PJ_396,
            'PJ_899': PJ_899,
            'PJ_189': PJ_189,
            'PJ_574': PJ_574,
            'PJ_641': PJ_641,
            'PJ_679': PJ_679,
            'PJ_1976': PJ_1976,
            'PJ_88': PJ_88,
            'PJ_1349': PJ_1349,
            'PJ_1811': PJ_1811
        }

        cluster_data_full = apogee_cluster_data[cluster_dict[name]]
        cluster_spectra_full = spectra_50[cluster_dict[name]]
        cluster_spectra_errs_full = spectra_err_50[cluster_dict[name]]
        cluster_T_full = good_T[cluster_dict[name]]

        #Cut red clump stars
        cluster_rgs = rgs[cluster_dict[name]]
        cluster_data = cluster_data_full[cluster_rgs]
        cluster_spectra = cluster_spectra_full[cluster_rgs]
        cluster_spectra_errs = cluster_spectra_errs_full[cluster_rgs]
        cluster_T = cluster_T_full[cluster_rgs]
        bitmask_final = full_bitmask[rgs]

        if red_clump == 'False':
            #Write to file
            file = h5py.File(path, 'w')
            file['apogee_cluster_data'] = cluster_data_full
            file['spectra'] = cluster_spectra_full
            file['spectra_errs'] = cluster_spectra_errs_full
            file['T'] = cluster_T_full
            file['bitmask'] = full_bitmask
            file.close()
            print(name, 'complete')

            return cluster_data_full, cluster_spectra_full, cluster_spectra_errs_full, cluster_T_full, full_bitmask

        elif red_clump == 'True':
            #Write to file
            file = h5py.File(path, 'w')
            file['apogee_cluster_data'] = cluster_data
            file['spectra'] = cluster_spectra
            file['spectra_errs'] = cluster_spectra_errs
            file['T'] = cluster_T
            file['bitmask'] = bitmask_final
            file.close()
            print(name, 'complete')

            return cluster_data, cluster_spectra, cluster_spectra_errs, cluster_T, bitmask_final
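The long elif chain that sorts cluster members into per-cluster index lists can be collapsed with a defaultdict keyed on CLUSTER_ID; a behavior-matching sketch (assuming every CLUSTER_ID in the table belongs to one of the PJ clusters listed above):

from collections import defaultdict

def build_cluster_dict(cluster_ids):
    # Map 'PJ_<id>' -> list of row indices, matching cluster_dict above.
    cluster_dict = defaultdict(list)
    for i, cid in enumerate(cluster_ids):
        cluster_dict['PJ_' + str(int(cid))].append(i)
    return cluster_dict

# cluster_dict = build_cluster_dict(apogee_cluster_data['CLUSTER_ID'])
# cluster_data_full = apogee_cluster_data[cluster_dict[name]]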
Example #19
import apogee.tools.read as apread
import apogee.spec.plot as splot
from apogee.modelspec import ferre
import matplotlib.pyplot as plt
import numpy as np

locationIDs, apogeeIDs = np.loadtxt("lists/binaries3.dat", unpack=True, delimiter=",", dtype=str)
targetCount = len(locationIDs)
visit = 1
f = open("chipRanges.txt", "w")
for i in range(len(locationIDs)):
    locationID = locationIDs[i]
    apogeeID = apogeeIDs[i]
    badheader, header = apread.apStar(locationID, apogeeID, ext=0, header=True)
    nvisits = header["NVISITS"]
    print("Getting chip ranges of: " + locationIDs[i] + ", " + apogeeIDs[i] + ", nvisits: " + str(nvisits))
    print(str(i + 1) + "/" + str(targetCount) + " targets completed")
    spec = apread.apStar(locationID, apogeeID, ext=1, header=False)[2]
    for visit in spec:
        for chip in visit:
            print(chip)
            nan_vals_spec = np.where(chip == 0)[0]
            nan_ranges_spec = [
                (nan_vals_spec[i] + 1, nan_vals_spec[i + 1])
                for i in range(len(nan_vals_spec) - 1)
                if nan_vals_spec[i + 1] != nan_vals_spec[i] + 1
            ]
            print(nan_ranges_spec)
            f.write(
Example #20
import apogee.tools.read as apread
import numpy as np
from apogee.spec import continuum
import apogee.spec.plot as splot
import matplotlib.pyplot as plt

import BinModelGen as bm
import BinPlot
from BinaryGrid import calcChi2
from GridParam import GridParam
from Timer import Timer

locationID = 4586
apogeeID = "2M03441568+3231282"
restLambda = splot.apStarWavegrid()
visit = 1

badheader, header = apread.apStar(locationID, apogeeID, ext=0, header=True)

gridParam = GridParam(locationID, apogeeID)
gridParam.constructParams()

spec = apread.apStar(locationID, apogeeID, ext=1, header=False)[1 + visit]
specerr = apread.apStar(locationID, apogeeID, ext=2, header=False)[1 + visit]
# plt.plot(restLambda, spec)

aspec = np.reshape(spec, (1, len(spec)))
aspecerr = np.reshape(specerr, (1, len(specerr)))
cont = spec / continuum.fit(aspec, aspecerr, type="aspcap")[0]
conterr = specerr / continuum.fit(aspec, aspecerr, type="aspcap")[0]
shiftedSpec = bm.shiftFlux(cont, header["VHELIO" + str(visit)])
conterr = bm.shiftFlux(conterr, header["VHELIO" + str(visit)])
BinPlot.plotDeltaVCheck(
Example #21
def get_combined_spectrum_single_object(apogee_id,
                                        catalog=None,
                                        save_local=False):
    '''
    apogee_id should be a bytes-like object, e.g. b'2M13012770+5754582'
    This downloads a single combined spectrum and the associated error array,
        and it normalizes both. 
    '''
    # read in the allStar catalog if you haven't already
    if catalog is None:
        catalog, fibers = read_apogee_catalog()

    # Set up bad pixel mask
    badcombpixmask = bitmask.badpixmask() + 2**bitmask.apogee_pixmask_int(
        "SIG_SKYLINE")
    _COMBINED_INDEX = 1

    msk = np.where(catalog['APOGEE_ID'] == apogee_id)[0]
    if not len(msk):
        raise ValueError(
            'the desired Apogee ID was not found in the allStar catalog.')

    field = catalog['FIELD'][msk[0]].decode()
    ap_id = apogee_id.decode()
    loc_id = catalog['LOCATION_ID'][msk[0]]

    if loc_id == 1:
        temp1 = apread.apStar(field,
                              ap_id,
                              ext=1,
                              header=False,
                              aspcapWavegrid=True)
        temp2 = apread.apStar(field,
                              ap_id,
                              ext=2,
                              header=False,
                              aspcapWavegrid=True)
        temp3 = apread.apStar(field,
                              ap_id,
                              ext=3,
                              header=False,
                              aspcapWavegrid=True)
    else:
        temp1 = apread.apStar(loc_id,
                              ap_id,
                              ext=1,
                              header=False,
                              aspcapWavegrid=True)
        temp2 = apread.apStar(loc_id,
                              ap_id,
                              ext=2,
                              header=False,
                              aspcapWavegrid=True)
        temp3 = apread.apStar(loc_id,
                              ap_id,
                              ext=3,
                              header=False,
                              aspcapWavegrid=True)

    if temp1.shape[0] > 6000:
        spec = temp1
        specerr = temp2
        mask = temp3
    else:
        spec = temp1[_COMBINED_INDEX]
        specerr = temp2[_COMBINED_INDEX]
        mask = temp3[_COMBINED_INDEX]

    # Inflate uncertainties for bad pixels
    specerr[(mask & (badcombpixmask)) != 0] += 100 * np.mean(
        spec[np.isfinite(spec)])

    # Inflate pixels with high SNR to 0.5
    highsnr = spec / specerr > 200.
    specerr[highsnr] = 0.005 * np.fabs(spec[highsnr])

    # Continuum-normalize
    cont = utils.get_apogee_continuum(wavelength=wavelength,
                                      spec=spec,
                                      spec_err=specerr,
                                      cont_pixels=cont_pixels)
    spec /= cont
    specerr /= cont
    specerr[highsnr] = 0.005

    if save_local:
        np.savez('spectra/combined/spectrum_ap_id_' + str(apogee_id.decode()) +
                 '_.npz',
                 spectrum=spec,
                 spec_err=specerr)
    return spec, specerr
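The error-inflation step above adds a large constant to the uncertainty of every pixel whose bitmask overlaps badcombpixmask, and then floors the fractional error of very-high-SNR pixels at 1/200. A hedged, standalone restatement of those two steps (array and argument names are illustrative):

import numpy as np

def inflate_bad_pixels(spec, specerr, mask, badpixmask, snr_cap=200.):
    # Inflate errors where the integer pixel mask overlaps badpixmask, then cap
    # the SNR of the remaining pixels at snr_cap, as in the example above.
    specerr = specerr.copy()
    bad = (mask & badpixmask) != 0
    specerr[bad] += 100. * np.mean(spec[np.isfinite(spec)])
    highsnr = spec / specerr > snr_cap
    specerr[highsnr] = (1. / snr_cap) * np.fabs(spec[highsnr])
    return specerr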
Example #22
apogeeID = '2M05350392-0529033'

'''spec, hdr = apread.apStar(locationID,apogeeID,ext=1)
err, hdr = apread.apStar(locationID, apogeeID, ext=2)

# Clean the zero's
spec[0][spec[0] <= 0.] = np.nan

nan_vals = [i for i in range(len(spec[0])) if np.isnan(spec[0][i])]
nan_ranges = [(nan_vals[i] + 1, nan_vals[i+1]) for i in range(len(nan_vals) - 1) if nan_vals[i+1]!=nan_vals[i]+1]

print(nan_ranges)
specChunk = spec[0][nan_ranges[0][0]:nan_ranges[0][1]]
errChunk = err[0][nan_ranges[0][0]:nan_ranges[0][1]]'''

aspec= apread.apStar(locationID, apogeeID, ext=1, header=False)[1]
aspecerr= apread.apStar(locationID, apogeeID, ext=2, header=False)[1]
# Input needs to be (nspec,nwave)
aspec= np.reshape(aspec,(1,len(aspec)))
aspecerr= np.reshape(aspecerr,(1,len(aspecerr)))
# Fit the continuum
from apogee.spec import continuum
cont= continuum.fit(aspec,aspecerr,type='aspcap')

cspec= apread.aspcapStar(locationID, apogeeID,ext=1,header=False)
import apogee.spec.plot as splot
splot.waveregions(aspec[0]/cont[0])
splot.waveregions(cspec,overplot=True)

'''params = ferre.fit(locationID, apogeeID,
							teff=teff1, fixteff=True,
Example #23
# This is for VS code only.
import vsEnvironSetup
vsEnvironSetup.setVariables()

import apogee.tools.read as apread
import apogee.spec.plot as splot
import matplotlib.pyplot as plt
import numpy as np

locationID = 4611
apogeeID = '2M05350392-0529033'
rMin, rMax = 16740., 16820.


restLambda = splot.apStarWavegrid()
ind = np.argwhere(np.logical_and(restLambda > rMin, restLambda < rMax))
rMin, rMax = ind[0], ind[-1]

# Get the continuum-normalized spectrum
cspec = apread.aspcapStar(locationID, apogeeID, ext=1, header=False)
# Get visit 1
spec = apread.apStar(locationID, apogeeID, ext=1, header=False)[3]
spec[np.isnan(spec)] = 0.
specNorm = spec[rMin:rMax] / spec[rMin:rMax].max(axis=0)

# compare
plt.plot(restLambda[rMin:rMax], cspec[rMin:rMax])
plt.plot(restLambda[rMin:rMax], specNorm)
plt.draw()
plt.show()
Example #24
def targetGrid(gridParam, minimizedVisitParams, plot=True):
	'''
	The grid tests a range of effective temperatures for both stars and of the flux ratio of the
	secondary component. This is done per target.

	:param gridParam: [in/out] The GridParam of the target
	:param minimizedVisitParams: [out] The visits that share the parameters of the minimized chi2 visit
	:param plot: [in] If true makes plots to see intermediate steps (default=True)
	'''
	locationID = gridParam.locationID
	apogeeID = gridParam.apogeeID

	badheader, header = apread.apStar(locationID, apogeeID, ext=0, header=True)
	specs = apread.apStar(locationID, apogeeID, ext=1, header=False)
	specerrs = apread.apStar(locationID, apogeeID, ext=2, header=False)
	nvisits = header['NVISITS']
	
	# chi2 = np.full((nvisits, nrangeTeffA, nrangeTeffB, nrangeFluxRatio), -1.)
	#chi2 = np.full((nvisits, nrangeTeffA, nrangeTeffB, nrangeFluxRatio, nrangeRVA, nrangeRVB), -1.)
	ipg = ferre.Interpolator(lib='GK')
	ipf = ferre.Interpolator(lib='F')

	# Create file to store all the chi2 values
	path = 'lists/all_chi2/' + str(locationID) + '/'
	if not os.path.exists(path):
		os.makedirs(path)
	fn = open(path + apogeeID + '.lis', 'w')
	fn.write(gridParam.toStringHeader())
	timer = Timer()
	timeSum = 0.0
	allChi2 = []
	visitGridParamsBuffer = []
	for visit in range(1, nvisits + 1):
		timer.start()
		if (nvisits != 1):
			spec = specs[1+visit]
			specerr = specerrs[1+visit]
		else:
			spec = specs
			specerr = specerrs
		
		if (len(minimizedVisitParams) == 0):
			gridParam = GridParam(locationID, apogeeID)
			gridParam.constructParams()
			gridParam.getRVs(visit)
		else:
			gridParam = minimizedVisitParams[visit - 1]
		visitGridParamsBuffer.append(gridParam)
		
		# Prepare grid ranges
		rangeTeffA = np.arange(gridParam.minTeffA, gridParam.maxTeffA, gridParam.teffStepA)
		rangeTeffB = np.arange(gridParam.minTeffB, gridParam.maxTeffB, gridParam.teffStepB)
		rangeFluxRatio = np.arange(gridParam.minFluxRatio, gridParam.maxFluxRatio, gridParam.fluxStep)
		rangeRVA = np.arange(gridParam.minRVA, gridParam.maxRVA, gridParam.rvAStep)
		rangeRVB = np.arange(gridParam.minRVB, gridParam.maxRVB, gridParam.rvBStep)
		nrangeTeffA = len(rangeTeffA)
		nrangeTeffB = len(rangeTeffB)
		nrangeFluxRatio = len(rangeFluxRatio)
		nrangeRVA =len(rangeRVA)
		nrangeRVB =len(rangeRVB)

		chi2 = np.full((nrangeTeffA, nrangeTeffB, nrangeFluxRatio, nrangeRVA, nrangeRVB), -1.)
		print('Visit: ' + str(visit) ,'Grid dimensions: ' + str(chi2.shape))
		# Prep Spectra
		aspec= np.reshape(spec,(1, len(spec)))
		aspecerr= np.reshape(specerr,(1, len(specerr)))
		cont= spec / continuum.fit(aspec, aspecerr, type='aspcap')[0]
		conterr = specerr / continuum.fit(aspec, aspecerr, type='aspcap')[0]
		shiftedSpec = bm.shiftFlux(cont, header['VHELIO' + str(visit)])

		# Run grid
		for i in range(nrangeTeffA):
			gridParam.modelParamA.teff = rangeTeffA[i]
			componentA = bm.genComponent(gridParam.modelParamA, ipf, ipg)
			for j in range(nrangeTeffB):
				gridParam.modelParamB.teff = rangeTeffB[j]
				componentB = bm.genComponent(gridParam.modelParamB, ipf, ipg)
				for k in range(nrangeFluxRatio):
					gridParam.modelParamB.fluxRatio = rangeFluxRatio[k]
					componentBR = componentB * rangeFluxRatio[k]
					for l in range(nrangeRVA):
						gridParam.modelParamA.rv = rangeRVA[l]
						componentAS = bm.shiftFlux(componentA, rangeRVA[l])
						for m in range(nrangeRVB):
							gridParam.modelParamB.rv = rangeRVB[m]
							componentBS = bm.shiftFlux(componentBR, rangeRVB[m])
							binaryFlux = bm.combineFlux(componentAS, componentBS)
							chi2[i][j][k][l][m] = calcChi2(binaryFlux, shiftedSpec, conterr) / (len(binaryFlux) - 5.0)
							gridParam.chi2 = chi2[i][j][k][l][m]
							fn.write(gridParam.toString())
							if (plot is True):
								restLambda = splot.apStarWavegrid()
								BinPlot.plotDeltaVCheck(locationID, apogeeID, visit,
													[	[ restLambda, binaryFlux, 'blue', 'model' ],
														[ restLambda, cont, 'orange', 'unshifted' ],
														[ restLambda, shiftedSpec, 'green', 'shifted' ]],
														[gridParam.modelParamA.teff,gridParam.modelParamB.teff, gridParam.modelParamB.fluxRatio],
														'Delta V Shift', folder='grid_deltaVCheck')

		timeSum+=timer.end()
		allChi2.append(chi2)
	fn.close()

	print('Average visit time: ' + str(round(timeSum/nvisits, 2)) + 's')

	# Get the minimized values for each visit
	indices = None
	for i in range(nvisits):
		inds = getMinIndicies(allChi2[i])
		rangeTeffA = np.arange(visitGridParamsBuffer[i].minTeffA, visitGridParamsBuffer[i].maxTeffA, visitGridParamsBuffer[i].teffStepA)
		rangeTeffB = np.arange(visitGridParamsBuffer[i].minTeffB, visitGridParamsBuffer[i].maxTeffB, visitGridParamsBuffer[i].teffStepB)
		rangeFluxRatio = np.arange(visitGridParamsBuffer[i].minFluxRatio, visitGridParamsBuffer[i].maxFluxRatio, visitGridParamsBuffer[i].fluxStep)
		rangeRVA = np.arange(visitGridParamsBuffer[i].minRVA, visitGridParamsBuffer[i].maxRVA, visitGridParamsBuffer[i].rvAStep)
		rangeRVB = np.arange(visitGridParamsBuffer[i].minRVB, visitGridParamsBuffer[i].maxRVB, visitGridParamsBuffer[i].rvBStep)
		nrangeTeffA = len(rangeTeffA)
		nrangeTeffB = len(rangeTeffB)
		nrangeFluxRatio = len(rangeFluxRatio)
		nrangeRVA = len(rangeRVA)
		nrangeRVB = len(rangeRVB)
		visitGridParamsBuffer[i].setParams(i + 1, rangeTeffA[inds[0]], rangeTeffB[inds[1]], rangeFluxRatio[inds[2]],
					rangeRVA[inds[3]], rangeRVB[inds[4]], allChi2[i][inds[0]][inds[1]][inds[2]][inds[3]][inds[4]])

		if (indices is None) or (allChi2[i][inds[0]][inds[1]][inds[2]][inds[3]][inds[4]] < indices[2]):
			indices = [i + 1, inds, allChi2[i][inds[0]][inds[1]][inds[2]][inds[3]][inds[4]]]
	
	inds = getMinIndicies(allChi2)
	gridParam = visitGridParamsBuffer[inds[0]]
	return gridParam, visitGridParamsBuffer
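calcChi2 is not shown in this excerpt. The loop above divides its result by (len(binaryFlux) - 5.0), i.e. the number of model pixels minus the five gridded parameters (Teff A, Teff B, flux ratio, RV A, RV B), which is consistent with a plain chi-squared sum. A minimal sketch of such a function, assuming that definition (not necessarily the project's actual implementation):

import numpy as np

def calcChi2_sketch(model, data, dataerr):
	# Plain chi-squared over pixels where model, data, and errors are all usable;
	# the caller divides by (npix - 5) to obtain the reduced chi-squared.
	good = np.isfinite(model) & np.isfinite(data) & (dataerr > 0.)
	return np.sum(((model[good] - data[good]) / dataerr[good])**2)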
Example #25
0
def read_batch_of_spectra(batch_count, batch_size=10000):
    '''
    Download a bunch of *combined* spectra in one go. Set the uncertainties to a large
    value in bad pixels, normalize, and save the batch locally. 
    '''
    # read in the catalog
    catalog, fibers = read_apogee_catalog()
    catalog = catalog[batch_count * batch_size:(batch_count + 1) * batch_size]
    fibers = fibers[batch_count * batch_size:(batch_count + 1) * batch_size]
    _COMBINED_INDEX = 1

    nspec = len(catalog)
    spec = np.zeros((nspec, 7214))
    specerr = np.zeros((nspec, 7214))

    # Set up bad pixel mask
    badcombpixmask = bitmask.badpixmask() + 2**bitmask.apogee_pixmask_int(
        "SIG_SKYLINE")

    # loop through the individual targets
    for ii in range(nspec):
        field = catalog['FIELD'][ii].decode()
        ap_id = catalog['APOGEE_ID'][ii].decode()
        loc_id = catalog['LOCATION_ID'][ii]
        print('processing target %d with id %s' % (ii, ap_id))

        try:
            if loc_id == 1:
                temp1 = apread.apStar(field,
                                      ap_id,
                                      ext=1,
                                      header=False,
                                      aspcapWavegrid=True)
                temp2 = apread.apStar(field,
                                      ap_id,
                                      ext=2,
                                      header=False,
                                      aspcapWavegrid=True)
                temp3 = apread.apStar(field,
                                      ap_id,
                                      ext=3,
                                      header=False,
                                      aspcapWavegrid=True)
            else:
                temp1 = apread.apStar(loc_id,
                                      ap_id,
                                      ext=1,
                                      header=False,
                                      aspcapWavegrid=True)
                temp2 = apread.apStar(loc_id,
                                      ap_id,
                                      ext=2,
                                      header=False,
                                      aspcapWavegrid=True)
                temp3 = apread.apStar(loc_id,
                                      ap_id,
                                      ext=3,
                                      header=False,
                                      aspcapWavegrid=True)

            if temp1.shape[0] > 6000:
                spec[ii] = temp1
                specerr[ii] = temp2
                mask = temp3
            else:
                spec[ii] = temp1[_COMBINED_INDEX]
                specerr[ii] = temp2[_COMBINED_INDEX]
                mask = temp3[_COMBINED_INDEX]

            # Inflate uncertainties for bad pixels
            specerr[ii, (mask & (badcombpixmask)) != 0] += \
                100. * np.mean(spec[ii, np.isfinite(spec[ii])])

            # Cap the SNR at 200 by inflating errors of high-SNR pixels to 0.5% of the flux
            highsnr = spec[ii] / specerr[ii] > 200.
            specerr[ii, highsnr] = 0.005 * np.fabs(spec[ii, highsnr])

            # Continuum-normalize
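            # NOTE: `wavelength` and `cont_pixels` are assumed to be module-level
            # variables defined earlier in the source file (not shown in this excerpt).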
            cont = utils.get_apogee_continuum(wavelength=wavelength,
                                              spec=spec[ii],
                                              spec_err=specerr[ii],
                                              cont_pixels=cont_pixels)
            spec[ii] /= cont
            specerr[ii] /= cont
            specerr[ii, highsnr] = 0.005
        except OSError:
            print('target could not be found!')
            continue

    # save spectra
    np.savez('spectra/apogee_all_spectra_' + str(batch_count) + '.npz',
             wavelength=wavelength,
             spectra=spec,
             spec_err=specerr,
             apogee_id=np.array(catalog["APOGEE_ID"]),
             apogee_fiber_id=fibers)
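The batch saved above can be read back with numpy.load; the array names below match the keys passed to np.savez in read_batch_of_spectra. A minimal sketch, assuming batch 0 has already been written to the spectra/ directory:

import numpy as np

# Load a previously saved batch of continuum-normalized spectra.
batch = np.load('spectra/apogee_all_spectra_0.npz')
wavelength = batch['wavelength']            # common wavelength grid
spectra = batch['spectra']                  # (nspec, 7214) normalized fluxes
spec_err = batch['spec_err']                # matching uncertainties
apogee_id = batch['apogee_id']              # APOGEE IDs for this catalog slice
apogee_fiber_id = batch['apogee_fiber_id']  # fiber IDs from read_apogee_catalog
print(spectra.shape, spec_err.shape)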
Example #26
0
def read_spectra(cluster,teffmin=4000.,teffmax=5000.,cont_type='cannon',
                 cont_deg=4):
    """
    NAME:
       read_spectra
    PURPOSE:
       Read the APOGEE spectra and their errors for stars in a given cluster
    INPUT:
       cluster - Name of the cluster (name in one of the data files)
       teffmin= (4000.) minimum temperature
       teffmax= (5000.) maximum temperature
       cont_type = ('cannon') type of continuum normalization to perform
       cont_deg= (4) degree polynomial to fit for continuum normalization
    OUTPUT:
       (data, spec, specerr) - (full data structure, spectra [nspec,nlam], spectral uncertainties [nspec,nlam]) nlam=7214 on ASPCAP grid
    HISTORY:
       2015-08-13 - Written based on some older code - Bovy (UofT)
    """
    if cluster.upper() in _GCS:
        data= read_meszarosgcdata()
    else:
        data= read_caldata()
    # Cut to just this cluster and temperature range
    if 'rc' in cluster.lower():
        # Only for NGC 6819
        rc= True
        cluster= cluster[:-2]
    else:
        rc= False
    data= data[data['CLUSTER'] == cluster.upper()]
    data= data[(data['TEFF'] < teffmax)\
                   *(data['TEFF'] > teffmin)]
    if cluster.lower() == 'n6819':
        g4CN= good4CN(cluster,data)
        g4CN[10]= False # another one, by hand!
        if rc:
            data= data[~g4CN] # Just those!
        else:
            data= data[g4CN] # Just those!
    # Load all the spectra
    nspec= len(data)
    spec= numpy.zeros((nspec,7214))
    specerr= numpy.zeros((nspec,7214))
    # Setup bad pixel mask
    badcombpixmask= bitmask.badpixmask()\
        +2**bitmask.apogee_pixmask_int("SIG_SKYLINE")
    for ii in range(nspec):
        sys.stdout.write('\r'+"Loading spectrum %i / %i ...\r" % (ii+1,nspec))
        sys.stdout.flush()
        spec[ii]= apread.apStar(data['LOCATION_ID'][ii],
                                data['ID'][ii],
                                ext=1,header=False,
                                aspcapWavegrid=True)[_COMBINED_INDEX]
        specerr[ii]= apread.apStar(data['LOCATION_ID'][ii],
                                   data['ID'][ii],
                                   ext=2,header=False,
                                   aspcapWavegrid=True)[_COMBINED_INDEX]
        # Inflate uncertainties for bad pixels
        mask= apread.apStar(data['LOCATION_ID'][ii],
                            data['ID'][ii],
                            ext=3,header=False,
                            aspcapWavegrid=True)[_COMBINED_INDEX]
        specerr[ii,(mask & (badcombpixmask)) != 0]+=\
            100.*numpy.mean(spec[ii,~numpy.isnan(spec[ii])])
        # Also inflate pixels with high SNR to 0.5%
        highsnr= spec[ii]/specerr[ii] > 200.
        specerr[ii,highsnr]= 0.005*numpy.fabs(spec[ii,highsnr])
        # Continuum-normalize
        cont= continuum.fit(spec[ii],specerr[ii],type=cont_type,deg=cont_deg)
        spec[ii]/= cont
        specerr[ii]/= cont
        specerr[ii,highsnr]= 0.005 # like standard APOGEE reduction
    sys.stdout.write('\r'+_ERASESTR+'\r')
    sys.stdout.flush()
    return (data,spec,specerr)
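A minimal usage sketch for read_spectra, assuming the surrounding module (read_caldata, continuum, and the calibration data files) is available; the cluster name 'N6819' is the one referenced in the function body, and the keyword values are just the documented defaults:

# Read cluster spectra and errors on the 7214-pixel ASPCAP grid.
data, spec, specerr = read_spectra('N6819', teffmin=4000., teffmax=5000.,
                                   cont_type='cannon', cont_deg=4)
print(len(data), spec.shape, specerr.shape)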
Example #27
0
def get_spectra(
    name, red_clump, location
):  ###Function to read the allStar file and get the spectra, correct spectra for
    ###small and large uncertainties, remove red clump stars
    """Return cluster data, spectra, spectral errors, photometric Teffs, and bitmask from APOGEE.
	
	If the data file for the specified cluster already exists locally, 
	import the data from the file (cluster data, spectra, spectral errors, bitmask).
	If the data file does not exist, obtain the APOGEE spectra from a specified cluster 
	from the allStar catalogue, replacing ASPCAP abundances with astroNN abundances.
	
	Parameters
	----------
	name : str
		Name of desired cluster (i.e. 'NGC 2682') 
	red_clump : str
		If the red clump stars in rcsample are to be removed, set to 'True'.  If all stars are to be used,
		set to 'False'.
	location : str
		If running locally, set to 'personal'.  If running on the server, set to 'server'.
	
	Returns
	-------
	apogee_cluster_data (all stars) or apogee_cluster_data_final (red clumps removed) : structured array
		All cluster data from APOGEE
	spectra_50 (all stars) or spectra_final (red clumps removed) : tuple
		Array of floats representing the cleaned-up fluxes in the APOGEE spectra with red clump stars removed
	spectra_err_50 (all stars) or spectra_err_final (red clumps removed) : tuple
		Array of floats representing the cleaned-up spectral errors from the APOGEE spectra with red clump stars 
		removed
	good_T (all stars) or T_final (red clumps removed) : tuple
		Array of floats representing the effective temperatures of the stars in the cluster
		between 4000K and 5000K
	full_bitmask (all stars) or bitmask_final (red clumps removed) : tuple
		Array of ints (1 or 0), cleaned in the same way as the spectra, representing the bad pixels 
		in the APOGEE_PIXMASK bitmask
	"""

    #Path, strip spaces in cluster name
    if location == 'personal':  ###If running on my Mac
        path = '/Users/chloecheng/Personal/' + str(name).replace(
            ' ', '') + '.hdf5'  ###Path to folder named after cluster
    elif location == 'server':  ###If running on the server
        path = '/geir_data/scr/ccheng/AST425/Personal/' + str(name).replace(
            ' ', '') + '.hdf5'  ###Path to cluster folder

    #If the data file for this cluster exists, save the data to variables and return them
    if glob.glob(path):  ###If the file exists
        if red_clump == 'False':  ###If we're keeping all of the stars, read in the data
            file = h5py.File(path, 'r')
            apogee_cluster_data = file['apogee_cluster_data'][()]
            spectra_50 = file['spectra'][()]
            spectra_err_50 = file['spectra_errs'][()]
            good_T = file['T'][()]
            full_bitmask = file['bitmask'][()]
            file.close()
            print(name,
                  ' complete.')  ###Notification that this function is done
            return apogee_cluster_data, spectra_50, spectra_err_50, good_T, full_bitmask

        elif red_clump == 'True':  ###If we're removing the red clumps, read in the data
            file = h5py.File(path, 'r')
            apogee_cluster_data_final = file['apogee_cluster_data'][()]
            spectra_final = file['spectra'][()]
            spectra_err_final = file['spectra_errs'][()]
            T_final = file['T'][()]
            bitmask_final = file['bitmask'][()]
            file.close()
            print(name,
                  ' complete.')  ###Notification that this function is done
            return apogee_cluster_data_final, spectra_final, spectra_err_final, T_final, bitmask_final

    #If the file does not exist, get the data from APOGEE
    else:  ###If the file does not exist
        #Get red clump stars from rcsample
        rc_data = rcsample(dr='14')  ###Get the rcsample data for DR14
        rc_stars = []  ###Empty list for the stars
        for i in range(len(rc_data)):  ###Iterate through the rcsample data
            if location == 'personal':  ###If running on Mac
                rc_stars.append(
                    rc_data[i][2])  ###Append just the names of the stars
            elif location == 'server':  ###If running on server
                rc_stars.append(
                    rc_data[i][2].decode('UTF-8')
                )  ###Append just the names of the stars (decode because on server the names are bitwise for some reason)
        rc_stars = np.array(
            rc_stars)  ###Make list of red clump star names into array

        #Read in APOGEE catalogue data, removing duplicated stars and replacing ASPCAP with astroNN abundances
        apogee_cat = apread.allStar(
            use_astroNN_abundances=True
        )  ###Read the allStar file, using the astroNN abundances
        unique_apoids, unique_inds = np.unique(
            apogee_cat['APOGEE_ID'], return_index=True)  ###Get the APOGEE IDs
        apogee_cat = apogee_cat[unique_inds]  ###Get the APOGEE IDs

        #Read in overall cluster information
        cls = afits.open('occam_cluster-DR14.fits')  ###Read in the OCCAM data
        cls = cls[1].data  ###Get the cluster information

        #Read in information about cluster members
        members = afits.open(
            'occam_member-DR14.fits')  ###Read in the OCCAM members data
        members = members[1].data  ###Get the member information

        #Select all members of a given cluster
        cluster_members = (members['CLUSTER'] == name) & (
            members['MEMBER_FLAG'] == 'GM'
        )  #second part of the mask indicates to only use giant stars
        member_list = members[
            cluster_members]  ###Make a list of all member stars in the cluster

        #Find APOGEE entries for that cluster
        #numpy.in1d finds the 1D intersection between two lists.
        #In this case we're matching using the unique APOGEE ID assigned to each star
        #The indices given by numpy.in1d are for the first argument, so in this case the apogee catalogue
        cluster_inds = np.in1d((apogee_cat['APOGEE_ID']).astype('U100'),
                               member_list['APOGEE_ID']
                               )  ###Get the indices of the cluster members
        apogee_cluster_data = apogee_cat[
            cluster_inds]  ###Get the allStar data for these members
        T = photometric_Teff(
            apogee_cluster_data
        )  ###Compute the photometric effective temperature

        #Mark red clump stars in the members of the cluster as NaNs
        cluster_stars = member_list[
            'APOGEE_ID']  ###Get a list of all the names of the member stars in the cluster
        cluster_marked = np.copy(
            cluster_stars
        )  ###Create a copy of this list to mark which stars are red clumps
        for i in range(len(cluster_stars)
                       ):  ###Iterate through all of the stars in the cluster
            for j in range(len(
                    rc_stars)):  ###Iterate through all of the rcsample stars
                if cluster_stars[i] in rc_stars[
                        j]:  ###If a cluster member is also a member of the rcsample stars
                    cluster_marked[
                        i] = np.nan  ###Replace the name of that star with a NaN to ignore it

        #Get spectra, spectral errors (aspcapStar), and bitmask (apStar) for each star
        #We can use the APOGEE package to read each star's spectrum
        #We'll read in the ASPCAP spectra, which have combined all of the visits for each star and removed the gaps between the detectors
        number_of_members = len(
            member_list)  ###Number of members in the cluster
        spectra = np.zeros((number_of_members,
                            7514))  ###Create an empty array to add the spectra
        spectra_errs = np.zeros(
            (number_of_members,
             7514))  ###Create an empty array to add the spectral errors
        bitmask = np.zeros((number_of_members,
                            7514))  ###Create an empty array to add the bitmask
        for s, star in enumerate(
                apogee_cluster_data):  ###Iterate through the allStar data
            spectra[s] = apread.aspcapStar(
                star['LOCATION_ID'],
                star['APOGEE_ID'],
                ext=1,
                header=False,
                dr='14',
                aspcapWavegrid=True)  ###Get the spectra
            spectra_errs[s] = apread.aspcapStar(
                star['LOCATION_ID'],
                star['APOGEE_ID'],
                ext=2,
                header=False,
                dr='14',
                aspcapWavegrid=True)  ###Get the spectral errors
            bitmask[s] = apread.apStar(
                star['LOCATION_ID'],
                star['APOGEE_ID'],
                ext=3,
                header=False,
                dr='14',
                aspcapWavegrid=True)[1]  ###Get the bitmask

        #Set all entries in bitmask to integers
        bitmask = bitmask.astype(
            int)  ###Set all entries in the bitmask to integers
        bitmask_flip = np.zeros_like(
            bitmask
        )  ###Create an empty array for the bitmask with flipped entries
        for i in range(
                len(spectra
                    )):  ###Iterate through the number of stars in the cluster
            for j in range(7514):  ###Iterate through the wavelength range
                if bitmask[i][j] == 0:  ###If the bitmask entry is set to 0
                    bitmask_flip[i][j] = 1  ###Set it to 1
                else:  ###If the bitmask entry is not set to 0
                    bitmask_flip[i][j] = 0  ###Set it to 0
        ###The unmasked entries are always 0 in the original bitmask; flipping the mask so that unmasked pixels are 1 and masked pixels are 0 makes the masking step below straightforward

        #Remove empty spectra
        full_spectra = [
        ]  ###Empty list for the spectra sans empty ones, list not array because we don't know how many stars will be eliminated
        full_spectra_errs = [
        ]  ###Empty list for the spectral errors sans empty spectra
        full_bitmask = []  ###Empty list for bitmask sans empty spectra
        full_T = []  ###Empty list for temperatures sans empty spectra
        full_stars = []  ###Empty list for indices of stars sans empty spectra
        for i in range(len(spectra)):  ###Iterate through the number of stars
            if any(spectra[i, :] != 0
                   ):  ###For all of the rows whose entries are not all 0
                full_spectra.append(spectra[i])  ###Append those spectra
                full_spectra_errs.append(
                    spectra_errs[i])  ###Append those spectral errors
                full_bitmask.append(
                    bitmask_flip[i])  ###Append those bitmask rows
                full_T.append(T[i])  ###Append those temperatures
                full_stars.append(i)  ###Append the indices of those stars
        full_spectra = np.array(full_spectra)  ###Make list into array
        full_spectra_errs = np.array(
            full_spectra_errs)  ###Make list into array
        full_bitmask = np.array(full_bitmask)  ###Make list into array
        full_T = np.array(full_T)  ###Make list into array
        full_stars = np.array(full_stars)  ###Make list into array
        full_marked_stars = cluster_marked[
            full_stars]  ###Use array of stars left to index marked stars so we know which ones are red clump stars

        #Create array of NaNs to replace flagged values in spectra
        masked_spectra = np.empty_like(
            full_spectra
        )  ###Create an empty array that is the same shape as full_spectra
        masked_spectra_errs = np.empty_like(
            full_spectra_errs
        )  ###Create an empty array that is the same shape as full_spectra_errs
        masked_spectra[:] = np.nan  ###Set all of the entries to NaNs
        masked_spectra_errs[:] = np.nan  ###Set all of the entries to NaNs

        #Mask the spectra
        for i in range(
                len(full_spectra)):  ###Iterate through the number of stars
            for j in range(7514):  ###Iterate through the wavelength range
                if full_bitmask[i][
                        j] != 0:  ###If the bitmask is not 0 (i.e. if the bit is unmasked)
                    masked_spectra[i][j] = full_spectra[i][
                        j]  ###Retain the value of the unmasked spectra here
                    masked_spectra_errs[i][j] = full_spectra_errs[i][
                        j]  ###Retain the value of the unmasked spectral errors here
        ###All of the masked bits that were not captured by this if statement will remain NaNs and will thus be ignored

        #Cut stars that are outside of the temperature limits
        good_T_inds = (full_T > 4000) & (
            full_T < 5000
        )  ###Get the indices of the temperatures that are between 4000K and 5000K
        final_spectra = masked_spectra[
            good_T_inds]  ###Index the spectra to only keep stars that are within the temperature limits
        final_spectra_errs = masked_spectra_errs[
            good_T_inds]  ###Index the spectral errors to only keep stars within Teff limits
        good_T = full_T[
            good_T_inds]  ###Index the temperatures to keep only stars within Teff limits
        apogee_cluster_data = apogee_cluster_data[
            good_T_inds]  ###Index the allStar data to keep stars only within Teff limits
        full_bitmask = full_bitmask[
            good_T_inds]  ###Index the bitmask to keep stars only within Teff limits
        final_stars = full_marked_stars[
            good_T_inds]  ###Index the array of red-clump-marked stars to keep only those within Teff limits
        rgs = (final_stars != 'nan'
               )  #Get indices for final red giant stars to be used

        #Cap the SNR at 200: if a pixel's SNR exceeds 200, inflate its error so the SNR is exactly 200
        spectra_err_200 = np.zeros_like(
            final_spectra_errs
        )  ###Create an empty array to add corrected spectral errors to - shape will not change, just altering values
        for i in range(len(final_spectra)):  ###Iterate through the stars
            for j in range(7514):  ###Iterate through wavelength range
                if final_spectra[i][j] / final_spectra_errs[i][
                        j] <= 200:  ###If errors are of a reasonable size
                    spectra_err_200[i][j] = final_spectra_errs[i][
                        j]  ###Leave them as they are
                else:  ###If errors are too small
                    spectra_err_200[i][j] = final_spectra[i][
                        j] / 200  ###Make them a bit bigger

        #Mask pixels with an SNR of 50 or less
        spectra_50 = np.copy(
            final_spectra
        )  ###Create a copy of the spectra to cut large error pixels
        spectra_err_50 = np.copy(
            spectra_err_200
        )  ###Create a copy of the spectral errors to cut large error pixels

        for i in range(len(final_spectra)):  ###Iterate through stars
            for j in range(7514):  ###Iterate through wavelength range
                if final_spectra[i][j] / spectra_err_200[i][
                        j] <= 50:  ###If an error is too big
                    spectra_50[i][
                        j] = np.nan  ###Set the corresponding entry in the spectra to be a NaN, will be ignored
                    spectra_err_50[i][
                        j] = np.nan  ###Set the corresponding entry in the spectral errors to be a NaN, will be ignored

        #Cut red clumps
        logg = apogee_cluster_data[
            'LOGG']  ###Get the logg values for the cluster (all corrections have been applied)
        apogee_cluster_data_final = apogee_cluster_data[
            rgs]  ###Get the allStar data for the RGB stars only (no red clumps)
        spectra_final = spectra_50[
            rgs]  ###Get the spectra for the RGB stars only
        spectra_err_final = spectra_err_50[
            rgs]  ###Get the spectral errors for the RGB stars only
        T_final = good_T[rgs]  ###Get the temperatures for the RGB stars only
        bitmask_final = full_bitmask[
            rgs]  ###Get the bitmask for the RGB stars only

        if red_clump == 'False':  ###If we are looking at all of the stars, save all data before red clumps were cut to file
            #Write to file
            file = h5py.File(path, 'w')
            file['apogee_cluster_data'] = apogee_cluster_data
            file['spectra'] = spectra_50
            file['spectra_errs'] = spectra_err_50
            file['T'] = good_T
            file['bitmask'] = full_bitmask
            file.close()
            print(name, 'complete')  ###Notification that this function is done

            return apogee_cluster_data, spectra_50, spectra_err_50, good_T, full_bitmask

        elif red_clump == 'True':  ###If we are removing the red clump stars, save the data after red clumps cut to file
            #Write to file
            file = h5py.File(path, 'w')
            file['apogee_cluster_data'] = apogee_cluster_data_final
            file['spectra'] = spectra_final
            file['spectra_errs'] = spectra_err_final
            file['T'] = T_final
            file['bitmask'] = bitmask_final
            file.close()
            print(name, 'complete')  ###Notification that this function is done

            return apogee_cluster_data_final, spectra_final, spectra_err_final, T_final, bitmask_final
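A minimal usage sketch for get_spectra, using the example values given in its docstring (a personal machine, keeping all stars) and assuming the rcsample, allStar, and OCCAM files are accessible; the first call builds the HDF5 cache and later calls read from it:

# Returns cluster data, cleaned spectra and errors, photometric Teffs, and the flipped bitmask.
apogee_cluster_data, spectra_50, spectra_err_50, good_T, full_bitmask = get_spectra(
    'NGC 2682', red_clump='False', location='personal')
print(spectra_50.shape, good_T.min(), good_T.max())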
Example #28
0
def read_spectra(cluster,
                 teffmin=4000.,
                 teffmax=5000.,
                 cont_type='cannon',
                 cont_deg=4):
    """
    NAME:
       read_spectra
    PURPOSE:
       Read the APOGEE spectra and their errors for stars in a given cluster
    INPUT:
       cluster - Name of the cluster (name in one of the data files)
       teffmin= (4000.) minimum temperature
       teffmax= (5000.) maximum temperature
       cont_type = ('cannon') type of continuum normalization to perform
       cont_deg= (4) degree polynomial to fit for continuum normalization
    OUTPUT:
       (data, spec, specerr) - (full data structure, spectra [nspec,nlam], spectral uncertainties [nspec,nlam]) nlam=7214 on ASPCAP grid
    HISTORY:
       2015-08-13 - Written based on some older code - Bovy (UofT)
    """
    if cluster.upper() in _GCS:
        data = read_meszarosgcdata()
    else:
        data = read_caldata()
    # Cut to just this cluster and temperature range
    if 'rc' in cluster.lower():
        # Only for NGC 6819
        rc = True
        cluster = cluster[:-2]
    else:
        rc = False
    data = data[data['CLUSTER'] == cluster.upper()]
    data= data[(data['TEFF'] < teffmax)\
                   *(data['TEFF'] > teffmin)]
    if cluster.lower() == 'n6819':
        g4CN = good4CN(cluster, data)
        g4CN[10] = False  # another one, by hand!
        if rc:
            data = data[~g4CN]  # Just those!
        else:
            data = data[g4CN]  # Just those!
    # Load all the spectra
    nspec = len(data)
    spec = numpy.zeros((nspec, 7214))
    specerr = numpy.zeros((nspec, 7214))
    # Setup bad pixel mask
    badcombpixmask= bitmask.badpixmask()\
        +2**bitmask.apogee_pixmask_int("SIG_SKYLINE")
    for ii in range(nspec):
        sys.stdout.write('\r' + "Loading spectrum %i / %i ...\r" %
                         (ii + 1, nspec))
        sys.stdout.flush()
        spec[ii] = apread.apStar(data['LOCATION_ID'][ii],
                                 data['ID'][ii],
                                 ext=1,
                                 header=False,
                                 aspcapWavegrid=True)[_COMBINED_INDEX]
        specerr[ii] = apread.apStar(data['LOCATION_ID'][ii],
                                    data['ID'][ii],
                                    ext=2,
                                    header=False,
                                    aspcapWavegrid=True)[_COMBINED_INDEX]
        # Inflate uncertainties for bad pixels
        mask = apread.apStar(data['LOCATION_ID'][ii],
                             data['ID'][ii],
                             ext=3,
                             header=False,
                             aspcapWavegrid=True)[_COMBINED_INDEX]
        specerr[ii,(mask & (badcombpixmask)) != 0]+=\
            100.*numpy.mean(spec[ii,~numpy.isnan(spec[ii])])
        # Also inflate pixels with high SNR to 0.5%
        highsnr = spec[ii] / specerr[ii] > 200.
        specerr[ii, highsnr] = 0.005 * numpy.fabs(spec[ii, highsnr])
        # Continuum-normalize
        cont = continuum.fit(spec[ii],
                             specerr[ii],
                             type=cont_type,
                             deg=cont_deg)
        spec[ii] /= cont
        specerr[ii] /= cont
        specerr[ii, highsnr] = 0.005  # like standard APOGEE reduction
    sys.stdout.write('\r' + _ERASESTR + '\r')
    sys.stdout.flush()
    return (data, spec, specerr)
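The bad-pixel handling in both read_spectra examples combines the standard APOGEE_PIXMASK bad-pixel bits with the SIG_SKYLINE bit and then tests each pixel with a bitwise AND. A small self-contained sketch of that test; the bit numbers here are hypothetical placeholders, since the real values come from bitmask.badpixmask and bitmask.apogee_pixmask_int:

import numpy as np

# Hypothetical bit numbers for illustration only.
BADPIX_BIT, SIG_SKYLINE_BIT = 0, 12
badcombpixmask = 2**BADPIX_BIT + 2**SIG_SKYLINE_BIT

# One mask value per pixel; a pixel is bad if any flagged bit overlaps the combined mask.
mask = np.array([0, 2**BADPIX_BIT, 2**SIG_SKYLINE_BIT, 2**3])
bad = (mask & badcombpixmask) != 0
print(bad)  # [False  True  True False]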