def checkDistances(inFileName, angDiamFileName, outFileName):
    # Compare the HASH search results in <inFileName> against the angular-diameter table:
    # matches farther away than the object's major diameter (or 50" if no in-use diameter
    # is found) are blanked out, and matches within 800" of the object with idPNMain 3238
    # (RA 17:59:45.20, DEC -33:21:13.00) are re-assigned to 3238.
    # Relies on the module-level hashSearchFileName and on hmsToDeg/dmsToDeg/angularDistance.
    hashSearch = csvFree.readCSVFile(hashSearchFileName[:hashSearchFileName.rfind('.')] + '_with_header.csv')
    hashFound = csvFree.readCSVFile(inFileName)
    angDiams = csvFree.readCSVFile(angDiamFileName)
    csvOut = csvData.CSVData()
    csvOut.header = hashFound.header
    nNotFound = 0
    ra_3238 = hmsToDeg('17:59:45.20')
    dec_3238 = dmsToDeg('-33:21:13.00')
    toDelete = []
    for i in range(hashFound.size()):
        name = hashFound.getData('id', i)
        idPNMain = hashFound.getData('pndb', i)
        if idPNMain == '':
            csvOut.append(hashFound.getData(i))
        else:
            dist = float(hashFound.getData('dist[arcsec]', i))
            found = False
            for j in range(angDiams.size()):
                if angDiams.getData('idPNMain', j) == idPNMain:
                    if angDiams.getData('InUse', j) == '1':
                        found = True
                        if dist > float(angDiams.getData('MajDiam', j)):
                            csvOut.append([name, '', ''])
                        else:
                            csvOut.append(hashFound.getData(i))
            if not found:
                nNotFound += 1
                print('Problem: did not find an angular diameter for <' + name + '>: idPNMain = ', idPNMain, ', dist = ', dist)
                if dist > 50.:
                    csvOut.append([name, '', ''])
                else:
                    csvOut.append(hashFound.getData(i))
            for j in range(hashSearch.size()):
                if hashSearch.getData('id', j) == csvOut.getData('id', csvOut.size() - 1):
                    ra = hmsToDeg(hashSearch.getData('ra', j))
                    dec = dmsToDeg(hashSearch.getData('dec', j))
                    angDist = angularDistance(ra, dec, ra_3238, dec_3238)
                    print('ra = ', ra, ', dec = ', dec, ': angDist = ', angDist)
                    angDistPyAsl = pyasl.getAngDist(ra, dec, ra_3238, dec_3238) * 3600.
                    if angDist < 800:
                        if csvOut.getData('pndb', csvOut.size() - 1) != '3238':
                            toDelete.append([csvOut.getData('pndb', csvOut.size() - 1), ra, dec, angDist, angDistPyAsl])
                            csvOut.setData('pndb', csvOut.size() - 1, '3238')
    csvFree.writeCSVFile(csvOut, outFileName)
    for i in toDelete:
        print('toDelete : ', i)
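
# checkDistances() relies on hmsToDeg(), dmsToDeg() and angularDistance() from myUtils,
# which are not part of this section. A minimal sketch of what angularDistance() is
# assumed to do here (great-circle separation of two RA/DEC positions given in degrees,
# returned in arcseconds, to match the comparison against pyasl.getAngDist()*3600).
# This is an illustration under that assumption, not the myUtils implementation:
import numpy as np

def angularDistanceSketch(ra1Deg, dec1Deg, ra2Deg, dec2Deg):
    # haversine formula; numerically stable for small separations
    ra1, dec1, ra2, dec2 = [np.radians(x) for x in (ra1Deg, dec1Deg, ra2Deg, dec2Deg)]
    a = np.sin((dec2 - dec1) / 2.)**2 + np.cos(dec1) * np.cos(dec2) * np.sin((ra2 - ra1) / 2.)**2
    return np.degrees(2. * np.arcsin(np.sqrt(a))) * 3600.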
def readFiles():
    # Read every file in the module-level list <files> and concatenate them into one
    # CSVData object; the header is taken from the first file.
    csv = csvData.CSVData()
    for iFile in np.arange(0, len(files), 1):
        dataTemp = readFile(files[iFile])
        if iFile == 0:
            csv.header = dataTemp[0]
            csv.data = dataTemp[1:]
        else:
            csv.append(dataTemp[1:])
    return csv
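
# readFiles() assumes a module-level list <files> and a helper readFile() that are not
# shown in this section. A minimal sketch of a compatible readFile(), assuming plain
# comma-separated text files whose first row is the header (hypothetical helper name,
# illustration only):
def readFileSketch(fileName):
    # return a list of rows, each row a list of column values; row 0 is the header
    with open(fileName, 'r') as f:
        return [line.strip().split(',') for line in f if line.strip() != '']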
def findClosestMatch(hashFoundFile, outFileName):
    # Keep only one row per 'id': if an id occurs more than once in <hashFoundFile>,
    # keep the entry with the smallest dist[arcsec].
    hashFound = csvFree.readCSVFile(hashFoundFile)
    closestMatch = csvData.CSVData()
    closestMatch.header = hashFound.header
    hashFoundNames = np.array([str(n) for n in hashFound.getData('id')])
    print('hashFoundNames = ', hashFoundNames)
    nDoubles = 0
    for name in hashFoundNames:
        name = str(name)
        idx = np.where(hashFoundNames == name)[0]
        if len(idx) == 0:
            # debugging output: this should never happen; compare the raw bytes of <name>
            # to the first entry to see where the string comparison fails
            arr = bytes(name, 'utf-8')
            arr2 = bytes(hashFoundNames[0], 'utf-8')
            for byte in arr:
                print(byte, end=' ')
            print("\n")
            for byte in arr2:
                print(byte, end=' ')
            print("\n")
            if name == hashFoundNames[0]:
                print('both are the same')
            else:
                print('both are still not the same')
            if str(name) == str(hashFoundNames[0]):
                print('both are the same if using unicode')
            else:
                print('both are still not the same if using unicode')
        print('found name <' + name + '> ', len(idx), ' times in indices ', idx)
        lineToAdd = ''
        if len(idx) == 1:
            lineToAdd = hashFound.getData(idx[0])
        else:
            # collect the distances of all duplicates, remembering which duplicate each
            # distance belongs to (entries with an empty distance are skipped)
            dists = []
            distIdx = []
            for i in range(len(idx)):
                dist = hashFound.getData('dist[arcsec]', idx[i])
                if dist != '':
                    dists.append(float(dist))
                    distIdx.append(i)
            if len(dists) > 0:
                minId = distIdx[int(np.argmin(dists))]
            else:
                minId = 0
            print('idx[', minId, '] = ', idx[minId])
            lineToAdd = hashFound.getData(idx[minId])
        closestMatchNames = closestMatch.getData('id')
        if name not in closestMatchNames:
            print('lineToAdd = ', lineToAdd)
            closestMatch.append(lineToAdd)
        else:
            print('name <' + name + '> already in closestMatch')
            nDoubles += 1
    print('nDoubles = ', nDoubles)
    csvFree.writeCSVFile(closestMatch, outFileName)
def combineDiscrepancies():
    # Merge the two discrepancy tables (columns NOM / AD (J2000) / DEC (J2000), i.e.
    # name, right ascension and declination) into one CSV with columns Name, RA, DEC.
    discrepanciesA = readCSVFile(discrepanciesAFile)
    discrepanciesB = readCSVFile(discrepanciesBFile)
    csvOut = csvData.CSVData()
    csvOut.header = ['Name', 'RA', 'DEC']
    for c in [discrepanciesA, discrepanciesB]:
        for i in range(c.size()):
            lineOut = [c.getData('NOM', i), c.getData('AD:(J2000)', i), c.getData('DEC (J2000)', i)]
            csvOut.append(lineOut)
    print('csvOut.size() = ', csvOut.size())
    return csvOut
def checkCombinedDiscrepancies(combinedDiscrepancies, allHASHobjects, hashCommonNames):
    # For each discrepant object, first try to find it in HASH by name. If that fails,
    # look for HASH objects within maxAngularDistance arcsec of its position and record
    # those that do not already carry the name as a common name.
    csvOut = csvData.CSVData()
    csvOut.header = ['Name', 'idPNMain', 'HASH common names', 'RA FRA', 'RA HASH', 'DEC FRA', 'DEC HASH', 'angular distance [arcsec]']
    csvOut.data = []
    emptyData = ['', '', '', '', '', '', '', '']
    for i in range(combinedDiscrepancies.size()):
        ra = combinedDiscrepancies.getData('RA', i)
        dec = combinedDiscrepancies.getData('DEC', i)
        raDeg = hmsToDeg(ra)
        decDeg = dmsToDeg(dec)
        name = getName(combinedDiscrepancies, 'Name', i)
        print('name = <' + name + '>')
        found = False
        for j in range(allHASHobjects.size()):
            if name == allHASHobjects.getData('Name', j).replace(' ', ''):
                hashID = allHASHobjects.getData('idPNMain', j)
                angDist = degToArcsec(angularDistancePyAsl(raDeg, decDeg, hmsToDeg(allHASHobjects.getData('RAJ2000', j)), dmsToDeg(allHASHobjects.getData('DECJ2000', j))))
                commonNames = getCommonNames(hashCommonNames, 'idPNMain', hashID)
                print('Name = ' + name + ': HASH ID = ', hashID, ': commonNames = ', commonNames, ': ra = ', ra, ', RA = ', allHASHobjects.getData('RAJ2000', j), ', dec = ', dec, ', DEC = ', allHASHobjects.getData('DECJ2000', j), ', angDist = ', angDist)
                found = True
        if not found:
            print('ERROR: object with name <' + name + '> not found in HASH')
            for j in range(allHASHobjects.size()):
                angDist = degToArcsec(angularDistancePyAsl(raDeg, decDeg, hmsToDeg(allHASHobjects.getData('RAJ2000', j)), dmsToDeg(allHASHobjects.getData('DECJ2000', j))))
                if angDist < maxAngularDistance:
                    hashID = allHASHobjects.getData('idPNMain', j)
                    commonNames = getCommonNames(hashCommonNames, 'idPNMain', hashID)
                    if name in commonNames:
                        print('Name = ' + name + ': HASH ID = ', hashID, ': commonNames = ', commonNames, ': ra = ', ra, ', RA = ', allHASHobjects.getData('RAJ2000', j), ', dec = ', dec, ', DEC = ', allHASHobjects.getData('DECJ2000', j), ', angDist = ', angDist)
                        found = True
                    else:
                        print('Name = <' + name + '>: object found within ', maxAngularDistance, ' arcsec: angDist = ', angDist, ': ra = ', ra, ', RA = ', allHASHobjects.getData('RAJ2000', j), ', dec = ', dec, ', DEC = ', allHASHobjects.getData('DECJ2000', j), ', HASH name = <' + allHASHobjects.getData('Name', j) + '>')
                        csvOut.append(emptyData)
                        csvOut.setData('Name', csvOut.size() - 1, name)
                        csvOut.setData('idPNMain', csvOut.size() - 1, hashID)
                        cNames = commonNames[0]
                        for k in np.arange(1, len(commonNames), 1):
                            cNames += ';' + commonNames[k]
                        csvOut.setData('HASH common names', csvOut.size() - 1, cNames)
                        csvOut.setData('RA FRA', csvOut.size() - 1, ra)
                        csvOut.setData('RA HASH', csvOut.size() - 1, allHASHobjects.getData('RAJ2000', j))
                        csvOut.setData('DEC FRA', csvOut.size() - 1, dec)
                        csvOut.setData('DEC HASH', csvOut.size() - 1, allHASHobjects.getData('DECJ2000', j))
                        csvOut.setData('angular distance [arcsec]', csvOut.size() - 1, str(angDist))
    writeCSVFile(csvOut, disrepanciesOutFileName)
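
# checkCombinedDiscrepancies() uses angularDistancePyAsl() and degToArcsec(), which are
# defined elsewhere (myUtils). A minimal sketch of what they are assumed to do, based on
# PyAstronomy's pyasl.getAngDist() (takes and returns degrees); hypothetical names,
# illustration only:
from PyAstronomy import pyasl

def angularDistancePyAslSketch(ra1Deg, dec1Deg, ra2Deg, dec2Deg):
    # great-circle separation in degrees
    return pyasl.getAngDist(ra1Deg, dec1Deg, ra2Deg, dec2Deg)

def degToArcsecSketch(angleDeg):
    return angleDeg * 3600.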
def fixHashFile():
    # Re-derive the decimal central-star coordinates for rows whose CS_DECJ2000 string
    # starts with '-0' (objects between 0 and -1 degree declination, where the sign is
    # easily lost when converting to decimal degrees) and write the corresponding SQL
    # UPDATE statements to <outputSQLFile>.
    with open(outputSQLFile, 'w') as w:
        csvHash = csvData.CSVData()
        with open(inputHashFile, 'r') as f:
            hashData = csv.DictReader(f)
            print('hashData.fieldnames = ', hashData.fieldnames)
            csvHash.header = hashData.fieldnames
        with open(inputHashFile, 'r') as f:
            hashData = csv.DictReader(f)
            print('hashData.fieldnames = ', hashData.fieldnames)
            nRows = 0
            for row in hashData:
                nRows += 1
                csvHash.append([row[x] for x in hashData.fieldnames])
                # if row['CS_DRAJ2000'] == 'NULL':
                print("row['CS_DECJ2000'].split(':')[0][:2] = <" + row['CS_DECJ2000'].split(':')[0][:2] + ">")
                if row['CS_DECJ2000'].split(':')[0][:2] == '-0':
                    print('row = ', row)
                    dra = hmsToDeg(csvHash.getData('CS_RAJ2000', csvHash.size() - 1))
                    ddec = dmsToDeg(csvHash.getData('CS_DECJ2000', csvHash.size() - 1))
                    lon, lat = raDecToLonLat(dra, ddec)
                    csvHash.setData('CS_DRAJ2000', csvHash.size() - 1, str(dra))
                    csvHash.setData('CS_DDECJ2000', csvHash.size() - 1, str(ddec))
                    csvHash.setData('CS_Glon', csvHash.size() - 1, str(lon))
                    csvHash.setData('CS_Glat', csvHash.size() - 1, str(lat))
                    w.write("UPDATE MainGPN.tbCSCoords SET CS_DRAJ2000 = " + str(dra)
                            + ", CS_DDECJ2000 = " + str(ddec)
                            + ", CS_Glon = " + str(lon)
                            + ", CS_Glat = " + str(lat)
                            + ", CSstat = 'p' WHERE idtbCSCoords = "
                            + csvHash.getData('idtbCSCoords', csvHash.size() - 1) + ";\n")
    return csvHash
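
# fixHashFile() converts the corrected equatorial coordinates to Galactic ones via
# raDecToLonLat() from myUtils, which is not shown here. A minimal sketch of an
# equivalent conversion using astropy (assuming ICRS input in degrees); hypothetical
# helper name, illustration only:
from astropy.coordinates import SkyCoord
import astropy.units as u

def raDecToLonLatSketch(raDeg, decDeg):
    # equatorial J2000/ICRS (degrees) -> Galactic longitude and latitude (degrees)
    c = SkyCoord(ra=raDeg * u.deg, dec=decDeg * u.deg, frame='icrs')
    return c.galactic.l.degree, c.galactic.b.degree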
import csvData, csvFree

# Split the HASH 'no show' table by PNstat: true/likely/possible PNe (T/L/P),
# new candidates (c), and everything else.
inFileName = '/Users/azuri/daten/uni/HKU/HASH/hash-no-show.csv'
inData = csvFree.readCSVFile(inFileName)
print('inData.size() = ', inData.size())

pnStat = inData.getData('PNstat')
print('pnStat = ', pnStat)

tlps = csvData.CSVData()
tlps.header = inData.header
newCandidates = csvData.CSVData()
newCandidates.header = inData.header
others = csvData.CSVData()
others.header = inData.header

for i in range(inData.size()):
    pnStat = inData.getData('PNstat', i)
    if pnStat in ['T', 'L', 'P']:
        tlps.append(inData.getData(i))
    elif pnStat == 'c':
        newCandidates.append(inData.getData(i))
    else:
        others.append(inData.getData(i))

csvFree.writeCSVFile(tlps, inFileName[:-4] + '_TLPs.csv')
csvFree.writeCSVFile(newCandidates, inFileName[:-4] + '_newCandidates.csv')
csvFree.writeCSVFile(others, inFileName[:-4] + '_others.csv')
import numpy as np
import csvFree, csvData

eventRegistrationFile = '/Users/azuri/daten/parties/Wild Wood/contacts/Completed orders export.csv'
oldContactsFile = '/Users/azuri/daten/parties/Wild Wood/contacts/contacts.csv'
contactsToImport = '/Users/azuri/daten/parties/Wild Wood/contacts/contacts_to_import.csv'
existingContactsFile = '/Users/azuri/daten/parties/Wild Wood/existing_contacts.csv'
volunteersFile = '/Users/azuri/daten/parties/Wild Wood/Wild Woods Volunteers Questionnaire.csv'

csvEventRegistration = csvFree.readCSVFile(eventRegistrationFile)
csvOldContacts = csvFree.readCSVFile(oldContactsFile)
existingContacts = csvFree.readCSVFile(existingContactsFile)
volunteers = csvFree.readCSVFile(volunteersFile)

csvContactsToImport = csvData.CSVData()
csvContactsToImport.header = csvOldContacts.header
newLine = ['' for i in csvOldContacts.header]
print('csvEventRegistration.header = ', csvEventRegistration.header)

iContact = 0
names = []
for i in range(csvEventRegistration.size()):
    name = csvEventRegistration.getData('First Name (Billing)', i) + ' ' + csvEventRegistration.getData('Last Name (Billing)', i) + ' Wild Wood Guest'
    print('name = ', name)
    if name not in names:
        print(name + ' not found')
        names.append(name)
import numpy as np
import csvFree, csvData
from myUtils import angularDistance, hmsToDeg, dmsToDeg

# Parse the Weidmann (2020) central-star parameter table into a CSVData object.
f1name = '/Users/azuri/daten/uni/HKU/interns_projects/simba/Weidman-table1.csv'
f2name = '/Users/azuri/daten/uni/HKU/interns_projects/simba/Weidmann2020-cs-parameters.txt'
sqlFileOut = '/Users/azuri/daten/uni/HKU/interns_projects/simba/Weidman-table1.sql'
hashFile = '/Users/azuri/daten/uni/HKU/HASH/PNMain_full_June-30-2020.csv'
csvHash = csvFree.readCSVFile(hashFile)
simbaFile = '/Users/azuri/daten/uni/HKU/interns_projects/simba/All list v1.csv'
#simbaFile = '/Users/azuri/daten/uni/HKU/interns_projects/simba/simba_table.csv'
csvSimba = csvFree.readCSVFile(simbaFile)

with open(f2name, encoding="utf8", errors='ignore') as f:
    lines = f.readlines()

csv2 = csvData.CSVData()
csv2.header = ['PNG', 'log g', 'ref log g', 'met', 'log T', 'ref log T', 'log (L_star/L_sun)', 'ref log(L_star/L_sun)', 'mag', 'ref mag']

for iLine in np.arange(1, len(lines), 1):
    data = []
    lines[iLine] = lines[iLine].replace('\r', '')
    lines[iLine] = lines[iLine].replace('\n', '')
    lines[iLine] = lines[iLine].replace('}', '+-')   # '}' apparently marks a +- sign in the text export
    # join the magnitude band letters (v, r, b, i) to the value that follows them
    lines[iLine] = lines[iLine].replace(' v ', ' v')
    lines[iLine] = lines[iLine].replace(' r ', ' r')
    lines[iLine] = lines[iLine].replace(' b ', ' b')
    lines[iLine] = lines[iLine].replace(' i ', ' i')
    data = lines[iLine].split(' ')
    if len(data) < len(csv2.header):
# Concatenate the three Calern candidate tables and write them out sorted by idPNMain.
csvA = csvFree.readCSVFile('/Users/azuri/daten/uni/HKU/HASH/ziggy_Calern_PN_candidates_May2019.csv')
csvA.append(csvFree.readCSVFile('/Users/azuri/daten/uni/HKU/HASH/ziggy_Calern_PN_candidates_May2019_II.csv'))
csvA.append(csvFree.readCSVFile('/Users/azuri/daten/uni/HKU/HASH/ziggy_Calern_PN_candidates_May2019_III.csv'))

sortedIDs = np.sort(csvFree.convertStringVectorToUnsignedVector(csvA.getData('idPNMain')))
print('sortedIDs = ', sortedIDs)

fNameOut = '/Users/azuri/daten/uni/HKU/HASH/ziggy_Calern_PN_candidates_May2019_out.csv'
csvOut = csvData.CSVData()
csvOut.header = csvA.header
for i in np.arange(0, len(sortedIDs), 1):
    found = csvA.find('idPNMain', str(sortedIDs[i]))
    print('i = ', i, ': found = ', found)
    csvOut.append(csvA.getData(found))
print('csvOut.size() = ', csvOut.size())
csvFree.writeCSVFile(csvOut, fNameOut)
import csvFree, csvData

csv = csvData.CSVData()
csv.header = ['fiber', 'centerDistanceX', 'centerDistanceY', 'Halpha6563a', 'Halpha6563b', 'OIII5007a', 'OIII5007b', 'SII6716a', 'SII6731a', 'SII6716b', 'SII6731b', 'ArIII7136']
tab = [
    #{'fiber': 1135, 'centerDistanceX': -85.12, 'centerDistanceY': -85.12, 'SII6716a': 0.0, 'SII6731a': 0.0, 'SII6716b': 6724.81, 'SII6731b': 6737.28, 'unidentified1': 0.0, 'ArIII7136': 0.0, 'unidentified2': 0.0},
    {'fiber': 1, 'centerDistanceX': 9.85, 'centerDistanceY': 0.0, 'Halpha6563a': [0.0,0.0,0.0], 'Halpha6563b': [0.0,0.0,0.0], 'OIII5007a': [0.0,0.0,0.0], 'OIII5007b': [0.0,0.0,0.0], 'SII6716a': [6696.62,18289.,20.9], 'SII6731a': [0.0,0.0,0.0], 'SII6716b': [0.0,0.0,0.0], 'SII6731b': [6754.51,10737.,23.6], 'ArIII7136': [0.0,0.0,0.0]},
    {'fiber': 2, 'centerDistanceX': -29.56, 'centerDistanceY': 61.92, 'Halpha6563a': [0.0,0.0,0.0], 'Halpha6563b': [0.0,0.0,0.0], 'OIII5007a': [0.0,0.0,0.0], 'OIII5007b': [0.0,0.0,0.0], 'SII6716a': [6713.3,7149.0,5.672], 'SII6731a': [6726.07,7619.0,7.832], 'SII6716b': [0.0,0.0,0.0], 'SII6731b': [0.0,0.0,0.0], 'ArIII7136': [0.0,0.0,0.0]},
    {'fiber': 3, 'centerDistanceX': 34.49, 'centerDistanceY': 25.33, 'Halpha6563a': [0.0,0.0,0.0], 'Halpha6563b': [0.0,0.0,0.0], 'OIII5007a': [0.0,0.0,0.0], 'OIII5007b': [0.0,0.0,0.0], 'SII6716a': [0.0,0.0,0.0], 'SII6731a': [0.0,0.0,0.0], 'SII6716b': [6730.48,3930.0,5.12], 'SII6731b': [6745.08,6399.0,6.95], 'ArIII7136': [0.0,0.0,0.0]},
    {'fiber': 4, 'centerDistanceX': 24.64, 'centerDistanceY': 8.44, 'Halpha6563a': [0.0,0.0,0.0], 'Halpha6563b': [0.0,0.0,0.0], 'OIII5007a': [0.0,0.0,0.0], 'OIII5007b': [0.0,0.0,0.0], 'SII6716a': [6703.77,4931.0,8.954], 'SII6731a': [6721.79,9516.0,13.13], 'SII6716b': [0.0,0.0,0.0], 'SII6731b': [0.0,0.0,0.0], 'ArIII7136': [0.0,0.0,0.0]},
    {'fiber': 5, 'centerDistanceX': 29.56, 'centerDistanceY': 16.89, 'Halpha6563a': [0.0,0.0,0.0], 'Halpha6563b': [0.0,0.0,0.0], 'OIII5007a': [0.0,0.0,0.0], 'OIII5007b': [0.0,0.0,0.0], 'SII6716a': [6716.82,2505.0,4.993], 'SII6731a': [0.0,0.0,0.0], 'SII6716b': [0.0,0.0,0.0], 'SII6731b': [6746.48,12718.0,21.9], 'ArIII7136': [0.0,0.0,0.0]},
    {'fiber': 6, 'centerDistanceX': 29.56, 'centerDistanceY': 33.77, 'Halpha6563a': [0.0,0.0,0.0], 'Halpha6563b': [0.0,0.0,0.0], 'OIII5007a': [0.0,0.0,0.0], 'OIII5007b': [0.0,0.0,0.0], 'SII6716a': [6702.26,76862.0,9.071], 'SII6731a': [6717.98,7442.0,9.852], 'SII6716b': [0.0,0.0,0.0], 'SII6731b': [0.0,0.0,0.0], 'ArIII7136': [0.0,0.0,0.0]},
    {'fiber': 7, 'centerDistanceX': 24.64, 'centerDistanceY': 25.33, 'Halpha6563a': [0.0,0.0,0.0], 'Halpha6563b': [0.0,0.0,0.0], 'OIII5007a': [0.0,0.0,0.0], 'OIII5007b': [0.0,0.0,0.0], 'SII6716a': [6693.59,5645.0,13.32], 'SII6731a': [6711.32,8018.0,15.05], 'SII6716b': [0.0,0.0,0.0], 'SII6731b': [0.0,0.0,0.0], 'ArIII7136': [0.0,0.0,0.0]},
    {'fiber': 8, 'centerDistanceX': 29.56, 'centerDistanceY': 0.0, 'Halpha6563a': [0.0,0.0,0.0], 'Halpha6563b': [0.0,0.0,0.0], 'OIII5007a': [0.0,0.0,0.0], 'OIII5007b': [0.0,0.0,0.0], 'SII6716a': [6700.25,4676.0,11.78], 'SII6731a': [0.0,0.0,0.0], 'SII6716b': [0.0,0.0,0.0], 'SII6731b': [0.0,0.0,0.0], 'ArIII7136': [0.0,0.0,0.0]},
    {'fiber': 9, 'centerDistanceX': 34.49, 'centerDistanceY': 8.44, 'Halpha6563a': [0.0,0.0,0.0], 'Halpha6563b': [0.0,0.0,0.0], 'OIII5007a': [0.0,0.0,0.0], 'OIII5007b': [0.0,0.0,0.0], 'SII6716a': [0.0,0.0,0.0], 'SII6731a': [0.0,0.0,0.0], 'SII6716b': [0.0,0.0,0.0], 'SII6731b': [0.0,0.0,0.0], 'ArIII7136': [0.0,0.0,0.0]},
    {'fiber': 10, 'centerDistanceX': 34.49, 'centerDistanceY': -25.33, 'Halpha6563a': [0.0,0.0,0.0], 'Halpha6563b': [0.0,0.0,0.0], 'OIII5007a': [0.0,0.0,0.0], 'OIII5007b': [0.0,0.0,0.0], 'SII6716a': [0.0,0.0,0.0], 'SII6731a': [0.0,0.0,0.0], 'SII6716b': [6726.09,6722.0,11.31], 'SII6731b': [6743.01,10599.0,14.66], 'ArIII7136': [0.0,0.0,0.0]},
    {'fiber': 11, 'centerDistanceX': 24.64, 'centerDistanceY': -25.33, 'Halpha6563a': [0.0,0.0,0.0], 'Halpha6563b': [0.0,0.0,0.0], 'OIII5007a': [0.0,0.0,0.0], 'OIII5007b': [0.0,0.0,0.0], 'SII6716a': [6698.94,6805.0,8.99], 'SII6731a': [6714.9,3550.0,4.009], 'SII6716b': [6730.94,4552.0,7.866], 'SII6731b': [6743.89,8403.0,13.03], 'ArIII7136': [0.0,0.0,0.0]},
    {'fiber': 12, 'centerDistanceX': 29.56, 'centerDistanceY': -33.77, 'Halpha6563a': [0.0,0.0,0.0], 'Halpha6563b': [0.0,0.0,0.0], 'OIII5007a': [0.0,0.0,0.0], 'OIII5007b': [0.0,0.0,0.0], 'SII6716a': [6704.1,4780.0,7.665], 'SII6731a': [0.0,0.0,0.0], 'SII6716b': [0.0,0.0,0.0], 'SII6731b': [6745.27,7437.0,10.23], 'ArIII7136': [0.0,0.0,0.0]},
    {'fiber': 13, 'centerDistanceX': 34.49, 'centerDistanceY': -8.44, 'Halpha6563a': [0.0,0.0,0.0], 'Halpha6563b': [0.0,0.0,0.0], 'OIII5007a': [0.0,0.0,0.0], 'OIII5007b': [0.0,0.0,0.0], 'SII6716a': [0.0,0.0,0.0], 'SII6731a': [0.0,0.0,0.0], 'SII6716b': [6727.22,4436.0,10.62], 'SII6731b': [6747.52,4568.0,6.754], 'ArIII7136': [0.0,0.0,0.0]},
    {'fiber': 14, 'centerDistanceX': 29.56, 'centerDistanceY': -16.89, 'Halpha6563a': [0.0,0.0,0.0], 'Halpha6563b': [0.0,0.0,0.0], 'OIII5007a': [0.0,0.0,0.0], 'OIII5007b': [0.0,0.0,0.0], 'SII6716a': [0.0,0.0,0.0], 'SII6731a': [0.0,0.0,0.0], 'SII6716b': [6739.42,6693.0,6.918], 'SII6731b': [6753.71,4630.0,5.577], 'ArIII7136': [0.0,0.0,0.0]},
    {'fiber': 15, 'centerDistanceX': 24.64, 'centerDistanceY': -8.44, 'Halpha6563a': [0.0,0.0,0.0], 'Halpha6563b': [0.0,0.0,0.0], 'OIII5007a': [0.0,0.0,0.0], 'OIII5007b': [0.0,0.0,0.0], 'SII6716a': [6696.13,3748.0,5.126], 'SII6731a': [6708.96,4156.0,10.11], 'SII6716b': [6730.28,4694.0,6.399], 'SII6731b': [6744.81,7185.0,8.379], 'ArIII7136': [0.0,0.0,0.0]},
    {'fiber': 16, 'centerDistanceX': 0.0, 'centerDistanceY': 61.92, 'Halpha6563a': [0.0,0.0,0.0], 'Halpha6563b': [0.0,0.0,0.0], 'OIII5007a': [0.0,0.0,0.0], 'OIII5007b': [0.0,0.0,0.0], 'SII6716a': [0.0,0.0,0.0], 'SII6731a': [0.0,0.0,0.0], 'SII6716b': [0.0,0.0,0.0], 'SII6731b': [6746.04,5866.0,8.922], 'ArIII7136': [0.0,0.0,0.0]},
    {'fiber': 17, 'centerDistanceX': 19.71, 'centerDistanceY': 33.77, 'Halpha6563a': [0.0,0.0,0.0], 'Halpha6563b': [0.0,0.0,0.0], 'OIII5007a': [0.0,0.0,0.0], 'OIII5007b': [0.0,0.0,0.0], 'SII6716a': [6702.74,4785.0,7.129], 'SII6731a': [6722.02,10062.0,14.37], 'SII6716b': [0.0,0.0,0.0], 'SII6731b': [6744.92,3591.0,5.948], 'ArIII7136': [0.0,0.0,0.0]},
    {'fiber': 18, 'centerDistanceX': 4.93, 'centerDistanceY': 25.33, 'Halpha6563a': [0.0,0.0,0.0], 'Halpha6563b': [0.0,0.0,0.0], 'OIII5007a': [4988.85,20479.0,2.921], 'OIII5007b': [0.0,0.0,0.0], 'SII6716a': [6695.67,9157.0,7.79], 'SII6731a': [6708.06,8492.0,8.629], 'SII6716b': [0.0,0.0,0.0], 'SII6731b': [0.0,0.0,0.0], 'ArIII7136': [0.0,0.0,0.0]},
    {'fiber': 19, 'centerDistanceX': 9.85, 'centerDistanceY': 33.77, 'Halpha6563a': [0.0,0.0,0.0], 'Halpha6563b': [0.0,0.0,0.0], 'OIII5007a': [0.0,0.0,0.0], 'OIII5007b': [0.0,0.0,0.0], 'SII6716a': [6701.8,3261.0,3.222], 'SII6731a': [6714.96,6414.0,10.71], 'SII6716b': [0.0,0.0,0.0], 'SII6731b': [0.0,0.0,0.0], 'ArIII7136': [0.0,0.0,0.0]},
    {'fiber': 20, 'centerDistanceX': 14.78, 'centerDistanceY': 25.33, 'Halpha6563a': [0.0,0.0,0.0], 'Halpha6563b': [0.0,0.0,0.0], 'OIII5007a': [0.0,0.0,0.0], 'OIII5007b': [0.0,0.0,0.0], 'SII6716a': [6703.34,4708.0,6.392], 'SII6731a': [6717.12,6281.0,7.234], 'SII6716b': [0.0,0.0,0.0], 'SII6731b': [0.0,0.0,0.0], 'ArIII7136': [0.0,0.0,0.0]},
    {'fiber': 21, 'centerDistanceX': 9.85, 'centerDistanceY': 16.89, 'Halpha6563a': [6538.0,5087.0,2.931], 'Halpha6563b': [6581.12,4449.0,3.129], 'OIII5007a': [4990.98,27483.0,3.864], 'OIII5007b': [5020.9,15711.0,2.96], 'SII6716a': [6696.75,17792.0,6.442], 'SII6731a': [6711.58,14306.0,7.242], 'SII6716b': [6734.78,5993.0,7.468], 'SII6731b': [6751.43,4412.0,8.583], 'ArIII7136': [0.0,0.0,0.0]},
    {'fiber': 22, 'centerDistanceX': 29.56, 'centerDistanceY': 61.92, 'Halpha6563a': [0.0,0.0,0.0], 'Halpha6563b': [0.0,0.0,0.0], 'OIII5007a': [0.0,0.0,0.0], 'OIII5007b': [0.0,0.0,0.0], 'SII6716a': [6713.41,2359.0,3.665], 'SII6731a': [6725.49,7284.0,7.063], 'SII6716b': [0.0,0.0,0.0], 'SII6731b': [6744.61,3890.0,6.373], 'ArIII7136': [0.0,0.0,0.0]},
    {'fiber': 23, 'centerDistanceX': 14.78, 'centerDistanceY': 8.44, 'Halpha6563a': [0.0,0.0,0.0], 'Halpha6563b': [0.0,0.0,0.0], 'OIII5007a': [0.0,0.0,0.0], 'OIII5007b': [0.0,0.0,0.0], 'SII6716a': [6706.16,4161.0,7.101], 'SII6731a': [0.0,0.0,0.0], 'SII6716b': [0.0,0.0,0.0], 'SII6731b': [0.0,0.0,0.0], 'ArIII7136': [0.0,0.0,0.0]},
import numpy as np
import os
import csvFree, csvData

imageDirList = '/Users/azuri/daten/uni/HKU/HASH/%s_images.txt'
hashPNMain = '/Users/azuri/daten/uni/HKU/HASH/PNMain_full.csv'
commandFileOut = '/Users/azuri/daten/uni/HKU/HASH/addSurveyImages'
if os.path.exists(commandFileOut):
    os.remove(commandFileOut)

# Galactic-coordinate footprints (lMin, lMax, bMin, bMax in degrees) of the surveys to check
surveyCSV = csvData.CSVData()
surveyCSV.header = ['Name', 'lMin', 'lMax', 'bMin', 'bMax']
surveyCSV.append(['IPHAS', '29.', '215.', '-5.', '5.'])
surveyCSV.append(['VVV', '350.', '360.', '-14.5', '9.5'])
surveyCSV.append(['VVV', '0.', '10.', '-14.5', '9.5'])
surveyCSV.append(['VVV', '230.', '350.', '-4.5', '4.5'])
surveyCSV.append(['VVV', '10.', '20.', '-4.5', '4.5'])

if __name__ == '__main__':
    for iSurvey in range(surveyCSV.size()):
        with open(imageDirList % surveyCSV.getData('Name', iSurvey).lower(), 'r') as f:
            imDirLines = f.readlines()
        idsWithoutSurveyImages = []
        freshIDFound = False
        idPNMain = 0
        idsStr = ''
        for line in imDirLines:
def plotHistogram():
    # Compare the HASH central-star positions (CS_DRAJ2000/CS_DDECJ2000) with the SG
    # positions and plot a histogram of the angular separations in arcsec.
    csvSG = csvData.CSVData()
    csvSG.header = ['PNG', 'Name', 'DRA', 'DDec', 'flag']
    with open(inputSGFile, 'r') as f:
        linesSG = f.readlines()
    for iLine in range(len(linesSG)):
        linesSG[iLine] = linesSG[iLine].strip().split()
        print('linesSG[', iLine, '] = ', linesSG[iLine])
        csvSG.append([linesSG[iLine][1][1:], linesSG[iLine][2], linesSG[iLine][5], linesSG[iLine][6], linesSG[iLine][4]])
    csvPNMain = csvFree.readCSVFile(inputHashPNMainFile)
    csvHash = fixHashFile()
    csvHash.addColumn('PNG')
    for iRow in range(csvHash.size()):
        csvHash.setData('PNG', iRow, csvPNMain.getData('PNG', csvPNMain.find('idPNMain', csvHash.getData('idPNMain', iRow))[0]))
    print('csvSG.size() = ', csvSG.size(), ', csvHash.size() = ', csvHash.size(), ', csvPNMain.size() = ', csvPNMain.size())
    print('csvPNMain.header = ', csvPNMain.header)
    distances = []
    print('csvSG.header = ', csvSG.header)
    for i in range(csvHash.size()):
        if csvHash.getData('InUse', i) == '1':
            posSG = csvSG.find('PNG', csvHash.getData('PNG', i))
            print('i = ', i, ': posSG = ', posSG)
            if len(posSG) > 1:
                # more than one SG entry for the same PNG is not expected; abort
                raise RuntimeError('Problem: len(posSG) = ' + str(len(posSG)) + ' > 1')
            if posSG[0] >= 0:
                print("csvHash.getData('CS_DRAJ2000',", i, ") = ", csvHash.getData('CS_DRAJ2000', i))
                print("csvHash.getData('CS_DDECJ2000',", i, ") = ", csvHash.getData('CS_DDECJ2000', i))
                print("csvSG.getData('DRA',", posSG[0], ") = ", csvSG.getData('DRA', posSG[0]))
                print("csvSG.getData('DDec',", posSG[0], ") = ", csvSG.getData('DDec', posSG[0]))
                distances.append([
                    angularDistancePyAsl(float(csvHash.getData('CS_DRAJ2000', i)),
                                         float(csvHash.getData('CS_DDECJ2000', i)),
                                         float(csvSG.getData('DRA', posSG[0])),
                                         float(csvSG.getData('DDec', posSG[0]))) * 3600.,
                    csvHash.getData('idPNMain', i),
                    csvDiam.getData('MajDiam', csvDiam.find('idPNMain', csvHash.getData('idPNMain', i))[0]),
                    csvHash.getData('refCSstat', i),
                    float(csvSG.getData('DRA', posSG[0])),
                    float(csvSG.getData('DDec', posSG[0])),
                    float(csvHash.getData('CS_DRAJ2000', i)),
                    float(csvHash.getData('CS_DDECJ2000', i)),
                ])
                if distances[len(distances) - 1][0] > 5:
                    print('distances[', len(distances) - 1, '] = ', distances[len(distances) - 1])
    distances = np.array(distances)
    print('distances = ', len(distances), ': ', distances)
    # print('[d[0] for d in distances] = ', [d[0] for d in distances])
    # print('np.array( [float(d[0]) for d in distances])>5. = ', np.array([float(d[0]) for d in distances]) > 5.)
    largeDistances = distances[np.array([float(d[0]) for d in distances]) > 5.]
    print('largeDistances = ', len(largeDistances), ': ', largeDistances)
    histVals = plt.hist(np.sort(np.array([float(distance[0]) for distance in distances])), bins=40, range=[0., 40.])
    print('histVals = ', histVals)
    plt.xlabel('distance in "')
    plt.ylabel('number of CSPN')
    plt.show()
    return csvHash, csvSG
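
# plotHistogram() refers to several module-level names that are not part of this section:
# plt (matplotlib.pyplot), np (numpy), csvFree/csvData, angularDistancePyAsl(),
# fixHashFile(), the input file names and csvDiam (the HASH angular-diameter table).
# A sketch of the setup it appears to assume; the file paths below are placeholders,
# not the author's actual paths:
import numpy as np
import matplotlib.pyplot as plt
import csvFree, csvData
from myUtils import angularDistancePyAsl

inputSGFile = '/path/to/SG_central_star_positions.txt'    # placeholder
inputHashPNMainFile = '/path/to/PNMain_full.csv'          # placeholder
csvDiam = csvFree.readCSVFile('/path/to/tbAngDiam.csv')   # placeholder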