def makeHashFile():
    inputData = csvFree.readCSVFile(newPNeFile)
    with open(newPNeFile[:newPNeFile.rfind('.')] + '.hash', 'w') as fh:
        for i in range(inputData.size()):
            dra = float(inputData.getData('DRAJ2000', i))
            ddec = float(inputData.getData('DDECJ2000', i))
            lon, lat = raDecToLonLat(dra, ddec)
            ra = degToHMS(dra)
            dec = degToDMS(ddec)
            print('lon = ', lon, ', lat = ', lat, ', dra = ', dra, ', ddec = ', ddec,
                  ', ra = ', ra, ', dec = ', dec)
            png = getPNG(lon, lat)
            fh.write(png + ',' + ra + ',' + dec + '\n')
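# getPNG() is defined elsewhere in this project. Judging from the inline PNG construction in
# createMySQLCommands() further down, it presumably formats the Galactic coordinates as a
# 'lll.l+bb.b' designation. A minimal sketch of such a formatter (an assumption, not the
# project's own implementation; note that it rounds rather than truncates the decimals):
def getPNGSketch(lon, lat):
    return '%05.1f%+05.1f' % (lon, lat)   # e.g. (2.34, 5.16) -> '002.3+05.2'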
def fixHashFile():
    # For CSPN records whose declination degree field starts with '-0' (where the minus sign is
    # easily lost when converting to decimal degrees), recompute the decimal and Galactic
    # coordinates and write UPDATE statements; returns the full table as a CSVData object.
    with open(outputSQLFile, 'w') as w:
        csvHash = csvData.CSVData()
        with open(inputHashFile, 'r') as f:
            hashData = csv.DictReader(f)
            print('hashData.fieldnames = ', hashData.fieldnames)
            csvHash.header = hashData.fieldnames
        with open(inputHashFile, 'r') as f:
            hashData = csv.DictReader(f)
            print('hashData.fieldnames = ', hashData.fieldnames)
            nRows = 0
            for row in hashData:
                nRows += 1
                csvHash.append([row[x] for x in hashData.fieldnames])
                # if row['CS_DRAJ2000'] == 'NULL':
                print("row['CS_DECJ2000'].split(':')[0][:2] = <"
                      + row['CS_DECJ2000'].split(':')[0][:2] + ">")
                if row['CS_DECJ2000'].split(':')[0][:2] == '-0':
                    print('row = ', row)
                    dra = hmsToDeg(csvHash.getData('CS_RAJ2000', csvHash.size() - 1))
                    ddec = dmsToDeg(csvHash.getData('CS_DECJ2000', csvHash.size() - 1))
                    lon, lat = raDecToLonLat(dra, ddec)
                    csvHash.setData('CS_DRAJ2000', csvHash.size() - 1, str(dra))
                    csvHash.setData('CS_DDECJ2000', csvHash.size() - 1, str(ddec))
                    csvHash.setData('CS_Glon', csvHash.size() - 1, str(lon))
                    csvHash.setData('CS_Glat', csvHash.size() - 1, str(lat))
                    w.write("UPDATE MainGPN.tbCSCoords SET CS_DRAJ2000 = " + str(dra)
                            + ", CS_DDECJ2000 = " + str(ddec)
                            + ", CS_Glon = " + str(lon)
                            + ", CS_Glat = " + str(lat)
                            + ", CSstat = 'p' WHERE idtbCSCoords = "
                            + csvHash.getData('idtbCSCoords', csvHash.size() - 1) + ";\n")
    return csvHash
def fixAccuracy():
    # Strip stray tabs from the sexagesimal CSPN coordinates and cap their precision
    # (RA seconds to 2 decimals, DEC seconds to 1), then write UPDATEs for the affected rows.
    with open(sqlFileOut[:sqlFileOut.rfind('.')] + '_accuracy.sql', 'w') as f:
        f.write("USE `MainGPN`;\n")
        cspn = csv.DictReader(open(fNameHashCSPN))
        for row in cspn:
            problem = False
            ra = row['CS_RAJ2000']
            dec = row['CS_DECJ2000']
            if '\t' in ra:
                print('found a tab in ra')
                ra = ra.strip()
                problem = True
            if '\t' in dec:
                print('found a tab in dec')
                problem = True
                dec = dec.strip()
            rah, ram, ras = ra.split(':')
            accuracyRA = len(ras[ras.rfind('.') + 1:])
            decd, decm, decs = dec.split(':')
            accuracyDEC = len(decs[decs.rfind('.') + 1:])
            print('ra = ' + ra + ': accuracyRA = ', accuracyRA)
            print('dec = ' + dec + ': accuracyDEC = ', accuracyDEC)
            if accuracyRA > 2:
                problem = True
                ra = rah + ':' + ram + ':' + '%.2f' % float(ras)
                print('new ra = ', ra)
            if accuracyDEC > 1:
                problem = True
                dec = decd + ':' + decm + ':' + '%.1f' % float(decs)
                print('new dec = ', dec)
            if problem:
                draCalc = hmsToDeg(ra)
                ddecCalc = dmsToDeg(dec)
                lCalc, bCalc = raDecToLonLat(draCalc, ddecCalc)
                f.write("UPDATE `tbCSCoords` SET `CS_RAJ2000` = '%s', `CS_DECJ2000` = '%s', "
                        "`CS_DRAJ2000` = %.5f, `CS_DDECJ2000` = %.5f, `CS_Glon` = %.5f, "
                        "`CS_Glat` = %.5f WHERE `idtbCSCoords` = %d;\n"
                        % (ra, dec, draCalc, ddecCalc, lCalc, bCalc, int(row['idtbCSCoords'])))
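# Worked illustration of the precision rule used in fixAccuracy() above (made-up value): the
# number of digits after the decimal point of the seconds field decides whether the coordinate
# gets truncated. Sketch only, not used by the scripts in this file.
def _accuracyExample(ras='56.789'):
    accuracy = len(ras[ras.rfind('.') + 1:])   # '56.789' -> 3 decimal digits
    if accuracy > 2:
        ras = '%.2f' % float(ras)              # -> '56.79' (RA seconds capped at 2 decimals)
    return ras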
def checkCSPNCoordsAndAddMissingAngles():
    # Cross-check each CSPN coordinate record: recompute decimal and Galactic coordinates from
    # the sexagesimal values, flag records with missing or inconsistent entries, and write the
    # SQL needed to repair tbCSCoords and the PNMain_tbCSCoords link table.
    # pnMain = csv.DictReader(open(fNameHashPNMain))
    cspn = csv.DictReader(open(fNameHashCSPN))
    # tbCSPN = csv.DictReader(open(fNameHashTbCSPN))
    iZiggy = 1
    nProblems = 0
    nWrongCoords = 0
    nWrongidPNMain = 0
    with open(sqlFileOut, 'w') as f:
        f.write("USE `MainGPN`;\n")
        for line in cspn:
            ra = line['CS_RAJ2000']
            dec = line['CS_DECJ2000']
            dra = line['CS_DRAJ2000']
            ddec = line['CS_DDECJ2000']
            l = line['CS_Glon']
            b = line['CS_Glat']
            idPNMain = line['idPNMain']
            idtbCSCoords = line['idtbCSCoords']
            print('idPNMain = ', idPNMain, ', idtbCSCoords = ', idtbCSCoords)
            # print('idPNMain = ', idPNMain, ': line = ', line)
            print('idPNMain = ', idPNMain, ': ra = ', ra, ', dec = ', dec)
            print('idPNMain = ', idPNMain, ': dra = ', dra, ', ddec = ', ddec)
            print('idPNMain = ', idPNMain, ': l = ', l, ', b = ', b)
            isProblem = False
            wrongCoords = False
            if (ra != 'NULL') and (line['userRecord'] != 'chandra0101'):
                draCalc = hmsToDeg(ra)
                ddecCalc = dmsToDeg(dec)
                lCalc, bCalc = raDecToLonLat(draCalc, ddecCalc)
                if dec == 'NULL':
                    print('Problem with idPNMain = ', idPNMain, ': ra is not empty but dec is => aborting')
                    isProblem = True
                    STOP  # undefined name: intentional hard stop for manual inspection
                if dra == 'NULL':
                    isProblem = True
                else:
                    if abs((angularDistancePyAsl(float(dra), float(ddec), draCalc, ddecCalc) * 3600.)
                           - angularDistance(float(dra), float(ddec), draCalc, ddecCalc)) > 0.1:
                        print('separation = ',
                              abs((angularDistancePyAsl(float(dra), float(ddec), draCalc, ddecCalc) * 3600.)
                                  - angularDistance(float(dra), float(ddec), draCalc, ddecCalc)))
                        print('problem: the two angular-distance estimates differ by more than 0.1 arcsec')
                        wrongCoords = True
                    if angularDistance(float(dra), float(ddec), draCalc, ddecCalc) > 0.1:
                        print('angular distance = ', angularDistance(float(dra), float(ddec), draCalc, ddecCalc))
                        print('problem: angular distance between stated and calculated dRA/dDEC is more than 0.1 arcsec')
                        wrongCoords = True
                if ddec == 'NULL':
                    isProblem = True
                if l == 'NULL':
                    isProblem = True
                else:
                    raFromlb, decFromlb = lonLatToRaDec(float(l), float(b))
                    raFromlbCalc, decFromlbCalc = lonLatToRaDec(lCalc, bCalc)
                    if angularDistance(raFromlb, decFromlb, raFromlbCalc, decFromlbCalc) > 0.1:
                        print('lCalc = ', lCalc, ', bCalc = ', bCalc)
                        print('angular distance = ', angularDistance(raFromlb, decFromlb, raFromlbCalc, decFromlbCalc))
                        print('problem: angular distance between stated and calculated l/b is more than 0.1 arcsec')
                        wrongCoords = True
                if b == 'NULL':
                    isProblem = True
            else:
                # remove from both tables
                # if getidtbCSCoords(idPNMain) != line['idtbCSCoords']:
                #     print("PROBLEM: getidtbCSCoords(idPNMain)=", getidtbCSCoords(idPNMain),
                #           " != line['idtbCSCoords']=", line['idtbCSCoords'])
                f.write('DELETE FROM `tbCSCoords` WHERE `idtbCSCoords` = %d;\n' % int(idtbCSCoords))
                print('deleted record from tbCSCoords')
                if line['InUse'] == '1':
                    f.write('DELETE FROM `PNMain_tbCSCoords` WHERE `idtbCSCoords` = %d;\n' % int(idtbCSCoords))
                    print('deleted record from PNMain_tbCSCoords')
                    cspnTemp = csv.DictReader(open(fNameHashCSPN))
                    setInUse = 0
                    for lineTemp in cspnTemp:
                        if lineTemp['idPNMain'] == idPNMain:
                            if lineTemp['userRecord'] != 'chandra0101':
                                if lineTemp['InUse'] == '1':
                                    print('Found another entry for idPNMain=', idPNMain, ' which was set InUse')
                                    setInUse = 1
                    if setInUse == 0:
                        cspnTemp = csv.DictReader(open(fNameHashCSPN))
                        for lineTemp in cspnTemp:
                            if lineTemp['idPNMain'] == idPNMain:
                                if (lineTemp['userRecord'] != 'chandra0101') and (setInUse == 0):
                                    print('lineTemp[userRecord] = ', lineTemp['userRecord'])
                                    setInUse = 1
                                    f.write('UPDATE `tbCSCoords` SET `InUse` = 1 WHERE `idtbCSCoords` = %d;\n'
                                            % (int(lineTemp['idtbCSCoords'])))
                                    f.write("INSERT INTO `PNMain_tbCSCoords`(`idPNMain`,`idtbCSCoords`)")
                                    f.write(" VALUES (%d,%d);\n" % (int(idPNMain), int(lineTemp['idtbCSCoords'])))
                                    print('updated tbCSCoords and added entry to PNMain_tbCSCoords')
                                    # STOP
                                elif (lineTemp['userRecord'] != 'chandra0101') and (setInUse == 1):
                                    print('Problem: found another entry with InUse == 1')
                                    STOP
            # idtbCSCoords = getidtbCSCoords(idPNMain)
            # print('idtbCSCoords = ', idtbCSCoords)
            addedTotbCSCoords = False
            if isProblem:
                if getidtbCSCoords(idPNMain) != idtbCSCoords:
                    print("PROBLEM: getidtbCSCoords(idPNMain)=", getidtbCSCoords(idPNMain),
                          " != line['idtbCSCoords']=", line['idtbCSCoords'])
                    # idtbCSCoords = getidtbCSCoords(idPNMain)
                if getidtbCSCoords(idPNMain) is None:
                    if line['InUse'] == '1':
                        f.write("INSERT INTO `PNMain_tbCSCoords`(`idPNMain`,`idtbCSCoords`)")
                        f.write(" VALUES (%d,%d);\n" % (int(idPNMain), int(idtbCSCoords)))
                        print('added entry to PNMain_tbCSCoords')
                        addedTotbCSCoords = True
                        # STOP
                nProblems += 1
                f.write('UPDATE `tbCSCoords` SET `CS_DRAJ2000` = %.5f, `CS_DDECJ2000` = %.5f, `CS_Glon` = %.5f, `CS_Glat` = %.5f WHERE `idtbCSCoords` = %d;\n'
                        % (draCalc, ddecCalc, lCalc, bCalc, int(idtbCSCoords)))
                print('updated tbCSCoords')
            if wrongCoords:
                if line['InUse'] == '1':
                    if getidtbCSCoords(idPNMain) != idtbCSCoords:
                        print("PROBLEM: getidtbCSCoords(idPNMain)=", getidtbCSCoords(idPNMain),
                              " != line['idtbCSCoords']=", line['idtbCSCoords'])
                        STOP
                    # idtbCSCoords = getidtbCSCoords(idPNMain)
                nWrongCoords += 1
                # check RA and DEC against the PN itself
                draPN, ddecPN = getRaDecForPN(idPNMain)
                if angularDistance(draPN, ddecPN, draCalc, ddecCalc) > 20.:
                    print('draPN = ', draPN, ', ddecPN = ', ddecPN)
                    print('angular distance = ', angularDistance(draPN, ddecPN, draCalc, ddecCalc))
                    print('problem: angular distance between the CSPN and the PN is more than 20. arcsec')
                    # STOP
                f.write('UPDATE `tbCSCoords` SET `CS_DRAJ2000` = %.5f, `CS_DDECJ2000` = %.5f, `CS_Glon` = %.5f, `CS_Glat` = %.5f WHERE `idtbCSCoords` = %d;\n'
                        % (draCalc, ddecCalc, lCalc, bCalc, int(idtbCSCoords)))
                if line['InUse'] == '1':
                    if getidPNMain(line['idtbCSCoords']) is None:
                        print('idtbCSCoords = ' + idtbCSCoords + ' not found in PNMain_tbCSCoords')
                        if not addedTotbCSCoords:
                            f.write("INSERT INTO `PNMain_tbCSCoords`(`idPNMain`,`idtbCSCoords`)")
                            f.write(" VALUES (%d,%d);\n" % (int(idPNMain), int(line['idtbCSCoords'])))
                            print('added entry to PNMain_tbCSCoords')
                            addedTotbCSCoords = True
                            STOP
                    elif getidPNMain(line['idtbCSCoords']) != idPNMain:
                        print('PROBLEM: wrong idPNMain(=', idPNMain, ') in PNMain_tbCSCoords(getidPNMain(',
                              line['idtbCSCoords'], ') = ', getidPNMain(line['idtbCSCoords']), ')')
                        nWrongidPNMain += 1
                        f.write('DELETE FROM `PNMain_tbCSCoords` WHERE `idtbCSCoords` = %d;\n'
                                % int(line['idtbCSCoords']))
                        print('deleted entry from PNMain_tbCSCoords')
                        f.write("INSERT INTO `PNMain_tbCSCoords`(`idPNMain`,`idtbCSCoords`)")
                        f.write(" VALUES (%d,%d);\n" % (int(idPNMain), int(idtbCSCoords)))
                        print('added entry to PNMain_tbCSCoords')
                        # STOP
            if line['userRecord'] == 'ziggy':
                with open(sqlFileOutZiggy, 'a') as fZiggy:
                    fZiggy.write("INSERT INTO `CSPN_ziggy`(`idCSPN_ziggy`,`idPNMain`,`mapFlag`)")
                    fZiggy.write(" VALUES (%d,%d,'%s');\n" % (iZiggy, int(idPNMain), 'y'))
                    iZiggy += 1
    print('found ', nProblems, ' problematic CSPN and ', nWrongCoords,
          ' CSPN with problematic coordinates, ', nWrongidPNMain,
          ' wrong idPNMain in PNMain_tbCSCoords')
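# The consistency checks in checkCSPNCoordsAndAddMissingAngles() rely on angularDistance() and
# angularDistancePyAsl(), which are defined elsewhere in this project. A minimal, self-contained
# sketch of an angular-separation helper returning arcseconds, using astropy (an assumption,
# not the project's own code):
def angularSeparationArcsecSketch(ra1Deg, dec1Deg, ra2Deg, dec2Deg):
    from astropy.coordinates import SkyCoord
    import astropy.units as u
    c1 = SkyCoord(ra=ra1Deg * u.deg, dec=dec1Deg * u.deg)
    c2 = SkyCoord(ra=ra2Deg * u.deg, dec=dec2Deg * u.deg)
    return c1.separation(c2).arcsecond
# Example: a pure 0.1 arcsec offset in declination returns ~0.1:
# angularSeparationArcsecSketch(266.41683, -29.00781, 266.41683, -29.00781 + 0.1 / 3600.)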
def createMySQLCommands():
    # Build the SQL to add the DSHPNe candidates to HASH (PNMain, tbCNames, tbAngDiam,
    # tbUsrComm and the MainPNData.DSHPNe mapping table), plus a hashpn fetch/brew script.
    csv = csvFree.readCSVFile(inList)  # note: shadows the csv module inside this function
    with open(inList[:inList.rfind('.')] + '.sql', 'w') as f:
        for i in np.arange(0, csv.size(), 1):
            f.write("USE `MainGPN`;\n")
            hashIDs.append(idPNMainStart + i)
            raHMS = csv.getData("RAJ2000", i)
            ra = hmsToDeg(raHMS)
            decDMS = csv.getData("DECJ2000", i)
            dec = dmsToDeg(decDMS)
            refined = csv.getData('Refined coordinates', i)
            refined = refined.replace('  ', ' ')
            if refined != '':
                print('refined = <' + refined + '>')
                raH, raM, raS, decD, decM, decS = refined.split(' ')
                raHMS = raH + ':' + raM + ':' + raS
                ra = hmsToDeg(raHMS)
                decDMS = decD + ':' + decM + ':' + decS
                dec = dmsToDeg(decDMS)
                print('refined coordinates: raHMS = <' + raHMS + '> decDMS = <' + decDMS + '>')
                print('refined coordinates: ra = ', ra, ' dec = ', dec)
            print('raHMS = <' + raHMS + '> decDMS = <' + decDMS + '>')
            print('ra = ', ra, ' dec = ', dec)
            lon, lat = raDecToLonLat(ra, dec)
            print('lon=', lon, ', lat=', lat)
            # build the PNG designation lll.l+bb.b from the Galactic coordinates
            png = ''
            if lon < 100:
                png += '0'
            if lon < 10:
                png += '0'
            png += str(lon)
            png = png[:png.rfind('.') + 2]
            # print('png = <'+png+'>')
            if lat >= 0.:
                png += '+'
            # print('png = <'+png+'>')
            png += str(lat)
            # print('png = <'+png+'>')
            png = png[:png.rfind('.') + 2]
            # print('png = <'+png+'>')
            if (lat < 10.) and (lat >= 0.):
                png = png[:png.rfind('+') + 1] + '0' + png[png.rfind('+') + 1:]
            if (lat > -10.) and (lat < 0.):
                png = png[:png.rfind('-') + 1] + '0' + png[png.rfind('-') + 1:]
            # print('png = <'+png+'>')
            print('PNG ' + png)
            # STOP
            f.write("INSERT INTO `PNMain`(`idPNMain`,`PNG`,`refPNG`,`RAJ2000`,`DECJ2000`,`DRAJ2000`,`DDecJ2000`,")
            f.write("`Glon`,`Glat`,`refCoord`,`Catalogue`,`refCatalogue`,`userRecord`,`domain`,`refDomain`,`PNstat`,`refPNstat`,`refSimbadID`,`show`) ")
            f.write("VALUES (%d,'%s','%s','%s','%s',%.5f,%.5f,%.5f,%.5f,'%s','%s','%s','%s','%s','%s','%s','%s','%s','%s');\n"
                    % (idPNMainStart + i, png, 'sys', raHMS, decDMS, ra, dec, lon, lat,
                       'DSHPNe', 'DSHPNe', 'ziggy', 'ziggy', 'Galaxy', 'ziggy',
                       csv.getData('PNstat', i), 'ziggy', 'sys', 'y'))
            f.write("INSERT INTO `tbCNames`(`idtbCNames`,`Name`,`reference`,`InUse`,`refInUse`,`userRecord`,`idPNMain`,`simbadID`,`flag`)")
            f.write("VALUES (%d,'%s','%s',%d,'%s','%s',%d,'%s','%s');\n"
                    % (idtbCNamesStart + i, csv.getData('Name', i), 'DSHPNe', 1, 'sys', 'ziggy',
                       idPNMainStart + i, 'n', 'n'))
            f.write("INSERT INTO `PNMain_tbCNames`(`idPNMain`,`idtbCNames`) VALUES (%d,%d);\n"
                    % (idPNMainStart + i, idtbCNamesStart + i))
            majDiamStr = csv.getData("MajDiam", i).strip('~').rstrip('"').rstrip(':')
            minDiamStr = csv.getData("MinDiam", i).strip('~').rstrip('"').rstrip(':')
            if majDiamStr in ['stellar?', 'stellar']:
                majDiamStr = '1'
                minDiamStr = '1'
            # if 'x' in diamStr:
            f.write("INSERT INTO `tbAngDiam`(`idtbAngDiam`,`MajDiam`,`MinDiam`,`reference`,`InUse`,`refInUse`,`userRecord`,`idPNMain`,`tempflag`) ")
            f.write("VALUES (%d,%.1f,%.1f,'%s',%d,'%s','%s',%d,'%s');\n"
                    % (idtbAngDiamStart + i, float(majDiamStr), float(minDiamStr), 'DSHPNe', 1,
                       'sys', 'ziggy', idPNMainStart + i, 'n'))
            # else:
            #     f.write("INSERT INTO `tbAngDiam`(`idtbAngDiam`,`MajDiam`,`reference`,`InUse`,`refInUse`,`userRecord`,`idPNMain`,`tempflag`) ")
            #     f.write("VALUES (%d,%.1f,'%s',%d,'%s','%s',%d,'%s');\n"
            #             % (idtbAngDiamStart + i, float(diamStr), 'FrenchAmateurs', 1, 'sys',
            #                'ziggy', idPNMainStart + i, 'n'))
            f.write("INSERT INTO `PNMain_tbAngDiam`(`idPNMain`,`idtbAngDiam`) VALUES (%d,%d);\n"
                    % (idPNMainStart + i, idtbAngDiamStart + i))
            name = csv.getData('Name', i)
            if name[:4] == 'Pa J':
                name = name[3:]
            notes = csv.getData('comment1', i)
            if notes == '':
                notes = csv.getData('comment2', i)
            else:
                notes += ', ' + csv.getData('comment2', i)
            print('notes = <' + notes + '>')
            f.write("INSERT INTO `tbUsrComm`(`idtbUsrComm`,`idPNMain`,`user`,`public`,`comment`,`date`) ")
            f.write("VALUES (%d,%d,'%s','%s','%s','%s');\n"
                    % (idtbUsrCommStart + i, idPNMainStart + i, 'ziggy', 'y', notes,
                       '2020-03-31 19:30:00'))
            f.write("USE `MainPNData`;\n")
            f.write("INSERT INTO `DSHPNe`(`idDSHPNe`,`Discoverer`,`ID`,`PNG`,`RAJ2000`,`DECJ2000`,`DRAJ2000`,`DDECJ2000`,`Glon`,`Glat`,`MajDiam`,`MinDiam`,`status`,`discovery`,`narrowImag`,`broadband`,`echelle`,`notes`,`PNMainDist`,`mapFlag`,`idPNMain`) ")
            f.write("VALUES (%d,'%s','%s','%s','%s','%s',%.4f,%.4f,%.2f,%.2f,%d,%d,'%s','%s','%s','%s','%s','%s',%d,'%s',%d);\n"
                    % (idDSHPNeStart + i, 'Pa', name, png, raHMS, decDMS, ra, dec, lon, lat,
                       float(majDiamStr), float(minDiamStr), 'New Candidates', '', '', '', '',
                       notes, -1, 'y', idPNMainStart + i))
    with open(inList[:inList.rfind('.')] + '.fetch', 'w') as f:
        f.write('hashpn fetch all ' + str(hashIDs[0]))
        for id in np.arange(1, len(hashIDs), 1):
            f.write(',' + str(hashIDs[id]))
        f.write(' -w force\n')
        f.write('hashpn brew all ' + str(hashIDs[0]))
        for id in np.arange(1, len(hashIDs), 1):
            f.write(',' + str(hashIDs[id]))
        f.write(' -w\n')
# Part of the Barlow-to-HASH cross-match loop: at this point iRow indexes the Barlow table,
# iName the HASH common names, and iHash a matching HASH row.
foundIDx = iHash
found = True
print('found Barlow PNG ' + csvBarlow.getData('PNG', iRow) + ', HASH PNG = '
      + csvHash.getData('PNG', foundIDx) + ', name = <' + cNames.getData('Name', iName)
      + '> at idPNMain = ', idPNMain)
if not found:
    ra, dec = getRAandDECFromIPHAS(csvBarlow.getData('Name', iRow))
    print('ra = <' + ra + '>, dec = <' + dec + '>')
    lon, lat = raDecToLonLat(hmsToDeg(ra), dmsToDeg(dec))
    png = '%010.6f' % lon
    png = png[:png.find('.') + 2]
    # png = png.zfill(3)
    if lat > 0:
        png = png + '+'
    png = png + '%08.6g' % lat
    png = png[:png.rfind('.') + 2]
    print('lon = ', lon, ', lat = ', lat, ', png = <' + png + '>, PNG = <'
          + csvBarlow.getData('PNG', iRow) + '>')
    for iHash in range(csvHash.size()):
        if csvHash.getData('PNG', iHash) == png:
            found = True
            idPNMain = int(csvHash.getData('idPNMain', iHash))
            foundIDx = iHash
def addNewPNeToHASH():
    inputData = csvFree.readCSVFile(newPNeFile)
    PNMain = csvFree.readCSVFile(hashPNMainFileName)
    hashOut = csvFree.readCSVFile(newHashOutFile)
    pneInHash = hashOut.getData('id')
    pngs = PNMain.getData('PNG')
    pngsInHash = []
    with open(sqlCommandsFile, 'w') as f:
        with open(iphasTable, 'w') as ft:
            ft.write('CREATE TABLE IF NOT EXISTS MainPNData.' + catalogName + ' (\n')
            ft.write('id' + catalogName + ' INT AUTO_INCREMENT PRIMARY KEY UNIQUE,\n')
            ft.write('idPNMain INT,\n')
            ft.write('mapFlag VARCHAR(1) NOT NULL\n')
            ft.write(');\n')
            ft.write("USE `MainPNData`;\n")
            ids = []
            f.write("USE `MainGPN`;\n")
            for i in range(inputData.size()):
                dra = float(inputData.getData('DRAJ2000', i))
                ddec = float(inputData.getData('DDECJ2000', i))
                lon, lat = raDecToLonLat(dra, ddec)
                ra = degToHMS(dra)
                dec = degToDMS(ddec)
                print('lon = ', lon, ', lat = ', lat)
                png = getPNG(lon, lat)
                print('png = <' + png + '>')
                # if the PNG designation is already taken, try the suffixes 'a' to 'e'
                if png in pngs:
                    print('PNG ' + png + ' already in HASH')
                    pngsInHash.append(png)
                    png = png + 'a'
                    if png in pngs:
                        pngsInHash.append(png)
                        print('PNG ' + png + ' already in HASH')
                        png = png[:-1] + 'b'
                        if png in pngs:
                            pngsInHash.append(png)
                            print('PNG ' + png + ' already in HASH')
                            png = png[:-1] + 'c'
                            if png in pngs:
                                pngsInHash.append(png)
                                print('PNG ' + png + ' already in HASH')
                                png = png[:-1] + 'd'
                                if png in pngs:
                                    pngsInHash.append(png)
                                    print('PNG ' + png + ' already in HASH')
                                    png = png[:-1] + 'e'
                if (png in pneInHash) and (hashOut.getData('pndb', pneInHash.index(png)) != ''):
                    print('PNG ' + png, ' found in pneInHash: pneInHash.index(', png, ') = ',
                          pneInHash.index(png))
                    idPNMain = int(hashOut.getData('pndb', pneInHash.index(png)))
                    # add IPHAS name to common names
                else:
                    idPNMain = idPNMainStart + i
                    ids.append(idPNMain)
                    f.write("INSERT INTO `PNMain`(`idPNMain`,`PNG`,`refPNG`,`RAJ2000`,`DECJ2000`,`DRAJ2000`,`DDecJ2000`,")
                    f.write("`Glon`,`Glat`,`refCoord`,`Catalogue`,`refCatalogue`,`userRecord`,`domain`,`refDomain`,`PNstat`,`refPNstat`,`refSimbadID`,`show`) ")
                    f.write("VALUES (%d,'%s','%s','%s','%s',%.5f,%.5f,%.5f,%.5f,'%s','%s','%s','%s','%s','%s','%s','%s','%s','%s');\n"
                            % (idPNMain,
                               png,  # csvBarlow.getData('PNG', iRow),
                               'sys', ra, dec, dra, ddec, lon, lat, 'ziggy', 'ziggy', 'ziggy',
                               'ziggy', 'Galaxy', 'ziggy', 'c', 'ziggy', 'sys', 'y'))
                    ft.write("INSERT INTO `" + catalogName + "`(`idPNMain`,`mapFlag`) ")
                    ft.write("VALUES (%d,'%s');\n" % (idPNMain, 'y'))
                    iphasName = getIPHASName(ra, dec)
                    f.write("INSERT INTO `PNMain_tbCNames`(`idPNMain`,`idtbCNames`) VALUES (%d,%d);\n"
                            % (idPNMain, idtbCNamesStart + i))
                    f.write("INSERT INTO `tbCNames`(`idtbCNames`,`Name`,`InUse`,`refInUse`,`userRecord`,`idPNMain`,`simbadID`,`flag`) ")
                    f.write("VALUES (%d,'%s',%d,'%s','%s',%d,'%s','%s');\n"
                            % (idtbCNamesStart + i, iphasName, 1, 'sys', 'sys', idPNMain, 'n', 'n'))
                    f.write("INSERT INTO `PNMain_tbAngDiam`(`idPNMain`,`idtbAngDiam`) VALUES (%d,%d);\n"
                            % (idPNMain, idtbAngDiamStart + i))
                    f.write("INSERT INTO `tbAngDiam`(`idtbAngDiam`,`MajDiam`,`InUse`,`userRecord`,`idPNMain`,`tempflag`) ")
                    f.write("VALUES (%d,%.0f,%d,'%s',%d,'%s');\n"
                            % (idtbAngDiamStart + i, 300., 1, 'sys', idPNMain, 'n'))
    with open(hashpnFile, 'w') as hf:
        for id in ids:
            hf.write('hashpn fetch all ' + str(id) + ' -w force\n')
            hf.write('hashpn brew all ' + str(id) + ' -w\n')
            hf.write('echo "finished HASH ID %d" >> logfile_IPHAS.log\n' % id)
    print('pngsInHash = ', pngsInHash)
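# The cascaded 'a'...'e' suffix checks in addNewPNeToHASH() can be written as a loop; a compact
# sketch of the same idea (not the project's own helper):
def nextFreePNGSketch(png, existingPNGs, suffixes='abcde'):
    """Return png itself, or png plus the first suffix letter not yet in existingPNGs."""
    if png not in existingPNGs:
        return png
    for s in suffixes:
        if png + s not in existingPNGs:
            return png + s
    raise ValueError('no free PNG designation left for ' + png)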
# Write tbCSCoords INSERTs for CSPN coordinates taken from SIMBAD ('Simba') for chandra0101
# records, plus a MainPNData.CSPN_suvas mapping table.
with open(sqlOut, 'w') as f:
    f.write('USE MainGPN;\n')
with open(sqlOutSuvas, 'w') as fs:
    fs.write('CREATE TABLE IF NOT EXISTS MainPNData.CSPN_suvas (\n')
    fs.write('idCSPN_suvas INT UNIQUE AUTO_INCREMENT PRIMARY KEY,\n')
    fs.write('idPNMain INT UNIQUE,\n')
    fs.write('mapFlag VARCHAR(1) NOT NULL\n')
    fs.write(');\n')
    fs.write('USE MainPNData;\n')
for row in simba:
    if (row['userRecord'] == 'chandra0101') and (':' in row['CS_RA_Simba']):
        draCalc = hmsToDeg(row['CS_RA_Simba'])
        ddecCalc = dmsToDeg(row['CS_Dec_Simba'])
        lCalc, bCalc = raDecToLonLat(draCalc, ddecCalc)
        with open(sqlOut, 'a') as f:
            f.write("INSERT INTO `tbCSCoords`(`idtbCSCoords`,`CS_RAJ2000`,`CS_DECJ2000`,`CS_DRAJ2000`,`CS_DDECJ2000`,`CS_Glon`,`CS_Glat`,`InUse`,`userRecord`,`idPNMain`)\n")
            f.write("VALUES (%d,'%s','%s',%.5f,%.5f,%.5f,%.5f,%d,'%s',%d);\n"
                    % (idtbCSCoords, row['CS_RA_Simba'], row['CS_Dec_Simba'],
                       draCalc, ddecCalc, lCalc, bCalc, 0, 'Simba', int(row['idPNMain'])))
        with open(sqlOutSuvas, 'a') as fs:
            fs.write("INSERT INTO `CSPN_suvas`(`idCSPN_suvas`,`idPNMain`,`mapFlag`)\n")
            fs.write("VALUES (%d,%d,'%s');\n" % (iSuvas, int(row['idPNMain']), 'y'))
def findPNeNotInHASH():
    csvHASH = csvFree.readCSVFile(hashOutList)
    print('csvHASH.size() = ', csvHASH.size())
    print('csvHASH.header = ', csvHASH.header)
    found = []
    for i in np.arange(0, csvHASH.size(), 1):
        if csvHASH.getData('pndb', i) != '':
            found.append(i)
    print('found = ', found)
    found.reverse()
    print('found = ', found)
    for i in found:
        csvHASH.removeRow(i)
    print('csvHASH.size() = ', csvHASH.size())
    keepColumns = ["NOM", "AD:(J2000)", "DEC (J2000)",
                   "Dimension en minute d'arc (')", "Coordonnées galactiques"]
    print('keepColumns = ', keepColumns)
    csvOut = None
    for iList in [longList, shortList]:
        csv = readFRAFile(iList)  # note: shadows the csv module inside this loop
        for keyword in csv.header:
            print('checking keyword <' + keyword + '>')
            if keyword not in keepColumns:
                print('keyword <' + keyword + '> not found in keepColumns => removing column')
                csv.removeColumn(keyword)
        print('csv.header = ', csv.header)
        print('csv.data = ', csv.data)
        hashNames = csvHASH.getData('id')
        remove = []
        for i in np.arange(0, csv.size(), 1):
            if csv.getData('NOM', i).replace(' ', '') not in hashNames:
                remove.append(i)
        remove.reverse()
        for i in remove:
            csv.removeRow(i)
        if "Coordonnées galactiques" not in csv.header:
            csv.addColumn("Coordonnées galactiques")
        for i in np.arange(0, csv.size(), 1):
            lon, lat = raDecToLonLat(hmsToDeg(csv.getData("AD:(J2000)", i)),
                                     dmsToDeg(csv.getData("DEC (J2000)", i)))
            print('lon=', lon, ', lat=', lat)
            # build the PNG designation lll.l+bb.b from the Galactic coordinates
            png = ''
            if lon < 100:
                png += '0'
            if lon < 10:
                png += '0'
            png += str(lon)
            png = png[:png.rfind('.') + 2]
            # print('png = <'+png+'>')
            if lat >= 0.:
                png += '+'
            # print('png = <'+png+'>')
            png += str(lat)
            # print('png = <'+png+'>')
            png = png[:png.rfind('.') + 2]
            # print('png = <'+png+'>')
            if (lat < 10.) and (lat >= 0.):
                png = png[:png.rfind('+') + 1] + '0' + png[png.rfind('+') + 1:]
            if (lat > -10.) and (lat < 0.):
                png = png[:png.rfind('-') + 1] + '0' + png[png.rfind('-') + 1:]
            # print('png = <'+png+'>')
            print('PNG ' + png)
            csv.setData("Coordonnées galactiques", i, png)
        if not csvOut:
            csvOut = csv
        else:
            csvOut.append(csv)
    # convert diameters from arcmin to arcsec
    for i in np.arange(0, csvOut.size(), 1):
        diamStr = csvOut.getData("Dimension en minute d'arc (')", i).rstrip(' ')
        print('diamStr = <' + diamStr + '>')
        diamStrOut = ''
        if 'x' in diamStr:
            diamA = float(diamStr[:diamStr.find(' ')]) * 60.
            diamB = float(diamStr[diamStr.rfind(' ') + 1:]) * 60.
            diamStrOut = '%.1f x %.1f' % (diamA, diamB)
        else:
            diamStrOut = '%.1f' % (float(diamStr) * 60.)
        csvOut.setData("Dimension en minute d'arc (')", i, diamStrOut)
    # write output
    csvFree.writeCSVFile(csvOut, hashOutList[:hashOutList.rfind('.')] + '_not_in_HASH.csv')
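# Worked illustration of the arcmin-to-arcsec diameter conversion above (made-up input):
# "1.5 x 0.8" (arcmin) -> "90.0 x 48.0" (arcsec); a single value "0.4" -> "24.0".
def _diamToArcsecExample(diamStr='1.5 x 0.8'):
    if 'x' in diamStr:
        diamA = float(diamStr[:diamStr.find(' ')]) * 60.
        diamB = float(diamStr[diamStr.rfind(' ') + 1:]) * 60.
        return '%.1f x %.1f' % (diamA, diamB)
    return '%.1f' % (float(diamStr) * 60.)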
def createMySQLCommands():
    csv = csvFree.readCSVFile(hashOutList[:hashOutList.rfind('.')] + '_not_in_HASH.csv')
    print('csv.header = ', csv.header)
    with open(hashOutList[:hashOutList.rfind('.')] + '_not_in_HASH.sql', 'w') as f:
        f.write("USE `MainGPN`;\n")
        for i in np.arange(0, csv.size(), 1):
            hashIDs.append(idPNMainStart + i)
            lon, lat = raDecToLonLat(hmsToDeg(csv.getData("AD:(J2000)", i)),
                                     dmsToDeg(csv.getData("DEC (J2000)", i)))
            f.write("INSERT INTO `PNMain`(`idPNMain`,`PNG`,`refPNG`,`RAJ2000`,`DECJ2000`,`DRAJ2000`,`DDecJ2000`,")
            f.write("`Glon`,`Glat`,`refCoord`,`Catalogue`,`refCatalogue`,`userRecord`,`domain`,`refDomain`,`PNstat`,`refPNstat`,`refSimbadID`,`show`) ")
            f.write("VALUES (%d,'%s','%s','%s','%s',%.5f,%.5f,%.5f,%.5f,'%s','%s','%s','%s','%s','%s','%s','%s','%s','%s');\n"
                    % (idPNMainStart + i, csv.getData('Coordonnées galactiques', i), 'sys',
                       csv.getData("AD:(J2000)", i), csv.getData("DEC (J2000)", i),
                       hmsToDeg(csv.getData("AD:(J2000)", i)),
                       dmsToDeg(csv.getData("DEC (J2000)", i)), lon, lat,
                       'FrenchAmateurs', 'FrenchAmateurs', 'ziggy', 'ziggy', 'Galaxy', 'ziggy',
                       'c', 'ziggy', 'sys', 'y'))
            f.write("INSERT INTO `tbCNames`(`idtbCNames`,`Name`,`reference`,`InUse`,`refInUse`,`userRecord`,`idPNMain`,`simbadID`,`flag`)")
            f.write("VALUES (%d,'%s','%s',%d,'%s','%s',%d,'%s','%s');\n"
                    % (idtbCNamesStart + i, csv.getData('NOM', i), 'FrenchAmateurs', 1, 'sys',
                       'ziggy', idPNMainStart + i, 'n', 'n'))
            f.write("INSERT INTO `PNMain_tbCNames`(`idPNMain`,`idtbCNames`) VALUES (%d,%d);\n"
                    % (idPNMainStart + i, idtbCNamesStart + i))
            diamStr = csv.getData("Dimension en minute d'arc (')", i).strip(' ').rstrip(' ')
            if 'x' in diamStr:
                f.write("INSERT INTO `tbAngDiam`(`idtbAngDiam`,`MajDiam`,`MinDiam`,`reference`,`InUse`,`refInUse`,`userRecord`,`idPNMain`,`tempflag`) ")
                f.write("VALUES (%d,%.1f,%.1f,'%s',%d,'%s','%s',%d,'%s');\n"
                        % (idtbAngDiamStart + i, float(diamStr[:diamStr.find(' ')]),
                           float(diamStr[diamStr.rfind(' ') + 1:]), 'FrenchAmateurs', 1, 'sys',
                           'ziggy', idPNMainStart + i, 'n'))
            else:
                f.write("INSERT INTO `tbAngDiam`(`idtbAngDiam`,`MajDiam`,`reference`,`InUse`,`refInUse`,`userRecord`,`idPNMain`,`tempflag`) ")
                f.write("VALUES (%d,%.1f,'%s',%d,'%s','%s',%d,'%s');\n"
                        % (idtbAngDiamStart + i, float(diamStr), 'FrenchAmateurs', 1, 'sys',
                           'ziggy', idPNMainStart + i, 'n'))
            f.write("INSERT INTO `PNMain_tbAngDiam`(`idPNMain`,`idtbAngDiam`) VALUES (%d,%d);\n"
                    % (idPNMainStart + i, idtbAngDiamStart + i))
    with open(hashOutList[:hashOutList.rfind('.')] + '_not_in_HASH.fetch', 'w') as f:
        f.write('hashpn fetch all ' + str(hashIDs[0]))
        for id in np.arange(1, len(hashIDs), 1):
            f.write(',' + str(hashIDs[id]))
        f.write(' -w force\n')
        f.write('hashpn brew all ' + str(hashIDs[0]))
        for id in np.arange(1, len(hashIDs), 1):
            f.write(',' + str(hashIDs[id]))
        f.write(' -w\n')
def createMySQLCommands():
    csv = csvFree.readCSVFile(inList)  # note: shadows the csv module inside this function
    with open(inList[:inList.rfind('.')] + '.sql', 'w') as f:
        f.write('DROP TABLE IF EXISTS MainPNData.FrenchAmateurs_Oct2021;\n')
        f.write('CREATE TABLE IF NOT EXISTS MainPNData.FrenchAmateurs_Oct2021 (\n')
        f.write('idFrenchAmateurs_Oct2021 INT AUTO_INCREMENT PRIMARY KEY,\n')
        f.write('idPNMain INT,\n')
        f.write('mapFlag VARCHAR(1) NOT NULL\n')
        f.write(');\n')
        for i in np.arange(0, csv.size(), 1):
            f.write("USE `MainGPN`;\n")
            hashIDs.append(idPNMainStart + i)
            raHMS = csv.getData("AD:(J2000)", i)
            print('raHMS = <' + raHMS + '>')
            ra = hmsToDeg(raHMS)
            decDMS = csv.getData("DEC (J2000)", i)
            dec = dmsToDeg(decDMS)
            print('raHMS = <' + raHMS + '> decDMS = <' + decDMS + '>')
            print('ra = ', ra, ' dec = ', dec)
            lon, lat = raDecToLonLat(ra, dec)
            print('lon=', lon, ', lat=', lat)
            # build the PNG designation lll.l+bb.b from the Galactic coordinates
            png = ''
            if lon < 100:
                png += '0'
            if lon < 10:
                png += '0'
            png += str(lon)
            png = png[:png.rfind('.') + 2]
            # print('png = <'+png+'>')
            if lat >= 0.:
                png += '+'
            # print('png = <'+png+'>')
            png += str(lat)
            # print('png = <'+png+'>')
            png = png[:png.rfind('.') + 2]
            # print('png = <'+png+'>')
            if (lat < 10.) and (lat >= 0.):
                png = png[:png.rfind('+') + 1] + '0' + png[png.rfind('+') + 1:]
            if (lat > -10.) and (lat < 0.):
                png = png[:png.rfind('-') + 1] + '0' + png[png.rfind('-') + 1:]
            # print('png = <'+png+'>')
            print('PNG ' + png)
            # STOP
            f.write("INSERT INTO `PNMain`(`idPNMain`,`PNG`,`refPNG`,`RAJ2000`,`DECJ2000`,`DRAJ2000`,`DDecJ2000`,")
            f.write("`Glon`,`Glat`,`refCoord`,`Catalogue`,`refCatalogue`,`userRecord`,`domain`,`refDomain`,`PNstat`,`refPNstat`,`refSimbadID`,`show`) ")
            f.write("VALUES (%d,'%s','%s','%s','%s',%.5f,%.5f,%.5f,%.5f,'%s','%s','%s','%s','%s','%s','%s','%s','%s','%s');\n"
                    % (idPNMainStart + i, png, 'sys', raHMS, decDMS, ra, dec, lon, lat,
                       'FrenchAmateurs', 'FrenchAmateurs', 'ziggy', 'ziggy', 'Galaxy', 'ziggy',
                       'c', 'ziggy', 'sys', 'y'))
            f.write("INSERT INTO `tbCNames`(`idtbCNames`,`Name`,`reference`,`InUse`,`refInUse`,`userRecord`,`idPNMain`,`simbadID`,`flag`)")
            f.write("VALUES (%d,'%s','%s',%d,'%s','%s',%d,'%s','%s');\n"
                    % (idtbCNamesStart + i, csv.getData('NOM', i), 'FrenchAmateurs', 1, 'sys',
                       'ziggy', idPNMainStart + i, 'n', 'n'))
            f.write("INSERT INTO `PNMain_tbCNames`(`idPNMain`,`idtbCNames`) VALUES (%d,%d);\n"
                    % (idPNMainStart + i, idtbCNamesStart + i))
            majDiamStr = csv.getData("Dimension en minute d'arc (')", i).strip('~').rstrip('"').rstrip(':')
            if 'x' in majDiamStr:
                majDiamStr, minDiamStr = majDiamStr.split(' x ')
                print('majDiamStr = <', majDiamStr, '>, minDiamStr = <', minDiamStr, '>')
                if float(majDiamStr) < float(minDiamStr):
                    minDiamStr, majDiamStr = [majDiamStr, minDiamStr]
            else:
                minDiamStr = ''
            if majDiamStr in ['stellar?', 'stellar']:
                majDiamStr = '1'
                minDiamStr = '1'
            # if 'x' in diamStr:
            print('majDiamStr = <', majDiamStr, '>, minDiamStr = <', minDiamStr, '>')
            if minDiamStr != '':
                f.write("INSERT INTO `tbAngDiam`(`idtbAngDiam`,`MajDiam`,`MinDiam`,`reference`,`InUse`,`refInUse`,`userRecord`,`idPNMain`,`tempflag`) ")
                f.write("VALUES (%d,%.1f,%.1f,'%s',%d,'%s','%s',%d,'%s');\n"
                        % (idtbAngDiamStart + i, float(majDiamStr) * 60., float(minDiamStr) * 60.,
                           'FrenchAmateurs', 1, 'sys', 'ziggy', idPNMainStart + i, 'n'))
            else:
                f.write("INSERT INTO `tbAngDiam`(`idtbAngDiam`,`MajDiam`,`reference`,`InUse`,`refInUse`,`userRecord`,`idPNMain`,`tempflag`) ")
                f.write("VALUES (%d,%.1f,'%s',%d,'%s','%s',%d,'%s');\n"
                        % (idtbAngDiamStart + i, float(majDiamStr) * 60., 'FrenchAmateurs', 1,
                           'sys', 'ziggy', idPNMainStart + i, 'n'))
            f.write("INSERT INTO `PNMain_tbAngDiam`(`idPNMain`,`idtbAngDiam`) VALUES (%d,%d);\n"
                    % (idPNMainStart + i, idtbAngDiamStart + i))
            # name = csv.getData('Name', i)
            # if name[:4] == 'Pa J':
            #     name = name[3:]
            # notes = csv.getData('comment1', i)
            # if notes == []:
            #     notes = csv.getData('comment2', i)
            # else:
            #     notes += ', ' + csv.getData('comment2', i)
            # print('notes = <' + notes + '>')
            # f.write("INSERT INTO `tbUsrComm`(`idtbUsrComm`,`idPNMain`,`user`,`public`,`comment`,`date`) ")
            # f.write("VALUES (%d,%d,'%s','%s','%s','%s');\n"
            #         % (idtbUsrCommStart + i, idPNMainStart + i, 'ziggy', 'y', notes,
            #            '2020-03-31 19:30:00'))
            f.write("USE `MainPNData`;\n")
            f.write("INSERT INTO `FrenchAmateurs_Oct2021`(`idFrenchAmateurs_Oct2021`,`idPNMain`,`mapFlag`) ")
            f.write("VALUES (%d,%d,'%s');\n" % (idFRAStart + i, idPNMainStart + i, 'y'))
    with open(inList[:inList.rfind('.')] + '.fetch', 'w') as f:
        f.write('hashpn fetch all ' + str(hashIDs[0]))
        for id in np.arange(1, len(hashIDs), 1):
            f.write(',' + str(hashIDs[id]))
        f.write(' -w force\n')
        f.write('hashpn brew all ' + str(hashIDs[0]))
        for id in np.arange(1, len(hashIDs), 1):
            f.write(',' + str(hashIDs[id]))
        f.write(' -w\n')
def addNewCandidates(inFileName, sqlFileName):
    ids = ''
    idsNew = ''
    nIds = 0
    idPNMainStart = 0
    idtbCNamesStart = 0
    newNames = []
    rows = csv.DictReader(open(pnMainFile))
    pngNames = []
    for row in rows:
        pngNames.append(row['PNG'])
        idPNMainStart = int(row['idPNMain']) + 1
    rows = csv.DictReader(open(tbCNamesFile))
    for row in rows:
        idtbCNamesStart = int(row['idtbCNames']) + 1
    print('idPNMainStart = ', idPNMainStart)
    print('idtbCNamesStart = ', idtbCNamesStart)
    nNew = 0
    nFound = 0
    with open(sqlFileName, 'w') as f:
        rows = csv.DictReader(open(inFileName))
        for row in rows:
            if row['pndb'] == '':
                nNew += 1
                print(row['id'] + ": new PN")
                rowsRaDec = csv.DictReader(open(raDecFileName))
                ra = ''
                dec = ''
                for rowRaDec in rowsRaDec:
                    if row['id'] == rowRaDec['id']:
                        ra = rowRaDec['RA']
                        dec = rowRaDec['DEC']
                lon, lat = raDecToLonLat(hmsToDeg(ra), dmsToDeg(dec))
                pngName = getNewPNGName(pngNames, lon, lat)
                print('pngName = <' + pngName + '>')
                pngNames.append(pngName)
                if True:
                    newNames.append(row['id'])
                    f.write("USE `MainGPN`;\n")
                    f.write("INSERT INTO `PNMain`(`idPNMain`,`PNG`,`refPNG`,`RAJ2000`,`DECJ2000`,`DRAJ2000`,`DDECJ2000`,`Glon`,`Glat`,`refCoord`,`Catalogue`,`refCatalogue`,`userRecord`,`domain`,`refDomain`,`PNstat`,`refPNstat`,`refSimbadID`,`show`)")
                    f.write("VALUES (%d,'%s','%s','%s','%s',%.5f,%.5f,%.5f,%.5f,'%s','%s','%s','%s','%s','%s','%s','%s','%s','%s');\n"
                            % (idPNMainStart, pngName, 'sys', ra, dec, hmsToDeg(ra), dmsToDeg(dec),
                               lon, lat, 'ziggy', 'ziggy', 'ziggy', 'ziggy', 'Galaxy', 'ziggy',
                               'c', 'ziggy', 'sys', 'y'))
                    f.write("INSERT INTO `tbCNames`(`idtbCNames`,`Name`,`reference`,`InUse`,`refInUse`,`userRecord`,`idPNMain`)")
                    f.write("VALUES (%d,'%s','%s',%d,'%s','%s',%d);\n"
                            % (idtbCNamesStart, row['id'], 'ziggy', 1, 'ziggy', 'ziggy', idPNMainStart))
                    f.write("INSERT INTO `PNMain_tbCNames`(`idPNMain`,`idtbCNames`)")
                    f.write("VALUES (%d,%d);\n" % (idPNMainStart, idtbCNamesStart))
                    idtbCNamesStart += 1
                    idPNMainStart += 1
                    idsNew += str(idPNMainStart) + ','
                    nIds += 1
            else:
                nFound += 1
                print(row['id'] + ": " + row['pndb'])
                nameFound = False
                tbCNames = csv.DictReader(open(tbCNamesFile))
                for tbCName in tbCNames:
                    if tbCName['Name'].replace(' ', '') == row['id'].replace(' ', ''):
                        nameFound = True
                if not nameFound:
                    if row['id'][:2] != 'PN':
                        f.write("INSERT INTO `tbCNames`(`idtbCNames`,`Name`,`reference`,`InUse`,`refInUse`,`userRecord`,`idPNMain`)")
                        f.write("VALUES (%d,'%s','%s',%d,'%s','%s',%d);\n"
                                % (idtbCNamesStart, row['id'], 'ziggy', 0, 'ziggy', 'ziggy',
                                   int(row['pndb'])))
                        # f.write("INSERT INTO `PNMain_tbCNames`(`idtbCNames`,`idPNMain`)")
                        # f.write("VALUES (%d,%d);\n" % (idtbCNamesStart, int(row['pndb'])))
                        idtbCNamesStart += 1
                ids += row['pndb'] + ','
    print('ids = ', nIds, ': ', ids)
    print('idsNew = ', idsNew, ' newNames = ', newNames)
    print('nNew = ', nNew, ', nFound = ', nFound)
import numpy as np
import csvFree, csvData
from myUtils import raDecToLonLat

csvFitsFiles = '/Users/azuri/daten/uni/HKU/HASH/hash_FitsFiles_181121.csv'
fitsFilesTable = csvFree.readCSVFile(csvFitsFiles)

# Flag AAOmega_bulge spectra with |l| > 10 deg or |b| > 10 deg, i.e. outside the bulge box.
setnames = fitsFilesTable.getData('setname')
outsideBulge = []
for i in range(len(setnames)):
    if setnames[i] == 'AAOmega_bulge':
        dra = float(fitsFilesTable.getData('DRAJ2000', i))
        ddec = float(fitsFilesTable.getData('DDECJ2000', i))
        l, b = raDecToLonLat(dra, ddec)
        if abs(l) > 10. or abs(b) > 10.:
            outsideBulge.append(i)
print('outsideBulge = ', len(outsideBulge), ': ', outsideBulge)
STOP  # undefined name: intentional hard stop before the 2dF filtering below

# Keep only spectra taken with 2dF instruments.
instrument = fitsFilesTable.getData('instrument')
for i in np.arange(len(instrument) - 1, -1, -1):
    inst = instrument[i]
    if '2dF' not in inst:
        fitsFilesTable.removeRow(i)
print('found ', fitsFilesTable.size(), ' spectra from 2dF')
instrument = fitsFilesTable.getData('instrument')
print('instrument = ', instrument)
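# Note: raDecToLonLat presumably returns the Galactic longitude in [0, 360); if so, the
# abs(l) > 10. test above counts e.g. l = 355 deg (5 deg from the Galactic centre) as outside
# the bulge. A sketch of a wrap-aware test (an assumption about the intended cut, not the
# script's current behaviour):
def isOutsideBulgeSketch(l, b, radius=10.):
    lWrapped = l - 360. if l > 180. else l   # map [0, 360) to (-180, 180]
    return abs(lWrapped) > radius or abs(b) > radius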