def makeHashFile():
    """Write a <name>.hash file (PNG, RA, DEC per row) for the new PNe in newPNeFile."""
    inputData = csvFree.readCSVFile(newPNeFile)
    with open(newPNeFile[:newPNeFile.rfind('.')] + '.hash', 'w') as fh:
        for i in range(inputData.size()):
            dra = float(inputData.getData('DRAJ2000', i))
            ddec = float(inputData.getData('DDECJ2000', i))
            lon, lat = raDecToLonLat(dra, ddec)
            ra = degToHMS(dra)
            dec = degToDMS(ddec)
            print('lon = ', lon, ', lat = ', lat, ', dra = ', dra, ', ddec = ', ddec,
                  ', ra = ', ra, ', dec = ', dec)
            png = getPNG(lon, lat)
            fh.write(png + ',' + ra + ',' + dec + '\n')
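
# --- Reference sketches (not this repo's actual implementations) -------------
# makeHashFile() relies on helpers defined elsewhere in this repo
# (raDecToLonLat, degToHMS, degToDMS, getPNG). The sketches below show the
# assumed behaviour using astropy; the exact output formats of the real
# helpers may differ.
def _raDecToLonLat_sketch(raDeg, decDeg):
    """Equatorial (J2000, degrees) -> Galactic lon/lat (degrees)."""
    import astropy.units as u
    from astropy.coordinates import SkyCoord
    c = SkyCoord(ra=raDeg * u.deg, dec=decDeg * u.deg, frame='icrs').galactic
    return c.l.degree, c.b.degree

def _degToHMS_sketch(raDeg):
    """RA in degrees -> sexagesimal 'HH:MM:SS.ss' string."""
    import astropy.units as u
    from astropy.coordinates import Angle
    return Angle(raDeg * u.deg).to_string(unit=u.hour, sep=':', precision=2, pad=True)

def _degToDMS_sketch(decDeg):
    """Dec in degrees -> sexagesimal '+DD:MM:SS.s' string."""
    import astropy.units as u
    from astropy.coordinates import Angle
    return Angle(decDeg * u.deg).to_string(unit=u.deg, sep=':', precision=1,
                                           alwayssign=True, pad=True)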
def addNewPNeToHASH():
    """Write the SQL commands that add the new PNe from newPNeFile to the HASH database."""
    inputData = csvFree.readCSVFile(newPNeFile)
    PNMain = csvFree.readCSVFile(hashPNMainFileName)
    hashOut = csvFree.readCSVFile(newHashOutFile)
    pneInHash = hashOut.getData('id')
    pngs = PNMain.getData('PNG')
    pngsInHash = []
    with open(sqlCommandsFile, 'w') as f:
        with open(iphasTable, 'w') as ft:
            # create the catalogue mapping table in MainPNData
            ft.write('CREATE TABLE IF NOT EXISTS MainPNData.' + catalogName + ' (\n')
            ft.write('id' + catalogName + ' INT AUTO_INCREMENT PRIMARY KEY UNIQUE,\n')
            ft.write('idPNMain INT,\n')
            ft.write('mapFlag VARCHAR(1) NOT NULL\n')
            ft.write(');\n')
            ft.write("USE `MainPNData`;\n")

            ids = []
            f.write("USE `MainGPN`;\n")
            for i in range(inputData.size()):
                dra = float(inputData.getData('DRAJ2000', i))
                ddec = float(inputData.getData('DDECJ2000', i))
                lon, lat = raDecToLonLat(dra, ddec)
                ra = degToHMS(dra)
                dec = degToDMS(ddec)
                print('lon = ', lon, ', lat = ', lat)
                png = getPNG(lon, lat)
                print('png = <' + png + '>')

                # if the PNG designation is already taken in HASH, move on to the
                # next suffix letter (a, b, c, d, e)
                if png in pngs:
                    print('PNG ' + png + ' already in HASH')
                    pngsInHash.append(png)
                    png = png + 'a'
                if png in pngs:
                    pngsInHash.append(png)
                    print('PNG ' + png + ' already in HASH')
                    png = png[:-1] + 'b'
                if png in pngs:
                    pngsInHash.append(png)
                    print('PNG ' + png + ' already in HASH')
                    png = png[:-1] + 'c'
                if png in pngs:
                    pngsInHash.append(png)
                    print('PNG ' + png + ' already in HASH')
                    png = png[:-1] + 'd'
                if png in pngs:
                    pngsInHash.append(png)
                    print('PNG ' + png + ' already in HASH')
                    png = png[:-1] + 'e'

                # reuse the existing HASH ID if the PN was found in the HASH search
                # output, otherwise assign a new one
                if (png in pneInHash) and (hashOut.getData('pndb', pneInHash.index(png)) != ''):
                    print('PNG ' + png, ' found in pneInHash: pneInHash.index(', png,
                          ') = ', pneInHash.index(png))
                    idPNMain = int(hashOut.getData('pndb', pneInHash.index(png)))
                    # add IPHAS name to common names
                else:
                    idPNMain = idPNMainStart + i
                ids.append(idPNMain)

                f.write("INSERT INTO `PNMain`(`idPNMain`,`PNG`,`refPNG`,`RAJ2000`,`DECJ2000`,`DRAJ2000`,`DDecJ2000`,")
                f.write("`Glon`,`Glat`,`refCoord`,`Catalogue`,`refCatalogue`,`userRecord`,`domain`,`refDomain`,`PNstat`,`refPNstat`,`refSimbadID`,`show`) ")
                f.write("VALUES (%d,'%s','%s','%s','%s',%.5f,%.5f,%.5f,%.5f,'%s','%s','%s','%s','%s','%s','%s','%s','%s','%s');\n" % (
                    idPNMain,
                    png,  # csvBarlow.getData('PNG',iRow),
                    'sys',
                    ra,
                    dec,
                    dra,
                    ddec,
                    lon,
                    lat,
                    'ziggy',
                    'ziggy',
                    'ziggy',
                    'ziggy',
                    'Galaxy',
                    'ziggy',
                    'c',
                    'ziggy',
                    'sys',
                    'y'))
                ft.write("INSERT INTO `" + catalogName + "`(`idPNMain`,`mapFlag`) ")
                ft.write("VALUES (%d,'%s');\n" % (idPNMain, 'y'))

                # common name (IPHAS designation) and angular diameter entries
                iphasName = getIPHASName(ra, dec)
                f.write("INSERT INTO `PNMain_tbCNames`(`idPNMain`,`idtbCnames`) VALUES (%d,%d);\n"
                        % (idPNMain, idtbCNamesStart + i))
                f.write("INSERT INTO `tbCNames`(`idtbCNames`,`Name`,`InUse`,`refInUse`,`userRecord`,`idPNMain`,`simbadID`,`flag`) ")
                f.write("VALUES (%d,'%s',%d,'%s','%s',%d,'%s','%s');\n"
                        % (idtbCNamesStart + i, iphasName, 1, 'sys', 'sys', idPNMain, 'n', 'n'))
                f.write("INSERT INTO `PNMain_tbAngDiam`(`idPNMain`,`idtbAngDiam`) VALUES (%d,%d);\n"
                        % (idPNMain, idtbAngDiamStart + i))
                f.write("INSERT INTO `tbAngDiam`(`idtbAngDiam`,`MajDiam`,`InUse`,`userRecord`,`idPNMain`,`tempflag`) ")
                f.write("VALUES (%d,%.0f,%d,'%s',%d,'%s');\n"
                        % (idtbAngDiamStart + i, 300., 1, 'sys', idPNMain, 'n'))

    # write the shell script that fetches and brews the images for each new HASH ID
    with open(hashpnFile, 'w') as hf:
        for id in ids:
            hf.write('hashpn fetch all ' + str(id) + ' -w force\n')
            hf.write('hashpn brew all ' + str(id) + ' -w\n')
            hf.write('echo "finished HASH ID %d" >> logfile_IPHAS.log\n' % id)
    print('pngsInHash = ', pngsInHash)
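
# getIPHASName() is defined elsewhere in this repo; the sketch below shows the
# assumed behaviour: turn the sexagesimal strings produced by degToHMS/degToDMS
# into an IAU-style IPHAS designation ('IPHASX Jhhmmss.s+ddmmss'). The exact
# formatting (rounding vs. truncation, prefix) of the real helper may differ.
def _getIPHASName_sketch(ra, dec):
    """Sketch only: 'HH:MM:SS.ss', '+DD:MM:SS.s' -> 'IPHASX Jhhmmss.s+ddmmss' (assumed)."""
    h, m, s = ra.split(':')
    d, arcmin, arcsec = dec.split(':')
    sign = '-' if dec.strip().startswith('-') else '+'
    return 'IPHASX J%02d%02d%04.1f%s%02d%02d%02d' % (
        int(h), int(m), float(s), sign, abs(int(d)), int(arcmin), int(float(arcsec)))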
def findHashIDs(path):
    """Match the FITS files in <path> against the fibre list CSV in the same
    directory and append the matched targets to the HASH search list."""
    notFound = []
    if os.path.exists(notFoundFileName):
        with open(notFoundFileName, 'r') as f:
            notFound = f.readlines()

    fibreListName = list(pathlib.Path(path).glob('*.csv'))[0].name
    fibreList = csvFree.readCSVFile(os.path.join(path, fibreListName))
    fitsFiles = list(pathlib.Path(path).glob('*.fits'))
    fitsFiles = [fitsFile.name for fitsFile in fitsFiles]
    targetNames = [fitsFile[:fitsFile.find('_')] for fitsFile in fitsFiles]

    searchList = []
    if os.path.exists(hashSearchFileName):
        with open(hashSearchFileName, 'r') as f:
            searchList = f.readlines()

    for targetName in targetNames:
        found = False
        for i in range(fibreList.size()):
            if targetName == 'PNG357.12+01.66':  # debugging output for one problematic target
                print('searching for PNG357.12+01.66')
                if 'PNG357.12' in fibreList.getData('Name', i):
                    print('PNG357.12 found in ', fibreList.getData('Name', i))
            if fibreList.getData('Name', i).replace('PNG', '') == targetName.replace('PNG', ''):
                found = True
                searchList.append('%s,%s,%s\n' % (
                    targetName,
                    degToHMS(np.degrees(float(fibreList.getData('RA', i)))),
                    degToDMS(np.degrees(float(fibreList.getData('DEC', i)))),
                ))
        if not found:
            print('ERROR: did not find target <' + targetName + '> in ' + fibreListName)
            if targetName == 'PNG357.12+01.66':
                print('PNG357.12+01.66 not found')
                STOP  # undefined name: intentional NameError used to halt execution here
            notFound.append(targetName + ' in ' + fibreListName + '\n')

    # double check that all files mentioned in the fibre list are there
    for i in range(fibreList.size()):
        found = False
        for targetName in targetNames:
            if fibreList.getData('Name', i).replace('PNG', '') == targetName.replace('PNG', ''):
                found = True
        if not found:
            if 'Parked' not in fibreList.getData('Name', i):
                if ('Type' not in fibreList.header) or (
                        ('Type' in fibreList.header) and (fibreList.getData('Type', i) == 'P')):
                    notFound.append(fibreList.getData('Name', i) + '_*.fits in '
                                    + path[path.rfind('/') + 1:] + '\n')

    with open(hashSearchFileName, 'w') as f:
        for line in searchList:
            f.write(line)
    with open(notFoundFileName, 'w') as f:
        for line in notFound:
            f.write(line)
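
# Usage sketch only: the directory layout below is hypothetical. findHashIDs()
# is run once per observation directory; each call re-reads and re-writes
# hashSearchFileName and notFoundFileName, so results accumulate across runs.
def _findHashIDs_usage_sketch():
    import glob
    for nightPath in sorted(glob.glob('/path/to/2dF_observations/*')):  # hypothetical path
        findHashIDs(nightPath)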
distanceTraveled_km = kickVelocity * 3600. * 24. * 365.2425 * 1000.  # km
distanceTraveled_pc = distanceTraveled_km / 3.086e+13
print('distanceTraveled_pc = ', distanceTraveled_pc)

L_max = 1e40
L_min = 1e37
L0 = 3.0128e28
Mv_max = -2.5 * np.log10(L_max / L0)
Mv_min = -2.5 * np.log10(L_min / L0)
print('Mv_max = ', Mv_max, ', Mv_min = ', Mv_min)

import astropy.units as u
from astropy.coordinates import SkyCoord

raDeg = 23.3333
decDeg = 66.66667
ra = degToHMS(raDeg)
dec = degToDMS(decDeg)
print('ra = ', ra, ', dec = ', dec)

raDeg2000 = 127.37996101
decDeg2000 = 4.44349150
ra2000 = degToHMS(raDeg2000)
dec2000 = degToDMS(decDeg2000)
print('ra2000 = ', ra2000, ', dec2000 = ', dec2000)
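
# Cross-check of the degToHMS/degToDMS conversions above using the SkyCoord
# import just made (illustrative only; the sexagesimal formatting may differ
# slightly from this repo's own helpers).
c2000 = SkyCoord(ra=raDeg2000 * u.deg, dec=decDeg2000 * u.deg, frame='icrs')
print('astropy check: ', c2000.to_string('hmsdms', sep=':', precision=2))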
# for each raw 'FibNNN[B|R].fits' file: look up its fibre in the CSV fibre list,
# copy it to a per-target file name, and fix the header coordinates and date
# print('filename[-5:] = ',filename[-5:])
if (filename[-5:] == '.fits') and (filename[:3] == 'Fib'):
    print('filename = <' + filename + '>')
    fibreID = filename[3:]
    # strip the spectrograph-arm letter (B = blue, R = red) from the fibre ID
    if fibreID[fibreID.rfind('.') - 1] == 'B':
        fibreID = fibreID[:fibreID.find('B')]
    elif fibreID[fibreID.rfind('.') - 1] == 'R':
        fibreID = fibreID[:fibreID.find('R')]
    rows = csv.DictReader(open(csvFileName))
    print('fibreID = ', fibreID)
    for row in rows:
        # print('row[#Fibre] = ',row['#Fibre'])
        if row['#Fibre'] == fibreID:
            ra = math.degrees(float(row['RA']))
            dec = math.degrees(float(row['DEC']))
            raHMS = degToHMS(ra)
            decDMS = degToDMS(dec)
            print('ra = ', ra, ', raHMS = ', raHMS)
            print('dec = ', dec, ', decDMS = ', decDMS)
            if 'B' in filename:
                arm = 'B'
            else:
                arm = 'R'
            newFileName = os.path.join(path, row['Name'] + arm + '_2D180614.fits')
            copyfile(os.path.join(path, filename), newFileName)
            setHeaderKeyWord(newFileName, 'RA', value=raHMS, hdu=0)
            setHeaderKeyWord(newFileName, 'DEC', value=decDMS, hdu=0)
            setHeaderKeyWord(newFileName, 'DATE-OBS', value='18-06-2014',
                             hdu=0)  # hdu=0 assumed, matching the calls above
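
# setHeaderKeyWord() is defined elsewhere in this repo; a minimal sketch of the
# assumed behaviour, using astropy.io.fits, is given below for reference only.
def _setHeaderKeyWord_sketch(fitsName, keyword, value, hdu=0):
    """Set (or add) a header keyword in the given HDU and save the file in place."""
    from astropy.io import fits
    with fits.open(fitsName, mode='update') as hdulist:
        hdulist[hdu].header[keyword] = value
        hdulist.flush()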