Example no. 1
def localization_post(dateobs, localization_name):
    """
    Manual FITS file upload::

        $ curl -c cookie.txt -d "user=XXXX" -d "password=XXXX" http://example.edu/login
        $ curl -b cookie.txt --data-binary @/path/to/skymap.fits http://example.edu/event/YYYY-MM-DDTHH:MM:SS/localization/bayestar.fits
        $ rm cookie.txt

    FIXME: figure out how to use HTTP Basic auth or session auth transparently.
    """  # noqa: E501
    with tempfile.NamedTemporaryFile(suffix=localization_name) as localfile:
        shutil.copyfileobj(request.stream, localfile)
        localfile.flush()
        skymap = io.read_sky_map(localfile.name, moc=True)

    def get_col(m, name):
        try:
            col = m[name]
        except KeyError:
            return None
        else:
            return col.tolist()

    models.db.session.add(
        models.Localization(localization_name=localization_name,
                            dateobs=dateobs,
                            uniq=get_col(skymap, 'UNIQ'),
                            probdensity=get_col(skymap, 'PROBDENSITY'),
                            distmu=get_col(skymap, 'DISTMU'),
                            distsigma=get_col(skymap, 'DISTSIGMA'),
                            distnorm=get_col(skymap, 'DISTNORM')))

    models.db.session.commit()
    tasks.skymaps.contour.delay(localization_name, dateobs)
    return '', 201
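
For reference, the manual upload described in the docstring can also be scripted in Python rather than with curl. This is a minimal client-side sketch, assuming the placeholder host, credentials, and paths from the docstring and the third-party requests library; the session cookie plays the role of cookie.txt.

import requests

BASE = 'http://example.edu'  # placeholder host from the docstring

with requests.Session() as session:
    # Log in once; the Session object keeps the cookie (the cookie.txt analogue).
    session.post(f'{BASE}/login', data={'user': 'XXXX', 'password': 'XXXX'})
    # Stream the FITS file to the endpoint handled by localization_post above.
    with open('/path/to/skymap.fits', 'rb') as f:
        resp = session.post(
            f'{BASE}/event/YYYY-MM-DDTHH:MM:SS/localization/bayestar.fits',
            data=f)
    resp.raise_for_status()  # the view returns HTTP 201 on success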
Example no. 2
    def test_plot_skymap_meta_data(self):
        from ligo.skymap import io

        expected_keys = {
            "HISTORY",
            "creator",
            "distmean",
            "diststd",
            "gps_creation_time",
            "gps_time",
            "nest",
            "objid",
            "origin",
            "vcs_version",
            "instruments",
        }
        self.result.plot_skymap(maxpts=50,
                                geo=False,
                                objid="test",
                                instruments="H1L1")
        fits_filename = f"{self.result.outdir}/{self.result.label}_skymap.fits"
        skymap_filename = f"{self.result.outdir}/{self.result.label}_skymap.png"
        pickle_filename = f"{self.result.outdir}/{self.result.label}_skypost.obj"
        hpmap, meta = io.read_sky_map(fits_filename)
        self.assertEqual(expected_keys, set(meta.keys()))
        self.assertTrue(os.path.exists(skymap_filename))
        self.assertTrue(os.path.exists(pickle_filename))
        self.result.plot_skymap(
            maxpts=50,
            geo=False,
            objid="test",
            instruments="H1L1",
            load_pickle=True,
            colorbar=True,
        )
Example no. 3
def get_skymap(url):
    # Try to download the multiorder sky map, since it will be faster.
    try:
        new_url = url.replace('.fits.gz', '.multiorder.fits')
        filename = download_file(new_url, cache=True)
    except urllib.request.HTTPError:
        filename = download_file(url, cache=True)
    return read_sky_map(filename, moc=True)
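
A hedged usage sketch for get_skymap; the superevent ID and file name below are placeholders, and the columns listed are the ones a multiorder map read with moc=True typically carries. The function needs download_file from astropy.utils.data, read_sky_map from ligo.skymap.io, and urllib in scope.

# Hypothetical GraceDB URL; get_skymap tries the faster .multiorder.fits
# variant first and falls back to the flat .fits.gz file.
skymap = get_skymap(
    'https://gracedb.ligo.org/api/superevents/S190814bv/files/bayestar.fits.gz')
print(skymap.colnames)              # typically ['UNIQ', 'PROBDENSITY', 'DISTMU', ...]
print(skymap.meta.get('gps_time'))  # event time taken from the FITS header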
Example no. 4
def readskymap(skymap):

    # Read the distance estimate and its standard deviation from the FITS header.
    with fits.open(skymap) as hdulist:
        distest = hdulist[1].header['DISTMEAN']
        diststd = hdulist[1].header['DISTSTD']

    # Read the flattened HEALPix sky map together with the distance layers.
    skymap = read_sky_map(skymap, moc=False, distances=True)

    prob = skymap[0][0]
    distmu = skymap[0][1]
    distsigma = skymap[0][2]
    distnorm = skymap[0][3]
    npix = len(prob)
    nside = hp.npix2nside(npix)
    return skymap, prob, distmu, distsigma, distnorm, npix, nside, distest, diststd
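
As a quick check of the flattened arrays returned above, the most probable pixel can be converted back to sky coordinates. A minimal sketch, assuming numpy as np and healpy as hp are imported alongside fits and read_sky_map, and a hypothetical local FITS file:

skymap, prob, distmu, distsigma, distnorm, npix, nside, distest, diststd = \
    readskymap('bayestar.fits.gz')        # hypothetical local file name
ipix_max = np.argmax(prob)                # pixel with the highest probability
theta, phi = hp.pix2ang(nside, ipix_max)  # RING ordering, read_sky_map's default
ra_max = np.rad2deg(phi)
dec_max = 90.0 - np.rad2deg(theta)
print('Peak probability at RA=%.2f deg, Dec=%.2f deg; mean distance %.0f +/- %.0f Mpc'
      % (ra_max, dec_max, distest, diststd))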
Example no. 5
def download(url, dateobs):

    def get_col(m, name):
        try:
            col = m[name]
        except KeyError:
            return None
        else:
            return col.tolist()

    filename = os.path.basename(urlparse(url).path)
    skymap = io.read_sky_map(url, moc=True)
    models.db.session.merge(
        models.Localization(
            localization_name=filename,
            dateobs=dateobs,
            uniq=get_col(skymap, 'UNIQ'),
            probdensity=get_col(skymap, 'PROBDENSITY'),
            distmu=get_col(skymap, 'DISTMU'),
            distsigma=get_col(skymap, 'DISTSIGMA'),
            distnorm=get_col(skymap, 'DISTNORM')))
    models.db.session.commit()
    return filename
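
A hedged usage sketch with placeholder URL and event time. The design difference from Example no. 1 is that this path uses session.merge rather than session.add, so re-running download for the same localization updates the existing row instead of inserting a duplicate (assuming localization_name and dateobs identify the record).

# Hypothetical call; the URL and dateobs values are placeholders for a real alert.
filename = download(
    'https://gracedb.ligo.org/api/superevents/S190814bv/files/bayestar.fits.gz',
    '2019-08-14T21:10:39')
print('stored localization', filename)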
Example no. 6
def process_gcn(payload, root):

    # respond only to real 'observation' events.
    if root.attrib['role'] != 'observation':
        return

    # Read all of the VOEvent parameters from the "What" section.
    params = {
        elem.attrib['name']: elem.attrib['value']
        for elem in root.iterfind('.//Param')
    }

    # Respond only to 'CBC' events. Change 'CBC' to 'Burst'
    # to respond only to unmodeled burst events.
    if params['Group'] != 'CBC':
        return

    graceid = params['GraceID']
    prelim = params['AlertType'] + params['Pkt_Ser_Num']

    #read sky map parameters
    skymap, prob, distmu, distsigma, distnorm, npix, nside, distest, diststd = readskymap(
        params['skymap_fits'])

    #create integrated probability skymap
    csm = integrated_probability(prob)
    #calculate contours from csm
    contours = hpix_contours(csm, levels=[0.99], nest=False)

    #define 99% region
    levels = [0.99]
    levelsper = [99]

    #define distance limits
    distmax = distest + 5 * diststd
    distmin = distest - 5 * diststd

    #get coordinates of all GLADEV2 galaxies
    coordinates, data = GLADEV2coordinates(distmax, distmin)

    # Work out which sky map version and packet suffix to request, so that the
    # multiorder file matching this alert can be fetched from GraceDB. Use
    # if/elif chains so an earlier match is not overwritten by the final else.
    if 'v2' in params['skymap_fits']:
        version = '.v2'
    elif 'v1' in params['skymap_fits']:
        version = '.v1'
    elif 'v0' in params['skymap_fits']:
        version = '.v0'
    else:
        version = ''

    if ',3' in params['skymap_fits']:
        after = ',3'
    elif ',2' in params['skymap_fits']:
        after = ',2'
    elif ',1' in params['skymap_fits']:
        after = ',1'
    elif ',0' in params['skymap_fits']:
        after = ',0'
    else:
        after = ''

    if 'bayestar' in params['skymap_fits']:
        url = ('https://gracedb.ligo.org/api/superevents/' + graceid +
               '/files/bayestar.multiorder.fits' + after)
    else:
        url = ('https://gracedb.ligo.org/api/superevents/' + graceid +
               '/files/LALInference' + version + '.multiorder.fits' + after)

    #crossmatch GLADE with multiorder skymap
    skymap = read_sky_map(url, moc=True)
    result = crossmatch(skymap, coordinates)

    #for each contour region (e.g. 99%)
    for d in range(0, len(contours)):

        jsonlist = []
        jsonlist2 = []
        tablenames = []
        ra_incontourlist = []
        contourlens = []
        dec_incontourlist = []
        finalprobslist = []
        finalgalnamelist = []
        dist_incontourlist = []
        Bmag_incontourlist = []
        ra_incontourlist1 = []
        dec_incontourlist1 = []
        probs_incontourlist1 = []
        probs_incontourlist = []
        finalgalnamelist1 = []
        dist_incontourlist1 = []
        Bmag_incontourlist1 = []
        finalgalname = []
        mudists_incontourlist1 = []
        distssigma_incontourlist1 = []
        distsnorm_incontourlist1 = []
        pdist_incontourlist1 = []
        Slum_incontourlist1 = []
        contourlist1 = []
        contourlist = []
        contourss = []
        ccc = []

        #separate masked array into separate contours
        split_dec, split_ra = split_contours(contours, levels[d], d)

        #retrieve galaxies in 99 percent regions
        results = data[result.searched_prob < 0.99]
        ra_incontour = results['RA'].values
        dec_incontour = results['Dec'].values
        dist_incontour = results['dist'].values
        Bmag_incontour = results['Bmag'].values
        name_incontour = results['HyperLEDA'].values

        # if the contour is split at 0/360 degrees, rejoin back together for plot
        split_ra2, split_dec2 = join_0_360(split_ra, split_dec)

        #create a plot of contours and number them
        contour_plots(split_ra2, split_dec2, graceid, prelim, levelsper[d])
        contourss = np.ones(len(ra_incontour))

        # extract probability parameters at galaxy positions

        probs, mudists, distssigma, distsnorm = extract_LIGO_probability(
            ra_incontour, dec_incontour, nside, distsigma, prob, distnorm,
            distmu)

        # remove duplicates
        indices, finalgalname, ra_incontourlist1, dec_incontourlist1, dist_incontourlist1, Bmag_incontourlist1, probs_incontourlist1, mudists_incontourlist1, distssigma_incontourlist1, distsnorm_incontourlist1, contourlist = unique_galaxies(
            contourlist, contourss, ra_incontourlist1, ra_incontour,
            dec_incontourlist1, dec_incontour, dist_incontourlist1,
            dist_incontour, probs_incontourlist1, name_incontour, finalgalname,
            probs, Bmag_incontourlist1, Bmag_incontour, mudists_incontourlist1,
            mudists, distssigma_incontourlist1, distssigma,
            distsnorm_incontourlist1, distsnorm)

        # Calculate probability score

        finalprobs, pdist, Slum = calculate_absolute_probability(
            dist_incontourlist1, Bmag_incontourlist1, mudists_incontourlist1,
            distssigma_incontourlist1, distsnorm_incontourlist1,
            probs_incontourlist1)

        finalprobss = []
        for j in range(0, len(finalprobs[0])):
            finalprobss.append(finalprobs[0, j])
        # make lists for dataframes

        finalprobss, ra_incontourlist, dec_incontourlist, finalprobslist, probs_incontourlist, finalgalnamelist, dist_incontourlist, Bmag_incontourlist, contourlist1, pdist_incontourlist1, Slum_incontourlist1 = makelists(
            finalprobss, ra_incontourlist, ra_incontourlist1,
            dec_incontourlist, dec_incontourlist1, finalprobslist,
            probs_incontourlist1, probs_incontourlist, finalgalnamelist,
            finalgalname, dist_incontourlist, dist_incontourlist1,
            Bmag_incontourlist, Bmag_incontourlist1, contourlist1, contourlist,
            pdist, pdist_incontourlist1, Slum, Slum_incontourlist1)

        #sort by descending probability

        finaldictsorted, cumsumprobs = sortbyprob(finalprobslist)

        #create dataframe for jsons

        dataf = create_dataframe(finaldictsorted, ra_incontourlist,
                                 dec_incontourlist, probs_incontourlist,
                                 finalgalnamelist, dist_incontourlist,
                                 pdist_incontourlist1, Bmag_incontourlist,
                                 Slum_incontourlist1, contourlist, cumsumprobs)

        jsonlist.append(dataf[[
            'Galaxy name', 'Galaxy probability score', 'RA (degrees)',
            'Dec (degrees)', 'Location probability score', 'Distance (Mpc)',
            'Distance probability score', 'B magnitude',
            'B luminosity probability score', 'Cumulative Score'
        ]].to_json())
        jsonlist2.append(dataf[[
            'Galaxy name', 'Galaxy probability score', 'RA (degrees)',
            'Dec (degrees)', 'Location probability score', 'Distance (Mpc)',
            'Distance probability score', 'B magnitude',
            'B luminosity probability score', 'Cumulative Score'
        ]].to_csv())

        #createtxt(dataf,finalgalnamelist, finaldictsorted,graceid,prelim,levelsper,d,ccc)
        createjsonfile(jsonlist, graceid, prelim, levelsper, d)
        createasciifile(jsonlist2, graceid, prelim, levelsper, d)
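
process_gcn has the (payload, root) signature that the pygcn client expects, so it can be attached directly to the GCN/TAN stream. A minimal sketch, assuming the pygcn package; the notice-type filter restricts the handler to LVC alerts.

import gcn

@gcn.handlers.include_notice_types(
    gcn.notice_types.LVC_PRELIMINARY,
    gcn.notice_types.LVC_INITIAL,
    gcn.notice_types.LVC_UPDATE)
def handler(payload, root):
    process_gcn(payload, root)

gcn.listen(handler=handler)  # blocks and dispatches each incoming VOEvent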
Example no. 7
                    help='Size of the field of view in degrees')
parser.add_argument('--title', type=str, default=None,
                    help='If given, include a title to the plot')

parser.add_argument('--q', type=float, help="mass ratio")
parser.add_argument('--qscale', type=float, default=1, help="scaling factor for q")
parser.add_argument('--mchirp', type=float, help="Chirp mass")
parser.add_argument('--mchirpscale', type=float, default=1, help="scaling factor for mchirp")
parser.add_argument('--dsname', type=str, help='dataset name for h5 file')
parser.add_argument('--statsfile', type=str, help='Stats file')

args = parser.parse_args()

## Plot the Optimal Skymap

optprob, _ = read_sky_map(args.opt, nest=False, distances=False) ###

radec = [float(x) for x in args.radec]

deg_per_pix = 4
header = Header(dict(
    NAXIS=2,
    NAXIS1=360*deg_per_pix, NAXIS2=180*deg_per_pix, # number of pixels
    CRPIX1=180*deg_per_pix, CRPIX2=90*deg_per_pix, # reference pixel
    CRVAL1=radec[0], CRVAL2=radec[1], # physical value at reference pixel
    CDELT1=1./deg_per_pix,
    CDELT2=1./deg_per_pix,
    CTYPE1='RA---AIT',
    CTYPE2='DEC--AIT',
    RADESYS='ICRS'))
wcs = WCS(header)
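
The snippet stops after constructing the Aitoff WCS. For a quick look at optprob without the custom projection, healpy's built-in Mollweide viewer is enough; a sketch, assuming healpy is available as hp and matplotlib.pyplot as plt:

# Quick-look alternative to the custom WCS above: plot the flattened
# probability map rotated so the given RA/Dec sits at the centre.
hp.mollview(optprob, rot=(radec[0], radec[1]), title=args.title,
            unit='probability per pixel')
plt.show()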
Example no. 8
import io
from ligo.skymap.io import read_sky_map
from lsst.alert.packet import Schema
import numpy as np

import ligo.skymap.plot
import matplotlib.pyplot as plt

skymap_filename = 'plasticc_schema/sample_data/S190814bv-bayestar.multiorder.fits'

skymap, header = read_sky_map(skymap_filename)
with open(skymap_filename, 'rb') as f:
    skymap_bytes = f.read()

schema = Schema.from_file('plasticc_schema/lsst.v4_1.lvkAlertContent.avsc')

mock_alert_content = dict(supereventId='S123456',
                          gpstime=header['gps_time'],
                          skymapFilename='bayestar.fits.gz',
                          skymapHealpix=skymap_bytes)
serialized_alert = schema.serialize(mock_alert_content)

# sanity check
deserialized_alert = schema.deserialize(serialized_alert)
hpx, header = read_sky_map(io.BytesIO(deserialized_alert['skymapHealpix']))
import healpy
healpy.mollview(hpx)
plt.show()
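
The round trip works because skymapHealpix carries the raw FITS bytes, so any reader can be applied to the deserialized payload. For instance, the multi-resolution structure survives and can be recovered by passing moc=True (a short sketch under the same assumptions as the script above):

# Read the deserialized bytes again, this time keeping the multiorder structure.
moc_map = read_sky_map(io.BytesIO(deserialized_alert['skymapHealpix']), moc=True)
print(len(moc_map), 'multiorder tiles')  # rows are UNIQ tiles, not fixed-NSIDE pixels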