Example #1
    def __init__(self, ff, mjd, x0, y0):
        """Define an observation"""

        # Store
        self.mjd = mjd
        self.x0 = x0
        self.y0 = y0

        # Get times
        self.nfd = Time(self.mjd, format='mjd', scale='utc').isot

        # Correct for rotation
        tobs = Time(ff.mjd + 0.5 * ff.texp / 86400.0,
                    format='mjd',
                    scale='utc')
        tobs.delta_ut1_utc = 0
        hobs = tobs.sidereal_time("mean", longitude=0.0).degree
        tmid = Time(self.mjd, format='mjd', scale='utc')
        tmid.delta_ut1_utc = 0
        hmid = tmid.sidereal_time("mean", longitude=0.0).degree

        # Compute ra/dec
        world = ff.w.wcs_pix2world(np.array([[self.x0, self.y0]]), 1)
        if ff.tracked:
            self.ra = world[0, 0]
        else:
            self.ra = world[0, 0] + hobs - hmid
        self.de = world[0, 1]
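
Most of the snippets on this page share one small trick: force delta_ut1_utc to zero so that sidereal_time() does not need to consult an IERS table. A minimal, self-contained sketch of just that pattern (the epoch is a placeholder; the ~14 arcsec figure follows from |UT1-UTC| always being kept below 0.9 s):

from astropy.time import Time

t = Time(58000.0, format='mjd', scale='utc')   # placeholder epoch
t.delta_ut1_utc = 0.0                          # skip the IERS UT1-UTC lookup
# costs at most ~0.9 s of UT1, i.e. ~14 arcsec of Earth rotation
gmst = t.sidereal_time('mean', longitude=0.0)  # Greenwich mean sidereal time
print(gmst.degree)
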
Example #2
 def test_delta_ut1_utc(self):
     t = Time('2010-01-01 00:00:00', format='iso', scale='utc', precision=6)
     t.delta_ut1_utc = 0.3 * u.s
     assert t.ut1.iso == '2010-01-01 00:00:00.300000'
     t.delta_ut1_utc = 0.4 / 60. * u.minute
     assert t.ut1.iso == '2010-01-01 00:00:00.400000'
     with pytest.raises(u.UnitsError):
         t.delta_ut1_utc = 0.4 * u.m
     # Also check that a TimeDelta works.
     t.delta_ut1_utc = TimeDelta(0.3, format='sec')
     assert t.ut1.iso == '2010-01-01 00:00:00.300000'
     t.delta_ut1_utc = TimeDelta(0.5/24./3600., format='jd')
     assert t.ut1.iso == '2010-01-01 00:00:00.500000'
Example #3
 def test_delta_ut1_utc(self):
     t = Time('2010-01-01 00:00:00', format='iso', scale='utc', precision=6)
     t.delta_ut1_utc = 0.3 * u.s
     assert t.ut1.iso == '2010-01-01 00:00:00.300000'
     t.delta_ut1_utc = 0.4 / 60. * u.minute
     assert t.ut1.iso == '2010-01-01 00:00:00.400000'
     with pytest.raises(u.UnitsError):
         t.delta_ut1_utc = 0.4 * u.m
     # Also check that a TimeDelta works.
     t.delta_ut1_utc = TimeDelta(0.3, format='sec')
     assert t.ut1.iso == '2010-01-01 00:00:00.300000'
     t.delta_ut1_utc = TimeDelta(0.5 / 24. / 3600., format='jd')
     assert t.ut1.iso == '2010-01-01 00:00:00.500000'
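
The test above exercises the accepted types for delta_ut1_utc. A stand-alone sketch of the same idea, with illustrative values (a bare float is interpreted as seconds):

import astropy.units as u
from astropy.time import Time, TimeDelta

t = Time('2010-01-01 00:00:00', scale='utc', precision=6)
t.delta_ut1_utc = 0.3                            # plain float, in seconds
t.delta_ut1_utc = 300 * u.ms                     # any time-like Quantity
t.delta_ut1_utc = TimeDelta(0.3, format='sec')   # or a TimeDelta
print(t.ut1.iso)                                 # 2010-01-01 00:00:00.300000
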
Example #4
    def on_new_position(self):
        t = Time(datetime.datetime.utcnow(),
                 scale='utc',
                 location=vancouver_location)
        t.delta_ut1_utc = 0
        sidereal_degrees = t.sidereal_time('mean').degree
        self.relative_ra_degrees = sidereal_degrees - self.ha_relative_degrees

        absolute_ra_degrees = self.relative_ra_degrees - self.ra_zero_pos_degrees
        absolute_dec_degrees = self.relative_dec_degrees - self.dec_zero_pos_degrees
        c = SkyCoord(ra=absolute_ra_degrees,
                     dec=absolute_dec_degrees,
                     frame='icrs',
                     unit='deg')
        ra_output = '%dh%02dm%.2fs' % (c.ra.hms)
        dec_output = '%dd%02dm%.1fs' % (c.dec.dms)

        #output_str = '%s/%s' % (ra_output, dec_output)

        #c = SkyCoord(ra=absolute_ra_degrees, dec=absolute_dec_degrees, frame='icrs', unit='deg')
        #ra_output = '%dh%02dm%.2fs' % (c.ra.hms)
        #dec_output = '%dd%2d%.1fs' % (c.dec.dms)

        #output_str = '%s/%s' % (ra_output, dec_output)
        #self.r.publish(messages.STATUS_DISPLAY_CURRENT_RA_DEC, redis_helpers.toRedis(output_str))
        self.r.publish(
            messages.STATUS_DISPLAY_CURRENT_RA_DEC,
            redis_helpers.toRedis((absolute_ra_degrees, absolute_dec_degrees)))
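
As a side note on the manual '%dh%02dm...' formatting above: SkyCoord and Angle can render sexagesimal strings directly. A hedged sketch with placeholder coordinates:

from astropy.coordinates import SkyCoord

c = SkyCoord(ra=201.3, dec=-43.0, frame='icrs', unit='deg')
print(c.to_string('hmsdms'))                                     # e.g. '13h25m12s -43d00m00s'
print(c.ra.to_string(unit='hourangle', sep='hms', precision=2))
print(c.dec.to_string(unit='deg', sep='dms', precision=1))
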
Example #5
File: lib_ms.py Project: eretana/LiLF
    def print_HAcov(self, png=None):
        """
        some info on the MSs
        """
        telescope = self.mssListObj[0].getTelescope()
        if telescope == 'LOFAR':
            telescope_coords = EarthLocation(lat=52.90889*u.deg, lon=6.86889*u.deg, height=0*u.m)
        elif telescope == 'GMRT':
            telescope_coords = EarthLocation(lat=19.0948*u.deg, lon=74.0493*u.deg, height=0*u.m)
        else:
            raise ValueError('Unknown Telescope.')
        
        has = []; elevs = []
        for ms in self.mssListObj:
            time = np.mean(ms.getTimeRange())
            time = Time( time/86400, format='mjd')
            time.delta_ut1_utc = 0. # no need to download precise table for leap seconds
            logger.info('%s (%s): Hour angle: %.1f hrs - Elev: %.2f (Sun distance: %.0f)' % (ms.nameMS,time.iso,ms.ha.deg/15.,ms.elev.deg,ms.sun_dist.deg))
            has.append(ms.ha.deg/15.)
            elevs.append(ms.elev.deg)

        if png is not None:
            import matplotlib.pyplot as pl
            pl.figure(figsize=(6,6))
            ax1 = pl.gca()
            ax1.plot(has, elevs, 'ko')
            ax1.set_xlabel('HA [hrs]')
            ax1.set_ylabel('elevs [deg]')
            logger.debug('Save plot: %s' % png)
            pl.savefig(png)
Example #6
def _get_time():
    t = Time([[1], [2]], format='cxcsec',
             location=EarthLocation(1000, 2000, 3000, unit=u.km))
    t.format = 'iso'
    t.precision = 5
    t.delta_ut1_utc = np.array([[3.0], [4.0]])
    t.delta_tdb_tt = np.array([[5.0], [6.0]])
    t.out_subfmt = 'date_hm'

    return t
Example #7
File: stio.py Project: cbassa/sattools
    def __init__(self,ff,mjd,x0,y0):
        """Define an observation"""

        # Store
        self.mjd=mjd
        self.x0=x0
        self.y0=y0
        
        # Get times
        self.nfd=Time(self.mjd,format='mjd',scale='utc').isot
        
        # Correct for rotation
        tobs=Time(ff.mjd+0.5*ff.texp/86400.0,format='mjd',scale='utc')
        tobs.delta_ut1_utc=0
        hobs=tobs.sidereal_time("mean",longitude=0.0).degree
        tmid=Time(self.mjd,format='mjd',scale='utc')
        tmid.delta_ut1_utc=0
        hmid=tmid.sidereal_time("mean",longitude=0.0).degree
        
        # Compute ra/dec
        world=ff.w.wcs_pix2world(np.array([[self.x0,self.y0]]),1)
        self.ra=world[0,0]+hobs-hmid
        self.de=world[0,1]
Example #8
File: lib_ms.py Project: henedler/LiLF
    def __init__(self, pathMS):
        """
        pathMS:        path of the MS, without '/' at the end!
        pathDirectory: path of the parent directory of the MS
        nameMS:        name of the MS, without parent directories and extension (which is assumed to be ".MS" always)
        """
        self.setPathVariables(pathMS)

        # If the field name is not a recognised calibrator name, one of two scenarios is true:
        # 1. The field is not a calibrator field;
        # 2. The field is a calibrator field, but the name was not properly set.
        # The following lines correct the field name if scenario 2 is the case.
        calibratorDistanceThreshold = 0.5  # in degrees
        if (not self.isCalibrator()):
            if (self.getCalibratorDistancesSorted()[0] <
                    calibratorDistanceThreshold):
                nameFieldOld = self.getNameField()
                nameFieldNew = self.getCalibratorNamesSorted()[0]
                #logger.warning("Although the field name '" + nameFieldOld + "' is not recognised as a known calibrator name, " +
                #                "the phase centre coordinates suggest that this scan is a calibrator scan. Changing field name into '" +
                #                nameFieldNew + "'...")
                self.setNameField(nameFieldNew)

        telescope = self.getTelescope()
        if telescope == 'LOFAR':
            telescope_coords = EarthLocation(lat=52.90889 * u.deg,
                                             lon=6.86889 * u.deg,
                                             height=0 * u.m)
        elif telescope == 'GMRT':
            telescope_coords = EarthLocation(lat=19.0948 * u.deg,
                                             lon=74.0493 * u.deg,
                                             height=0 * u.m)
        else:
            raise ValueError('Unknown Telescope.')

        time = np.mean(self.getTimeRange())
        time = Time(time / 86400, format='mjd')
        time.delta_ut1_utc = 0.  # no need to download precise table for leap seconds
        coord_sun = get_sun(time)
        coord_sun = SkyCoord(ra=coord_sun.ra,
                             dec=coord_sun.dec)  # fix transformation issue
        ra, dec = self.getPhaseCentre()
        coord = SkyCoord(ra * u.deg, dec * u.deg)
        self.elev = coord.transform_to(
            AltAz(obstime=time, location=telescope_coords)).alt
        self.sun_dist = coord.separation(coord_sun)
        lst = time.sidereal_time('mean', telescope_coords.lon)
        self.ha = lst - coord.ra  # hour angle
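
A compact, self-contained sketch of the same bookkeeping (elevation, Sun distance, hour angle) with a placeholder LOFAR-like site and an arbitrary target; only astropy is assumed:

import astropy.units as u
from astropy.time import Time
from astropy.coordinates import AltAz, EarthLocation, SkyCoord, get_sun

site = EarthLocation(lat=52.90889 * u.deg, lon=6.86889 * u.deg, height=0 * u.m)
target = SkyCoord(123.4 * u.deg, 48.2 * u.deg)       # placeholder phase centre

t = Time(57000.0, format='mjd', scale='utc')         # placeholder mid-observation time
t.delta_ut1_utc = 0.0                                # avoid the IERS download

elev = target.transform_to(AltAz(obstime=t, location=site)).alt
sun = get_sun(t)
sun_dist = target.separation(SkyCoord(ra=sun.ra, dec=sun.dec))  # same re-wrapping trick as above
ha = t.sidereal_time('mean', site.lon) - target.ra   # hour angle = LST - RA
print(elev.deg, sun_dist.deg, ha.hour)
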
Example #9
File: test_basic.py Project: MQQ/astropy
    def test_transforms(self):
        """Transform from UTC to all supported time scales (TAI, TCB, TCG,
        TDB, TT, UT1, UTC).  This requires auxiliary information (latitude and
        longitude)."""

        lat = 19.48125
        lon = -155.933222
        t = Time("2006-01-15 21:24:37.5", format="iso", scale="utc", precision=6, lat=lat, lon=lon)
        t.delta_ut1_utc = 0.3341  # Explicitly set one part of the xform
        assert t.utc.iso == "2006-01-15 21:24:37.500000"
        assert t.ut1.iso == "2006-01-15 21:24:37.834100"
        assert t.tai.iso == "2006-01-15 21:25:10.500000"
        assert t.tt.iso == "2006-01-15 21:25:42.684000"
        assert t.tcg.iso == "2006-01-15 21:25:43.322690"
        assert t.tdb.iso == "2006-01-15 21:25:42.683799"
        assert t.tcb.iso == "2006-01-15 21:25:56.893378"
Example #10
def set_lsts_from_time_array(uvd):
    lsts = []
    curtime = uvd.time_array[0]
    for ind, jd in enumerate(uvd.time_array):
        if ind == 0 or not np.isclose(jd, curtime, atol=1e-6, rtol=1e-12):
            #              print 'Curtime/jd: ', curtime, jd
            curtime = jd
            latitude, longitude, altitude = uvd.telescope_location_lat_lon_alt_degrees
            #              print 'Loc: ', latitude, longitude, altitude
            t = Time(jd, format='jd', location=(longitude, latitude))
            #              print 't: ', t
            t.delta_ut1_utc = iers_a.ut1_utc(t)
#              print 't.delta_ut1_utc: ', t.delta_ut1_utc
        print "LST: ", t.sidereal_time('apparent')
        lsts.append(t)
    return lsts
Example #11
def set_lsts_from_time_array(uvd):
      lsts = []
      curtime = uvd.time_array[0]
      for ind, jd in enumerate(uvd.time_array):
          if ind == 0 or not np.isclose(jd, curtime, atol=1e-6, rtol=1e-12):
#              print 'Curtime/jd: ', curtime, jd
              curtime = jd
              latitude, longitude, altitude = uvd.telescope_location_lat_lon_alt_degrees
#              print 'Loc: ', latitude, longitude, altitude
              t = Time(jd, format='jd', location=(longitude, latitude))
#              print 't: ', t
              t.delta_ut1_utc = iers_a.ut1_utc(t)
#              print 't.delta_ut1_utc: ', t.delta_ut1_utc
          print "LST: ", t.sidereal_time('apparent')
          lsts.append(t)
      return lsts
Example #12
File: test_basic.py Project: MQQ/astropy
    def test_properties(self):
        """Use properties to convert scales and formats.  Note that the UT1 to
        UTC transformation requires a supplementary value (``delta_ut1_utc``)
        that can be obtained by interpolating from a table supplied by IERS.
        This will be included in the package later."""

        t = Time("2010-01-01 00:00:00", format="iso", scale="utc")
        t.delta_ut1_utc = 0.3341  # Explicitly set one part of the xform
        assert np.allclose(t.jd, 2455197.5)
        assert t.iso == "2010-01-01 00:00:00.000"
        assert t.tt.iso == "2010-01-01 00:01:06.184"
        assert t.tai.iso == "2010-01-01 00:00:34.000"
        assert np.allclose(t.utc.jd, 2455197.5)
        assert np.allclose(t.ut1.jd, 2455197.500003867)
        assert t.tcg.isot == "2010-01-01T00:01:06.910"
        assert np.allclose(t.unix, 1262304000.0)
        assert np.allclose(t.cxcsec, 378691266.184)
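
The docstring above notes that delta_ut1_utc can be obtained by interpolating a table supplied by IERS. A hedged sketch of doing exactly that with a reasonably recent astropy (assumes the IERS-A table can be downloaded or is already cached); in current versions, simply accessing t.ut1 performs an equivalent lookup automatically:

from astropy.time import Time
from astropy.utils.iers import IERS_Auto

t = Time("2010-01-01 00:00:00", format="iso", scale="utc")
iers_table = IERS_Auto.open()                      # download/cache the IERS-A table
t.delta_ut1_utc = t.get_delta_ut1_utc(iers_table)  # interpolated UT1-UTC at this epoch
print(t.ut1.iso)
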
Example #13
def height_time(coord, time, time_left=False, limalt=0.0*u.deg, site=EarthLocation(0.0, 0.0, 0.0), fuse=TimeDelta(0, format='sec', scale='tai')):
    """
    """
    coord = coord_pack(coord)
    timeut = time - fuse
    if len(time.shape) == 1:
        timeut = Time([[i] for i in timeut.jd], format='jd', scale='utc')
    timeut.location = site
    timeut.delta_ut1_utc = 0
    ra, ts = mesh_coord(coord, timeut)
    hourangle = Angle(ts-ra)
    distzen = np.arccos(np.sin(coord.dec)*np.sin(site.latitude) + np.cos(coord.dec)*np.cos(site.latitude)*np.cos(hourangle))
    altura = 90*u.deg - distzen
    if time_left == True:
        poente = sky_time(coord, time, rise_set=True, limalt=limalt, site=site, fuse=fuse)[2]
        time_rest = poente - time
        return altura, time_rest
    return altura
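
The altitude above follows the standard spherical-triangle relation alt = 90 deg - arccos(sin(dec) sin(lat) + cos(dec) cos(lat) cos(HA)). A minimal sketch of that step in isolation, with placeholder values:

import numpy as np
import astropy.units as u

lat = 31.96 * u.deg      # site latitude (placeholder)
dec = -5.0 * u.deg       # target declination (placeholder)
ha = 2.0 * u.hourangle   # hour angle = local sidereal time minus RA (placeholder)

zenith_dist = np.arccos(np.sin(dec) * np.sin(lat)
                        + np.cos(dec) * np.cos(lat) * np.cos(ha))
alt = 90 * u.deg - zenith_dist.to(u.deg)
print(alt)
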
Example #14
def sky_time(coord, time, rise_set=False, limalt=0*u.deg, site=EarthLocation(0.0, 0.0, 0.0), fuse=TimeDelta(0, format='sec', scale='tai')):
    """
    """
    if type(limalt) != u.quantity.Quantity:
        limalt = limalt*u.deg
    if time.isscalar == True:
        time = Time([time.iso], format='iso', scale='utc')
    coord = coord_pack(coord)
    timeut = time - fuse
    if len(time.shape) == 1:
        timeut = Time([[i] for i in timeut.jd], format='jd', scale='utc')
    timeut.delta_ut1_utc = 0
    timeut.location = site
    ra, ts = mesh_coord(coord, timeut)
    dif_h_sid = Angle(ra-ts)
    dif_h_sid.wrap_at('180d', inplace=True)
    dif_h_sol = dif_h_sid * (23.0 + 56.0/60.0 + 4.0916/3600.0) / 24.0
    dif = TimeDelta(dif_h_sol.hour*u.h, scale='tai')
    culminacao = timeut + dif
    culminacao.delta_ut1_utc = 0
    culminacao.location = site
    if rise_set == True:
        hangle_lim = np.arccos((np.cos(90.0*u.deg-limalt) - np.sin(coord.dec)*np.sin(site.latitude)) / (np.cos(coord.dec)*np.cos(site.latitude)))
        tsg_lim = Angle(ra + hangle_lim)
        dtsg_lim = tsg_lim - culminacao.sidereal_time('mean')
        dtsg_lim.wrap_at(360 * u.deg, inplace=True)
        dtsg_lim_sol = dtsg_lim * (23.0 + 56.0/60.0 + 4.0916/3600.0) / 24.0
        a = np.where(np.isnan(dtsg_lim_sol))
        dtsg_lim_sol[a] = Angle([48.0]*len(a[0])*u.hour)
        dtsg_np = TimeDelta((dtsg_lim_sol.hour*u.h))
        sunrise = culminacao - dtsg_np
        sunset = culminacao + dtsg_np
        if (site.latitude > 0*u.deg):
            alwaysup = np.where(coord.dec >= 90*u.deg - site.latitude + limalt)
            neverup = np.where(coord.dec <= -(90*u.deg - site.latitude - limalt))
        else:
            alwaysup = np.where(coord.dec <= -(90*u.deg + site.latitude + limalt))
            neverup = np.where(coord.dec >= 90*u.deg + site.latitude - limalt)
        culminacao = culminacao + fuse
        sunrise = sunrise + fuse
        sunset = sunset + fuse
        return culminacao, sunrise, sunset, alwaysup, neverup
    culminacao = culminacao + fuse
    return culminacao
Example #15
def sky_time(coord, time, rise_set=False, limalt=0*u.deg, site=EarthLocation(0.0, 0.0, 0.0), fuse=TimeDelta(0, format='sec', scale='tai')):
    """
    """
    if type(limalt) != u.quantity.Quantity:
        limalt = limalt*u.deg
    if time.isscalar == True:
        time = Time([time.iso], format='iso', scale='utc')
    coord = coord_pack(coord)
    timeut = time - fuse
    if len(time.shape) == 1:
        timeut = Time([[i] for i in timeut.jd], format='jd', scale='utc')
    timeut.delta_ut1_utc = 0
    timeut.location = site
    ra, ts = mesh_coord(coord, timeut[:,0])
    dif_h_sid = Angle(ra-ts)
    dif_h_sid.wrap_at('180d', inplace=True)
    dif_h_sol = dif_h_sid * (23.0 + 56.0/60.0 + 4.0916/3600.0) / 24.0
    dif = TimeDelta(dif_h_sol.hour*u.h, scale='tai')
    culminacao = timeut + dif
    culminacao.delta_ut1_utc = 0
    culminacao.location = site
    if (site.latitude > 0*u.deg):
        alwaysup = np.where(coord.dec >= 90*u.deg - site.latitude + limalt)
        neverup = np.where(coord.dec <= -(90*u.deg - site.latitude - limalt))
    else:
        alwaysup = np.where(coord.dec <= -(90*u.deg + site.latitude + limalt))
        neverup = np.where(coord.dec >= 90*u.deg + site.latitude - limalt)
    if rise_set == True:
        hangle_lim = np.arccos((np.cos(90.0*u.deg-limalt) - np.sin(coord.dec)*np.sin(site.latitude)) / (np.cos(coord.dec)*np.cos(site.latitude)))
        tsg_lim = Angle(ra + hangle_lim)
        dtsg_lim = tsg_lim - culminacao.sidereal_time('mean')
        dtsg_lim.wrap_at(360 * u.deg, inplace=True)
        dtsg_lim_sol = dtsg_lim * (23.0 + 56.0/60.0 + 4.0916/3600.0) / 24.0
        a = np.where(np.isnan(dtsg_lim_sol))
        dtsg_lim_sol[a] = Angle([48.0]*len(a[0])*u.hour)
        dtsg_np = TimeDelta((dtsg_lim_sol.hour*u.h))
        sunrise = culminacao - dtsg_np
        sunset = culminacao + dtsg_np
        culminacao = culminacao + fuse
        sunrise = sunrise + fuse
        sunset = sunset + fuse
        return culminacao, sunrise, sunset, alwaysup, neverup
    culminacao = culminacao + fuse
    return culminacao, alwaysup, neverup
Example #16
def examine_exposure(info, cframe, cframe_keys):
    # Process CFrame header keywords
    for keyword in cframe_keys:
        info[keyword] = cframe.header[keyword]

    obs_ra = cframe.header['RADEG']
    obs_dec = cframe.header['DECDEG']
    taibeg = cframe.header['TAI-BEG']
    taiend = cframe.header['TAI-END']

    taimid = 0.5*(taibeg+taiend)
    dec = Angle(obs_dec, u.degree)
    ra = Angle(obs_ra, u.degree)



    time = Time(taimid/86400.0, format='mjd', scale='tai', location=apo)
    try:
        lst = time.sidereal_time('apparent')
    except IndexError:
        ## workaround for problem with recent observations relative to astropy release
        ## http://astropy.readthedocs.org/en/v0.4.2/time/index.html#transformation-offsets
        from astropy.utils.iers import IERS_A, IERS_A_URL
        from astropy.utils.data import download_file 
        iers_a_file = download_file(IERS_A_URL, cache=True)  
        iers_a = IERS_A.open(iers_a_file)                     
        time.delta_ut1_utc = time.get_delta_ut1_utc(iers_a)

        lst = time.sidereal_time('apparent')

    ha = (lst - ra)

    if ha > np.pi*u.radian:
        ha -= 2*np.pi*u.radian
    elif ha < -np.pi*u.radian:
        ha += 2*np.pi*u.radian
    info['mean_ha'] = ha.to(u.degree).value

    alt, az = equatorial_to_horizontal(ra, dec, apolat, ha)
    info['mean_alt'] = alt.to(u.degree).value
Example #17
def examine_exposure(info, cframe, cframe_keys):
    # Process CFrame header keywords
    for keyword in cframe_keys:
        info[keyword] = cframe.header[keyword]

    obs_ra = cframe.header['RADEG']
    obs_dec = cframe.header['DECDEG']
    taibeg = cframe.header['TAI-BEG']
    taiend = cframe.header['TAI-END']

    taimid = 0.5 * (taibeg + taiend)
    dec = Angle(obs_dec, u.degree)
    ra = Angle(obs_ra, u.degree)

    time = Time(taimid / 86400.0, format='mjd', scale='tai', location=apo)
    try:
        lst = time.sidereal_time('apparent')
    except IndexError:
        ## workaround for problem with recent observations relative to astropy release
        ## http://astropy.readthedocs.org/en/v0.4.2/time/index.html#transformation-offsets
        from astropy.utils.iers import IERS_A, IERS_A_URL
        from astropy.utils.data import download_file
        iers_a_file = download_file(IERS_A_URL, cache=True)
        iers_a = IERS_A.open(iers_a_file)
        time.delta_ut1_utc = time.get_delta_ut1_utc(iers_a)

        lst = time.sidereal_time('apparent')

    ha = (lst - ra)

    if ha > np.pi * u.radian:
        ha -= 2 * np.pi * u.radian
    elif ha < -np.pi * u.radian:
        ha += 2 * np.pi * u.radian
    info['mean_ha'] = ha.to(u.degree).value

    alt, az = equatorial_to_horizontal(ra, dec, apolat, ha)
    info['mean_alt'] = alt.to(u.degree).value
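
Examples #16 and #17 work around missing UT1-UTC data by downloading the IERS-A table explicitly. A hedged sketch of a small helper in the same spirit (the function name is illustrative, not from either project): try the IERS-derived offset first and fall back to zero, which costs roughly 10 arcsec in sidereal time but needs no network access.

from astropy.time import Time
from astropy.utils.iers import IERS_Auto


def apparent_lst(time, longitude_deg):
    """Apparent local sidereal time at an east longitude in degrees, with IERS fallback."""
    try:
        time.delta_ut1_utc = time.get_delta_ut1_utc(IERS_Auto.open())
    except Exception:
        time.delta_ut1_utc = 0.0   # coarse but offline-friendly
    return time.sidereal_time('apparent', longitude=longitude_deg)


t = Time(57000.0, format='mjd', scale='utc')   # placeholder epoch
print(apparent_lst(t, -155.93))
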
Example #18
def get_lst_for_time(jd_array, latitude, longitude, altitude):
    """
    Get the lsts for a set of jd times at an earth location.

    Args:
        jd_array: an array of JD times to get lst for
        latitude: latitude of location to get lst for in degrees
        longitude: longitude of location to get lst for in degrees
        altitude: altitude of location to get lst for in meters

    Returns:
        an array of lst times corresponding to the jd_array
    """
    lsts = []
    lst_array = np.zeros_like(jd_array)
    for ind, jd in enumerate(np.unique(jd_array)):
        t = Time(jd,
                 format='jd',
                 location=(Angle(longitude,
                                 unit='deg'), Angle(latitude, unit='deg')))

        # avoid errors if iers.conf.auto_max_age is set to None, as we do in testing if the iers url is down
        if iers.conf.auto_max_age is None:  # pragma: no cover
            delta, status = t.get_delta_ut1_utc(return_status=True)
            if ((status == iers.TIME_BEFORE_IERS_RANGE)
                    or (status == iers.TIME_BEYOND_IERS_RANGE)):
                warnings.warn(
                    'time is out of IERS range, setting delta ut1 utc to extrapolated value'
                )
                t.delta_ut1_utc = delta

        lst_array[np.where(
            np.isclose(jd, jd_array, atol=1e-6,
                       rtol=1e-12))] = t.sidereal_time('apparent').radian

    return lst_array
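
An illustrative call of get_lst_for_time() above, with placeholder telescope coordinates and arbitrary JDs:

import numpy as np

jds = np.array([2457458.175, 2457458.180, 2457458.185])
lst_array = get_lst_for_time(jds, latitude=-30.72, longitude=21.43, altitude=1073.0)
print(lst_array)   # apparent LST in radians, same shape as jds
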
Example #19
def computation(name, start_year, start_month, start_day, start_hour,
                start_minute, start_second, end_year, end_month, end_day,
                end_hour, end_minute, end_second):

    # set initial and final times
    aepoch = time.strptime('2004 1 1 0 0 0', '%Y %m %d %H %M %S')
    tepoch = timegm(aepoch)

    dini = datetime(start_year, start_month, start_day, start_hour,
                    start_minute, start_second)  # start date for the analysis
    dfin = datetime(end_year, end_month, end_day, end_hour, end_minute,
                    end_second)  # stop date for the analysis
    dt = timedelta(
        0, 1
    )  # time interval for orbital sampling (1 s ~ 8 km ~24us max timing error)

    #  create output root file and tree

    f = TFile(name, "recreate")
    tree = TTree("position", "start of 2017 to 2017-11-09T10:09:10")
    year = array('i', [0])
    month = array('i', [0])
    day = array('i', [0])
    hour = array('i', [0])
    min = array('i', [0])
    sec = array('i', [0])
    X = array('f', [0.])
    Y = array('f', [0.])
    Z = array('f', [0.])
    lon = array('f', [0.])
    lat = array('f', [0.])
    h = array('f', [0.])
    obt = array('d', [0.])
    tree.Branch('year', year, 'year/I')
    tree.Branch('month', month, 'month/I')
    tree.Branch('day', day, 'day/I')
    tree.Branch('hour', hour, 'hour/I')
    tree.Branch('min', min, 'min/I')
    tree.Branch('sec', sec, 'sec/I')
    tree.Branch('X', X, 'X/F')
    tree.Branch('Y', Y, 'Y/F')
    tree.Branch('Z', Z, 'Z/F')
    tree.Branch('lon', lon, 'lon/F')
    tree.Branch('lat', lat, 'lat/F')
    tree.Branch('h', h, 'h/F')
    tree.Branch('obt', obt, 'obt/D')

    # load AGILE TLE

    agile_tle_file = open('AGILE_TLE_2017-Feb2018.dat',
                          'r')  #data from 22 March 2015 (newconf data)
    agile_tle_list = agile_tle_file.readlines()
    agile_tle_list.reverse()

    # track AGILE orbit

    l5 = agile_tle_list.pop()
    l6 = agile_tle_list.pop()
    agile0 = twoline2rv(l5, l6, wgs72)
    l5 = agile_tle_list.pop()
    l6 = agile_tle_list.pop()
    agile1 = twoline2rv(l5, l6, wgs72)

    # loop on time range

    d = dini

    counter = 0
    while d <= dfin:

        while d > agile1.epoch:
            print("From time ", d, " using TLE: ", l5)
            agile0 = agile1
            l3 = agile_tle_list.pop()
            l4 = agile_tle_list.pop()
            agile1 = twoline2rv(l3, l4, wgs72)

        xagile, vagile = agile0.propagate(d.year, d.month, d.day, d.hour,
                                          d.minute,
                                          d.second + d.microsecond * 1.e-6)
        x0 = np.array(xagile)
        print xagile
        X[0] = x0[0]
        Y[0] = x0[1]
        Z[0] = x0[2]
        year[0] = d.year
        month[0] = d.month
        day[0] = d.day
        hour[0] = d.hour
        min[0] = d.minute
        sec[0] = d.second

        t = Time(d)
        t.delta_ut1_utc = iers_a.ut1_utc(t)
        s = t.sidereal_time('mean', 'greenwich')
        s.wrap_angle = 180 * u.deg
        r = rot.rotation_z(s)
        x0_ecef = r.dot(x0)
        lat[0], lon[0], h[0] = rot.ecef2geodetic(x0_ecef[0], x0_ecef[1],
                                                 x0_ecef[2])
        obt[0] = timegm(d.timetuple()) - tepoch

        #print (d, year[0], month[0], day[0], hour[0], min[0], sec[0], obt[0], x0[0], x0[1], x0[2], lon[0], lat[0], h[0])

        counter = counter + 1
        if counter == 5000:
            counter = 0
            print(year[0], month[0], day[0], hour[0], min[0], sec[0])

        tree.Fill()
        d = d + dt

    tree.GetCurrentFile().Write()
    tree.GetCurrentFile().Close()
    '''
Example #20
def start_survey(args):
    """Sets up a survey mode observation from the master node
    """

    # initialize parameters
    pars = {}
    # initialize coordinate overview
    coordinates = []
    # Load static configuration
    filename = os.path.join(os.path.dirname(os.path.realpath(__file__)),
                            CONFIG)
    with open(filename, 'r') as f:
        config = yaml.load(f)
    conf_sc = 'sc{:.0f}'.format(args.science_case)  # sc3 or sc4
    conf_mode = args.science_mode.lower()  # i+tab, iquv+tab, i+iab, iquv+iab
    # IQUV not yet supported
    if 'iquv' in conf_mode:
        log("ERROR: IQUV modes not yet supported")
        exit()
    # save user home dir
    pars['home'] = os.path.expanduser('~')
    # science case specific
    pars['affinity'] = config['affinity']
    pars['usemac'] = args.mac
    pars['parset'] = args.parset
    pars['science_case'] = args.science_case
    pars['time_unit'] = config[conf_sc]['time_unit']
    pars['nbit'] = config[conf_sc]['nbit']
    pars['nchan'] = config[conf_sc]['nchan']
    # debug options
    pars['debug'] = args.debug
    if args.debug and not '{cb}' in args.dada_dir:
        log("WARNING: {cb} not present in dada_dir")
    pars['dada_dir'] = args.dada_dir
    if args.mac:
        # could have non-zero starting subband
        pars['freq'] = config[conf_sc]['freq'] - .5*(config[conf_sc]['bw_rf'] - config[conf_sc]['bw']) +\
                       config[conf_sc]['first_subband'] * pars['time_unit'] * 1E-6
    else:
        pars['freq'] = config[conf_sc]['freq']
    pars['bw'] = config[conf_sc]['bw']
    pars['nbeams'] = config[conf_sc]['nbeams']
    pars['missing_beams'] = config[conf_sc]['missing_beams']
    pars['nbuffer'] = config[conf_sc]['nbuffer']
    pars['hdr_size'] = config[conf_sc]['hdr_size']
    pars['valid_modes'] = config[conf_sc]['valid_modes']
    pars['network_port_start'] = config[conf_sc]['network_port_start']
    pars['tsamp'] = config[conf_sc]['tsamp']
    pars['page_size'] = config[conf_sc]['page_size']
    pars['fits_templates'] = config[conf_sc]['fits_templates'].format(**pars)
    # pol and beam specific
    pars['ntabs'] = config[conf_mode]['ntabs']
    pars['nsynbeams'] = config[conf_mode]['nsynbeams']
    pars['science_mode'] = config[conf_mode]['science_mode']

    # derived values
    pars['chan_width'] = float(pars['bw']) / pars['nchan']
    pars['min_freq'] = pars['freq'] - pars['bw'] / 2 + pars['chan_width'] / 2
    if args.obs_mode == 'survey':
        # filterbank + fits + 3x AMBER
        pars['nreader'] = 5
    elif args.obs_mode == 'amber':
        # 3x AMBER
        pars['nreader'] = 3
    else:
        # filterbank or fits or dbdisk or dbscrubber
        pars['nreader'] = 1

    # load observation specific arguments
    pars['proctrigger'] = args.proctrigger
    pars['amber_mode'] = args.amber_mode
    pars['snrmin'] = args.snrmin
    pars['source'] = args.source
    pars['ra'] = args.ra
    pars['dec'] = args.dec.replace('m', '-')
    # Observing time, has to be multiple of 1.024 seconds
    pars['nbatch'] = int(np.ceil(args.duration / 1.024))
    pars['tobs'] = pars['nbatch'] * 1.024
    # start time
    if args.tstart == 'default':
        # start in 30 s
        starttime = Time.now() + TimeDelta(30, format='sec')
    else:
        starttime = Time(args.tstart, scale='utc')
        if ((starttime - Time.now()).sec < 30) and not pars['debug']:
            log("ERROR: start time should be at least 30 seconds in the future, got {}"
                .format(starttime))
            exit()

    # Time(pars['utc_start'], format='iso', scale='utc')
    # round to multiple of 1.024 s since sync time (=init bsn)
    # note: init bsn is multiple of 781250
    # then increases by 80000 every 1.024s
    # simply use user-provided value in debug mode
    if not pars['debug']:
        cmd = os.path.join(os.path.dirname(os.path.realpath(__file__)),
                           CHECKBSN)
        try:
            init_bsn = float(subprocess.check_output(cmd).strip())
        except Exception:
            log("ERROR: Could not get init bsn from ccu-corr")
            exit()
        init_unix = init_bsn / pars['time_unit']
        unixstart = round(
            (starttime.unix - init_unix) / 1.024) * 1.024 + init_unix
        delta_bsn = (unixstart - init_unix) * pars['time_unit']
        pars['startpacket'] = "{:.0f}".format(init_bsn + delta_bsn)
    else:
        unixstart = starttime.unix
        pars['startpacket'] = "{:.0f}".format(unixstart * pars['time_unit'])
    starttime = Time(unixstart, format='unix')
    # delta=0 means slightly less accurate (~10arcsec), but no need for internet
    starttime.delta_ut1_utc = 0

    pars['utc_start'] = starttime.datetime.strftime('%Y-%m-%d-%H:%M:%S')
    pars['date'] = starttime.datetime.strftime("%Y%m%d")
    pars['datetimesource'] = "{}.{}".format(pars['utc_start'], pars['source'])
    pars['mjd_start'] = starttime.mjd
    pars['debug_dir'] = config[conf_sc]['debug_dir']
    # change output directories in debug mode
    if args.debug:
        config[conf_sc]['output_dir'] = '{debug_dir}/output/'.format(**pars)
        config[conf_sc]['amber_dir'] = '{debug_dir}/output/amber'.format(
            **pars)
        config[conf_sc]['log_dir'] = '{debug_dir}/output/log'.format(**pars)
        config[conf_sc]['master_dir'] = '{debug_dir}/output/results'.format(
            **pars)
    # output directories
    pars['master_dir'] = config[conf_sc]['master_dir'].format(**pars)
    pars['output_dir'] = config[conf_sc]['output_dir'].format(**pars)
    pars['log_dir'] = config[conf_sc]['log_dir'].format(**pars)
    pars['amber_dir'] = config[conf_sc]['amber_dir'].format(**pars)

    # observing mode
    if args.obs_mode not in pars['valid_modes']:
        log("ERROR: observation mode not valid: {}".format(args.obs_mode))
        exit()
    else:
        pars['obs_mode'] = args.obs_mode
    # beams
    if args.beams is not None:
        pars['beams'] = [int(beam) for beam in args.beams.split(',')]
        # make sure each beam is present only once
        pars['beams'] = list(set(pars['beams']))
    else:
        pars['sbeam'] = args.sbeam
        if args.ebeam == 0:
            pars['ebeam'] = pars['sbeam']
        elif args.ebeam < pars['sbeam']:
            log("WARNING: ebeam cannot be smaller than sbeam. Setting ebeam to sbeam ({})"
                .format(pars['sbeam']))
            pars['ebeam'] = pars['sbeam']
        else:
            pars['ebeam'] = args.ebeam
        pars['beams'] = range(pars['sbeam'], pars['ebeam'] + 1)

    # check validity of beams
    if min(pars['beams']) < 0:
        log("ERORR: CB index < 0 is impossible")
        exit()
    if max(pars['beams']) > pars['nbeams'] - 1:
        log("ERROR: CB index > {} is impossible".format(pars['nbeams'] - 1))
        exit()

    # remove the missing beams
    for beam in pars['missing_beams']:
        try:
            pars['beams'].remove(beam)
        except ValueError:
            # beam was not in list of beams anyway
            continue

    # we have all parameters now
    # create output dir on master node
    cmd = "mkdir -p {master_dir}/".format(**pars)
    os.system(cmd)
    log(cmd)

    # create psrdada header and config file for each beam
    # config file
    cfg = {}
    cfg['buffersize'] = pars['ntabs'] * pars['nchan'] * pars['page_size']
    cfg['nbuffer'] = pars['nbuffer']
    cfg['nreader'] = pars['nreader']
    cfg['obs_mode'] = pars['obs_mode']
    cfg['startpacket'] = pars['startpacket']
    cfg['duration'] = pars['tobs']
    cfg['nbatch'] = pars['nbatch']
    cfg['output_dir'] = pars['output_dir']
    cfg['ntabs'] = pars['ntabs']
    cfg['nsynbeams'] = pars['nsynbeams']
    cfg['amber_conf_dir'] = os.path.join(
        os.path.dirname(os.path.realpath(__file__)), AMBERCONFDIR)
    cfg['amber_config'] = os.path.join(
        os.path.dirname(os.path.realpath(__file__)), AMBERCONFIG)
    cfg['amber_dir'] = pars['amber_dir']
    cfg['log_dir'] = pars['log_dir']
    cfg['master_dir'] = pars['master_dir']
    cfg['snrmin'] = pars['snrmin']
    cfg['proctrigger'] = pars['proctrigger']
    cfg['amber_mode'] = pars['amber_mode']
    cfg['fits_templates'] = pars['fits_templates']
    cfg['min_freq'] = pars['min_freq']
    cfg['max_freq'] = pars['min_freq'] + pars['bw'] - pars['chan_width']
    cfg['usemac'] = pars['usemac']
    cfg['affinity'] = pars['affinity']
    cfg['page_size'] = pars['page_size']
    cfg['hdr_size'] = pars['hdr_size']
    cfg['debug'] = pars['debug']

    # load PSRDADA header template
    with open(
            os.path.join(os.path.dirname(os.path.realpath(__file__)),
                         TEMPLATE), 'r') as f:
        header_template = f.read()

    # define pointing coordinates
    coord = SkyCoord(pars['ra'], pars['dec'], unit=(u.hourangle, u.deg))
    # wsrt location required for alt/az calculation
    wsrt_lat = 52.915184 * u.deg
    wsrt_lon = 6.60387 * u.deg
    wsrt_loc = EarthLocation(lat=wsrt_lat, lon=wsrt_lon, height=0 * u.m)
    # load the parset
    if not pars['parset'] == '':
        with open(pars['parset']) as f:
            parset = f.read().encode('bz2').encode('hex')
            if len(parset) > 24575:
                log("Error: compressed parset is longer than maximum for header (24575 characters)"
                    )
                exit()
    else:
        parset = 'no parset'

    for beam in pars['beams']:
        # add CB-dependent parameters
        cfg['beam'] = beam
        cfg['dadakey'] = pars['network_port_start'] + beam
        cfg['network_port'] = pars['network_port_start'] + beam
        cfg['header'] = os.path.join(
            os.path.dirname(os.path.realpath(__file__)),
            NODEHEADER.format(beam))
        if cfg['debug']:
            cfg['dada_dir'] = pars['dada_dir'].replace('{cb}',
                                                       '{:02d}'.format(beam))

        # save to file
        filename = os.path.join(os.path.dirname(os.path.realpath(__file__)),
                                NODECONFIG.format(beam))
        with open(filename, 'w') as f:
            yaml.dump(cfg, f, default_flow_style=False)

        # save the coordinates of the beams
        this_cb_coord = pointing_to_CB_pos(beam, coord)
        gl, gb = this_cb_coord.galactic.to_string(precision=8).split(' ')
        altaz = this_cb_coord.transform_to(
            AltAz(obstime=starttime, location=wsrt_loc))
        az = altaz.az.deg
        za = 90 - altaz.alt.deg
        ra = this_cb_coord.ra.to_string(unit=u.hourangle,
                                        sep=':',
                                        pad=True,
                                        precision=1)
        dec = this_cb_coord.dec.to_string(unit=u.degree,
                                          sep=':',
                                          pad=True,
                                          precision=1)
        coordinates.append(["{:02d}".format(beam), ra, dec, gl, gb])
        # get LST start in seconds
        lststart = starttime.sidereal_time('mean', wsrt_lon).to(
            u.arcsecond).value / 15

        # fill in the psrdada header keys
        temppars = pars.copy()
        temppars['ra'] = ra.replace(':', '')
        temppars['ra_hms'] = ra
        temppars['dec'] = dec.replace(':', '')
        temppars['dec_hms'] = dec
        temppars['lst_start'] = lststart
        temppars['az_start'] = az
        temppars['za_start'] = za
        temppars[
            'resolution'] = pars['page_size'] * pars['nchan'] * pars['ntabs']
        temppars['file_size'] = pars['page_size'] * pars['nchan'] * pars[
            'ntabs'] * 10  # 10 pages per file
        temppars['bps'] = int(pars['page_size'] * pars['nchan'] *
                              pars['ntabs'] / 1.024)
        temppars['beam'] = beam
        temppars['parset'] = parset
        temppars['scanlen'] = pars['tobs']
        temppars['hdr_size'] = pars['hdr_size']

        header = header_template.format(**temppars)

        with open(
                os.path.join(os.path.dirname(os.path.realpath(__file__)),
                             NODEHEADER.format(beam)), 'w') as f:
            f.write(header)

    # save coordinate overview to disk
    filename = os.path.join(pars['master_dir'], COORD)
    with open(filename, 'w') as f:
        for line in coordinates:
            f.write(' '.join(line) + '\n')

    # save obs info to disk
    info = {}
    for key in ['utc_start', 'source', 'tobs']:
        info[key] = pars[key]
    # get MW DMs
    # YMW16
    # mode, Gl, Gb, dist(pc), dist->DM. 1E6 pc should cover entire MW
    cmd = "ymw16 Gal {} {} 1E6 2 | awk '{{print $8}}'".format(
        *coord.galactic.to_string(precision=8).split(' '))
    log(cmd)
    ymw16_dm = subprocess.check_output(cmd, shell=True)
    try:
        ymw16_dm = str(float(ymw16_dm))
    except ValueError:
        ymw16_dm = "-"
    info['ymw16'] = ymw16_dm
    filename = os.path.join(pars['master_dir'], INFO)
    with open(filename, 'w') as f:
        yaml.dump(info, f, default_flow_style=False)

    # Start the node scripts
    script_path = os.path.realpath(os.path.dirname(__file__))
    for beam in pars['beams']:
        node = beam + 1
        node_script = os.path.join(script_path, "start_survey_node.py")
        cmd = "{} nodes/CB{:02d}.yaml".format(node_script, beam)
        run_on_node(node, cmd, background=True)

    sleep(1)
    # done
    log("All nodes started for observation")

    # start the trigger listener + emailer NOTE: this is the only command
    # that keeps running in the foreground during the obs
    if pars['proctrigger']:
        email_script = os.path.join(script_path, "emailer.py")
        cmd = "sleep {tobs}; python {email_script} {master_dir} '{beams}'".format(
            email_script=email_script, **pars)
        log(cmd)
        os.system(cmd)
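
Example #20 snaps the requested start time to a multiple of 1.024 s counted from the correlator sync time. A stand-alone sketch of just that arithmetic, with placeholder numbers:

init_unix = 1500000000.000   # correlator sync time in UNIX seconds (placeholder)
requested = 1500000123.456   # requested UNIX start time (placeholder)

# round to the nearest multiple of 1.024 s after the sync time
unixstart = round((requested - init_unix) / 1.024) * 1.024 + init_unix
print(unixstart, (unixstart - init_unix) / 1.024)   # second value is (nearly) an integer
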
Example #21
    st_off_ra = [float(in_data[12].rsplit('#')[0])] * u.mas
    st_off_de = [float(in_data[13].rsplit('#')[0])] * u.mas
    off_ra = ob_off_ra - st_off_ra
    off_de = ob_off_de - st_off_de
    magR = [float(in_data[14].rsplit()[0])]
    magK = [float(in_data[15].rsplit()[0])]
    longi = [float(in_data[16].rsplit()[0])]
    dca = off_ra * np.sin(pa) + off_de * np.cos(pa)
    dt = int(((off_ra * np.cos(pa) - off_de * np.sin(pa)).to(u.rad) *
              dist.to(u.km) / vel).value) * u.s
    ca = ca + dca
    datas = datas + dt
    vals = [0]

f.close()

datas.delta_ut1_utc = 0

if os.path.isfile(sitearq) == True:
    sites = np.loadtxt(sitearq,
                       dtype={
                           'names': ('lat', 'lon', 'alt', 'nome'),
                           'formats': ('f8', 'f8', 'f8', 'S30')
                       })

###################### running the program ######################

map(geramapa, vals)

os.system('notify-send "Finished generating the maps" --icon=dialog-information')
Example #22
def get_noise(event_id):


    nstations=0

    try:
        #for u in np.arange(1):
        event = crdb.Event(db=db, id=event_id)

        event_time=np.asarray(event["lora_time"])
        event_time=event_time[event_time>1.0]
        event_time=np.min(event_time)
        
        #if event_time<100.0:
        #   break
        print(event_time)
        time = Time(event_time, format='unix', scale='utc',location=loc)
        time.delta_ut1_utc = 0.
        
        LST=time.sidereal_time('apparent').hour

        print('event time UTC: {0}'.format(event_time))
        print('event time LST: {0}'.format(LST))


        stations = []
        #collect stations with "GOOD" status for event
        for f in event.datafiles:
            stn=[]
            stn.extend(f.stations)
            #print stn[0].stationname
            #print f.stations.stationname
            core_stations = ("CS001", "CS002", "CS003", "CS004", "CS005", "CS006",
                             "CS007", "CS011", "CS013", "CS017", "CS021", "CS026",
                             "CS028", "CS030", "CS031", "CS032", "CS101", "CS103",
                             "CS301", "CS302", "CS401", "CS501")
            if stn[0].stationname in core_stations:
                if stn[0].status=="GOOD":
                    stations.extend(f.stations)
    
        nstations=len(stations)

    except:
        print('no event at this point')


    for s in np.arange(nstations):

        station_flag=0
        station=stations[s]

        # The following steps are copied from cr_physics pipeline
        # there are a million try/excepts because I ran into lots of specific errors I didn't want to handle
        
        try:
            # Open file
            f = cr.open(station.datafile.settings.datapath + '/' + station.datafile.filename)
            antenna_set= f["ANTENNA_SET"]
            
            # Check if we are dealing with LBA or HBA observations
            if "LBA" in f["ANTENNA_SET"]:
                print ('LBA event')
            else:
                print ('HBA event')
                continue
        except:
            print ('no event at antennas')
            continue
            
            
            
        # Read LORA information
        try:
            tbb_time = f["TIME"][0]
            max_sample_number = max(f["SAMPLE_NUMBER"])
            min_sample_number = min(f["SAMPLE_NUMBER"])

            (tbb_time_sec, tbb_time_nsec) = lora.nsecFromSec(tbb_time, logfile=os.path.join(lora_directory,lora_logfile))
            (block_number_lora, sample_number_lora) = lora.loraTimestampToBlocknumber(tbb_time_sec, tbb_time_nsec, tbb_time, max_sample_number, blocksize=blocksize)
        except:
            continue

       # Check if starting time in sample units (SAMPLE_NUMBER) does not deviate among antennas
        try:
            sample_number_per_antenna = np.array(f["SAMPLE_NUMBER"])
            median_sample_number = np.median(sample_number_per_antenna)
            data_length = np.median(np.array(f["DATA_LENGTH"]))
            deviating_antennas = np.where( np.abs(sample_number_per_antenna - median_sample_number) > data_length/4)[0]
            nof_deviating_antennas = len(deviating_antennas)
            print ('Number of deviating antennas: %d' % nof_deviating_antennas)
        except:
            continue


        try:
            frequencies = f["FREQUENCY_DATA"]
            print ('blocksize:  {0}'.format(f["BLOCKSIZE"]))
            
            # Get bandpass filter
            nf = f["BLOCKSIZE"] / 2 + 1
            ne = int(10. * nf / f["CLOCK_FREQUENCY"])
            bandpass_filter.fill(0.)

            bandpass_filter[int(nf * 30.0 / 100.)-(ne/2):int(nf * 80.0 / 100.)+(ne/2)] = 1.0
            gaussian_weights = cr.hArray(cr.hGaussianWeights(ne, 4.0))
            cr.hRunningAverage(bandpass_filter, gaussian_weights)
        except:
            continue
        
        try:
            raw_data = f["TIMESERIES_DATA"].toNumpy()
            # Find outliers
            tmp = np.max(np.abs(raw_data), axis=1)
            outlier_antennas = np.argwhere(np.abs(tmp-np.median(tmp[tmp>0.1])) > 2*np.std(tmp[tmp>0.1])).ravel()
            print("Outlier antennas", outlier_antennas)


        except:
            print 'no raw data'
            continue


        try:
            # Get calibration delays to flag antennas with wrong calibration values
            try:
                cabledelays = cr.hArray(f["DIPOLE_CALIBRATION_DELAY"])
                cabledelays = np.abs(cabledelays.toNumpy())
            except:
                print 'problem with cable delays'
                continue
                
            # Find RFI and bad antennas
            findrfi = cr.trun("FindRFI", f=f, nofblocks=10, plotlist=[], apply_hanning_window=True, hanning_fraction=0.2, bandpass_filter=bandpass_filter)
            print "Bad antennas", findrfi.bad_antennas
            antenna_ids_findrfi = f["SELECTED_DIPOLES"]
            nAnt=len(f["SELECTED_DIPOLES"])
            bad_antennas_spikes = []
            bad_antennas = findrfi.bad_antennas[:]

            dipole_names = f["SELECTED_DIPOLES"]
            good_antennas = [n for n in dipole_names if n not in bad_antennas]
            station["crp_bad_antennas_power"] = findrfi.bad_antennas
            station["crp_bad_antennas_spikes"] = bad_antennas_spikes
            selected_dipoles = []

            for i in range(len(dipole_names) / 2):
                if dipole_names[2 * i] in good_antennas and dipole_names[2 * i + 1] in good_antennas and f.nof_consecutive_zeros[2 * i] < 512 and f.nof_consecutive_zeros[2 * i + 1] < 512 and cabledelays[2 * i] < 150.e-9 and cabledelays[2 * i + 1] < 150.e-9:
                    selected_dipoles.extend([dipole_names[2 * i], dipole_names[2 * i + 1]])
            
            f["SELECTED_DIPOLES"] = selected_dipoles
            station["crp_selected_dipoles"] = selected_dipoles



            nDipoles=len(selected_dipoles)


        except:
            print 'issue with RFI'
            continue

        try:
            print block_number_lora
            nF= len(frequencies.toNumpy())
            all_ffts=np.zeros([nAvg,nDipoles,nF])
            all_ffts_cleaned=np.zeros([nAvg,nDipoles,nF])
        except:
            continue
        
        #______________________________________________________________________
        
        block_number=0

        for i in np.arange(nAvg):
            try:
        
                # make sure not to include the signal window in the average
                if abs(block_number-block_number_lora)<5:
                    block_number=block_number+10
    
                fft_data = f.empty("FFT_DATA")
                #f.getFFTData(fft_data, block_number_lora, True, hanning_fraction=0.2, datacheck=True)   # this is what is in the pipeline
                f.getFFTData(fft_data, block_number, True, hanning_fraction=0.2, datacheck=True)

                # Apply bandpass
                fft_data[...].mul(bandpass_filter)
    
                # Normalize spectrum
                fft_data /= f["BLOCKSIZE"]
                fft_hold=fft_data
                # Reject DC component
                fft_data[..., 0] = 0.0
            
                # Also reject 1st harmonic (gives a lot of spurious power with Hanning window)
                fft_data[..., 1] = 0.0
                
                # Flag dirty channels (from RFI excision)
                fft_data[..., cr.hArray(findrfi.dirty_channels)] = 0


                # factor of two because real FFT
                all_ffts[i]=2*np.abs(fft_hold.toNumpy())**2
                all_ffts_cleaned[i]=2*np.abs(fft_data.toNumpy())**2
                
                
                badFreq=frequencies.toNumpy()[findrfi.dirty_channels]/1e6
                
                
                nBadChannelsFilt=len(badFreq[(badFreq>=30.0)*(badFreq<=80.0)])
                
                if nBadChannelsFilt>1:
                    print 'n bad channels: {0}'.format(nBadChannelsFilt)
                    continue


                block_number=block_number+1
                
            except:
                print 'error'
                continue

        try:
            #for y in np.arange(1):

            fft_avg=np.average(all_ffts_cleaned,axis=0)
            
            
            freq=frequencies.toNumpy()
            df=(freq[1]-freq[0])/1e6
            freq_new=np.arange(30,81,1)
            fft_resample=np.zeros([nAnt,nResample])

        except:
            print 'error in average'
            continue


        for n in np.arange(nDipoles):
            try:
                
                fft_use=fft_avg[n][fft_avg[n]>1e-100]
                freq_use=freq[fft_avg[n]>1e-100]
                
                if len(fft_avg[n])>len(fft_use):
                    station_flag=1

                f=interp1d(freq_use/1e6,fft_use)
                
                f_new=f(freq_new)
            
                start_f=np.argmin(np.abs((freq/1e6)-30))
                stop_f=np.argmin(np.abs((freq/1e6)-80))


                fft_resample[n]=f_new*(1/df)
            except:
                station_flag=1
                print 'issue with interp'

        analysisinfo={'event_number': event_id,'station': station.stationname,'UTC_time':event_time,'LST':LST,'frequencies':freq,'FFT_data':fft_avg,'frequencies_50':freq_new,'FFT_data_resampled':fft_resample,'flag': station_flag,'antenna_set':antenna_set,'selected_dipoles': dipole_names,'bad_dipoles':bad_antennas,'nBadChannelsFilt':nBadChannelsFilt}

        outputfile=open(station.stationname+'/'+str(int(event_id))+'_noise_OUTER.p','w')
                
        pickle.dump(analysisinfo,outputfile)
        outputfile.close()

        print '{0} done'.format(station.stationname)
      
                
    print 'done with event'
Example #23
    def plot_grid(self,ra_delta=30.0,dec_delta=30.0,dec_label=0.0,ra_label=0.0,npoints=100,textcolor='g',textsize=None,auto_ra=True,**kwd):
        # plot ra dec grid 

        altaz=kwd.get('altaz',False)
        wcs=kwd.get('wcs',None)
        lon=kwd.get('lon',-111.600)
        lat=kwd.get('lat',31.9633)
        height=kwd.get('height',2120.0)
        timezone=kwd.get('timezone',-7)
        datetime=kwd.get('datetime','2016-5-26 00:00:00')
        local=kwd.get('local',False)
        xc=kwd.get('xc',None)
        yc=kwd.get('yc',None)
        radius=kwd.get('radius',None)
        origin=kwd.get('origin','top')

        # get local sideral time
        time=Time(datetime)
        if local is True: 
            time-=utcoffset
        time.delta_ut1_utc = 0.
        lst=time.sidereal_time('mean',longitude=lon)


        nra=360.0//ra_delta+1
        ndec=90.0//dec_delta+1
        ra=np.linspace(0,360,npoints)
        if altaz:
            dec=np.linspace(0,90,ndec)
        else:
            dec=np.linspace(-90,90,ndec)
        for idec in dec:
            self.plot_radec(ra,idec,**kwd)
            tmpra=ra_label
            tmpdec=idec
            if altaz: 
                az=tmpra
                alt=tmpdec
            else: 
                if auto_ra: tmpra=lst
                alt,az=self.radec_altaz(tmpra,tmpdec,wcs=wcs,lon=lon,lat=lat,height=height,timezone=timezone,datetime=datetime,local=local)
            x,y=self.altaz_xy(alt,az,xc=xc,yc=yc,radius=radius,origin=origin)
            self.image.axes.text(x,y,'%+3d' % idec,horizontalalignment='center',verticalalignment='center',color=textcolor,clip_on=True,size=textsize) 
        
        ra=np.linspace(0,360,nra)
        if altaz:
            dec=np.linspace(0,90,npoints)
        else:
            dec=np.linspace(-90,90,npoints)
        for ira in ra:
            if ira == 360.0: continue
            self.plot_radec(ira,dec,**kwd)
            tmpra=ira
            tmpdec=dec_label
            if altaz:
                az=tmpra
                alt=tmpdec
            else:
                alt,az=self.radec_altaz(tmpra,tmpdec,wcs=wcs,lon=lon,lat=lat,height=height,timezone=timezone,datetime=datetime,local=local)
                
            x,y=self.altaz_xy(alt,az,xc=xc,yc=yc,radius=radius,origin=origin)
            self.image.axes.text(x,y,'%4d' % ira,horizontalalignment='center',verticalalignment='center',color=textcolor,clip_on=True,size=textsize) 
Example #24
t_gp = Time('2015-10-19T00:17:47.415')

tel1 = Ef
tel2 = Jb

uvw_mat = np.zeros((len(EVN), 3))

for i in range(len(EVN)):
    tel = EVN[i]
    
    X = tel.x
    Y = tel.y
    Z = tel.z
    Xvec = np.array([X.value, Y.value, Z.value])
    ot=Time(t_gp, scale='utc', location=tel1)
    ot.delta_ut1_utc = 0.
    obst = ot.sidereal_time('mean')

    # I'm certain there's a better astropy way to get ot_avg in degrees
    h = obst.deg*u.deg - crab.ra   
    dec = crab.dec

    # matrix to transform xyz to uvw
    mat = np.array([(np.sin(h), np.cos(h), 0), (-np.sin(dec)*np.cos(h), np.sin(dec)*np.sin(h), 
                    np.cos(dec)), (np.cos(dec)*np.cos(h), -np.cos(dec)*np.sin(h), np.sin(dec))])

    uvw = np.dot(mat, Xvec)
    uvw_mat[i] = uvw
    
print uvw_mat[0]
Example #25
def corrections(lon, lat, alt, ra, dec, mjd):
    """
    Calculate the heliocentric radial velocity corrections for an astronomical 
    source.
    Parameters
    ----------
    lon : `~astropy.coordinates.Longitude` or float
        Earth longitude of the observatory (western direction is positive). Can
        be anything that initialises an `~astropy.coordinates.Angle` object
        (if float, in degrees).
    lat : `~astropy.coordinates.Latitude` or float
        Earth latitude of observatory. Can be anything that initialises an
        `~astropy.coordinates.Latitude` object (if float, in degrees).
    alt : `~astropy.units.Quantity` or float
        Altitude of the observatory (if float, in meters).
    ra : `~astropy.coordinates.Angle` or float
        Right ascension of the object for epoch J2000 (if float, in degrees).
    dec : `~astropy.coordinates.Angle` or float
        Declination of the object for epoch J2000 (if float, in degrees).
    mjd : float
        The modified Julian date for the middle of exposure.
    Returns
    -------
    barycorr : `~astropy.units.Quantity`
        The barycentric velocity correction.
    helcorr : `~astropy.units.Quantity`
        The heliocentric velocity correction.
    """

    if not isinstance(lon, coord.Longitude):
        lon = coord.Longitude(lon * u.deg)

    if not isinstance(lat, coord.Latitude):
        lat = coord.Latitude(lat * u.deg)

    if not isinstance(alt, u.Quantity):
        alt *= u.m

    if not isinstance(ra, u.Quantity):
        ra *= u.deg

    if not isinstance(dec, u.Quantity):
        dec *= u.deg

    # Here we specify the location so that we can easily calculate the mean
    # local sidereal time later on
    time = Time(2.4e6 + mjd, format="jd", location=(lon, lat, alt))
    epoch = time.datetime.year + time.datetime.month/12. \
        + time.datetime.day/365.

    # Precess the coordinates to the current epoch
    coordinate = coord.SkyCoord(ra, dec, frame="fk5").transform_to(
        coord.FK5(equinox="J%s" % (epoch)))

    # Convert geodetic latitude into geocentric latitude to correct for rotation
    # of the Earth
    dlat = ((-11. * 60. + 32.743) * np.sin(2 * lat) + 1.1633 * np.sin(4 * lat) \
        - 0.0026 * np.sin(6 * lat)) * u.degree
    geocentric_lat = lat + dlat / 3600.

    # Calculate distance of observer from Earth center
    r = alt + 6378160.0 * u.m * (0.998327073 \
        + 0.001676438 * np.cos(2 * geocentric_lat) \
        - 0.000003510 * np.cos(4 * geocentric_lat) \
        + 0.000000008 * np.cos(6 * geocentric_lat))

    # Calculate rotational velocity perpendicular to the radius vector
    # Note: 23.934469591229 is the sidereal day in hours for 1986
    v = 2 * np.pi * r / (23.934469591229 * 3600 * u.second)

    # Calculate vdiurnal velocity
    time.delta_ut1_utc = 0  # we get an error otherwise; no big deal for this application
    vdiurnal = v * np.cos(lat) * np.cos(coordinate.dec) \
      * np.sin(coordinate.ra - time.sidereal_time("mean"))

    # Calculate barycentric and heliocentric velocities
    vh, vb = baryvel(time)

    # Project along the line of sight
    projection = np.array([
        np.cos(coordinate.dec) * np.cos(coordinate.ra),
        np.cos(coordinate.dec) * np.sin(coordinate.ra),
        np.sin(coordinate.dec)
    ])
    vbar = (vb * projection).sum()
    vhel = (vh * projection).sum()

    # Using barycentric velocity for correction
    vbar_correction = vdiurnal + vbar
    vhel_correction = vdiurnal + vhel

    # [TODO] it may be useful to return other components of velocity or extra
    # information about the transforms (e.g., gmst, ut, lmst, dlat, lat, vbar,
    # vhel, etc)
    return (vbar_correction, vhel_correction)
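
A minimal usage sketch for corrections() above, assuming baryvel() is importable; the observatory position, target coordinates, and MJD are illustrative placeholders, not values from the original source:

# Illustrative values only: a made-up observatory and J2000 target, mid-exposure MJD 57000.5.
vbar_corr, vhel_corr = corrections(lon=70.7, lat=-29.0, alt=2400.0,
                                   ra=150.0, dec=-30.0, mjd=57000.5)
print(vbar_corr, vhel_corr)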
Example #26
0
def get_skytemp(datetimestring, delays, frequency, alpha=-2.6, verbose=True):
    """
    Tx,Ty=get_skytemp(datetimestring, delays, frequency, alpha=-2.6, verbose=True)
    not completely sure about the normalization, since the Haslam FITS image is not specific

    """
    su.init_data()

    if not os.path.exists(config.RADIO_IMAGE_FILE):
        logger.error("Could not find 408 MHz image: %s\n" %
                     (config.RADIO_IMAGE_FILE))
        return None
    try:
        if (verbose):
            print("Loading 408 MHz map from %s..." % config.RADIO_IMAGE_FILE)
        f = pyfits.open(config.RADIO_IMAGE_FILE)
    except Exception as e:
        logger.error("Error opening 408 MHz image: %s\nError: %s\n" %
                     (config.RADIO_IMAGE_FILE, e))
        return None
    skymap = f[0].data[0]

    ra = (f[0].header.get('CRVAL1') +
          (numpy.arange(1, skymap.shape[1] + 1) - f[0].header.get('CRPIX1')) *
          f[0].header.get('CDELT1')) / 15.0
    dec = (f[0].header.get('CRVAL2') +
           (numpy.arange(1, skymap.shape[0] + 1) - f[0].header.get('CRPIX2')) *
           f[0].header.get('CDELT2'))

    # parse the datetimestring
    try:
        yr = int(datetimestring[:4])
        mn = int(datetimestring[4:6])
        dy = int(datetimestring[6:8])
        hour = int(datetimestring[8:10])
        minute = int(datetimestring[10:12])
        second = int(datetimestring[12:14])
    except ValueError:
        logger.error('Could not parse datetimestring %s\n' % datetimestring)
        return None
    # UT = hour + minute / 60.0 + second / 3600.0
    UTs = '%02d:%02d:%02d' % (hour, minute, second)
    a_obstime = Time('%d-%d-%d %s' % (yr, mn, dy, UTs), scale='utc')
    a_obstime.delta_ut1_utc = 0
    a_obstime.location = config.MWAPOS
    if (verbose):
        print("For %02d-%02d-%02d %s UT, LST=%6.3f" %
              (yr, mn, dy, UTs, a_obstime.sidereal_time(kind='mean').hour))

    RA, Dec = numpy.meshgrid(ra * 15, dec)
    coords = SkyCoord(ra=RA,
                      dec=Dec,
                      equinox='J2000',
                      unit=(astropy.units.deg, astropy.units.deg))
    coords.location = config.MWAPOS
    coords.obstime = a_obstime
    coords_prec = coords.transform_to('altaz')
    Az, Alt = coords_prec.az.deg, coords_prec.alt.deg

    if (verbose):
        print("Creating primary beam response for frequency %.2f MHz..." %
              (frequency))
        print("Beamformer delays are %s" % delays)
    # get the beam response
    # first go from altitude to zenith angle
    theta = (90 - Alt) * math.pi / 180
    phi = Az * math.pi / 180

    # this is the response for XX and YY
    try:
        respX, respY = primary_beam.MWA_Tile_analytic(
            theta, phi, freq=frequency * 1e6, delays=numpy.array(delays))
    except Exception as e:
        logger.error('Error creating primary beams: %s\n' % e)
        return None
    rX = numpy.real(numpy.conj(respX) * respX)
    rY = numpy.real(numpy.conj(respY) * respY)

    maskedskymap = numpy.ma.array(skymap, mask=Alt <= 0)
    maskedskymap *= (frequency / 408.0)**alpha
    rX /= rX.sum()
    rY /= rY.sum()
    return ((rX * maskedskymap).sum()) / 10.0, (
        (rY * maskedskymap).sum()) / 10.0
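
A hedged usage sketch for get_skytemp() above; the timestamp, zenith-pointing delays, and frequency are placeholders chosen only to show the expected argument formats:

# Placeholder inputs: a YYYYMMDDhhmmss UTC string, 16 beamformer delays, and a frequency in MHz.
result = get_skytemp('20150326054500', delays=[0] * 16, frequency=154.24)
if result is not None:
    Tx, Ty = result
    print(Tx, Ty)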
Example #27
0
    # parse the datetimestring
    try:
        yr = int(datetimestring[:4])
        mn = int(datetimestring[4:6])
        dy = int(datetimestring[6:8])
        hour = int(datetimestring[8:10])
        minute = int(datetimestring[10:12])
        second = int(datetimestring[12:14])
    except ValueError:
        logger.error('Could not parse datetimestring %s\n' % datetimestring)
        return None
    # UT = hour + minute / 60.0 + second / 3600.0
    UTs = '%02d:%02d:%02d' % (hour, minute, second)
    obstime = Time('%d-%d-%d %s' % (yr, mn, dy, UTs), scale='utc')
    obstime.delta_ut1_utc = 0
    if (verbose):
        print "For %02d-%02d-%02d %s UT, LST=%6.3f" % (yr, mn, dy, UTs, obstime.sidereal_time(kind='mean').hour)

    RA, Dec = numpy.meshgrid(ra * 15, dec)
    coords = SkyCoord(ra=RA, dec=Dec, equinox='J2000', unit=(astropy.units.deg, astropy.units.deg))
    coords.location = config.MWAPOS
    coords.obstime = obstime
    coords_prec = coords.transform_to('altaz')
    Az, Alt = coords_prec.az.deg, coords_prec.alt.deg

    if (verbose):
        print "Creating primary beam response for frequency %.2f MHz..." % (frequency)
        print "Beamformer delays are %s" % delays
    # get the beam response
    # first go from altitude to zenith angle
Example #28
0
def geramapa(obj, diam, *args, **kwargs):
    print("######################## Gera Mapas ########################")
    print("Gerando mapas para Objeto [ %s ] Diametro [ %s ]" % (obj, diam))
##### Default values ###########################################
    mapstyle = 1
    resolution = 'l'
    fmt='png'
    dpi=100
    step=1
    sitearq=''
    country=''
    mapsize=[46.0, 38.0]
    erro=None
    ring=None
    atm=None
    cpoints=60
    limits=None
    meridians=30
    parallels=30
    nscale=1
    cscale=1
    sscale=1
    pscale=1

#############################################
    if not type(obj) == str:
        raise TypeError('obj keyword must be a string')

    if not type(diam) in [int,float]:
        raise TypeError('diam keyword must be a number')
    diam = diam*u.km

#################### Reading file ###################################
    if args:
        arquivo = args[0]
    elif 'file' in kwargs.keys():
        arquivo = kwargs['file']
    if os.path.isfile(arquivo) == False:
        raise IOError('File {} not found'.format(arquivo))
    try:
        print("Lendo tabela de predicao. [ %s ]" % arquivo)

        dados = np.loadtxt(arquivo, skiprows=41, usecols=(0, 1, 2, 3, 4, 5, 6, 7, 8, 9, 10, 11, 18, 19, 20, 21, 22, 25, 26, 28, 29), \
            dtype={'names': ('dia', 'mes', 'ano', 'hor', 'min', 'sec', 'afh', 'afm', 'afs', 'ded', 'dem', 'des', 'ca', 'pa', 'vel', 'delta', 'mR', 'mK', 'long', 'ora', 'ode'),
            'formats': ('S30', 'S30', 'S30','S30', 'S30', 'S30','S20', 'S20', 'S20','S20', 'S20', 'S20', 'f8', 'f8', 'f8', 'f8', 'f8', 'f8', 'f8', 'f8', 'f8')}, ndmin=1)

        print("Predicoes: [ %s ] " % dados.size)
    except Exception:
        raise IOError('{} is not in PRAIA format'.format(arquivo))
################## reading coordinates #################
    print("-------------- Reading Coordinates --------------")

    coor = np.char.array(dados['afh'], unicode=True)
    for i in ['afm', 'afs', 'ded', 'dem', 'des']:
        coor = np.core.defchararray.add(coor, ' ')
        coor = np.core.defchararray.add(coor, np.char.array(dados[i], unicode=True))
    stars = SkyCoord(coor, frame='icrs', unit=(u.hourangle, u.degree))

    print("Stars:")
    print(stars)

################### reading time ########################
    print("-------------- Reading Time --------------")

    tim=np.char.array(dados['ano'], unicode=True)
    len_iso = ['-', '-', ' ', ':',':']
    arr = ['mes', 'dia', 'hor', 'min', 'sec']
    for i in np.arange(len(arr)):
        tim = np.core.defchararray.add(tim, len_iso[i])
        tim = np.core.defchararray.add(tim, np.char.array(dados[arr[i]], unicode=True))
    tim = np.char.array(tim) + '000'
    datas = Time(tim, format='iso', scale='utc')

    print("Datas:")
    print(datas)
############### defining parameters #############

    print("-------------- Definindo Parametros --------------")

    ca = dados['ca']*u.arcsec
    posa = dados['pa']*u.deg
    vel = dados['vel']*(u.km/u.s)
    dist = dados['delta']*u.AU
    ob_off_ra = dados['ora']*u.mas
    ob_off_de = dados['ode']*u.mas
    magR = dados['mR']
    magK = dados['mK']
    longi = dados['long']
    datas.delta_ut1_utc = 0

    if 'mapstyle' in kwargs.keys():
        mapstyle = kwargs['mapstyle']
    if not type(mapstyle) == int:
        raise TypeError('mapstyle keyword must be an integer')

    print("mapstyle: %s" % mapstyle)

    if 'resolution' in kwargs.keys():
        resolution = kwargs['resolution']
    if resolution not in ['c', 'l', 'i', 'h', 'f']:
        raise TypeError('resolution keyword must be one of these: [c, l, i, h, f]')

    print("resolution: %s" % resolution)

    if 'fmt' in kwargs.keys():
        fmt = kwargs['fmt']
    if not type(fmt) == str:
        raise TypeError('fmt keyword must be a string')

    print("fmt: %s" % fmt)

    if 'dpi' in kwargs.keys():
        dpi = kwargs['dpi']
    if not type(dpi) == int:
        raise TypeError('dpi keyword must be an integer')

    print("dpi: %s" % dpi)

    if 'step' in kwargs.keys():
        step = kwargs['step']
    if not type(step) in [int,float]:
        raise TypeError('step keyword must be a number')

    print("step: %s" % step)

    if 'sitearq' in kwargs.keys():
        sitearq = kwargs['sitearq']
    if not type(sitearq) == str:
        raise TypeError('sitearq keyword must be a string')

    print("sitearq: %s" % sitearq)

    if 'country' in kwargs.keys():
        country = kwargs['country']
    if not type(country) == str:
        raise TypeError('country keyword must be a string')

    print("country: %s" % country)

    if 'mapsize' in kwargs.keys():
        mapsize = kwargs['mapsize']
    if not type(mapsize) == list:
        raise TypeError('mapsize keyword must be a list with 2 numbers')
    mapsize = mapsize*u.cm

    print("mapsize: %s" % mapsize)

    if 'erro' in kwargs.keys():
        erro = kwargs['erro']
    if not type(erro) in [int,float,type(None)]:
        raise TypeError('erro keyword must be a number')

    print("erro: %s" % erro)

    if 'ring' in kwargs.keys():
        ring = kwargs['ring']
    if not type(ring) in [int,float,type(None)]:
        raise TypeError('ring keyword must be a number')

    print("ring: %s" % ring)

    if 'atm' in kwargs.keys():
        atm = kwargs['atm']
    if not type(atm) in [int,float,type(None)]:
        raise TypeError('atm keyword must be a number')

    print("atm: %s" % atm)

    if 'cpoints' in kwargs.keys():
        cpoints = kwargs['cpoints']
    if not type(cpoints) in [int]:
        raise TypeError('cpoints keyword must be an integer')

    print("cpoints: %s" % cpoints)

    if 'limits' in kwargs.keys():
        maplats = False
        limits = kwargs['limits']
        if type(limits[0]) == float:
            maplats = True

    print("limits: %s" % limits)

    if ('meridians' in kwargs.keys()) and (type(kwargs['meridians']) in [int,float]):
        meridians = kwargs['meridians']

    print("meridians: %s" % meridians)

    if ('parallels' in kwargs.keys()) and (type(kwargs['parallels']) in [int,float]):
        parallels = kwargs['parallels']

    print("parallels: %s" % parallels)

    if ('nscale' in kwargs.keys()) and (type(kwargs['nscale']) in [int,float]):
        nscale = kwargs['nscale']

    print("nscale: %s" % nscale)

    if ('sscale' in kwargs.keys()) and (type(kwargs['sscale']) in [int,float]):
        sscale = kwargs['sscale']

    print("sscale: %s" % sscale)

    if ('cscale' in kwargs.keys()) and (type(kwargs['cscale']) in [int,float]):
        cscale = kwargs['cscale']

    print("cscale: %s" % cscale)

    if ('pscale' in kwargs.keys()) and (type(kwargs['pscale']) in [int,float]):
        pscale = kwargs['pscale']

    print("pscale: %s" % pscale)
##############################################################################################################################################################
#### Define the function that computes and generates the map for each prediction ############################################################################
##############################################################################################################################################################
    def compute(elem):
        print("============== Gerando o mapa para a predicao ==============")
        print("Predicao: [ %s ] Star RA: [ %s ] Dec: [ %s ]" %(elem, stars[elem].ra, stars[elem].dec))

        r = 6370997.0

        print("r: %s" % r)

########## apply offsets ######
        print("-------------- Apply Offsets --------------")

        off_ra = 0.0*u.mas
        off_de = 0.0*u.mas
        ob_ra = 0.0*u.mas
        ob_de = 0.0*u.mas
        if 'off_o' in kwargs.keys():
            off_ra = off_ra + kwargs['off_o'][0]*u.mas
            off_de = off_de + kwargs['off_o'][1]*u.mas
            ob_ra = ob_off_ra[elem] + kwargs['off_o'][0]*u.mas
            ob_de = ob_off_de[elem] + kwargs['off_o'][1]*u.mas
        st_off_ra = st_off_de = 0.0*u.mas
        if 'off_s' in kwargs.keys():
            off_ra = off_ra - kwargs['off_s'][0]*u.mas
            off_de = off_de - kwargs['off_s'][1]*u.mas
            st_off_ra = kwargs['off_s'][0]*u.mas
            st_off_de = kwargs['off_s'][1]*u.mas
        dca = off_ra*np.sin(posa[elem]) + off_de*np.cos(posa[elem])
        dt = ((off_ra*np.cos(posa[elem]) - off_de*np.sin(posa[elem])).to(u.rad)*dist[elem].to(u.km)/np.absolute(vel[elem])).value*u.s
        ca1 = ca[elem] + dca
        data = datas[elem] + dt

        print("off_ra: %s" % off_ra)
        print("off_de: %s" % off_de)
        print("ob_ra: %s" % ob_ra)
        print("ob_de: %s" % ob_de)
        print("st_off_ra: %s" % st_off_ra)
        print("dca: %s" % dca)
        print("dt: %s" % dt)
        print("ca1: %s" % ca1)
        print("data: %s" % data)

##### define map parameters #####

        print("-------------- define parametros do mapa --------------")

        lon = stars[elem].ra - data.sidereal_time('mean', 'greenwich')
        center_map = EarthLocation(lon, stars[elem].dec)
        centert = True


        if 'centermap' in kwargs.keys():
            if not type(kwargs['centermap']) == EarthLocation:
                raise TypeError('centermap must be an Astropy EarthLocation Object')
            center_map = kwargs['centermap']
            centert = False
        fig = plt.figure(figsize=(mapsize[0].to(u.imperial.inch).value, mapsize[1].to(u.imperial.inch).value))
        if not limits:
            m = Basemap(projection='ortho',lat_0=center_map.lat.value,lon_0=center_map.lon.value,resolution=resolution)
        elif np.array(limits).shape == (3,):
            if maplats:
                m = Basemap(projection='ortho',lat_0=center_map.lat.value,lon_0=center_map.lon.value,resolution=resolution)
                cx, cy = m(limits[1], limits[0])
                limits[0] = (cx - r)/1000.0
                limits[1] = (cy - r)/1000.0
            if np.any(np.absolute(limits[0:2]) > r):
                raise ValueError('Value for limits out of range (not in the map)')
            if mapsize[1] < mapsize[0]:
                ly = (limits[1]*u.km).to(u.m).value - r/limits[2]
                uy = (limits[1]*u.km).to(u.m).value + r/limits[2]
                lx = (limits[0]*u.km).to(u.m).value - (r/limits[2])*(mapsize[0]/mapsize[1])
                ux = (limits[0]*u.km).to(u.m).value + (r/limits[2])*(mapsize[0]/mapsize[1])
            else:
                lx = (limits[0]*u.km).to(u.m).value - r/limits[2]
                ux = (limits[0]*u.km).to(u.m).value + r/limits[2]
                ly = (limits[1]*u.km).to(u.m).value - (r/limits[2])*(mapsize[1]/mapsize[0])
                uy = (limits[1]*u.km).to(u.m).value + (r/limits[2])*(mapsize[1]/mapsize[0])
            m = Basemap(projection='ortho',lat_0=center_map.lat.value,lon_0=center_map.lon.value,resolution=resolution,llcrnrx=lx,llcrnry=ly,urcrnrx=ux,urcrnry=uy, area_thresh=2000)
            axf = fig.add_axes([-0.001,-0.001,1.002,1.002])
            axf.set_rasterization_zorder(1)
        else:
            raise ValueError('limits keyword must be an array with 3 elements: [centerx, centery, zoom]')
        if mapstyle == 1:
            m.drawmapboundary(fill_color='0.9')
            m.fillcontinents(color='1.0',lake_color='0.9')
            ptcolor= 'red'
            lncolor= 'blue'
            ercolor= 'blue'
            rncolor= 'blue'
            atcolor= 'blue'
            outcolor= 'red'
        elif mapstyle == 2:
            m.drawmapboundary(fill_color='aqua')
            m.fillcontinents(color='coral',lake_color='aqua')
            ptcolor= 'red'
            lncolor= 'blue'
            ercolor= 'red'
            rncolor= 'black'
            atcolor= 'black'
            outcolor= 'red'
        elif mapstyle == 3:
            m.shadedrelief()
            ptcolor= 'red'
            lncolor= 'blue'
            ercolor= 'red'
            rncolor= 'black'
            atcolor= 'black'
            outcolor= 'red'
        elif mapstyle == 4:
            m.bluemarble()
            ptcolor= 'red'
            lncolor= 'red'
            ercolor= 'red'
            rncolor= 'black'
            atcolor= 'black'
            outcolor= 'red'
        elif mapstyle == 5:
            m.etopo()
            ptcolor= 'red'
            lncolor= 'red'
            ercolor= 'red'
            rncolor= 'black'
            atcolor= 'black'
            outcolor= 'red'

        m.drawcoastlines(linewidth=0.5)  ## draw the coastlines
        m.drawcountries(linewidth=0.5)  ## draw the countries
        if 'states' in kwargs.keys():
            m.drawstates(linewidth=0.5)    ## draw the states
        m.drawmeridians(np.arange(0,360,meridians))  ## draw the meridians
        m.drawparallels(np.arange(-90,90,parallels))  ## draw the parallels
        m.drawmapboundary()  ## draw the map boundary
        m.nightshade(data.datetime, alpha=0.25, zorder=1.2)  ## draw the night shadow

        if 'ptcolor' in kwargs.keys():
            ptcolor = kwargs['ptcolor']
        if 'lncolor' in kwargs.keys():
            lncolor = kwargs['lncolor']
        if 'ercolor' in kwargs.keys():
            ercolor = kwargs['ercolor']
        if 'rncolor' in kwargs.keys():
            rncolor = kwargs['rncolor']
        if 'atcolor' in kwargs.keys():
            atcolor = kwargs['atcolor']
        if 'outcolor' in kwargs.keys():
            outcolor = kwargs['outcolor']

########### compute path ##################
        print("-------------- compute path --------------")

        vec = np.arange(0, int(8000/(np.absolute(vel[elem].value))), step)
        vec = np.sort(np.concatenate((vec,-vec[1:]), axis=0))
        pa = Angle(posa[elem])
        pa.wrap_at('180d', inplace=True)
        if pa > 90*u.deg:
            paplus = pa - 180*u.deg
        elif pa < -90*u.deg:
            paplus = pa + 180*u.deg
        else:
            paplus = pa
        deltatime = vec*u.s
        datas1 = data + TimeDelta(deltatime)
        datas1.delta_ut1_utc = 0
        longg = stars[elem].ra - datas1.sidereal_time('mean', 'greenwich')
        centers = EarthLocation(longg, stars[elem].dec, height=0.0*u.m)

        a = r*u.m
        b = r*u.m
        dista = (dist[elem].to(u.km)*ca1.to(u.rad)).value*u.km
        ax = a + dista*np.sin(pa) + (deltatime*vel[elem])*np.cos(paplus)
        by = b + dista*np.cos(pa) - (deltatime*vel[elem])*np.sin(paplus)
        ax2 = ax - (diam/2.0)*np.sin(paplus)
        by2 = by - (diam/2.0)*np.cos(paplus)
        ax3 = ax + (diam/2.0)*np.sin(paplus)
        by3 = by + (diam/2.0)*np.cos(paplus)

        lon1, lat1 = xy2latlon(ax2.value, by2.value, centers.lon.value, centers.lat.value)
        j = np.where(lon1 < 1e+30)
        xs, ys = m(lon1[j], lat1[j])
        xs = [i for i in xs if i < 1e+30]
        ys = [i for i in ys if i < 1e+30]
        m.plot(xs, ys, color=lncolor)
        if centert:
            j = np.where(lon1 > 1e+30)
            m.plot(ax2[j].value, by2[j].value, color=outcolor, clip_on=False, zorder=-0.2)

        lon2, lat2 = xy2latlon(ax3.value, by3.value, centers.lon.value, centers.lat.value)
        j = np.where(lon2 < 1e+30)
        xt, yt = m(lon2[j], lat2[j])
        xt = [i for i in xt if i < 1e+30]
        yt = [i for i in yt if i < 1e+30]
        m.plot(xt, yt, color=lncolor)
        if centert:
            j = np.where(lon2 > 1e+30)
            m.plot(ax3[j].value, by3[j].value, color=outcolor, clip_on=False, zorder=-0.2)

##### plot erro #####
        if erro:
            err = erro*u.mas
            errd = (dist[elem].to(u.km)*err.to(u.rad)).value*u.km
            ax2 = ax - errd*np.sin(paplus) - (diam/2.0)*np.sin(paplus)
            by2 = by - errd*np.cos(paplus) - (diam/2.0)*np.cos(paplus)
            ax3 = ax + errd*np.sin(paplus) + (diam/2.0)*np.sin(paplus)
            by3 = by + errd*np.cos(paplus) + (diam/2.0)*np.cos(paplus)
            lon1, lat1 = xy2latlon(ax2.value, by2.value, centers.lon.value, centers.lat.value)
            j = np.where(lon1 < 1e+30)
            xs, ys = m(lon1[j], lat1[j])
            xs = [i for i in xs if i < 1e+30]
            ys = [i for i in ys if i < 1e+30]
            m.plot(xs, ys, '--', color=ercolor)

            lon2, lat2 = xy2latlon(ax3.value, by3.value, centers.lon.value, centers.lat.value)
            j = np.where(lon2 < 1e+30)
            xt, yt = m(lon2[j], lat2[j])
            xt = [i for i in xt if i < 1e+30]
            yt = [i for i in yt if i < 1e+30]
            m.plot(xt, yt, '--', color=ercolor)

##### plot ring #####
        if ring:
            rng = ring*u.km
            ax2 = ax - rng*np.sin(paplus)
            by2 = by - rng*np.cos(paplus)
            ax3 = ax + rng*np.sin(paplus)
            by3 = by + rng*np.cos(paplus)
            lon1, lat1 = xy2latlon(ax2.value, by2.value, centers.lon.value, centers.lat.value)
            j = np.where(lon1 < 1e+30)
            xs, ys = m(lon1[j], lat1[j])
            xs = [i for i in xs if i < 1e+30]
            ys = [i for i in ys if i < 1e+30]
            m.plot(xs, ys, '--', color=rncolor)

            lon2, lat2 = xy2latlon(ax3.value, by3.value, centers.lon.value, centers.lat.value)
            j = np.where(lon2 < 1e+30)
            xt, yt = m(lon2[j], lat2[j])
            xt = [i for i in xt if i < 1e+30]
            yt = [i for i in yt if i < 1e+30]
            m.plot(xt, yt, '--', color=rncolor)

##### plot atm #####
        if atm:
            atmo = atm*u.km
            ax2 = ax - atmo*np.sin(paplus)
            by2 = by - atmo*np.cos(paplus)
            ax3 = ax + atmo*np.sin(paplus)
            by3 = by + atmo*np.cos(paplus)
            lon1, lat1 = xy2latlon(ax2.value, by2.value, centers.lon.value, centers.lat.value)
            j = np.where(lon1 < 1e+30)
            xs, ys = m(lon1[j], lat1[j])
            xs = [i for i in xs if i < 1e+30]
            ys = [i for i in ys if i < 1e+30]
            m.plot(xs, ys, color=atcolor)

            lon2, lat2 = xy2latlon(ax3.value, by3.value, centers.lon.value, centers.lat.value)
            j = np.where(lon2 < 1e+30)
            xt, yt = m(lon2[j], lat2[j])
            xt = [i for i in xt if i < 1e+30]
            yt = [i for i in yt if i < 1e+30]
            m.plot(xt, yt, '--', color=atcolor)

##### plot clat #####
        vec = np.arange(0, int(8000/(np.absolute(vel[elem].value))), cpoints)
        deltatime = np.sort(np.concatenate((vec,-vec[1:]), axis=0))*u.s
        axc = a + dista*np.sin(pa) + (deltatime*vel[elem])*np.cos(paplus)
        byc = b + dista*np.cos(pa) - (deltatime*vel[elem])*np.sin(paplus)
        if centert:
            m.plot(axc.value, byc.value, 'o', color=ptcolor, clip_on=False, markersize=mapsize[0].value*pscale*8.0/46.0, zorder=-0.2)

        datas2 = data + TimeDelta(deltatime)
        datas2.delta_ut1_utc = 0
        lon3 = stars[elem].ra - datas2.sidereal_time('mean', 'greenwich')
        clon1, clat1 = xy2latlon(axc.value, byc.value, lon3.value, stars[elem].dec.value)
        j = np.where(clon1 < 1e+30)
        xc, yc = m(clon1[j], clat1[j])
        xc = [i for i in xc if i < 1e+30]
        yc = [i for i in yc if i < 1e+30]
        m.plot(xc, yc, 'o', color=ptcolor, clip_on=False, markersize=mapsize[0].value*pscale*8.0/46.0)

#        xc, yc = m(lon.value, stars[elem].dec.value)
        if centert:
            m.plot(a + dista*np.sin(pa), b + dista*np.cos(pa), 'o', color=ptcolor, clip_on=False, markersize=mapsize[0].value*pscale*24.0/46.0)

######## Define the title and the output label #########
        title = 'Object        Diam   Tmax   dots <> ra_off_obj_de  ra_of_star_de\n{:10s} {:4.0f} km  {:5.1f}s  {:02d} s <>{:+6.1f} {:+6.1f}  {:+6.1f} {:+6.1f} \n'\
    .format(obj, diam.value, (diam/np.absolute(vel[elem])).value, cpoints, ob_ra.value, ob_de.value, st_off_ra.value, st_off_de.value)
        labelx = '\n year-m-d    h:m:s UT     ra__dec__J2000__candidate    C/A    P/A    vel   Delta   G*  long\n\
{}  {:02d} {:02d} {:07.4f} {:+03d} {:02d} {:06.3f} {:6.3f} {:6.2f} {:6.2f}  {:5.2f} {:5.1f}  {:3.0f}'.format(data.iso,
int(stars[elem].ra.hms.h), int(stars[elem].ra.hms.m), stars[elem].ra.hms.s, int(stars[elem].dec.dms.d), np.absolute(int(stars[elem].dec.dms.m)), np.absolute(stars[elem].dec.dms.s),
            ca1.value, posa[elem].value, vel[elem].value, dist[elem].value, magR[elem], longi[elem])

########### plot direction arrow ##################
        print("--------- plot direction arrow ---------")
        print("Limits: %s" % limits)
        if not limits:
            print(a+5500000*u.m,b-5500000*u.m, np.sin(paplus+90*u.deg)*np.sign(vel[elem]), np.cos(paplus+90*u.deg)*np.sign(vel[elem]))

            tmp_sin = np.sin(paplus+90*u.deg)*np.sign(vel[elem])
            tmp_cos = np.cos(paplus+90*u.deg)*np.sign(vel[elem])
            # plt.quiver(11870997,870997, 0.84395364, 0.53641612, width=0.005)
            plt.quiver(a+5500000*u.m,b-5500000*u.m, tmp_sin.value, tmp_cos.value, width=0.005)

            # plt.quiver(a+5500000*u.m,b-5500000*u.m, np.sin(paplus+90*u.deg)*np.sign(vel[elem]), np.cos(paplus+90*u.deg)*np.sign(vel[elem]), width=0.005)
        else:
            plt.quiver(a.value + lx + (ux-lx)*0.9,b.value + ly + (uy-ly)*0.1, np.sin(paplus+90*u.deg)*np.sign(vel[elem]), np.cos(paplus+90*u.deg)*np.sign(vel[elem]), width=0.005, zorder = 1.3)

####### print the country names #####
        print("--------- print the country names ---------")
        if os.path.isfile(country) == True:
            paises = np.loadtxt(country, dtype={'names': ('nome', 'lat', 'lon'), 'formats': ('S30', 'f8', 'f8')}, delimiter=',', ndmin=1)
            xpt,ypt = m(paises['lon'], paises['lat'])
            for i in np.arange(len(xpt)):
                plt.text(xpt[i],ypt[i],np.char.strip(paises['nome'][i]), weight='bold', color='grey', fontsize=30*cscale)

####### print the sites ##############
        print("--------- print the sites ---------")
        if os.path.isfile(sitearq) == True:
            sites = np.loadtxt(sitearq, ndmin=1,  dtype={'names': ('lat', 'lon', 'alt', 'nome', 'offx', 'offy', 'color'), 'formats': ('f8', 'f8', 'f8', 'S30',  'f8', 'f8', 'S30')}, delimiter=',')

            print(sites)

            xpt,ypt = m(sites['lon'],sites['lat'])
            sss = EarthLocation(sites['lon']*u.deg,sites['lat']*u.deg,sites['alt']*u.km)
            for i in np.arange(len(xpt)):
                m.plot(xpt[i],ypt[i],'o', markersize=mapsize[0].value*sscale*10.0/46.0, color=sites['color'][i].strip().decode('utf-8'))
                plt.text(xpt[i] + sites['offx'][i]*1000,ypt[i]+sites['offy'][i]*1000, sites['nome'][i].strip().decode('utf-8'), weight='bold', fontsize=25*nscale)

####### finish plotting the map #####
        print("--------- finish plotting the map ---------")

        plt.title(title, fontsize=mapsize[0].value*25/46, fontproperties='FreeMono', weight='bold')

        plt.xlabel(labelx, fontsize=mapsize[0].value*21/46, fontproperties='FreeMono', weight='bold')
        if 'nameimg' in kwargs.keys():
            nameimg = kwargs['nameimg']
        else:
            nameimg = '{}_{}'.format(obj, data.isot)

        print("nameimg: %s" % nameimg)
        print("fmt: %s" % fmt)
        print("dpi: %s" % dpi)

        plt.savefig('{}.{}'.format(nameimg, fmt), format=fmt, dpi=dpi)

        print('Generated: {}.{}'.format(nameimg, fmt))
        plt.clf()
        plt.close()


####### run all predictions #####
    vals = np.arange(len(stars))
    if 'n' in kwargs.keys():
        vals = np.array(kwargs['n'], ndmin=1)
    if vals.max() >= len(stars):
        raise IndexError('values {} out of range for table with {} predictions'.format(vals[np.where(vals >= len(stars))],len(stars)))
    if ('process' in kwargs.keys()) and (type(kwargs['process']) == int):
        p = Pool(kwargs['process'])
        p.map(compute, vals)
    else:
        for i in vals:
            compute(i)
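
A minimal, hypothetical call to geramapa() above; the object name, diameter, and PRAIA prediction table are placeholders, and only a few of the supported keyword options are shown:

# Placeholder object, diameter (km) and prediction table; maps only prediction index 0.
geramapa('Chariklo', 250.0, file='prediction_table.dat',
         mapstyle=2, resolution='l', fmt='png', dpi=100, n=[0])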
Example #29
0
def corrections(lon, lat, alt, ra, dec, mjd, bcv_shift=None):
    """
    Calculate the heliocentric radial velocity corrections for an astronomical 
    source.

    :param lon:
        Earth longitude of the observatory (western direction is positive). Can
        be anything that initialises an `~astropy.coordinates.Angle` object
        (if float, in degrees).

    :type lon:
        :class:`~astropy.coordinates.Longitude` or float

    :param lat:
        Earth latitude of observatory. Can be anything that initialises an
        `~astropy.coordinates.Latitude` object (if float, in degrees).
    
    :type lat:
        :class:`~astropy.coordinates.Latitude` or float

    :param alt:
        Altitude of the observatory (if float, in meters).

    :type alt:
        :class:`~astropy.units.Quantity` or float

    :param ra:
        Right ascension of the object for epoch J2000 (if float, in degrees).

    :type ra:
        :class:`~astropy.coordinates.Angle` or float

    :param dec:
        Declination of the object for epoch J2000 (if float, in degrees).

    :type dec:
        :class:`~astropy.coordinates.Angle` or float

    :param mjd:
        The modified Julian date for the middle of exposure.

    :type mjd:
        float

    :returns:
        A two-length tuple containing the barycentric velocity correction and
        the heliocentric velocity correction. Both velocity corrections are
        given as :class:`~astropy.units.Quantity` objects.
    """

    if not isinstance(lon, coord.Longitude):
        lon = coord.Longitude(lon * u.deg)

    if not isinstance(lat, coord.Latitude):
        lat = coord.Latitude(lat * u.deg)

    if not isinstance(alt, u.Quantity):
        alt *= u.m

    if not isinstance(ra, u.Quantity):
        ra *= u.deg

    if not isinstance(dec, u.Quantity):
        dec *= u.deg

    # Here we specify the location so that we can easily calculate the mean
    # local sidereal time later on
    time = Time(2.4e6 + mjd, format="jd", location=(lon, lat, alt))
    epoch = time.datetime.year + time.datetime.month/12. \
        + time.datetime.day/365.

    # Precess the coordinates to the current epoch
    coordinate = coord.SkyCoord(ra, dec, frame="fk5").transform_to(
        coord.FK5(equinox="J{}".format(epoch)))

    # Convert geodetic latitude into geocentric latitude to correct for rotation
    # of the Earth
    dlat = ((-11. * 60. + 32.743) * np.sin(2 * lat) + 1.1633 * np.sin(4 * lat) \
        - 0.0026 * np.sin(6 * lat)) * u.degree
    geocentric_lat = lat + dlat / 3600.

    # Calculate distance of observer from Earth center
    r = alt + 6378160.0 * u.m * (0.998327073 \
        + 0.001676438 * np.cos(2 * geocentric_lat) \
        - 0.000003510 * np.cos(4 * geocentric_lat) \
        + 0.000000008 * np.cos(6 * geocentric_lat))

    # Calculate rotational velocity perpendicular to the radius vector
    # Note: 23.934469591229 is the sidereal day in hours for 1986
    v = 2 * np.pi * r / (23.934469591229 * 3600 * u.second)

    # Calculate vdiurnal velocity
    try:
        vdiurnal = v * np.cos(lat) * np.cos(coordinate.dec) \
          * np.sin(coordinate.ra - time.sidereal_time("mean"))

    except Exception:
        logging.exception("exception in calculating vdiurnal velocity")

        # Try again with decreased precision.
        time.delta_ut1_utc = 0.0

        vdiurnal = v * np.cos(lat) * np.cos(coordinate.dec) \
          * np.sin(coordinate.ra - time.sidereal_time("mean"))

        logging.warning("Explicitly set delta_ut1_utc = 0")

    # Calculate barycentric and heliocentric velocities
    vh, vb = celestial_velocities(time)

    # Project along the line of sight
    projection = np.array([
        np.cos(coordinate.dec) * np.cos(coordinate.ra),
        np.cos(coordinate.dec) * np.sin(coordinate.ra),
        np.sin(coordinate.dec)
    ])
    vbar = (vb * projection).sum()
    vhel = (vh * projection).sum()

    # Using barycentric velocity for correction
    # ---------------------------------------------------------------------
    # E. Holmbeck put this if statement in
    if bcv_shift is not None:
        vbar_correction = bcv_shift
    else:
        vbar_correction = vdiurnal + vbar
    # ---------------------------------------------------------------------
    vhel_correction = vdiurnal + vhel

    # [TODO] it may be useful to return other components of velocity or extra
    # information about the transforms (e.g., gmst, ut, lmst, dlat, lat, vbar,
    # vhel, etc)
    return (vbar_correction, vhel_correction)
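
This variant also accepts bcv_shift, which simply replaces the computed barycentric correction; a hypothetical call with placeholder site and target values (celestial_velocities() assumed importable) might be:

import astropy.units as u

# Placeholder inputs; bcv_shift is returned unchanged as the barycentric correction.
vbar_corr, vhel_corr = corrections(70.7, -29.0, 2400.0, 150.0, -30.0, 57000.5,
                                   bcv_shift=25.0 * u.km / u.s)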
Example #30
0
def NewSt(utctime):
    t = Time(utctime, scale='utc', location=xinglong)
    t.delta_ut1_utc = 0.
    return t.sidereal_time('apparent').value
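
NewSt() relies on a module-level xinglong EarthLocation; a self-contained sketch with illustrative site coordinates might look like this:

from astropy.coordinates import EarthLocation
import astropy.units as u

# Illustrative coordinates standing in for the real xinglong site definition.
xinglong = EarthLocation(lon=117.57 * u.deg, lat=40.39 * u.deg, height=960.0 * u.m)
print(NewSt('2018-01-01 12:00:00'))  # apparent local sidereal time, in hours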
Example #31
0
def corrections(lon, lat, alt, ra, dec, mjd):
    """
    Calculate the heliocentric radial velocity corrections for an astronomical 
    source.
    Parameters
    ----------
    lon : `~astropy.coordinates.Longitude` or float
        Earth longitude of the observatory (western direction is positive). Can
        be anything that initialises an `~astropy.coordinates.Angle` object
        (if float, in degrees).
    lat : `~astropy.coordinates.Latitude` or float
        Earth latitude of observatory. Can be anything that initialises an
        `~astropy.coordinates.Latitude` object (if float, in degrees).
    alt : `~astropy.units.Quantity` or float
        Altitude of the observatory (if float, in meters).
    ra : `~astropy.coordinates.Angle` or float
        Right ascension of the object for epoch J2000 (if float, in degrees).
    dec : `~astropy.coordinates.Angle` or float
        Declination of the object for epoch J2000 (if float, in degrees).
    mjd : float
        The modified Julian date for the middle of exposure.
    Returns
    -------
    barycorr : `~astropy.units.Quantity`
        The barycentric velocity correction.
    helcorr : `~astropy.units.Quantity`
        The heliocentric velocity correction.
    """

    if not isinstance(lon, coord.Longitude):
        lon = coord.Longitude(lon * u.deg)

    if not isinstance(lat, coord.Latitude):
        lat = coord.Latitude(lat * u.deg)

    if not isinstance(alt, u.Quantity):
        alt *= u.m

    if not isinstance(ra, u.Quantity):
        ra *= u.deg

    if not isinstance(dec, u.Quantity):
        dec *= u.deg

    # Here we specify the location so that we can easily calculate the mean
    # local sidereal time later on
    time = Time(2.4e6 + mjd, format="jd", location=(lon, lat, alt))
    epoch = time.datetime.year + time.datetime.month/12. \
        + time.datetime.day/365.

    # Precess the coordinates to the current epoch
    coordinate = coord.SkyCoord(ra, dec, frame="fk5").transform_to(coord.FK5(equinox="J%s" % (epoch)))

    # Convert geodetic latitude into geocentric latitude to correct for rotation
    # of the Earth
    dlat = ((-11. * 60. + 32.743) * np.sin(2 * lat) + 1.1633 * np.sin(4 * lat) \
        - 0.0026 * np.sin(6 * lat)) * u.degree
    geocentric_lat = lat + dlat / 3600.

    # Calculate distance of observer from Earth center
    r = alt + 6378160.0 * u.m * (0.998327073 \
        + 0.001676438 * np.cos(2 * geocentric_lat) \
        - 0.000003510 * np.cos(4 * geocentric_lat) \
        + 0.000000008 * np.cos(6 * geocentric_lat))

    # Calculate rotational velocity perpendicular to the radius vector
    # Note: 23.934469591229 is the sidereal day in hours for 1986
    v = 2 * np.pi * r / (23.934469591229 * 3600 * u.second)

    # Calculate vdiurnal velocity
    time.delta_ut1_utc = 0  # we get an error otherwise; no big deal for this application
    vdiurnal = v * np.cos(lat) * np.cos(coordinate.dec) \
      * np.sin(coordinate.ra - time.sidereal_time("mean"))

    # Calculate barycentric and heliocentric velocities
    vh, vb = baryvel(time)

    # Project along the line of sight
    projection = np.array([
        np.cos(coordinate.dec) * np.cos(coordinate.ra),
        np.cos(coordinate.dec) * np.sin(coordinate.ra),
        np.sin(coordinate.dec)])
    vbar = (vb * projection).sum()
    vhel = (vh * projection).sum()

    # Using barycentric velocity for correction
    vbar_correction = vdiurnal + vbar
    vhel_correction = vdiurnal + vhel

    # [TODO] it may be useful to return other components of velocity or extra
    # information about the transforms (e.g., gmst, ut, lmst, dlat, lat, vbar,
    # vhel, etc)
    return (vbar_correction, vhel_correction)
Example #32
0
# Loop over i, j to cover all baselines
for i in range(len(telescopes)):
    t1 = telescopes[i]

    dX = t1.x
    dY = t1.y
    dZ = t1.z
    Xvec = np.array([dX / (1 * u.m), dY / (1 * u.m), dZ / (1 * u.m)])

    k = 0
    for t in trange:
        # calculate sidereal times at both telescopes,
        # average for use in hourangle
        ot = Time(midnight + t, scale='utc', location=t1)
        ot.delta_ut1_utc = 0.
        obst = ot.sidereal_time('mean')

        # I'm certain there's a better astropy way to get ot_avg in degrees
        h = obst.deg * u.deg - source.ra
        dec = source.dec

        # matrix to transform xyz to uvw
        mat = np.array([
            (np.sin(h), np.cos(h), 0),
            (-np.sin(dec) * np.cos(h), np.sin(dec) * np.sin(h), np.cos(dec)),
            (np.cos(dec) * np.cos(h), -np.cos(dec) * np.sin(h), np.sin(dec))
        ])

        uvw = np.dot(mat, Xvec)
        uvw_mat[k, i] = uvw
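
The loop above assumes telescopes, trange, midnight, source, and uvw_mat are defined elsewhere; a self-contained sketch of the same XYZ-to-UVW rotation for a single antenna offset and hour angle (all values made up) is:

import numpy as np

# Made-up hour angle, declination, and antenna position offset in metres.
h = np.radians(15.0)
dec = np.radians(-30.0)
Xvec = np.array([100.0, 250.0, -50.0])

mat = np.array([
    (np.sin(h), np.cos(h), 0.0),
    (-np.sin(dec) * np.cos(h), np.sin(dec) * np.sin(h), np.cos(dec)),
    (np.cos(dec) * np.cos(h), -np.cos(dec) * np.sin(h), np.sin(dec)),
])
print(np.dot(mat, Xvec))  # (u, v, w) in metres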
Example #33
0
File: mapa.py Project: altairgomes/altair
stars = SkyCoord(coor, frame='icrs', unit=(u.hourangle, u.degree))

################### reading time ########################

tempo = np.core.defchararray.add(np.array(dados['ano']), ['-'])
tempo = np.core.defchararray.add(tempo, np.array(dados['mes']))
tempo = np.core.defchararray.add(tempo, ['-'])
tempo = np.core.defchararray.add(tempo, np.array(dados['dia']))
tempo = np.core.defchararray.add(tempo, [' '])
tempo = np.core.defchararray.add(tempo, np.array(dados['hor']))
tempo = np.core.defchararray.add(tempo, [':'])
tempo = np.core.defchararray.add(tempo, np.array(dados['min']))
tempo = np.core.defchararray.add(tempo, [':'])
tempo = np.core.defchararray.add(tempo, np.array(list(map(str, dados['sec']))))
datas = Time(tempo, format='iso', scale='utc')
datas.delta_ut1_utc = 0

############### defining parameters #############
ca = dados['ca']*u.arcsec
pa = dados['pa']*u.deg
vel = dados['vel']*(u.km/u.s)
dist = dados['delta']*u.AU
off_ra = dados['ora']*u.mas
off_de = dados['ode']*u.mas
vec = [-7, -6, -5, -4, -3, -2, -1, 1, 2, 3, 4, 5, 6, 7]

################################### defining the function that generates the map #############################################

def geramapa(idx):
    lon = stars[idx].ra - datas[idx].sidereal_time('mean', 'greenwich')
Example #34
0
File: skymap.py Project: tvern23/mwa_pb
def plot_MWAconstellations(
        outfile=None,
        obsinfo=None,
        viewgps=None,
        observing=True,
        showbeam=True,
        constellations=True,
        gleamsources=False,
        notext=False,
        skydata=None,
        background=None,
        hidenulls=False,
        channel=None,  # Frequency channel to use for the beam map, defaults to mean of all channels in obs.
        xmas=XMAS,
        plotscale=SCALE,  # A scale of 1.0 gives a 1200x1200 pixel plot
        logger=DEFAULTLOGGER):
    if obsinfo is None:
        logger.error('Unable to find observation info')
        return None

    if skydata is None:
        skydata = SkyData()
    if not skydata.valid:
        logger.error('Unable to load star/planet data, aborting.')
        return None

    if background is None:
        background = 'transparent'

    if channel is None:
        if 0 in obsinfo['rfstreams']:
            channel = obsinfo['rfstreams'][0]['frequencies'][12]
        elif '0' in obsinfo['rfstreams']:
            channel = obsinfo['rfstreams']['0']['frequencies'][12]

    obstime = Time(obsinfo['starttime'], format='gps', scale='utc')

    if viewgps is None:
        viewtime = obstime
    else:
        viewtime = Time(viewgps, format='gps', scale='utc')

    viewtime.delta_ut1_utc = 0  # We don't care about IERS tables and high precision answers
    LST_hours = viewtime.sidereal_time(kind='apparent',
                                       longitude=config.MWAPOS.longitude)

    mapzenith = SkyCoord(ra=skydata.skymapRA,
                         dec=skydata.skymapDec,
                         equinox='J2000',
                         unit=(astropy.units.deg, astropy.units.deg))
    mapzenith.location = config.MWAPOS
    mapzenith.obstime = viewtime
    altaz = mapzenith.transform_to('altaz')
    Az, Alt = altaz.az.deg, altaz.alt.deg

    fig = plt.figure(figsize=(FIGSIZE * plotscale, FIGSIZE * plotscale),
                     dpi=DPI)
    ax1 = fig.add_subplot(1, 1, 1)

    bmap = Basemap(projection='ortho',
                   lat_0=config.MWAPOS.latitude.deg,
                   lon_0=LST_hours.hour * 15 - 360,
                   ax=ax1)
    nx = len(skydata.skymapra)
    ny = len(skydata.skymapdec)

    ax1.cla()

    # show the Haslam map
    tform_skymap = bmap.transform_scalar(skydata.basemap[0].data[0][:, ::-1],
                                         skydata.skymapra[::-1] * 15,
                                         skydata.skymapdec,
                                         nx,
                                         ny,
                                         masked=True)
    bmap.imshow(numpy.ma.log10(tform_skymap[:, ::-1]),
                cmap=CM,
                vmin=math.log10(LOW),
                vmax=math.log10(HIGH))

    delays = []
    if showbeam:
        if not hidenulls:
            contours = [0.001, 0.1, 0.5, 0.90]
            if observing:
                beamcolor = ((0.0, 0.0, 0.0), (0.0, 0.5, 0.0),
                             (0.0, 0.75, 0.0), (0.0, 1.0, 0.0))
            else:
                beamcolor = ((0.0, 0.0, 0.0), (0.5, 0.5, 0.5),
                             (0.75, 0.75, 0.75), (1.0, 1.0, 1.0))
        else:
            contours = [0.1, 0.5, 0.90]
            if observing:
                beamcolor = ((0.0, 0.5, 0.0), (0.0, 0.75, 0.0), (0.0, 1.0,
                                                                 0.0))
            else:
                beamcolor = ((0.5, 0.5, 0.5), (0.75, 0.75, 0.75), (1.0, 1.0,
                                                                   1.0))

        # If the observation is in the future, calculate what delays will be used, instead of using the recorded actual delays
        if obstime.gps > Time.now().gps + 10:
            if 0 in obsinfo['rfstreams']:
                delays = calc_delays(az=obsinfo['rfstreams'][0]['azimuth'],
                                     el=obsinfo['rfstreams'][0]['elevation'])
            elif '0' in obsinfo['rfstreams']:
                delays = calc_delays(az=obsinfo['rfstreams']['0']['azimuth'],
                                     el=obsinfo['rfstreams']['0']['elevation'])
            else:
                delays = [33] * 16
            logger.debug("Calculated future delays: %s" % delays)
        else:
            if 0 in obsinfo['rfstreams']:
                delays = obsinfo['rfstreams'][0]['xdelays']
            elif '0' in obsinfo['rfstreams']:
                delays = obsinfo['rfstreams']['0']['xdelays']
            logger.debug("Used actual delays: %s" % delays)

        # get the primary beam
        R = primarybeammap.return_beam(Alt, Az, delays, channel * 1.28)

        # show the beam
        X, Y = bmap(skydata.skymapRA, skydata.skymapDec)
        CS = bmap.contour(bmap.xmax - X,
                          Y,
                          R,
                          contours,
                          linewidths=plotscale,
                          colors=beamcolor)
        ax1.clabel(CS, inline=1, fontsize=10 * plotscale)

    # Find the constellation that the beam is in
    if obsinfo['ra_phase_center'] is not None:
        ra = obsinfo['ra_phase_center']
        dec = obsinfo['dec_phase_center']
    else:
        ra = obsinfo['metadata']['ra_pointing']
        dec = obsinfo['metadata']['dec_pointing']
    if (ra is not None) and (dec is not None):
        constellation = ephem.constellation(
            (ra * math.pi / 180.0, dec * math.pi / 180.0))
    else:
        constellation = ["N/A", "N/A"]

    X0, Y0 = bmap(LST_hours.hour * 15 - 360, config.MWAPOS.latitude.deg)

    if constellations:
        # plot the constellations
        ConstellationStars = []
        for c in skydata.constellations.keys():
            for i in range(0, len(skydata.constellations[c][1]), 2):
                i1 = numpy.where(skydata.hip['HIP'] ==
                                 skydata.constellations[c][1][i])[0][0]
                i2 = numpy.where(skydata.hip['HIP'] ==
                                 skydata.constellations[c][1][i + 1])[0][0]
                star1 = skydata.hip[i1]
                star2 = skydata.hip[i2]
                if i1 not in ConstellationStars:
                    ConstellationStars.append(i1)
                if i2 not in ConstellationStars:
                    ConstellationStars.append(i2)
                ra1, dec1 = map(numpy.degrees,
                                (star1['RArad'], star1['DErad']))
                ra2, dec2 = map(numpy.degrees,
                                (star2['RArad'], star2['DErad']))
                ra = numpy.array([ra1, ra2])
                dec = numpy.array([dec1, dec2])
                newx, newy = bmap(ra, dec)
                testx, testy = bmap(newx, newy, inverse=True)
                if testx.max() < 1e30 and testy.max() < 1e30:
                    bmap.plot(2 * X0 - newx,
                              newy,
                              'r-',
                              linewidth=plotscale,
                              latlon=False)  # This bit generates an error

        # figure out the coordinates
        # and plot the stars
        ra = numpy.degrees(skydata.hip[ConstellationStars]['RArad'])
        dec = numpy.degrees(skydata.hip[ConstellationStars]['DErad'])
        m = numpy.degrees(skydata.hip[ConstellationStars]['Hpmag'])
        newx, newy = bmap(ra, dec)
        # testx, testy = bmap(newx, newy, inverse=True)
        good = (newx > bmap.xmin) & (newx < bmap.xmax) & (newy > bmap.ymin) & (
            newy < bmap.ymax)
        size = 60 - 15 * m
        size[size <= 15] = 15
        size[size >= 60] = 60
        bmap.scatter(bmap.xmax - newx[good],
                     newy[good],
                     size[good] * plotscale,
                     'r',
                     edgecolor='none',
                     alpha=0.7)

    if gleamsources:
        ra = numpy.array([x[1] for x in skydata.gleamcat])
        dec = numpy.array([x[2] for x in skydata.gleamcat])
        flux = numpy.array([x[3] for x in skydata.gleamcat])
        newx, newy = bmap(ra, dec)
        # testx, testy = bmap(newx, newy, inverse=True)
        good = (newx > bmap.xmin) & (newx < bmap.xmax) & (newy > bmap.ymin) & (
            newy < bmap.ymax)
        size = flux / 1.0
        size[size <= 7] = 7
        size[size >= 60] = 60
        bmap.scatter(bmap.xmax - newx[good],
                     newy[good],
                     size[good] * plotscale,
                     'b',
                     edgecolor='none',
                     alpha=0.7)

    observer = ephem.Observer()
    # make sure no refraction is included
    observer.pressure = 0
    observer.long = config.MWAPOS.longitude.radian
    observer.lat = config.MWAPOS.latitude.radian
    observer.elevation = config.MWAPOS.height.value
    observer.date = viewtime.datetime.strftime('%Y/%m/%d %H:%M:%S')

    # plot the bodies
    for b in skydata.bodies.keys():
        name = skydata.bodies[b][2]
        color = skydata.bodies[b][1]
        size = skydata.bodies[b][0]
        body = b(observer)
        ra, dec = map(numpy.degrees, (body.ra, body.dec))
        newx, newy = bmap(ra, dec)
        testx, testy = bmap(newx, newy, inverse=True)
        if testx < 1e30 and testy < 1e30:
            bmap.scatter(2 * X0 - newx,
                         newy,
                         s=size * plotscale,
                         c=color,
                         alpha=1.0,
                         latlon=False,
                         edgecolor='none')
            ax1.text(bmap.xmax - newx + 2e5,
                     newy,
                     name,
                     horizontalalignment='left',
                     fontsize=12 * plotscale,
                     color=color,
                     verticalalignment='center')

    # and label some sources
    for source in primarybeammap.sources.keys():
        if source == 'EOR0b':
            continue
        if source == 'CenA':
            primarybeammap.sources[source][0] = 'Cen A'
        if source == 'ForA':
            primarybeammap.sources[source][0] = 'For A'
        r = astropy.coordinates.Longitude(
            angle=primarybeammap.sources[source][1],
            unit=astropy.units.hour).hour
        d = astropy.coordinates.Latitude(
            angle=primarybeammap.sources[source][2],
            unit=astropy.units.deg).deg
        horizontalalignment = 'left'
        x = r
        if (len(primarybeammap.sources[source]) >= 6
                and primarybeammap.sources[source][5] == 'c'):
            horizontalalignment = 'center'
            x = r
        if (len(primarybeammap.sources[source]) >= 6
                and primarybeammap.sources[source][5] == 'r'):
            horizontalalignment = 'right'
            x = r
        fontsize = primarybeammap.defaultsize
        if (len(primarybeammap.sources[source]) >= 5):
            fontsize = primarybeammap.sources[source][4]
        color = primarybeammap.defaultcolor
        if (len(primarybeammap.sources[source]) >= 4):
            color = primarybeammap.sources[source][3]
        if color == 'k':
            color = 'w'
        xx, yy = bmap(x * 15 - 360, d)
        try:
            if xx < 1e30 and yy < 1e30:
                ax1.text(bmap.xmax - xx + 2e5,
                         yy,
                         primarybeammap.sources[source][0],
                         horizontalalignment=horizontalalignment,
                         fontsize=fontsize * plotscale,
                         color=color,
                         verticalalignment='center')
        except Exception:
            pass

    if not notext:
        if background == 'black':
            fontcolor = 'white'
        else:
            fontcolor = 'black'

        if showbeam:
            ax1.text(
                0,
                bmap.ymax - 2e5,
                'Obs ID %d with delays %s\n at %s:\n%s at %d MHz\n in the constellation %s'
                % (obsinfo['starttime'], delays,
                   obstime.datetime.strftime('%Y-%m-%d %H:%M UT'),
                   obsinfo['obsname'], channel * 1.28, constellation[1]),
                fontsize=10 * plotscale,
                color=fontcolor)
        else:
            ax1.text(0,
                     bmap.ymax - 2e5,
                     '%s:\nNo recent observation' %
                     (obstime.datetime.strftime('%Y-%m-%d %H:%M UT')),
                     fontsize=10 * plotscale,
                     color=fontcolor)

    ax1.text(bmap.xmax,
             Y0,
             'W',
             fontsize=12 * plotscale,
             horizontalalignment='left',
             verticalalignment='center')
    ax1.text(bmap.xmin,
             Y0,
             'E',
             fontsize=12 * plotscale,
             horizontalalignment='right',
             verticalalignment='center')
    ax1.text(X0,
             bmap.ymax,
             'N',
             fontsize=12 * plotscale,
             horizontalalignment='center',
             verticalalignment='bottom')
    ax1.text(X0,
             bmap.ymin,
             'S',
             fontsize=12 * plotscale,
             horizontalalignment='center',
             verticalalignment='top')

    try:
        if type(outfile) == str:
            if not xmas:
                if background.lower() == 'transparent':
                    fig.savefig(outfile,
                                transparent=True,
                                facecolor='none',
                                dpi=DPI)
                else:
                    fig.savefig(outfile,
                                transparent=False,
                                facecolor=background,
                                dpi=DPI)
                return ''
            else:
                buf = io.BytesIO()
                if background.lower() == 'transparent':
                    fig.savefig(buf,
                                format='png',
                                transparent=True,
                                facecolor='none',
                                dpi=DPI)
                else:
                    fig.savefig(buf,
                                format='png',
                                transparent=False,
                                facecolor=background,
                                dpi=DPI)
                buf.seek(0)
                im = Image.open(buf)
                im.load()
                buf.close()
                r = Image.open('Treindeers.png')
                im.paste(r, box=(250, 300), mask=r)
                buf2 = io.BytesIO()
                im.save(buf2, format=outfile[outfile.find('.') + 1:].upper())
                buf2.seek(0)
                outf = open(outfile, 'wb')
                outf.write(buf2.read())
                outf.close()
                return ''
        else:
            buf = io.BytesIO()
            if background.lower() == 'transparent':
                fig.savefig(buf,
                            format='png',
                            transparent=True,
                            facecolor='none',
                            dpi=DPI)
            else:
                fig.savefig(buf,
                            format='png',
                            transparent=False,
                            facecolor=background,
                            dpi=DPI)
            buf.seek(0)
            if not xmas:
                return buf.read()
            else:
                im = Image.open(buf)
                im.load()
                buf.close()
                r = Image.open('Treindeers.png')
                im.paste(r, box=(250, 100), mask=r)
                buf2 = io.BytesIO()
                im.save(buf2, format='PNG')
                buf2.seek(0)
                return buf2.read()
    except AssertionError:
        logger.error('Cannot save output: %s', outfile)
        return None
    finally:
        plt.close(fig)
        del ax1
        del fig
Example #35
0
def get_LST(gps):
    time = Time(gps, format='gps', scale='utc')
    time.delta_ut1_utc = 0.0
    LST = time.sidereal_time('mean', config.MWAPOS.longitude.hour)
    return LST.hour  # keep as decimal hr
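
A hedged usage example for get_LST() above; the GPS second is arbitrary and config.MWAPOS is assumed to be the MWA EarthLocation:

# Arbitrary GPS second, chosen only for illustration.
lst_hours = get_LST(1126604400)
print('LST = %.4f h' % lst_hours)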
Example #36
0
def main():

    args = parse_args()

    print(args.beams)
    beam_range = args.beams.split(',')
    beams = range(int(beam_range[0]), int(beam_range[1]) + 1)
    freqchunks = args.bin_num
    date = args.date

    print(beams)

    basedir = args.basedir

    with open(args.task_ids) as f:
        task_id = f.read().splitlines()

    np.warnings.filterwarnings('ignore')

    # Find calibrator position
    calib = SkyCoord.from_name(args.calibname)

    cell_size = 100. / 3600.

    # Put all the output from drift_scan_auto_corr.ipynb in a unique folder per source, per set of drift scans.
    datafiles, posfiles = [], []

    for i in range(len(task_id)):
        datafiles.append('{}{}/{}_exported_data_frequency_split.csv'.format(
            basedir, task_id[i], task_id[i]))
        posfiles.append('{}{}/{}_hadec.csv'.format(basedir, task_id[i],
                                                   task_id[i]))

    datafiles.sort()
    posfiles.sort()

    # Put calibrator into apparent coordinates (because that is what the telescope observes it in.)
    test = calib.transform_to('fk5')
    calibnow = test.transform_to(
        FK5(equinox='J{}'.format(task_id2equinox(task_id[0]))))

    # Read data from tables
    data_tab, hadec_tab = [], []
    print("\nReading in all the data...")
    for file, pos in zip(datafiles, posfiles):
        data_tab.append(Table.read(file, format='csv'))  # list of tables
        hadec_tab.append(Table.read(pos, format='csv'))  # list of tables

    print("Making beam maps: ")
    for beam in beams:
        print(beam)

        for f in range(freqchunks):
            x, y, z_xx, z_yy = [], [], [], []

            for data, hadec in zip(data_tab, hadec_tab):
                hadec_start = SkyCoord(ra=hadec['ha'],
                                       dec=hadec['dec'],
                                       unit=(u.rad, u.rad))
                time_mjd = Time(data['time'] / (3600 * 24), format='mjd')
                time_mjd.delta_ut1_utc = 0  # extra line to compensate for missing IERS tables
                lst = time_mjd.sidereal_time('apparent', westerbork().lon)

                HAcal = lst - calibnow.ra  # in sky coords
                dHAsky = HAcal - hadec_start[beam].ra + (
                    24 * u.hourangle)  # in sky coords in hours
                dHAsky.wrap_at('180d', inplace=True)
                dHAphys = dHAsky * np.cos(hadec_start[beam].dec.deg *
                                          u.deg)  # physical offset in hours

                x = np.append(x, dHAphys.deg)
                y = np.append(
                    y, np.full(len(dHAphys.deg), hadec_start[beam].dec.deg))
                z_xx = np.append(
                    z_xx,
                    data['auto_corr_beam_{}_freq_{}_xx'.format(beam, f)] -
                    np.median(data['auto_corr_beam_{}_freq_{}_xx'.format(
                        beam, f)]))
                z_yy = np.append(
                    z_yy,
                    data['auto_corr_beam_{}_freq_{}_yy'.format(beam, f)] -
                    np.median(data['auto_corr_beam_{}_freq_{}_yy'.format(
                        beam, f)]))

            # Create the 2D plane, do a cubic interpolation, and append it to the cube.
            tx = np.arange(min(x), max(x), cell_size)
            ty = np.arange(min(y), max(y), cell_size)
            XI, YI = np.meshgrid(tx, ty)
            gridcubx = interpolate.griddata(
                (x, y), z_xx, (XI, YI),
                method='cubic')  # median already subtracted
            gridcuby = interpolate.griddata((x, y),
                                            z_yy, (XI, YI),
                                            method='cubic')

            # Find the reference pixel at the apparent coordinates of the calibrator
            ref_pixy = (calibnow.dec.deg -
                        min(y)) / cell_size + 1  # FITS indexed from 1
            ref_pixx = (-min(x)) / cell_size + 1  # FITS indexed from 1
            ref_pixz = 1  # FITS indexed from 1

            # Find the peak of the primary beam to normalize
            norm_xx = np.max(gridcubx[int(ref_pixy) - 3:int(ref_pixy) + 4,
                                      int(ref_pixx) - 3:int(ref_pixx) + 4])
            norm_yy = np.max(gridcuby[int(ref_pixy) - 3:int(ref_pixy) + 4,
                                      int(ref_pixx) - 3:int(ref_pixx) + 4])

            # Create 3D array with proper size for given scan set to save data as a cube
            if f == 0:
                cube_xx = np.zeros(
                    (freqchunks, gridcubx.shape[0], gridcubx.shape[1]))
                cube_yy = np.zeros(
                    (freqchunks, gridcuby.shape[0], gridcuby.shape[1]))
                db_xx = np.zeros(
                    (freqchunks, gridcubx.shape[0], gridcubx.shape[1]))
                db_yy = np.zeros(
                    (freqchunks, gridcuby.shape[0], gridcuby.shape[1]))

            cube_xx[f, :, :] = gridcubx / norm_xx
            cube_yy[f, :, :] = gridcuby / norm_yy

            # Convert to decibels
            db_xx[f, :, :] = np.log10(gridcubx / norm_xx) * 10.
            db_yy[f, :, :] = np.log10(gridcuby / norm_yy) * 10.

        stokesI = np.sqrt(0.5 * cube_yy**2 + 0.5 * cube_xx**2)
        squint = cube_xx - cube_yy

        wcs = WCS(naxis=3)
        #wcs.wcs.cdelt = np.array([-cell_size, cell_size, 12.207e3*1500])  ## I think this should be 1050, 12.207e3 is the width of 1 channel
        wcs.wcs.cdelt = np.array([-cell_size, cell_size, 12.207e3 * 1050])
        wcs.wcs.ctype = ['RA---TAN', 'DEC--TAN', 'FREQ']
        #wcs.wcs.crval = [calib.ra.to_value(u.deg), calib.dec.to_value(u.deg), 1219.609e6+(12.207e3*(500+1500/2))] # 1280e6+(12.207e3*(-(24576/2-14000)))]
        wcs.wcs.crval = [
            calib.ra.to_value(u.deg),
            calib.dec.to_value(u.deg),
            1280e6 + (12.207e3 * (-(24576 / 2 - 14000)))
        ]
        wcs.wcs.crpix = [ref_pixx, ref_pixy, ref_pixz]
        wcs.wcs.specsys = 'TOPOCENT'
        wcs.wcs.restfrq = 1.420405752e+9
        header = wcs.to_header()

        hdux = fits.PrimaryHDU(cube_xx, header=header)
        hduy = fits.PrimaryHDU(cube_yy, header=header)
        hduI = fits.PrimaryHDU(stokesI, header=header)
        hdusq = fits.PrimaryHDU(squint, header=header)

        if not os.path.exists(basedir + 'fits_files/{}/'.format(date)):
            os.mkdir(basedir + 'fits_files/{}/'.format(date))

        # Save the FITS files
        hdux.writeto(basedir + 'fits_files/{}/{}_{}_{:02}_xx.fits'.format(
            date, args.calibname.replace(" ", ""), date, beam),
                     overwrite=True)
        hduy.writeto(basedir + 'fits_files/{}/{}_{}_{:02}_yy.fits'.format(
            date, args.calibname.replace(" ", ""), date, beam),
                     overwrite=True)
        hduI.writeto(basedir + 'fits_files/{}/{}_{}_{:02}_I.fits'.format(
            date, args.calibname.replace(" ", ""), date, beam),
                     overwrite=True)
        hdusq.writeto(basedir + 'fits_files/{}/{}_{}_{:02}_diff.fits'.format(
            date, args.calibname.replace(" ", ""), date, beam),
                      overwrite=True)
예제 #37
0
    def _NewVOEvent(self, dm, dm_err, width, snr, flux, ra, dec, semiMaj, semiMin,
                    ymw16, name, importance, utc, gl, gb, gain,
                    dt=TSAMP.to(u.ms).value, delta_nu_MHz=(BANDWIDTH / NCHAN).to(u.MHz).value,
                    nu_GHz=1.37, posang=0, test=None):
        """
        Create a VOEvent

        :param float dm: Dispersion measure (pc cm**-3)
        :param float dm_err: Error on DM (pc cm**-3)
        :param float width: Pulse width (ms)
        :param float snr: Signal-to-noise ratio
        :param float flux: flux density (mJy)
        :param float ra: Right ascension (deg)
        :param float dec: Declination (deg)
        :param float semiMaj: Localisation region semi-major axis (arcmin)
        :param float semiMin: Localisation region semi-minor axis (arcmin)
        :param float ymw16: YMW16 DM (pc cm**-3)
        :param str name: Source name
        :param float importance: Trigger importance (0-1)
        :param str utc: UTC arrival time in ISOT format
        :param float gl: Galactic longitude (deg)
        :param float gb: Galactic latitude (deg)
        :param float gain: Telescope gain (K Jy**-1)
        :param float dt: Telescope time resolution (ms)
        :param float delta_nu_MHz: Telescope frequency channel width (MHz)
        :param float nu_GHz: Telescope centre frequency (GHz)
        :param float posang: Localisation region position angle (deg)
        :param bool test: Whether to send a test event or observation event
        """

        z = dm / 1000.0  # May change
        errDeg = semiMaj / 60.0

        # Parse UTC
        utc_YY = int(utc[:4])
        utc_MM = int(utc[5:7])
        utc_DD = int(utc[8:10])
        utc_hh = int(utc[11:13])
        utc_mm = int(utc[14:16])
        utc_ss = float(utc[17:])
        t = Time(utc, scale='utc', format='isot')
        # IERS server is down, avoid using it
        t.delta_ut1_utc = 0
        mjd = t.mjd

        ivorn = ''.join([name, str(utc_hh), str(utc_mm), '/', str(mjd)])

        # use default value for test flag if not set
        if test is None:
            test = self.test

        # Set role to either test or real observation
        if test:
            self.logger.info("Event type is test")
            v = vp.Voevent(stream='nl.astron.apertif/alert', stream_id=ivorn,
                           role=vp.definitions.roles.test)
        else:
            self.logger.info("Event type is observation")
            v = vp.Voevent(stream='nl.astron.apertif/alert', stream_id=ivorn,
                           role=vp.definitions.roles.observation)
        # Author origin information
        vp.set_who(v, date=datetime.datetime.utcnow(), author_ivorn="nl.astron")
        # Author contact information
        vp.set_author(v, title="ARTS FRB alert system", contactName="Leon Oostrum",
                      contactEmail="*****@*****.**", shortName="ALERT")
        # Parameter definitions

        # Apertif-specific observing configuration
        beam_sMa = vp.Param(name="beam_semi-major_axis", unit="MM",
                            ucd="instr.beam;pos.errorEllipse;phys.angSize.smajAxis", ac=True, value=semiMaj)
        beam_sma = vp.Param(name="beam_semi-minor_axis", unit="MM",
                            ucd="instr.beam;pos.errorEllipse;phys.angSize.sminAxis", ac=True, value=semiMin)
        beam_rot = vp.Param(name="beam_rotation_angle", value=str(posang), unit="Degrees",
                            ucd="instr.beam;pos.errorEllipse;instr.offset", ac=True)
        tsamp = vp.Param(name="sampling_time", value=str(dt), unit="ms", ucd="time.resolution", ac=True)
        bw = vp.Param(name="bandwidth", value=str(delta_nu_MHz), unit="MHz", ucd="instr.bandwidth", ac=True)
        nchan = vp.Param(name="nchan", value=str(NCHAN), dataType="int",
                         ucd="meta.number;em.freq;em.bin", unit="None")
        cf = vp.Param(name="centre_frequency", value=str(1000 * nu_GHz), unit="MHz", ucd="em.freq;instr", ac=True)
        npol = vp.Param(name="npol", value="2", dataType="int", unit="None")
        bits = vp.Param(name="bits_per_sample", value="8", dataType="int", unit="None")
        gain = vp.Param(name="gain", value=str(gain), unit="K/Jy", ac=True)
        tsys = vp.Param(name="tsys", value=str(TSYS.to(u.Kelvin).value), unit="K", ucd="phot.antennaTemp", ac=True)
        backend = vp.Param(name="backend", value="ARTS")
        # beam = vp.Param(name="beam", value= )

        v.What.append(vp.Group(params=[beam_sMa, beam_sma, beam_rot, tsamp,
                                       bw, nchan, cf, npol, bits, gain, tsys, backend],
                               name="observatory parameters"))

        # Event parameters
        DM = vp.Param(name="dm", ucd="phys.dispMeasure", unit="pc/cm^3", ac=True, value=str(dm))
        DM_err = vp.Param(name="dm_err", ucd="stat.error;phys.dispMeasure", unit="pc/cm^3", ac=True, value=str(dm_err))
        Width = vp.Param(name="width", ucd="time.duration;src.var.pulse", unit="ms", ac=True, value=str(width))
        SNR = vp.Param(name="snr", ucd="stat.snr", unit="None", ac=True, value=str(snr))
        Flux = vp.Param(name="flux", ucd="phot.flux", unit="Jy", ac=True, value=str(flux))
        Flux.Description = "Calculated from radiometer equation. Not calibrated."
        Gl = vp.Param(name="gl", ucd="pos.galactic.lon", unit="Degrees", ac=True, value=str(gl))
        Gb = vp.Param(name="gb", ucd="pos.galactic.lat", unit="Degrees", ac=True, value=str(gb))

        # v.What.append(vp.Group(params=[DM, Width, SNR, Flux, Gl, Gb], name="event parameters"))
        v.What.append(vp.Group(params=[DM, DM_err, Width, SNR, Flux, Gl, Gb], name="event parameters"))

        # Advanced parameters (note: change the script if using a different MW model)
        mw_dm = vp.Param(name="MW_dm_limit", unit="pc/cm^3", ac=True, value=str(ymw16))
        mw_model = vp.Param(name="galactic_electron_model", value="YMW16")
        redshift_inferred = vp.Param(name="redshift_inferred", ucd="src.redshift", unit="None", value=str(z))
        redshift_inferred.Description = "Redshift estimated using z = DM/1000.0"

        v.What.append(vp.Group(params=[mw_dm, mw_model, redshift_inferred], name="advanced parameters"))

        # WhereWhen
        vp.add_where_when(v, coords=vp.Position2D(ra=ra, dec=dec, err=errDeg, units='deg',
                                                  system=vp.definitions.sky_coord_system.utc_fk5_geo),
                          obs_time=datetime.datetime(utc_YY, utc_MM, utc_DD, utc_hh, utc_mm, int(utc_ss),
                                                     tzinfo=pytz.UTC),
                          observatory_location="WSRT")

        # Why
        vp.add_why(v, importance=importance)
        v.Why.Name = name

        if vp.valid_as_v2_0(v):
            with open('{}.xml'.format(utc), 'wb') as f:
                voxml = vp.dumps(v)
                xmlstr = minidom.parseString(voxml).toprettyxml(indent="   ")
                f.write(xmlstr.encode())
                self.logger.info(vp.prettystr(v.Who))
                self.logger.info(vp.prettystr(v.What))
                self.logger.info(vp.prettystr(v.WhereWhen))
                self.logger.info(vp.prettystr(v.Why))
        else:
            self.logger.error("Unable to write file {}.xml".format(name))
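The method above slices the UTC string by hand before building the event time. The same values can be obtained by letting astropy parse the ISOT string once; a small sketch under that assumption (the timestamp is an arbitrary example, and delta_ut1_utc = 0 mirrors the original's workaround for an unreachable IERS server).

import pytz
from astropy.time import Time

utc = '2019-07-25T12:34:56.789'  # example value only
t = Time(utc, scale='utc', format='isot')
t.delta_ut1_utc = 0  # avoid any IERS lookup, as in the method above
mjd = t.mjd
obs_time = t.to_datetime(timezone=pytz.UTC)  # timezone-aware datetime, e.g. for add_where_when
print(mjd, obs_time)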
예제 #38
0
def report(predic, sitearq, step=0.1, **kwargs):
#################### Reading the input file ###################################
    try:
        dados = np.loadtxt(predic, skiprows=41, usecols=(0, 1, 2, 3, 4, 5, 6, 7, 8, 9, 10, 11, 18, 19, 20, 21, 22, 25, 26, 28, 29), \
            dtype={'names': ('dia', 'mes', 'ano', 'hor', 'min', 'sec', 'afh', 'afm', 'afs', 'ded', 'dem', 'des', 'ca', 'pa', 'vel', 'delta', 'mR', 'mK', 'long', 'ora', 'ode'),
            'formats': ('S30', 'S30', 'S30','S30', 'S30', 'S30','S20', 'S20', 'S20','S20', 'S20', 'S20', 'f8', 'f8', 'f8', 'f8', 'f8', 'f8', 'f8', 'f8', 'f8')}, ndmin=1)
    except Exception:
        raise IOError('{} is not in PRAIA format'.format(predic))
################## Reading coordinates #################
    coor = dados['afh']
    for i in ['afm', 'afs', 'ded', 'dem', 'des']:
        coor = np.core.defchararray.add(coor, ' ')
        coor = np.core.defchararray.add(coor, dados[i])
    stars = SkyCoord(coor, frame='icrs', unit=(u.hourangle, u.degree))
################### Reading times ########################
    tim = dados['ano']
    len_iso = ['-', '-', ' ', ':',':']
    arr = ['mes', 'dia', 'hor', 'min', 'sec']
    for i in np.arange(len(arr)):
        tim = np.core.defchararray.add(tim, len_iso[i])
        tim = np.core.defchararray.add(tim, dados[arr[i]])
    tim = np.char.array(tim) + '000'
    datas = Time(tim, format='iso', scale='utc')
############### Defining parameters #############
    ca = dados['ca']*u.arcsec
    posa = dados['pa']*u.deg
    vel = dados['vel']*(u.km/u.s)
    dist = dados['delta']*u.AU
    ob_off_ra = dados['ora']*u.mas
    ob_off_de = dados['ode']*u.mas
    magR = dados['mR']
    magK = dados['mK']
    longi = dados['long']
    datas.delta_ut1_utc = 0

    for elem in np.arange(len(stars)):
        print('Occ {}'.format(datas[elem].iso))
########## Apply offsets ######
        off_ra = 0.0*u.mas
        off_de = 0.0*u.mas
        ob_ra = 0.0*u.mas
        ob_de = 0.0*u.mas
        if 'off_o' in kwargs.keys():
            off_ra = off_ra + kwargs['off_o'][0]*u.mas
            off_de = off_de + kwargs['off_o'][1]*u.mas
            ob_ra = ob_off_ra[elem] + kwargs['off_o'][0]*u.mas
            ob_de = ob_off_de[elem] + kwargs['off_o'][1]*u.mas
        st_off_ra = st_off_de = 0.0*u.mas
        if 'off_s' in kwargs.keys():
            off_ra = off_ra - kwargs['off_s'][0]*u.mas
            off_de = off_de - kwargs['off_s'][1]*u.mas
            st_off_ra = kwargs['off_s'][0]*u.mas
            st_off_de = kwargs['off_s'][1]*u.mas
        dca = off_ra*np.sin(posa[elem]) + off_de*np.cos(posa[elem])
        dt = ((off_ra*np.cos(posa[elem]) - off_de*np.sin(posa[elem])).to(u.rad)*dist[elem].to(u.km)/np.absolute(vel[elem])).value*u.s
        ca1 = ca[elem] + dca
        data = datas[elem] + dt

    ########### Computing the path ##################
        vec = np.arange(0, int(8000/(np.absolute(vel[elem].value))), step)
        vec = np.sort(np.concatenate((vec,-vec[1:]), axis=0))
        pa = Angle(posa[elem])
        pa.wrap_at('180d', inplace=True)
        if pa > 90*u.deg:
            paplus = pa - 180*u.deg
        elif pa < -90*u.deg:
            paplus = pa + 180*u.deg
        else:
            paplus = pa
        deltatime = vec*u.s
        datas1 = data + TimeDelta(deltatime)
        datas1.delta_ut1_utc = 0
        longg = stars[elem].ra - datas1.sidereal_time('mean', 'greenwich')
        centers = EarthLocation(longg, stars[elem].dec, height=0.0*u.m)

        dista = (dist[elem].to(u.m)*np.sin(ca1))
        ax = dista*np.sin(pa) + (deltatime*vel[elem])*np.cos(paplus)
        by = dista*np.cos(pa) - (deltatime*vel[elem])*np.sin(paplus)

        sites = np.loadtxt(sitearq, ndmin=1, delimiter=',',
                           dtype={'names': ('lat', 'lon', 'alt', 'nome', 'offx', 'offy', 'color'),
                                  'formats': ('f8', 'f8', 'f8', 'S30', 'f8', 'f8', 'S30')})
        sss = EarthLocation(sites['lon']*u.deg, sites['lat']*u.deg)

        for i in np.arange(len(sss)):
            xxx,yyy = latlon2xy(sss[i], centers)
            ddd = np.sqrt((ax.value-xxx)**2+(by.value-yyy)**2)
            mii = np.argmin(ddd)
            xxxx = ax.value-xxx
            yyyy = by.value-yyy
            if ddd[mii]/1000.0 < 7000:
                cinst = 'Central Instant: {}\n    Central distance: {:.0f} km'.format(datas1[mii].iso.split()[1], ddd[mii]/1000.0)
            else:
                cinst = 'Not able to observe'
            vv = np.sqrt((xxxx[mii+1]-xxxx[mii])**2 + (yyyy[mii+1]-yyyy[mii])**2)/(1000.0*step)
            dur = ''
            if 'diam' in kwargs.keys():
                dur = 'Duration: Out of the shadow\n'
                if ddd[mii]/1000.0 < kwargs['diam']/2.0:
                    rr = np.sqrt((kwargs['diam']/2.0)**2 - (ddd[mii]/1000.0)**2)
                    w = (rr*2.0)/vv
                    dur = 'Duration: {:.1f} s\n'.format(w)

            print('  Site: {}\n    {}\n    {}'.format(sites[i]['nome'].strip(), cinst, dur))
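In the loop above, an (RA, Dec) offset is decomposed into a change of the closest-approach distance (dca) along the position angle and a time shift (dt) along the shadow track. Below is a stand-alone sketch of that decomposition with astropy units; the offset, position angle, velocity and distance values are made-up illustration numbers.

import numpy as np
import astropy.units as u

off_ra, off_de = 20.0 * u.mas, -5.0 * u.mas  # illustrative object offset
posa = 147.0 * u.deg                         # position angle of the shadow motion
vel = 22.0 * u.km / u.s                      # shadow velocity
dist = 31.0 * u.AU                           # geocentric distance

# Cross-track component shifts the closest-approach distance ...
dca = off_ra * np.sin(posa) + off_de * np.cos(posa)
# ... and the along-track component shifts the instant of closest approach.
dt = ((off_ra * np.cos(posa) - off_de * np.sin(posa)).to(u.rad).value
      * dist.to(u.km) / np.abs(vel))
print(dca.to(u.mas), dt.to(u.s))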