Example #1
    def __init__(self, name, start_time=None, stop_time=None, step_size=None, center=None):
        """

        @rtype: Ephemeris
        @param name: Body to build an ephemeris for
        @param start_time:  start time of the ephemeris
        @type start_time: Time
        @param stop_time: stop time of the ephemeris
        @type stop_time: Time
        @param step_size: size of time step for ephemeris
        @type step_size: Quantity
        """
        self.name = str(name)

        # make sure the input quantities are reasonable.
        if start_time is None:
            start_time = Time.now()
        self._start_time = Time(start_time, scale='utc')

        if stop_time is None:
            stop_time = Time.now() + 1.0 * units.day
        self._stop_time = Time(stop_time, scale='utc')
        step_size = 1 if step_size is None else step_size

        if not isinstance(step_size, Quantity):
            step_size *= units.day
        self.step_size = step_size
        if center is None:
            center = 568
        self._center = center
        self._ephemeris = None
        self._elements = None
        self._current_time = None
        self._data = None
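
A hypothetical instantiation of the constructor above; the class name Ephemeris is taken from the docstring and the body name is illustrative:

from astropy import units
from astropy.time import Time

# Assumed usage of the __init__ above: times may be passed as Time objects
# and the step size as an astropy Quantity; bare numbers default to days.
start = Time('2016-01-01 00:00:00', scale='utc')
eph = Ephemeris('Ceres', start_time=start, stop_time=start + 2.0 * units.day,
                step_size=30 * units.minute)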
Example #2
def test_timezone_convenience_methods():
    location = EarthLocation(-74.0*u.deg, 40.7*u.deg, 0*u.m)
    obs = Observer(location=location,timezone=pytz.timezone('US/Eastern'))
    t = Time(57100.3, format='mjd')
    assert (obs.astropy_time_to_datetime(t).hour == 3)

    dt = datetime.datetime(2015, 3, 19, 3, 12)
    assert (obs.datetime_to_astropy_time(dt).datetime ==
            datetime.datetime(2015, 3, 19, 7, 12))

    assert (obs.astropy_time_to_datetime(obs.datetime_to_astropy_time(dt)).replace(
            tzinfo=None) == dt)

    # Test ndarray of times:
    times = t + np.linspace(0, 24, 10)*u.hour
    times_dt_ndarray = times.datetime
    assert all((obs.datetime_to_astropy_time(times_dt_ndarray)).jd ==
               (times + 4*u.hour).jd)

    # Test list of times:
    times_dt_list = list(times.datetime)
    assert all((obs.datetime_to_astropy_time(times_dt_list)).jd ==
               (times + 4*u.hour).jd)

    dts = obs.astropy_time_to_datetime(times)
    naive_dts = list(map(lambda t: t.replace(tzinfo=None), dts))
    assert all(naive_dts == times_dt_ndarray - datetime.timedelta(hours=4))
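
The astroplan convenience methods exercised above wrap plain astropy calls; a minimal sketch of the underlying API, assuming pytz is available:

import datetime
import pytz
from astropy.time import Time

# Time.to_datetime accepts a tzinfo for local, timezone-aware output,
# and Time() accepts a naive datetime, which it interprets as UTC.
eastern = pytz.timezone('US/Eastern')
t = Time(57100.3, format='mjd')
local = t.to_datetime(timezone=eastern)
back_to_utc = Time(datetime.datetime(2015, 3, 19, 7, 12))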
Example #3
    def _parse_hdus(cls, hdulist):
        header = MetaDict(OrderedDict(hdulist[0].header))
        # these GBM files have three FITS extensions.
        # extn1 - this gives the energy range for each of the 128 energy bins
        # extn2 - this contains the data, e.g. counts, exposure time, time of observation
        # extn3 - eclipse times?
        energy_bins = hdulist[1].data
        count_data = hdulist[2].data
        misc = hdulist[3].data

        # rebin the 128 energy channels into some summary ranges
        # 4-15 keV, 15 - 25 keV, 25-50 keV, 50-100 keV, 100-300 keV, 300-800 keV, 800 - 2000 keV
        # put the data in the units of counts/s/keV
        summary_counts = _bin_data_for_summary(energy_bins, count_data)

        # get the time information in datetime format with the correct MET adjustment
        gbm_times = Time([fermi.met_to_utc(t) for t in count_data['time']])
        gbm_times.precision = 9
        gbm_times = gbm_times.isot.astype('datetime64')

        column_labels = ['4-15 keV', '15-25 keV', '25-50 keV', '50-100 keV',
                         '100-300 keV', '300-800 keV', '800-2000 keV']

        # Add the units data
        units = OrderedDict([('4-15 keV', u.ct / u.s / u.keV), ('15-25 keV', u.ct / u.s / u.keV),
                             ('25-50 keV', u.ct / u.s / u.keV), ('50-100 keV', u.ct / u.s / u.keV),
                             ('100-300 keV', u.ct / u.s / u.keV), ('300-800 keV', u.ct / u.s / u.keV),
                             ('800-2000 keV', u.ct / u.s / u.keV)])
        return pd.DataFrame(summary_counts, columns=column_labels, index=gbm_times), header, units
Example #4
def print_pyephem_parallactic_angle():
    lat = 19.826218*u.deg
    lon = -155.471999*u.deg
    time = Time('2015-01-01 00:00:00')
    LST = time.sidereal_time('mean', longitude=lon)
    desired_HA_1 = 3*u.hourangle
    desired_HA_2 = 19*u.hourangle # = -5*u.hourangle

    obs = ephem.Observer()
    obs.lat = '19:49:34.3848'
    obs.lon = '-155:28:19.1964'
    obs.elevation = 0
    obs.date = time.datetime
    pyephem_target1 = ephem.FixedBody()
    pyephem_target1._ra = ephem.degrees((LST - desired_HA_1).to(u.rad).value)
    pyephem_target1._dec = ephem.degrees((-30*u.deg).to(u.rad).value)
    pyephem_target1.compute(obs)
    pyephem_q1 = (float(pyephem_target1.parallactic_angle())*u.rad).to(u.deg)

    pyephem_target2 = ephem.FixedBody()
    pyephem_target2._ra = ephem.degrees((LST - desired_HA_2).to(u.rad).value)
    pyephem_target2._dec = ephem.degrees((-30*u.deg).to(u.rad).value)
    pyephem_target2.compute(obs)
    pyephem_q2 = (float(pyephem_target2.parallactic_angle())*u.rad).to(u.deg)
    print(pyephem_q1, pyephem_q2)

Example #5
File: test_basic.py Project: MQQ/astropy
    def test_precision(self):
        """Set the output precision which is used for some formats.  This is
        also a test of the code that provides a dict for global and instance
        options."""

        t = Time("2010-01-01 00:00:00", format="iso", scale="utc")
        # Uses initial class-defined precision=3
        assert t.iso == "2010-01-01 00:00:00.000"

        # Set global precision = 5  XXX this uses private var, FIX THIS
        Time._precision = 5
        assert t.iso == "2010-01-01 00:00:00.00000"

        # Set instance precision to 9
        t.precision = 9
        assert t.iso == "2010-01-01 00:00:00.000000000"
        assert t.tai.utc.iso == "2010-01-01 00:00:00.000000000"

        # Restore global to original default of 3, instance is still at 9
        Time._precision = 3
        assert t.iso == "2010-01-01 00:00:00.000000000"

        # Make a new time instance and confirm precision = 3
        t = Time("2010-01-01 00:00:00", format="iso", scale="utc")
        assert t.iso == "2010-01-01 00:00:00.000"
Example #6
    def seek(self, offset):
        """Move filepointers to given offset

        Parameters
        ----------
        offset : float, Quantity, TimeDelta, Time, or str (iso-t)
            If float, in units of bytes
            If Quantity in time units or TimeDelta, interpreted as offset from
                start time, and converted to nearest record
            If Time, calculate offset from start time and convert
        """
        if isinstance(offset, Time):
            offset = offset-self.time0
        elif isinstance(offset, str):
            offset = Time(offset, scale='utc') - self.time0

        try:
            offset = offset.to(self.dtsample.unit)
        except AttributeError:
            pass
        except u.UnitsError:
            offset = int(offset.to(u.byte).value)
        else:
            offset = (offset/self.dtsample).to(u.dimensionless_unscaled)
            offset = int(round(offset)) * self.recordsize
        self._seek(offset)
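
Hypothetical calls to the seek() above, assuming fh is a reader instance exposing the time0, dtsample and recordsize attributes the method relies on:

import astropy.units as u
from astropy.time import Time

fh.seek(4096)                                       # plain offset in bytes
fh.seek(2.5 * u.s)                                  # offset from start, rounded to a record
fh.seek(Time('2014-06-13T05:30:00', scale='utc'))   # absolute time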
Example #7
def get_datetime(from_value):
    """
    Ensure datetime values are in MJD. This is meant to handle any odd formats
    that telescopes have for datetime values.

    Relies on astropy, until astropy fails.

    :param from_value: the value to convert
    :return: astropy Time instance in MJD format, or None if parsing fails
    """
    if from_value is not None:
        try:
            result = Time(from_value)
            result.format = 'mjd'
            return result
        except ValueError:
            try:
                # VLASS has a format astropy fails to understand
                # from datetime import datetime
                result = Time(
                    dt_datetime.strptime(from_value, '%H:%M:%S'))
                result.format = 'mjd'
                return result
            except ValueError:
                logging.error('Cannot parse datetime {}'.format(from_value))
                return None
    else:
        return None
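
A usage sketch for get_datetime(); the input string is illustrative:

from astropy.time import Time

t = get_datetime('2019-04-20T21:47:03.000')
# t is an astropy Time whose format has been switched to 'mjd',
# so t.value is the Modified Julian Date as a float.
print(t.format, t.value)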
Example #8
File: TimeLV.py Project: dgary50/eovsa
 def __init__(
     self,
     val,
     val2=None,
     format=None,
     scale=None,
     precision=None,
     in_subfmt=None,
     out_subfmt=None,
     location=None,
     copy=False,
 ):
     # Extend formats list to include TimeLV (LabVIEW format)
     self.FORMATS[u"lv"] = TimeLV
     astroTime.__init__(
         self,
         val,
         val2,
         format=format,
         scale=scale,
         precision=precision,
         in_subfmt=in_subfmt,
         out_subfmt=out_subfmt,
         location=location,
         copy=copy,
     )
Example #9
def check_moon(file, avoid=30.*u.degree):
    if not isinstance(avoid, u.Quantity):
        avoid = float(avoid)*u.degree
    else:
        avoid = avoid.to(u.degree)

    header = fits.getheader(file)

    mlo = EarthLocation.of_site('Keck Observatory') # Update later
    obstime = Time(header['DATE-OBS'], format='isot', scale='utc', location=mlo)
    moon = get_moon(obstime, mlo)

    if 'RA' in header.keys() and 'DEC' in header.keys():
        coord_string = '{} {}'.format(header['RA'], header['DEC'])
        target = SkyCoord(coord_string, unit=(u.hourangle, u.deg))
    else:
        ## Assume zenith
        target = SkyCoord(obstime.sidereal_time('apparent'), mlo.latitude)

    moon_alt = moon.transform_to(AltAz(obstime=obstime, location=mlo)).alt.to(u.deg)
    if moon_alt < 0*u.degree:
        print('Moon is down')
        return True
    else:
        sep = target.separation(moon)
        print('Moon is up. Separation = {:.1f} deg'.format(sep.to(u.degree).value))
        return (sep > avoid)
Example #10
    def _get_ut1_from_utc(cls, UTC):
        """
        Take a numpy array of UTC values and return a numpy array of UT1 and dut1 values
        """

        time_list = Time(UTC, scale='utc', format='mjd')

        try:
            dut1_out = time_list.delta_ut1_utc
            ut1_out = time_list.ut1.mjd
        except IERSRangeError:
            ut1_out = np.copy(UTC)
            dut1_out = np.zeros(len(UTC))
            warnings.warn("ModifiedJulianData.get_list() was given date values that are outside "
                          "astropy's range of interpolation for converting from UTC to UT1. "
                          "We will treat UT1=UTC for those dates, lacking a better alternative.",
                          category=UTCtoUT1Warning)
            from astropy.utils.iers import TIME_BEFORE_IERS_RANGE, TIME_BEYOND_IERS_RANGE
            dut1_test, status = time_list.get_delta_ut1_utc(return_status=True)
            good_dexes = np.where(np.logical_and(status != TIME_BEFORE_IERS_RANGE,
                                                 status != TIME_BEYOND_IERS_RANGE))

            if len(good_dexes[0]) > 0:
                time_good = Time(UTC[good_dexes], scale='utc', format='mjd')
                dut1_good = time_good.delta_ut1_utc
                ut1_good = time_good.ut1.mjd

                ut1_out[good_dexes] = ut1_good
                dut1_out[good_dexes] = dut1_good

        return ut1_out, dut1_out
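
One way to make the IERSRangeError branch above fire less often is to let astropy fetch up-to-date IERS tables; a sketch, assuming network access is acceptable:

from astropy.time import Time
from astropy.utils import iers

iers.conf.auto_download = True          # allow fresh IERS tables to be downloaded
t = Time(59000.0, scale='utc', format='mjd')
print(t.ut1.mjd, t.delta_ut1_utc)       # UT1 and UT1-UTC for a recent date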
Example #11
def test_strftime_leapsecond():
    time_string = '1995-12-31 23:59:60'
    t = Time(time_string)

    for format in t.FORMATS:
        t.format = format
        assert t.strftime('%Y-%m-%d %H:%M:%S') == time_string
Example #12
def test_strftime_array():
    tstrings = ['2010-09-03 00:00:00', '2005-09-03 06:00:00', '1995-12-31 23:59:60']
    t = Time(tstrings)

    for format in t.FORMATS:
        t.format = format
        assert t.strftime('%Y-%m-%d %H:%M:%S').tolist() == tstrings
Example #13
def gmst2gps(day, GMST, type='mean', iterations=10, precision=1e-14):

    """
    ----------------------------------------------------------------------------
    gps=gmst2gps(day, GMST, type='mean', iterations=10, precision=1e-14)
    returns the GPS time associated with the GMST/GAST on the given day
    type can be 'mean' or 'apparent'
    uses a root-find, so not super fast
    ----------------------------------------------------------------------------
    """

    assert type in ['mean', 'apparent']
    gmst = Longitude(GMST, unit='h')
    t = Time(day, scale='utc')
    iteration = 0

    siderealday_to_solarday = 0.99726958

    while iteration < iterations:
        error = t.sidereal_time(type, 'greenwich') - gmst
        if NP.abs(error/gmst).value <= precision:
            return t.gps
        t = t - TimeDelta(error.hour*u.hour)*siderealday_to_solarday
        iteration += 1
 
    return None
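
A hypothetical call to gmst2gps(), assuming numpy is imported as NP and the astropy names used in the function are in scope:

import numpy as NP
import astropy.units as u
from astropy.coordinates import Longitude
from astropy.time import Time, TimeDelta

# GPS seconds at which Greenwich mean sidereal time reaches 10h20m on 2015-06-01
gps = gmst2gps('2015-06-01', '10:20:00')
print(gps)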
Example #14
def convert_time(analysis):
    
    #analysis is the fits file in the photometry function
    t = Time(analysis[0].header['DATE-OBS'])
    t.format = 'mjd'
    result = t.value
    return result
Example #15
 def setup(self):
     wht = EarthLocation(342.12*u.deg, 28.758333333333333*u.deg, 2327*u.m)
     self.obstime = Time("2013-02-02T23:00", location=wht)
     self.obstime2 = Time("2013-08-02T23:00", location=wht)
     self.obstimeArr = Time(["2013-02-02T23:00", "2013-08-02T23:00"], location=wht)
     self.star = SkyCoord("08:08:08 +32:00:00", unit=(u.hour, u.degree),
                          frame='icrs')
Example #16
    def __init__(self, TAI=None, UTC=None):
        """
        Must specify either:

        @param [in] TAI = the International Atomic Time as an MJD

        or

        @param [in] UTC = Universal Coordinate Time as an MJD
        """

        if TAI is None and UTC is None:
            raise RuntimeError("You must specify either TAI or UTC to "
                               "instantiate ModifiedJulianDate")

        if TAI is not None:
            self._time = Time(TAI, scale='tai', format='mjd')
            self._tai = TAI
            self._utc = None
        else:
            self._time = Time(UTC, scale='utc', format='mjd')
            self._utc = UTC
            self._tai = None

        self._tt = None
        self._tdb = None
        self._ut1 = None
        self._dut1 = None
Example #17
def test_timedelta(fmt, tmpdir):

    t1 = Time(Time.now())
    t2 = Time(Time.now())

    td = TimeDelta(t2 - t1, format=fmt)
    tree = dict(timedelta=td)
    assert_roundtrip_tree(tree, tmpdir)
Example #18
def main():

    parser = argparse.ArgumentParser(formatter_class=argparse.ArgumentDefaultsHelpFormatter)
    parser.add_argument('--ra', type=float, default=25.0,
        help='Right Ascension (degrees)')
    parser.add_argument('--dec', type=float, default=12.0,
        help='Declination (degrees)')
    parser.add_argument('--mjd', type=float, default=55000.0,
        help='Modified Julian Date (days)')
    parser.add_argument('--scan-offset', type=float, default=15.0,
        help='Scan offset (hours)')
    parser.add_argument('--scan-width', type=float, default=4.0,
        help='Scan width (hours)')
    parser.add_argument('--scan-steps', type=int, default=100,
        help='Number of sampling points')
    parser.add_argument('--astropy-apo', action='store_true',
        help='Use apo observatory from astropy')
    args = parser.parse_args()

    if args.astropy_apo:
        sdss = EarthLocation.of_site('apo')
    else:
        sdss = EarthLocation(lat=32.7797556*u.deg, lon=-(105+49./60.+13/3600.)*u.deg, height=2797*u.m)

    coord = SkyCoord(ra=args.ra*u.degree, dec=args.dec*u.degree, frame='icrs')

    # scan of time
    hours = args.scan_offset + np.linspace(-0.5*args.scan_width, 0.5*args.scan_width, args.scan_steps)
    my_alt = np.zeros((hours.size))
    py_alt = np.zeros((hours.size))
    py_ha = np.zeros((hours.size))

    for i in range(hours.size):
        mjd_value = args.mjd*u.day + hours[i]*u.hour
        time = Time(val=mjd_value, scale='tai', format='mjd', location=sdss)
        # altitude from astropy
        py_alt[i] = coord.transform_to(AltAz(obstime=time, location=sdss)).alt.to(u.deg).value
        # this is supposed to be the hour angle from astropy
        py_ha[i] = time.sidereal_time('apparent').to(u.deg).value - args.ra 
        # simple rotation to get alt,az based on ha
        my_alt[i], az = hadec2altaz(py_ha[i], args.dec, sdss.latitude.to(u.deg).value) 
        print(hours[i], py_ha[i], py_alt[i], my_alt[i])

    py_ha = np.array(list(map(normalize_angle, py_ha.tolist())))
    ii = np.argsort(py_ha)
    py_ha=py_ha[ii]
    py_alt=py_alt[ii]
    my_alt=my_alt[ii]

    fig = plt.figure(figsize=(8,6))
    plt.plot(py_ha, py_alt - my_alt, 'o', c='b')
    plt.title('Compare hadec2altaz')
    # plt.title('(ra,dec) = (%.2f,%.2f)' % (args.ra, args.dec))
    plt.xlabel('Hour Angle [deg]')
    plt.ylabel('astropy_alt - rotation_alt [deg]')
    plt.grid(True)
    plt.show()
Example #19
def test_strftime_scalar():
    """Test of Time.strftime
    """
    time_string = '2010-09-03 06:00:00'
    t = Time(time_string)

    for format in t.FORMATS:
        t.format = format
        assert t.strftime('%Y-%m-%d %H:%M:%S') == time_string
Example #20
def get_member_info(object_name, filtertype='r', imagetype='p'):
    """
    Query the ssois ephemeris for images of a given object. Then parse through for desired image type, 
    filter, exposure time, and telescope instrument
    """

    # From the given input, identify the desired filter and rename appropriately                    Replace this?
    if 'r' in filtertype.lower():
        filtertype = 'r.MP9601'  # this is the old (standard) r filter for MegaCam
    if 'u' in filtertype.lower():
        filtertype = 'u.MP9301'

    # Define time period of image search, basically while MegaCam in operation
    search_start_date = Time('2013-01-01', scale='utc')  # epoch1=2013+01+01
    search_end_date = Time('2017-01-01', scale='utc')  # epoch2=2017+1+1

    print "----- Searching for images of object {}".format(object_name)

    query = Query(object_name, search_start_date=search_start_date, search_end_date=search_end_date)
    try:
        objects = parse_ssois_return(query.get(), object_name, imagetype, camera_filter=filtertype)
    except IOError:
        print "Sleeping 30 seconds"
        time.sleep(30)
        objects = parse_ssois_return(query.get(), object_name, imagetype, camera_filter=filtertype)

    # Setup output, label columns
    if len(objects)>0:
        output = '{}/{}_object_images.txt'.format(_OUTPUT_DIR, object_name)
        with open(output, 'w') as outfile:
            outfile.write("{}\t{}\t{}\t{}\t{}\t{}\t{}\t{}\t{}\n".format(
                "Object", "Image", "Exp_time", "RA (deg)", "Dec (deg)", "time", "filter", "RA rate (\"/hr)", "Dec rate (\"/hr)"))

        mjds = []
        for line in objects:
            mjds.append(float(line['MJD'])) #Have to convert elements to floats
        start_time = Time(min(mjds), format='mjd') - 1.0*units.minute
        stop_time = Time(max(mjds), format='mjd') + 1.0*units.minute

        #Query Horizons once to establish position values over the given time period, then give it a current time which it interpolates with
        body = horizons.Body(object_name, start_time=start_time, stop_time=stop_time, step_size=10 * units.minute)

        for line in objects:
            with open(output, 'a') as outfile:
                time = Time(line['MJD'], format='mjd', scale='utc')
                time.format = 'iso'
                body.current_time = time
                p_ra = body.coordinate.ra.degree  
                p_dec = body.coordinate.dec.degree 
                ra_dot = body.ra_rate.to(units.arcsecond/units.hour).value
                dec_dot = body.dec_rate.to(units.arcsecond/units.hour).value
                try:
                    outfile.write("{}\t{}\t{}\t{}\t{}\t{}\t{}\t{}\t{}\n".format(
                        object_name, line['Image'], line['Exptime'], p_ra, p_dec,
                        Time(line['MJD'], format='mjd', scale='utc'), line['Filter'], ra_dot, dec_dot))
                except Exception as e:
                    print("Error writing to outfile: {}".format(e))
Example #21
File: test_basic.py Project: MQQ/astropy
 def test_yday_format(self):
     """Year:Day_of_year format"""
     # Heterogeneous input formats with in_subfmt='*' (default)
     times = ["2000-12-01", "2001-12-01 01:01:01.123"]
     t = Time(times, format="iso", scale="tai")
     t.out_subfmt = "date_hm"
     assert np.all(t.yday == np.array(["2000:336:00:00", "2001:335:01:01"]))
     t.out_subfmt = "*"
     assert np.all(t.yday == np.array(["2000:336:00:00:00.000", "2001:335:01:01:01.123"]))
Example #22
def radec2altaz(ra, dec, obstime, lat=None, long=None, debug=False):
    """
    calculates the altitude and azimuth, given an ra, dec, time, and observatory location
    :param ra: right ascension of the target (in degrees)
    :param dec: declination of the target (in degrees)
    :param obstime: an astropy.time.Time object containing the time of the observation.
                    Can also contain the observatory location
    :param lat: The latitude of the observatory. Not needed if given in the obstime object
    :param long: The longitude of the observatory. Not needed if given in the obstime object
    :return: the target's altitude and azimuth, in degrees
    """

    if lat is None:
        lat = obstime.lat.degree
    if long is None:
        long = obstime.lon.degree
    obstime = Time(obstime.isot, format='isot', scale='utc', location=(long, lat))

    # Find the number of days since J2000
    j2000 = Time("2000-01-01T12:00:00.0", format='isot', scale='utc')
    dt = (obstime - j2000).value  # number of days since J2000 epoch

    # get the UT time
    tstring = obstime.isot.split("T")[-1]
    segments = tstring.split(":")
    ut = float(segments[0]) + float(segments[1]) / 60.0 + float(segments[2]) / 3600

    # Calculate Local Sidereal Time
    lst = obstime.sidereal_time('mean').deg

    # Calculate the hour angle
    HA = lst - ra
    while HA < 0.0 or HA > 360.0:
        s = -np.sign(HA)
        HA += s * 360.0

    # convert everything to radians
    dec *= np.pi / 180.0
    ra *= np.pi / 180.0
    lat *= np.pi / 180.0
    long *= np.pi / 180.0
    HA *= np.pi / 180.0

    # Calculate the altitude
    alt = np.arcsin(np.sin(dec) * np.sin(lat) + np.cos(dec) * np.cos(lat) * np.cos(HA))

    # calculate the azimuth
    az = np.arccos((np.sin(dec) - np.sin(alt) * np.sin(lat)) / (np.cos(alt) * np.cos(lat)))
    if np.sin(HA) > 0:
        az = 2.0 * np.pi - az

    if debug:
        print "UT: ", ut
        print "LST: ", lst
        print "HA: ", HA * 180.0 / np.pi

    return alt * 180.0 / np.pi, az * 180.0 / np.pi
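
A cross-check of the hand-rolled transform above against astropy's own frame machinery (a sketch; refraction and other small effects are ignored, so the two will differ slightly):

import astropy.units as u
from astropy.coordinates import AltAz, EarthLocation, SkyCoord
from astropy.time import Time

loc = EarthLocation(lat=31.96*u.deg, lon=-111.6*u.deg, height=2096*u.m)
obstime = Time('2018-03-01T05:00:00', format='isot', scale='utc', location=loc)
target = SkyCoord(83.82*u.deg, -5.39*u.deg, frame='icrs')
altaz = target.transform_to(AltAz(obstime=obstime, location=loc))
print(altaz.alt.deg, altaz.az.deg)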
Example #23
def test_mask_not_writeable():
    t = Time('2000:001')
    with pytest.raises(AttributeError) as err:
        t.mask = True
    assert "can't set attribute" in str(err)

    t = Time(['2000:001'])
    with pytest.raises(ValueError) as err:
        t.mask[0] = True
    assert "assignment destination is read-only" in str(err)
Example #24
File: test_mpc.py Project: OSSOS/MOP
 def test_time_formatting(self):
     mpc_time = "2000 01 01.000001"
     iso_time = "2000-01-01 00:00:00.0864"
     t1 = Time(mpc_time, format='mpc', scale='utc', precision=6)
     t2 = Time(iso_time, format='iso', scale='utc', precision=6)
     t3 = t2.replicate(format='mpc')
     t3.precision = 6
     self.assertEquals(mpc_time, str(t1))
     self.assertEquals(t2.jd, t1.jd)
     self.assertEquals(mpc_time, str(t3))
Example #25
 def set_lsts_from_time_array(self):
     """Set the lst_array based from the time_array."""
     lsts = []
     curtime = self.time_array[0]
     for ind, jd in enumerate(self.time_array):
         if ind == 0 or not np.isclose(jd, curtime, atol=1e-6, rtol=1e-12):
             curtime = jd
             latitude, longitude, altitude = self.telescope_location_lat_lon_alt_degrees
             t = Time(jd, format='jd', location=(longitude, latitude))
         lsts.append(t.sidereal_time('apparent').radian)
     self.lst_array = np.array(lsts)
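
The per-sample loop above can also be written in vectorized form; a sketch, with an illustrative site location:

import numpy as np
from astropy.time import Time

jd_array = np.array([2457000.1, 2457000.2, 2457000.3])
# Time accepts an array of JDs and a (longitude, latitude) tuple in degrees;
# sidereal_time() then returns an array of apparent LSTs in one call.
t = Time(jd_array, format='jd', location=(21.43, -30.72))
lst_array = t.sidereal_time('apparent').radian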
Example #26
def test_strftime_array_2():
    tstrings = [['1998-01-01 00:00:01', '1998-01-01 00:00:02'],
                ['1998-01-01 00:00:03', '1995-12-31 23:59:60']]
    tstrings = np.array(tstrings)

    t = Time(tstrings)

    for format in t.FORMATS:
        t.format = format
        assert np.all(t.strftime('%Y-%m-%d %H:%M:%S') == tstrings)
        assert t.strftime('%Y-%m-%d %H:%M:%S').shape == tstrings.shape
Example #27
File: test_basic.py Project: MQQ/astropy
 def test_copy(self):
     """Test copy method"""
     t = Time("2000:001", format="yday", scale="tai")
     t_yday = t.yday
     t2 = t.copy()
     assert t.yday == t2.yday
     # This is not allowed publicly, but here we hack the internal time values
     # to show that t and t2 are not sharing references.
     t2._time.jd1 += 100.0
     assert t.yday != t2.yday
     assert t.yday == t_yday  # prove that it did not change
Example #28
def Sidereal(value):  # convert overall GPS time to sidereal time
  result = float()
  tunix = value + 315964800  # GPS time to Unix time
  hour = Time(tunix, format='unix', location=('-68.131389', '-16.353333'))  # defines object "hour" needed in astropy
  Side = str(hour.sidereal_time('mean'))  # transform to sidereal time
  if Side[1]=='h':
    A = Side
    result = (float(A[0])*3600)+(float(A[2:4])*60)+(float(A[5:7]))
  if Side[2]=='h':
    A = Side
    result = (float(A[0:2])*3600)+(float(A[3:5])*60)+(float(A[6:8]))
  return int(result)
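
The string slicing above can be avoided; a sketch of an alternative using astropy's 'gps' time format and the Angle.hour attribute (note that the 'gps' format accounts for leap seconds, so the result can differ slightly from the fixed-offset conversion above):

from astropy.time import Time

def sidereal_seconds(gps_seconds):
    # The 'gps' format removes the manual GPS-to-Unix epoch offset,
    # and .hour converts the returned Longitude to decimal hours.
    t = Time(gps_seconds, format='gps', location=('-68.131389', '-16.353333'))
    return int(t.sidereal_time('mean').hour * 3600)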
Example #29
File: viz.py Project: apodemus/obstools
def get_sid_trans(date, longitude):
    '''Initialize matplotlib transform for local time - sidereal time conversion'''
    midnight = Time(date)   #midnight UTC
    sidmid = midnight.sidereal_time('mean', longitude)
    #offset from local time
    offset = sidmid.hour / 24 
    #used to convert to origin of plot_date coordinates
    p0 = midnight.plot_date
    #A mean sidereal day is 23 hours, 56 minutes, 4.0916 seconds (23.9344699 hours or 0.99726958 mean solar days)
    scale = 366.25 / 365.25
    
    return Affine2D().translate(-p0, 0).scale(scale).translate(p0 + offset, 0)
Example #30
def _(attr, results):
    return set(
        it for it in results
        if
        it.time.end is not None
        and
        attr.min <= Time.strptime(it.time.end, TIMEFORMAT)
        and
        it.time.start is not None
        and
        attr.max >= Time.strptime(it.time.start, TIMEFORMAT)
    )
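
A sketch of how Time.strptime behaves, assuming TIMEFORMAT above is an ordinary strptime pattern:

from astropy.time import Time

# Parse a timestamp string with an explicit format and compare as Time objects.
t = Time.strptime('2021-06-01 12:00:00', '%Y-%m-%d %H:%M:%S')
print(t.isot, t >= Time('2021-06-01'))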
Example #31
def plot_airmass(ra,
                 dec,
                 year,
                 months,
                 days,
                 outfile='plot_airmass.png',
                 date_idx=-1):
    """
    ra =  R.A. value of target (e.g. '17:45:40.04')
    dec = Dec. value of target (e.g. '-29:00:28.12')
    year = int value of year you want to observe
    months = array of months (integers) where each month will have a curve.
    days = array of days (integers), of same length as months.
    observatory = Either 'keck1' or 'keck2'
    date_idx = Index of day to use for twilight dashed lines.  Defaults to the last day.

    Notes:
    Months are 1-based (i.e. 1 = January). Same for days.
    """
    # Setup the target
    target = SkyCoord(ra, dec, unit=(u.hour, u.deg), frame='icrs')

    # Setup local time.
    utc_offset = -7 * u.hour  # Pacific Daylight Time (UTC-7)

    month_labels = [
        'Jan', 'Feb', 'Mar', 'Apr', 'May', 'Jun', 'Jul', 'Aug', 'Sep', 'Oct',
        'Nov', 'Dec'
    ]

    # Observatory (for symbol)
    obs = 'lick'
    lick = EarthLocation.of_site('lick')

    # Labels and colors for different months.
    labels = []
    label_fmt = '{0:s} {1:d}, {2:d} (HST)'
    for ii in range(len(months)):
        label = label_fmt.format(month_labels[months[ii] - 1], days[ii], year)
        labels.append(label)

    colors = ['r', 'b', 'g', 'c', 'm', 'y']
    heights = [1.45, 1.35, 1.25, 1.15]

    # Get sunset and sunrise times on the first specified day
    midnight = Time('{0:d}-{1:d}-{2:d} 00:00:00'.format(
        year, months[date_idx], days[date_idx])) - utc_offset
    delta_midnight = np.arange(-12, 12, 0.01) * u.hour
    times = midnight + delta_midnight
    altaz_frame = AltAz(obstime=times, location=lick)
    sun_altaz = get_sun(times).transform_to(altaz_frame)

    sun_down = np.where(sun_altaz.alt < -0 * u.deg)[0]
    twilite = np.where(sun_altaz.alt < -12 * u.deg)[0]
    sunset = delta_midnight[sun_down[0]].value
    sunrise = delta_midnight[sun_down[-1]].value
    twilite1 = delta_midnight[twilite[0]].value
    twilite2 = delta_midnight[twilite[-1]].value

    # Get the half-night split times
    splittime = twilite1 + ((twilite2 - twilite1) / 2.0)

    print('Sunrise %4.1f   Sunset %4.1f  (hours around midnight HST)' %
          (sunrise, sunset))
    print('12-degr %4.1f  12-degr %4.1f  (hours around midnight HST)' %
          (twilite1, twilite2))

    plt.close(2)
    plt.figure(2, figsize=(7, 7))
    plt.clf()
    plt.subplots_adjust(left=0.15)
    for ii in range(len(days)):
        midnight = Time('{0:d}-{1:d}-{2:d} 00:00:00'.format(
            year, months[ii], days[ii])) - utc_offset
        delta_midnight = np.arange(-7, 7, 0.2) * u.hour
        times = delta_midnight.value

        target_altaz = target.transform_to(
            AltAz(obstime=midnight + delta_midnight, location=lick))

        airmass = target_altaz.secz

        # Trim out junk where target is at "negative airmass"
        good = np.where(airmass > 0)[0]
        times = times[good]
        airmass = airmass[good]

        # Find the points beyond the Nasmyth deck. Also don't bother with anything above sec(z) = 3
        transitTime = times[airmass.argmin()]

        # FIXME: HOW DOES THIS WORK FOR LICK???????
        #        belowDeck = (np.where((times >= transitTime) & (airmass >= 1.8)))[0]
        #        aboveDeck = (np.where(((times >= transitTime) & (airmass < 1.8)) |
        #                              (times < transitTime)))[0]

        belowDeck = (np.where((times >= transitTime) & (airmass >= 1.8)))[0]
        aboveDeck = (np.where(((times >= transitTime) & (airmass < 1.8))
                              | (times < transitTime)))[0]

        print('belowDeck', belowDeck)
        print('aboveDeck', aboveDeck)
        print('times[belowDeck]', times[belowDeck])
        print('times[aboveDeck]', times[aboveDeck])

        plt.plot(times[belowDeck],
                 airmass[belowDeck],
                 colors[ii] + 'o',
                 mfc='w',
                 mec=colors[ii],
                 ms=12)
        plt.plot(times[aboveDeck],
                 airmass[aboveDeck],
                 colors[ii] + 'o',
                 mec=colors[ii],
                 ms=12)
        plt.plot(times, airmass, colors[ii] + '-')

        plt.text(-3.5,
                 airmass[5] + (ii * 0.1) - 0.65,
                 labels[ii],
                 color=colors[ii])

    plt.title('Obs. RA = 18:00, DEC = -30:00 from Lick', fontsize=18)
    plt.xlabel('Local Time in Hours (0 = midnight)', fontsize=16)
    plt.ylabel('Air Mass', fontsize=16)

    loAirmass = 1
    hiAirmass = 3

    # Draw on the 12-degree twilight limits
    plt.axvline(splittime, color='k', linestyle='--')
    plt.axvline(twilite1 + 0.5, color='k', linestyle='--')
    plt.axvline(twilite2, color='k', linestyle='--')

    plt.axis([sunset, sunrise, loAirmass, hiAirmass])
    plt.savefig(outfile)
Example #32
def write_event_file(events, parameters, filename, overwrite=False):
    from astropy.time import Time, TimeDelta
    mylog.info("Writing events to file %s." % filename)

    t_begin = Time.now()
    dt = TimeDelta(parameters["exposure_time"], format='sec')
    t_end = t_begin + dt

    col_x = pyfits.Column(name='X', format='D', unit='pixel', array=events["xpix"])
    col_y = pyfits.Column(name='Y', format='D', unit='pixel', array=events["ypix"])
    col_e = pyfits.Column(name='ENERGY', format='E', unit='eV', array=events["energy"]*1000.)
    col_dx = pyfits.Column(name='DETX', format='D', unit='pixel', array=events["detx"])
    col_dy = pyfits.Column(name='DETY', format='D', unit='pixel', array=events["dety"])
    col_id = pyfits.Column(name='CCD_ID', format='D', unit='pixel', array=events["ccd_id"])

    chantype = parameters["channel_type"]
    if chantype == "PHA":
        cunit = "adu"
    elif chantype == "PI":
        cunit = "Chan"
    col_ch = pyfits.Column(name=chantype.upper(), format='1J', unit=cunit, array=events[chantype])

    col_t = pyfits.Column(name="TIME", format='1D', unit='s', array=events['time'])

    cols = [col_e, col_x, col_y, col_ch, col_t, col_dx, col_dy, col_id]

    coldefs = pyfits.ColDefs(cols)
    tbhdu = pyfits.BinTableHDU.from_columns(coldefs)
    tbhdu.name = "EVENTS"

    tbhdu.header["MTYPE1"] = "sky"
    tbhdu.header["MFORM1"] = "x,y"
    tbhdu.header["MTYPE2"] = "EQPOS"
    tbhdu.header["MFORM2"] = "RA,DEC"
    tbhdu.header["TCTYP2"] = "RA---TAN"
    tbhdu.header["TCTYP3"] = "DEC--TAN"
    tbhdu.header["TCRVL2"] = parameters["sky_center"][0]
    tbhdu.header["TCRVL3"] = parameters["sky_center"][1]
    tbhdu.header["TCDLT2"] = -parameters["plate_scale"]
    tbhdu.header["TCDLT3"] = parameters["plate_scale"]
    tbhdu.header["TCRPX2"] = parameters["pix_center"][0]
    tbhdu.header["TCRPX3"] = parameters["pix_center"][1]
    tbhdu.header["TCUNI2"] = "deg"
    tbhdu.header["TCUNI3"] = "deg"
    tbhdu.header["TLMIN2"] = 0.5
    tbhdu.header["TLMIN3"] = 0.5
    tbhdu.header["TLMAX2"] = 2.0*parameters["num_pixels"]+0.5
    tbhdu.header["TLMAX3"] = 2.0*parameters["num_pixels"]+0.5
    tbhdu.header["TLMIN4"] = parameters["chan_lim"][0]
    tbhdu.header["TLMAX4"] = parameters["chan_lim"][1]
    tbhdu.header["TLMIN6"] = -0.5*parameters["num_pixels"]
    tbhdu.header["TLMAX6"] = 0.5*parameters["num_pixels"]
    tbhdu.header["TLMIN7"] = -0.5*parameters["num_pixels"]
    tbhdu.header["TLMAX7"] = 0.5*parameters["num_pixels"]
    tbhdu.header["EXPOSURE"] = parameters["exposure_time"]
    tbhdu.header["TSTART"] = 0.0
    tbhdu.header["TSTOP"] = parameters["exposure_time"]
    tbhdu.header["HDUVERS"] = "1.1.0"
    tbhdu.header["RADECSYS"] = "FK5"
    tbhdu.header["EQUINOX"] = 2000.0
    tbhdu.header["HDUCLASS"] = "OGIP"
    tbhdu.header["HDUCLAS1"] = "EVENTS"
    tbhdu.header["HDUCLAS2"] = "ACCEPTED"
    tbhdu.header["DATE"] = t_begin.tt.isot
    tbhdu.header["DATE-OBS"] = t_begin.tt.isot
    tbhdu.header["DATE-END"] = t_end.tt.isot
    tbhdu.header["RESPFILE"] = os.path.split(parameters["rmf"])[-1]
    tbhdu.header["PHA_BINS"] = parameters["nchan"]
    tbhdu.header["ANCRFILE"] = os.path.split(parameters["arf"])[-1]
    tbhdu.header["CHANTYPE"] = parameters["channel_type"]
    tbhdu.header["MISSION"] = parameters["mission"]
    tbhdu.header["TELESCOP"] = parameters["telescope"]
    tbhdu.header["INSTRUME"] = parameters["instrument"]
    tbhdu.header["RA_PNT"] = parameters["sky_center"][0]
    tbhdu.header["DEC_PNT"] = parameters["sky_center"][1]
    tbhdu.header["ROLL_PNT"] = parameters["roll_angle"]
    tbhdu.header["AIMPT_X"] = parameters["aimpt_coords"][0]
    tbhdu.header["AIMPT_Y"] = parameters["aimpt_coords"][1]
    if parameters["dither_params"]["dither_on"]:
        tbhdu.header["DITHXAMP"] = parameters["dither_params"]["x_amp"]
        tbhdu.header["DITHYAMP"] = parameters["dither_params"]["y_amp"]
        tbhdu.header["DITHXPER"] = parameters["dither_params"]["x_period"]
        tbhdu.header["DITHYPER"] = parameters["dither_params"]["y_period"]

    start = pyfits.Column(name='START', format='1D', unit='s',
                          array=np.array([0.0]))
    stop = pyfits.Column(name='STOP', format='1D', unit='s',
                         array=np.array([parameters["exposure_time"]]))

    tbhdu_gti = pyfits.BinTableHDU.from_columns([start,stop])
    tbhdu_gti.name = "STDGTI"
    tbhdu_gti.header["TSTART"] = 0.0
    tbhdu_gti.header["TSTOP"] = parameters["exposure_time"]
    tbhdu_gti.header["HDUCLASS"] = "OGIP"
    tbhdu_gti.header["HDUCLAS1"] = "GTI"
    tbhdu_gti.header["HDUCLAS2"] = "STANDARD"
    tbhdu_gti.header["RADECSYS"] = "FK5"
    tbhdu_gti.header["EQUINOX"] = 2000.0
    tbhdu_gti.header["DATE"] = t_begin.tt.isot
    tbhdu_gti.header["DATE-OBS"] = t_begin.tt.isot
    tbhdu_gti.header["DATE-END"] = t_end.tt.isot

    hdulist = [pyfits.PrimaryHDU(), tbhdu, tbhdu_gti]

    pyfits.HDUList(hdulist).writeto(filename, overwrite=overwrite)
Example #33
 def convert_time_pandasSeries(time_string, **kwargs):
     return Time(time_string.tolist(), **kwargs)
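
Usage sketch for the helper above, assuming it is reachable as a plain function:

import pandas as pd
from astropy.time import Time

s = pd.Series(['2020-01-01T00:00:00', '2020-01-02T00:00:00'])
# The Series is converted to a list of strings, which Time parses into a vector Time.
t = convert_time_pandasSeries(s, format='isot', scale='utc')
print(t.mjd)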
Example #34
File: arload.py Project: lwymarie/PYPIT
def load_headers(datlines):
    """
    Load the header information for each fits file

    Parameters
    ----------
    datlines : list
      Input (uncommented) lines specified by the user.
      datlines contains the full data path to every
      raw exposure listed by the user.

    Returns
    -------
    fitsdict : dict
      The relevant header information of all fits files
    """
    def generate_updates(dct, keylst, keys, whddict, headarr):
        """ Generate a list of settings to be updated
        """
        for (key, value) in iteritems(dct):
            keys += [str(key)]
            if isinstance(value, dict):
                generate_updates(value, keylst, keys, whddict, headarr)
            else:
                try:
                    tfrhd = int(value.split('.')[0]) - 1
                    kchk = '.'.join(value.split('.')[1:])
                    frhd = whddict['{0:02d}'.format(tfrhd)]
                    hdrval = headarr[frhd][kchk]
                    if keys[0] not in ["check", "keyword"]:
                        keylst += [
                            str(' ').join(keys) + str(" ") +
                            str("{0}\n".format(hdrval).replace(" ", ""))
                        ]
                        keylst[-1] = keylst[-1].split()
                except (AttributeError, ValueError, KeyError):
                    pass
            del keys[-1]

    chks = settings.spect['check'].keys()
    keys = settings.spect['keyword'].keys()
    fitsdict = dict({'directory': [], 'filename': [], 'utc': []})
    whddict = dict({})
    for k in keys:
        fitsdict[k] = []
    allhead = []
    headarr = [None for k in range(settings.spect['fits']['numhead'])]
    numfiles = len(datlines)
    for i in range(numfiles):
        # Try to open the fits file
        try:
            for k in range(settings.spect['fits']['numhead']):
                headarr[k] = pyfits.getheader(
                    datlines[i],
                    ext=settings.spect['fits']['headext{0:02d}'.format(k + 1)])
                whddict['{0:02d}'.format(
                    settings.spect['fits']['headext{0:02d}'.format(k +
                                                                   1)])] = k
        except:
            if settings.argflag['run']['setup']:
                msgs.warn("Bad header in extension {0:d} of file:".format(
                    settings.spect['fits']['headext{0:02d}'.format(k + 1)]) +
                          msgs.newline() + datlines[i])
                msgs.warn(
                    "Proceeding on the hopes this was a calibration file, otherwise consider removing."
                )
            else:
                msgs.error("Error reading header from extension {0:d} of file:"
                           .format(settings.spect['fits'][
                               'headext{0:02d}'.format(k + 1)]) +
                           msgs.newline() + datlines[i])
        # Save
        for k in range(settings.spect['fits']['numhead']):
            tmp = [head.copy() for head in headarr]
            allhead.append(tmp)
        # Perform checks on each fits files, as specified in the settings.instrument file.
        skip = False
        for ch in chks:
            tfrhd = int(ch.split('.')[0]) - 1
            kchk = '.'.join(ch.split('.')[1:])
            frhd = whddict['{0:02d}'.format(tfrhd)]
            if settings.spect['check'][ch] != str(headarr[frhd][kchk]).strip():
                print(ch, frhd, kchk)
                print(settings.spect['check'][ch],
                      str(headarr[frhd][kchk]).strip())
                msgs.warn(
                    "The following file:" + msgs.newline() + datlines[i] +
                    msgs.newline() +
                    "is not taken with the settings.{0:s} detector".format(
                        settings.argflag['run']['spectrograph']) +
                    msgs.newline() +
                    "Remove this file, or specify a different settings file.")
                msgs.warn("Skipping the file..")
                skip = True
        if skip:
            numfiles -= 1
            continue
        # Now set the key values for each of the required keywords
        dspl = datlines[i].split('/')
        fitsdict['directory'].append('/'.join(dspl[:-1]) + '/')
        fitsdict['filename'].append(dspl[-1])
        # Attempt to load a UTC
        utcfound = False
        for k in range(settings.spect['fits']['numhead']):
            if 'UTC' in headarr[k].keys():
                utc = headarr[k]['UTC']
                utcfound = True
                break
            elif 'UT' in headarr[k].keys():
                utc = headarr[k]['UT']
                utcfound = True
                break
        if utcfound:
            fitsdict['utc'].append(utc)
        else:
            fitsdict['utc'].append(None)
            msgs.warn("UTC is not listed as a header keyword in file:" +
                      msgs.newline() + datlines[i])
        # Read binning-dependent detector properties here? (maybe read speed too)
        #if settings.argflag['run']['spectrograph'] in ['keck_lris_blue']:
        #    arlris.set_det(fitsdict, headarr[k])
        # Now get the rest of the keywords
        for kw in keys:
            if settings.spect['keyword'][kw] is None:
                value = str(
                    'None')  # This instrument doesn't have/need this keyword
            else:
                ch = settings.spect['keyword'][kw]
                try:
                    tfrhd = int(ch.split('.')[0]) - 1
                except ValueError:
                    value = ch  # Keyword given a value. Only a string allowed for now
                else:
                    frhd = whddict['{0:02d}'.format(tfrhd)]
                    kchk = '.'.join(ch.split('.')[1:])
                    try:
                        value = headarr[frhd][kchk]
                    except KeyError:  # Keyword not found in header
                        msgs.warn(
                            "{:s} keyword not in header. Setting to None".
                            format(kchk))
                        value = str('None')
            # Convert the input time into hours
            if kw == 'time':
                if settings.spect['fits']['timeunit'] == 's':
                    value = float(value) / 3600.0  # Convert seconds to hours
                elif settings.spect['fits']['timeunit'] == 'm':
                    value = float(value) / 60.0  # Convert minutes to hours
                elif settings.spect['fits']['timeunit'] in Time.FORMATS.keys(
                ):  # Astropy time format
                    if settings.spect['fits']['timeunit'] in ['mjd']:
                        ival = float(value)
                    else:
                        ival = value
                    tval = Time(ival,
                                scale='tt',
                                format=settings.spect['fits']['timeunit'])
                    # dspT = value.split('T')
                    # dy,dm,dd = np.array(dspT[0].split('-')).astype(np.int)
                    # th,tm,ts = np.array(dspT[1].split(':')).astype(np.float64)
                    # r=(14-dm)/12
                    # s,t=dy+4800-r,dm+12*r-3
                    # jdn = dd + (153*t+2)/5 + 365*s + s/4 - 32083
                    # value = jdn + (12.-th)/24 + tm/1440 + ts/86400 - 2400000.5  # THIS IS THE MJD
                    value = tval.mjd * 24.0  # Put MJD in hours
                else:
                    msgs.error('Bad time unit')
            # Put the value in the keyword
            typv = type(value)
            if typv is int or typv is np.int_:
                fitsdict[kw].append(value)
            elif typv is float or typv is np.float_:
                fitsdict[kw].append(value)
            elif isinstance(value, basestring) or typv is np.string_:
                fitsdict[kw].append(value.strip())
            elif typv is bool or typv is np.bool_:
                fitsdict[kw].append(value)
            else:
                msgs.bug(
                    "I didn't expect a useful header ({0:s}) to contain type {1:s}"
                    .format(kw, typv).replace('<type ', '').replace('>', ''))

        msgs.info("Successfully loaded headers for file:" + msgs.newline() +
                  datlines[i])

    # Check if any other settings require header values to be loaded
    msgs.info("Checking spectrograph settings for required header information")
    # Just use the header info from the last file
    keylst = []
    generate_updates(settings.spect.copy(), keylst, [], whddict, headarr)

    # Convert the fitsdict arrays into numpy arrays
    for k in fitsdict.keys():
        fitsdict[k] = np.array(fitsdict[k])
    msgs.info("Headers loaded for {0:d} files successfully".format(numfiles))
    if numfiles != len(datlines):
        msgs.warn("Headers were not loaded for {0:d} files".format(
            len(datlines) - numfiles))
    if numfiles == 0:
        msgs.error("The headers could not be read from the input data files." +
                   msgs.newline() +
                   "Please check that the settings file matches the data.")
    # Return
    fitsdict['headers'] = allhead
    return fitsdict, keylst
Example #35
names = [
    'mjd', 'sun_RA', 'sun_dec', 'sun_alt', 'sun_az', 'moon_RA', 'moon_dec',
    'moon_alt', 'moon_az', 'moon_phase'
]
types = [float] * len(names)

sun_moon_info = np.zeros(mjds.size, dtype=list(zip(names, types)))
sun_moon_info['mjd'] = np.arange(mjd_start - pad_around,
                                 duration + mjd_start + pad_around + t_step,
                                 t_step)

site = Site('LSST')
location = EarthLocation(lat=site.latitude,
                         lon=site.longitude,
                         height=site.height)
t_sparse = Time(mjds, format='mjd', location=location)

sun = get_sun(t_sparse)
aa = AltAz(location=location, obstime=t_sparse)
sun_aa = sun.transform_to(aa)

moon = get_moon(t_sparse)
moon_aa = moon.transform_to(aa)

sun_moon_info['sun_RA'] = sun.ra.rad
sun_moon_info['sun_dec'] = sun.dec.rad

sun_moon_info['sun_alt'] = sun_aa.alt.rad
sun_moon_info['sun_az'] = sun_aa.az.rad

sun_moon_info['moon_RA'] = moon.ra.rad
Example #36
def target_post_save(target, created):
    def get(objectId):
        url = 'https://mars.lco.global/'
        request = {'queries': [{'objectId': objectId}]}

        try:
            r = requests.post(url, json=request)
            results = r.json()['results'][0]['results']
            return results

        except Exception as e:
            return [None, 'Error message : \n' + str(e)]

    logger.info('Target post save hook: %s created: %s', target, created)

    ztf_name = next((name for name in target.names if 'ZTF' in name), None)
    if ztf_name:
        alerts = get(ztf_name)

        filters = {1: 'g_ZTF', 2: 'r_ZTF', 3: 'i_ZTF'}
        for alert in alerts:
            if all([
                    key in alert['candidate']
                    for key in ['jd', 'magpsf', 'fid', 'sigmapsf']
            ]):
                jd = Time(alert['candidate']['jd'], format='jd', scale='utc')
                jd.to_datetime(timezone=TimezoneInfo())
                value = {
                    'magnitude': alert['candidate']['magpsf'],
                    'filter': filters[alert['candidate']['fid']],
                    'error': alert['candidate']['sigmapsf']
                }
                rd, created = ReducedDatum.objects.get_or_create(
                    timestamp=jd.to_datetime(timezone=TimezoneInfo()),
                    value=value,
                    source_name=target.name,
                    source_location=alert['lco_id'],
                    data_type='photometry',
                    target=target)
                rd.save()

    gaia_name = next((name for name in target.names if 'Gaia' in name), None)
    if gaia_name:
        base_url = 'http://gsaweb.ast.cam.ac.uk/alerts/alert'
        lightcurve_url = f'{base_url}/{gaia_name}/lightcurve.csv'

        response = requests.get(lightcurve_url)
        data = response._content.decode('utf-8').split('\n')[2:-2]

        jd = [x.split(',')[1] for x in data]
        mag = [x.split(',')[2] for x in data]

        for i in reversed(range(len(mag))):
            try:
                datum_mag = float(mag[i])
                datum_jd = Time(float(jd[i]), format='jd', scale='utc')
                value = {
                    'magnitude': datum_mag,
                    'filter': 'G_Gaia',
                    'error': 0  # for now
                }
                rd, created = ReducedDatum.objects.get_or_create(
                    timestamp=datum_jd.to_datetime(timezone=TimezoneInfo()),
                    value=value,
                    source_name=target.name,
                    source_location=lightcurve_url,
                    data_type='photometry',
                    target=target)
                rd.save()
            except:
                pass

    ### Craig custom code starts here:
    ### ----------------------------------
        _snex1_address = 'mysql://{}:{}@localhost:3306/supernova'.format(
            os.environ['SNEX1_DB_USER'], os.environ['SNEX1_DB_PASSWORD'])

        with _get_session(db_address=_snex1_address) as db_session:
            Targets = _load_table('targets', db_address=_snex1_address)
            Targetnames = _load_table('targetnames', db_address=_snex1_address)
            if created == True:
                # Insert into SNex 1 db
                db_session.add(
                    Targets(ra0=target__ra,
                            dec0=target__dec,
                            lastmodified=target__modified,
                            datecreated=target__created))
                db_session.add(
                    Targetnames(targetid=target__id,
                                name=target__name,
                                datecreated=target__created,
                                lastmodified=target__modified))
            elif created == False:
                # Update in SNex 1 db
                db_session.query(Targets).filter(
                    target__id == Targets__id).update({
                        'ra0':
                        target__ra,
                        'dec0':
                        target__dec,
                        'lastmodified':
                        target__modified,
                        'datecreated':
                        target__created
                    })
                db_session.add(
                    Targetnames(targetid=target__id,
                                name=target__name,
                                datecreated=target__created,
                                lastmodified=target__modified))
            db_session.commit()
Example #37
 def refine(estimate):
     crot_estimate = carrington_rotation_number(
         t=Time(estimate, scale='tt', format='jd'))
     dcrot = crot - crot_estimate
     # Correct the estimate using a linear fraction of the Carrington rotation period
     return estimate + (dcrot * _CARRINGTON_ROTATION_PERIOD)
def mean_to_eccentric(Me, ecc):
    '''A simple Newton-Raphson root finder method to solve Kepler's
        equation for the eccentric anomaly.
        Me: mean anomaly
        ecc: eccentricity '''

    E = Me
    for i in range(10):
        E = E - (E - ecc * m.sin(E) - Me) / (1 - ecc * m.cos(E))
    return E


#The epoch (in various time systems) of the given Apophis coordinates
# from Giorgini(2008), September 1 2006 midnight
epoch_JD = 2453979.5
epoch_JD2000_s = (epoch_JD - 2451545.0) * 86400
epoch_time = Time(epoch_JD, format="jd")

#Apophis coordinates, plus calculation of the other anomalies.
a = 0.9222654975186300 * au.value  #in metres
e = 0.1910573105
i = 3.33132242244163  #All in DEGREES
asc_node = 204.45996801109067
a_of_p = 126.39643948747843
mean_anomaly = 61.41677858002747

eccentric_anomaly = mean_to_eccentric(m.radians(mean_anomaly), e)  #in radians

true_anomaly_r = 2 * m.atan(
    ((1 + e) / (1 - e))**.5 * m.tan(eccentric_anomaly / 2))
true_anomaly = m.degrees(true_anomaly_r)
'''Conversion 1: TwoBodyOrbit package'''
 def __init__(self, value, add_value=None, form='iso'):
     self.__time = Time(value, add_value, format=form)
Example #40
code = 'OH'
list_filt = {
'r cousins': 'R',
'v cousins': 'V'
}

###############################################################

f = open(arquivo, 'r')

dados = f.readlines()

f.close()

erros = np.loadtxt(table, usecols=(2, 3, 25), dtype={'names': ('ra', 'dec', 'time'), 'formats': ('f8', 'f8', 'f16')})
datasgeral = Time(erros['time'], format='jd', scale='utc')

dif = TimeDelta(60*60*6, format='sec')

g = open(output, 'w')

for i in dados:
    coord = SkyCoord(i[252:280], frame='icrs', unit=(u.hourangle,u.deg))
    time = np.float(i[303:320].strip())
    epoch = Time(time, scale='utc', format='jd')
    mag = np.float(i[79:86].strip())
    filtro = i[326:348].strip().lower()
    if filtro not in list_filt.keys():
        list_filt[filtro] = 'NAN'
        list_novo[filtro] = 'NAN'
    filt = list_filt[filtro]
Example #41
star = []
ra = []
dec = []
for i in range(len(ff)):
    k = ff[i].split()
    star.append(k[0])
    ra.append(float(k[1]))
    dec.append(float(k[2]))

ctime = dt.utcnow()
coord = SkyCoord(ra,
                 dec,
                 unit="deg",
                 frame="fk5",
                 obstime=Time(ctime),
                 location=nanten2)
planet_altaz = []
for i in planet_list:
    planet_coord = get_body(i, time=Time(ctime))
    planet_coord.location = nanten2
    planet_altaz.append(planet_coord.altaz)

altaz = coord.transform_to(AltAz)
print("*** rising star list [deg] ***")
for i in range(len(altaz)):
    if 20 < altaz[i].alt.deg < 80:
        print(star[i], " " * (11 - len(star[i])), ": (az, el) = (",
              round(altaz[i].az.deg, 4), ", ", round(altaz[i].alt.deg, 4), ")")
print("==============================")
for i in range(len(planet_altaz)):
Example #42
@given(time_attr())
def test_can_handle_query(time):
    LCClient = norh.NoRHClient()
    ans1 = LCClient._can_handle_query(time, a.Instrument.norh)
    assert ans1 is True
    ans1 = LCClient._can_handle_query(time, a.Instrument.norh,
                                      a.Wavelength(10*u.GHz))
    assert ans1 is True
    ans2 = LCClient._can_handle_query(time)
    assert ans2 is False


@pytest.mark.remote_data
@pytest.mark.parametrize("wave", [a.Wavelength(17*u.GHz), a.Wavelength(34*u.GHz)])
@given(time=range_time(Time('1992-6-1')))
@settings(max_examples=2, deadline=50000)
def test_query(time, wave):
    LCClient = norh.NoRHClient()
    qr1 = LCClient.search(time, a.Instrument.norh, wave)
    assert isinstance(qr1, QueryResponse)
    # Not all hypothesis queries are going to produce results.
    if qr1:
        # There are no observations every day,
        #  so the results found have to be equal to or later than the queried start time
        #  (comparing dates only, because the query may ask for milliseconds but only the date is available)
        assert qr1.time_range().start.strftime('%Y-%m-%d') >= time.start.strftime('%Y-%m-%d')
        #  and the end time must be equal to or earlier than the queried end.
        # hypothesis can give the same start and end, but the query returns results through the end of that day (hence +1 day)
        assert qr1.time_range().end <= time.end + TimeDelta(1*u.day)
Example #43
def calc_phasecenter_from_solxy(vis,
                                timerange='',
                                xycen=None,
                                usemsphacenter=True):
    '''
    return the phase center in RA and DEC of a given solar coordinates

    :param vis: input measurement sets file
    :param timerange: can be a string or astropy.time.core.Time object, or a 2-element list of string or Time object
    :param xycen:  solar x-pos and y-pos in arcsec
    :param usemsphacenter:
    :return:
    phasecenter
    midtim: mid time of the given timerange
    '''
    tb.open(vis + '/POINTING')
    tst = Time(tb.getcell('TIME_ORIGIN', 0) / 24. / 3600., format='mjd')
    ted = Time(tb.getcell('TIME_ORIGIN',
                          tb.nrows() - 1) / 24. / 3600.,
               format='mjd')
    tb.close()
    datstr = tst.iso[:10]

    if isinstance(timerange, Time):
        try:
            (sttim, edtim) = timerange
        except:
            sttim = timerange
            edtim = sttim
    else:
        if timerange == '':
            sttim = tst
            edtim = ted
        else:
            try:
                (tstart, tend) = timerange.split('~')
                if tstart[2] == ':':
                    sttim = Time(datstr + 'T' + tstart)
                    edtim = Time(datstr + 'T' + tend)
                    # timerange = '{0}/{1}~{0}/{2}'.format(datstr.replace('-', '/'), tstart, tend)
                else:
                    sttim = Time(qa.quantity(tstart, 'd')['value'],
                                 format='mjd')
                    edtim = Time(qa.quantity(tend, 'd')['value'], format='mjd')
            except:
                try:
                    if timerange[2] == ':':
                        sttim = Time(datstr + 'T' + timerange)
                        edtim = sttim
                    else:
                        sttim = Time(qa.quantity(timerange, 'd')['value'],
                                     format='mjd')
                        edtim = sttim
                except ValueError:
                    print("keyword 'timerange' in wrong format")

    ms.open(vis)
    metadata = ms.metadata()
    observatory = metadata.observatorynames()[0]
    ms.close()

    midtim_mjd = (sttim.mjd + edtim.mjd) / 2.
    midtim = Time(midtim_mjd, format='mjd')
    eph = read_horizons(t0=midtim)
    if observatory == 'EOVSA' or (not usemsphacenter):
        print('Using the phase center recorded in the FIELD table (EOVSA data or usemsphacenter=False)')
        # use RA and DEC from FIELD ID 0
        tb.open(vis + '/FIELD')
        phadir = tb.getcol('PHASE_DIR').flatten()
        tb.close()
        ra0 = phadir[0]
        dec0 = phadir[1]
    else:
        ra0 = eph['ra'][0]
        dec0 = eph['dec'][0]

    if not xycen:
        # use solar disk center as default
        phasecenter = 'J2000 ' + str(ra0) + 'rad ' + str(dec0) + 'rad'
    else:
        x0 = np.radians(xycen[0] / 3600.)
        y0 = np.radians(xycen[1] / 3600.)
        p0 = np.radians(eph['p0'][0])  # p angle in radians
        raoff = -((x0) * np.cos(p0) - y0 * np.sin(p0)) / np.cos(eph['dec'][0])
        decoff = (x0) * np.sin(p0) + y0 * np.cos(p0)
        newra = ra0 + raoff
        newdec = dec0 + decoff
        phasecenter = 'J2000 ' + str(newra) + 'rad ' + str(newdec) + 'rad'
    return phasecenter, midtim
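# Hypothetical usage sketch (not part of the original module; it requires the CASA
# tb/ms/qa tools, read_horizons, and an existing measurement set, here named 'sun.ms'):
#
#   phasecenter, midtime = calc_phasecenter_from_solxy('sun.ms',
#                                                      timerange='16:00:00~16:10:00',
#                                                      xycen=[-750., 250.])
#   print(phasecenter)     # 'J2000 <ra>rad <dec>rad'
#   print(midtime.iso)     # mid time of the supplied timerange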
示例#44
0
def init_times():
    """Generates the initial times"""
    return Time("2020-04-10T00:00:00", scale="utc") + np.arange(1, 4) * TimeDelta(
        1, format="jd"
    )
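# Quick check of the helper above: init_times() returns the three epochs
# 2020-04-11, 2020-04-12 and 2020-04-13, each at 00:00:00 UTC
# (the reference time 2020-04-10T00:00:00 shifted by 1, 2 and 3 days).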
示例#45
0
# Copyright (C) 2020 Egemen Imre
#
# Licensed under GNU GPL v3.0. See LICENSE.rst for more info.
"""
Test `TimeInterval` class and associated methods and functionalities.

"""
import numpy as np
import pytest
from astropy import units as u
from astropy.time import Time, TimeDelta

from satmad.utils.timeinterval import _EPS_TIME, TimeInterval, TimeIntervalList

before = TimeInterval(
    Time("2020-04-09T00:00:00", scale="utc"),
    Time("2020-04-11T00:00:00", scale="utc"),
    end_inclusive=False,
)
within = TimeInterval(
    Time("2020-04-11T00:05:00", scale="utc"), Time("2020-04-11T00:08:00", scale="utc")
)
intersect = TimeInterval(
    Time("2020-04-10T00:00:00", scale="utc"), Time("2020-04-11T00:08:00", scale="utc")
)
exact = TimeInterval(
    Time("2020-04-11T00:00:00", scale="utc"), Time("2020-04-11T00:10:00", scale="utc")
)
after = TimeInterval(
    Time("2020-04-11T00:10:00", scale="utc"), Time("2020-04-12T00:00:00", scale="utc")
)
示例#46
0
def simulate_a_telescope(name,
                         altitude,
                         longitude,
                         latitude,
                         filter,
                         time_start,
                         time_end,
                         sampling,
                         event,
                         location,
                         bad_weather_percentage=0.0,
                         minimum_alt=20,
                         moon_windows_avoidance=20,
                         maximum_moon_illumination=100.0):
    """ Simulate a telescope. More details in the telescopes module. The observations simulation are made for the
        full time windows, then limitation are applied :
            - Sun has to be below horizon : Sun< -18
            - Moon has to be more than the moon_windows_avoidance distance from the target
            - Observations altitude of the target have to be bigger than minimum_alt

    :param str name:  the name of the telescope.
    :param float altitude: the altitude in meters of the telescope
    :param float longitude: the longitude in degree of the telescope location
    :param float latitude: the latitude in degree of the telescope location
    :param str filter: the filter used for observations
    :param float time_start: the start of observations in JD
    :param float time_end: the end of observations in JD
    :param float sampling: the sampling interval in hours.
    :param object event: the microlensing event you look at
    :param str location: the location of the telescope. If it is 'Space', then the observations are made
                             continuously given the observing windows and the sampling.
    :param float bad_weather_percentage: the percentage of bad nights
    :param float minimum_alt: the minimum altitude in degrees that your telescope can observe at.
    :param float moon_windows_avoidance: the minimum distance in degrees accepted between the target and the Moon
    :param float maximum_moon_illumination: the maximum Moon brightness you allow in percentage

    :return: a telescope object
    :rtype: object
    """

    # fake lightcurve
    if location != 'Space':
        earth_location = EarthLocation(lon=longitude * astropy.units.deg,
                                       lat=latitude * astropy.units.deg,
                                       height=altitude * astropy.units.m)

        target = SkyCoord(event.ra, event.dec, unit='deg')

        minimum_sampling = min(4.0, sampling)
        ratio_sampling = np.round(sampling / minimum_sampling)

        time_of_observations = time_simulation(time_start, time_end,
                                               minimum_sampling,
                                               bad_weather_percentage)

        time_convertion = Time(time_of_observations, format='jd').isot

        telescope_altaz = target.transform_to(
            AltAz(obstime=time_convertion, location=earth_location))
        altazframe = AltAz(obstime=time_convertion, location=earth_location)
        Sun = get_sun(Time(time_of_observations,
                           format='jd')).transform_to(altazframe)
        Moon = get_moon(Time(time_of_observations,
                             format='jd')).transform_to(altazframe)
        Moon_illumination = moon_illumination(Sun, Moon)
        Moon_separation = target.separation(Moon)
        observing_windows = np.where(
            (telescope_altaz.alt > minimum_alt * astropy.units.deg)
            & (Sun.alt < -18 * astropy.units.deg)
            & (Moon_separation > moon_windows_avoidance * astropy.units.deg)
            & (Moon_illumination < maximum_moon_illumination))[0]

        time_of_observations = time_of_observations[observing_windows]

    else:

        time_of_observations = np.arange(time_start, time_end,
                                         sampling / (24.0))

    lightcurveflux = np.ones((len(time_of_observations), 3)) * 42
    lightcurveflux[:, 0] = time_of_observations

    telescope = telescopes.Telescope(name=name,
                                     camera_filter=filter,
                                     light_curve_flux=lightcurveflux)

    return telescope
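# Standalone sketch (not part of the original module) of the visibility cuts described
# in the docstring above, using astropy only; the site, target and thresholds below are
# made-up placeholders, and the Moon-illumination cut (a local helper above) is omitted.
import numpy as np
import astropy.units as u
from astropy.time import Time
from astropy.coordinates import AltAz, EarthLocation, SkyCoord, get_moon, get_sun

site = EarthLocation(lon=-70.8 * u.deg, lat=-30.2 * u.deg, height=2200 * u.m)
target = SkyCoord(268.0, -29.0, unit='deg')
times = Time('2020-04-10T00:00:00') + np.arange(0, 24, 0.25) * u.hour

frame = AltAz(obstime=times, location=site)
target_altaz = target.transform_to(frame)
sun_altaz = get_sun(times).transform_to(frame)
moon = get_moon(times, location=site)

observable = ((target_altaz.alt > 20 * u.deg)            # target above minimum altitude
              & (sun_altaz.alt < -18 * u.deg)             # astronomical night
              & (target.separation(moon) > 20 * u.deg))   # Moon avoidance window
print(times[observable])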
示例#47
0
def jd_to_date(jd):
    time = Time(jd, scale='utc', format='jd')
    return time.isot
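# e.g. jd_to_date(2459000.5) -> '2020-05-31T00:00:00.000'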
示例#48
0
def convert_time_npdatetime64(time_string, **kwargs):
    return Time(str(time_string.astype('M8[ns]')), **kwargs)
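# e.g. (illustrative, assuming numpy is imported as np):
#   convert_time_npdatetime64(np.datetime64('2020-04-10T12:00:00')).isot
#   -> '2020-04-10T12:00:00.000'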
示例#49
0
def moon_table(site, solar_midnight, utc, lst, verbose = False):
    """
    Compute Moon data for scheduling period and return '~astropy.table' object.

    Example Table
    -------------
    >>> import astropy.units as u
    >>> from astroplan import Observer
    >>> site = Observer.at_site('gemini_south')
    >>> start = '2018-07-01'
    >>> utc_to_local = -4 * u.h
    >>> timedata = timetable(site, start, utc_to_local)
    >>> print(moon_table(site, timedata['solar_midnight'].data, timedata['utc'].data, timedata['lst'].data))

        fraction        phase         ra_mid       dec_mid              ra [118]                     dec [118]
                         rad           deg           deg                  deg                           deg
    --------------- ------------- ------------- ------------- ---------------------------- --------------------------
    0.0180504202392 2.87207394213 129.042250359 18.8745316663 124.46942338 .. 133.95157917 19.7627882458 .. 18.410637...

                      ZD [118]                        HA [118]                       AZ [118]
                       deg                          hourangle                         rad
       ------------------------------ ------------------------------- ------------------------------
    ...91.7509852667 .. 118.682184103 5.35237289874 .. -7.54773653324 5.09386841498 .. 1.47422033296

    Parameters
    ----------
    site : '~astroplan.Observer'
        observatory site object

    solar_midnight : '~astropy.time.core.Time' array
        Solar midnight times for nights in scheduling period.

    utc : '~astropy.time.core.Time' arrays
        UTC time grids for the nights in the scheduling period, in a format accepted by '~astropy.time'.

    lst : arrays of floats
        Local sidereal times (as hour angles) along the time grids of the nights in the scheduling period.

    Returns
    -------
    '~astropy.table.Table'
        Table of Moon data with rows corresponding to nights in scheduling period.

        Columns
        -------
        fraction : float
            fraction of moon illuminated at solar midnight on nights of scheduling period.

        phase : float (with radian table quantity)
            moon phase angle at solar midnight on nights of scheduling period.

        ra_mid : float (with degree table quantity)
            right ascension at solar midnight on nights of scheduling period.

        dec_mid : float (with degree table quantity)
            declination at solar midnight on nights of scheduling period.

        ra : arrays of float (with degree table quantity)
            right ascensions along time grids.

        dec : arrays of float (with degree table quantity)
            declinations along time grids.

        ZD : arrays of float (with degree table quantity)
            zenith distances along time grids.

        HA : arrays of float (with hourangle table quantity)
            hour angles along time grids.

        AZ : arrays of float (with radian table quantity)
            azimuth angles along time grids.

        AM : arrays of float
            air masses along time grids.
    """

    i_day = np.arange(len(solar_midnight))

    # sun_horiz = sun_horizon(site)  # angle from zenith at rise/set
    # set = Column(Parallel(n_jobs=10)(delayed(get_moon_set_time)
    #                                  (site, solar_midnight[i], horizon=sun_horiz) for i in i_day), name='set')
    # rise = Column(Parallel(n_jobs=10)(delayed(get_moon_rise_time)
    #                                   (site, solar_midnight[i], horizon=sun_horiz) for i in i_day), name='rise')

    ncpu = cpu_count()
    fraction = Column(Parallel(n_jobs=ncpu)(delayed(get_moon_fraction)(site, solar_midnight[i]) for i in i_day),
                      name='fraction')

    phase = Column(Parallel(n_jobs=ncpu)(delayed(get_moon_phase)(site, solar_midnight[i], degree=True) for i in i_day), name='phase',
                   unit='deg')

    moon_midnight = Parallel(n_jobs=ncpu)(delayed(get_moon)(solar_midnight[i], location=site.location) for i in i_day)
    ra_mid = Column([moon_midnight[i].ra.value for i in i_day], name='ra_mid', unit='deg')
    dec_mid = Column([moon_midnight[i].dec.value for i in i_day], name='dec_mid', unit='deg')

    moon = Parallel(n_jobs=ncpu)(delayed(get_moon)(Time(utc[i]), location=site.location) for i in i_day)
    ra = Column([moon[i].ra.value for i in i_day], name='ra', unit='deg')
    dec = Column([moon[i].dec.value for i in i_day], name='dec', unit='deg')

    ZDHAAZ = Parallel(n_jobs=ncpu)(delayed(calc_zd_ha_az)(lst=lst[i]*u.hourangle, latitude=site.location.lat,
                                                        ra=moon[i].ra, dec=moon[i].dec) for i in i_day)

    ZD = Column([ZDHAAZ[i][0].value for i in i_day], name='ZD', unit='deg')
    HA = Column([ZDHAAZ[i][1].value for i in i_day], name='HA', unit='hourangle')
    AZ = Column([ZDHAAZ[i][2].value for i in i_day], name='AZ', unit='deg')
    AM = Column([airmass(ZDHAAZ[i][0]) for i in i_day], name='AM')

    if verbose:
        print('i_day', i_day)
        # print(set)
        # print(rise)
        print(fraction)
        print(phase)
        print(ra_mid)
        print(dec_mid)
        print(ra)
        print(dec)
        print(ZD)
        print(HA)
        print(AZ)

    return Table((fraction, phase, ra_mid, dec_mid, ra, dec, ZD, HA, AZ, AM))
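# The calc_zd_ha_az and airmass helpers used above are not shown in this example.
# A plausible standalone sketch of the underlying relations is given below
# (assumed; the real helpers may wrap angles differently or use a refined airmass
# formula instead of the plane-parallel approximation).
import numpy as np
import astropy.units as u
from astropy.coordinates import Angle


def zd_ha_az_sketch(lst, latitude, ra, dec):
    """Zenith distance, hour angle and azimuth from LST, site latitude, RA and Dec."""
    ha = Angle(lst - ra, unit=u.hourangle).wrap_at(12 * u.hourangle)   # hour angle
    sin_alt = (np.sin(latitude) * np.sin(dec)
               + np.cos(latitude) * np.cos(dec) * np.cos(ha))
    zd = 90 * u.deg - np.arcsin(sin_alt)                               # zenith distance
    # azimuth measured from North through East
    az = np.arctan2(-np.cos(dec) * np.sin(ha),
                    np.sin(dec) * np.cos(latitude)
                    - np.cos(dec) * np.sin(latitude) * np.cos(ha))
    return zd.to(u.deg), ha, Angle(az).wrap_at(360 * u.deg).to(u.deg)


def airmass_sketch(zd):
    """Plane-parallel airmass approximation: sec(zenith distance)."""
    return 1.0 / np.cos(zd)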
示例#50
0
def convert_time_date(time_string, **kwargs):
    return Time(time_string.isoformat(), **kwargs)
示例#51
0
def convert_time_npndarray(time_string, **kwargs):
    if 'datetime64' in str(time_string.dtype):
        return Time([str(dt.astype('M8[ns]')) for dt in time_string], **kwargs)
    else:
        return convert_time.dispatch(object)(time_string, **kwargs)
示例#52
0
def read_horizons(t0=None,
                  dur=None,
                  vis=None,
                  observatory=None,
                  verbose=False):
    '''
    This function visits JPL Horizons to retrieve J2000 topocentric RA and DEC of the solar disk center
    as a function of time.

    Keyword arguments:
    t0: Reference time in astropy Time format
    dur: duration (in days) of the returned coordinates. Defaults to 1./60./24. (1 minute)
    vis: CASA visibility dataset (in measurement set format). If provided, use entire duration from
         the visibility data
    observatory: observatory code (from JPL Horizons). If not provided, use information from the visibility;
         if no visibility is found, use the Earth's center (code=500)
    verbose: True to provide extra information

    Usage:
    >>> from astropy.time import Time
    >>> out = read_horizons(t0=Time('2017-09-10 16:00:00'), observatory='-81')
    >>> out = read_horizons(vis = 'mydata.ms')

    History:
    BC (sometime in 2014): function was first written, followed by a number of edits by BC and SY
    BC (2019-07-16): Added docstring documentation

    '''
    import urllib2
    import ssl
    if not t0 and not vis:
        t0 = Time.now()
    if not dur:
        dur = 1. / 60. / 24.  # default to 1 minute (in days)
    if t0:
        try:
            btime = Time(t0)
        except:
            print('input time ' + str(t0) + ' not recognized')
            return -1
    if vis:
        if not os.path.exists(vis):
            print('Input ms data ' + vis + ' does not exist! ')
            return -1
        try:
            # ms.open(vis)
            # summary = ms.summary()
            # ms.close()
            # btime = Time(summary['BeginTime'], format='mjd')
            # etime = Time(summary['EndTime'], format='mjd')
            ## alternative way to avoid conflicts with importeovsa, if needed -- more time consuming
            if observatory == 'geocentric':
                observatory = '500'
            else:
                ms.open(vis)
                metadata = ms.metadata()
                if metadata.observatorynames()[0] == 'EVLA':
                    observatory = '-5'
                elif metadata.observatorynames()[0] == 'EOVSA':
                    observatory = '-81'
                elif metadata.observatorynames()[0] == 'ALMA':
                    observatory = '-7'
                ms.close()
            tb.open(vis)
            btime_vis = Time(tb.getcell('TIME', 0) / 24. / 3600., format='mjd')
            etime_vis = Time(tb.getcell('TIME',
                                        tb.nrows() - 1) / 24. / 3600.,
                             format='mjd')
            tb.close()
            if verbose:
                print("Beginning time of this scan " + btime_vis.iso)
                print("End time of this scan " + etime_vis.iso)

            # extend the start and end time for jpl horizons by 0.5 hr on each end
            btime = Time(btime_vis.mjd - 0.5 / 24., format='mjd')
            dur = etime_vis.mjd - btime_vis.mjd + 1.0 / 24.
        except:
            print('error in reading ms file: ' + vis +
                  ' to obtain the ephemeris!')
            return -1

    # default the observatory to geocentric, if none provided
    if not observatory:
        observatory = '500'

    etime = Time(btime.mjd + dur, format='mjd')

    try:
        cmdstr = "https://ssd.jpl.nasa.gov/horizons_batch.cgi?batch=1&TABLE_TYPE='OBSERVER'&QUANTITIES='1,17,20'&CSV_FORMAT='YES'&ANG_FORMAT='DEG'&CAL_FORMAT='BOTH'&SOLAR_ELONG='0,180'&CENTER='{}@399'&COMMAND='10'&START_TIME='".format(
            observatory
        ) + btime.iso.replace(
            ' ', ','
        ) + "'&STOP_TIME='" + etime.iso[:-4].replace(
            ' ', ','
        ) + "'&STEP_SIZE='1m'&SKIP_DAYLT='NO'&EXTRA_PREC='YES'&APPARENT='REFRACTED'"
        cmdstr = cmdstr.replace("'", "%27")
        try:
            context = ssl._create_unverified_context()
            f = urllib2.urlopen(cmdstr, context=context)
        except:
            f = urllib2.urlopen(cmdstr)
        lines = f.readlines()
        f.close()
    except:
        # todo use geocentric coordinate for the new VLA data
        import requests, collections
        params = collections.OrderedDict()
        params['batch'] = '1'
        params['TABLE_TYPE'] = "'OBSERVER'"
        params['QUANTITIES'] = "'1,17,20'"
        params['CSV_FORMAT'] = "'YES'"
        params['ANG_FORMAT'] = "'DEG'"
        params['CAL_FORMAT'] = "'BOTH'"
        params['SOLAR_ELONG'] = "'0,180'"
        if observatory == '500':
            params['CENTER'] = "'500'"
        else:
            params['CENTER'] = "'{}@399'".format(observatory)
        params['COMMAND'] = "'10'"
        params['START_TIME'] = "'{}'".format(btime.iso[:-4].replace(' ', ','))
        params['STOP_TIME'] = "'{}'".format(etime.iso[:-4].replace(' ', ','))
        params['STEP_SIZE'] = "'1m'"
        params['SKIP_DAYLT'] = "'NO'"
        params['EXTRA_PREC'] = "'YES'"
        params['APPARENT'] = "'REFRACTED'"
        results = requests.get("https://ssd.jpl.nasa.gov/horizons_batch.cgi",
                               params=params)
        lines = [ll for ll in results.iter_lines()]

    nline = len(lines)
    istart = 0
    for i in range(nline):
        line = lines[i]
        if line[0:5] == '$$SOE':  # start recording
            istart = i + 1
        if line[0:5] == '$$EOE':  # end recording
            iend = i
    newlines = lines[istart:iend]
    nrec = len(newlines)
    ephem_ = []
    t = []
    ra = []
    dec = []
    p0 = []
    delta = []
    for line in newlines:
        items = line.split(',')
        t.append(Time(float(items[1]), format='jd').mjd)
        ra.append(np.radians(float(items[4])))
        dec.append(np.radians(float(items[5])))
        p0.append(float(items[6]))
        delta.append(float(items[8]))
    # convert list of dictionary to a dictionary of arrays
    ephem = {'time': t, 'ra': ra, 'dec': dec, 'p0': p0, 'delta': delta}
    return ephem
示例#53
0
def convert_time_tuple(time_string, **kwargs):
    # Make sure there are enough values to unpack
    time_string = (time_string + (0, ) * 7)[:7]
    return Time('{}-{}-{}T{}:{}:{}.{:06}'.format(*time_string), **kwargs)
示例#54
0
def _today():
    # Get current day in scale='tai' without going through a scale change
    # (so we do not need leap seconds).
    s = '{0.year:04d}-{0.month:02d}-{0.day:02d}'.format(datetime.utcnow())
    return Time(s, scale='tai', format='iso', out_subfmt='date')
示例#55
0
def convert_time_datetime(time_string, **kwargs):
    return Time(time_string, **kwargs)
示例#56
0
    roots = np.apply_along_axis(
        np.roots, 0, [r3coeff, r2coeff, hip_data['Plx'] / eplx2, -1 / eplx2])
    roots[np.logical_or(np.real(roots) < 0.0,
                        abs(np.imag(roots)) > 1.0e-6)] = np.nan
    parallax_distance = np.nanmin(np.real(roots), 0) * 1000

    # prefer cluster distances (e_Dist NULL), otherwise use computed distance
    is_cluster_distance = np.logical_and(np.logical_not(hip_data['Dist'].mask),
                                         hip_data['e_Dist'].mask)

    hip_data['r_est'] = np.where(is_cluster_distance, hip_data['Dist'],
                                 parallax_distance)
    hip_data['r_est'].unit = u.pc


HIP_TIME = Time('J1991.25')
GAIA_TIME = Time('J2015.5')


def update_coordinates(hip_data: Table) -> None:
    """Update the coordinates from J1991.25 to J2015.5 to match Gaia."""
    print('Updating coordinates to J2015.5')
    coords = SkyCoord(frame=ICRS,
                      ra=hip_data['RAdeg'],
                      dec=hip_data['DEdeg'],
                      pm_ra_cosdec=hip_data['pmRA'],
                      pm_dec=hip_data['pmDE'],
                      distance=hip_data['r_est'],
                      radial_velocity=hip_data['RV'].filled(0),
                      obstime=HIP_TIME).apply_space_motion(GAIA_TIME)
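# The snippet above is truncated before the propagated coordinates are written back
# into the table. A minimal self-contained sketch of the same epoch propagation,
# with made-up star parameters, looks like this:
import astropy.units as u
from astropy.coordinates import ICRS, SkyCoord
from astropy.time import Time

star_j1991 = SkyCoord(frame=ICRS,
                      ra=10.0 * u.deg, dec=20.0 * u.deg,
                      pm_ra_cosdec=100 * u.mas / u.yr, pm_dec=-50 * u.mas / u.yr,
                      distance=50 * u.pc, radial_velocity=0 * u.km / u.s,
                      obstime=Time('J1991.25'))
star_j2015 = star_j1991.apply_space_motion(Time('J2015.5'))
print(star_j2015.ra.deg, star_j2015.dec.deg)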
示例#57
0
def convert_time_pandasDatetimeIndex(time_string, **kwargs):
    return Time(time_string.tolist(), **kwargs)
示例#58
0
from astropy import units as u
from astropy.time import Time
import matplotlib.pyplot as plt
# %%
# YAML representation
# -------------------
# Here is an example YAML file using the model:
from gammapy.modeling.models import (
    ExpDecayTemporalModel,
    Models,
    PowerLawSpectralModel,
    SkyModel,
)

t0 = "5 h"
t_ref = Time("2020-10-01")
time_range = [t_ref, t_ref + 1 * u.d]
expdecay_model = ExpDecayTemporalModel(t_ref=t_ref.mjd * u.d, t0=t0)
expdecay_model.plot(time_range)
plt.grid(which="both")

model = SkyModel(
    spectral_model=PowerLawSpectralModel(),
    temporal_model=expdecay_model,
    name="expdecay_model",
)
models = Models([model])

print(models.to_yaml())
示例#59
0
def imreg(vis=None,
          ephem=None,
          msinfo=None,
          imagefile=None,
          timerange=None,
          reftime=None,
          fitsfile=None,
          beamfile=None,
          offsetfile=None,
          toTb=None,
          sclfactor=1.0,
          verbose=False,
          p_ang=False,
          overwrite=True,
          usephacenter=True,
          deletehistory=False,
          subregion=[],
          docompress=False):
    ''' 
    main routine to register CASA images
           Required Inputs:
               vis: STRING. CASA measurement set from which the image is derived
               imagefile: STRING or LIST. name of the input CASA image
               timerange: STRING or LIST. timerange used to generate the CASA image, must have the same length as the input images. 
                          Each element should be in CASA standard time format, e.g., '2012/03/03/12:00:00~2012/03/03/13:00:00'
           Optional Inputs:
               msinfo: DICTIONARY. CASA MS information, output from read_msinfo. If not provided, generate one from the supplied vis
               ephem: DICTIONARY. solar ephem, output from read_horizons. 
                      If not provided, query JPL Horizons based on time info of the vis (internet connection required)
               fitsfile: STRING or LIST. name of the output registered fits files
               reftime: STRING or LIST. Each element should be in CASA standard time format, e.g., '2012/03/03/12:00:00'
               offsetfile: optionally provide an offset with a series of solar x and y offsets with timestamps 
               toTb: Bool. Convert the default Jy/beam to brightness temperature?
               sclfactor: scale the image values up by its value (to compensate VLA 20 dB attenuator)
               verbose: Bool. Show more diagnostic info if True.
               usephacenter: Bool -- if True, correct for the RA and DEC in the ms file based on the solar ephemeris.
                                     Otherwise assume the phasecenter is correctly pointed to the solar disk center
                                     (EOVSA case)
               subregion: Region selection. See 'help par.region' for details.
    Usage:
    >>> from suncasa.utils import helioimage2fits as hf
    >>> hf.imreg(vis='mydata.ms', imagefile='myimage.image', fitsfile='myimage.fits',
                 timerange='2017/08/21/20:21:10~2017/08/21/20:21:18')
    The output fits file is 'myimage.fits'

    History:
    BC (sometime in 2014): function was first written, followed by a number of edits by BC and SY
    BC (2019-07-16): Added checks for stokes parameter. Verified that for converting from Jy/beam to brightness temperature,
                     the convention of 2*k_b*T should always be used. I.e., for unpolarized source, stokes I, RR, LL, XX, YY, 
                     etc. in the output CASA images from (t)clean should all have same values of radio intensity 
                     (in Jy/beam) and brightness temperature (in K).

    '''
    ia = iatool()

    if deletehistory:
        ms_clearhistory(vis)

    if not imagefile:
        raise ValueError('Please specify input image')
    if not timerange:
        raise ValueError('Please specify timerange of the input image')
    if type(imagefile) == str:
        imagefile = [imagefile]
    if type(timerange) == str:
        timerange = [timerange]
    if not fitsfile:
        fitsfile = [img + '.fits' for img in imagefile]
    if type(fitsfile) == str:
        fitsfile = [fitsfile]
    nimg = len(imagefile)
    if len(timerange) != nimg:
        raise ValueError(
            'Number of input images does not equal to number of timeranges!')
    if len(fitsfile) != nimg:
        raise ValueError(
            'Number of input images does not equal to number of output fits files!'
        )
    nimg = len(imagefile)
    if verbose:
        print(str(nimg) + ' images to process...')

    if reftime:  # use as reference time to find solar disk RA and DEC to register the image, but not the actual timerange associated with the image
        if type(reftime) == str:
            reftime = [reftime] * nimg
        if len(reftime) != nimg:
            raise ValueError(
                'Number of reference times does not match that of input images!'
            )
        helio = ephem_to_helio(vis,
                               ephem=ephem,
                               msinfo=msinfo,
                               reftime=reftime,
                               usephacenter=usephacenter)
    else:
        # use the supplied timerange to register the image
        helio = ephem_to_helio(vis,
                               ephem=ephem,
                               msinfo=msinfo,
                               reftime=timerange,
                               usephacenter=usephacenter)

    if toTb:
        (bmajs, bmins, bpas, beamunits,
         bpaunits) = getbeam(imagefile=imagefile, beamfile=beamfile)

    for n, img in enumerate(imagefile):
        if verbose:
            print('processing image #' + str(n) + ' ' + img)
        fitsf = fitsfile[n]
        timeran = timerange[n]
        # obtain duration of the image as FITS header exptime
        try:
            [tbg0, tend0] = timeran.split('~')
            tbg_d = qa.getvalue(qa.convert(qa.totime(tbg0), 'd'))[0]
            tend_d = qa.getvalue(qa.convert(qa.totime(tend0), 'd'))[0]
            tdur_s = (tend_d - tbg_d) * 3600. * 24.
            dateobs = qa.time(qa.quantity(tbg_d, 'd'), form='fits', prec=10)[0]
        except:
            print('Error in converting the input timerange: ' + str(timeran) +
                  '. Proceeding to the next image...')
            continue

        hel = helio[n]
        if not os.path.exists(img):
            warnings.warn('{} does not exist!'.format(img))
        else:
            if os.path.exists(fitsf) and not overwrite:
                raise ValueError(
                    'Specified fits file already exists and overwrite is set to False. Aborting...'
                )
            else:
                p0 = hel['p0']
                tb.open(img + '/logtable', nomodify=False)
                nobs = tb.nrows()
                tb.removerows([i + 1 for i in range(nobs - 1)])
                tb.close()
                ia.open(img)
                imr = ia.rotate(pa=str(-p0) + 'deg')
                if subregion:
                    imr = imr.subimage(region=subregion)
                imr.tofits(fitsf, history=False, overwrite=overwrite)
                imr.close()
                imsum = ia.summary()
                ia.close()
                ia.done()

            # construct the standard fits header
            # RA and DEC of the reference pixel crpix1 and crpix2
            (imra, imdec) = (imsum['refval'][0], imsum['refval'][1])
            # find out the difference of the image center to the CASA phase center
            # RA and DEC difference in arcseconds
            ddec = degrees((imdec - hel['dec_fld'])) * 3600.
            dra = degrees((imra - hel['ra_fld']) * cos(hel['dec_fld'])) * 3600.
            # Convert into image heliocentric offsets
            prad = -radians(hel['p0'])
            dx = (-dra) * cos(prad) - ddec * sin(prad)
            dy = (-dra) * sin(prad) + ddec * cos(prad)
            if offsetfile:
                try:
                    offset = np.load(offsetfile)
                except:
                    raise ValueError(
                        'The specified offsetfile does not exist!')
                reftimes_d = offset['reftimes_d']
                xoffs = offset['xoffs']
                yoffs = offset['yoffs']
                timg_d = hel['reftime']
                ind = bisect.bisect_left(reftimes_d, timg_d)
                xoff = xoffs[ind - 1]
                yoff = yoffs[ind - 1]
            else:
                xoff = hel['refx']
                yoff = hel['refy']
            if verbose:
                print(
                    'offset of image phase center to visibility phase center (arcsec): dx={0:.2f}, dy={1:.2f}'
                    .format(dx, dy))
                print(
                    'offset of visibility phase center to solar disk center (arcsec): dx={0:.2f}, dy={1:.2f}'
                    .format(xoff, yoff))
            (crval1, crval2) = (xoff + dx, yoff + dy)
            # update the fits header to heliocentric coordinates

            hdu = pyfits.open(fitsf, mode='update')
            hdu[0].verify('fix')
            header = hdu[0].header
            dshape = hdu[0].data.shape
            ndim = hdu[0].data.ndim
            (cdelt1,
             cdelt2) = (-header['cdelt1'] * 3600., header['cdelt2'] * 3600.
                        )  # Original CDELT1, 2 are for RA and DEC in degrees
            header['cdelt1'] = cdelt1
            header['cdelt2'] = cdelt2
            header['cunit1'] = 'arcsec'
            header['cunit2'] = 'arcsec'
            header['crval1'] = crval1
            header['crval2'] = crval2
            header['ctype1'] = 'HPLN-TAN'
            header['ctype2'] = 'HPLT-TAN'
            header['date-obs'] = dateobs  # begin time of the image
            if not p_ang:
                hel['p0'] = 0
            try:
                # this works for pyfits version of CASA 4.7.0 but not CASA 4.6.0
                if tdur_s:
                    header.set('exptime', tdur_s)
                else:
                    header.set('exptime', 1.)
                header.set('p_angle', hel['p0'])
                header.set(
                    'dsun_obs',
                    sun.sunearth_distance(Time(dateobs)).to(u.meter).value)
                header.set(
                    'rsun_obs',
                    sun.solar_semidiameter_angular_size(Time(dateobs)).value)
                header.set('rsun_ref', sun.constants.radius.value)
                header.set('hgln_obs', 0.)
                header.set(
                    'hglt_obs',
                    sun.heliographic_solar_center(Time(dateobs))[1].value)
            except:
                # this works for astropy.io.fits
                if tdur_s:
                    header.append(('exptime', tdur_s))
                else:
                    header.append(('exptime', 1.))
                header.append(('p_angle', hel['p0']))
                header.append(
                    ('dsun_obs',
                     sun.sunearth_distance(Time(dateobs)).to(u.meter).value))
                header.append(
                    ('rsun_obs',
                     sun.solar_semidiameter_angular_size(Time(dateobs)).value))
                header.append(('rsun_ref', sun.constants.radius.value))
                header.append(('hgln_obs', 0.))
                header.append(
                    ('hglt_obs',
                     sun.heliographic_solar_center(Time(dateobs))[1].value))

            # check if stokes parameter exist
            exist_stokes = False
            stokes_mapper = {
                'I': 1,
                'Q': 2,
                'U': 3,
                'V': 4,
                'RR': -1,
                'LL': -2,
                'RL': -3,
                'LR': -4,
                'XX': -5,
                'YY': -6,
                'XY': -7,
                'YX': -8
            }
            if 'CRVAL3' in header.keys():
                if header['CTYPE3'] == 'STOKES':
                    stokenum = header['CRVAL3']
                    exist_stokes = True
            if 'CRVAL4' in header.keys():
                if header['CTYPE4'] == 'STOKES':
                    stokenum = header['CRVAL4']
                    exist_stokes = True
            if exist_stokes:
                stokesstr = {v: k for k, v in stokes_mapper.items()}[stokenum]
                if verbose:
                    print('This image is in Stokes ' + stokesstr)
            else:
                print(
                    'STOKES Information does not seem to exist! Assuming Stokes I'
                )
                stokenum = 1

            # intensity units to brightness temperature
            if toTb:
                # get restoring beam info
                bmaj = bmajs[n]
                bmin = bmins[n]
                beamunit = beamunits[n]
                data = hdu[0].data  # note: the array axis order is reversed relative to the FITS header (FITS convention)
                keys = list(header.keys())
                values = list(header.values())
                # which axis is frequency?
                faxis = keys[values.index('FREQ')][-1]
                faxis_ind = ndim - int(faxis)
                # find out the polarization of this image
                k_b = qa.constants('k')['value']
                c_l = qa.constants('c')['value']
                # Always use 2*kb for all polarizations
                const = 2. * k_b / c_l**2
                if header['BUNIT'].lower() == 'jy/beam':
                    header['BUNIT'] = 'K'
                    header['BTYPE'] = 'Brightness Temperature'
                    for i in range(dshape[faxis_ind]):
                        nu = header['CRVAL' +
                                    faxis] + header['CDELT' + faxis] * (
                                        i + 1 - header['CRPIX' + faxis])
                        if header['CUNIT' + faxis] == 'KHz':
                            nu *= 1e3
                        if header['CUNIT' + faxis] == 'MHz':
                            nu *= 1e6
                        if header['CUNIT' + faxis] == 'GHz':
                            nu *= 1e9
                        if len(bmaj) > 1:  # multiple (per-plane) beams
                            bmajtmp = bmaj[i]
                            bmintmp = bmin[i]
                        else:  # one single beam
                            bmajtmp = bmaj[0]
                            bmintmp = bmin[0]
                        if beamunit == 'arcsec':
                            bmaj0 = np.radians(bmajtmp / 3600.)
                            bmin0 = np.radians(bmintmp / 3600.)
                        if beamunit == 'arcmin':
                            bmaj0 = np.radians(bmajtmp / 60.)
                            bmin0 = np.radians(bmintmp / 60.)
                        if beamunit == 'deg':
                            bmaj0 = np.radians(bmajtmp)
                            bmin0 = np.radians(bmintmp)
                        if beamunit == 'rad':
                            bmaj0 = bmajtmp
                            bmin0 = bmintmp
                        beam_area = bmaj0 * bmin0 * np.pi / (4. * log(2.))
                        factor = const * nu**2  # SI unit
                        jy_to_si = 1e-26
                        # print(nu/1e9, beam_area, factor)
                        factor2 = sclfactor
                        # if sclfactor:
                        #     factor2 = 100.
                        if faxis == '3':
                            data[:,
                                 i, :, :] *= jy_to_si / beam_area / factor * factor2
                        if faxis == '4':
                            data[
                                i, :, :, :] *= jy_to_si / beam_area / factor * factor2

            header = fu.headerfix(header)
            hdu.flush()
            hdu.close()

            if ndim - np.count_nonzero(np.array(dshape) == 1) > 3:
                docompress = False
                '''
                    Caveat: only 1D, 2D, or 3D images are currently supported by
                    the astropy fits compression. If a n-dimensional image data array
                    does not have at least n-3 single-dimensional entries,
                    force docompress to be False
                '''

                print(
                    'warning: the FITS data contains more than 3 non-squeezable dimensions. Skipping FITS compression...'
                )
            if docompress:
                fitsftmp = fitsf + ".tmp.fits"
                os.system("mv {} {}".format(fitsf, fitsftmp))
                hdu = pyfits.open(fitsftmp)
                hdu[0].verify('fix')
                header = hdu[0].header
                data = hdu[0].data
                fu.write_compressed_image_fits(fitsf,
                                               data,
                                               header,
                                               compression_type='RICE_1',
                                               quantize_level=4.0)
                os.system("rm -rf {}".format(fitsftmp))
    if deletehistory:
        ms_restorehistory(vis)
    return fitsfile
示例#60
0
def read_hst_fgs_amudotrep(file=None, version=None):
    """Read HST FGS amu.rep file which contain the TVS matrices.

    Parameters
    ----------
    filepath : str
        Path to file.

    Returns
    -------
    data : dict
        Dictionary that holds the file content ordered by FGS number

    """
    if version is None:
        version = HST_PRD_VERSION  # defaults to 'Latest'
    if file is None:
        file = os.path.join(HST_PRD_DATA_ROOT, 'amu.rep-{}'.format(version))

    # set up regular expressions
    # use https://regexper.com to visualise these if required
    rx_dict = {
        'fgs':
        re.compile(r'FGS - (?P<fgs>\d)'),
        'n_cones':
        re.compile(r'NUMBER OF CONES:   (?P<n_cones>\d)'),
        'date':
        re.compile(
            r'(?P<day>[ 123][0-9])-(?P<month>[A-Z][A-Z][A-Z])-(?P<year>[0-9][0-9])'
        ),
        'cone_vector':
        re.compile(r'(CONE)*(CONE VECTOR)*(CONE ANGLE)'),
        'cone_vector_tel':
        re.compile(r'(CONE)*(REVISED CONE VECTOR)*(PREVIOUS CONE VECTOR)'),
        'tvs':
        re.compile(r'(FGS TO ST TRANSFORMATION MATRICES)'),
    }

    data = {}
    with open(file, 'r') as file_object:
        line_index = 0
        astropy_table_index = 0
        line = file_object.readline()
        while line:
            # print(line)
            # at each line check for a match with a regex
            key, match = _parse_line(line, rx_dict)
            if key == 'fgs':
                fgs_number = int(match.group('fgs'))
                # print('FGS {}:'.format(fgs_number))
                fgs_id = 'fgs{}'.format(fgs_number)
                data[fgs_id] = {}
            elif key == 'n_cones':
                n_cones = int(match.group('n_cones'))
            elif key == 'cone_vector':
                table = Table.read(file,
                                   format='ascii.no_header',
                                   delimiter=' ',
                                   data_start=astropy_table_index + 2,
                                   data_end=astropy_table_index + 2 + n_cones,
                                   guess=False,
                                   names=('CONE', 'X', 'Y', 'Z',
                                          'CONE_ANGLE_DEG'))
                # table.pprint()
                data[fgs_id]['cone_parameters_fgs'] = table
            elif key == 'cone_vector_tel':
                table = Table.read(file,
                                   format='ascii.no_header',
                                   delimiter=' ',
                                   data_start=astropy_table_index + 2,
                                   data_end=astropy_table_index + 2 + n_cones,
                                   guess=False,
                                   names=('CONE', 'V1', 'V2', 'V3', 'V1_PREV',
                                          'V2_PREV', 'V3_PREV'))
                data[fgs_id]['cone_parameters_tel'] = table
            elif key == 'tvs':
                table = Table.read(file,
                                   format='ascii.no_header',
                                   delimiter=' ',
                                   data_start=astropy_table_index + 2,
                                   data_end=astropy_table_index + 2 + 3,
                                   guess=False,
                                   names=('NEW_1', 'NEW_2', 'NEW_3', 'OLD_1',
                                          'OLD_2', 'OLD_3'))
                # table.pprint()
                data[fgs_id]['tvs_parameters'] = table
                data[fgs_id]['tvs'] = np.zeros((3, 3))
                data[fgs_id]['tvs_old'] = np.zeros((3, 3))
                for i in range(3):
                    data[fgs_id]['tvs'][i, :] = [
                        data[fgs_id]['tvs_parameters']['NEW_{}'.format(j +
                                                                       1)][i]
                        for j in range(3)
                    ]
                    data[fgs_id]['tvs_old'][i, :] = [
                        data[fgs_id]['tvs_parameters']['OLD_{}'.format(j +
                                                                       1)][i]
                        for j in range(3)
                    ]

            elif key == 'date':
                data[fgs_id]['timestamp'] = Time('20{}-{:02d}-{:02d}'.format(
                    match.group('year'),
                    month_name_to_number(match.group('month')),
                    int(match.group('day'))))

            line = file_object.readline()
            line_index += 1
            if line.strip():
                astropy_table_index += 1  # astropy.table.Table.read ignores blank lines
        data['ORIGIN'] = file
        data['VERSION'] = version
    return data
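# Hypothetical usage sketch (the default file path and version are resolved from the
# local PRD installation; the dictionary keys follow the parsing code above):
#   data = read_hst_fgs_amudotrep()
#   print(data['fgs1']['tvs'])            # 3x3 FGS-to-ST transformation matrix
#   print(data['fgs1']['timestamp'].iso)  # date parsed from the amu.rep header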