Example #1
 def test_quantity_output(self):
     q = 500.25*u.day
     dt = TimeDelta(q)
     assert dt.to(u.day) == q
     assert dt.to(u.second).value == q.to_value(u.second)
     with pytest.raises(u.UnitsError):
         dt.to(u.m)
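A minimal sketch of the same Quantity round trip outside the test harness; the `to_value` unit shortcut assumes astropy 4.0 or later.

import astropy.units as u
from astropy.time import TimeDelta

dt = TimeDelta(500.25 * u.day)   # construct directly from a Quantity
print(dt.to(u.hour))             # Quantity in hours
print(dt.to_value(u.second))     # plain float, no intermediate Quantity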
Example #2
File: simulate.py  Project: adonath/gammapy
def random_times(
    size,
    rate,
    dead_time=TimeDelta(0, format="sec"),
    return_diff=False,
    random_state="random-seed",
):
    """Make random times assuming a Poisson process.

    This function can be used to test event time series,
    to have a comparison what completely random data looks like.

    Can be used in two ways (in either case the return type is `~astropy.time.TimeDelta`):

    * ``return_diff=False`` - Return absolute times, relative to zero (default)
    * ``return_diff=True`` - Return time differences between consecutive events.

    Parameters
    ----------
    size : int
        Number of samples
    rate : `~astropy.units.Quantity`
        Event rate (dimension: 1 / TIME)
    dead_time : `~astropy.units.Quantity` or `~astropy.time.TimeDelta`, optional
        Dead time after event (dimension: TIME)
    return_diff : bool
        Return time difference between events? (default: no, return absolute times)
    random_state : {int, 'random-seed', 'global-rng', `~numpy.random.RandomState`}
        Defines random number generator initialisation.
        Passed to `~gammapy.utils.random.get_random_state`.

    Returns
    -------
    time : `~astropy.time.TimeDelta`
        Absolute times (in seconds) after time zero, or time differences
        between consecutive events if ``return_diff=True``.

    Examples
    --------
    Example of how to simulate 100 events at a rate of 10 Hz.
    As expected, the last event occurs after about 10 seconds.

    >>> from astropy.units import Quantity
    >>> from gammapy.time import random_times
    >>> rate = Quantity(10, 'Hz')
    >>> times = random_times(size=100, rate=rate, random_state=0)
    >>> times[-1]
    <TimeDelta object: scale='None' format='sec' value=9.186484131475076>
    """
    random_state = get_random_state(random_state)

    dead_time = TimeDelta(dead_time)
    scale = (1 / rate).to("s").value
    time_delta = random_state.exponential(scale=scale, size=size)
    time_delta += dead_time.to("s").value

    if return_diff:
        return TimeDelta(time_delta, format="sec")
    else:
        time = time_delta.cumsum()
        return TimeDelta(time, format="sec")
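A hedged usage sketch of the two return modes described in the docstring; the import path follows the doctest above and may differ between gammapy versions.

from astropy.units import Quantity
from gammapy.time import random_times  # path as in the doctest above

rate = Quantity(10, "Hz")
# absolute times relative to zero (the default)
times = random_times(size=100, rate=rate, random_state=0)
# inter-arrival times between consecutive events instead
diffs = random_times(size=100, rate=rate, return_diff=True, random_state=0)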
Example #3
    def test_scales_for_delta_scale_is_none(self, scale, op):
        """T(X) +/- dT(None) or T(X) +/- Quantity(time-like)

        This is always allowed and just adds JDs, i.e., the scale of
        the TimeDelta or time-like Quantity will be taken to be X.
        The one exception is again for X=UTC, where TAI is assumed instead,
        so that a day is always defined as 86400 seconds.
        """
        dt_none = TimeDelta([0., 1., -1., 1000.], format='sec')
        assert dt_none.scale is None
        q_time = dt_none.to('s')

        dt = self.dt[scale]
        dt1 = op(dt, dt_none)
        assert dt1.scale == dt.scale
        assert allclose_jd(dt1.jd, op(dt.jd, dt_none.jd))
        dt2 = op(dt_none, dt)
        assert dt2.scale == dt.scale
        assert allclose_jd(dt2.jd, op(dt_none.jd, dt.jd))
        dt3 = op(q_time, dt)
        assert dt3.scale == dt.scale
        assert allclose_jd(dt3.jd, dt2.jd)

        t = self.t[scale]
        t1 = op(t, dt_none)
        assert t1.scale == t.scale
        assert allclose_jd(t1.jd, op(t.jd, dt_none.jd))
        if op is operator.add:
            t2 = op(dt_none, t)
            assert t2.scale == t.scale
            assert allclose_jd(t2.jd, t1.jd)
        t3 = op(t, q_time)
        assert t3.scale == t.scale
        assert allclose_jd(t3.jd, t1.jd)
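A minimal sketch (not part of the test suite) of the rule this test exercises: a format-only TimeDelta has scale None and simply adopts the scale of whatever Time it is combined with.

from astropy.time import Time, TimeDelta

dt = TimeDelta(1000.0, format='sec')
print(dt.scale)                 # None
t_tai = Time('2020-01-01', scale='tai')
print((t_tai + dt).scale)       # 'tai': the delta takes on the Time's scale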
Example #4
def test_timedelta_to_datetime():
    td = TimeDelta(1, format='jd')

    assert td.to_datetime() == timedelta(days=1)

    td2 = TimeDelta([[1, 2], [3, 4]], format='jd')
    td = [[timedelta(days=1), timedelta(days=2)],
          [timedelta(days=3), timedelta(days=4)]]

    assert np.all(td2.to_datetime() == td)
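The reverse direction also works: TimeDelta accepts a datetime.timedelta directly and infers the 'datetime' format, a sketch:

from datetime import timedelta
from astropy.time import TimeDelta

td = TimeDelta(timedelta(days=1, seconds=30))
print(td.format)   # 'datetime'
print(td.sec)      # 86430.0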
Example #5
    def test_set_format(self):
        """
        Test basics of setting format attribute.
        """
        dt = TimeDelta(86400.0, format='sec')
        assert dt.value == 86400.0
        assert dt.format == 'sec'

        dt.format = 'jd'
        assert dt.value == 1.0
        assert dt.format == 'jd'

        dt.format = 'datetime'
        assert dt.value == timedelta(days=1)
        assert dt.format == 'datetime'
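Setting .format mutates the object in place; for a one-off view, to_value (astropy 4.0+) converts without touching the stored format, a sketch:

from astropy.time import TimeDelta

dt = TimeDelta(86400.0, format='sec')
print(dt.to_value('jd'))   # 1.0
print(dt.format)           # still 'sec'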
Example #6
    def test_delta_day_is_86400_seconds(self, scale):
        """TimeDelta or Quantity holding 1 day always means 24*60*60 seconds

        This holds true for all timescales but UTC, for which leap-second
        days are longer or shorter by one second.
        """
        t = self.t[scale]
        dt_day = TimeDelta(1., format='jd')
        q_day = dt_day.to('day')

        dt_day_leap = t[-1] - t[0]
        # ^ = exclusive or, so either equal and not UTC, or not equal and UTC
        assert allclose_jd(dt_day_leap.jd, dt_day.jd) ^ (scale == 'utc')

        t1 = t[0] + dt_day
        assert allclose_jd(t1.jd, t[-1].jd) ^ (scale == 'utc')
        t2 = q_day + t[0]
        assert allclose_jd(t2.jd, t[-1].jd) ^ (scale == 'utc')
        t3 = t[-1] - dt_day
        assert allclose_jd(t3.jd, t[0].jd) ^ (scale == 'utc')
        t4 = t[-1] - q_day
        assert allclose_jd(t4.jd, t[0].jd) ^ (scale == 'utc')
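A concrete sketch of the UTC exception, assuming the leap second that was inserted at the end of 2016:

from astropy.time import Time

t1 = Time('2016-12-31 00:00:00', scale='utc')
t2 = Time('2017-01-01 00:00:00', scale='utc')
print((t2 - t1).sec)   # 86401.0: this UTC day contains a leap second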
Example #7
def read_event_file(file_path):
    """Function to read the event parameters from a file"""

    if not path.isfile(file_path):
        raise IOError('Cannot find input file ' + file_path)

    file_lines = open(file_path, 'r').readlines()

    # Parse parameters to convert to standard units:
    print('Input parameters:')
    params = {}
    for line in file_lines:
        (key, value) = line.replace('\n', '').split('=')
        key = key.strip()
        value = str(value).strip()
        if key in ['u_o', 't_E', 'M_L', 'D_L', 'D_S', 'phi', 'I_base']:
            value = float(value)
        if key == 't_E':
            value = TimeDelta(value * 24.0 * 3600.0, format='sec')
        elif key == 'M_L':
            value = constants.M_sun * value
        elif key == 'D_L' or key == 'D_S':
            value = value * constants.pc
        elif key == 'phi':
            value = (value * np.pi) / 180.0
        elif key in ['t_o', 't_obs_start']:
            value = Time(value, format='isot', scale='utc')
        elif key == 'visit_intervals':
            tlist = value.split(',')
            value = [TimeDelta(float(entry) * 3600.0, format='sec')
                     for entry in tlist]
        elif key == 'exposure_sequences':
            tlist1 = value.split(',')
            value = []
            for entry in tlist1:
                tlist2 = entry.split(':')
                sequence = [TimeDelta(float(exp), format='sec')
                            for exp in tlist2]
                value.append(sequence)
        params[key] = value
        print(key, value)

    return params
Example #8
 def test_valid_quantity_operations2(self):
     """Check that TimeDelta is treated as a quantity where possible."""
     t0 = TimeDelta(100000., format='sec')
     f = 1./t0
     assert isinstance(f, u.Quantity)
     assert f.unit == 1./u.day
     g = 10.*u.m/u.second**2
     v = t0 * g
     assert isinstance(v, u.Quantity)
     assert u.allclose(v, t0.sec * g.value * u.m / u.second)
     q = np.log10(t0/u.second)
     assert isinstance(q, u.Quantity)
     assert q.value == np.log10(t0.sec)
     s = 1.*u.m
     v = s/t0
     assert isinstance(v, u.Quantity)
     assert u.allclose(v, 1. / t0.sec * u.m / u.s)
     t = 1.*u.s
     t2 = t0 * t
     assert isinstance(t2, u.Quantity)
     assert u.allclose(t2, t0.sec * u.s ** 2)
     t3 = [1] / t0
     assert isinstance(t3, u.Quantity)
     assert u.allclose(t3, 1 / (t0.sec * u.s))
     # broadcasting
     t1 = TimeDelta(np.arange(100000., 100012.).reshape(6, 2), format='sec')
     f = np.array([1., 2.]) * u.cycle * u.Hz
     phase = f * t1
     assert isinstance(phase, u.Quantity)
     assert phase.shape == t1.shape
     assert u.allclose(phase, t1.sec * f.value * u.cycle)
     q = t0 * t1
     assert isinstance(q, u.Quantity)
     assert np.all(q == t0.to(u.day) * t1.to(u.day))
     q = t1 / t0
     assert isinstance(q, u.Quantity)
     assert np.all(q == t1.to(u.day) / t0.to(u.day))
Example #9
File: kepler.py  Project: seberg/astropy
def kepler_fits_reader(filename):
    """
    This serves as the FITS reader for KEPLER or TESS files within
    astropy-timeseries.

    This function should generally not be called directly, and instead this
    time series reader should be accessed with the
    :meth:`~astropy.timeseries.TimeSeries.read` method::

        >>> from astropy.timeseries import TimeSeries
        >>> ts = TimeSeries.read('kplr33122.fits', format='kepler.fits')  # doctest: +SKIP

    Parameters
    ----------
    filename : `str` or `pathlib.Path`
        File to load.

    Returns
    -------
    ts : `~astropy.timeseries.TimeSeries`
        Data converted into a TimeSeries.
    """
    hdulist = fits.open(filename)
    # Get the lightcurve HDU
    telescope = hdulist[0].header['telescop'].lower()

    if telescope == 'tess':
        hdu = hdulist['LIGHTCURVE']
    elif telescope == 'kepler':
        hdu = hdulist[1]
    else:
        raise NotImplementedError(
            "{} is not implemented, only KEPLER or TESS are "
            "supported through this reader".format(
                hdulist[0].header['telescop']))

    if hdu.header['EXTVER'] > 1:
        raise NotImplementedError("Support for {} v{} files not yet "
                                  "implemented".format(hdu.header['TELESCOP'],
                                                       hdu.header['EXTVER']))

    # Check time scale
    if hdu.header['TIMESYS'] != 'TDB':
        raise NotImplementedError("Support for {} time scale not yet "
                                  "implemented in {} reader".format(
                                      hdu.header['TIMESYS'],
                                      hdu.header['TELESCOP']))

    tab = Table.read(hdu, format='fits')

    # Some KEPLER files have a T column instead of TIME.
    if "T" in tab.colnames:
        tab.rename_column("T", "TIME")

    for colname in tab.colnames:
        unit = tab[colname].unit
        # Make masks nan for any column which will turn into a Quantity
        # later. TODO: remove once we support Masked Quantities properly?
        if unit and isinstance(tab[colname], MaskedColumn):
            tab[colname] = tab[colname].filled(np.nan)
        # Fix units
        if unit == 'e-/s':
            tab[colname].unit = 'electron/s'
        if unit == 'pixels':
            tab[colname].unit = 'pixel'

        # Rename columns to lowercase
        tab.rename_column(colname, colname.lower())

    # Filter out NaN rows
    nans = np.isnan(tab['time'].data)
    if np.any(nans):
        warnings.warn(f'Ignoring {np.sum(nans)} rows with NaN times')
    tab = tab[~nans]

    # Time column is dependent on source and we correct it here
    reference_date = Time(hdu.header['BJDREFI'],
                          hdu.header['BJDREFF'],
                          scale=hdu.header['TIMESYS'].lower(),
                          format='jd')
    time = reference_date + TimeDelta(tab['time'].data)
    time.format = 'isot'

    # Remove original time column
    tab.remove_column('time')

    return TimeSeries(time=time, data=tab)
Example #10
def make_test_observation_table(
    observatory_name="hess",
    n_obs=10,
    az_range=Angle([0, 360], "deg"),
    alt_range=Angle([45, 90], "deg"),
    date_range=(Time("2010-01-01"), Time("2015-01-01")),
    use_abs_time=False,
    n_tels_range=(3, 4),
    random_state="random-seed",
):
    """Make a test observation table.
    Create an observation table following a specific pattern.
    For the moment, only random observation tables are created.
    The observation table is created according to a specific
    observatory, and randomizing the observation pointingpositions
    in a specified az-alt range.
    If a *date_range* is specified, the starting time
    of the observations will be restricted to the specified interval.
    These parameters are interpreted as date, the precise hour of the
    day is ignored, unless the end date is closer than 1 day to the
    starting date, in which case, the precise time of the day is also
    considered.
    In addition, a range can be specified for the number of telescopes.
    Parameters
    ----------
    observatory_name : str, optional
        Name of the observatory; a list of choices is given in
        `~gammapy.data.observatory_locations`.
    n_obs : int, optional
        Number of observations for the obs table.
    az_range : `~astropy.coordinates.Angle`, optional
        Azimuth angle range (start, end) for random generation of
        observation pointing positions.
    alt_range : `~astropy.coordinates.Angle`, optional
        Altitude angle range (start, end) for random generation of
        observation pointing positions.
    date_range : `~astropy.time.Time`, optional
        Date range (start, end) for random generation of observation
        start time.
    use_abs_time : bool, optional
        Use absolute UTC times instead of [MET]_ seconds after the reference.
    n_tels_range : int, optional
        Range (start, end) of number of telescopes participating in
        the observations.
    random_state : {int, 'random-seed', 'global-rng', `~numpy.random.RandomState`}, optional
        Defines random number generator initialisation.
        Passed to `~gammapy.utils.random.get_random_state`.

    Returns
    -------
    obs_table : `~gammapy.data.ObservationTable`
        Observation table.
    """
    random_state = get_random_state(random_state)

    n_obs_start = 1

    obs_table = ObservationTable()

    # build a time reference as the start of 2010
    dateref = Time("2010-01-01T00:00:00")
    dateref_mjd_fra, dateref_mjd_int = np.modf(dateref.mjd)

    # define table header
    obs_table.meta["OBSERVATORY_NAME"] = observatory_name
    obs_table.meta["MJDREFI"] = dateref_mjd_int
    obs_table.meta["MJDREFF"] = dateref_mjd_fra
    if use_abs_time:
        # show the observation times in UTC
        obs_table.meta["TIME_FORMAT"] = "absolute"
    else:
        # show the observation times in seconds after the reference
        obs_table.meta["TIME_FORMAT"] = "relative"
    header = obs_table.meta

    # obs id
    obs_id = np.arange(n_obs_start, n_obs_start + n_obs)
    obs_table["OBS_ID"] = obs_id

    # obs time: 30 min
    ontime = Quantity(30. * np.ones_like(obs_id), "minute").to("second")
    obs_table["ONTIME"] = ontime

    # livetime: 25 min
    time_live = Quantity(25. * np.ones_like(obs_id), "minute").to("second")
    obs_table["LIVETIME"] = time_live

    # start time
    #  - random points between the start of 2010 and the end of 2014 (unless
    # otherwise specified)
    #  - using the start of 2010 as a reference time for the header of the table
    #  - observations restrict to night time (only if specified time interval is
    # more than 1 day)
    #  - considering start of astronomical day at midday: implicit in setting
    # the start of the night, when generating random night hours
    datestart = date_range[0]
    dateend = date_range[1]
    time_start = random_state.uniform(datestart.mjd, dateend.mjd, len(obs_id))
    time_start = Time(time_start, format="mjd", scale="utc")

    # check if time interval selected is more than 1 day
    if (dateend - datestart).jd > 1.:
        # keep only the integer part (i.e. the day, not the fraction of the day)
        time_start_f, time_start_i = np.modf(time_start.mjd)
        time_start = Time(time_start_i, format="mjd", scale="utc")

        # random generation of night hours: 6 h (from 22 h to 4 h), leaving 1/2 h
        # time for the last run to finish
        night_start = Quantity(22., "hour")
        night_duration = Quantity(5.5, "hour")
        hour_start = random_state.uniform(
            night_start.value, night_start.value + night_duration.value, len(obs_id)
        )
        hour_start = Quantity(hour_start, "hour")

        # add night hour to integer part of MJD
        time_start += hour_start

    if use_abs_time:
        # show the observation times in UTC
        time_start = Time(time_start.isot)
    else:
        # show the observation times in seconds after the reference
        time_start = time_relative_to_ref(time_start, header)
        # converting to quantity (better treatment of units)
        time_start = Quantity(time_start.sec, "second")

    obs_table["TSTART"] = time_start

    # stop time
    # calculated as TSTART + ONTIME
    if use_abs_time:
        time_stop = Time(obs_table["TSTART"])
        time_stop += TimeDelta(obs_table["ONTIME"])
    else:
        time_stop = TimeDelta(obs_table["TSTART"])
        time_stop += TimeDelta(obs_table["ONTIME"])
        # converting to quantity (better treatment of units)
        time_stop = Quantity(time_stop.sec, "second")

    obs_table["TSTOP"] = time_stop

    # az, alt
    # random points in a portion of sphere; default: above 45 deg altitude
    az, alt = sample_sphere(
        size=len(obs_id),
        lon_range=az_range,
        lat_range=alt_range,
        random_state=random_state,
    )
    az = Angle(az, "deg")
    alt = Angle(alt, "deg")
    obs_table["AZ"] = az
    obs_table["ALT"] = alt

    # RA, dec
    # derive from az, alt taking into account that alt, az represent the values
    # at the middle of the observation, i.e. at time_ref + (TIME_START + TIME_STOP)/2
    # (or better: time_ref + TIME_START + (TIME_OBSERVATION/2))
    # in use_abs_time mode, the time_ref should not be added, since it's already included
    # in TIME_START and TIME_STOP
    az = Angle(obs_table["AZ"])
    alt = Angle(obs_table["ALT"])
    if use_abs_time:
        obstime = Time(obs_table["TSTART"])
        obstime += TimeDelta(obs_table["ONTIME"]) / 2.
    else:
        obstime = time_ref_from_dict(obs_table.meta)
        obstime += TimeDelta(obs_table["TSTART"])
        obstime += TimeDelta(obs_table["ONTIME"]) / 2.
    location = observatory_locations[observatory_name]
    altaz_frame = AltAz(obstime=obstime, location=location)
    alt_az_coord = SkyCoord(az, alt, frame=altaz_frame)
    sky_coord = alt_az_coord.transform_to("icrs")
    obs_table["RA"] = sky_coord.ra
    obs_table["DEC"] = sky_coord.dec

    # positions

    # number of telescopes
    # random integers in a specified range; default: between 3 and 4
    n_tels = random_state.randint(n_tels_range[0], n_tels_range[1] + 1, len(obs_id))
    obs_table["N_TELS"] = n_tels

    # muon efficiency
    # random between 0.6 and 1.0
    muoneff = random_state.uniform(low=0.6, high=1.0, size=len(obs_id))
    obs_table["MUONEFF"] = muoneff

    return obs_table
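A hedged usage sketch with the function above in scope; the column selection is plain astropy Table indexing.

from astropy.coordinates import Angle
from astropy.time import Time

obs_table = make_test_observation_table(
    n_obs=5,
    alt_range=Angle([60, 90], "deg"),
    date_range=(Time("2012-01-01"), Time("2012-02-01")),
    random_state=42,
)
print(obs_table["OBS_ID", "TSTART", "TSTOP"])  # a 5-row random table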
Example #11
import os
import pickle
from modules.utils import get_min_and_max_mean
from datetime import timedelta
from astropy.time import TimeDelta
from modules.config import delay, bin_width

# delete if the folders do already have files
os.system('find output/ -type f -delete')

# opening the list of dates
with open('UT_data/listofdates.list', 'rb') as f:
    dates = pickle.load(f)

# we are interested in delays up to 5 hrs
delay = TimeDelta(timedelta(hours=delay)).to_value('jd')
dates.sort()
n_dates = str(len(dates))
no_plot = '-np --no-plot'
with_out = '-o --output'
spaces = '                                                             '
for i, date in enumerate(dates):
    print(spaces * 2 + '(' + str(i) + '/' + n_dates + ')')
    current_file1_path = './UT_data/file1/' + date + '.csv'
    current_file2_path = './UT_data/file2/' + date + '.csv'
    min_sampling_period, max_sampling_period = get_min_and_max_mean(
        current_file1_path, current_file2_path)
    print('doing corr for date:', date)
    print('bin =', bin_width, 'times max period')
    actual_bin = bin_width * max_sampling_period
    folder_2_output = './output/'
Example #12
import matplotlib.pyplot as plt

import astropy.units as u
from astropy.coordinates import SkyCoord
from astropy.time import TimeDelta

import sunpy.map
import sunpy.coordinates
from sunpy.io.special import srs
from sunpy.time import parse_time
from sunpy.net import Fido, attrs as a

##############################################################################
# For this example, we will search for and download a single HMI magnetogram using Fido.
start_time = parse_time("2017-01-25")
end_time = start_time + TimeDelta(23 * u.hour + 59 * u.minute + 59 * u.second)
results = Fido.search(
    a.Time(start_time, end_time),
    a.Instrument('HMI') & a.vso.Physobs("LOS_magnetic_field"),
    a.vso.Sample(60 * u.second))

##############################################################################
# Let's select only the first file, download it and create a map.
result = results[0, 0]
file_name = Fido.fetch(result)
smap = sunpy.map.Map(file_name)

##############################################################################
# Download the SRS file.
srs_results = Fido.search(a.Time(start_time, end_time),
                          a.Instrument('SRS_TABLE'))
Example #13
# wait for target to be available
while Time.now() < asteroid_main_observation.min_obs_time:
    wait_time_s = (asteroid_main_observation.min_obs_time-Time.now()).sec
    telescope.slackdebug('The observation (%s) will start in %d min (at %s)...' % (
        asteroid_main_observation.target.getName(), wait_time_s/60, asteroid_main_observation.min_obs_time.iso[:-7]))
    time.sleep(delay_time)

# start observations
telescope.slackdebug("Starting observations...")
telescope.crackit()

calibrationObservations()

# get images of target until target reaches max altitude, if it hasn't already
doCalibrationObservations = False
while Time.now() + TimeDelta(asteroid_calibration_observation_duration_s, format='sec') < asteroid_main_observation.max_alt_time:
    telescope.slackdebug("Performing main observations for %s..." %
                         (asteroid_main_observation.target.getName()))
    # check sun, clouds, slit, etc.
    telescope.checkSun()
    if telescope.is_cracked:
        telescope.checkSlit()
    telescope.checkAlt()
    telescope.checkClouds()
    telescope.getImageStacks(asteroid_main_observation, None, False)
    if asteroid_main_observation.sequence.do_pinpoint:
        telescope.pinpoint(asteroid_main_observation, False)
    doCalibrationObservations = True

if doCalibrationObservations:
    calibrationObservations()
Example #14
def test_insert_timedelta():
    tm = TimeDelta([1, 2], format='sec')

    # Insert a two-element TimeDelta at index 1
    tm2 = tm.insert(1, TimeDelta([10, 20], format='sec'))
    assert np.all(tm2 == TimeDelta([1, 10, 20, 2], format='sec'))
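Scalar inserts follow the same np.insert semantics; a sketch not taken from the test suite:

import numpy as np
from astropy.time import TimeDelta

tm = TimeDelta([1, 2], format='sec')
tm3 = tm.insert(1, TimeDelta(10, format='sec'))
assert np.all(tm3 == TimeDelta([1, 10, 2], format='sec'))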
Example #15
def get_time_window(tdate,timeDeltaDays):
    deltaDate = TimeDelta(timeDeltaDays)
    t1 = (tdate - deltaDate).iso.split(' ')[0].replace('-','')
    t2 = (tdate + deltaDate).iso.split(' ')[0].replace('-','')
    return t1,t2
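A hedged usage sketch; note that TimeDelta(timeDeltaDays) relies on a bare number being interpreted as Julian days (format 'jd') by default.

from astropy.time import Time

# a 3-day window around 2020-06-15 -> ('20200612', '20200618')
t1, t2 = get_time_window(Time('2020-06-15'), 3)
print(t1, t2)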
Example #16
def sunup(daterange):
    ''' Find out the times when the Sun is up for a given date range. 
           
        Returns: dictionary of time objects with keys 'taz', 'teq', each
                 being a two-element Time() object, where first time is
                 start (rise) and second is end (set)
    '''
    # Use the 24-h day specified by daterange (i.e. drop the time of day)
    mjd1 = int(daterange[0].mjd)
    mjd2 = int(daterange[1].mjd)
    aa = eovsa_cat.eovsa_array_with_cat()
    taz_rise = []
    teq_rise = []
    taz_set = []
    teq_set = []
    mjd_list = []
    for mjd in range(mjd1,mjd2+1):
        t = Time(mjd,format='mjd')
        # Add times in 1-min steps covering the full 24-h day
        ts = t + TimeDelta(np.arange(0.,24.,1./60.)/24.,format='jd')

        nt = len(ts)
        ha = np.zeros(nt)
        alt = np.zeros(nt)
        az = np.zeros(nt)
        lines = []
        for i in range(nt):
            aa.set_jultime(ts[i].jd)    
            lst = aa.sidereal_time()

            src = aa.cat['Sun']
            src.compute(aa)
            ha[i] = lst - src.ra
            alt[i] = src.alt
            az[i] = src.az

        ha_deg = deg(ha)
        alt = deg(alt)
        az_deg = deg2(az)
        iset_eq = np.where(np.abs(ha_deg - 55.0) < 0.15)[0][0]
        irise_eq = np.where(np.abs(ha_deg + 55.0) < 0.15)[0][0]
        trise_eq = ts[irise_eq]
        tset_eq = ts[iset_eq]
        if iset_eq < irise_eq:
            tset_eq += TimeDelta(1,format='jd')
        for iwindow in np.arange(0.1,0.2,0.01):
            try:
                idx, = np.where(np.abs(alt - 10.0) < iwindow)  #****Was 0.1
                i1 = idx[0]
                i2 = idx[1]
                if i2 == i1+1:
                    if len(idx) > 2:
                        i2 = idx[2]
                    else:
                        i2 = 1439  # Transition is at end
                break
            except IndexError:
                print('Window', iwindow, 'did not work.  Trying again')
        if alt[i1] < alt[i1+1]:
            irise_az = i1
            iset_az = i2
        else:
            irise_az = i2
            iset_az = i1
        trise_az = ts[irise_az]
        tset_az = ts[iset_az]
        if iset_az < irise_az:
            tset_az += TimeDelta(1,format='jd')
        taz_rise.append(trise_az.mjd)
        taz_set.append(tset_az.mjd)
        teq_rise.append(trise_eq.mjd)
        teq_set.append(tset_eq.mjd)
        mjd_list.append(mjd)
    return {'date':Time(mjd_list,format='mjd'),
            'taz_rise':Time(taz_rise,format='mjd'),'teq_rise':Time(teq_rise,format='mjd'),
             'taz_set':Time(taz_set, format='mjd'), 'teq_set':Time(teq_set, format='mjd')}
Example #17
    state = "None"
    if (psi < total):
        state = "Total"
    elif (psi < partial):
        state = "Partial"
    elif (psi < penumbral):
        state = "Penumbral"

    return state


solar_system_ephemeris.set('jpl')

t = Time("2019-01-21 0:00:00", scale="utc")
interval = TimeDelta(10.0, format='sec')
lightTime = TimeDelta(499, format='sec')
lastState = 'none'
for x in range(6 * 24 * 60 * 60):

    t = t + interval

    earth = get_body_barycentric('earth', t)
    moon = get_body_barycentric('moon', t)
    sun = get_body_barycentric('sun', t - lightTime)

    sun = vector(sun - earth)
    moon = vector(moon - earth)
    earth = earth - earth
    state = test(sun, moon)
Example #18
 def setup(self):
     # Identical to what is used in LeapSeconds.auto_open().
     self.good_enough = (iers.LeapSeconds._today()
                         + TimeDelta(180 - iers._none_to_float(iers.conf.auto_max_age),
                                     format='jd'))
     self._auto_open_files = iers.LeapSeconds._auto_open_files.copy()
Example #19
    def _start_observation_worker(self, obs_config, reload=True):
        """
        Start observation on worker node:

        #. Candidate clustering
        #. Extraction of filterbank data
        #. ML classification

        :param dict obs_config: Observation config
        :param bool reload: reload service settings (default: True)
        """
        self.logger.info("Starting observation on worker node")

        # reload config
        if reload:
            self.load_config()

        # create result dir if full processing is enabled
        if self.full_processing_enabled:
            try:
                util.makedirs(obs_config['result_dir'])
            except Exception as e:
                self.logger.error("Failed to create results directory")
                raise OfflineProcessingException("Failed to create result directory: {}".format(e))

        # TAB or IAB mode
        if obs_config['ntabs'] == 1:
            obs_config['mode'] = 'IAB'
            trigger_output_file = "{output_dir}/triggers/data/data_00_full.hdf5".format(**obs_config)
        else:
            obs_config['mode'] = 'TAB'
            trigger_output_file = "{output_dir}/triggers/data/data_full.hdf5".format(**obs_config)

        # wait until end time + delay
        start_processing_time = Time(obs_config['parset']['task.stopTime']) + TimeDelta(self.delay, format='sec')
        self.logger.info("Sleeping until {}".format(start_processing_time.iso))
        util.sleepuntil_utc(start_processing_time, event=self.stop_event)

        # fold pulsar if this is beam 0 and a test pulsar is being observed
        try:
            source = obs_config['parset']['task.source.name']
            ref_beam = int(obs_config['parset']['task.source.beam'])
            # Requires access to parset
            if source in self.test_pulsars and (obs_config['beam'] == ref_beam):
                self.logger.info("Test pulsar detected: {}".format(source))
                self._fold_pulsar(source, obs_config)
        except Exception as e:
            self.logger.error("Pulsar folding failed: {}".format(e))

        # run calibration tools if this is a calibrator scan
        # these have "drift" in the source name
        try:
            source = obs_config['parset']['task.source.name']
            if 'drift' in source:
                # split into actual source name and which beams were scanned
                name, beam_range = source.split('drift')
                # parse beam range, can be one beam or start/end beam
                if len(beam_range) == 2:
                    # one beam
                    drift_beams = [int(beam_range)]
                elif len(beam_range) == 4:
                    # start and end beam
                    sbeam = int(beam_range[:2])
                    ebeam = int(beam_range[2:])
                    drift_beams = range(sbeam, ebeam + 1)
                else:
                    self.logger.error("Failed to parse beam range for calibrator scan: {}".format(source))
                    drift_beams = []

                # run calibration tools if this is a calibrator scan of this beam
                if name in self.calibrators and (obs_config['beam'] in drift_beams):
                    self.logger.info("Calibrator scan through this beam detected: {}".format(source))
                    self._run_calibration_tools(name, obs_config)
        except Exception as e:
            self.logger.error("Calibration tools failed: {}".format(e))

        # if full processing is disabled, there is nothing more to do
        if not self.full_processing_enabled:
            self.logger.info("Full processing disabled - not running further offline processing")
            return

        # create trigger directory
        trigger_dir = "{output_dir}/triggers".format(**obs_config)
        try:
            util.makedirs(trigger_dir)
        except Exception as e:
            self.logger.error("Failed to create triggers directory")
            raise OfflineProcessingException("Failed to create triggers directory: {}".format(e))
        # change to trigger directory
        try:
            os.chdir(trigger_dir)
        except Exception as e:
            self.logger.error("Failed to cd to triggers directory {}: {}".format(trigger_dir, e))
        # create subdir
        try:
            util.makedirs('data')
        except Exception as e:
            self.logger.error("Failed to create triggers/data directory")
            raise OfflineProcessingException("Failed to create triggers/data directory: {}".format(e))

        # merge the trigger files
        self.logger.info("Merging raw trigger files")
        numcand_raw = self._merge_triggers(obs_config)

        # Run clustering for each SB in TAB mode if enabled
        if self.process_sb and obs_config['mode'] == 'TAB':
            self.logger.info("Clustering candidates in SB mode")
            tstart = Time.now()
            # spread the SBs over the allowed number of threads
            # output grouped pulses file will only be generated by thread including SB00
            chunks = np.array_split(range(obs_config['nsynbeams']), self.numthread)
            numcand_all = np.zeros(self.numthread)
            filterbank_prefix = "{output_dir}/filterbank/CB{beam:02d}".format(**obs_config)
            threads = []
            self.logger.info("Starting trigger clustering with {} threads".format(self.numthread))
            for ind, chunk in enumerate(chunks):
                # pick the SB range
                sbmin, sbmax = min(chunk), max(chunk)
                # create thread
                thread = threading.Thread(target=self._cluster, args=[obs_config, filterbank_prefix],
                                          kwargs={'out': numcand_all, 'sbmin': sbmin, 'sbmax': sbmax,
                                                  'ind': ind})
                thread.daemon = True
                threads.append(thread)
                thread.start()
            # wait until all are done
            for thread in threads:
                thread.join()
            # gather results
            if self.process_sb:
                # each element equal
                numcand_grouped = int(numcand_all[0])
            else:
                # each element is one TAB
                numcand_grouped = int(np.sum(numcand_all[numcand_all != -1]))

        # Run clustering for IAB / each TAB
        else:
            self.logger.info("Clustering candidates in {} mode".format(obs_config['mode']))
            tstart = Time.now()
            if obs_config['mode'] == 'IAB':
                filterbank_file = "{output_dir}/filterbank/CB{beam:02d}.fil".format(**obs_config)
                numcand_grouped = self._cluster(obs_config, 0, filterbank_file)
            elif obs_config['mode'] == 'TAB':
                numcand_all = np.zeros(obs_config['ntabs'])
                # max numthread tabs per run; so ntabs / numthread chunks
                n_chunk = int(np.ceil(obs_config['ntabs'] / float(self.numthread)))
                chunks = np.array_split(range(obs_config['ntabs']), n_chunk)
                self.logger.info("Starting trigger clustering with {} chunks ""of {} threads".format(n_chunk,
                                                                                                     self.numthread))
                # start the threads
                for tab_set in chunks:
                    threads = []
                    for tab in tab_set:
                        filterbank_file = "{output_dir}/filterbank/CB{beam:02d}_{tab:02d}.fil".format(tab=tab,
                                                                                                      **obs_config)
                        thread = threading.Thread(target=self._cluster, args=[obs_config, filterbank_file],
                                                  kwargs={'out': numcand_all, 'tab': tab})
                        thread.daemon = True
                        threads.append(thread)
                        thread.start()
                    # wait until all are done
                    for thread in threads:
                        thread.join()
                # gather results
                numcand_grouped = int(np.sum(numcand_all[numcand_all != -1]))
        tend = Time.now()
        self.logger.info("Trigger clustering took {}s".format((tend - tstart).sec))

        # Create one hdf5 file for entire CB
        if obs_config['mode'] == 'TAB':
            self.logger.info("Merging output HDF5 files")
            numcand_merged = self._merge_hdf5(obs_config, trigger_output_file)
        # TEMP so that IAB still works
        else:
            numcand_merged = 9999

        # Run classifier
        if numcand_merged != 0:
            self.logger.info("Classifying candidates")
            output_prefix = self._classify(obs_config, trigger_output_file)
        else:
            self.logger.info("No candidates post-merge. Not running classifier")
            output_prefix = ''

        # Merge PDFs
        if numcand_merged > 0:
            self.logger.info("Merging classifier output files")
            self._merge_plots(obs_config)
        else:
            self.logger.info("No candidates found post-classifier, not creating merged PDF")

        # Centralize results
        self.logger.info("Gathering results")

        kwargs = {'output_prefix': output_prefix, 'data_file': trigger_output_file, 'numcand_raw': numcand_raw,
                  'numcand_grouped': numcand_grouped}
        self._gather_results(obs_config, **kwargs)

        self.logger.info("Finished processing of observation {output_dir}".format(**obs_config))

        return
Example #20
def run_on_event(channel_id, bypass=False,
                 program_ids=[1,2], outdir="forcedphot"):

    thread_ts = time.time()

    #response = web_client.chat_postMessage(
    #    channel=channel_id,
    #    text='testing')

    if not bypass:
        try:
            conversations = web_client.conversations_list(
                channel=channel_id
            )
            channel_slack_id = channel_id
            for conversation in conversations:
                for chan in conversation.data["channels"]:
                    if chan["name"] == channel_id.replace("#",""):
                        channel_slack_id = chan["id"]
        
            delay_thresh = 15.0
        
            payload = web_client.conversations_history(
                channel=channel_slack_id,
                oldest=thread_ts-delay_thresh
            )
        except Exception:
            return

        if len(payload["messages"]) == 0:
            return
   
        doTrigger, trigger_action, name, ra, dec = False, 'forced', 'ZTF21abinaiu', 310.480103, 11.70041
        for mess in payload["messages"]:
            print(mess)
            message_ts = float(mess["ts"])
            if np.abs(message_ts - thread_ts) > delay_thresh:
                continue
            txt = mess['text']
            txtsplit = list(filter(None,txt.split(" ")))
            if len(txtsplit) == 0: continue
            if txtsplit[0] == "trigger":
                doTrigger = True
                if len(txtsplit) == 4:
                    name = txtsplit[1]
                    ra = float(txtsplit[2])
                    dec = float(txtsplit[3])
            user = mess['user']
        if not doTrigger:
            return
    else:
        user, message_ts = 'test', thread_ts
        # options: gp,rp,ip,zs,Y
        name, ra, dec = 'ZTF21abinaiu', 310.480103, 11.70041

    message = []
    message.append("Hi <@{0}>! You are interested in forced photometry, right? Let me get right on that for you.".format(user))
    message.append('Received request %.1f seconds ago...' % (np.abs(message_ts - thread_ts)))
    message.append("We are looking into %s for you" % name)

    web_client.chat_postMessage(
        channel=channel_id,
        text="\n".join(message)
    )

    tstart = Time.now() - TimeDelta(100*u.day)
    tend = Time.now()    

    fig = do_forcephot(outdir, name, ra, dec,
                       tstart.jd, tend.jd,
                       ncpu=1, limit=100, do_mcmc=True,
                       program_ids=program_ids, verbose=False)
    upload_fig(fig, user, "forced.png", channel_id)

    message = []
    message.append('Forced photometry complete')

    web_client.chat_postMessage(
        channel=channel_id,
        text="\n".join(message)
    )    
Example #21

        sys.exit(1)
    elif args.gpsstart is not None:
        try:
            starttime = Time(args.gpsstart, format='gps', scale='utc')
        except ValueError:
            Exception("Could not parse start GPS time: {}".format(
                args.gpsstart))
    else:
        try:
            starttime = Time(args.yearstart, format='decimalyear', scale='utc')
        except ValueError:
            Exception("Could not parse start year: {}".format(args.yearstart))

    # set the time step
    try:
        dt = TimeDelta(args.interval * 3600., format='sec')
    except ValueError:
        Exception("Could not parse time interval: {}".format(args.interval))

    # set the end time
    try:
        endtime = Time(starttime.decimalyear + args.nyears,
                       format='decimalyear',
                       scale='utc') + dt
    except ValueError:
        Exception("Could not parse total timespan")

    pos = []
    vel = []
    acc = []
Example #22
    def fetch(self, qres, path=None, error_callback=None, **kwargs):
        """
        Download a set of results.

        Parameters
        ----------
        qres : `~sunpy.net.dataretriever.QueryResponse`
            Results to download.

        Returns
        -------
        Results Object
        """

        urls = [qrblock.url for qrblock in qres]

        filenames = []
        local_filenames = []

        for i, [url, qre] in enumerate(zip(urls, qres)):
            name = url.split('/')[-1]

            # Temporary fix: all QRBs currently carry the same start_time values
            day = Time(qre.time.start.strftime('%Y-%m-%d')) + TimeDelta(i*u.day)

            if name not in filenames:
                filenames.append(name)

            if name.endswith('.gz'):
                local_filenames.append('{}SRS.txt'.format(day.strftime('%Y%m%d')))
            else:
                local_filenames.append(name)

        # Files to be actually downloaded
        paths = self._get_full_filenames(qres, filenames, path)

        # Those files that will be present after get returns
        local_paths = self._get_full_filenames(qres, local_filenames, path)

        # remove duplicate urls. This will make paths and urls to have same number of elements.
        # OrderedDict is required to maintain ordering because it will be zipped with paths later
        urls = list(OrderedDict.fromkeys(urls))

        dobj = Downloader(max_conn=5)

        for aurl, fname in zip(urls, paths):
            dobj.enqueue_file(aurl, filename=fname)

        paths = dobj.download()

        outfiles = []
        for fname, srs_filename in zip(local_paths, local_filenames):

            name = fname.name

            past_year = False
            for i, fname2 in enumerate(paths):
                fname2 = pathlib.Path(fname2)

                if fname2.name.endswith('.txt'):
                    continue

                year = fname2.name.split('_SRS')[0]

                if year in name:
                    TarFile = tarfile.open(fname2)
                    filepath = fname.parent
                    member = TarFile.getmember('SRS/' + srs_filename)
                    member.name = name
                    TarFile.extract(member, path=filepath)
                    TarFile.close()

                    outfiles.append(fname)

                    past_year = True
                    break

            if past_year is False:
                outfiles.append(fname)

        paths.data = list(map(str, outfiles))
        return paths
Example #23
File: cache.py  Project: zhims/sunpy
 def __init__(self, downloader, storage, cache_dir, expiry=10 * u.day):
     self._downloader = downloader
     self._storage = storage
     self._cache_dir = Path(cache_dir)
     self._expiry = expiry if expiry is None else TimeDelta(expiry)
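A minimal sketch of the expiry handling above: a Quantity such as 10 * u.day converts cleanly to a TimeDelta, while None disables expiry altogether.

import astropy.units as u
from astropy.time import TimeDelta

expiry = 10 * u.day
expiry = expiry if expiry is None else TimeDelta(expiry)
print(expiry.jd)   # 10.0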
Example #24
def reduce(telescope, obskey, tstart, tend, nchan, ngate, ntbin, ntw_min,
           rfi_filter_raw=None, fref=None, dedisperse=None,
           rfi_filter_power=None, do_waterfall=True, do_foldspec=True,
           verbose=True):

    comm = MPI.COMM_WORLD
    if dedisperse == 'None':
        dedisperse = None

    if verbose > 3 and comm.rank == 0:
        print(telescope, obskey, tstart, tend, nchan, ngate, ntbin, ntw_min,
              rfi_filter_raw, fref, dedisperse,
              do_waterfall, do_foldspec, verbose)

    if dedisperse is not None and fref is None:
        raise ValueError("Need to give reference frequency to dedisperse to")

    obs = obsdata()
    if verbose > 3 and comm.rank == 0:
        print(obs)
    # find nearest observation to 'date',
    # warning if > 1s off requested start time of observation
    if obskey not in obs[telescope]:
        # assume it is a date, which may be only good to nearest second
        obskey = obs[telescope].nearest_observation(obskey)
    # target of this observation
    psr = obs[telescope][obskey]['src']
    assert psr in obs['psrs'].keys()

    dm = obs['psrs'][psr]['dm']

    if verbose and comm.rank == 0:
        print("Attempting to open {0}: {1}".format(telescope, obskey))

    with obs[telescope].open(obskey, comm=comm) as fh:
        if verbose and comm.rank == 0:
            print("Opened files")

        # nchan = None means data is channelized already, so we get this
        # property directly from the file
        if nchan is None:
            nchan = fh.nchan
        # ensure requested number of channels is integer multiple of
        # existing channels
        if nchan % getattr(fh, 'nchan', 1) != 0:
            raise ValueError("Can only channelize data to an integer multiple "
                             "of the number of input channels (={0})."
                             .format(fh.nchan))

        time0 = fh.time0
        tstart = time0 if tstart is None else Time(tstart, scale='utc')
        if tstart < time0:
            raise ValueError("Cowardly refusing to analyse with requested "
                             "time {0} before start time {1}."
                             .format(tstart.iso, time0.iso))
        if tend is None:
            tend = obs[telescope][obskey]['tend']

        try:
            tend = Time(tend, scale='utc')
            dt = tend - tstart
        except ValueError:
            dt = TimeDelta(float(tend), format='sec')
            tend = tstart + dt

        if verbose and comm.rank == 0:
            print("Requested time span: {0} to {1}".format(tstart.isot,
                                                           tend.isot))

        phasepol = obs[telescope][obskey].get_phasepol(time0)
        nt = fh.ntimebins(tstart, tend)
        ntint = fh.ntint(nchan if fh.nchan == 1 else fh.nchan)
        # number of samples to combine for waterfall
        ntw = min(ntw_min, nt*ntint)
        # number of records to skip

        if verbose and comm.rank == 0:
            print("Using start time {0} and phase polynomial {1}"
                  .format(time0.isot, phasepol))

        fh.seek(tstart)
        if verbose and comm.rank == 0:
            print("Skipped {0} blocks and will fold {1} blocks to cover "
                  "time span {2} to {3}"
                  .format(fh.offset/fh.blocksize, nt, fh.time().isot,
                          fh.time(fh.offset + nt*fh.blocksize).isot))

        # set the default parameters to fold
        # Note, some parameters may be in fh's HDUs, or fh.__getitem__
        # but these are overwritten if explicitly specified in Folder
        folder = Folder(fh, nchan=nchan,
                        nt=nt, ntint=ntint, ngate=ngate,
                        ntbin=ntbin, ntw=ntw, dm=dm, fref=fref,
                        phasepol=phasepol, dedisperse=dedisperse,
                        do_waterfall=do_waterfall, do_foldspec=do_foldspec,
                        verbose=verbose, progress_interval=1,
                        rfi_filter_raw=rfi_filter_raw,
                        rfi_filter_power=rfi_filter_power)
        myfoldspec, myicount, mywaterfall = folder(fh, comm=comm)
    # end with

    print("Rank {0} exited with statement".format(comm.rank))

    savepref = "{0}{1}_{2}chan{3}ntbin".format(telescope, psr, nchan, ntbin)
    if do_waterfall:
        waterfall = np.zeros_like(mywaterfall)
        comm.Reduce(mywaterfall, waterfall, op=MPI.SUM, root=0)
        if comm.rank == 0:
            # waterfall = normalize_counts(waterfall)
            np.save("{0}waterfall_{1}+{2:08}sec.npy"
                    .format(savepref, tstart.isot, dt.sec), waterfall)

    if do_foldspec:
        foldspec = np.zeros_like(myfoldspec) if comm.rank == 0 else None
        print("Rank {0} is entering comm.Reduce".format(comm.rank))
        comm.Reduce(myfoldspec, foldspec, op=MPI.SUM, root=0)
        del myfoldspec  # save memory on node 0
        icount = np.zeros_like(myicount) if comm.rank == 0 else None
        comm.Reduce(myicount, icount, op=MPI.SUM, root=0)
        del myicount  # save memory on node 0
        if comm.rank == 0:
            fname = ("{0}foldspec_{1}+{2:08}sec.npy")
            np.save(fname.format(savepref, tstart.isot, dt.sec), foldspec)
            iname = ("{0}icount_{1}+{2:08}sec.npy")
            np.save(iname.format(savepref, tstart.isot, dt.sec), icount)

    if comm.rank == 0:
        if do_foldspec and foldspec.ndim == 3:
            # sum over time slices -> pulse profile vs channel
            foldspec1 = normalize_counts(foldspec.sum(0).astype(np.float64),
                                         icount.sum(0).astype(np.int64))
            # sum over channels -> pulse profile vs time
            foldspec3 = normalize_counts(foldspec.sum(1).astype(np.float64),
                                         icount.sum(1).astype(np.int64))
            fluxes = foldspec1.sum(axis=0)
            with open('{0}flux_{1}+{2:08}sec.dat'
                      .format(savepref, tstart.isot, dt.sec), 'w') as f:
                for i, flux in enumerate(fluxes):
                    f.write('{0:12d} {1:12.9g}\n'.format(i+1, flux))
            # ratio'd flux only if file will not be ridiculously large
            if ntbin*ngate < 10000:
                foldspec2 = normalize_counts(foldspec, icount)

        plots = True
        if plots and do_waterfall and waterfall.ndim == 2:  # no polarizations
            w = waterfall.copy()
            pmap('{0}waterfall_{1}+{2:08}sec.pgm'
                 .format(savepref, tstart.isot, dt.sec), w, 1, verbose=True)
        if plots and do_foldspec and foldspec.ndim == 3:
            pmap('{0}folded_{1}+{2:08}sec.pgm'
                 .format(savepref, tstart.isot, dt.sec), foldspec1, 0, verbose)
            pmap('{0}folded3_{1}+{2:08}sec.pgm'
                 .format(savepref, tstart.isot, dt.sec),
                 foldspec3.T, 0, verbose)
            # ratio'd flux only if file will not be ridiculously large
            if ntbin*ngate < 10000:
                pmap('{0}foldedbin_{1}+{2:08}sec.pgm'
                     .format(savepref, tstart.isot, dt.sec),
                     foldspec2.transpose(1,2,0).reshape(nchan,-1), 1, verbose)
Example #25
                                     np.mod(gmststart, 24.),
                                     np.mod(gmststop, 24.)))

            # get corresponding orbital phases for a range of dates
            #ist_date1 = '2013-06-16 12:00:00'
            #ist_date2 = '2013-07-03 12:00:00'
            ist_date1 = '2013-07-24 12:00:00'
            ist_date2 = '2013-08-08 12:00:00'
            ist_utc = 5.5 / 24.
            mjd1 = Time(ist_date1, scale='utc').mjd - ist_utc
            mjd2 = Time(ist_date2, scale='utc').mjd - ist_utc
            for mjd in np.arange(mjd1, mjd2 + 1.e-5):
                time = Time(mjd, format='mjd', scale='utc', precision=0)
                ut_start = gmst2time(gmststart, time)
                ut_stop = gmst2time(gmststop, time)
                ph_start, ph_stop = src.phase(ut_start), src.phase(ut_stop)
                ist_start = ut_start + TimeDelta(ist_utc, format='jd')
                ist_stop = ut_stop + TimeDelta(ist_utc, format='jd')
                print('{}-{}: {:4.2f}-{:4.2f}'.format(ist_start.iso,
                                                      ist_stop.iso[11:],
                                                      ph_start, ph_stop))

# 0834+06 before 1957+20
#
# 1133+16 before J1012+5207
#
#
# Need scintellation data for B1957, J1012
#
# LOFAR how high makes it useful? (elevation > 30?)
Example #26
    # possible offset
    nhead = recsize * 1000

    ntbin = 10  # number of bins the time series is split into for folding
    ntw = min(100000, nt * ntint)  # number of samples to combine for waterfall

    samplerate = 200 * u.MHz
    # account for offset, recalling there are 2 samples per byte
    phasepol.window += (nhead * 2 / samplerate).to(u.second).value

    fedge = 200. * u.MHz
    fedge_at_top = True

    fref = 150. * u.MHz  # ref. freq. for dispersion measure

    time0 = Time(date_dict[psr], scale='utc') + TimeDelta(4 * 3600,
                                                          format='sec')

    verbose = 'very'
    do_waterfall = True
    do_foldspec = True
    dedisperse = 'by-channel'

    #with open(file1, 'rb') as fh1:
    with multifile(seq_file, raw_files) as fh1:

        foldspec2, waterfall = fold(fh1,
                                    '4bit',
                                    samplerate,
                                    fedge,
                                    fedge_at_top,
                                    nchan,
Example #27
##############################################################################
# The data from the TimeSeries can be retrieved in a number of formats:
ts_goes.to_dataframe()
ts_goes.to_table()
ts_goes.to_array()
# Note: the array doesn't include the datetime index column.

##############################################################################
# Creating a TimeSeries from scratch can be done in a lot of ways, much like a
# Map.
# Input data can be in the form of a Pandas DataFrame (preferred), an astropy
# Table or a Numpy Array.
# To generate some data and the corresponding dates:
base = datetime.datetime.today()
dates = base - TimeDelta(np.arange(24 * 60) * u.minute)
intensity = np.sin(np.arange(0, 12 * np.pi, ((12 * np.pi) / (24 * 60))))
# Create the data DataFrame, header MetaDict and units OrderedDict
data = DataFrame(intensity, index=dates, columns=['intensity'])
units = OrderedDict([('intensity', u.W / u.m**2)])
meta = MetaDict({'key': 'value'})
# Create the time series
ts_custom = sunpy.timeseries.TimeSeries(data, meta, units)

# A more manual dataset would be a numpy array, which we can create using:
tm = Time(['2000:002', '2001:345', '2002:345'])
a = [1, 4, 5]
b = [2.0, 5.0, 8.2]
c = ['x', 'y', 'z']
arr = np.stack([tm, a, b, c], axis=1)
# Note: this array needs to have the times in the first column, this can be in
Example #28
def read_iris_spectrograph_level2_fits(filenames,
                                       spectral_windows=None,
                                       uncertainty=True,
                                       memmap=False):
    """
    Reads IRIS level 2 spectrograph FITS from an OBS into an IRISSpectrograph instance.

    Parameters
    ----------
    filenames: `list` of `str` or `str`
        Filename or list of filenames to be read.  They must all be associated
        with the same OBS number.

    spectral_windows: iterable of `str` or `str`
        Spectral windows to extract from files.  Default=None implies extract
        all spectral windows.

    Returns
    -------
    result: `irispy.spectrograph.IRISSpectrograph`

    """
    if type(filenames) is str:
        filenames = [filenames]
    for f, filename in enumerate(filenames):
        hdulist = fits.open(filename,
                            memmap=memmap,
                            do_not_scale_image_data=memmap)
        hdulist.verify('fix')
        if f == 0:
            # Determine number of raster positions in a scan
            raster_positions_per_scan = int(hdulist[0].header["NRASTERP"])
            # Collecting the window observations.
            windows_in_obs = np.array([
                hdulist[0].header["TDESC{0}".format(i)]
                for i in range(1, hdulist[0].header["NWIN"] + 1)
            ])
            # If spectral_window is not set then get every window.
            # Else take the appropriate windows
            if not spectral_windows:
                spectral_windows_req = windows_in_obs
                window_fits_indices = range(1, len(hdulist) - 2)
            else:
                if type(spectral_windows) is str:
                    spectral_windows_req = [spectral_windows]
                else:
                    spectral_windows_req = spectral_windows
                spectral_windows_req = np.asarray(spectral_windows_req,
                                                  dtype="U")
                window_is_in_obs = np.asarray([
                    window in windows_in_obs for window in spectral_windows_req
                ])
                if not all(window_is_in_obs):
                    missing_windows = ~window_is_in_obs
                    raise ValueError(
                        "Spectral windows {0} not in file {1}".format(
                            spectral_windows[missing_windows], filenames[0]))
                window_fits_indices = np.nonzero(
                    np.in1d(windows_in_obs, spectral_windows))[0] + 1
            # Generate top level meta dictionary from first file
            # main header.
            top_meta = {
                "TELESCOP": hdulist[0].header["TELESCOP"],
                "INSTRUME": hdulist[0].header["INSTRUME"],
                "DATA_LEV": hdulist[0].header["DATA_LEV"],
                "OBSID": hdulist[0].header["OBSID"],
                "OBS_DESC": hdulist[0].header["OBS_DESC"],
                "STARTOBS": Time(hdulist[0].header["STARTOBS"]),
                "ENDOBS": Time(hdulist[0].header["ENDOBS"]),
                "SAT_ROT": hdulist[0].header["SAT_ROT"] * u.deg,
                "AECNOBS": int(hdulist[0].header["AECNOBS"]),
                "FOVX": hdulist[0].header["FOVX"] * u.arcsec,
                "FOVY": hdulist[0].header["FOVY"] * u.arcsec,
                "SUMSPTRN": hdulist[0].header["SUMSPTRN"],
                "SUMSPTRF": hdulist[0].header["SUMSPTRF"],
                "SUMSPAT": hdulist[0].header["SUMSPAT"],
                "NEXPOBS": hdulist[0].header["NEXPOBS"],
                "NRASTERP": hdulist[0].header["NRASTERP"],
                "KEYWDDOC": hdulist[0].header["KEYWDDOC"]
            }
            # Initialize meta dictionary for each spectral_window
            window_metas = {}
            for i, window_name in enumerate(spectral_windows_req):
                if "FUV" in hdulist[0].header["TDET{0}".format(
                        window_fits_indices[i])]:
                    spectral_summing = hdulist[0].header["SUMSPTRF"]
                else:
                    spectral_summing = hdulist[0].header["SUMSPTRN"]
                window_metas[window_name] = {
                    "detector type": hdulist[0].header["TDET{0}".format(window_fits_indices[i])],
                    "spectral window": hdulist[0].header["TDESC{0}".format(window_fits_indices[i])],
                    "brightest wavelength": hdulist[0].header["TWAVE{0}".format(window_fits_indices[i])],
                    "min wavelength": hdulist[0].header["TWMIN{0}".format(window_fits_indices[i])],
                    "max wavelength": hdulist[0].header["TWMAX{0}".format(window_fits_indices[i])],
                    "SAT_ROT": hdulist[0].header["SAT_ROT"],
                    "spatial summing": hdulist[0].header["SUMSPAT"],
                    "spectral summing": spectral_summing
                }
            # Create an empty list for every spectral window; each spectral
            # window is a key in the dictionary.
            data_dict = dict([(window_name, list())
                              for window_name in spectral_windows_req])
        # Determine extra coords for this raster.
        times = (Time(hdulist[0].header["STARTOBS"]) + TimeDelta(
            hdulist[-2].data[:, hdulist[-2].header["TIME"]], format='sec'))
        raster_positions = np.arange(int(hdulist[0].header["NRASTERP"]))
        pztx = hdulist[-2].data[:, hdulist[-2].header["PZTX"]] * u.arcsec
        pzty = hdulist[-2].data[:, hdulist[-2].header["PZTY"]] * u.arcsec
        xcenix = hdulist[-2].data[:, hdulist[-2].header["XCENIX"]] * u.arcsec
        ycenix = hdulist[-2].data[:, hdulist[-2].header["YCENIX"]] * u.arcsec
        obs_vrix = hdulist[-2].data[:, hdulist[-2].header["OBS_VRIX"]] * u.m / u.s
        ophaseix = hdulist[-2].data[:, hdulist[-2].header["OPHASEIX"]]
        exposure_times_fuv = hdulist[-2].data[:, hdulist[-2].header["EXPTIMEF"]] * u.s
        exposure_times_nuv = hdulist[-2].data[:, hdulist[-2].header["EXPTIMEN"]] * u.s
        # If OBS is raster, include raster positions.  Otherwise don't.
        if top_meta["NRASTERP"] > 1:
            general_extra_coords = [("time", 0, times),
                                    ("raster position", 0,
                                     np.arange(top_meta["NRASTERP"])),
                                    ("pztx", 0, pztx), ("pzty", 0, pzty),
                                    ("xcenix", 0, xcenix),
                                    ("ycenix", 0, ycenix),
                                    ("obs_vrix", 0, obs_vrix),
                                    ("ophaseix", 0, ophaseix)]
        else:
            general_extra_coords = [("time", 0, times), ("pztx", 0, pztx),
                                    ("pzty", 0, pzty), ("xcenix", 0, xcenix),
                                    ("ycenix", 0, ycenix),
                                    ("obs_vrix", 0, obs_vrix),
                                    ("ophaseix", 0, ophaseix)]
        for i, window_name in enumerate(spectral_windows_req):
            # Determine values of properties dependent on detector type.
            if "FUV" in hdulist[0].header["TDET{0}".format(
                    window_fits_indices[i])]:
                exposure_times = exposure_times_fuv
                DN_unit = iris_tools.DN_UNIT["FUV"]
                readout_noise = iris_tools.READOUT_NOISE["FUV"]
            elif "NUV" in hdulist[0].header["TDET{0}".format(
                    window_fits_indices[i])]:
                exposure_times = exposure_times_nuv
                DN_unit = iris_tools.DN_UNIT["NUV"]
                readout_noise = iris_tools.READOUT_NOISE["NUV"]
            else:
                raise ValueError(
                    "Detector type in FITS header not recognized.")
            # Derive WCS, data and mask for NDCube from file.
            # Sit-and-stare have a CDELT of 0 which causes issues in astropy WCS.
            # In this case, set CDELT to a tiny non-zero number.
            if hdulist[window_fits_indices[i]].header["CDELT3"] == 0:
                hdulist[window_fits_indices[i]].header["CDELT3"] = 1e-10
            wcs_ = WCS(hdulist[window_fits_indices[i]].header)
            if not memmap:
                data_mask = hdulist[window_fits_indices[i]].data == -200.
            else:
                data_mask = None
            # Derive extra coords for this spectral window.
            window_extra_coords = copy.deepcopy(general_extra_coords)
            window_extra_coords.append(("exposure time", 0, exposure_times))
            # Collect metadata relevant to single files.
            try:
                date_obs = Time(hdulist[0].header["DATE_OBS"])
            except ValueError:
                date_obs = None
            try:
                date_end = Time(hdulist[0].header["DATE_END"])
            except ValueError:
                date_end = None
            single_file_meta = {
                "SAT_ROT": hdulist[0].header["SAT_ROT"] * u.deg,
                "DATE_OBS": date_obs,
                "DATE_END": date_end,
                "HLZ": bool(int(hdulist[0].header["HLZ"])),
                "SAA": bool(int(hdulist[0].header["SAA"])),
                "DSUN_OBS": hdulist[0].header["DSUN_OBS"] * u.m,
                "IAECEVFL": hdulist[0].header["IAECEVFL"],
                "IAECFLAG": hdulist[0].header["IAECFLAG"],
                "IAECFLFL": hdulist[0].header["IAECFLFL"],
                "KEYWDDOC": hdulist[0].header["KEYWDDOC"],
                "detector type": hdulist[0].header["TDET{0}".format(window_fits_indices[i])],
                "spectral window": window_name,
                "OBSID": hdulist[0].header["OBSID"],
                "OBS_DESC": hdulist[0].header["OBS_DESC"],
                "STARTOBS": Time(hdulist[0].header["STARTOBS"]),
                "ENDOBS": Time(hdulist[0].header["ENDOBS"])
            }
            # Derive uncertainty of data
            if uncertainty:
                out_uncertainty = u.Quantity(
                    np.sqrt((hdulist[window_fits_indices[i]].data *
                             DN_unit).to(u.photon).value +
                            readout_noise.to(u.photon).value**2),
                    unit=u.photon).to(DN_unit).value
            else:
                out_uncertainty = None
            # Append the NDCube instance to the list for the corresponding window key.
            data_dict[window_name].append(
                IRISSpectrogramCube(hdulist[window_fits_indices[i]].data,
                                    wcs_,
                                    out_uncertainty,
                                    DN_unit,
                                    single_file_meta,
                                    window_extra_coords,
                                    mask=data_mask))
        hdulist.close()
    # Construct dictionary of IRISSpectrogramCubeSequences for spectral windows
    data = dict([(window_name,
                  IRISSpectrogramCubeSequence(data_dict[window_name],
                                              window_metas[window_name],
                                              common_axis=0))
                 for window_name in spectral_windows_req])
    # Initialize an IRISSpectrograph object.
    return IRISSpectrograph(data, meta=top_meta)
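
# Usage sketch -- the file name and spectral window below are hypothetical
# placeholders, shown only to illustrate the call:
#
#     >>> sp = read_iris_spectrograph_level2_fits("iris_l2_raster.fits",
#     ...                                         spectral_windows="Mg II k 2796")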
Example #29
    "%Y-%b-%d",  # Example 2007-May-04
    "%Y-%m-%d",  # Example 2007-05-04
    "%Y/%m/%d",  # Example 2007/05/04
    "%d-%b-%Y",  # Example 04-May-2007
    "%d-%b-%Y %H:%M:%S.%f",  # Example 04-May-2007 21:08:12.999999
    "%Y%m%d_%H%M%S",  # Example 20070504_210812
    "%Y:%j:%H:%M:%S",  # Example 2012:124:21:08:12
    "%Y:%j:%H:%M:%S.%f",  # Example 2012:124:21:08:12.999999
    "%Y%m%d%H%M%S",  # Example 20140101000001 (JSOC/VSO Export/Downloads)
    "%Y.%m.%d_%H:%M:%S_TAI",  # Example 2016.05.04_21:08:12_TAI - JSOC
    "%Y.%m.%d_%H:%M:%S_UTC",  # Example 2016.05.04_21:08:12_UTC - JSOC
    "%Y.%m.%d_%H:%M:%S",  # Example 2016.05.04_21:08:12 - JSOC
    "%Y/%m/%dT%H:%M",  # Example 2007/05/04T21:08
]
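
# A quick illustration of how these format strings are used: each pattern can
# be tried in turn with `datetime.datetime.strptime` until one matches (the
# month-name form assumes an English locale):
#
#     >>> import datetime
#     >>> datetime.datetime.strptime("2007-May-04", "%Y-%b-%d")
#     datetime.datetime(2007, 5, 4, 0, 0)
#     >>> datetime.datetime.strptime("2012:124:21:08:12", "%Y:%j:%H:%M:%S")
#     datetime.datetime(2012, 5, 3, 21, 8, 12)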

_ONE_DAY_TIMEDELTA = TimeDelta(1 * u.day)


def is_time_equal(t1, t2):
    """
    Workaround for https://github.com/astropy/astropy/issues/6970.

    Remove the usage of this function once the fix is in place.
    """
    return abs(t1 - t2) < 1 * u.nanosecond
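
# Illustration: two times closer together than a nanosecond compare as equal:
#
#     >>> t_a = Time('2020-01-01T00:00:00')
#     >>> is_time_equal(t_a, t_a + TimeDelta(1e-13, format='sec'))
#     True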


def _group_or_none(match, group, fun):
    # Minimal completion of this truncated helper (the original body is cut
    # off here): apply `fun` to the named regex group, or return None if the
    # group is missing or empty.
    try:
        return fun(match.group(group))
    except (IndexError, TypeError):
        return None
Example #30
def test_absolute_times(data, timedelta):

    # Make sure that we handle absolute times correctly. We also check that
    # TimeDelta works properly when timedelta is True.

    # The example data uses relative times
    t, y, dy, params = data

    # FIXME: There seems to be a numerical stability issue in that if we run
    # the algorithm with the same values but offset in time, the transit_time
    # is not offset by a fixed amount. To avoid this issue in this test, we
    # make sure the first time is also the smallest so that internally the
    # values of the relative time should be the same.
    t[0] = 0.

    # Add units
    t = t * u.day
    y = y * u.mag
    dy = dy * u.mag

    # We now construct a set of absolute times but keeping the rest the same.
    start = Time('2019-05-04T12:34:56')
    trel = TimeDelta(t) if timedelta else t
    t = trel + start

    # and we set up two instances of BoxLeastSquares, one with absolute and one
    # with relative times.
    bls1 = BoxLeastSquares(t, y, dy)
    bls2 = BoxLeastSquares(trel, y, dy)

    results1 = bls1.autopower(0.16 * u.day)
    results2 = bls2.autopower(0.16 * u.day)

    # All the results should match except transit time which should be
    # absolute instead of relative in the first case.

    for key in results1:
        if key == 'transit_time':
            assert_quantity_allclose((results1[key] - start).to(u.day), results2[key])
        elif key == 'objective':
            assert results1[key] == results2[key]
        else:
            assert_allclose(results1[key], results2[key])

    # Check that model evaluation works fine

    model1 = bls1.model(t, 0.2 * u.day, 0.05 * u.day, Time('2019-06-04T12:34:56'))
    model2 = bls2.model(trel, 0.2 * u.day, 0.05 * u.day, TimeDelta(1 * u.day))
    assert_quantity_allclose(model1, model2)

    # Check model validation

    with pytest.raises(TypeError) as exc:
        bls1.model(t, 0.2 * u.day, 0.05 * u.day, 1 * u.day)
    assert exc.value.args[0] == ('transit_time was provided as a relative time '
                                 'but the BoxLeastSquares class was initialized '
                                 'with absolute times.')

    with pytest.raises(TypeError) as exc:
        bls1.model(trel, 0.2 * u.day, 0.05 * u.day, Time('2019-06-04T12:34:56'))
    assert exc.value.args[0] == ('t_model was provided as a relative time '
                                 'but the BoxLeastSquares class was initialized '
                                 'with absolute times.')

    with pytest.raises(TypeError) as exc:
        bls2.model(trel, 0.2 * u.day, 0.05 * u.day, Time('2019-06-04T12:34:56'))
    assert exc.value.args[0] == ('transit_time was provided as an absolute time '
                                 'but the BoxLeastSquares class was initialized '
                                 'with relative times.')

    with pytest.raises(TypeError) as exc:
        bls2.model(t, 0.2 * u.day, 0.05 * u.day, 1 * u.day)
    assert exc.value.args[0] == ('t_model was provided as an absolute time '
                                 'but the BoxLeastSquares class was initialized '
                                 'with relative times.')

    # Check compute_stats

    stats1 = bls1.compute_stats(0.2 * u.day, 0.05 * u.day, Time('2019-06-04T12:34:56'))
    stats2 = bls2.compute_stats(0.2 * u.day, 0.05 * u.day, 1 * u.day)

    for key in stats1:
        if key == 'transit_times':
            assert_quantity_allclose((stats1[key] - start).to(u.day), stats2[key], atol=1e-10 * u.day)
        elif key.startswith('depth'):
            for value1, value2 in zip(stats1[key], stats2[key]):
                assert_quantity_allclose(value1, value2)
        else:
            assert_allclose(stats1[key], stats2[key])

    # Check compute_stats validation

    with pytest.raises(TypeError) as exc:
        bls1.compute_stats(0.2 * u.day, 0.05 * u.day, 1 * u.day)
    assert exc.value.args[0] == ('transit_time was provided as a relative time '
                                 'but the BoxLeastSquares class was initialized '
                                 'with absolute times.')

    with pytest.raises(TypeError) as exc:
        bls2.compute_stats(0.2 * u.day, 0.05 * u.day, Time('2019-06-04T12:34:56'))
    assert exc.value.args[0] == ('transit_time was provided as an absolute time '
                                 'but the BoxLeastSquares class was initialized '
                                 'with relative times.')

    # Check transit_mask

    mask1 = bls1.transit_mask(t, 0.2 * u.day, 0.05 * u.day, Time('2019-06-04T12:34:56'))
    mask2 = bls2.transit_mask(trel, 0.2 * u.day, 0.05 * u.day, 1 * u.day)

    assert_equal(mask1, mask2)

    # Check transit_mask validation

    with pytest.raises(TypeError) as exc:
        bls1.transit_mask(t, 0.2 * u.day, 0.05 * u.day, 1 * u.day)
    assert exc.value.args[0] == ('transit_time was provided as a relative time '
                                 'but the BoxLeastSquares class was initialized '
                                 'with absolute times.')

    with pytest.raises(TypeError) as exc:
        bls1.transit_mask(trel, 0.2 * u.day, 0.05 * u.day, Time('2019-06-04T12:34:56'))
    assert exc.value.args[0] == ('t was provided as a relative time '
                                 'but the BoxLeastSquares class was initialized '
                                 'with absolute times.')

    with pytest.raises(TypeError) as exc:
        bls2.transit_mask(trel, 0.2 * u.day, 0.05 * u.day, Time('2019-06-04T12:34:56'))
    assert exc.value.args[0] == ('transit_time was provided as an absolute time '
                                 'but the BoxLeastSquares class was initialized '
                                 'with relative times.')

    with pytest.raises(TypeError) as exc:
        bls2.transit_mask(t, 0.2 * u.day, 0.05 * u.day, 1 * u.day)
    assert exc.value.args[0] == ('t was provided as an absolute time '
                                 'but the BoxLeastSquares class was initialized '
                                 'with relative times.')
Example #31
    def track(self,
              target_or_ra,
              dec=None,
              duration=0,
              start_time=None,
              verbose=True):
        """
        Given a target name and a tracking duration in seconds, start tracking
        the source.  If the 'dec' keyword is not None, the target is interpreted
        to be an RA.  If the duration is less than or equal to zero, the default
        of 12 hours is used.
        
        .. note:: The tracking can be canceled at any time with a control-C.
        """

        # Force to string
        target_or_ra = str(target_or_ra)
        if dec is not None:
            dec = str(dec)

        # Load in where we are
        obs = EarthLocation.from_geocentric(
            *self.control_instance.station.ecef, unit=u.m)

        # Resolve the name into coordinates
        if target_or_ra.lower() in solar_system_ephemeris.bodies:
            if target_or_ra.lower().startswith('earth'):
                raise ValueError(f"Invalid target: '{target_or_ra}'")

            sc = get_body(target_or_ra.lower(), Time.now(), location=obs)
            if verbose:
                print(f"Resolved '{target_or_ra}' to {target_or_ra.lower()}")
        else:
            if dec is not None:
                ra = Angle(target_or_ra, unit='hourangle')
                dec = Angle(dec, unit='deg')
                sc = SkyCoord(ra, dec, frame='fk5')
                if verbose:
                    print(
                        f"Resolved '{target_or_ra}, {dec}' to RA {sc.ra}, Dec. {sc.dec}"
                    )
            else:
                sc = SkyCoord.from_name(target_or_ra)
                if verbose:
                    print(
                        f"Resolved '{target_or_ra}' to RA {sc.ra}, Dec. {sc.dec}"
                    )

        # Figure out the duration of the tracking
        if duration <= 0:
            duration = 86400.0 / 2

        # Set the pointing update time offset - this is half the update interval
        # so that we should end up with a nice scalloped pattern
        puto = TimeDelta(min([duration, self.update_interval]) / 2.0,
                         format='sec')

        # Set the tracking stop time
        if start_time is None:
            start_time = time.time()
        t_stop = start_time + duration

        try:
            # Go!
            t = time.time()
            while t < t_stop:
                ## Get a time marker so that we can account for how long the
                ## update takes
                t_mark = time.time()

                ## Figure out where the target will be in puto seconds
                aa = sc.transform_to(
                    AltAz(obstime=Time.now() + puto, location=obs))
                az = aa.az.deg
                alt = aa.alt.deg
                if verbose:
                    print(
                        f"At {time.time():.1f}, moving to azimuth {aa.az}, altitude {aa.alt}"
                    )

                ## Point
                self.control_instance.set_beam_pointing(az,
                                                        alt,
                                                        degrees=True,
                                                        load_time=t_mark + 2)

                ## Find how much time we used and when we should sleep until
                t_used = time.time() - t_mark
                t_sleep_until = t + self.update_interval - t_used

                ## Sleep to wait it out in increments of 0.01 s so that a control-C
                ## doesn't take forever to register
                while t < t_sleep_until and t < t_stop:
                    time.sleep(0.01)
                    t = time.time()

        except KeyboardInterrupt:
            # We gave up, end it now
            pass
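
# Usage sketch (assuming `session` is an instance of the surrounding class,
# whose name is not shown in this excerpt): track the Sun for one hour.
#
#     >>> session.track('sun', duration=3600.0)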
Example #32
##############################################################################
# The data from the TimeSeries can be retrieved in a number of formats:

ts_goes.to_dataframe()
ts_goes.to_table()
ts_goes.to_array()

##############################################################################
# Creating a TimeSeries from scratch can be done in a lot of ways, much like a
# Map.
# Input data can be in the form of a Pandas DataFrame (preferred), an astropy
# table or a numpy array.
# To generate some data and the corresponding dates:

base = datetime.datetime.today()
dates = Time(base) - TimeDelta(np.arange(24 * 60)*u.minute)
intensity = np.sin(np.arange(0, 12 * np.pi, ((12 * np.pi) / (24 * 60))))
# Create the data DataFrame, header MetaDict and units OrderedDict
data = DataFrame(intensity, index=dates, columns=['intensity'])
units = OrderedDict([('intensity', u.W / u.m**2)])
meta = MetaDict({'key': 'value'})
# Create the time series
ts_custom = sunpy.timeseries.TimeSeries(data, meta, units)
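
##############################################################################
# The custom TimeSeries can be checked with a quick-look plot (a minimal
# sketch; assumes matplotlib is available):

ts_custom.peek()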

# A more manual dataset would be a numpy array, which we can create using:
tm = Time(['2000:002', '2001:345', '2002:345'])
a = [1, 4, 5]
b = [2.0, 5.0, 8.2]
c = ['x', 'y', 'z']
arr = np.stack([tm, a, b, c], axis=1)
# Note: this array needs to have the times in the first column, this can be in
# any form that can be converted using astropy.time.Time().
Example #33
def get_start_time(ra0_deg, length_sec):
    """Returns optimal start time for field RA and observation length."""
    t = Time('2000-01-01 00:00:00', scale='utc', location=('116.764d', '0d'))
    dt_hours = 24.0 - t.sidereal_time('apparent').hour + (ra0_deg / 15.0)
    start = t + TimeDelta(dt_hours * 3600.0 - length_sec / 2.0, format='sec')
    return start.value
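
# Usage sketch: a start time that centres a 1-hour observation on the
# meridian crossing of a field at RA = 60 deg (values purely illustrative):
#
#     >>> get_start_time(ra0_deg=60.0, length_sec=3600.0)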
Example #34
def whenup(date=None,verbose=False):
    ''' Find out the times when preferred sources are up for a given date. 
        Prints the result of source rise and set times for preferred sources:
        Sun, 0319+415, 1229+020, 1331+305, 2136+006, 2253+161
        
        Optional arguments:
           date: Time() object giving start date (time of day is ignored) 
                 or None for current date.
           verbose: If True, actually print the table of times to the screen
           
        Returns: dictionary of time objects with keys 'taz', 'teq', 'tgap', each
                 being a two-element Time() object, where first time is
                 start (rise) and second is end (set), and 'lines', which is a
                 printable table of times.
    '''
    srclist = ['Sun     ', '0319+415', '1229+020', '1331+305', '2136+006', '2253+161']

    # Use the 24-h day specified by date (i.e. drop the time of day)
    if date is None:
        # Use today's date
        mjd = int(Time.now().mjd)
    else:
        mjd = int(date.mjd)
    t = Time(mjd,format='mjd')

    # Add times in 1-min steps covering the full 24 hours
    ts = t + TimeDelta(np.arange(0.,24.,1./60.)/24.,format='jd')

    aa = eovsa_cat.eovsa_array_with_cat()

    nt = len(ts)
    ra = np.zeros((len(srclist),nt))
    ha = np.zeros((len(srclist),nt))
    dec = np.zeros((len(srclist),nt))
    alt = np.zeros((len(srclist),nt))
    az = np.zeros((len(srclist),nt))
    taz = []
    teq = []
    tgap = []
    lines = []
    for i in range(nt):
        aa.set_jultime(ts[i].jd)    
        lst = aa.sidereal_time()

        for j,srcname in enumerate(srclist):
            src = aa.cat[srcname.split()[0]]
            src.compute(aa)
            ra[j,i] = src.ra
            ha[j,i] = lst - src.ra
            dec[j,i] = src.dec
            alt[j,i] = src.alt
            az[j,i] = src.az

    ra_deg = deg(ra)
    ha_deg = deg(ha)
    dec_deg = deg(dec)
    alt = deg(alt)
    az_deg = deg2(az)
    lines.append('Source     Alt-Az    Equatorial     Gap')
    lines.append(' Name    Rise   Set  Rise   Set  Start  End')
    lines.append('-------- ----- ----- ----- ----- ----- -----')
    for j in range(len(srclist)):
        iset_eq = np.where(np.abs(ha_deg[j] - 55.0) < 0.15)[0][0]
        irise_eq = np.where(np.abs(ha_deg[j] + 55.0) < 0.15)[0][0]
        trise_eq = ts[irise_eq]
        tset_eq = ts[iset_eq]
        if iset_eq < irise_eq:
            if j == 0:
                tset_eq += TimeDelta(1,format='jd')
            else:
                tset_eq += TimeDelta(1436./60./24.,format='jd')
        for iwindow in np.arange(0.1,0.2,0.01):
            try:
                i1 = np.where(np.abs(alt[j] - 10.0) < iwindow)[0][0]
                i2 = np.where(np.abs(alt[j] - 10.0) < iwindow)[0][1]
                if i2 == i1+1:
                    i2 = np.where(np.abs(alt[j] - 10.0) < iwindow)[0][2]
                break
            except IndexError:
                print('Window', iwindow, 'did not work.  Trying again')
        if alt[j,i1] < alt[j,i1+1]:
            irise_az = i1
            iset_az = i2
        else:
            irise_az = i2
            iset_az = i1
        trise_az = ts[irise_az]
        tset_az = ts[iset_az]
        if iset_az < irise_az:
            if j == 0:
                tset_az += TimeDelta(1,format='jd')
            else:
                tset_az += TimeDelta(1436./60./24.,format='jd')
        if j == 1:
            up, = np.where(alt[j] > 10.)
            j1 = np.where(np.abs(az_deg[j,up] - 35.0) < 1.0)[0][0]
            j2 = np.where(np.abs(az_deg[j,up] - 325.0) < 1.0)[0][0]
            jset = up[j1]
            jrise = up[j2]
            tset_gap = ts[jset]
            trise_gap = ts[jrise]
            if jrise < jset:
                trise_gap += TimeDelta(1436./60./24.,format='jd')
            taz.append(Time([trise_az.iso,tset_az.iso]))
            teq.append(Time([trise_eq.iso,tset_eq.iso]))
            tgap.append(Time([tset_gap.iso,trise_gap.iso]))
            lines.append('{0} {1} {2} {3} {4} {5} {6}'.format(srclist[j], trise_az.iso[11:16], tset_az.iso[11:16], trise_eq.iso[11:16], tset_eq.iso[11:16], tset_gap.iso[11:16], trise_gap.iso[11:16]))
        else:
            taz.append(Time([trise_az.iso,tset_az.iso]))
            teq.append(Time([trise_eq.iso,tset_eq.iso]))
            tgap.append((None,None))
            lines.append('{0} {1} {2} {3} {4} {5} {6}'.format(srclist[j], trise_az.iso[11:16], tset_az.iso[11:16], trise_eq.iso[11:16], tset_eq.iso[11:16], ' --- ',' --- '))
    if verbose:
        for line in lines:
            print(line)
    return {'source':srclist,'taz':taz, 'teq':teq, 'tgap':tgap, 'lines':lines}
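
# Usage sketch: print the rise/set table for the current date and inspect the
# alt-az rise/set window for the first source programmatically:
#
#     >>> out = whenup(verbose=True)
#     >>> out['taz'][0]   # two-element Time: [rise, set] for the Sun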
Example #35
import os

import numpy as np
from astropy.io import fits as pyfits

# Note: `mylog` is assumed to be the package's module-level logger; its
# import is not shown in this excerpt.
def write_event_file(events, parameters, filename, overwrite=False):
    from astropy.time import Time, TimeDelta
    mylog.info("Writing events to file %s." % filename)

    t_begin = Time.now()
    dt = TimeDelta(parameters["exposure_time"], format='sec')
    t_end = t_begin + dt

    col_x = pyfits.Column(name='X',
                          format='D',
                          unit='pixel',
                          array=events["xpix"])
    col_y = pyfits.Column(name='Y',
                          format='D',
                          unit='pixel',
                          array=events["ypix"])
    col_e = pyfits.Column(name='ENERGY',
                          format='E',
                          unit='eV',
                          array=events["energy"] * 1000.)
    col_dx = pyfits.Column(name='DETX',
                           format='D',
                           unit='pixel',
                           array=events["detx"])
    col_dy = pyfits.Column(name='DETY',
                           format='D',
                           unit='pixel',
                           array=events["dety"])
    col_id = pyfits.Column(name='CCD_ID',
                           format='D',
                           unit='pixel',
                           array=events["ccd_id"])

    chantype = parameters["channel_type"]
    if chantype == "PHA":
        cunit = "adu"
    elif chantype == "PI":
        cunit = "Chan"
    col_ch = pyfits.Column(name=chantype.upper(),
                           format='1J',
                           unit=cunit,
                           array=events[chantype])

    col_t = pyfits.Column(name="TIME",
                          format='1D',
                          unit='s',
                          array=events['time'])

    cols = [col_e, col_x, col_y, col_ch, col_t, col_dx, col_dy, col_id]

    coldefs = pyfits.ColDefs(cols)
    tbhdu = pyfits.BinTableHDU.from_columns(coldefs)
    tbhdu.name = "EVENTS"

    tbhdu.header["MTYPE1"] = "sky"
    tbhdu.header["MFORM1"] = "x,y"
    tbhdu.header["MTYPE2"] = "EQPOS"
    tbhdu.header["MFORM2"] = "RA,DEC"
    tbhdu.header["TCTYP2"] = "RA---TAN"
    tbhdu.header["TCTYP3"] = "DEC--TAN"
    tbhdu.header["TCRVL2"] = parameters["sky_center"][0]
    tbhdu.header["TCRVL3"] = parameters["sky_center"][1]
    tbhdu.header["TCDLT2"] = -parameters["plate_scale"]
    tbhdu.header["TCDLT3"] = parameters["plate_scale"]
    tbhdu.header["TCRPX2"] = parameters["pix_center"][0]
    tbhdu.header["TCRPX3"] = parameters["pix_center"][1]
    tbhdu.header["TCUNI2"] = "deg"
    tbhdu.header["TCUNI3"] = "deg"
    tbhdu.header["TLMIN2"] = 0.5
    tbhdu.header["TLMIN3"] = 0.5
    tbhdu.header["TLMAX2"] = 2.0 * parameters["num_pixels"] + 0.5
    tbhdu.header["TLMAX3"] = 2.0 * parameters["num_pixels"] + 0.5
    tbhdu.header["TLMIN4"] = parameters["chan_lim"][0]
    tbhdu.header["TLMAX4"] = parameters["chan_lim"][1]
    tbhdu.header["TLMIN6"] = -0.5 * parameters["num_pixels"]
    tbhdu.header["TLMAX6"] = 0.5 * parameters["num_pixels"]
    tbhdu.header["TLMIN7"] = -0.5 * parameters["num_pixels"]
    tbhdu.header["TLMAX7"] = 0.5 * parameters["num_pixels"]
    tbhdu.header["EXPOSURE"] = parameters["exposure_time"]
    tbhdu.header["TSTART"] = 0.0
    tbhdu.header["TSTOP"] = parameters["exposure_time"]
    tbhdu.header["HDUVERS"] = "1.1.0"
    tbhdu.header["RADECSYS"] = "FK5"
    tbhdu.header["EQUINOX"] = 2000.0
    tbhdu.header["HDUCLASS"] = "OGIP"
    tbhdu.header["HDUCLAS1"] = "EVENTS"
    tbhdu.header["HDUCLAS2"] = "ACCEPTED"
    tbhdu.header["DATE"] = t_begin.tt.isot
    tbhdu.header["DATE-OBS"] = t_begin.tt.isot
    tbhdu.header["DATE-END"] = t_end.tt.isot
    tbhdu.header["RESPFILE"] = os.path.split(parameters["rmf"])[-1]
    tbhdu.header["PHA_BINS"] = parameters["nchan"]
    tbhdu.header["ANCRFILE"] = os.path.split(parameters["arf"])[-1]
    tbhdu.header["CHANTYPE"] = parameters["channel_type"]
    tbhdu.header["MISSION"] = parameters["mission"]
    tbhdu.header["TELESCOP"] = parameters["telescope"]
    tbhdu.header["INSTRUME"] = parameters["instrument"]
    tbhdu.header["RA_PNT"] = parameters["sky_center"][0]
    tbhdu.header["DEC_PNT"] = parameters["sky_center"][1]
    tbhdu.header["ROLL_PNT"] = parameters["roll_angle"]
    tbhdu.header["AIMPT_X"] = parameters["aimpt_coords"][0]
    tbhdu.header["AIMPT_Y"] = parameters["aimpt_coords"][1]
    if parameters["dither_params"]["dither_on"]:
        tbhdu.header["DITHXAMP"] = parameters["dither_params"]["x_amp"]
        tbhdu.header["DITHYAMP"] = parameters["dither_params"]["y_amp"]
        tbhdu.header["DITHXPER"] = parameters["dither_params"]["x_period"]
        tbhdu.header["DITHYPER"] = parameters["dither_params"]["y_period"]

    start = pyfits.Column(name='START',
                          format='1D',
                          unit='s',
                          array=np.array([0.0]))
    stop = pyfits.Column(name='STOP',
                         format='1D',
                         unit='s',
                         array=np.array([parameters["exposure_time"]]))

    tbhdu_gti = pyfits.BinTableHDU.from_columns([start, stop])
    tbhdu_gti.name = "STDGTI"
    tbhdu_gti.header["TSTART"] = 0.0
    tbhdu_gti.header["TSTOP"] = parameters["exposure_time"]
    tbhdu_gti.header["HDUCLASS"] = "OGIP"
    tbhdu_gti.header["HDUCLAS1"] = "GTI"
    tbhdu_gti.header["HDUCLAS2"] = "STANDARD"
    tbhdu_gti.header["RADECSYS"] = "FK5"
    tbhdu_gti.header["EQUINOX"] = 2000.0
    tbhdu_gti.header["DATE"] = t_begin.tt.isot
    tbhdu_gti.header["DATE-OBS"] = t_begin.tt.isot
    tbhdu_gti.header["DATE-END"] = t_end.tt.isot

    hdulist = [pyfits.PrimaryHDU(), tbhdu, tbhdu_gti]

    pyfits.HDUList(hdulist).writeto(filename, overwrite=overwrite)
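
# Usage sketch: `parameters` must supply every key referenced above. An
# illustrative skeleton (all values are placeholders, not defaults):
#
# parameters = {
#     "exposure_time": 10000.0, "channel_type": "PI", "chan_lim": [1, 1024],
#     "sky_center": [30.0, 45.0], "pix_center": [2048.5, 2048.5],
#     "plate_scale": 1.0 / 3600.0, "num_pixels": 4096, "nchan": 1024,
#     "rmf": "resp.rmf", "arf": "area.arf", "mission": "my_mission",
#     "telescope": "my_telescope", "instrument": "my_instrument",
#     "roll_angle": 0.0, "aimpt_coords": [0.0, 0.0],
#     "dither_params": {"dither_on": False},
# }
# write_event_file(events, parameters, "events.fits", overwrite=True)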
Example #36
sd = SphericalCosLatDifferential([0, 1] * u.mas / u.yr, [0, 1] * u.mas / u.yr,
                                 10 * u.km / u.s)
srd = SphericalRepresentation(sr, differentials=sd)
sc = SkyCoord([1, 2], [3, 4], unit='deg,deg', frame='fk4', obstime='J1990.5')
scc = sc.copy()
scc.representation_type = 'cartesian'
tm = Time([2450814.5, 2450815.5], format='jd', scale='tai', location=el)

# NOTE: in the test below the name of the column "x" for the Quantity is
# important since it tests the fix for #10215 (namespace clash, where "x"
# clashes with "el2.x").
mixin_cols = {
    'tm': tm,
    'dt': TimeDelta([1, 2] * u.day),
    'sc': sc,
    'scc': scc,
    'scd': SkyCoord([1, 2], [3, 4], [5, 6],
                    unit='deg,deg,m',
                    frame='fk4',
                    obstime=['J1990.5', 'J1991.5']),
    'x': [1, 2] * u.m,
    'lat': Latitude([1, 2] * u.deg),
    'lon': Longitude([1, 2] * u.deg, wrap_angle=180. * u.deg),
    'ang':
Example #37
import functools

import numpy as np

import astropy.units as u
import astropy._erfa as erfa
from astropy.time import Time, TimeDelta
from astropy.time.utils import day_frac, two_sum
from astropy.utils import iers

allclose_jd = functools.partial(np.allclose, rtol=2.**-52, atol=0)
allclose_jd2 = functools.partial(np.allclose, rtol=2.**-52,
                                 atol=2.**-52)  # 20 ps atol
allclose_sec = functools.partial(np.allclose,
                                 rtol=2.**-52,
                                 atol=2.**-52 * 24 * 3600)  # 20 ps atol

tiny = 2.**-52
dt_tiny = TimeDelta(tiny, format='jd')
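
# Why 2**-52 matters: it sits at the edge of double precision. two_sum
# recovers the low bit that a plain addition would round away, and day_frac
# keeps such small offsets alive in the second (jd2-like) float:
#
#     >>> x, err = two_sum(1., 2.**-53)
#     >>> err == 2.**-53      # the lost low bit is returned explicitly
#     True
#     >>> day_frac(2455555.5, tiny)[1] != -0.5   # tiny offset survives in frac
#     True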


def test_abs_jd2_always_less_than_half():
    """Make jd2 approach +/-0.5, and check that it doesn't go over."""
    t1 = Time(2400000.5, [-tiny, +tiny], format='jd')
    assert np.all(t1.jd1 % 1 == 0)
    assert np.all(abs(t1.jd2) < 0.5)
    t2 = Time(2400000., [[0.5 - tiny, 0.5 + tiny], [-0.5 - tiny, -0.5 + tiny]],
              format='jd')
    assert np.all(t2.jd1 % 1 == 0)
    assert np.all(abs(t2.jd2) < 0.5)


def test_addition():
    """Check that an addition at the limit of precision (2^-52) is seen"""
    # Minimal completion (the original body is truncated here): adding
    # dt_tiny must change jd2 while leaving jd1 unchanged.
    t = Time(2455555., 0.5, format='jd', scale='utc')
    t_dt = t + dt_tiny
    assert t_dt.jd1 == t.jd1 and t_dt.jd2 != t.jd2
Example #38
    def auto_open(cls, files=None):
        """Attempt to get an up-to-date leap-second list.

        The routine will try the files in sequence until it finds one
        whose expiration date is "good enough" (see below).  If none
        are good enough, it returns the one with the most recent expiration
        date, warning if that file is expired.

        For remote files that are cached already, the cached file is tried
        first before attempting to retrieve it again.

        Parameters
        ----------
        files : list of path-like, optional
            List of files/URLs to attempt to open.  By default, uses
            ``cls._auto_open_files``.

        Returns
        -------
        leap_seconds : `~astropy.utils.iers.LeapSeconds`
            Up to date leap-second table

        Notes
        -----
        Bulletin C is released about 10 days after a possible leap second is
        introduced, i.e., mid-January or mid-July.  Expiration dates are thus
        generally at least 150 days after the present.  We look for a file
        that expires more than ``180 - auto_max_age`` days after the present
        (see `~astropy.utils.iers.Conf.auto_max_age`).
        """
        good_enough = cls._today() + TimeDelta(
            180 - _none_to_float(conf.auto_max_age), format='jd')

        if files is None:
            # Basic files to go over (entries in _auto_open_files can be
            # configuration items, which we want to be sure are up to date).
            files = [getattr(conf, f, f) for f in cls._auto_open_files]

        # Remove empty entries.
        files = [f for f in files if f]

        # Our trials start with normal files and remote ones that are
        # already in cache.  The bools here indicate that the cache
        # should be used.
        trials = [(f, True) for f in files
                  if not urlparse(f).netloc or is_url_in_cache(f)]
        # If we are allowed to download, we try downloading new versions
        # if none of the above worked.
        if conf.auto_download:
            trials += [(f, False) for f in files if urlparse(f).netloc]

        self = None
        err_list = []
        # Go through all entries, and return the first one that
        # is not expired, or the most up to date one.
        for f, allow_cache in trials:
            if not allow_cache:
                clear_download_cache(f)

            try:
                trial = cls.open(f, cache=True)
            except Exception as exc:
                err_list.append(exc)
                continue

            if self is None or trial.expires > self.expires:
                self = trial
                self.meta['data_url'] = str(f)
                if self.expires > good_enough:
                    break

        if self is None:
            raise ValueError('none of the files could be read. The '
                             'following errors were raised:\n' + str(err_list))

        if self.expires < self._today():
            warn('leap-second file is expired.', IERSStaleWarning)

        return self
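
# Usage sketch (``auto_open`` is a classmethod of
# `~astropy.utils.iers.LeapSeconds`, per the docstring above):
#
#     >>> from astropy.utils.iers import LeapSeconds
#     >>> table = LeapSeconds.auto_open()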
Example #39
    def __init__(self, Max_Delta_days, planet=None):
        # Use `is None` rather than `== None`: comparing array-like planet
        # data with `==` gives an element-wise result whose truth value is
        # ambiguous, which is what the original try/except guarded against.
        empty_class = planet is None
                
        if empty_class:
            
            """ Create Empty class to load data from file """

            self.name = None
            self.epoch = None
            self.tranmid_err = None
            self.period = None
            self.period_err = None
            self.transit_duration = None
            self.eccentricity = None
            self.star_Teff = None
            self.star_jmag = None
            self.pl_radj = None
            self.Planets_eclipse = None
            self.num_eclipses = None
            
            self.target_observable = []
            self.eclipse_observable = []
            
        else:

            """ Initialize Eclipse instance from Nasa query_planet object """

            self.name = planet['pl_name']
            self.epoch = Time(planet['pl_tranmid'], format='jd', scale='utc', location=paranal.location)
            if np.isnan(planet['pl_tranmiderr1']):
                self.tranmid_err = TimeDelta(0, format='jd')
            else:
                self.tranmid_err = TimeDelta(planet['pl_tranmiderr1']) # error into future
            self.period = planet['pl_orbper'] * u.day
            if np.isnan(planet['pl_orbpererr1']) or np.isnan(planet['pl_orbpererr2']):
                self.period_err = 0 * u.day
            else:
                if np.abs(planet['pl_orbpererr1']) > np.abs(planet['pl_orbpererr2']):
                    self.period_err = planet['pl_orbpererr1'] * u.day
                else:
                    self.period_err = planet['pl_orbpererr2'] * u.day
            self.transit_duration = planet['pl_trandur'] * u.hour  # CAREFUL: in nexa_old, pl_trandur is given in u.day
            self.eccentricity = planet['pl_orbeccen']
            self.star_Teff = planet['st_teff']
            self.star_jmag = planet['sy_jmag']
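            # Note: the factor 11.2 below presumably converts Jupiter radii
            # to Earth radii (1 R_Jup is about 11.2 R_Earth).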
            self.pl_radj = planet['pl_radj']*11.2

            self.target_observable = []
            self.eclipse_observable = []
    
            """
                Planet_next_eclipse : Class object with which the future transits of the planet can be calculated.
                                      For documentation review astroplan.EclipsingSystem documentation.
    
                WARNING:
                There are currently two major caveats in the implementation of
                ''EclipsingSystem''. The secondary eclipse time approximation is
                only accurate when the orbital eccentricity is small, and the eclipse
                times are computed without any barycentric corrections. The current
                implementation should only be used for approximate mid-eclipse times for
                low eccentricity orbits, with event durations longer than the
                barycentric correction error (<=16 minutes). 
                
                This is corrected for with the barycentric correction, which can be found in the classmethod Observability
    
                From EclipsingSystem.__doc__
    
            """
    
            Planet_next_eclipse = astroplan.EclipsingSystem(
                primary_eclipse_time=self.epoch, orbital_period=self.period, duration=self.transit_duration)
    
            self.Planets_eclipse = Planet_next_eclipse # time in barycentric frame
    
            # number of eclipses occurring during Max_Delta_days.
    
            self.num_eclipses = int(np.floor(Max_Delta_days / (self.period / u.day)))
    
            """ coordinates of the object in IRCS """
            try:
                self.Coordinates = FixedTarget.from_name(self.name)
            except Exception:
                sky_coord = SkyCoord(ra = planet['ra']*u.deg, dec = planet['dec']*u.deg)
                self.Coordinates = FixedTarget(name=self.name,
                    coord = sky_coord)
        print("Specify either '--gps-start' or '--year-start', but not both")
        sys.exit(1)
    elif args.gpsstart is not None:
        try:
            starttime = Time(args.gpsstart, format='gps', scale='utc')
        except ValueError:
            Exception("Could not parse start GPS time: {}".format(args.gpsstart))
    else:
        try:
            starttime = Time(args.yearstart, format='decimalyear', scale='utc')
        except ValueError:
            Exception("Could not parse start year: {}".format(args.yearstart))

    # set the time step
    try:
        dt = TimeDelta(args.interval*3600., format='sec')
    except ValueError:
        Exception("Could not parse time interval: {}".format(args.interval))

    # set the end time
    try:
        endtime = Time(starttime.decimalyear + args.nyears, format='decimalyear', scale='utc') + dt
    except ValueError:
        Exception("Could not parse total timespan")

    pos = []
    vel = []
    acc = []

    # get positions, velocities and accelerations
    curtime = starttime
Example #41
##############################################################################
# Next let's show how this looks on the Sun.
# Load in an AIA map:

aia_map = sunpy.map.Map(sunpy.data.sample.AIA_171_IMAGE)

##############################################################################
# Let's define our starting coordinates

hpc_y = u.Quantity(np.arange(-700, 800, 100), u.arcsec)
hpc_x = np.zeros_like(hpc_y)

##############################################################################
# Let's define how many days in the future we want to rotate to

dt = TimeDelta(4*u.day)
future_date = aia_map.date + dt

##############################################################################
# Now let's plot the original and rotated positions on the AIA map.

fig = plt.figure()
ax = plt.subplot(projection=aia_map)
aia_map.plot()
ax.set_title('The effect of {0} days of differential rotation'.format(dt.to(u.day).value))
aia_map.draw_grid()

for this_hpc_x, this_hpc_y in zip(hpc_x, hpc_y):
    start_coord = SkyCoord(this_hpc_x, this_hpc_y, frame=aia_map.coordinate_frame)
    rotated_coord = solar_rotate_coordinate(start_coord, time=future_date)
    coord = SkyCoord([start_coord.Tx, rotated_coord.Tx],