Example No. 1
    def generate_polycos(self,
                         model,
                         mjdStart,
                         mjdEnd,
                         obs,
                         segLength,
                         ncoeff,
                         obsFreq,
                         maxha,
                         method="TEMPO",
                         numNodes=20):
        """
        Generate the polyco file data file.

        Parameters
        ---------
        model : TimingModel
            TimingModel for generate the Polycos with parameters
            setup.

        mjdStart : float / nump longdouble
            Start time of polycos in mjd

        mjdEnd : float / nump longdouble
            Ending time of polycos in mjd

        obs : str
            Observatory code

        segLength :
            Length of polyco segement [unit: minutes]

        ncoeff :
            number of coefficents

        obsFreq :
            observing frequency

        maxha :
            Maximum hour angle

        method : sting optional ['TEMPO','TEMPO2',...] Default TEMPO
            Method to generate polycos. Now it is only support the TEMPO method.

        numNodes : int optional. Default 20
            Number of nodes for fitting. It can not be less then the number of
            coefficents.
        Return
        ---------

        A polyco table.


        """
        mjdStart = np.longdouble(mjdStart) * u.day
        mjdEnd = np.longdouble(mjdEnd) * u.day
        timeLength = mjdEnd - mjdStart
        segLength = np.longdouble(segLength) * u.min
        obsFreq = float(obsFreq)
        month = [
            'Jan', 'Feb', 'Mar', 'Apr', 'May', 'Jun', 'Jul', 'Aug', 'Sep',
            'Oct', 'Nov', 'Dec'
        ]
        # Allocate memory
        coeffs = np.longdouble(np.zeros(ncoeff))
        entryList = []
        entryIntvl = np.arange(mjdStart.value, mjdEnd.value,
                               segLength.to('day').value)
        if entryIntvl[-1] < mjdEnd.value:
            entryIntvl = np.append(entryIntvl, mjdEnd.value)

        # Make sure the number of nodes is larger than the number of coefficients.
        if numNodes < ncoeff:
            numNodes = ncoeff + 1

        # Generate the polynomial coefficients
        if method == "TEMPO":
            # Using tempo1 method to create polycos
            for i in range(len(entryIntvl) - 1):
                tStart = entryIntvl[i]
                tStop = entryIntvl[i + 1]
                nodes = np.linspace(tStart, tStop, numNodes)
                tmid = ((tStart + tStop) / 2.0) * u.day
                toaMid = toa.get_TOAs_list([
                    toa.TOA((np.modf(tmid.value)[1], np.modf(tmid.value)[0]),
                            obs=obs,
                            freq=obsFreq),
                ])
                refPhase = model.phase(toaMid.table)
                mjdSpan = ((tStop - tStart) * u.day).to('min')
                # Create node TOAs (time samples using the TOA class)
                toaList = [
                    toa.TOA((np.modf(toaNode)[1], np.modf(toaNode)[0]),
                            obs=obs,
                            freq=obsFreq) for toaNode in nodes
                ]

                toas = toa.get_TOAs_list(toaList)

                ph = model.phase(toas.table)
                dt = (nodes * u.day - tmid).to('min')  # Use constant
                rdcPhase = ph - refPhase
                rdcPhase = rdcPhase.int - dt.value * model.F0.value * 60.0 + rdcPhase.frac
                dtd = dt.value.astype(float)  # Truncate to double precision
                rdcPhased = rdcPhase.astype(float)
                coeffs = np.polyfit(dtd, rdcPhased, ncoeff - 1)
                coeffs = coeffs[::-1]
                midTime = at.Time(int(tmid.value),
                                  np.modf(tmid.value)[0],
                                  format='mjd',
                                  scale='utc')
                date, hms = midTime.iso.split()
                yy, mm, dd = date.split('-')
                date = dd + '-' + month[int(mm) - 1] + '-' + yy[2:4]
                hms = hms.replace(':', "")
                entry = polycoEntry(tmid.value,
                                    mjdSpan.to('day').value, refPhase.int,
                                    refPhase.frac, model.F0.value, ncoeff,
                                    coeffs, obs)
                entryList.append(
                    (model.PSR.value, date, hms, tmid.value, model.DM.value,
                     0.0, 0.0, 0.0, obsFreq, entry))

            pTable = table.Table(rows=entryList,
                                 names=('psr', 'date', 'utc', 'tmid', 'dm',
                                        'doppler', 'logrms', 'binary_phase',
                                        'obsfreq', 'entry'),
                                 meta={'name': 'Polyco Data Table'})
            self.polycoTable = pTable

        else:
            # Reading from an old polyco file
            pass
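
# A minimal usage sketch for the method above, assuming a PINT-style setup.
# get_model() and the Polycos host class are assumptions drawn from PINT's
# public API; 'pulsar.par' and 'gbt' are placeholder inputs.
from pint.models import get_model
from pint.polycos import Polycos

model = get_model('pulsar.par')  # hypothetical par file with PSR, F0, DM set
p = Polycos()
p.generate_polycos(model, 55000, 55001, 'gbt', segLength=60, ncoeff=12,
                   obsFreq=1400, maxha=12)
print(p.polycoTable)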
Example No. 2
def test_asteroid_velocity_frame_shifts():
    """
    This test mocks up the use case of observing a spectrum of an asteroid
    at different times and from different observer locations.
    """
    time1 = time.Time('2018-12-13 9:00')
    dt = 12 * u.hour
    time2 = time1 + dt

    # make the silly but simplifying assumption that the asteroid is moving
    # along the x-axis of GCRS and makes a 10-Earth-radius closest approach

    v_ast = [5, 0, 0] * u.km / u.s
    x1 = -v_ast[0] * dt / 2
    x2 = v_ast[0] * dt / 2
    z = 10 * u.Rearth

    cdiff = CartesianDifferential(v_ast)

    asteroid_loc1 = GCRS(CartesianRepresentation(x1.to(u.km),
                                                 0 * u.km,
                                                 z.to(u.km),
                                                 differentials=cdiff),
                         obstime=time1)
    asteroid_loc2 = GCRS(CartesianRepresentation(x2.to(u.km),
                                                 0 * u.km,
                                                 z.to(u.km),
                                                 differentials=cdiff),
                         obstime=time2)

    # assume satellites that are essentially fixed in geostationary orbit on
    # opposite sides of the earth
    observer1 = GCRS(CartesianRepresentation(
        [0 * u.km, 35000 * u.km, 0 * u.km]),
                     obstime=time1)
    observer2 = GCRS(CartesianRepresentation(
        [0 * u.km, -35000 * u.km, 0 * u.km]),
                     obstime=time2)

    wls = np.linspace(4000, 7000, 100) * u.AA
    with pytest.warns(AstropyUserWarning,
                      match='No velocity defined on frame'):
        spec_coord1 = SpectralCoord(wls,
                                    observer=observer1,
                                    target=asteroid_loc1)

    assert spec_coord1.radial_velocity < 0 * u.km / u.s
    assert spec_coord1.radial_velocity > -5 * u.km / u.s

    with pytest.warns(AstropyUserWarning,
                      match='No velocity defined on frame'):
        spec_coord2 = SpectralCoord(wls,
                                    observer=observer2,
                                    target=asteroid_loc2)

    assert spec_coord2.radial_velocity > 0 * u.km / u.s
    assert spec_coord2.radial_velocity < 5 * u.km / u.s

    # now check the behavior of with_observer_stationary_relative_to: we shift
    # each coord into the velocity frame of its *own* target.  That would then
    # be a SpectralCoord allowing direct physical comparison of the two
    # different spec_coords.  There's no way to test that without actual
    # data, though.

    # spec_coord2 is redshifted, so we test that it behaves the way "shifting
    # to rest frame" should - the as-observed spectral coordinate should become
    # the rest frame, so something that starts out red should become bluer
    target_sc2 = spec_coord2.with_observer_stationary_relative_to(
        spec_coord2.target)
    assert np.all(target_sc2 < spec_coord2)
    # rv/redshift should be 0 since the observer and target velocities should
    # be the same
    assert_quantity_allclose(target_sc2.radial_velocity,
                             0 * u.km / u.s,
                             atol=1e-7 * u.km / u.s)

    # check that the same holds for spec_coord1, but be more specific: it
    # should follow the standard redshift formula (which in this case yields
    # a blueshift, although the formula is the same as 1+z)
    target_sc1 = spec_coord1.with_observer_stationary_relative_to(
        spec_coord1.target)
    assert_quantity_allclose(target_sc1,
                             spec_coord1 / (1 + spec_coord1.redshift))
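
# A compact sketch of the redshift relation asserted above: shifting into the
# target's rest frame divides the observed coordinate by (1 + z).  The numbers
# here are arbitrary assumptions.
from astropy import units as u
from astropy.coordinates import SpectralCoord

sc = SpectralCoord(6563 * u.AA, radial_velocity=300 * u.km / u.s)
rest = sc / (1 + sc.redshift)  # the relation used in the final assertion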
Example No. 3
    def compute_TDBs(self, method="default", ephem=None):
        """Compute and add TDB and TDB long double columns to the TOA table.
        This routine creates new columns 'tdb' and 'tdbld' in a TOA table
        for TDB times, using the Observatory locations and IERS A Earth
        rotation corrections for UT1.
        """
        log.info('Computing TDB columns.')
        if 'tdb' in self.table.colnames:
            log.info('tdb column already exists. Deleting...')
            self.table.remove_column('tdb')
        if 'tdbld' in self.table.colnames:
            log.info('tdbld column already exists. Deleting...')
            self.table.remove_column('tdbld')

        if ephem is None:
            if self.ephem is not None:
                ephem = self.ephem
            else:
                log.warning('No ephemeris provided to TOAs object or compute_TDBs. Using DE421')
                ephem = 'DE421'
        else:
            # If user specifies an ephemeris, make sure it is the same as the one already in the TOA object, to prevent mixing.
            if (self.ephem is not None) and (ephem != self.ephem):
                log.error('Ephemeris provided to compute_TDBs {0} is different than TOAs object ephemeris {1}!'.format(ephem,self.ephem))
        self.ephem = ephem

        # Compute in observatory groups
        tdbs = numpy.zeros_like(self.table['mjd'])
        for ii, key in enumerate(self.table.groups.keys):
            grp = self.table.groups[ii]
            obs = self.table.groups.keys[ii]['obs']
            loind, hiind = self.table.groups.indices[ii:ii+2]
            site = get_observatory(obs)
            if isinstance(site,TopoObs):
                # For TopoObs, it is safe to assume that all TOAs have same location
                # I think we should report to astropy that initializing
                # a Time from a list (or Column) of Times throws away the location information
                grpmjds = time.Time(grp['mjd'], location=grp['mjd'][0].location)
            else:
                # Grab locations for each TOA
                # It is crazy that I have to deconstruct the locations like
                # this to build a single EarthLocation object with an array
                # of locations contained in it.
                # Is there a more efficient way to convert a list of EarthLocations
                # into a single EarthLocation object with an array of values internally?
                loclist = [t.location for t in grp['mjd']]
                if loclist[0] is None:
                    grpmjds = time.Time(grp['mjd'],location=None)
                else:
                    locs = EarthLocation(numpy.array([l.x.value for l in loclist])*u.m,
                                         numpy.array([l.y.value for l in loclist])*u.m,
                                         numpy.array([l.z.value for l in loclist])*u.m)
                    grpmjds = time.Time(grp['mjd'],location=locs)
            grptdbs = site.get_TDBs(grpmjds, method=method, ephem=ephem)
            tdbs[loind:hiind] = numpy.asarray([t for t in grptdbs])

        # Now add the new columns to the table
        col_tdb = table.Column(name='tdb', data=tdbs)
        col_tdbld = table.Column(name='tdbld',
                data=[utils.time_to_longdouble(t) for t in tdbs])
        self.table.add_columns([col_tdb, col_tdbld])
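
# The else-branch above hand-packs scalar EarthLocations into one array-valued
# EarthLocation; the same pattern in isolation (coordinates are made-up
# geocentric values).
import numpy as np
from astropy import units as u
from astropy.coordinates import EarthLocation

locs = [EarthLocation(882589.0 * u.m, -4924872.0 * u.m, 3943729.0 * u.m),
        EarthLocation(-1601185.0 * u.m, -5041977.0 * u.m, 3554876.0 * u.m)]
arr = EarthLocation(np.array([l.x.to_value(u.m) for l in locs]) * u.m,
                    np.array([l.y.to_value(u.m) for l in locs]) * u.m,
                    np.array([l.z.to_value(u.m) for l in locs]) * u.m)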
Example No. 4
def mid_exposure_mjd(hdu):

    mjd_start = time.Time(hdu.header['MJD-STR'], format='mjd').utc
    mjd_end = time.Time(hdu.header['MJD-END'], format='mjd').utc
    return mjd_start + (mjd_end - mjd_start)/2.0
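
# Usage sketch, assuming a header that carries the MJD-STR/MJD-END keywords
# this helper expects (keyword names vary between instrument pipelines).
from astropy import time
from astropy.io import fits

hdu = fits.PrimaryHDU()
hdu.header['MJD-STR'] = 58000.00
hdu.header['MJD-END'] = 58000.01
print(mid_exposure_mjd(hdu).mjd)  # -> 58000.005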
Example No. 5
import numpy as np
import sgp4.io
import sgp4.propagation
from astropy import time
from numpy import arctan, cos, degrees, sin, sqrt
from represent import RepresentationMixin
from scipy.constants import kilo, pi
from sgp4.earth_gravity import wgs72

from . import utilities as ou
from .maneuver import (Maneuver, Operation, PropagateAnomalyBy,
                       PropagateAnomalyTo)
from .utilities import *

J1970 = time.Time('J1970', scale='utc')
J2000 = time.Time('J2000', scale='utc')

__all__ = [
    'KeplerianElements',
]


class KeplerianElements(RepresentationMixin, object):
    r"""Defines an orbit using Keplerian elements.

    :param a: Semimajor axis [m]
    :param e: Eccentricity [-]
    :param i: Inclination [rad]
    :param raan: Right ascension of ascending node (:math:`\Omega`) [rad]
    :param arg_pe: Argument of periapsis (:math:`\omega`) [rad]
Example No. 6
def test_io_time_write_fits_local(tmpdir, table_types):
    """
    Test that table with a Time mixin with scale local can also be written
    by io.fits. Like ``test_io_time_write_fits_standard`` above, but avoiding
    ``cxcsec`` format, which requires an epoch and thus cannot be used for a
    local time scale.
    """
    t = table_types([[1, 2], ['string', 'column']])
    t['a_local'] = time.Time([[50001, 50002], [50003, 50004]],
                             format='mjd',
                             scale='local',
                             location=EarthLocation(-2446354,
                                                    4237210,
                                                    4077985,
                                                    unit='m'))
    t['b_local'] = time.Time(
        ['1999-01-01T00:00:00.123456789', '2010-01-01T00:00:00'],
        scale='local')
    t['c'] = [3., 4.]

    filename = str(tmpdir.join('table-tmp'))

    # Show that FITS format succeeds

    with pytest.warns(AstropyUserWarning,
                      match='Time Column "b_local" has no specified location'):
        t.write(filename, format='fits', overwrite=True)

    with pytest.warns(
            AstropyUserWarning,
            match='Time column reference position "TRPOSn" is not specified.'):
        tm = table_types.read(filename, format='fits', astropy_native=True)

    for ab in ('a', 'b'):
        name = ab + '_local'

        # Assert that the time columns are read as Time
        assert isinstance(tm[name], time.Time)

        # Assert that the scales round-trip
        assert tm[name].scale == t[name].scale

        # Assert that the format is jd
        assert tm[name].format == 'jd'

        # Assert that the location round-trips
        assert tm[name].location == t[name].location

        # Finally assert that the column data round-trips
        assert (tm[name] == t[name]).all()

    for name in ('col0', 'col1', 'c'):
        # Assert that the non-time columns are read as Column
        assert isinstance(tm[name], Column)

        # Assert that the non-time columns' data round-trips
        assert (tm[name] == t[name]).all()

    # Test for conversion of time data to its value, as defined by its format.
    for ab in ('a', 'b'):
        name = ab + '_local'
        t[name].info.serialize_method['fits'] = 'formatted_value'

    t.write(filename, format='fits', overwrite=True)
    tm = table_types.read(filename, format='fits')

    for ab in ('a', 'b'):
        name = ab + '_local'

        assert not isinstance(tm[name], time.Time)
        assert (tm[name] == t[name].value).all()
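
# A stripped-down sketch of the round trip this test exercises; the file name
# is arbitrary.
from astropy import time
from astropy.table import Table

t = Table()
t['t'] = time.Time(['2010-01-01', '2011-01-01'], scale='tt')
t.write('times.fits', format='fits', overwrite=True)
t2 = Table.read('times.fits', astropy_native=True)
assert isinstance(t2['t'], time.Time) and t2['t'].scale == 'tt'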
Example No. 7
    def obs_release_date(self):
        """
        Observation release date
        """
        rt = self.get('obs_release_date', default=None, decode=True)
        return rt if not rt else time.Time(rt)
Example No. 8
import astropy.time as Time  # assumed import, matching the Time.Time(...) calls
import matplotlib.pyplot as plt  # assumed import for the plotting code below

lst = []  # per-year records; a new entry is appended at each year boundary
t_prev = 2.0  # any value greater than start_day
start_day = 0.5  # start of the analysis year, as a fraction of the year (0.0-1.0)

# Skip headers (iFileName, lenNumHead and numTailFilter are defined earlier
# in the original script)
with open(iFileName, "r") as ifile:
    ll = ifile.readline()
    while lenNumHead(ll) != 4:
        if ll[0:10] == "Fill Value":
            fillValue = float(ll.split(',')[-1])
            # fillValue = numTailFilter.search(ll).group(0)
            print("fillValue = {0}".format(fillValue))
        ll = ifile.readline()
    while ll:
        data = ll.split(',')
        if float(data[1]) != fillValue:
            date = Time.Time(data[0] + "T12:00:00", format="isot")
            tt = date.jyear % 1
            # Start a new record at the first data point and whenever the
            # date wraps past start_day
            if not lst or (t_prev < start_day and tt >= start_day):
                lst.append({"Date": [], "DoY": [], "Value": []})
            lst[-1]["Date"].append(date)
            lst[-1]["DoY"].append(((tt + start_day) % 1) + start_day)
            lst[-1]["Value"].append(float(data[1]))
            t_prev = tt
        ll = ifile.readline()


fontsize = 18

fig, ax = plt.subplots(figsize=(18,6),dpi=100)
lastId = len(lst)-1
for ii, dd in enumerate(lst):
Example No. 9
    def upgradeScript(self):
        '''Script by Mirzhalilov and Khuzhakulov that upgrades observations to
        the standard they proposed (2019 Summer Internship). Not applied if
        the "BJD-TDB" or "MIDTIME" keywords are present in the FITS header.
        '''
        if ('BJD-TDB' in self.hdr) or ('MIDTIME' in self.hdr):
            logger.debug(f'Already upgraded: {self.date} {self.name}')
            return False

        #---Script--Start
        # Gets the value with the corresponding key
        JD_UTC = self.hdr['JD']
        RA = self.hdr['OBJCTRA'].split()
        DEC = self.hdr['OBJCTDEC'].split()
        obs_long = self.hdr['SITELONG'].split()
        obs_lat = self.hdr['SITELAT'].split()

        # Formatting RA in (h)our, (m)inute, (s)econd format
        RA = f'{RA[0]}h{RA[1]}m{RA[2]}s'
        # Formatting DEC in (d)egree, arc(m)inute and arc(s)econd
        DEC = f'{DEC[0]}d{DEC[1]}m{DEC[2]}s'
        # Converts obs_long and obs_lat into float
        obs_long = (((float(obs_long[2]) / 60) + float(obs_long[1])) /
                    60) + float(obs_long[0])
        obs_lat = (((float(obs_lat[2]) / 60) + float(obs_lat[1])) /
                   60) + float(obs_lat[0])
        # Setting Coordinate of observing object in the sky
        target = coord.SkyCoord(RA, DEC, frame='icrs')
        # Setting coordinate of observatory
        observatory = coord.EarthLocation(obs_long, obs_lat, OBS_ALT)
        # Create a Time object from the JD in the header (the start of the observation)
        JD_UTC = time.Time(JD_UTC,
                           format='jd',
                           scale='utc',
                           location=observatory)
        # Add half the exposure time to the JD to get the mid-observation time
        New_JD = JD_UTC + datetime.timedelta(
            seconds=float(self.hdr['EXPTIME']) / 2)
        # Convert the JD to an ISO-format UTC string
        t_jd = time.Time(New_JD, format='jd', scale='utc')
        MIDTIME = t_jd.iso
        # Create a new Time from MIDTIME and the observatory location to compute LST
        t1 = time.Time(MIDTIME, scale='utc', location=observatory)
        LST = t1.sidereal_time('mean')
        # Light travel time difference for the target between the centre of the solar system and the Earth
        ltt_bary = New_JD.light_travel_time(target)
        # Convert New_JD from UTC to TDB, then add the light travel time computed above
        time_barycentre = New_JD.tdb + ltt_bary
        real_bjd = time_barycentre.value

        # Create cards for keywords
        self.hdr.append('BJD-TDB')
        self.hdr.append('MIDTIME')
        self.hdr.append('LST')
        self.hdr.append('PI')
        self.hdr.append('PRJTNUM')
        self.hdr.append('GAIN')
        self.hdr.append('PSCALE')
        self.hdr.append('EPOCH')
        self.hdr.append('RDNOISE')

        # Set new cards created
        self.hdr.set('BJD-TDB',
                     value=real_bjd,
                     comment='Dynamic Barycentric Julian Day')
        self.hdr.set('MIDTIME',
                     value=MIDTIME,
                     comment='DATE-OBS of Mid Exposure Time in UT')
        self.hdr.set('LST', value=str(LST), comment='Local Sidereal Time')
        self.hdr.set('PI', comment='Principal Investigator')
        self.hdr.set('PRJTNUM', comment='Project Number')
        self.hdr.set('GAIN', value=1.5, comment='e-/count')
        self.hdr.set('PSCALE', value=0.754, comment='\'\'/pixel')
        self.hdr.set('EPOCH', value=2000.0)
        self.hdr.set('RDNOISE', value=11.5, comment='e-')
        #---Script--End

        return True
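
# The barycentric-correction core of the script above in isolation, with
# made-up coordinates and a hypothetical mid-exposure time.
from astropy import coordinates as coord, time, units as u

loc = coord.EarthLocation.from_geodetic(30.0 * u.deg, 40.0 * u.deg, 1000 * u.m)
target = coord.SkyCoord('05h00m00s', '+20d00m00s', frame='icrs')
t_mid = time.Time(2458000.5, format='jd', scale='utc', location=loc)
ltt_bary = t_mid.light_travel_time(target)  # barycentric by default
bjd_tdb = (t_mid.tdb + ltt_bary).jd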
Example No. 10
def test_time(tmpdir):
    time_array = time.Time(np.arange(100), format="unix")

    tree = {'large_time_array': time_array}

    helpers.assert_roundtrip_tree(tree, tmpdir)
Example No. 11
def create_voevent(jsonfile=None, deployment=False, **kwargs):
    """ template syntax for voeventparse creation of voevent
    """

    required = [
        'internalname', 'mjds', 'dm', 'width', 'snr', 'ra', 'dec', 'radecerr'
    ]
    preferred = ['fluence', 'p_flux', 'importance', 'dmerr']

    # set values
    dd = kwargs.copy()
    if jsonfile is not None:  # as made by caltechdata.set_metadata
        import json  # assumed: the trigger metadata is loaded from the JSON file
        with open(jsonfile) as fp:
            trigger = json.load(fp)
        for k, v in trigger.items():
            if k in required + preferred:
                dd[k] = v

    assert all([
        k in dd for k in required
    ]), f'Input keys {list(dd.keys())} not complete (requires {required})'

    # TODO: set this correctly
    dt = time.Time(dd['mjds'], format='mjd').to_datetime(timezone=pytz.utc)

    # create voevent instance
    role = vp.definitions.roles.observation if deployment else vp.definitions.roles.test
    v = vp.Voevent(
        stream='',  # TODO: check
        stream_id=1,
        role=role)

    vp.set_who(v,
               date=datetime.datetime.utcnow(),
               author_ivorn="voevent.dsa-110.caltech.org")  # TODO: check

    vp.set_author(v,
                  title="DSA-110 Testing Node",
                  contactName="Casey Law",
                  contactEmail="*****@*****.**")

    params = []
    dm = vp.Param(name="dm",
                  value=str(dd['dm']),
                  unit="pc/cm^3",
                  ucd="phys.dispMeasure;em.radio.750-1500MHz",
                  dataType='float',
                  ac=True)
    dm.Description = 'Dispersion Measure'
    params.append(dm)

    width = vp.Param(name="width",
                     value=str(dd['width']),
                     unit="ms",
                     ucd="time.duration;src.var.pulse",
                     dataType='float',
                     ac=True)
    width.Description = 'Temporal width of burst'
    params.append(width)

    snr = vp.Param(name="snr",
                   value=str(dd['snr']),
                   ucd="stat.snr",
                   dataType='float',
                   ac=True)
    snr.Description = 'Signal to noise ratio'
    params.append(snr)

    if 'fluence' in dd:
        fluence = vp.Param(
            name='fluence',
            value=str(dd['fluence']),
            unit='Jansky ms',
            ucd='em.radio.750-1500MHz',  # TODO: check
            dataType='float',
            ac=False)
        fluence.Description = 'Fluence'
        params.append(fluence)

    if 'p_flux' in dd:
        p_flux = vp.Param(name='peak_flux',
                          value=str(dd['p_flux']),
                          unit='Janskys',
                          ucd='em.radio.750-1500MHz',
                          dataType='float',
                          ac=True)
        p_flux.Description = 'Peak Flux'
        params.append(p_flux)

    if 'dmerr' in dd:
        dmerr = vp.Param(name="dm_error",
                         value=str(dd['dmerr']),
                         unit="pc/cm^3",
                         ucd="phys.dispMeasure;em.radio.750-1500MHz",
                         dataType='float',
                         ac=True)
        dmerr.Description = 'Dispersion Measure error'
        params.append(dmerr)

    v.What.append(vp.Group(params=params, name='event parameters'))

    vp.add_where_when(v,
                      coords=vp.Position2D(
                          ra=str(dd['ra']),
                          dec=str(dd['dec']),
                          err=str(dd['radecerr']),
                          units='deg',
                          system=vp.definitions.sky_coord_system.utc_fk5_geo),
                      obs_time=dt,
                      observatory_location='OVRO')

    print("\n***Here is your WhereWhen:***\n")
    print(vp.prettystr(v.WhereWhen))

    print("\n***And your What:***\n")
    print(vp.prettystr(v.What))

    vp.add_how(v,
               descriptions='Discovered with DSA-110',
               references=vp.Reference('http://deepsynoptic.org'))

    if 'importance' in dd:
        vp.add_why(v, importance=str(dd['importance']))
    else:
        vp.add_why(v)
    v.Why.Name = str(dd['internalname'])

    vp.assert_valid_as_v2_0(v)

    return v
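
# Hedged usage sketch: build a test VOEvent with the required keys and
# serialize it (all values are placeholders; assumes the module-level
# imports this function relies on).
import voeventparse as vp

v = create_voevent(internalname='FRB-test', mjds=59000.0, dm=100.0, width=1.0,
                   snr=12.0, ra=180.0, dec=30.0, radecerr=0.01)
with open('event.xml', 'wb') as f:
    vp.dump(v, f)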
Example No. 12
# Assumed imports for this snippet: degradation and get_correction_table are
# inferred to come from aiapy.calibrate, matching the call signatures below.
import numpy as np
import astropy.units as u
from astropy import time
from aiapy.calibrate import degradation
from aiapy.calibrate.util import get_correction_table

# dem_norm=np.ones([nx,ny,nt])
data = np.array([3.4, 13.8, 184, 338, 219.55, 12.22])
edata = np.array([0.2, 0.43, 7.83, 12.9, 5.80, 0.23])
dem_norm = np.array([
    0.082588151, 0.18005607, 0.30832890, 0.47582966, 0.66201794, 0.83059740,
    0.93994260, 0.95951378, 0.88358527, 0.73393929, 0.54981130, 0.37136465,
    0.22609001, 0.11025056
])

correction_table = get_correction_table()
wavenum = ['94', '131', '171', '193', '211', '335']
channels = []
for i in np.arange(len(wavenum)):
    channels.append(float(wavenum[i]) * u.angstrom)

time_calibration = time.Time('2014-01-01T00:00:00', scale='utc')

time_test = time.Time('2014-01-01T00:00:00', scale='utc')

# deg_calibration = {}
deg_calibration = np.zeros([len(channels)])
# deg = {}
deg = np.zeros([len(channels)])

for i, c in enumerate(channels):
    deg_calibration[i] = degradation(c,
                                     time_calibration,
                                     correction_table=correction_table)
    deg[i] = degradation(c, time_test, correction_table=correction_table)

tresp_logt = tresp[:, 0]
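
# A plausible follow-on (an assumption, not part of the original script): the
# degradation factors are typically used to correct observed intensities back
# to the calibration epoch.
ratio = deg_calibration / deg  # equals 1 here, since both times are identical
data_corrected = data * ratio  # hypothetical degradation correction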
Example No. 13
def search_config(config,
                  preffile=None,
                  inprefs={},
                  nameincludes=None,
                  searchintents=None):
    """ Test whether configuration specifies a scan config that realfast should
    search
    """

    # find config properties of interest
    intent = config.scan_intent
    antennas = config.get_antennas()
    antnames = [str(ant.name) for ant in antennas]
    subbands = config.get_subbands()
    inttimes = [subband.hw_time_res for subband in subbands]
    pols = [subband.pp for subband in subbands]
    nchans = [subband.spectralChannels for subband in subbands]
    chansizes = [subband.bw / subband.spectralChannels for subband in subbands]
    reffreqs = [subband.sky_center_freq * 1e6 for subband in subbands]

    # Do not process if...
    # 1) chansize changes between subbands
    if not all([chansizes[0] == chansize for chansize in chansizes]):
        logger.warn(
            "Channel size changes between subbands: {0}".format(chansizes))
        return False

    # 2) both start and stop times are in the past
    now = time.Time.now().unix
    startTime = time.Time(config.startTime, format='mjd').unix
    stopTime = time.Time(config.stopTime, format='mjd').unix
    if (startTime < now) and (stopTime < now):
        logger.warn(
            "Scan startTime and stopTime are in the past ({0}, {1} < {2})".
            format(startTime, stopTime, now))
        return False

    # 3) if nameincludes set, reject if datasetId does not have it
    if nameincludes is not None:
        if nameincludes not in config.datasetId:
            logger.warn(
                "datasetId {0} does not include nameincludes {1}".format(
                    config.datasetId, nameincludes))
            return False

    # 4) only search if in searchintents
    if searchintents is not None:
        if not any([searchintent in intent for searchintent in searchintents]):
            logger.warn("intent {0} not in searchintents list {1}".format(
                intent, searchintents))
            return False

    # 5) only two antennas
    if len(antnames) <= 2:
        logger.warn("Only {0} antennas in array".format(len(antnames)))
        return False

    # 6) only if state validates
    prefsname = get_prefsname(config=config)
    if not heuristics.state_validates(
            config=config, preffile=preffile, prefsname=prefsname,
            inprefs=inprefs):
        logger.warn("State not valid for scanId {0}".format(config.scanId))
        return False
    # 7) only if some fast sampling is done (faster than VLASS final inttime)
    t_fast = 0.4
    if not any([inttime < t_fast for inttime in inttimes]):
        logger.warn(
            "No subband has integration time faster than {0} s".format(t_fast))
        return False

    return True
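
# The past-scan check above compares Unix seconds derived from MJD metadata;
# the same pattern in isolation (the MJD value is a placeholder).
from astropy import time

now = time.Time.now().unix
start = time.Time(58500.0, format='mjd').unix
scan_is_past = start < now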
Example No. 14
    def start_pipeline(self, scanId, cfile=None, segments=None):
        """ Start pipeline conditional on cluster state.
        Sets futures and state after submission keyed by scanId.
        segments arg can be used to select or slow segment submission.
        """

        st = self.states[scanId]
        w_memlim = self.read_overhead * st.vismem * 1e9
        if segments is None:
            segments = list(range(st.nsegment))

        vys_timeout = self.vys_timeout
        if st.metadata.datasource in ['vys', 'vyssim']:
            if self.vys_timeout is not None:
                logger.debug(
                    "vys_timeout factor set to fixed value of {0:.1f}x".format(
                        vys_timeout))
            else:
                assert self.vys_sec_per_spec is not None, "Must define vys_sec_per_spec to estimate vys_timeout"
                nspec = st.readints * st.nbl * st.nspw * st.npol
                vys_timeout = (st.t_segment +
                               self.vys_sec_per_spec * nspec) / st.t_segment
                logger.debug(
                    "vys_timeout factor scaled by nspec to {0:.1f}x".format(
                        vys_timeout))

        mockseg = random.choice(segments) if random.uniform(
            0, 1) < self.mockprob else None
        if mockseg is not None:
            logger.info("Mock set for scanId {0} in segment {1}".format(
                scanId, mockseg))

        # vys data means realtime operations must timeout within a scan time
        if st.metadata.datasource == 'vys':
            timeout = 0.9 * st.metadata.inttime * st.metadata.nints  # bit shorter than scan
        else:
            timeout = 0
        throttletime = self.throttle * st.metadata.inttime * st.metadata.nints / st.nsegment
        logger.info(
            'Submitting {0} segments for scanId {1} with {2:.1f}s per segment'.
            format(len(segments), scanId, throttletime))
        logger.debug('Read_overhead {0}, read_totfrac {1}, and '
                     'spill_limit {2} with timeout {3}s'.format(
                         self.read_overhead, self.read_totfrac,
                         self.spill_limit, timeout))

        tot_memlim = self.read_totfrac * sum([
            v['resources']['MEMORY']
            for v in itervalues(self.client.scheduler_info()['workers'])
            if 'READER' in v['resources']
        ])

        # submit segments
        t0 = time.Time.now().unix
        elapsedtime = 0
        nsubmitted = 0  # count number submitted from list segments
        segments = iter(segments)
        segment = next(segments)
        telcalset = self.set_telcalfile(scanId)
        while True:
            segsubtime = time.Time.now().unix
            if st.metadata.datasource == 'vys':
                endtime = time.Time(st.segmenttimes[segment][1],
                                    format='mjd').unix
                if endtime < segsubtime - 2:  # TODO: define buffer delay better
                    logger.warning(
                        "Segment {0} time window has passed ({1} < {2}). Skipping."
                        .format(segment, endtime, segsubtime - 2))
                    try:
                        segment = next(segments)
                        continue
                    except StopIteration:
                        logger.debug(
                            "No more segments for scanId {0}".format(scanId))
                        break

            # try setting telcal
            if not telcalset:
                telcalset = self.set_telcalfile(scanId)

            # submit if cluster ready and telcal available
            if (heuristics.reader_memory_ok(self.client, w_memlim)
                    and heuristics.readertotal_memory_ok(
                        self.client, tot_memlim)
                    and heuristics.spilled_memory_ok(limit=self.spill_limit,
                                                     daskdir=self.daskdir)
                    and (telcalset if self.requirecalibration else True)):

                # first time initialize scan
                if scanId not in self.futures:
                    self.futures[scanId] = []
                    self.errors[scanId] = 0
                    self.finished[scanId] = 0

                    if self.indexresults:
                        elastic.indexscan(
                            inmeta=self.states[scanId].metadata,
                            preferences=self.states[scanId].prefs,
                            indexprefix=self.indexprefix)
                    else:
                        logger.info("Not indexing scan or prefs.")

                futures = pipeline.pipeline_seg(st,
                                                segment,
                                                cl=self.client,
                                                cfile=cfile,
                                                vys_timeout=vys_timeout,
                                                mem_read=w_memlim,
                                                mem_search=2 * st.vismem * 1e9,
                                                mockseg=mockseg)
                self.futures[scanId].append(futures)
                nsubmitted += 1

                if self.data_logging:
                    segment, data, cc, acc = futures
                    distributed.fire_and_forget(
                        self.client.submit(util.data_logger,
                                           st,
                                           segment,
                                           data,
                                           fifo_timeout='0s',
                                           priority=-1))
                if self.indexresults:
                    elastic.indexscanstatus(scanId,
                                            pending=self.pending[scanId],
                                            finished=self.finished[scanId],
                                            errors=self.errors[scanId],
                                            indexprefix=self.indexprefix,
                                            nsegment=st.nsegment)

                try:
                    segment = next(segments)
                except StopIteration:
                    logger.info(
                        "No more segments for scanId {0}".format(scanId))
                    break

            else:
                if not heuristics.reader_memory_ok(self.client, w_memlim):
                    logger.info(
                        "System not ready. No reader available with required memory {0}"
                        .format(w_memlim))
                elif not heuristics.readertotal_memory_ok(
                        self.client, tot_memlim):
                    logger.info(
                        "System not ready. Total reader memory exceeds limit of {0}"
                        .format(tot_memlim))
                elif not heuristics.spilled_memory_ok(limit=self.spill_limit,
                                                      daskdir=self.daskdir):
                    logger.info(
                        "System not ready. Spilled memory exceeds limit of {0}"
                        .format(self.spill_limit))
                elif not (self.set_telcalfile(scanId)
                          if self.requirecalibration else True):
                    logger.info(
                        "System not ready. No telcalfile available for {0}".
                        format(scanId))

            # periodically check on submissions. always, if memory limited.
            if not (segment % 2) or not (
                    heuristics.reader_memory_ok(self.client, w_memlim) and
                    heuristics.readertotal_memory_ok(self.client, tot_memlim)
                    and heuristics.spilled_memory_ok(limit=self.spill_limit,
                                                     daskdir=self.daskdir)):
                self.cleanup(
                    keep=scanId)  # do not remove keys of ongoing submission

            # check timeout and wait time for next segment
            elapsedtime = time.Time.now().unix - t0
            if elapsedtime > timeout and timeout:
                logger.info("Submission timed out. Submitted {0}/{1} segments "
                            "in ScanId {2}".format(nsubmitted, st.nsegment,
                                                   scanId))
                break
            else:
                dt = time.Time.now().unix - segsubtime
                if dt < throttletime:
                    logger.debug("Waiting {0:.1f}s to submit segment.".format(
                        throttletime - dt))
                    sleep(throttletime - dt)
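
# Minimal sketch of the per-segment throttling loop used above; throttletime
# is a placeholder value.
from time import sleep
from astropy import time

throttletime = 1.5  # hypothetical seconds allotted per segment
segsubtime = time.Time.now().unix
# ... submit one segment here ...
dt = time.Time.now().unix - segsubtime
if dt < throttletime:
    sleep(throttletime - dt)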
Example No. 15
# Assumed imports; `engine` and the GOES model are defined elsewhere in the
# original script.
import datetime
import re
import urllib.request

from astropy import time
from sqlalchemy.orm import sessionmaker

Session = sessionmaker(bind=engine)
session = Session()

fp = urllib.request.urlopen(
    'http://services.swpc.noaa.gov/text/goes-xray-flux-primary.txt')
con = fp.read().decode()
lines = con.split('\n')
last_line = ''
for l in lines:
    match = re.search(
        r'^(\d+)\s+(\d+)\s+(\d+)\s+(\d+)\s+(\d+)\s+(\d+)\s+(\S+)\s+(\S+)', l)
    if not match:
        continue
    year = int(match.group(1))
    month = int(match.group(2))
    day = int(match.group(3))
    hour = int(match.group(4)[0:2])
    minute = int(match.group(4)[2:4])
    flux_short = float(match.group(7))
    flux_long = float(match.group(8))

    t_utc = datetime.datetime(year, month, day, hour, minute, 0)
    t_tai = time.Time(t_utc, format='datetime', scale='utc').tai.datetime
    goes = GOES(t_tai=t_tai,
                xray_flux_long=flux_long,
                xray_flux_short=flux_short)
    session.merge(goes)
    last_line = l

session.commit()
print(last_line)
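
# The UTC -> TAI step in isolation: TAI runs ahead of UTC by the accumulated
# leap seconds (37 s for dates in 2017).
import datetime
from astropy import time

t_utc = datetime.datetime(2017, 9, 6, 12, 2, 0)
t_tai = time.Time(t_utc, format='datetime', scale='utc').tai.datetime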
Example No. 16
def test_frame_api():
    from astropy.coordinates.representation import SphericalRepresentation, \
                                 UnitSphericalRepresentation
    from astropy.coordinates.builtin_frames import ICRS, FK5
    # <--------------------Reference Frame/"Low-level" classes--------------------->
    # The low-level classes have a dual role: they act as specifiers of coordinate
    # frames and they *may* also contain data as one of the representation objects,
    # in which case they are the actual coordinate objects themselves.

    # They can always accept a representation as a first argument
    icrs = ICRS(UnitSphericalRepresentation(lon=8 * u.hour, lat=5 * u.deg))

    # which is stored as the `data` attribute
    assert icrs.data.lat == 5 * u.deg
    assert icrs.data.lon == 8 * u.hourangle

    # Frames that require additional information like equinoxes or obstimes get
    # them as keyword parameters to the frame constructor.  Where sensible,
    # defaults are used. E.g., FK5 almost always uses the J2000 equinox.
    fk5 = FK5(UnitSphericalRepresentation(lon=8 * u.hour, lat=5 * u.deg))
    J2000 = time.Time('J2000', scale='utc')
    fk5_2000 = FK5(UnitSphericalRepresentation(lon=8 * u.hour, lat=5 * u.deg),
                   equinox=J2000)
    assert fk5.equinox == fk5_2000.equinox

    # the information required to specify the frame is immutable
    J2001 = time.Time('J2001', scale='utc')
    with raises(AttributeError):
        fk5.equinox = J2001

    # Similar for the representation data.
    with raises(AttributeError):
        fk5.data = UnitSphericalRepresentation(lon=8 * u.hour, lat=5 * u.deg)

    # There is also a class-level attribute that lists the attributes needed to
    # identify the frame.  These include attributes like `equinox` shown above.
    assert all(nm in ('equinox', 'obstime')
               for nm in fk5.get_frame_attr_names())

    # The result of `get_frame_attr_names` is used in particular by the
    # high-level class (discussed below) to allow round-tripping between
    # various frames.  It is also part of the public API for developers and
    # advanced users.

    # The actual position information is accessed via the representation objects
    assert_allclose(icrs.represent_as(SphericalRepresentation).lat, 5 * u.deg)
    # shorthand for the above
    assert_allclose(icrs.spherical.lat, 5 * u.deg)
    assert icrs.cartesian.z.value > 0

    # Many frames have a "default" representation, the one in which they are
    # conventionally described, often with a special name for some of the
    # coordinates. E.g., most equatorial coordinate systems are spherical with RA and
    # Dec. This works simply as a shorthand for the longer form above

    assert_allclose(icrs.dec, 5 * u.deg)
    assert_allclose(fk5.ra, 8 * u.hourangle)

    assert icrs.representation_type == SphericalRepresentation

    # low-level classes can also be initialized with names valid for that representation
    # and frame:
    icrs_2 = ICRS(ra=8 * u.hour, dec=5 * u.deg, distance=1 * u.kpc)
    assert_allclose(icrs.ra, icrs_2.ra)

    # and these are taken as the default if keywords are not given:
    # icrs_nokwarg = ICRS(8*u.hour, 5*u.deg, distance=1*u.kpc)
    # assert icrs_nokwarg.ra == icrs_2.ra and icrs_nokwarg.dec == icrs_2.dec

    # they also are capable of computing on-sky or 3d separations from each other,
    # which will be a direct port of the existing methods:
    coo1 = ICRS(ra=0 * u.hour, dec=0 * u.deg)
    coo2 = ICRS(ra=0 * u.hour, dec=1 * u.deg)
    # `separation` is the on-sky separation
    assert coo1.separation(coo2).degree == 1.0

    # while `separation_3d` includes the 3D distance information
    coo3 = ICRS(ra=0 * u.hour, dec=0 * u.deg, distance=1 * u.kpc)
    coo4 = ICRS(ra=0 * u.hour, dec=0 * u.deg, distance=2 * u.kpc)
    assert coo3.separation_3d(coo4).kpc == 1.0

    # The next example fails because `coo1` and `coo2` don't have distances
    with raises(ValueError):
        assert coo1.separation_3d(coo2).kpc == 1.0
Example No. 17
def test_io_time_write_fits_standard(tmpdir, table_types):
    """
    Test that table with Time mixin columns can be written by io.fits.
    Validation of the output is done. Test that io.fits writes a table
    containing Time mixin columns that can be partially round-tripped
    (metadata scale, location).

    Note that we postpone checking the "local" scale, since that cannot
    be done with format 'cxcsec', as it requires an epoch.
    """
    t = table_types([[1, 2], ['string', 'column']])
    for scale in time.STANDARD_TIME_SCALES:
        t['a' + scale] = time.Time([[1, 2], [3, 4]],
                                   format='cxcsec',
                                   scale=scale,
                                   location=EarthLocation(-2446354,
                                                          4237210,
                                                          4077985,
                                                          unit='m'))
        t['b' + scale] = time.Time(
            ['1999-01-01T00:00:00.123456789', '2010-01-01T00:00:00'],
            scale=scale)
    t['c'] = [3., 4.]

    filename = str(tmpdir.join('table-tmp'))

    # Show that FITS format succeeds
    with pytest.warns(AstropyUserWarning,
                      match='Time Column "btai" has no specified location, '
                      'but global Time Position is present'):
        t.write(filename, format='fits', overwrite=True)
    with pytest.warns(
            AstropyUserWarning,
            match='Time column reference position "TRPOSn" is not specified'):
        tm = table_types.read(filename, format='fits', astropy_native=True)

    for scale in time.STANDARD_TIME_SCALES:
        for ab in ('a', 'b'):
            name = ab + scale

            # Assert that the time columns are read as Time
            assert isinstance(tm[name], time.Time)

            # Assert that the scales round-trip
            assert tm[name].scale == t[name].scale

            # Assert that the format is jd
            assert tm[name].format == 'jd'

            # Assert that the location round-trips
            assert tm[name].location == t[name].location

            # Finally assert that the column data round-trips
            assert (tm[name] == t[name]).all()

    for name in ('col0', 'col1', 'c'):
        # Assert that the non-time columns are read as Column
        assert isinstance(tm[name], Column)

        # Assert that the non-time columns' data round-trips
        assert (tm[name] == t[name]).all()

    # Test for conversion of time data to its value, as defined by its format
    for scale in time.STANDARD_TIME_SCALES:
        for ab in ('a', 'b'):
            name = ab + scale
            t[name].info.serialize_method['fits'] = 'formatted_value'

    t.write(filename, format='fits', overwrite=True)
    tm = table_types.read(filename, format='fits')

    for scale in time.STANDARD_TIME_SCALES:
        for ab in ('a', 'b'):
            name = ab + scale

            assert not isinstance(tm[name], time.Time)
            assert (tm[name] == t[name].value).all()
Example No. 18
def test_transform_api():
    from astropy.coordinates.representation import UnitSphericalRepresentation
    from astropy.coordinates.builtin_frames import ICRS, FK5
    from astropy.coordinates.baseframe import frame_transform_graph, BaseCoordinateFrame
    from astropy.coordinates.transformations import DynamicMatrixTransform
    # <------------------------Transformations------------------------------------->
    # Transformation functionality is the key to the whole scheme: they transform
    # low-level classes from one frame to another.

    # (used below but defined above in the API)
    fk5 = FK5(ra=8 * u.hour, dec=5 * u.deg)

    # If no data (or `None`) is given, the class acts as a specifier of a frame, but
    # without any stored data.
    J2001 = time.Time('J2001', scale='utc')
    fk5_J2001_frame = FK5(equinox=J2001)

    # if they do not have data, the string instead is the frame specification
    assert repr(fk5_J2001_frame) == "<FK5 Frame (equinox=J2001.000)>"

    #  Note that, although a frame object is immutable and can't have data added, it
    #  can be used to create a new object that does have data by giving the
    # `realize_frame` method a representation:
    srep = UnitSphericalRepresentation(lon=8 * u.hour, lat=5 * u.deg)
    fk5_j2001_with_data = fk5_J2001_frame.realize_frame(srep)
    assert fk5_j2001_with_data.data is not None
    # Now `fk5_j2001_with_data` is in the same frame as `fk5_J2001_frame`, but it
    # is an actual low-level coordinate, rather than a frame without data.

    # These frames are primarily useful for specifying what a coordinate should be
    # transformed *into*, as they are used by the `transform_to` method
    # E.g., this snippet precesses the point to the new equinox
    newfk5 = fk5.transform_to(fk5_J2001_frame)
    assert newfk5.equinox == J2001

    # classes can also be given to `transform_to`, which then uses the defaults for
    # the frame information:
    samefk5 = fk5.transform_to(FK5)
    # `fk5` was initialized using default `obstime` and `equinox`, so:
    assert_allclose(samefk5.ra, fk5.ra, atol=1e-10 * u.deg)
    assert_allclose(samefk5.dec, fk5.dec, atol=1e-10 * u.deg)

    # transforming to a new frame necessarily loses framespec information if that
    # information is not applicable to the new frame.  This means transforms are not
    # always round-trippable:
    fk5_2 = FK5(ra=8 * u.hour, dec=5 * u.deg, equinox=J2001)
    ic_trans = fk5_2.transform_to(ICRS)

    # `ic_trans` does not have an `equinox`, so now when we transform back to FK5,
    # it's a *different* RA and Dec
    fk5_trans = ic_trans.transform_to(FK5)
    assert not allclose(fk5_2.ra, fk5_trans.ra, rtol=0, atol=1e-10 * u.deg)

    # But if you explicitly give the right equinox, all is fine
    fk5_trans_2 = fk5_2.transform_to(FK5(equinox=J2001))
    assert_allclose(fk5_2.ra, fk5_trans_2.ra, rtol=0, atol=1e-10 * u.deg)

    # Trying to transform a frame with no data is of course an error:
    with raises(ValueError):
        FK5(equinox=J2001).transform_to(ICRS)

    # To actually define a new transformation, the same scheme as in the
    # 0.2/0.3 coordinates framework can be re-used - a graph of transform functions
    # connecting various coordinate classes together.  The main changes are:
    # 1) The transform functions now get the frame object they are transforming the
    #    current data into.
    # 2) Frames with additional information need to have a way to transform between
    #    objects of the same class, but with different framespecinfo values

    # An example transform function:
    class SomeNewSystem(BaseCoordinateFrame):
        pass

    @frame_transform_graph.transform(DynamicMatrixTransform, SomeNewSystem,
                                     FK5)
    def new_to_fk5(newobj, fk5frame):
        ot = newobj.obstime
        eq = fk5frame.equinox
        # ... build a *cartesian* transform matrix using `eq` that transforms
        # from the `newobj` frame as observed at `ot` to FK5 at equinox `eq`
        matrix = np.eye(3)
        return matrix
Example No. 19
    def obs_create_date(self):
        """
        Date when the dataset was created
        """
        cd = self.get('obs_create_date', default=None)
        return cd if not cd else time.Time(cd)
Example No. 20
def test_highlevel_api():
    J2001 = time.Time('J2001', scale='utc')

    # <--------------------------"High-level" class-------------------------------->
    # The "high-level" class is intended to wrap the lower-level classes in such a
    # way that they can be round-tripped, as well as providing a variety of
    # convenience functionality.  This document is not intended to show *all* of the
    # possible high-level functionality, rather how the high-level classes are
    # initialized and interact with the low-level classes

    # this creates an object that contains an `ICRS` low-level class, initialized
    # identically to the first ICRS example further up.

    sc = coords.SkyCoord(coords.SphericalRepresentation(lon=8 * u.hour,
                                                        lat=5 * u.deg,
                                                        distance=1 * u.kpc),
                         frame='icrs')

    # Other representations and `frame` keywords delegate to the appropriate
    # low-level class.  The already-existing registry for user-defined
    # coordinates will be used by `SkyCoord` to figure out what the various
    # `frame` keyword values actually mean.

    sc = coords.SkyCoord(ra=8 * u.hour, dec=5 * u.deg, frame='icrs')
    sc = coords.SkyCoord(l=120 * u.deg, b=5 * u.deg, frame='galactic')

    # High-level classes can also be initialized directly from low-level objects
    sc = coords.SkyCoord(coords.ICRS(ra=8 * u.hour, dec=5 * u.deg))

    # The next example raises an error because the high-level class must always
    # have position data.
    with pytest.raises(ValueError):
        sc = coords.SkyCoord(coords.FK5(equinox=J2001))  # raises ValueError

    # similarly, the low-level object can always be accessed

    # this is how it's supposed to look, but sometimes the numbers get rounded in
    # funny ways
    # assert repr(sc.frame) == '<ICRS Coordinate: ra=120.0 deg, dec=5.0 deg>'
    rscf = repr(sc.frame)
    assert rscf.startswith('<ICRS Coordinate: (ra, dec) in deg')

    # and  the string representation will be inherited from the low-level class.

    # same deal: it should look like this, but different architectures/Python
    # versions may round the numbers differently
    # assert repr(sc) == '<SkyCoord (ICRS): ra=120.0 deg, dec=5.0 deg>'
    rsc = repr(sc)
    assert rsc.startswith('<SkyCoord (ICRS): (ra, dec) in deg')

    # Supports a variety of possible complex string formats
    sc = coords.SkyCoord('8h00m00s +5d00m00.0s', frame='icrs')

    # In the next example, the unit is only needed because the units are
    # ambiguous.  In general, we *never* accept ambiguity
    sc = coords.SkyCoord('8:00:00 +5:00:00.0',
                         unit=(u.hour, u.deg),
                         frame='icrs')

    # The next one would yield length-2 array coordinates, because of the comma

    sc = coords.SkyCoord(['8h 5d', '2°2′3″ 0.3rad'], frame='icrs')

    # It should also interpret common designation styles as a coordinate
    # NOT YET
    # sc = coords.SkyCoord('SDSS J123456.89-012345.6', frame='icrs')

    # but it should also be possible to provide formats for outputting to strings,
    # similar to `Time`.  This can be added right away or at a later date.

    # transformation is done the same as for low-level classes, which it delegates to

    sc_fk5_j2001 = sc.transform_to(coords.FK5(equinox=J2001))
    assert sc_fk5_j2001.equinox == J2001

    # The key difference is that the high-level class remembers frame information
    # necessary for round-tripping, unlike the low-level classes:
    sc1 = coords.SkyCoord(ra=8 * u.hour,
                          dec=5 * u.deg,
                          equinox=J2001,
                          frame='fk5')
    sc2 = sc1.transform_to('icrs')

    # The next assertion succeeds, but it doesn't mean anything for ICRS, as ICRS
    # isn't defined in terms of an equinox
    assert sc2.equinox == J2001

    # But it *is* necessary once we transform to FK5
    sc3 = sc2.transform_to('fk5')
    assert sc3.equinox == J2001
    assert_allclose(sc1.ra, sc3.ra)

    # `SkyCoord` will also include the attribute-style access that is in the
    # v0.2/0.3 coordinate objects.  This will *not* be in the low-level classes
    sc = coords.SkyCoord(ra=8 * u.hour, dec=5 * u.deg, frame='icrs')
    scgal = sc.galactic
    assert str(scgal).startswith('<SkyCoord (Galactic): (l, b)')

    # the existing `from_name` and `match_to_catalog_*` methods will be moved to the
    # high-level class as convenience functionality.

    # in remote-data test below!
    # m31icrs = coords.SkyCoord.from_name('M31', frame='icrs')
    # assert str(m31icrs) == '<SkyCoord (ICRS) RA=10.68471 deg, Dec=41.26875 deg>'

    if HAS_SCIPY:
        cat1 = coords.SkyCoord(ra=[1, 2] * u.hr,
                               dec=[3, 4.01] * u.deg,
                               distance=[5, 6] * u.kpc,
                               frame='icrs')
        cat2 = coords.SkyCoord(ra=[1, 2, 2.01] * u.hr,
                               dec=[3, 4, 5] * u.deg,
                               distance=[5, 200, 6] * u.kpc,
                               frame='icrs')
        idx1, sep2d1, dist3d1 = cat1.match_to_catalog_sky(cat2)
        idx2, sep2d2, dist3d2 = cat1.match_to_catalog_3d(cat2)

        assert np.any(idx1 != idx2)
Example No. 21
# Assumed imports inferred from usage below (np, u, coord and atime were not
# shown in this snippet).
import numpy as np

import astropy.coordinates as coord
import astropy.time as atime
import astropy.units as u
from scipy.constants import speed_of_light

import pypeline.phased_array.beamforming as beamforming
import pypeline.phased_array.bluebild.data_processor as data_proc
import pypeline.phased_array.bluebild.imager.spatial_domain as bb_sd
import pypeline.phased_array.bluebild.parameter_estimator as param_est
import pypeline.phased_array.instrument as instrument
import pypeline.phased_array.util.data_gen.sky as sky
import pypeline.phased_array.util.data_gen.visibility as visibility
import pypeline.phased_array.util.gram as gr
import pypeline.phased_array.util.grid as grid
import pypeline.phased_array.util.io.image as image
import pypeline.util.math.sphere as sph

# Observation
obs_start = atime.Time(56879.54171302732, scale='utc', format='mjd')
field_center = coord.SkyCoord(218 * u.deg, 34.5 * u.deg)
field_of_view = np.radians(5)
frequency = 145e6
wl = speed_of_light / frequency

# Instrument
N_station = 24
dev = instrument.LofarBlock(N_station)
mb_cfg = [(_, _, field_center) for _ in range(N_station)]
mb = beamforming.MatchedBeamformerBlock(mb_cfg)
gram = gr.GramBlock()

# Data generation
T_integration = 8
sky_model = sky.from_tgss_catalog(field_center, field_of_view, N_src=20)
Exemplo n.º 22
0
    def get(self, telescope_id=None):
        f"""
        ---
        single:
          description: Retrieve ephemeris data for a single telescope.
          tags:
            - ephemeris
          parameters:
            - in: path
              name: telescope_id
              required: false
              schema:
                type: string
          responses:
            200:
              content:
                application/json:
                    schema:
                        type: object
            400:
              content:
                application/json:
                  schema: Error
        multiple:
          tags:
            - ephemeris
          description: Retrieve ephemeris data for multiple telescopes, up to {MAX_TELESCOPES_TO_DISPLAY}
          parameters:
          - in: query
            name: telescope_ids
            nullable: true
            schema:
              type: array
            description: |
                List of telescope IDs to retrieve ephemeris data for.
          responses:
            200:
              content:
                application/json:
                  schema:
                    type: object
            400:
              content:
                application/json:
                  schema: Error
        """

        time = self.get_query_argument('time', None)

        if time is not None:
            try:
                time = ap_time.Time(time, format='iso')
            except ValueError as e:
                return self.error(f'Invalid time format: {e.args[0]}')
        else:
            time = ap_time.Time.now()

        ephemerides = None

        if telescope_id is not None:
            try:
                telescope_id = int(telescope_id)
            except ValueError as e:
                return self.error(f'Invalid value for Telescope id: {e.args[0]}')
            telescope = Telescope.query.filter(Telescope.id == telescope_id).first()
            if telescope is None:
                return self.error('No Telescope with this id')
            else:
                if telescope.fixed_location is not True:
                    return self.error('Telescope is not fixed')
                else:
                    ephemerides = telescope.ephemeris(time)
        else:
            telescope_ids = self.get_query_argument('telescopeIds', None)
            if telescope_ids is not None:
                try:
                    telescope_ids = [int(t) for t in telescope_ids.split(',')]
                except ValueError as e:
                    return self.error(f'Invalid telescopeIds format: {e.args[0]}')

                if len(telescope_ids) > MAX_TELESCOPES_TO_DISPLAY:
                    telescope_ids = telescope_ids[:MAX_TELESCOPES_TO_DISPLAY]

                telescopes = Telescope.query.filter(
                    Telescope.id.in_(telescope_ids)
                ).all()
            else:
                telescopes = Telescope.query.all()
                if len(telescopes) > MAX_TELESCOPES_TO_DISPLAY:
                    telescopes = telescopes[:MAX_TELESCOPES_TO_DISPLAY]

            ephemerides = {
                telescope.id: telescope.ephemeris(time) for telescope in telescopes
            }

        self.verify_and_commit()
        return self.success(data=ephemerides)
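
        # Hedged usage note (not part of the handler): the time-parsing branch
        # above relies on plain astropy, e.g.
        #
        #     from astropy import time as ap_time
        #     t = ap_time.Time('2021-06-01 05:43:00', format='iso')  # ISO parse
        #     t = ap_time.Time.now()                                 # fallback
        #
        # and an unparseable string raises ValueError, which the handler turns
        # into a 400 error response.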
Exemplo n.º 23
0
def main():
    parser = argparse.ArgumentParser(formatter_class=argparse.ArgumentDefaultsHelpFormatter,
                                     fromfile_prefix_chars='@',
                                     parents=[util.base_parser])

    parser.add_argument('--pixel-scale', type=float, default=0.16,
                        help="What should the pixel scale of the stack be? (in arc-seconds)")
    parser.add_argument('--swarp', action='store_true', help="Use projection to do shifts, default is pixel shifts.")
    parser.add_argument('--stack-mode', choices=STACKING_MODES.keys(),
                        default='WEIGHTED_MEDIAN', help="How to combine images.")
    parser.add_argument('--rectify', action='store_true', help="Rectify images to WCS of reference, otherwise "
                                                               "images must be on same grid before loading.")
    parser.add_argument('--mask', action='store_true', help='set masked pixels to nan before shift/stack')
    parser.add_argument('--n-sub-stacks', default=3, type=int, help='How many sub-stacks should we produce')
    parser.add_argument('--rate-min', type=float, default=1, help='Minimum shift rate ("/hr)')
    parser.add_argument('--rate-max', type=float, default=5, help='Maximum shift rate ("/hr)')
    parser.add_argument('--rate-step', type=float, default=0.25, help='Step-size for shift rate ("/hr)')
    parser.add_argument('--angle-min', type=float, default=-3, help='Minimum angle to shift at (deg)')
    parser.add_argument('--angle-max', type=float, default=3, help='Maximum angle to shift at (deg)')
    parser.add_argument('--angle-step', type=float, default=0.25, help='Step-size for shift angle (deg)')
    parser.add_argument('--clip', type=int, default=None,
                        help='Mask pixel whose variance is clip times the median variance')
    parser.add_argument('--section-size', type=int, default=1024,
                        help='Break images into section when stacking (conserves memory)')
    parser.add_argument('--centre', default=None, nargs=3, type=float,
                        help="only stack data around this centre, given as RA DEC EPOCH_MJD "
                             "(RA/DEC in decimal degrees, epoch as an MJD)")
    parser.add_argument('--group', action='store_true', help='Make stacks time grouped instead of striding.')

    args = parser.parse_args()
    logging.basicConfig(level=getattr(logging, args.log_level),
                        format="%(asctime)s :: %(levelname)s :: %(module)s.%(funcName)s:%(lineno)d %(message)s")

    reruns = args.rerun[0].split(":")
    if len(reruns) > 2:
        raise ValueError("Don't know what to do with more then 2 rerun directories.")

    input_rerun, output_rerun = util.parse_rerun(args.basedir, args.rerun)

    output_dir = os.path.join(output_rerun, args.exptype, args.pointing, args.filter)
    arc_dir = os.path.join("arc:projects/NewHorizons/DATA/HSC/rerun/sns_weighted/",
                           args.exptype,
                           args.pointing,
                           args.filter)

    os.makedirs(output_dir, exist_ok=True)
    logging.info(f'Writing results to {output_dir}')

    if args.swarp:
        stack_function = swarp
    else:
        stack_function = shift

    rates = shift_rates(args.rate_min, args.rate_max, args.rate_step,
                        args.angle_min, args.angle_max, args.angle_step)
    logging.info(f'Shift-and-Stacking the following list of rate/angle pairs: '
                 f'{[(rate["rate"],rate["angle"]) for rate in rates]}')

    logging.info(f'Loading all images matching pattern: {input_rerun}')
    images = np.array(get_image_list(input_rerun, args.exptype, ccd=args.ccd,
                                     visit=args.visit, filters=[args.pointing, args.filter]))
    if len(images) == 0:
        raise OSError(f'No images found using {input_rerun}')

    # Organize images in MJD order.
    mjds = []
    logging.info(f"Sorting list of {len(images)} based on mjd")
    new_images = []
    full_hdus = {}
    for filename in images:
        try:
            hdulist = fits.open(filename)
            image = os.path.basename(filename)
            full_hdus[image] = hdulist
            full_hdus[image][0].header['IMAGE'] = image
            mjds.append(time.Time(mid_exposure_mjd(hdulist[0])))
            new_images.append(image)
        except Exception as ex:
            logging.error(str(ex))
            logging.error(f"Failed to open {filename} not using")

    ind = np.argsort(mjds)
    # sort the images by mjd
    images = numpy.array(new_images)[ind]
    logging.info(f"Sorted list of {len(mjds)} dates.")
    # In debug mode use at most six images (or fewer if there aren't six)
    if logging.getLogger().getEffectiveLevel() < logging.INFO:
        num_of_images = min(6, len(images))
        stride = max(1, int(len(images)/num_of_images-1))
        logging.debug(f'Selecting every {stride}th image, for a total of {num_of_images}')
        images = images[::stride]

    # do the stacking in groups of images as set from the CL.
    reference_hdu = None
    for index in range(args.n_sub_stacks):
        if not args.group:
            # stride the image list
            sub_images = images[index::args.n_sub_stacks]
            reference_idx = int(len(images) // 2)
            reference_image = images[reference_idx]
            reference_hdu = full_hdus[reference_image]
            # reference_hdu = fits.open(images[reference_idx])
            # reference_hdu[0].header['IMAGE'] = os.path.basename(reference_image)
            # reference_filename = os.path.splitext(os.path.basename(images[reference_idx]))[0][8:]
        else:
            # group images by time
            start_idx = len(images)//args.n_sub_stacks*index
            start_idx = int(max(0, start_idx))
            end_idx = len(images)//args.n_sub_stacks*(index+1)
            end_idx = int(min(len(images), end_idx))
            sub_images = images[start_idx:end_idx]

        hdus = {}
        for image in sub_images:
            hdus[image] = full_hdus[image]
            # with fits.open(image, mode='update') as hdulist:
            #    hdulist[0].header['IMAGE'] = os.path.basename(image)
            # hdus[image] = os.path.basename(image)

        # make a dictionary of the astrometric headers.
        astheads = {}
        for image in sub_images:
            astheads[image] = hdus[image][1].header

        if args.group:
            if index == 1:
                reference_idx = 0
            elif index == args.n_sub_stacks-1:
                reference_idx = -1
            else:
                reference_idx = int(len(sub_images) // 2)
            reference_image = sub_images[reference_idx]
            reference_hdu = hdus[reference_image]
            # reference_hdu = fits.open(sub_images[reference_idx])
        reference_filename = os.path.splitext(reference_hdu[0].header['IMAGE'])[0]

        if not args.swarp and args.rectify:
            # Need to project all images to same WCS before passing to stack.
            logging.info('Swarp-ing the input images to a common projection and reference frame.')
            swarped_hdus = swarp(hdus, reference_hdu, None)
            for idx in swarped_hdus:
                # This HDUList order of 1 for science data and 2 for mask and 3 for uncertainty
                # is a hard coding of the LSST Pipeline order from 2020.
                image = swarped_hdus[idx]
                # noinspection PyUnresolvedReferences
                hdus[idx][1].header['RECTIFY'] = 'DONE'
                # noinspection PyUnresolvedReferences
                hdus[idx][1].data = image.data
                # noinspection PyUnresolvedReferences
                hdus[idx][1].header = image.header
                # noinspection PyUnresolvedReferences
                hdus[idx][2].data = image.mask
                # noinspection PyUnresolvedReferences
                hdus[idx][3].data = image.uncertainty

        if args.centre is not None:
            box_size = args.section_size//2
            chdus = {}
            centre = None
            for key in hdus:
                hdul = hdus[key]
                image = key
                if centre is None:
                    # Centre the cutouts on the given RA/DEC, offset from the
                    # given epoch to the reference image's epoch using the
                    # middle rate of motion.
                    centre_epoch = time.Time(args.centre[2], format='mjd').utc
                    mid_rates = rates[len(rates) // 2]
                    ra_rate = mid_rates["rate"] * numpy.cos(numpy.deg2rad(mid_rates["angle"]))/3600.0
                    dec_rate = mid_rates["rate"] * numpy.sin(numpy.deg2rad(mid_rates["angle"]))/3600.0
                    reference_date = mid_exposure_mjd(reference_hdu[0])
                    dt = (reference_date - centre_epoch).to('hour').value
                    dra = ra_rate*dt
                    ddec = dec_rate*dt
                    dec_centre = args.centre[1]+ddec
                    ra_centre = args.centre[0]+dra*numpy.cos(numpy.deg2rad(dec_centre))
                    centre = SkyCoord(ra_centre, dec_centre, unit='degree')
                    logging.info(f'Extracting box of 1/2 width {box_size} pixels around {centre}')
                w = WCS(astheads[image])
                try:
                    x, y = w.all_world2pix(centre.ra.degree, centre.dec.degree, 0)
                except Exception as ex:
                    logging.error(str(ex))
                    continue
                logging.info(f'{image} {centre.to_string(style="hmsdms", sep=":")} -> {x},{y}')
                x1 = int(max(0, x - box_size))
                x2 = int(min(hdul[1].header['NAXIS1'], x1 + 2*box_size))
                x1 = int(max(0, x2 - 2*box_size))
                y1 = int(max(0, y - box_size))
                y2 = int(min(hdul[1].header['NAXIS2'], y1 + 2*box_size))
                y1 = int(max(0, y2 - 2*box_size))
                logging.info(f'{hdul[0].header["FRAMEID"]} -> [{y1}:{y2},{x1}:{x2}]')
                hdul[0].header['XOFFSET'] = x1
                hdul[0].header['YOFFSET'] = y1
                astheads[image]['CRPIX1'] -= x1
                astheads[image]['CRPIX2'] -= y1
                logging.info(f'Extracting cutout of {image} from file on disk.')
                for idx in range(1, 4):
                    try:
                        data = hdul[idx].data[y1:y2, x1:x2]
                    except Exception as ex:
                        logging.error(str(ex))
                        logging.error(f"Extracting [{y1}:{y2},{x1}:{x2}] from {hdul[0].header['FRAMEID']}, blanking this frame.")
                        # Blank this frame with NaNs matching the cutout shape.
                        data = numpy.full((y2 - y1, x2 - x1), numpy.nan)
                    hdul[idx].data = data
                logging.info("DONE.")
                chdus[image] = hdul
            hdus = chdus
        hdus2 = {}
        # Remove from the list HDULists where there is no data left (after cutout)
        for key in hdus:
            if hdus[key] is not None:
                hdus2[key] = hdus[key]
        hdus = hdus2

        if args.clip is not None:
            # Use the variance data section to mask high-variance pixels from the stack:
            # mask pixels that are both high-variance AND part of a detected source.
            logging.info(f'Masking pixels in image whose variance exceeds {args.clip} times the median variance.')
            for key in hdus:
                hdu = hdus[key]
                if hdu[HSC_HDU_MAP['image']].header.get('CLIP', None) is not None:
                    continue
                hdu[HSC_HDU_MAP['image']].header['CLIP'] = args.clip
                mvar = numpy.nanmedian(hdu[HSC_HDU_MAP['variance']].data)
                if numpy.isnan(mvar):
                    logging.warning(f"median variance of {key} is {mvar}")
                logging.debug(f'Median variance of {key} is {mvar}')
                with warnings.catch_warnings():
                    warnings.simplefilter("ignore", category=RuntimeWarning)
                    bright_mask = hdu[HSC_HDU_MAP['variance']].data > mvar*args.clip
                detected_mask = bitfield_to_boolean_mask(hdu[HSC_HDU_MAP['mask']].data,
                                                         ignore_flags=LSST_MASK_BITS['DETECTED'],
                                                         flip_bits=True)
                logging.debug(f'Bright Mask flagged {np.sum(bright_mask)}')
                hdu[HSC_HDU_MAP['image']].data[bright_mask & detected_mask] = np.nan
                logging.debug(f'Clip setting {np.sum(bright_mask & detected_mask)} to nan')
                hdu[HSC_HDU_MAP['variance']].data[bright_mask & detected_mask] = np.nan
                # update the local version with the clipped data, but not if this is a cutout.

        if args.mask:
            # Set masked pixels to 'nan' before sending for stacking.
            for key in hdus:
                hdu = hdus[key]
                if not hdu[HSC_HDU_MAP['image']].header.get('MASKED', False):
                    hdu[HSC_HDU_MAP['image']].data = mask_as_nan(hdu[HSC_HDU_MAP['image']].data,
                                                                 hdu[HSC_HDU_MAP['mask']].data)
                    hdu[HSC_HDU_MAP['variance']].data = mask_as_nan(hdu[HSC_HDU_MAP['variance']].data,
                                                                    hdu[HSC_HDU_MAP['mask']].data)
                    hdu[HSC_HDU_MAP['image']].header['MASKED'] = True

        for rate in rates:
            dra = rate['rate']*np.cos(np.deg2rad(rate['angle'])) * units.arcsecond/units.hour
            ddec = rate['rate']*np.sin(np.deg2rad(rate['angle'])) * units.arcsecond/units.hour
            if args.group:
                int_rate = int(rate["rate"]*10)
                int_angle = int((rate['angle'] % 360)*10)
                expnum = f'{int(args.pointing)}{int_rate:02d}{int_angle:04d}{index}'
                output_filename = f'{expnum}p{args.ccd:02d}.fits'
            else:
                expnum = reference_hdu[0].header.get('FRAMEID', 'HSCA0000000').replace('HSCA', '')
                output_filename = f'STACK-{reference_filename}-{index:02d}-' \
                                  f'{rate["rate"]:+06.2f}-{rate["angle"]:+06.2f}.fits.fz'
            # Removed check of VOSpace as now running on arcade
            output_filename = os.path.join(output_dir, output_filename)
            output = stack_function(hdus, reference_hdu, {'dra': dra, 'ddec': ddec},
                                    stacking_mode=args.stack_mode, section_size=args.section_size, astheads=astheads)
            logging.debug(f'Got stack result {output}, writing to {output_filename}')
            # Keep a history of which visits went into the stack.
            output[0].header['SOFTWARE'] = f'{__name__}-{__version__}'
            output[0].header['NCOMBINE'] = (len(hdus), 'Number combined')
            output[0].header['COMBALGO'] = (args.stack_mode, 'Stacking mode')
            output[0].header['RATE'] = (rate['rate'], 'arc-second/hour')
            output[0].header['ANGLE'] = (rate['angle'], 'degree')
            output[0].header['DRA'] = (dra.value, str(dra.unit))
            output[0].header['DDEC'] = (ddec.value, str(ddec.unit))
            output[0].header['CCDNUM'] = (args.ccd, 'CCD NUMBER or DETSER')
            output[0].header['EXPNUM'] = (expnum, '[int(pointing)][rate*10][(angle%360)*10][index]')
            output[0].header['REFEXP'] = (reference_filename, 'reference exposure')
            output[0].header['MIDMJD'] = (mid_exposure_mjd(output[0]).mjd, " MID Exposure")
            output[0].header['ASTLEVEL'] = 1
            output[0].header['YOFFSET'] = reference_hdu[0].header.get('YOFFSET', 0)
            output[0].header['XOFFSET'] = reference_hdu[0].header.get('XOFFSET', 0)

            for i_index, image_name in enumerate(sub_images):
                output[0].header[f'input{i_index:03d}'] = os.path.basename(image_name)
            output.writeto(output_filename, overwrite=True)

    return 0
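
# `shift_rates` is not shown in this example; a hypothetical reconstruction,
# consistent with how `rates` is consumed above (a list of dicts with 'rate'
# and 'angle' keys), might look like:
import numpy as np

def shift_rates_sketch(rate_min, rate_max, rate_step, angle_min, angle_max, angle_step):
    """Hypothetical grid of rate/angle pairs for shift-and-stack."""
    rates = []
    for rate in np.arange(rate_min, rate_max + rate_step, rate_step):
        for angle in np.arange(angle_min, angle_max + angle_step, angle_step):
            rates.append({'rate': rate, 'angle': angle})
    return rates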
Exemplo n.º 24
0
def dummy_fitstbl(nfile=10,
                  spectro_name='shane_kast_blue',
                  directory='',
                  notype=False):
    """
    Generate a dummy fitstbl for testing

    Parameters
    ----------
    nfile : int, optional
      Number of files to mimic
    spectro_name : str, optional
      Name of spectrograph to mimic
    directory : str, optional
      Directory to prepend to the dummy filenames
    notype : bool, optional
      If True, do not add image type info to the fitstbl

    Returns
    -------
    fitstbl : PypeItMetaData

    """
    fitsdict = {}
    fitsdict['index'] = np.arange(nfile)
    fitsdict['directory'] = [directory] * nfile
    fitsdict['filename'] = ['b{:03d}.fits.gz'.format(i) for i in range(nfile)]
    # TODO: The below will fail once nfile reaches 60, since only the minutes
    # field varies (a TimeDelta-based alternative is sketched after this function).
    dates = ['2015-01-23T00:{:02d}:11.04'.format(i) for i in range(nfile)]
    ttime = time.Time(dates, format='isot')
    fitsdict['mjd'] = ttime.mjd
    fitsdict['target'] = ['Dummy'] * nfile
    fitsdict['ra'] = ['00:00:00'] * nfile
    fitsdict['dec'] = ['+00:00:00'] * nfile
    fitsdict['exptime'] = [300.] * nfile
    fitsdict['dispname'] = ['600/4310'] * nfile
    fitsdict['dichroic'] = ['560'] * nfile
    fitsdict["binning"] = ['1,1'] * nfile
    fitsdict["airmass"] = [1.0] * nfile

    if spectro_name == 'shane_kast_blue':
        fitsdict['numamplifiers'] = [1] * nfile
        # Lamps
        for i in range(1, 17):
            fitsdict['lampstat{:02d}'.format(i)] = ['off'] * nfile
        fitsdict['exptime'][0] = 0  # Bias
        fitsdict['lampstat06'][1] = 'on'  # Arc
        fitsdict['exptime'][1] = 30  # Arc
        fitsdict['lampstat01'][2] = 'on'  # Trace, pixel, slit flat
        fitsdict['lampstat01'][3] = 'on'  # Trace, pixel, slit flat
        fitsdict['exptime'][2] = 30  # flat
        fitsdict['exptime'][3] = 30  # flat
        fitsdict['ra'][4] = '05:06:36.6'  # Standard
        fitsdict['dec'][4] = '52:52:01.0'
        fitsdict['airmass'][4] = 1.2
        fitsdict['ra'][5] = '07:06:23.45'  # Random object
        fitsdict['dec'][5] = '+30:20:50.5'
        fitsdict['decker'] = ['0.5 arcsec'] * nfile

    # arrays
    for k in fitsdict.keys():
        fitsdict[k] = np.array(fitsdict[k])

    spectrograph = load_spectrograph(spectro_name)
    fitstbl = PypeItMetaData(spectrograph,
                             spectrograph.default_pypeit_par(),
                             data=fitsdict)
    fitstbl['instrume'] = spectro_name
    type_bits = np.zeros(len(fitstbl),
                         dtype=fitstbl.type_bitmask.minimum_dtype())

    # Image typing
    if not notype:
        if spectro_name == 'shane_kast_blue':
            #fitstbl['sci_ID'] = 1  # This links all the files to the science object
            type_bits[0] = fitstbl.type_bitmask.turn_on(type_bits[0],
                                                        flag='bias')
            type_bits[1] = fitstbl.type_bitmask.turn_on(type_bits[1],
                                                        flag='arc')
            type_bits[2:4] = fitstbl.type_bitmask.turn_on(
                type_bits[2:4], flag=['pixelflat', 'trace'])
            type_bits[4] = fitstbl.type_bitmask.turn_on(type_bits[4],
                                                        flag='standard')
            type_bits[5:] = fitstbl.type_bitmask.turn_on(type_bits[5:],
                                                         flag='science')
            fitstbl.set_frame_types(type_bits)
            # Calibration groups
            cfgs = fitstbl.unique_configurations(
                ignore_frames=['bias', 'dark'])
            fitstbl.set_configurations(cfgs)
            fitstbl.set_calibration_groups(global_frames=['bias', 'dark'])

    return fitstbl
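
# Hedged alternative (not in the original) for the TODO above: building the
# dummy observation times with TimeDelta removes the minutes-only-go-to-59
# limit of the string-formatting approach.
import numpy as np
from astropy import time
from astropy.time import TimeDelta

nfile = 100  # illustrative
t0 = time.Time('2015-01-23T00:00:11.04', format='isot')
ttime = t0 + TimeDelta(60.0 * np.arange(nfile), format='sec')
mjd = ttime.mjd  # strictly increasing, one minute apart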
Exemplo n.º 25
0
    def set_OM_to_time(self):
        """Set OM to a time"""
        self.m.OM.value = time.Time(54000, format="mjd")
Exemplo n.º 26
0
scdc.representation_type = 'cartesian'
scpm = coordinates.SkyCoord([1, 2], [3, 4], [5, 6],
                            unit='deg,deg,pc',
                            pm_ra_cosdec=[7, 8] * u.mas / u.yr,
                            pm_dec=[9, 10] * u.mas / u.yr)
scpmrv = coordinates.SkyCoord([1, 2], [3, 4], [5, 6],
                              unit='deg,deg,pc',
                              pm_ra_cosdec=[7, 8] * u.mas / u.yr,
                              pm_dec=[9, 10] * u.mas / u.yr,
                              radial_velocity=[11, 12] * u.km / u.s)
scrv = coordinates.SkyCoord([1, 2], [3, 4], [5, 6],
                            unit='deg,deg,pc',
                            radial_velocity=[11, 12] * u.km / u.s)
tm = time.Time([51000.5, 51001.5],
               format='mjd',
               scale='tai',
               precision=5,
               location=el[0])
tm2 = time.Time(tm, precision=3, format='iso')
tm3 = time.Time(tm, location=el)
tm3.info.serialize_method['ecsv'] = 'jd1_jd2'
obj = table.Column([{'a': 1}, {'b': [2]}], dtype='object')
su = table.Column([(1, (1.5, 1.6)), (2, (2.5, 2.6))],
                  name='su',
                  dtype=[('i', np.int64),
                         ('f', [('p1', np.float64), ('p0', np.float64)])])
su2 = table.Column([(['snake', 'c'], [1.6, 1.5]), (['eal', 'a'], [2.5, 2.6])],
                   dtype=[('name', 'U5', (2, )), ('f', 'f8', (2, ))])

# NOTE: for testing, the name of the column "x" for the
# Quantity is important since it tests the fix for #10215
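
# Hedged round-trip sketch (not part of the fixture above): mixin columns like
# the Time objects here are typically exercised by writing to ECSV and reading
# back.
from astropy import table, time

t = table.Table()
t['tm'] = time.Time([51000.5, 51001.5], format='mjd', scale='tai')
t.write('times.ecsv', format='ascii.ecsv', overwrite=True)
t2 = table.Table.read('times.ecsv', format='ascii.ecsv')
assert (t2['tm'] == t['tm']).all()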
Exemplo n.º 27
0
    def __init__(self, MJD, # required
                 error=0.0, obs='Barycenter', freq=float("inf"),
                 scale=None,
                 **kwargs):  # keyword args that are completely optional
        r"""
        Construct a TOA object

        Parameters
        ----------
        MJD : astropy Time, float, or tuple of floats
            The time of the TOA, which can be expressed as an astropy Time,
            a floating point MJD (64 or 128 bit precision), or a tuple
            of (MJD1,MJD2) whose sum is the full precision MJD (usually the
            integer and fractional part of the MJD)
        error : float or astropy Quantity
            The TOA uncertainty.  Either a Quantity with time units, or a
            number for which microseconds is assumed.
        obs : string
            The observatory code for the TOA
        freq : float or astropy Quantity
            Frequency corresponding to the TOA.  Either a Quantity with frequency
            units, or a number for which MHz is assumed.
        scale : string
            Time scale for the TOA time.  Defaults to the timescale appropriate
            to the site, but can be overridden

        Notes
        -----
        It is VERY important that all astropy.Time() objects are created
        with precision=9. This is ensured in the code and is checked for any
        Time object passed to the TOA constructor.

        """
        site = get_observatory(obs)
        # If MJD is already a Time, just use it. Note that this will ignore
        # the 'scale' argument to the TOA() constructor!
        if isinstance(MJD, time.Time):
            t = MJD
        else:
            if numpy.isscalar(MJD):
                arg1, arg2 = MJD, None
            else:
                arg1, arg2 = MJD[0], MJD[1]
            if scale is None:
                scale = site.timescale
            # First build a time without a location
            # Note that when scale is UTC, must use pulsar_mjd format!
            if scale.lower() == 'utc':
                fmt = 'pulsar_mjd'
            else:
                fmt = 'mjd'
            t = time.Time(arg1, arg2, scale=scale,
                          format=fmt, precision=9)
        # Now assign the site location to the Time, for use in the TDB conversion
        # Time objects are immutable so you must make a new one to add the location!
        # Use the intial time to look up the observatory location
        # (needed for moving observatories)
        # The location is an EarthLocation in the ITRF (ECEF, WGS84) frame
        try:
            loc = site.earth_location_itrf(time=t)
        except Exception:
            log.error("Error computing earth_location_itrf at time {0}, {1}".format(t, type(t)))
            raise
        # Then construct the full time, with observatory location set
        self.mjd = time.Time(t, location=loc, precision=9)

        if hasattr(error,'unit'):
            self.error = error
        else:
            self.error = error * u.microsecond
        self.obs = site.name
        if hasattr(freq, 'unit'):
            try:
                freq.to(u.MHz)  # validate that freq has frequency units
            except u.UnitConversionError:
                log.error("Frequency for TOA with incompatible unit {0}".format(freq))
            self.freq = freq
        else:
            self.freq = freq * u.MHz
        if self.freq == 0.0*u.MHz:
            self.freq = numpy.inf*u.MHz
        self.flags = kwargs
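
        # Hedged usage sketch (signature taken from the constructor above;
        # values are illustrative):
        #
        #     t = TOA((54567, 0.876876876876876), error=4.5, obs='GBT', freq=1400.0)
        #
        # The (int, frac) tuple preserves full MJD precision; a bare error is
        # interpreted as microseconds and a bare freq as MHz.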
Exemplo n.º 28
0
    def get(self, request, input_format, input_value, format=None):

        conversion = models.TimesConversion()
        conversion.input_format = input_format
        conversion.input_value = input_value
        conversion.documentation_URL = "http://docs.astropy.org/en/stable/time/index.html#reference-api"

        if input_format in [
                'byear', 'cxcsec', 'decimalyear', 'gps', 'jd', 'jyear', 'mjd',
                'plot_date', 'unix'
        ]:
            input_value = float(input_value)

        try:
            time = t.Time(input_value, format=input_format)
        except ValueError as e:
            conversion_error = models.Error(code=9, message=str(e))
            conversion_error.save()
            conversion.error = conversion_error
        else:
            conversion.byear = time.byear
            conversion.byear_str = time.byear_str
            conversion.cxcsec = time.cxcsec

            try:
                conversion.datetime = time.datetime
            except ValueError as datetime_e:
                if conversion.error is None:
                    conversion_error = models.Error(code=9,
                                                    message='datetime: ' +
                                                    str(datetime_e))
                    conversion_error.save()
                    conversion.error = conversion_error
                else:
                    conversion.error.message += ", datetime: " + str(datetime_e)
                    conversion.error.save()

            conversion.decimalyear = time.decimalyear
            conversion.gps = time.gps
            conversion.iso = time.iso
            conversion.isot = time.isot
            conversion.jd = time.jd
            conversion.jyear = time.jyear
            conversion.jyear_str = time.jyear_str
            conversion.mjd = time.mjd
            conversion.plot_date = time.plot_date
            conversion.unix = time.unix

            try:
                conversion.yday = time.yday
            except ValueError as datetime_e:
                if conversion.error is None:
                    conversion_error = models.Error(code=9,
                                                    message='yday: ' +
                                                    str(datetime_e))
                    conversion_error.save()
                    conversion.error = conversion_error
                else:
                    conversion.error.message += ", yday: " + str(datetime_e)
                    conversion.error.save()

        serializer = serializers.TimesConversionSerializer(conversion)
        return response.Response(serializer.data)
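
        # Hedged sketch (not part of the view) of the astropy conversions the
        # endpoint exposes:
        #
        #     from astropy import time as t
        #     x = t.Time(58000.0, format='mjd')
        #     x.iso, x.isot, x.jd, x.jyear, x.unix, x.decimalyear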
Exemplo n.º 29
0
    def compute_posvels(self, ephem=None, planets=False):
        """Compute positions and velocities of the observatories and Earth.

        Compute the positions and velocities of the observatory (wrt
        the Geocenter) and the center of the Earth (referenced to the
        SSB) for each TOA.  The JPL solar system ephemeris can be set
        using the 'ephem' parameter.  The positions and velocities are
        set with PosVel class instances which have astropy units.
        """
        if ephem is None:
            if self.ephem is not None:
                ephem = self.ephem
            else:
                log.warning('No ephemeris provided to TOAs object or compute_posvels. Using DE421')
                ephem = 'DE421'
        else:
            # If user specifies an ephemeris, make sure it is the same as the one already in the TOA object, to prevent mixing.
            if (self.ephem is not None) and (ephem != self.ephem):
                log.error('Ephemeris provided to compute_posvels {0} is different than TOAs object ephemeris {1}!'.format(ephem,self.ephem))
        # Record the choice of ephemeris and planets
        self.ephem = ephem
        self.planets = planets
        log.info('Computing positions and velocities of observatories and Earth (planets = {0}), using {1} ephemeris'.format(planets, ephem))
        # Remove any existing columns
        cols_to_remove = ['ssb_obs_pos', 'ssb_obs_vel', 'obs_sun_pos']
        for c in cols_to_remove:
            if c in self.table.colnames:
                log.info('Column {0} already exists. Removing...'.format(c))
                self.table.remove_column(c)
        for p in ('jupiter', 'saturn', 'venus', 'uranus'):
            name = 'obs_'+p+'_pos'
            if name in self.table.colnames:
                log.info('Column {0} already exists. Removing...'.format(name))
                self.table.remove_column(name)

        self.table.meta['ephem'] = ephem
        ssb_obs_pos = table.Column(name='ssb_obs_pos',
                                    data=numpy.zeros((self.ntoas, 3), dtype=numpy.float64),
                                    unit=u.km, meta={'origin':'SSB', 'obj':'OBS'})
        ssb_obs_vel = table.Column(name='ssb_obs_vel',
                                    data=numpy.zeros((self.ntoas, 3), dtype=numpy.float64),
                                    unit=u.km/u.s, meta={'origin':'SSB', 'obj':'OBS'})
        obs_sun_pos = table.Column(name='obs_sun_pos',
                                    data=numpy.zeros((self.ntoas, 3), dtype=numpy.float64),
                                    unit=u.km, meta={'origin':'OBS', 'obj':'SUN'})
        if planets:
            plan_poss = {}
            for p in ('jupiter', 'saturn', 'venus', 'uranus'):
                name = 'obs_'+p+'_pos'
                plan_poss[name] = table.Column(name=name,
                                    data=numpy.zeros((self.ntoas, 3), dtype=numpy.float64),
                                    unit=u.km, meta={'origin':'OBS', 'obj':p})

        # Now step through in observatory groups
        for ii, key in enumerate(self.table.groups.keys):
            grp = self.table.groups[ii]
            obs = self.table.groups.keys[ii]['obs']
            loind, hiind = self.table.groups.indices[ii:ii+2]
            site = get_observatory(obs)
            tdb = time.Time(grp['tdb'],precision=9)
            ssb_obs = site.posvel(tdb,ephem)
            log.debug("SSB obs pos {0}".format(ssb_obs.pos[:,0]))
            ssb_obs_pos[loind:hiind,:] = ssb_obs.pos.T.to(u.km)
            ssb_obs_vel[loind:hiind,:] = ssb_obs.vel.T.to(u.km/u.s)
            sun_obs = objPosVel_wrt_SSB('sun',tdb,ephem) - ssb_obs
            obs_sun_pos[loind:hiind,:] = sun_obs.pos.T.to(u.km)
            if planets:
                for p in ('jupiter', 'saturn', 'venus', 'uranus'):
                    name = 'obs_'+p+'_pos'
                    dest = p
                    pv = objPosVel_wrt_SSB(dest,tdb,ephem) - ssb_obs
                    plan_poss[name][loind:hiind,:] = pv.pos.T.to(u.km)
        cols_to_add = [ssb_obs_pos, ssb_obs_vel, obs_sun_pos]
        if planets:
            cols_to_add += plan_poss.values()
        log.debug('Adding columns ' + ' '.join([cc.name for cc in cols_to_add]))
        self.table.add_columns(cols_to_add)
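
        # Hedged usage note (not part of the method): in PINT this is typically
        # invoked on a TOAs object after loading, e.g.
        #
        #     toas.compute_posvels(ephem='DE421', planets=True)
        #
        # which adds the ssb_obs_pos/ssb_obs_vel/obs_sun_pos (and, with
        # planets=True, obs_*_pos) columns to toas.table.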
Exemplo n.º 30
0
import astropy.coordinates as coords
from astropy.time import Time

t = Time('2020-12-15T20:04:00', format='isot', scale='utc')

c = coords.get_body('uranus', t)

print("RA = {} rad".format(c.ra.rad))
print("Dec = {} rad".format(c.dec.rad))
print("Observation Time = {} {}".format(t.value, t.scale))