Example #1
def test_strftime_array_2():
    tstrings = [['1998-01-01 00:00:01', '1998-01-01 00:00:02'],
                ['1998-01-01 00:00:03', '1995-12-31 23:59:60']]
    tstrings = np.array(tstrings)

    t = Time(tstrings)

    for format in t.FORMATS:
        t.format = format
        assert np.all(t.strftime('%Y-%m-%d %H:%M:%S') == tstrings)
        assert t.strftime('%Y-%m-%d %H:%M:%S').shape == tstrings.shape
Example #2
File: skynoise.py Project: grand-oma/HDR
def build_hor_map(ra_v, dec_v, temp, site, timestr):
    # Build tools for Horizontal coordinates
    if site == "lenghu":
        location = EarthLocation(lat=38.4*u.deg, lon=+93.3*u.deg, height=2650*u.m)
    obs_time = Time(timestr) # UTC time
    str_time = obs_time.strftime("%b-%d-%Y %H:%M")
    stepaz = round(360./np.shape(ra_v)[0])
    stepalt = round(180./np.shape(dec_v)[0])
    az_v = np.arange(0,360,stepaz)
    nl = np.shape(az_v)[0]
    alt_v = np.arange(-90,90,stepalt)
    nb = np.shape(alt_v)[0]
    az = np.repeat(az_v,nb)
    az = np.reshape(az,(nl,nb))
    alt = np.tile(alt_v,(nl,1))
    temp_hor = np.zeros(shape=np.shape(alt))

    # Now loop on RaDec map and extract temp values
    for i, ra  in enumerate(ra_v):
      print("***",i,"/",len(ra_v))
      for j, dec in enumerate(dec_v):
        c = SkyCoord(ra, dec, frame='icrs', unit=(u.radian, u.radian))
        altaz = c.transform_to(AltAz(obstime=obs_time,location=location))
        th = altaz.alt.degree
        phi = altaz.az.degree
        ii = np.argmin(abs(phi-az_v))
        jj = np.argmin(abs(th-alt_v))
        temp_hor[ii,jj] = temp[i,j]

    return az,alt,temp_hor
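Note: this snippet (like build_hor_map2 in Example #12) relies on names from the enclosing module, and location is only defined for site == "lenghu". A plausible set of imports, reconstructed from usage rather than copied from the original file:

import numpy as np
import astropy.units as u
from astropy.time import Time
from astropy.coordinates import SkyCoord, EarthLocation, AltAz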
Example #3
def get_anneal_name(anneal_start):

    t = Time(anneal_start, format='mjd')
    anneal_name = t.strftime('%Y%b%d')
    print('Anneal start date: ', anneal_name)

    return anneal_name
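For instance, an anneal starting at MJD 59000 (which falls on 2020-05-31 UTC) is named as follows:

name = get_anneal_name(59000)   # prints: Anneal start date:  2020May31
assert name == '2020May31'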
Example #4
def test_strftime_leapsecond():
    time_string = '1995-12-31 23:59:60'
    t = Time(time_string)

    for format in t.FORMATS:
        t.format = format
        assert t.strftime('%Y-%m-%d %H:%M:%S') == time_string
Example #5
def test_strftime_array():
    tstrings = ['2010-09-03 00:00:00', '2005-09-03 06:00:00', '1995-12-31 23:59:60']
    t = Time(tstrings)

    for format in t.FORMATS:
        t.format = format
        assert t.strftime('%Y-%m-%d %H:%M:%S').tolist() == tstrings
Example #6
def write_fits(skysubrect, specrect, errorrect, header):
    hdulist = []
    for image, ftp, nn in zip([skysubrect, specrect, errorrect],
                              [fits.PrimaryHDU, fits.ImageHDU, fits.ImageHDU],
                              ['skysub', 'spectra', 'error']):
        hdu = ftp(np.array(image, dtype='float32'))
        hdu.header['CRVAL2'] = 1
        hdu.header['CRVAL1'] = np.log(def_wave)[0]
        hdu.header['CRPIX2'] = 1
        hdu.header['CRPIX1'] = 1
        hdu.header['CTYPE2'] = 'fiber'
        hdu.header['CTYPE1'] = 'ln(wave)'
        hdu.header['CDELT2'] = 1
        hdu.header['CDELT1'] = np.log(def_wave)[1] - np.log(def_wave)[0]
        for key in header.keys():
            if key in hdu.header:
                continue
            if ('CCDSEC' in key) or ('DATASEC' in key):
                continue
            if ('BSCALE' in key) or ('BZERO' in key):
                continue
            try:
                hdu.header[key] = header[key]
            except Exception:
                continue
        t = Time(header['DATE-OBS'])
        objname = '_'.join(str(header['OBJECT']).split())
        iname = '_'.join([objname, t.strftime('%Y%m%dT%H%M%S'), 'multi'])
        hdu.header['EXTNAME'] = nn
        hdulist.append(hdu)
    fits.HDUList(hdulist).writeto(op.join(outfolder, iname + '.fits'),
                                  overwrite=True)
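write_fits also leans on module-level names (op, def_wave, outfolder) that the snippet does not show; hypothetical stand-ins consistent with the ln(wave) header cards might look like:

import os.path as op
import numpy as np
from astropy.io import fits
from astropy.time import Time
# Hypothetical placeholders; the real module defines these elsewhere.
def_wave = np.exp(np.linspace(np.log(3500.0), np.log(5500.0), 1036))  # log-uniform wavelength grid
outfolder = '.'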
Example #7
def test_strftime_scalar():
    """Test of Time.strftime
    """
    time_string = '2010-09-03 06:00:00'
    t = Time(time_string)

    for format in t.FORMATS:
        t.format = format
        assert t.strftime('%Y-%m-%d %H:%M:%S') == time_string
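Examples #1, #4, #5, and #7 all exercise the same contract: Time.strftime formats the underlying instant with a datetime-style format string, regardless of the Time object's current format attribute. A minimal standalone sketch:

from astropy.time import Time

t = Time('2010-09-03 06:00:00')
t.format = 'mjd'  # changes the representation, not the instant
print(t.strftime('%Y-%m-%d %H:%M:%S'))  # 2010-09-03 06:00:00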
Example #8
File: mmtcam.py Project: zyc1113/MMTtools
def wind_avg_max(wind_tab0, h0):
    '''
    Compute average, maximum and avg direction of wind from wind data for
    each observation period

    Parameters
    ----------
    wind_tab0 : astropy.table.Table
     Astropy table containing young and young2 wind data

    h0 : astropy.io.fits.header.Header
      FITS header

    Returns
    -------
    h0 : astropy.io.fits.header.Header
      Updated FITS header

    Notes
    -----
    Created by Chun Ly, 1 March 2017
    '''
    t_start = Time(h0['DATE-OBS']).to_datetime(timezone=utc_mst)
    t_stop  = t_start + timedelta(seconds=h0['EXPTIME'])

    # Note: mjd_start/mjd_stop are the MJDs of the MST timestamps, not UTC
    mjd_start = Time(t_start.strftime('%Y-%m-%d %H:%M:%S')).mjd
    mjd_stop  = Time(t_stop.strftime('%Y-%m-%d %H:%M:%S')).mjd

    time0 = Time(wind_tab0['MST_time'])
    mjd0  = time0.mjd
    t_idx = np.where((mjd0 >= mjd_start) & (mjd0 <= mjd_stop))[0]

    avg1 = np.average(wind_tab0['speed1'][t_idx]*mph_conv)
    max1 = np.max(wind_tab0['speed1'][t_idx]*mph_conv)
    dir1 = np.average(wind_tab0['direct1'][t_idx])

    avg2 = np.average(wind_tab0['speed2'][t_idx]*mph_conv)
    max2 = np.max(wind_tab0['speed2'][t_idx]*mph_conv)
    dir2 = np.average(wind_tab0['direct2'][t_idx])

    #young1 = {'avg':avg1, 'max':max1, 'dir':dir1}
    #young2 = {'avg':avg2, 'max':max2, 'dir':dir2}
    #return young1, young2

    # Update header with wind data
    h0.set('Y1_AVG', avg1, 'YOUNG1 avg wind speed [mph]')
    h0.set('Y1_MAX', max1, 'YOUNG1 max wind speed [mph]')
    h0.set('Y1_DIR', dir1, 'YOUNG1 avg wind direction [deg]')

    h0.set('Y2_AVG', avg2, 'YOUNG2 avg wind speed [mph]')
    h0.set('Y2_MAX', max2, 'YOUNG2 max wind speed [mph]')
    h0.set('Y2_DIR', dir2, 'YOUNG2 avg wind direction [deg]')

    return h0
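wind_avg_max depends on the module-level utc_mst and mph_conv (get_mst in Example #13 uses utc_mst as well). Plausible definitions, assuming the logged speeds are in m/s and that MST is a fixed UTC-7 offset:

from datetime import timezone, timedelta
utc_mst  = timezone(timedelta(hours=-7))  # assumption: fixed-offset MST (UTC-7)
mph_conv = 2.23694                        # assumption: m/s -> mph conversion factor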
Example #9
def GPSTime2Local(time):
    timeLocal0 = []
    for i in range(len(time)):
        tgps0 = time[i]
        tgps = tgps0 - 19  # Added correction
        t = Time(tgps, format='gps', precision=3)
        t = Time(t, format='iso', precision=3)
        t = t.strftime("%H:%M:%S")
        timeLocal0.append(t)
        # print(str(tgps) + ' --> ' + str(t))
    return timeLocal0
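A hypothetical call (the 19 s subtraction above appears to be a hand-applied leap-second-style offset; note the output strings are formatted from the ISO/UTC scale despite the "Local" name):

gps_epochs = [1.0e9, 1.0e9 + 3600.0]  # hypothetical GPS seconds, one hour apart
print(GPSTime2Local(gps_epochs))      # two 'HH:MM:SS' strings, one hour apart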
Example #10
def jd2cal(jd, precision=0, DT_FMT='iso'):
    t = Time(jd, format='jd', precision=precision)

    if DT_FMT == 'iso':
        t.format = 'iso'
        return t.value
    else:
        if isinstance(jd, float):
            dt = t.strftime(DT_FMT)
        else:
            # note: this branch formats in TT, unlike the scalar branch above
            dt = [x.tt.datetime.strftime(DT_FMT) for x in t]

        return np.asarray(dt)
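Usage sketch; note the two branches are not symmetric: the scalar path formats the Time in its own scale, while the array path formats x.tt.datetime, i.e. in TT (roughly a minute ahead of UTC):

import numpy as np
print(jd2cal(2459000.5))                           # '2020-05-31 00:00:00'
print(jd2cal(2459000.5, DT_FMT='%Y%m%d'))          # array('20200531', ...)
print(jd2cal(np.array([2459000.5, 2459001.5]), DT_FMT='%Y%m%d'))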
Example #11
def get_twilights(night, alt=-18., verbose=False):
    '''
    Calculate and return twilight values for the start and end of the night.

    Parameters
    ----------
    night : int
        night in form 20210320 for 20 March 2021
    alt : float
        sun altitude for twilight (default 18 deg twilight)
    verbose : bool
        print verbose output? 

    Returns
    -------
    twibeg : float
        time of twilight in MJD at beginning of the night
    twiend : float
        time of twilight in MJD at the end of the night
    '''

    night = int(night)
    nightstr = str(night)
    timestr = "{0}-{1}-{2}T00:00:00.0".format(nightstr[:4], nightstr[4:6], nightstr[6:8])
    t = Time(timestr, format='isot', scale='utc') + 1  # + 1 is treated as a one-day TimeDelta, advancing to the next UT date

    # Set observatory lat,long to calculate twilight
    desi = ephem.Observer()
    desi.lon = '-111.59989'
    desi.lat = '31.96403'
    desi.elev = 2097.
    desi.date = t.strftime('%Y/%m/%d 7:00')

    # Calculate twilight times
    desi.horizon = str(alt)
    beg_twilight=desi.previous_setting(ephem.Sun(), use_center=True) # End astro twilight
    end_twilight=desi.next_rising(ephem.Sun(), use_center=True) # Begin astro twilight
    twibeg = Time( beg_twilight.datetime(), format='datetime').mjd
    twiend = Time( end_twilight.datetime(), format='datetime').mjd

    if verbose: 
        print("Evening twilight: ", beg_twilight, "UT") 
        print("Morning twilight: ", end_twilight, "UT") 

    return twibeg, twiend
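A usage sketch (requires ephem and astropy; the returned MJDs bracket the night of 20 March 2021 at Kitt Peak):

twibeg, twiend = get_twilights(20210320, verbose=True)
print(twiend - twibeg)  # dark-time length in days, roughly 0.4 in March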
Example #12
File: skynoise.py Project: grand-oma/HDR
def build_hor_map2(ra,dec,temp,site,timestr):
    # First transform the RA/Dec map to horizontal coordinates
    if site == "lenghu":
        location = EarthLocation(lat=38.4*u.deg, lon=+93.3*u.deg, height=2650*u.m)
    obs_time = Time(timestr, scale='utc',location=location) # UTC time
    str_time = obs_time.strftime("%b-%d-%Y %H:%M")
    LST = obs_time.sidereal_time('mean')
    LST_hour = str(LST).split('h')[0]
    print("Local Sideral Time:",LST)

    print("Building LFMap in Hor coordinates for LST hour =",LST_hour,"h.")

    c = SkyCoord(ra, dec, frame='icrs', unit=(u.radian, u.radian))
    altaz = c.transform_to(AltAz(obstime=obs_time,location=location))

    # Now build the final (ordered) map in horizontal coordinates
    nl = np.shape(ra)[0]
    nb = np.shape(ra)[1]
    stepaz = round(360./nl)
    stepalt = round(180./nb)
    az_v = np.arange(0,360,stepaz) * u.degree
    alt_v = np.arange(-90,90,stepalt) * u.degree
    az = np.repeat(az_v,nb)
    az = np.reshape(az,(nl,nb))
    alt = np.tile(alt_v,(nl,1))
    altaz_fin = SkyCoord(az, alt, frame = 'altaz', obstime=obs_time,location=location)

    # Now find the corresponding cells and fill the temperature matrix accordingly
    temp_hor = np.zeros(shape=np.shape(alt))
    for i in range(nl):
          print('***',i,'/',nl)
          for j in range(nb):
              sep = altaz.separation(altaz_fin[i,j]).degree
              sepi = np.where(sep == np.amin(sep))
              a = sepi[0][0]
              b = sepi[1][0]
              #print('Minimal distance =',np.amin(sep),'deg in cell',a,b)
              #print('(',altaz_fin[i,j].az.degree,altaz_fin[i,j].alt.degree,') vs (',altaz[a,b].az.degree,altaz[a,b].alt.degree,')')
              temp_hor[i,j] = temp[a,b]

    return np.array(az),np.array(alt),temp_hor
Example #13
File: mmtcam.py Project: zyc1113/MMTtools
def get_mst(h0):
    '''
    Get string-formatted MST time from UTC datetime

    Parameters
    ----------
    h0 : astropy.io.fits.header.Header
      FITS header

    Returns
    -------
    t.strftime : string
      Time in HH:MM:SS format

    Notes
    -----
    Created by Chun Ly, 26 February 2017
    '''

    t = Time(h0['DATE-OBS']).to_datetime(timezone=utc_mst)
    return t.strftime('%H:%M:%S')
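Usage sketch, assuming the fixed UTC-7 utc_mst suggested after Example #8:

from astropy.io import fits
h0 = fits.Header()
h0['DATE-OBS'] = '2017-02-26T10:00:00'
print(get_mst(h0))  # '03:00:00' under a UTC-7 offset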
Example #14
def main(year=None,
         month=None,
         day=None,
         ndays=1,
         overwritejp2=False,
         overwritefits=False):
    # tst = datetime.strptime("2017-04-01", "%Y-%m-%d")
    # ted = datetime.strptime("2019-12-31", "%Y-%m-%d")
    if year:
        ted = datetime(year, month, day)
    else:
        ted = datetime.now() - timedelta(days=2)
    tst = Time(np.fix(Time(ted).mjd) - ndays + 1, format='mjd').datetime
    print("Running pipeline_fitsutils for date from {} to {}".format(
        tst.strftime("%Y-%m-%d"), ted.strftime("%Y-%m-%d")))
    dateobs = tst
    while dateobs <= ted:
        datestr = dateobs.strftime("%Y-%m-%d")
        rewriteImageFits(datestr,
                         verbose=True,
                         writejp2=True,
                         overwritejp2=overwritejp2,
                         overwritefits=overwritefits)
        dateobs = dateobs + timedelta(days=1)
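For instance, to reprocess the three days ending 2019-12-31 (rewriteImageFits is defined elsewhere in the surrounding module):

main(year=2019, month=12, day=31, ndays=3, overwritejp2=True)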
Example #15
    def fetch(self, qres, path=None, error_callback=None, **kwargs):
        """
        Download a set of results.

        Parameters
        ----------
        qres : `~sunpy.net.dataretriever.QueryResponse`
            Results to download.

        Returns
        -------
        Results Object
        """

        urls = [qrblock['url'] for qrblock in qres]

        filenames = []
        local_filenames = []

        for i, [url, qre] in enumerate(zip(urls, qres)):
            name = url.split('/')[-1]

            day = Time(qre['Time'].start.strftime('%Y-%m-%d')) + TimeDelta(
                i * u.day)

            if name not in filenames:
                filenames.append(name)

            if name.endswith('.gz'):
                local_filenames.append('{}SRS.txt'.format(
                    day.strftime('%Y%m%d')))
            else:
                local_filenames.append(name)

        if path is not None:
            path = pathlib.Path(path)
        # Files to be actually downloaded
        paths = self._get_full_filenames(qres, filenames, path)

        # Those files that will be present after get returns
        local_paths = self._get_full_filenames(qres, local_filenames, path)

        # remove duplicate urls. This will make paths and urls to have same number of elements.
        # OrderedDict is required to maintain ordering because it will be zipped with paths later
        urls = list(OrderedDict.fromkeys(urls))

        dobj = Downloader(max_conn=5)

        for aurl, fname in zip(urls, paths):
            dobj.enqueue_file(aurl, filename=fname)

        paths = dobj.download()

        outfiles = []
        for fname, srs_filename in zip(local_paths, local_filenames):

            name = fname.name

            past_year = False
            for i, fname2 in enumerate(paths):
                fname2 = pathlib.Path(fname2)

                if fname2.name.endswith('.txt'):
                    continue

                year = fname2.name.split('_SRS')[0]

                if year in name:
                    TarFile = tarfile.open(fname2)
                    filepath = fname.parent
                    member = TarFile.getmember('SRS/' + srs_filename)
                    member.name = name
                    TarFile.extract(member, path=filepath)
                    TarFile.close()

                    outfiles.append(fname)

                    past_year = True
                    break

            if past_year is False:
                outfiles.append(fname)

        paths.data = list(map(str, outfiles))
        return paths
Example #16
    max_bl = int(np.max(baseline_lengths))
    lb_threshold = {
        'B3': 750,
        'B6': 780,
    }

    TM = ('TM1' if max_bl < lb_threshold[band] else 'TM2')

    if antennadiameter == 12:
        array_config = stacked[(obstime > start_times)
                               & (obstime < end_times)]['Approx\xa0Config.']
        if len(array_config) == 0:
            array_config = ['unknown']
    else:
        TM = array_config = '7M'
    key = obstime.strftime('%Y-%m-%d')
    if fields[0] in results:
        # account for duplicate date keys under the same field
        if key in results[fields[0]]:
            key = key + "_"
        results[fields[0]][key] = {
            'array': array_config[0],
            'TM': TM,
            'mous': mous,
            'band': band,
            'onsource': onsource_time / 3600,
            'exptime': integration_time.to(u.hour).value
        }
    else:
        results[fields[0]] = {
            key: {
Example #17
    def fetch(self, qres, path=None, error_callback=None, **kwargs):
        """
        Download a set of results.

        Parameters
        ----------
        qres : `~sunpy.net.dataretriever.QueryResponse`
            Results to download.

        Returns
        -------
        Results Object
        """

        urls = [qrblock.url for qrblock in qres]

        filenames = []
        local_filenames = []

        for i, [url, qre] in enumerate(zip(urls, qres)):
            name = url.split('/')[-1]

            # Temporary fix: all QRBs share the same start_time values
            day = Time(qre.time.start.strftime('%Y-%m-%d')) + TimeDelta(
                i * u.day)

            if name not in filenames:
                filenames.append(name)

            if name.endswith('.gz'):
                local_filenames.append('{}SRS.txt'.format(
                    day.strftime('%Y%m%d')))
            else:
                local_filenames.append(name)

        # Files to be actually downloaded
        paths = self._get_full_filenames(qres, filenames, path)

        # Those files that will be present after get returns
        local_paths = self._get_full_filenames(qres, local_filenames, path)

        res = Results(lambda x: None, 0, lambda map_: self._link(map_))

        # remove duplicate urls. This will make paths and urls to have same number of elements.
        # OrderedDict is required to maintain ordering because it will be zipped with paths later

        urls = list(OrderedDict.fromkeys(urls))

        dobj = Downloader(max_conn=len(urls), max_total=len(urls))

        # We cast to list here in list(zip... to force execution of
        # res.require([x]) at the start of the loop.
        for aurl, ncall, fname in list(
                zip(urls, map(lambda x: res.require([x]), urls), paths)):
            dobj.download(aurl, fname, ncall, error_callback)

        res.wait()

        res2 = Results(lambda x: None, 0)

        for fname, srs_filename in zip(local_paths, local_filenames):

            fname = fname.args[0]
            name = fname.split('/')[-1]

            past_year = False
            for i, fname2 in enumerate(paths):

                fname2 = fname2.args[0]

                if fname2.endswith('.txt'):
                    continue

                year = fname2.split('/')[-1]
                year = year.split('_SRS')[0]

                if year in name:
                    TarFile = tarfile.open(fname2)
                    filepath = fname.rpartition('/')[0]
                    member = TarFile.getmember('SRS/' + srs_filename)
                    member.name = name
                    TarFile.extract(member, path=filepath)
                    TarFile.close()

                    callback = res2.require([fname])
                    callback({'path': fname})

                    past_year = True
                    break

            if past_year is False:
                callback = res2.require([fname])
                callback({'path': fname})

        return res2
Example #18
File: mmtcam.py Project: zyc1113/MMTtools
def query_mmtlog_wind(u_start, u_stop, user='******', passwd='', path0='',
                      silent=False, verbose=True):
    '''
    Query ops.mmto.arizona.edu's log for wind data.
    Note: This code requires specifying the password

    Parameters
    ----------
    u_start : string
     UTC start time. Formatted as 'YYYY-MM-DD HH:MM:SS'

    u_stop : string
     UTC stop time. Formatted as 'YYYY-MM-DD HH:MM:SS'

    user : string
     Username to login. Default: 'webuser'

    passwd : string
     Password for user. Default: ''

    path0 : string
      Directory path to files.

    silent : boolean
      Turns off stdout messages. Default: False

    verbose : boolean
      Turns on additional stdout messages. Default: True

    Returns
    -------
    tab0 : astropy.table.Table
     Astropy table containing young and young2 wind data

    Notes
    -----
    Created by Chun Ly, 28 February 2017
    Modified by Chun Ly, 01 March 2017
     - Return tab0
    '''

    if passwd == '':
        log.error('Must specify password!')
        log.error('Exiting!!!')
        return

    if silent == False: log.info('### Begin query_mmtlog_wind: '+systime())

    m_start = Time(u_start).to_datetime(timezone=utc_mst)
    m_start = m_start.strftime('%Y-%m-%d %H:%M:%S')
    m_stop  = Time(u_stop).to_datetime(timezone=utc_mst) + \
              timedelta(seconds=10*60.0) # Add 10 min. to have enough buffer
    m_stop  = m_stop.strftime('%Y-%m-%d %H:%M:%S')

    conn = pymysql.connect(host='ops.mmto.arizona.edu', user=user,
                           passwd=passwd, db='mmtlogs')
    cur = conn.cursor()

    sql1 = "SELECT timestamp,young_wind_speed,young_wind_direction FROM "+\
           "young_background_log where timestamp >= '"+m_start+"' AND "+\
           "timestamp < '"+m_stop+"'"
    # print sql1
    n_entries = cur.execute(sql1)
    results1 = cur.fetchall()

    sql2 = sql1.replace('young', 'young2')
    cur.execute(sql2)
    results2 = cur.fetchall()

    time0   = np.repeat('XXXX-XX-XX XX:XX:XX', n_entries)
    speed1  = np.zeros(n_entries)
    direct1 = np.zeros(n_entries)
    speed2  = np.zeros(n_entries)
    direct2 = np.zeros(n_entries)

    for nn in range(n_entries):
        time0[nn]   = results1[nn][0].isoformat()
        speed1[nn]  = results1[nn][1]
        direct1[nn] = results1[nn][2]

        speed2[nn]  = results2[nn][1]
        direct2[nn] = results2[nn][2]

    outfile = path0+'wind_data.tbl'
    vec0   = [time0, speed1, direct1, speed2, direct2]
    names0 = ('MST_time','speed1', 'direct1', 'speed2', 'direct2')
    tab0 = Table(vec0, names=names0)
    if silent == False: log.info('## Writing : '+outfile)
    asc.write(tab0, outfile, format='fixed_width_two_line', overwrite=True)
    if silent == False: log.info('### End query_mmtlog_wind: '+systime())
    return tab0
Example #19
File: lcogt.py Project: awe2/YSE_PZ
class lcogt(object):
    def __init__(self, username, password, progid, telescop, start_date,
                 end_date):

        uri_base = 'https://observe.lco.global/'
        archive_base = 'https://archive-api.lco.global/'
        self.params = {
            'uri': {
                'archive': {
                    'authorization': archive_base + 'api-token-auth/',
                    'frames': archive_base + 'frames/',
                    'token': None
                },
                'request': {
                    'authorization': uri_base + 'api/api-token-auth/',
                    'request': uri_base + 'api/requestgroups/',
                    'token': None
                }
            },
            'date_format': '%Y-%m-%d %H:%M:%S',
            'constraints': {
                'max_airmass': 2.5,
                'min_lunar_distance': 15
            },
            'strategy': {
                'default': {
                    'type': 'default',
                    'proposal': [
                        {
                            'name': progid,
                            'obstype': 'NORMAL'
                        },
                    ],
                    'filters': ['up', 'gp', 'rp', 'ip'],
                    'min_exposure': {
                        'default': 45,
                        'up': 150
                    },
                    'max_exposure': 540,
                    # SNR strategy are pairwise mag, snr values.
                    # If current source magnitude is < mag, then use given snr.
                    'snr': [[16, 40], [18, 20], [99, 10]],
                    'cadence': 4,
                    'telescope_class': '1m0',
                    'instrument_type': '1M0-SCICAM-SINISTRO',
                    'acquisition_config': 'OFF',
                    'guiding_config': 'ON',
                    'window': 1.0,
                    'ipp': 1.0
                },
                'spectroscopy': {
                    'type': 'spectroscopy',
                    'proposal': [
                        {
                            'name': progid,
                            'obstype': 'NORMAL'
                        },
                    ],
                    'min_exposure': 300,
                    'max_exposure': 2400,
                    'telescope_class': '2m0',
                    'instrument_type': '2M0-FLOYDS-SCICAM',
                    'slit': 'slit_1.6as',
                    'acquisition_config': 'OFF',
                    'guiding_config': 'ON',
                    'window': 1.0,
                    'ipp': 1.0
                },
                'photometry': {
                    'type': 'photometry',
                    'proposal': [
                        {
                            'name': progid,
                            'obstype': 'NORMAL'
                        },
                    ],
                    'filters': ['up', 'gp', 'rp', 'ip'],
                    'min_exposure': {
                        'default': 45,
                        'up': 150
                    },
                    'max_exposure': 540,
                    # SNR strategy are pairwise mag, snr values.  first is mag
                    # and second is snr.  If mag_source < mag, then use snr.
                    'snr': [[16, 40], [18, 20], [99, 10]],
                    'cadence': 4,
                    'telescope_class': '1m0',
                    'instrument_type': '1M0-SCICAM-SINISTRO',
                    'acquisition_config': 'OFF',
                    'guiding_config': 'ON',
                    'window': 1.0,
                    'ipp': 1.01
                }
            }
        }

        self.start_date = Time(start_date).datetime
        self.end_date = Time(end_date).datetime

        self.telescope = telescop
        if 'soar' in telescop:
            self.params['strategy']['spectroscopy'] = {
                'type': 'spectroscopy',
                'proposal': [
                    {
                        'name': progid,
                        'obstype': 'NORMAL'
                    },
                ],
                'min_exposure': 300,
                'max_exposure': 2400,
                'telescope_class': '4m0',
                'instrument_type': 'SOAR_GHTS_REDCAM',
                'slit': 'slit_1.0as',
                'acquisition_config': 'OFF',
                'guiding_config': 'ON',
                'window': 6.0,
                'ipp': 1.0
            }
            self.params['constraints'] = {
                'max_airmass': 1.7,
                'min_lunar_distance': 30
            }

        # List of all proposals to which I have access.  Some of these are old
        # proposals where the data are now public
        self.proposals = [
            'NOAO2019A-020', 'NOAO2017AB-012', 'NOAO2018A-005',
            'NOAO2019A-020-TC', 'NOAO2017AB-005', 'NOAO2018A-007',
            'NOAO2018B-022', 'NOAO2019A-001', 'NOAO2018B-022b',
            'NOAO2019B-008', 'NOAO2019B-004', 'KEY2017AB-001', 'NOAO2019B-009',
            'NAOC2017AB-001', 'LCO2018A-004', 'FTPEPO2014A-004',
            'ARI2017AB-002', progid
        ]

        # These are relevant to SINISTRO on the 1m.  Will update with Spectral
        # and MuSCAT3 zeropoints
        self.constants = {
            'zpt': {
                'up': 20.5665,
                'gp': 23.2249,
                'rp': 23.1314,
                'ip': 22.8465
            }
        }

        self.username = username
        self.password = password

        self.format = {'token': 'Token {token}'}

    def get_username_password(self):
        if (self.username is not None and self.password is not None):
            return (self.username, self.password)
        else:
            # Get username password out of shibboleth
            return (None, None)

    def get_token_header(self, username, password, auth_type='archive'):

        # Check that we're getting the right token
        if auth_type not in self.params['uri'].keys():
            return (None)

        params = self.params['uri'][auth_type]

        # Check if header token has already been defined
        if params['token']:
            # Return the header that we need
            fmt = self.format['token']
            header = {'Authorization': fmt.format(token=params['token'])}
            return (header)
        else:
            # Need to generate a new token
            data = {'username': username, 'password': password}
            uri = params['authorization']

            # Now run a request
            response = requests.post(uri, data=data).json()

            # Check that the request worked
            if 'token' in response.keys():
                self.params['uri'][auth_type]['token'] = response['token']
                fmt = self.format['token']
                header = {'Authorization': fmt.format(token=response['token'])}
                return (header)
            else:
                # There was some problem with authentication/connection
                return (None)

    # Download spectral calibration files for a specific telid and date
    def get_spectral_calibrations(self,
                                  date,
                                  telid,
                                  site,
                                  outrootdir='',
                                  funpack=True):

        # Get username and password
        username, password = self.get_username_password()

        # Get authorization token
        headers = self.get_token_header(username, password)

        params = {'limit': 100}
        results = []
        params['TELID'] = telid
        params['SITEID'] = site
        delta = TimeDelta(1, format='jd')
        params['start'] = (date - delta).datetime.strftime(
            self.params['date_format'])
        params['end'] = (date + delta).datetime.strftime(
            self.params['date_format'])

        # First check for LAMPFLATs
        params['OBSTYPE'] = 'LAMPFLAT'
        response = requests.get(self.params['uri']['archive']['frames'],
                                params=params,
                                headers=headers)

        if response.status_code != 200:
            print(response.text)
        else:
            data = response.json()
            results += data['results']

        # Now check for ARCs
        params['OBSTYPE'] = 'ARC'
        response = requests.get(self.params['uri']['archive']['frames'],
                                params=params,
                                headers=headers)

        if response.status_code != 200:
            print(response.text)
        else:
            data = response.json()
            results += data['results']

        # Now try to download the obslist
        self.download_obslist(results,
                              outrootdir=outrootdir,
                              use_basename=True,
                              skip_header=True,
                              funpack=funpack)

    # Get a json object with complete list of observations.  Optional to define
    # a program or date range to narrow search
    def get_obslist(self,
                    propid=None,
                    sdate=None,
                    edate=None,
                    telid=None,
                    obstype=None,
                    rlevel=None,
                    obj=None,
                    reqnum=None):

        # Get username and password
        username, password = self.get_username_password()

        # Get authorization token
        headers = self.get_token_header(username, password)

        # Get parameters for request
        params = {'limit': 5000}
        fmt = self.params['date_format']
        results = []
        propids = []
        if rlevel is not None:
            params['RLEVEL'] = rlevel
        else:
            params['RLEVEL'] = 91
        if propid is not None:
            propids = list(propid)
        if sdate is not None:
            params['start'] = sdate.datetime.strftime(fmt)
        if edate is not None:
            params['end'] = edate.datetime.strftime(fmt)
        if telid is not None:
            params['TELID'] = telid
        if obstype is not None:
            params['OBSTYPE'] = obstype
        if obj is not None:
            params['OBJECT'] = obj
        if reqnum is not None:
            params['REQNUM'] = reqnum

        for pid in propids:
            params['PROPID'] = pid

            # Now do request
            response = requests.get(self.params['uri']['archive']['frames'],
                                    params=params,
                                    headers=headers)

            if response.status_code != 200:
                print(response.text)
            else:
                data = response.json()
                results += data['results']

        return (results)

    # Get a set of standard star observation for spectrophotometric calibration
    def get_standardobs(self, sdate=None, telid=None, rlevel=None):

        # Get username and password
        username, password = self.get_username_password()

        # Get authorization token
        headers = self.get_token_header(username, password)

        # Get parameters for request
        params = {'OBSTYPE': 'SPECTRUM', 'public': True}

        delta = TimeDelta(14, format='jd')
        date = Time(datetime.now())
        start = (date - delta).datetime
        fmt = self.params['date_format']

        if not sdate: params['start'] = start.strftime(fmt)
        else: params['start'] = sdate.datetime.strftime(fmt)

        if not rlevel: params['RLEVEL'] = 0
        else: params['RLEVEL'] = rlevel

        objs = [
            'EGGR274', 'L745-46A', 'FEIGE110', 'HZ 44', 'BD+284211', 'GD71'
        ]

        results = []
        for obj in objs:
            params['OBJECT'] = obj
            response = requests.get(self.params['uri']['archive']['frames'],
                                    params=params,
                                    headers=headers)

            if response.status_code != 200:
                print(response.text)
            else:
                data = response.json()
                results += data['results']

        return (results)

    # Get recent observation requests for proposal ID, start date, end date,
    # observation type, and instrument type
    def get_requestgroups(self,
                          propid=None,
                          sdate=None,
                          edate=None,
                          obstype=None,
                          itype=None):

        # Get username and password
        username, password = self.get_username_password()

        # Get authorization token
        headers = self.get_token_header(username,
                                        password,
                                        auth_type='request')

        params = {}
        results = []
        propids = []
        if propid is not None:
            propids = list(propid)
        if sdate is not None:
            start = sdate.datetime.strftime(self.params['date_format'])
            params['created_after'] = start
        if edate is not None:
            end = edate.datetime.strftime(self.params['date_format'])
            params['created_before'] = end

        for pid in propids:
            params['proposal'] = pid
            params['limit'] = 500

            # Now do request
            response = requests.get(self.params['uri']['request']['request'],
                                    params=params,
                                    headers=headers).json()

            # If instrument_type, remove values that do not conform
            if itype is not None:
                for r in copy.copy(response['results']):
                    test = r['requests'][0]['configurations'][0][
                        'instrument_type']
                    if test != itype:
                        response['results'].remove(r)

            results += response['results']

        return (results)

    # Given an input obslist from the LCOGT archive, download the associated
    # files for each element
    def download_obslist(self,
                         obslist,
                         outrootdir='',
                         use_basename=False,
                         skip_header=False,
                         funpack=True):

        for frame in obslist:
            filename = ''
            if use_basename:
                filename = frame['basename'] + '.fits.fz'
            else:
                target = frame['OBJECT']

                # Need to sanitize target, e.g., for spaces
                target = target.replace(' ', '_')

                filt = frame['FILTER']
                idnum = str(frame['id'])
                date = Time(frame['DATE_OBS']).datetime.strftime('ut%y%m%d')
                filename = target + '.' + date + '.' +\
                    filt + '.' + idnum + '.fits.fz'
            fullfilename = outrootdir + '/' + filename
            if not os.path.exists(outrootdir):
                shutil.os.makedirs(outrootdir)
            # note: str.strip('.fz') removes characters, not the suffix
            if ((not os.path.exists(fullfilename)
                 and not os.path.exists(fullfilename.replace('.fz', '')) and funpack)
                    or (not os.path.exists(fullfilename))):
                message = 'Downloading LCOGT file: {file}'
                print(message.format(file=fullfilename))
                with open(fullfilename, 'wb') as f:
                    f.write(requests.get(frame['url']).content)
            else:
                message = 'LCOGT file: {file} already exists!'
                print(message.format(file=fullfilename))

            # funpack - requires cfitsio
            if not os.path.exists(fullfilename.replace('.fz', '')) and funpack:
                cmd = 'funpack {file}'
                os.system(cmd.format(file=fullfilename))
            if os.path.exists(fullfilename) and funpack:
                os.remove(fullfilename)

            # Remove extraneous extension
            if not skip_header:
                hdulist = fits.open(fullfilename.replace('.fz', ''))
                newhdu = fits.HDUList()
                hdu = hdulist['SCI']
                hdu.header['OBSTYPE'] = 'OBJECT'
                newhdu.append(hdu)
                newhdu.writeto(fullfilename.replace('.fz', ''), overwrite=True)

    # Make a location element with telescope class
    def make_location(self, telescope):
        return ({'telescope_class': telescope})

    # Make an airmass/lunar distance constraint element
    def make_constraints(self):
        max_airmass = self.params['constraints']['max_airmass']
        min_lunar_distance = self.params['constraints']['min_lunar_distance']
        constraints = {
            'max_airmass': max_airmass,
            'min_lunar_distance': min_lunar_distance
        }
        return (constraints)

    # This sets up a target based on name, ra, dec
    def make_target(self, name, ra, dec):
        target = {
            'type': 'ICRS',
            'name': name,
            'ra': ra,
            'dec': dec,
            'proper_motion_ra': 0.0,
            'proper_motion_dec': 0.0,
            'parallax': 0.0,
            'epoch': 2000.0
        }
        return (target)

    # This sets up the instrument parameters for an obs
    # Imaging specific
    def make_instrument_configs(self, filt, exptime, strat):
        # This sets up an imaging observation for SINISTRO.  Need to update for
        # 2m imaging with Spectral and MuSCAT3
        if strat['type'] == 'default':
            configuration = {
                'instrument_name': '1M0-SCICAM-SINISTRO',
                'optical_elements': {
                    'filter': filt
                },
                'mode': 'full_frame',
                'exposure_time': exptime,
                'exposure_count': 1,
                'bin_x': 1,
                'bin_y': 1,
                'extra_params': {
                    'defocus': 0.0
                }
            }
        # This creates a spectroscopy observing element for FLOYDS
        elif strat['type'] == 'spectroscopy' and 'faulkes' in self.telescope:
            configuration = {
                "bin_x": 1,
                "bin_y": 1,
                "exposure_count": 1,
                "exposure_time": exptime,
                "mode": "default",
                "rotator_mode": "VFLOAT",
                "extra_params": {},
                "optical_elements": {
                    "slit": strat['slit']
                }
            }
        elif strat['type'] == 'spectroscopy' and 'soar' in self.telescope:
            configuration = {
                "bin_x": 2,
                "bin_y": 2,
                "exposure_count": 1,
                "exposure_time": exptime,
                "mode": "GHTS_R_400m1_2x2",
                "rotator_mode": "SKY",
                "extra_params": {
                    "rotator_angle": 90
                },
                "optical_elements": {
                    "slit": strat['slit'],
                    "grating": "SYZY_400"
                }
            }
        return ([configuration])

    # Make an acquisition element for FLOYDS spectroscopy (on coordinate vs.
    # acquire brightest object)
    def make_acquisition_config(self, strat, mode=None):
        if not mode:
            mode = strat['acquisition_config']
        return ({'mode': mode})

    # Should guiding be on or off
    def make_guiding_config(self, strat, extra_params={}):
        mode = strat['guiding_config']
        config = {'mode': mode}
        for key in extra_params.keys():
            config[key] = extra_params[key]
        return (config)

    """
    Window object for LCOGT API v3.
    start time = earliest the obs can be executed
    duration = amount of time in which request can be executed
    """

    def make_window(self, start, duration):
        fmt = self.params['date_format']
        end = start + timedelta(days=duration)
        # Note that this is an override for YSE App
        window = [{
            'start': self.start_date.strftime(fmt),
            'end': self.end_date.strftime(fmt)
        }]
        return (window)

    # Post your observing request with requests.post
    def post_user_request(self, user_request):
        # Get username and password
        username, password = self.get_username_password()

        # Get authorization token
        header = self.get_token_header(username, password, auth_type='request')
        uri = self.params['uri']['request']['request']
        response = requests.post(uri, json=user_request, headers=header)
        return (response)

    # Guess the exposure time based on input magnitude of target
    def get_exposure_time(self, filt, mag, strat):
        # Get preferred snr given input magnitude
        # Handle 'spec' case
        if filt == 'spec':
            if mag < 14:
                return (300)
            elif mag < 15:
                return (600)
            elif mag < 16:
                return (900)
            elif mag < 17:
                return (1500)
            elif mag < 17.5:
                return (1800)
            elif mag < 18.0:
                return (2100)
            elif mag < 18.5:
                return (2400)
            else:
                return (None)

        snr = 10.
        # Use the first (brightest) matching bracket; without the break, the
        # last matching pair always wins, defeating the documented intent.
        for pair in strat['snr']:
            if mag < pair[0]:
                snr = pair[1]
                break
        term1 = 20. * snr**2
        term2 = 0.4 * (mag - self.constants['zpt'][filt])
        exptime = term1 * 10**term2
        print(filt, exptime)
        min_exposure = 45.0
        if filt in strat['min_exposure'].keys():
            min_exposure = strat['min_exposure'][filt]
        else:
            min_exposure = strat['min_exposure']['default']
        if exptime < min_exposure:
            exptime = min_exposure
        if exptime > strat['max_exposure']:
            return (None)

        return (exptime)
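A worked instance of the scaling above, using the rp zeropoint from self.constants (with the break applied, a 19.5 mag source selects snr = 10):

snr, mag, zpt = 10., 19.5, 23.1314
exptime = 20. * snr**2 * 10**(0.4 * (mag - zpt))
print(round(exptime, 1))  # ~70.5 s: above the 45 s floor and below
                          # max_exposure, so it is returned unchanged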

    """
    LCO API v3 configurations.
    configurations consist of constraints, instrument configuration, acquisition
    configuration, guiding configuration, target, instrument type, exposure
    type, and observation priority.  This method will construct a series of
    configurations for a single target.
    """

    def make_configurations(self, obj, ra, dec, exptime, strat):
        # Iterate over each filter and append to configurations list
        configurations = []

        # If default, construct configurations for photometry strategy
        if strat['type'] == 'default':
            for i, filt in enumerate(strat['filters']):
                constraints = self.make_constraints()

                # Get exposure time and make instrument_config
                instrument_configs = self.make_instrument_configs(
                    filt, exptime, strat)

                # Make acquisition and guiding config with strat
                acquisition_config = self.make_acquisition_config(strat)
                guiding_config = self.make_guiding_config(strat)

                # Make a tar object
                target = self.make_target(obj, ra, dec)

                # Compile everything into configuration
                configuration = {
                    'constraints': constraints,
                    'instrument_configs': instrument_configs,
                    'acquisition_config': acquisition_config,
                    'guiding_config': guiding_config,
                    'target': target,
                    'instrument_type': strat['instrument_type'],
                    'type': 'EXPOSE',
                    'priority': i + 1
                }
                configurations.append(configuration)

        elif strat['type'] == 'spectroscopy' and 'faulkes' in self.telescope:
            # Need to construct LAMP FLAT, ARC, SPECTRUM, ARC, LAMP FLAT
            for obstype in [
                    'LAMP_FLAT', 'ARC', 'SPECTRUM', 'ARC', 'LAMP_FLAT'
            ]:

                # Use default constraint values
                constraints = self.make_constraints()

                if obstype == 'LAMP_FLAT':
                    exptime = 50
                if obstype == 'ARC':
                    exptime = 60

                # Make acquisition and guiding config with strat
                instrument_configs = self.make_instrument_configs(
                    'spec', exptime, strat)
                if obstype == 'SPECTRUM':
                    acquisition_config = self.make_acquisition_config(
                        strat, mode='WCS')
                else:
                    acquisition_config = self.make_acquisition_config(strat)
                guiding_config = self.make_guiding_config(strat)

                # Make a target object
                target = self.make_target(obj, ra, dec)

                # Compile everything into configuration
                configuration = {
                    'type': obstype,
                    'constraints': constraints,
                    'instrument_configs': instrument_configs,
                    'acquisition_config': acquisition_config,
                    'guiding_config': guiding_config,
                    'target': target,
                    'instrument_type': strat['instrument_type']
                }
                configurations.append(configuration)

        elif strat['type'] == 'spectroscopy' and 'soar' in self.telescope:
            for i, obstype in enumerate(['ARC', 'SPECTRUM']):
                # Use default constraint values
                constraints = self.make_constraints()

                if obstype == 'ARC':
                    exptime = 0.5

                # Make acquisition and guiding config with strat
                instrument_configs = self.make_instrument_configs(
                    'spec', exptime, strat)
                if obstype == 'SPECTRUM':
                    acquisition_config = self.make_acquisition_config(
                        strat, mode='MANUAL')
                    guiding_config = self.make_guiding_config(
                        strat,
                        extra_params={
                            'optional': False,
                            "optical_elements": {},
                            "exposure_time": None,
                            "extra_params": {}
                        })
                elif obstype == 'ARC':
                    acquisition_config = self.make_acquisition_config(
                        strat, mode='OFF')
                    guiding_config = self.make_guiding_config(
                        strat,
                        extra_params={
                            'optional': True,
                            "optical_elements": {},
                            "exposure_time": None,
                            "extra_params": {}
                        })

                # Make a target object
                target = self.make_target(obj, ra, dec)

                # Compile everything into configuration
                configuration = {
                    'type': obstype,
                    'constraints': constraints,
                    'instrument_configs': instrument_configs,
                    'acquisition_config': acquisition_config,
                    'guiding_config': guiding_config,
                    'target': target,
                    'instrument_type': strat['instrument_type'],
                    'priority': i + 1
                }
                configurations.append(configuration)

        return (configurations)

    """
    LCO API v3 request.
    requests is a list of requests each defined by a location, configuration,
    and a window.
    """

    def make_requests(self, obj, ra, dec, exptime, strat):
        location = self.make_location(strat['telescope_class'])
        window = self.make_window(datetime.now(), strat['window'])
        configurations = self.make_configurations(obj, ra, dec, exptime, strat)

        requests = [{
            'location': location,
            'windows': window,
            'configurations': configurations
        }]

        return (requests)

    """
    LCO API v3 observation request.
    Each method constructs one layer of the request.  Here we need to make the
    outermost obs request with name, proposal, ipp value, operator, observation
    type, and the individual requests.  Send the
    """

    def make_obs_request(self,
                         obj,
                         ra,
                         dec,
                         exptime,
                         strategy='default',
                         propidx=''):

        # Get params - strategy data
        strat = self.params['strategy'][strategy]

        proposal = propidx

        # Make the obs_request dictionary
        obs_request = {
            'name': obj,
            'proposal': propidx,
            'ipp_value': strat['ipp'],
            'operator': 'SINGLE',
            'observation_type': 'NORMAL'
        }

        # Iterate through the next level of request
        requests = self.make_requests(obj, ra, dec, exptime, strat)
        if not requests[0]['configurations']:
            return (None)
        else:
            obs_request['requests'] = requests
            print(obs_request)
            response = self.post_user_request(obs_request)
            print(response.content)
            response = response.json()
            return (response)
Example #20
File: noaa.py Project: Cadair/sunpy
    def fetch(self, qres, path=None, error_callback=None, **kwargs):
        """
        Download a set of results.

        Parameters
        ----------
        qres : `~sunpy.net.dataretriever.QueryResponse`
            Results to download.

        Returns
        -------
        Results Object
        """

        urls = [qrblock.url for qrblock in qres]

        filenames = []
        local_filenames = []

        for i, [url, qre] in enumerate(zip(urls, qres)):
            name = url.split('/')[-1]

            # Temporary fix: all QRBs share the same start_time values
            day = Time(qre.time.start.strftime('%Y-%m-%d')) + TimeDelta(i*u.day)

            if name not in filenames:
                filenames.append(name)

            if name.endswith('.gz'):
                local_filenames.append('{}SRS.txt'.format(day.strftime('%Y%m%d')))
            else:
                local_filenames.append(name)

        # Files to be actually downloaded
        paths = self._get_full_filenames(qres, filenames, path)

        # Those files that will be present after get returns
        local_paths = self._get_full_filenames(qres, local_filenames, path)

        # remove duplicate urls. This will make paths and urls to have same number of elements.
        # OrderedDict is required to maintain ordering because it will be zipped with paths later
        urls = list(OrderedDict.fromkeys(urls))

        dobj = Downloader(max_conn=5)

        for aurl, fname in zip(urls, paths):
            dobj.enqueue_file(aurl, filename=fname)

        paths = dobj.download()

        outfiles = []
        for fname, srs_filename in zip(local_paths, local_filenames):

            name = fname.name

            past_year = False
            for i, fname2 in enumerate(paths):
                fname2 = pathlib.Path(fname2)

                if fname2.name.endswith('.txt'):
                    continue

                year = fname2.name.split('_SRS')[0]

                if year in name:
                    TarFile = tarfile.open(fname2)
                    filepath = fname.parent
                    member = TarFile.getmember('SRS/' + srs_filename)
                    member.name = name
                    TarFile.extract(member, path=filepath)
                    TarFile.close()

                    outfiles.append(fname)

                    past_year = True
                    break

            if past_year is False:
                outfiles.append(fname)

        paths.data = list(map(str, outfiles))
        return paths
Example #21
class AllskyImage():
    """Class for handling individual FITS image files."""

    def __init__(self, filename, data, header):
        self.filename = filename  # raw image filename
        self.datetime = None      # date and time of observation
        self.thumbfilename = None # filename for thumbnail image
        self.data = data          # image data array
        self.header = header      # image header
        self.subregions = None    # subregion arrays
        self.features = None      # extracted features
        
    @classmethod
    def read_fits(cls, filename):
        """Create `~AllskyImage` instance from FITS image file.

        :return: self
        """
        hdu = fits.open(filename)[0]

        self = cls(filename.split(os.path.sep)[-1],
                   hdu.data.astype(float), hdu.header)

        try:
            self.datetime = Time(self.header['DATE-OBS'], format='isot')
        except (ValueError, KeyError):
            conf.logger.warning(('No time information for image file '
                                 '{}.').format(filename))
            
        return self

    def write_fits(self, filename):
        """Write `~AllskyImage` instance to FITS image file"""
        hdu = fits.PrimaryHDU(self.data)
        hdu.writeto(filename, overwrite=True)

    def create_overlay(self, overlaytype='srcdens', regions=None):
        """Create overlay for thumbnail image. Requires self.subregions to be
        initialized. An overlay is an array with the same dimensions as
        self.data` in which certain subregions get assigned certain values as
        defined by `overlaytype`.

        :param overlaytype: define data source from `self.features` from which
                            overlay should be generated, default: 'srcdens'
        :param regions: list of length=len(self.subregions), highlights
                        subregions with list element value > 0; requires
                        `overlaytype='subregions'`, default: None

        :return: overlay array
        """
        overlay_map = np.zeros(self.data.shape)  # renamed from `map` to avoid shadowing the builtin

        for i, sub in enumerate(self.subregions):
            if overlaytype == 'srcdens':
                overlay_map += sub*self.features['srcdens'][i]
            elif overlaytype == 'bkgmedian':
                overlay_map += sub*self.features['bkgmedian'][i]
            elif overlaytype == 'bkgmean':
                overlay_map += sub*self.features['bkgmean'][i]
            elif overlaytype == 'bkgstd':
                overlay_map += sub*self.features['bkgstd'][i]
            elif overlaytype == 'subregions':
                if regions[i]:
                    overlay_map += sub
            else:
                raise AllskyImageError('overlaytype "{}" unknown.'.format(
                    overlaytype))

        overlay_map[overlay_map == 0] = np.nan
        return overlay_map
    
        
    def write_image(self, filename, overlay=None, mask=None,
                    overlay_alpha=0.3, overlay_color='Reds'):
        """Write `~AllskyImage` instance as scaled png thumbnail image file.

        :param filename: filename of image to be written, relative to cwd
        :param overlay: provide overlay or list of overlays, optional
        :param mask: apply image mask before writing image file
        :param overlay_alpha: alpha value to be applied to overlay
        :param overlay_color: colormap to be used with overlay

        :return: None
        """

        conf.logger.info('writing thumbnail "{}"'.format(filename))
        
        data = self.data

        # derive image scaling and stretching
        if mask is not None:
            norm = ImageNormalize(data[mask.data == 1],
                                  conf.THUMBNAIL_SCALE(),
                                  stretch=LinearStretch())
            data[mask.data == 0] = 0
        else:
            norm = ImageNormalize(data, conf.THUMBNAIL_SCALE(),
                                  stretch=LinearStretch())

        # create figure
        f, ax = plt.subplots(figsize=(conf.THUMBNAIL_WIDTH,
                                      conf.THUMBNAIL_HEIGHT))

        # plot image
        img = ax.imshow(data, origin='lower',
                        norm=norm, cmap='gray',
                        extent=[0, self.data.shape[1],
                                0, self.data.shape[0]])

        # plot overlay(s)
        if overlay is not None:
            if not isinstance(overlay, list):
                overlay = [overlay]
            if not isinstance(overlay_color, list):
                overlay_color = [overlay_color] * len(overlay)
            overlay_img = []
            for i in range(len(overlay)):
                overlay_img.append(ax.imshow(overlay[i], cmap=overlay_color[i],
                                             origin='lower', vmin=0, 
                                             alpha=overlay_alpha,
                                             extent=[0, overlay[i].shape[1],
                                                     0, overlay[i].shape[0]]))
                overlay_img[i].axes.get_xaxis().set_visible(False)
                overlay_img[i].axes.get_yaxis().set_visible(False)

        # remove axis labels and ticks
        plt.axis('off')
        img.axes.get_xaxis().set_visible(False)
        img.axes.get_yaxis().set_visible(False)

        # save thumbnail image
        plt.savefig(filename, bbox_inches='tight', dpi=conf.THUMBNAIL_DPI,
                    pad_inches=0)
        plt.close()

        # let thumbfilename consist of <night>/<filename>
        self.thumbfilename = os.path.join(*filename.split(os.path.sep)[-2:])

    def apply_mask(self, mask):
        """Apply `~AllskyImage` mask to this instance"""
        self.data = self.data * mask.data
        
    def crop_image(self):
        """Crop this `~AllskyImage` instance to the ranges defined by
        ``conf.X_CROPRANGE`` and ``conf.Y_CROPRANGE``.
        """
        self.data = self.data[conf.Y_CROPRANGE[0]:conf.Y_CROPRANGE[1],
                              conf.X_CROPRANGE[0]:conf.X_CROPRANGE[1]]
        
    def extract_features(self, subregions, mask=None):
        """Extract image features for each subregion. Image should be cropped
        and masked.

        :param subregions: subregions to be used
        :param mask: mask to be applied in source extraction, optional

        :return: None (``False`` if time information is missing)
        """
        # set internal pixel buffer
        sep.set_extract_pixstack(10000000)

        # extract time from header and derive frame properties
        try:
            time = Time(self.header['DATE-OBS'], format='isot')
            features = OrderedDict([
                ('time', time.isot),
                ('filename', self.filename.split(os.path.sep)[-1]),
                ('moon_alt', observatory.moon_altaz(time).alt.deg),
                ('sun_alt', observatory.sun_altaz(time).alt.deg),
                ('moon_phase', 1-observatory.moon_phase(time).value/np.pi),
            ])
        except KeyError as e:
            conf.logger.error('missing time data in file {}: {}.'.format(
                self.filename, e))
            return False

        # derive and subtract sky background
        bkg = sep.Background(self.data.astype(np.float64),
                             bw=conf.SEP_BKGBOXSIZE, bh=conf.SEP_BKGBOXSIZE,
                             fw=conf.SEP_BKGXRANGE, fh=conf.SEP_BKGYRANGE)
        data_sub = self.data - bkg.back()

        # if mask is provided, it is applied in the proper derivation of
        # source brightness thresholds
        if mask is not None:
            threshold = (np.ma.median(np.ma.array(data_sub,
                                                  mask=(1-mask))) +
                         np.median(bkg.rms())*conf.SEP_SIGMA)
            src = sep.extract(data_sub, threshold, minarea=conf.SEP_MINAREA,
                              mask=(1-mask),
                              deblend_nthresh=conf.SEP_DEBLENDN,
                              deblend_cont=conf.SEP_DEBLENDV)
        else:
            threshold = (np.median(data_sub) +
                         np.median(bkg.rms())*conf.SEP_SIGMA)
            src = sep.extract(data_sub, threshold, minarea=conf.SEP_MINAREA,
                              mask=mask,
                              deblend_nthresh=conf.SEP_DEBLENDN,
                              deblend_cont=conf.SEP_DEBLENDV)

        # apply max_flag cutoff (reject flawed sources)
        src = src[src['flag'] <= conf.SEP_MAXFLAG]

        # feature extraction per subregion
        features['srcdens'] = []
        features['bkgmedian'] = []
        features['bkgmean'] = []
        features['bkgstd'] = []
        for i, sub in enumerate(subregions):
            features['srcdens'].append(len(
                src[sub[src['y'].astype(int),
                        src['x'].astype(int)]])/np.sum(sub[mask == 1]))
            features['bkgmedian'].append(np.median(bkg.back()[sub]))
            features['bkgmean'].append(np.mean(bkg.back()[sub]))
            features['bkgstd'].append(np.std(bkg.back()[sub]))

        self.subregions = subregions
        self.features = features

    def write_to_database(self):
        """Write extracted features to database."""
        session = requests.Session()
        post_headers = {'Content-Type': 'application/json'}

        try:
            data = {'date': self.features['time'],
                    'night': int(self.datetime.strftime('%Y%m%d')),
                    'filearchivepath': self.thumbfilename,
                    'moonalt': self.features['moon_alt'],
                    'sunalt': self.features['sun_alt'],
                    'moonphase': self.features['moon_phase'],
                    'srcdens': self.features['srcdens'],
                    'bkgmean': self.features['bkgmean'],
                    'bkgmedian': self.features['bkgmedian'],
                    'bkgstd': self.features['bkgstd'],
            }
        except KeyError:
            conf.logger.error('data incomplete for file {}; reject.'.format(
                self.filename))
            return None

        post_request = session.post(
            conf.DB_URL+'data/Unlabeled/',
            headers=post_headers, auth=(conf.DB_USER, conf.DB_PWD),
            json=data)

        if post_request.status_code not in (requests.codes.ok,
                                            requests.codes.created):
            conf.logger.error('upload to database failed with code {}; {}'.format(
                post_request.status_code, post_request.text))
            raise ServerError('upload to database failed with code {}'.format(
                post_request.status_code))
Example #22
0
def get_observations(
    fds_only=True,
    start_obsid=None,
    finish_obsid=None,
    obs_date=None,
    dec_pointing=None,
    cen_chan=None,
    hour_angle=None,
    mask_gp=False,
):
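    """Return the `observation` table as a DataFrame, filtered by the given
    obsid range, date, declination strip, hour angle, and central channel,
    optionally masking out the Galactic plane."""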

    df = pd.read_sql_table("observation", mdb.dbconn)

    if fds_only:
        df = df[df["obsname"].str.contains("FDS")]

    if mask_gp:
        sky = SkyCoord(df["ra_pointing"],
                       df["dec_pointing"],
                       unit=(u.deg, u.deg))

        gal_l = sky.galactic.l.deg
        gal_b = sky.galactic.b.deg

        mask = ((gal_b >= GALACTIC_PLANE_LIMITS[0])
                & (gal_b <= GALACTIC_PLANE_LIMITS[1])
                & ((gal_l <= GALACTIC_PLANE_LIMITS[2]) |
                   (gal_l >= GALACTIC_PLANE_LIMITS[3])))

        df = df[~mask]

    if start_obsid is not None:
        df = df[df["obs_id"] >= int(start_obsid)]

    if finish_obsid is not None:
        df = df[df["obs_id"] <= int(finish_obsid)]

    if obs_date is not None:
        gps = Time(df["obs_id"], format="gps", location=MWA)
        gps_date = gps.strftime("%Y-%m-%d")

        df = df[gps_date == obs_date]

    if dec_pointing is not None:
        dec = np.array([-71.0, -55.0, -41.0, -39.0, -26.0, -12.0, 3.0, 20.0])
        min_dec = lambda x: dec[np.argmin(np.abs(dec - x["dec_pointing"]))]
        df["Dec Strip"] = df.apply(min_dec, axis=1)

        df = df[df["Dec Strip"] == dec_pointing]

    if hour_angle is not None:
        gps = Time(df["obs_id"], format="gps", location=MWA)
        lst = gps.sidereal_time("mean")

        ra = df["ra_pointing"].values
        ha = np.round((lst.deg - ra) / 15)

        mask = ha > 12
        ha[mask] = ha[mask] - 24.0

        mask = ha < -12
        ha[mask] = ha[mask] + 24.0

        df = df[ha.astype(int) == hour_angle]

    if cen_chan is not None:
        df = df[df["cenchan"] == cen_chan]

    return df
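
# Usage sketch (illustrative values only): the calls below assume the
# module-level `mdb` connection, `MWA` location, and GALACTIC_PLANE_LIMITS
# used above are in scope.
# night_obs = get_observations(obs_date="2014-11-06", cen_chan=121)
# strip_obs = get_observations(dec_pointing=-26.0, mask_gp=True)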
Example #23
0
File: files.py Project: gw-detchar/miyopy
def gps2datestr(gps):
    utc = Time(gps, format='gps').to_datetime()
    date_str = utc.strftime('%Y%m%d%H%M')
    return date_str
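
# Example (relying on astropy's GPS-to-UTC conversion): gps2datestr(1126259462)
# should return '201509140950', the minute of GW150914.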
Example #24
0
def recalc(outfile='save_all.pickle'):
    t0 = time.time()
    temptable_raw = load_temptable(TEMPFILE)

    log.info("Querying history of NuSTAR observations...")
    all_nustar_obs = get_obsid_list_from_heasarc()
    all_nustar_obs.sort('MET')

    all_nustar_obs['text'] = [
        (f"Source: {aobs['NAME']}<br>" + f"ObsID: {aobs['OBSID']}<br>" +
         f"Start: {aobs['DATE']} (MJD {aobs['TIME']})<br>" +
         f"End: {aobs['DATE-END']} (MJD {aobs['END_TIME']})<br>" +
         f"Type: {aobs['OBS_TYPE']}<br>" +
         f"Mode: {aobs['OBSERVATION_MODE']}<br>") for aobs in all_nustar_obs
    ]

    clock_offset_table = load_and_flag_clock_table(
        clockfile=CLOCKFILE, shift_non_malindi=False)
    clock_offset_table_corr = load_and_flag_clock_table(
        clockfile=CLOCKFILE, shift_non_malindi=True)

    table_times = temptable_raw['met']
    met_start = clock_offset_table['met'][0]
    met_stop = clock_offset_table['met'][-1] + 30
    clock_jump_times = np.array(
        [78708320, 79657575, 81043985, 82055671, 293346772])
    # Add 30 s so that these points are not excluded from the previous interval
    clock_jump_times += 30

    gtis = find_good_time_intervals(temptable_raw, clock_jump_times)

    table_new = temperature_correction_table(met_start,
                                             met_stop,
                                             temptable=temptable_raw,
                                             freqchange_file=FREQFILE,
                                             time_resolution=10,
                                             craig_fit=False,
                                             hdf_dump_file='dump.hdf5')

    table_new = eliminate_trends_in_residuals(table_new,
                                              clock_offset_table_corr,
                                              gtis,
                                              fixed_control_points=np.arange(
                                                  291e6, 295e6, 86400))

    mets = np.array(table_new['met'])
    start = mets[0]
    stop = mets[-1]

    good_mets = clock_offset_table['met'] < stop
    clock_offset_table = clock_offset_table[good_mets]
    # print(clock_offset_table['met'][-10:], table_new['met'][-1])

    clock_mets = clock_offset_table['met']
    clock_mjds = clock_offset_table['mjd']
    dates = Time(clock_mjds[:-1], format='mjd')

    tempcorr_idx = \
        np.searchsorted(table_new['met'], clock_offset_table['met'])
    clock_residuals = \
        np.array(
            clock_offset_table['offset'] - table_new['temp_corr'][tempcorr_idx]
        )
    clock_residuals_detrend = np.array(
        clock_offset_table['offset'] -
        table_new['temp_corr_detrend'][tempcorr_idx])

    all_data = Table({
        'met': clock_mets[:-1],
        'mjd': np.array(clock_mjds[:-1], dtype=int),
        'doy': dates.strftime("%Y:%j"),
        'utc': dates.strftime("%Y:%m:%d"),
        'offset': clock_offset_table['offset'][:-1],
        'residual': clock_residuals[:-1],
        'residual_detrend': clock_residuals_detrend[:-1],
        'station': clock_offset_table['station'][:-1]
    })

    all_data.meta['clock_offset_file'] = CLOCKFILE
    all_data.meta['temperature_file'] = TEMPFILE
    all_data.meta['frequency_file'] = FREQFILE

    pickle.dump((all_data, table_new, gtis, all_nustar_obs),
                open(outfile, 'wb'))

    calculate_stats(all_data)

    log.info(f"Reprocessing done. It took {time.time() - t0} s")
    return all_data, table_new, gtis, all_nustar_obs
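
# A minimal sketch of the `np.searchsorted` alignment used above, with toy
# arrays (assumed values, not pipeline data): each clock-offset epoch is
# matched to its row in the temperature-correction table.
import numpy as np
toy_table_met = np.array([0., 10., 20., 30.])
toy_obs_met = np.array([4., 21.])
toy_idx = np.searchsorted(toy_table_met, toy_obs_met)  # -> array([1, 3])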
Example #25
0
def save_observations(site: Site,
                      obs_list: list[Observation],
                      filename: str,
                      new_db=True):
    """
    Saves a list of Observation objects to a sqlite3 database and .csv file.

    The database contains rows formatted for: `satellite name, pass number, jd, local time, range, azimuth, elevation`
    Entries are ordered by the observation start time.

    :param Site site: Site object for the observation location
    :param List[Observation] obs_list: list of all observations to be saved
    :param filename: Name for the output .csv file
    :param new_db: (Optional: defaults to True)
        Setting to True will create a new DB for every new propagation run
        Setting to False will allow the DB to store data for multiple propagations
    """

    db_name = 'outputs/observations.sqlite'
    db = sqlite3.connect(db_name)
    cursor = db.cursor()

    if new_db:
        db.execute("DROP TABLE IF EXISTS " "observations")

    db.execute(
        "CREATE TABLE IF NOT EXISTS "
        "observations ("
        "site TEXT, name TEXT, pass_number REAL, jd REAL, time TEXT, range REAL, azimuth REAL, elevation REAL"
        ")")

    # Sort the observation objects by their start times
    obs_list.sort(key=operator.attrgetter('t0'))

    for obs in obs_list:
        datarows = obs.create_observation_entry()

        for row in datarows:
            sat_name, pass_number, jd, sat_range, azimuth, elevation = row

            # Changing the julianday into a readable datetime string
            t = Time(jd, format='jd')
            t.format = 'datetime'
            t += site.utc_offset
            print_time = t.strftime("%m/%d/%Y, %H:%M:%S")

            # Changing units for readability
            azimuth *= 180 / math.pi
            elevation *= 180 / math.pi

            cursor.execute(
                "INSERT INTO observations("
                "site, name, pass_number, jd, time, range, azimuth, elevation"
                ") VALUES(?, ?, ?, ?, ?, ?, ?, ?)",
                (str(site.obs_location), sat_name, pass_number, jd, print_time,
                 sat_range, azimuth, elevation))

    data = cursor.execute("SELECT * FROM observations")

    with open(filename + '.csv', 'w', newline='') as obsfile:
        obs_writer = csv.writer(obsfile, delimiter=',')
        obs_writer.writerow([str(site.obs_location)])
        obs_writer.writerow([
            'Satellite Name', 'Time', 'Range (km)', 'Azimuth (deg)',
            'Elevation (deg)'
        ])

        for _site, name, _pass_number, _jd, time_str, sat_range, azimuth, elevation in data:
            obs_writer.writerow([
                name, time_str,
                round(sat_range, 2),
                round(azimuth, 2),
                round(elevation, 2)
            ])

        print("\nObservation details saved to:\n" + filename + '.csv')

    cursor.close()
    db.commit()
    db.close()
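
# Usage sketch (hypothetical objects): assumes `site` is a Site and `passes`
# a list of Observation objects produced by an earlier propagation run.
# save_observations(site, passes, 'outputs/observations', new_db=True)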
Example #26
0
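# Excerpt: assumes `ffref`, `file1`, `file2`, `absval`, `clip`, and `bins`
# are defined earlier in the script.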
ffreff = fits.open(ffref)
ffrefim = trimarrays.trimzeros(trimarrays.trimnan(ffreff[0].data))
ffreff.close()

bf1 = fits.open(file1)
bim1 = bf1[0].data.astype(np.float32)
bdate1 = Time(bf1[0].header['DATE']).datetime
bf1.close()

bf2 = fits.open(file2)
bim2 = bf2[0].data.astype(np.float32)
bdate2 = Time(bf2[0].header['DATE']).datetime
bf2.close()

bim1, bim2 = trimarrays.trimto(ffrefim, bim1, bim2)
bdiffs = (bim1 - bim2).flatten()
absdiffs = np.abs(bdiffs)
mv = np.round(absdiffs.mean())
mstd = absdiffs.std()
if absval:
    bdiffs = absdiffs
    bdiffs[bdiffs > clip * mstd] = mv
else:
    bdiffs[bdiffs < -clip * mstd] = - mv
    bdiffs[bdiffs > clip * mstd] = mv

plt.hist(bdiffs.flatten(), bins=bins)
plt.title("Compare bias" + bdate1.strftime(" %Y-%m-%d %H:%M:%S -v- ") + bdate2.strftime("%Y-%m-%d %H:%M:%S"))
plt.show()
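
# A minimal standalone sketch of the clipping applied above, on synthetic
# data (assumed names, not part of the original script): values beyond
# clip * std of the absolute differences are replaced by the rounded mean.
import numpy as np
rng = np.random.default_rng(0)
d = rng.normal(0.0, 5.0, 10000)
clip_n = 4.0
mv_s = np.round(np.abs(d).mean())
mstd_s = np.abs(d).std()
d[d < -clip_n * mstd_s] = -mv_s
d[d > clip_n * mstd_s] = mv_s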
Example #27
0
File: noaa.py Project: larrymanley/sunpy
    def fetch(self, qres, path=None, error_callback=None, **kwargs):
        """
        Download a set of results.

        Parameters
        ----------
        qres : `~sunpy.net.dataretriever.QueryResponse`
            Results to download.

        Returns
        -------
        Results Object
        """

        urls = [qrblock.url for qrblock in qres]

        filenames = []
        local_filenames = []

        for i, (url, qre) in enumerate(zip(urls, qres)):
            name = url.split('/')[-1]

            # Temporary fix: all QRBs currently share the same start_time,
            # so offset each file's date by its index.
            day = Time(qre.time.start.strftime('%Y-%m-%d')) + TimeDelta(i*u.day)

            if name not in filenames:
                filenames.append(name)

            if name.endswith('.gz'):
                local_filenames.append('{}SRS.txt'.format(day.strftime('%Y%m%d')))
            else:
                local_filenames.append(name)

        # Files to be actually downloaded
        paths = self._get_full_filenames(qres, filenames, path)

        # Those files that will be present after get returns
        local_paths = self._get_full_filenames(qres, local_filenames, path)

        res = Results(lambda x: None, 0, lambda map_: self._link(map_))

        # Remove duplicate URLs so paths and urls have the same number of
        # elements; OrderedDict keeps the ordering needed for zipping with
        # paths later.

        urls = list(OrderedDict.fromkeys(urls))

        dobj = Downloader(max_conn=len(urls), max_total=len(urls))

        # We cast to list here in list(zip... to force execution of
        # res.require([x]) at the start of the loop.
        for aurl, ncall, fname in list(zip(urls, map(lambda x: res.require([x]),
                                                     urls), paths)):
            dobj.download(aurl, fname, ncall, error_callback)

        res.wait()

        res2 = Results(lambda x: None, 0)

        for fname, srs_filename in zip(local_paths, local_filenames):

            fname = fname.args[0]
            name = fname.split('/')[-1]

            past_year = False
            for i, fname2 in enumerate(paths):

                fname2 = fname2.args[0]

                if fname2.endswith('.txt'):
                    continue

                year = fname2.split('/')[-1]
                year = year.split('_SRS')[0]

                if year in name:
                    tar = tarfile.open(fname2)
                    filepath = fname.rpartition('/')[0]
                    member = tar.getmember('SRS/' + srs_filename)
                    member.name = name
                    tar.extract(member, path=filepath)
                    tar.close()

                    callback = res2.require([fname])
                    callback({'path': fname})

                    past_year = True
                    break

            if past_year is False:
                callback = res2.require([fname])
                callback({'path': fname})

        return res2
Example #28
0
def read_hdf5_limb_state_common_data(self, hf, lstate_id, state_in_orbit,
                                     cl_id):
    """SCIAMACHY level 1c common data

	Parameters
	----------
	hf : opened file
		Pointer to the opened level 1c HDF5 file
	lstate_id : int
		The limb state id.
	state_in_orbit : int
		The number in this batch of states for the header.
	cl_id : int
		The spectral cluster number.

	Returns
	-------
	success : int
		0 on success,
		1 if an error occurred, for example if the measurement data
		set for the requested limb and cluster ids is empty.
	"""
    # MDS = measurement data set
    cl_mds_group_name = "/MDS/limb_{0:02d}/cluster_{1:02d}".format(
        lstate_id, cl_id + 1)
    cl_mds_group = hf.get(cl_mds_group_name)
    if cl_mds_group is None:
        return 1
    # Load meta data
    self.metadata["calibration"] = hf.attrs["Calibration"].decode()
    self.metadata["l1b_product"] = hf.get("/MPH")["product_name"][0].decode()
    self.metadata["orbit"] = hf.get("/MPH")["abs_orbit"][0]
    self.metadata["state_id"] = hf.get("/ADS/STATES")["state_id"][lstate_id]
    self.metadata["software_version"] = hf.get(
        "/MPH")["software_version"][0].decode()
    self.metadata["keyfile_version"] = hf.get(
        "/SPH")["key_data_version"][0].decode()
    self.metadata["mfactor_version"] = hf.get(
        "/SPH")["m_factor_version"][0].decode()
    init_ver = hf.get("/SPH")["init_version"][0].decode().strip()
    self.metadata["init_version"], decont = init_ver.split()
    self.metadata["decont_flags"] = decont.lstrip("DECONT=")
    orb_phase = hf.get("/ADS/STATES")["orb_phase"][lstate_id]
    j_day_0 = 2451544.5  # 2000-01-01
    dsr_d, dsr_s, dsr_us = hf.get("/ADS/STATES")["dsr_time"][lstate_id]
    state_dt = Time(dsr_d + j_day_0 + dsr_s / 86400. + dsr_us / (86400. * 1e6),
                    format="jd").datetime
    self.metadata["date"] = state_dt.strftime("%d-%b-%Y %H:%M:%S.%f")

    logging.debug("applied calibrations: %s", self.metadata["calibration"])
    logging.debug("product: %s, orbit_nr: %s, state_id: %s, orb_phase: %s",
                  self.metadata["l1b_product"], self.metadata["orbit"],
                  self.metadata["state_id"], orb_phase)
    logging.debug(
        "soft_ver: %s, key_ver: %s, mf_ver: %s, init_ver: %s, "
        "decont_ver: %s", self.metadata["software_version"],
        self.metadata["keyfile_version"], self.metadata["mfactor_version"],
        self.metadata["init_version"], self.metadata["decont_flags"])

    ads_state = hf.get("/ADS/STATES")[lstate_id]
    cl_n_readouts = ads_state["clus_config"]["num_readouts"][cl_id]
    cl_intg_time = ads_state["clus_config"]["intg_time"][cl_id]
    self.metadata["nr_profile"] = 24 // (cl_intg_time * cl_n_readouts)
    self.metadata["act_profile"] = 0  # always zero for now

    # Prepare the header
    try:
        self.metadata["datatype_txt"] = _state_txt[self.metadata["state_id"]]
    except KeyError:
        logging.warning("State id %s not supported.", self.metadata["state_id"])
        return 1
    self.assemble_textheader()
    logging.debug("header:\n%s", self.textheader)

    # parse geolocation data
    gr_scia_geo = cl_mds_group.get("geoL_scia")
    tan_h = gr_scia_geo["tan_h"]
    # lat and lon are integers in degrees * 10^6
    lats_all = gr_scia_geo["tang_ground_point"]["lat"]
    lons_all = gr_scia_geo["tang_ground_point"]["lon"]
    sza_all = gr_scia_geo["sun_zen_ang"]
    saa_all = (gr_scia_geo["sun_azi_ang"] - gr_scia_geo["los_azi_ang"])
    sat_h_all = gr_scia_geo["sat_h"]
    earth_rad_all = gr_scia_geo["earth_rad"]
    # lat and lon are integers in degrees * 10^6
    subsatlat_all = gr_scia_geo["sub_sat_point"]["lat"]
    subsatlon_all = gr_scia_geo["sub_sat_point"]["lon"]
    # fix longitudes to [0°, 360°)
    lons_all[np.where(lons_all < 0)] += 360000000

    if cl_n_readouts > 2:
        tangent_heights = 0.5 * (tan_h[1::cl_n_readouts, 2] +
                                 tan_h[2::cl_n_readouts, 0])
        tp_lats = 0.5 * (lats_all[1::cl_n_readouts, 2] +
                         lats_all[2::cl_n_readouts, 0]) * 1e-6
        tp_lons = 0.5 * (lons_all[1::cl_n_readouts, 2] +
                         lons_all[2::cl_n_readouts, 0]) * 1e-6
        sza_toa = 0.5 * (sza_all[1::cl_n_readouts, 2] +
                         sza_all[2::cl_n_readouts, 0])
        saa_toa = 0.5 * (saa_all[1::cl_n_readouts, 2] +
                         saa_all[2::cl_n_readouts, 0])
        sat_hs = sat_h_all.reshape((-1, cl_n_readouts)).mean(axis=1)
        earth_rads = earth_rad_all.reshape((-1, cl_n_readouts)).mean(axis=1)
        subsatlat = subsatlat_all.reshape(
            (-1, cl_n_readouts)).mean(axis=1) * 1e-6
        subsatlon = subsatlon_all.reshape(
            (-1, cl_n_readouts)).mean(axis=1) * 1e-6
    else:
        tangent_heights = tan_h[::cl_n_readouts, 1]
        tp_lats = lats_all[::cl_n_readouts, 1] * 1e-6
        tp_lons = lons_all[::cl_n_readouts, 1] * 1e-6
        sza_toa = sza_all[::cl_n_readouts, 1]
        saa_toa = saa_all[::cl_n_readouts, 1]
        sat_hs = sat_h_all[::cl_n_readouts]
        earth_rads = earth_rad_all[::cl_n_readouts]
        subsatlat = subsatlat_all[::cl_n_readouts] * 1e-6
        subsatlon = subsatlon_all[::cl_n_readouts] * 1e-6

    logging.debug("tangent altitudes: %s", tangent_heights)
    nalt = len(tangent_heights)

    centre = _middle_coord(lats_all[0, 1] * 1e-6, lons_all[0, 1] * 1e-6,
                           lats_all[nalt - 2, 1] * 1e-6,
                           lons_all[nalt - 2, 1] * 1e-6)

    cent_lat_lon = (
        centre[0],
        # fix longitudes to [0, 360.)
        centre[1] if centre[1] >= 0. else 360. + centre[1],
        lats_all[0, 0] * 1e-6,
        lons_all[0, 0] * 1e-6,
        lats_all[0, 2] * 1e-6,
        lons_all[0, 2] * 1e-6,
        lats_all[nalt - 2, 0] * 1e-6,
        lons_all[nalt - 2, 0] * 1e-6,
        lats_all[nalt - 2, 2] * 1e-6,
        lons_all[nalt - 2, 2] * 1e-6)

    toa = 100.
    # to satellite first
    los_calc = np.degrees(np.arccos(0.0))
    sza_tp_h = sza_toa.copy()
    saa_tp_h = saa_toa.copy()
    los_tp_h = np.full_like(tangent_heights, los_calc)
    los_toa_h = np.full_like(tangent_heights, los_calc)
    sza_sat_h, los_sat_h, saa_sat_h = _calc_angles(sza_toa, los_calc, saa_toa,
                                                   tangent_heights, sat_hs,
                                                   earth_rads)
    # angles toa
    los_calc = np.degrees(
        np.arcsin((tangent_heights + earth_rads) / (toa + earth_rads)))
    # to tangent point
    los_toa_l = np.full_like(tangent_heights, los_calc)
    sza_tp_l, los_tp_l, saa_tp_l = _calc_angles(
        sza_toa, los_calc, saa_toa, np.full_like(tangent_heights, toa),
        tangent_heights, earth_rads)
    # to satellite
    sza_sat_l, los_sat_l, saa_sat_l = _calc_angles(
        sza_toa, los_calc, saa_toa, np.full_like(tangent_heights, toa), sat_hs,
        earth_rads)

    sza_sat_h[np.where(tangent_heights <= toa)] = 0.
    sza_sat_l[np.where(tangent_heights > toa)] = 0.
    saa_sat_h[np.where(tangent_heights <= toa)] = 0.
    saa_sat_l[np.where(tangent_heights > toa)] = 0.
    los_sat_h[np.where(tangent_heights <= toa)] = 0.
    los_sat_l[np.where(tangent_heights > toa)] = 0.

    sza_tp_h[np.where(tangent_heights <= toa)] = 0.
    sza_tp_l[np.where(tangent_heights > toa)] = 0.
    saa_tp_h[np.where(tangent_heights <= toa)] = 0.
    saa_tp_l[np.where(tangent_heights > toa)] = 0.
    los_tp_h[np.where(tangent_heights <= toa)] = 0.
    los_tp_l[np.where(tangent_heights > toa)] = 0.

    los_toa_h[np.where(tangent_heights <= toa)] = 0.
    los_toa_l[np.where(tangent_heights > toa)] = 0.

    sza_sat = sza_sat_h + sza_sat_l
    saa_sat = saa_sat_h + saa_sat_l
    los_sat = los_sat_h + los_sat_l

    sza_tp = sza_tp_h + sza_tp_l
    saa_tp = saa_tp_h + saa_tp_l
    los_tp = los_tp_h + los_tp_l

    los_toa = los_toa_h + los_toa_l

    logging.debug("TP sza, saa, los: %s, %s, %s", sza_tp, saa_tp, los_tp)
    logging.debug("TOA sza, saa, los: %s, %s, %s", sza_toa, saa_toa, los_toa)
    logging.debug("SAT sza, saa, los: %s, %s, %s", sza_sat, saa_sat, los_sat)

    # save the data to the limb scan class
    self.nalt = nalt
    self.orbit_state = (self.metadata["orbit"], state_in_orbit,
                        self.metadata["state_id"], self.metadata["nr_profile"],
                        self.metadata["act_profile"])
    self.date = (state_dt.year, state_dt.month, state_dt.day, state_dt.hour,
                 state_dt.minute, state_dt.second)
    self.orbit_phase = orb_phase
    self.cent_lat_lon = cent_lat_lon
    # pre-set the limb_data
    if self._limb_data_dtype is None:
        self._limb_data_dtype = _limb_data_dtype[:]
    self.limb_data = np.zeros((self.nalt), dtype=self._limb_data_dtype)
    self.limb_data["sub_sat_lat"] = subsatlat
    self.limb_data["sub_sat_lon"] = subsatlon
    self.limb_data["tp_lat"] = tp_lats
    self.limb_data["tp_lon"] = tp_lons
    self.limb_data["tp_alt"] = tangent_heights
    self.limb_data["tp_sza"] = sza_tp
    self.limb_data["tp_saa"] = saa_tp
    self.limb_data["tp_los"] = los_tp
    self.limb_data["toa_sza"] = sza_toa
    self.limb_data["toa_saa"] = saa_toa
    self.limb_data["toa_los"] = los_toa
    self.limb_data["sat_sza"] = sza_sat
    self.limb_data["sat_saa"] = saa_sat
    self.limb_data["sat_los"] = los_sat
    self.limb_data["sat_alt"] = sat_hs
    self.limb_data["earth_rad"] = earth_rads
    return 0
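
# A worked sketch of the "dsr_time" conversion used above (hypothetical
# reading): days, seconds, and microseconds since 2000-01-01 are combined
# into a Julian date and handed to astropy.
from astropy.time import Time
dsr_d, dsr_s, dsr_us = 4018, 43200, 500000
jd = dsr_d + 2451544.5 + dsr_s / 86400. + dsr_us / (86400. * 1e6)
print(Time(jd, format="jd").datetime)  # ~2011-01-01 12:00:00.500000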
Example #29
0
class metacheck:
    def __init__(self,ra,dec,mjd):
        # Transient information/search criteria
        self.coord = SkyCoord(ra,dec,unit=(u.hour,u.deg), frame='icrs')
        self.time = Time(mjd,format='mjd').datetime

        # Parameters for searches
        self.tns_radius = 5. # arcseconds
        self.mpc_radius = 1. # arcminutes
        self.ast_radius = 5. # arcseconds
        self.sim_radius = 10 # arcseconds
        self.ned_radius = 3. # arcminutes
        self.mdw_radius = 10 # arcseconds

    # Check the minor planet center's database of asteroids
    def checkMPC(self):
        table_output = []

        # Hack: POST the MPChecker CGI form directly
        url = "https://minorplanetcenter.net/cgi-bin/mpcheck.cgi"
        form_data = {"year":"%s" % self.time.strftime("%Y"),
            "month":"%s" % self.time.strftime("%m"),
            "day":"%s" % self.time.strftime("%d"),
            "which":"pos",
            "ra":"%s" %
                self.coord.ra.to_string(unit="hour",pad=True,decimal=False,sep=" "),
            "decl":"%s" %
                self.coord.dec.to_string(unit="deg",pad=True,decimal=False,sep=" "),
            "TextArea":"",
            "radius":"%s" % 10,
            "limit":"%s" % 24,
            "oc":"%s" % 500,
            "sort":"d",
            "mot":"h",
            "tmot":"s",
            "pdes":"u",
            "needed":"f",
            "ps":"n",
            "type":"p"}

        r = requests.post(url, data=form_data)
        soup = BeautifulSoup(r.text,'html5lib')
        pre = soup.find("pre")
        if pre is None:
            return(None)
        else:
            data = []
            for row in pre.contents[-1].split("\n")[3:-1]:
                data.append([row[9:25], row[25:36], row[36:47]])
            table = Table(list(map(list, zip(*data))),names=('name','ra','dec'))
            return(table)

    # Check TNS
    def checkTNS(self):
        url = 'https://wis-tns.weizmann.ac.il/search?'
        url += '&ra={ra}'.format(ra=self.coord.ra.degree)
        url += '&decl={dec}'.format(dec=self.coord.dec.degree)
        url += '&radius={rad}'.format(rad=self.tns_radius)
        url += '&coords_unit=arcsec'

        # Get page info from url
        r = requests.get(url)
        soup = BeautifulSoup(r.text,'html5lib')
        html_table = soup.find("tbody")
        if html_table is None:
            return(None)
        tables = html_table.find_all("tr")
        data = []
        for table in tables:
            object_list = table.find_all("td")
            new_row = []
            for element in object_list:
                if ('cell-name' in element['class'] or
                    'cell-ra' in element['class'] or
                    'cell-decl' in element['class'] or
                    'cell-discoverydate' in element['class']):
                    new_row.append(element.get_text())
            data.append(new_row)

        table = Table(list(map(list, zip(*data))),names=('name','ra','dec','discoverydate'))
        return(table)

    # Check OSC
    def checkASTROCATS(self):
        # Get photometry in a 10 arcsecond region around coordinates
        url = 'https://api.astrocats.space/catalog/photometry/time+band+magnitude?'
        url += 'ra={ra}'.format(ra=self.coord.ra.degree)
        url += '&dec={dec}'.format(dec=self.coord.dec.degree)
        url += '&radius={radius}'.format(radius=self.ast_radius)
        url += '&format=csv'

        r = requests.get(url)
        if 'No objects found' in r.text:
            return None
        else:
            # str.strip() removes characters, not a prefix; drop the CSV
            # header line ("event,time,band,magnitude") explicitly instead.
            lines = r.text.strip().split("\n")
            data = [l.split(',') for l in lines[1:]]
            table = Table(list(map(list, zip(*data))),
                names=('event','time','band','magnitude'))
        return(table)

    # Check Simbad
    def checkSimbad(self):
        table = Simbad.query_region(self.coord, radius = self.sim_radius * u.arcsec)
        return(table)

    # Check NED
    def checkNED(self):
        table = Ned.query_region(self.coord,
            radius=self.ned_radius * u.arcmin, equinox='J2000.0')

        # Pick only galaxies
        mask = table['Type'] == b'G'
        return(table[mask])

    # Check M dwarf flare catalog
    def checkMDWARF(self):
        try:
            table = heasarc.query_region(self.coord,
                mission='mdwarfasc', radius=self.mdw_radius*u.arcsec)
            return(table)
        except Exception:
            return(None)
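
# Usage sketch (hypothetical transient): position as sexagesimal hours and
# degrees plus a discovery epoch in MJD, as expected by __init__.
# mc = metacheck('12:30:49.4', '+12:23:28', 58000.0)
# tns_matches = mc.checkTNS()
# mpc_matches = mc.checkMPC()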
Example #30
0
    def _get_records(self,
                     mnemonic,
                     starttime,
                     endtime,
                     time_format=None,
                     **other_kwargs):
        """
        Retrieve all results for a mnemonic in the requested time range.

        Parameters
        ----------
        mnemonic : str
            The engineering mnemonic to retrieve

        starttime : str or astropy.time.Time
            The, inclusive, start time to retrieve from.

        endtime : str or astropy.time.Time
            The, inclusive, end time to retrieve from.

        result_format : str
            The format to request from the service.
            If None, the `default_format` is used.

        time_format : str
            The format of the input time used if the input times
            are strings. If None, a guess is made.

        other_kwargs : dict
            Keyword arguments not relevant to this implementation.

        Returns
        -------
        records : `astropy.Table`
            Returns the resulting table.

        Notes
        -----
        The engineering service always returns the bracketing entries
        before and after the requested time range.
        """
        if not isinstance(starttime, Time):
            starttime = Time(starttime, format=time_format)
        if not isinstance(endtime, Time):
            endtime = Time(endtime, format=time_format)
        self.starttime = starttime
        self.endtime = endtime

        # Make the request
        mnemonic = mnemonic.strip()
        mnemonic = mnemonic.upper()
        starttime_fmt = starttime.strftime('%Y%m%dT%H%M%S')
        endtime_fmt = endtime.strftime('%Y%m%dT%H%M%S')
        uri = f'{mnemonic}-{starttime_fmt}-{endtime_fmt}.csv'
        self._req.params = {'uri': SERVICE_URI + uri}
        prepped = self._session.prepare_request(self._req)
        settings = self._session.merge_environment_settings(
            prepped.url, {}, None, None, None)
        logger.debug('Query: %s', prepped.url)
        self.response = self._session.send(prepped, **settings)
        self.response.raise_for_status()
        logger.debug('Response: %s', self.response)
        logger.debug('Response test: %s', self.response.text)

        # Convert to table.
        r_list = self.response.text.split('\r\n')
        table = Table.read(r_list, format='ascii.csv')

        return table
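
# Hypothetical usage sketch (assumes an instance `svc` of the enclosing
# service class; the mnemonic name is illustrative only):
# tbl = svc._get_records('IMIR_HK_ICE_SEC_VOLT4',
#                        '2021-01-01T00:00:00', '2021-01-02T00:00:00',
#                        time_format='isot')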