Example #1
File: core.py Project: berkowitze/gwsumm
 def fetch(self,
           config=GWSummConfigParser(),
           segdb_error='raise',
           datafind_error='raise',
           **kwargs):
     """Finalise this state by fetching its defining segments,
     either from global memory, or from the segment database
     """
     # check we haven't done this before
     if self.ready:
         return self
     # fetch data
     if self.definition:
         match = re.search('(%s)' % '|'.join(MATHOPS.keys()),
                           self.definition)
     else:
         match = None
     if self.filename:
         self._read_segments(self.filename)
     elif match:
         channel, thresh = self.definition.split(match.groups()[0])
         channel = channel.rstrip()
         thresh = float(thresh.strip())
         self._fetch_data(channel,
                          thresh,
                          match.groups()[0],
                          config=config,
                          datafind_error=datafind_error,
                          **kwargs)
     # fetch segments
     elif self.definition:
         self._fetch_segments(config=config,
                              segdb_error=segdb_error,
                              **kwargs)
     # fetch null
     else:
         start = config.getfloat(DEFAULTSECT, 'gps-start-time')
         end = config.getfloat(DEFAULTSECT, 'gps-end-time')
         self.known = [(start, end)]
         self.active = self.known
     # restrict to given hours
     if self.hours:
         segs_ = SegmentList()
         # get start day
         d = Time(float(self.start), format='gps', scale='utc').datetime
         # datetime.replace() returns a new object, so reassign to truncate to midnight
         d = d.replace(hour=0, minute=0, second=0, microsecond=0)
         end_ = Time(float(self.end), format='gps', scale='utc').datetime
         while d < end_:
             # get GPS of day
             t = to_gps(d)
             # for each [start, end) hour pair, build a segment
             for h0, h1 in self.hours:
                 segs_.append(Segment(t + h0 * 3600, t + h1 * 3600))
             # increment to the next day
             d += datetime.timedelta(1)
         self.known &= segs_
         self.active &= segs_
     # FIXME
     self.ready = True
     return self
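
A note on the midnight truncation above: datetime.replace() returns a new object rather than mutating in place, so the result must be reassigned. A minimal standalone sketch of the pitfall:

import datetime

d = datetime.datetime(2015, 3, 19, 14, 30, 45)
d.replace(hour=0, minute=0, second=0, microsecond=0)      # no effect: result discarded
d = d.replace(hour=0, minute=0, second=0, microsecond=0)  # correct: d is now midnight
print(d)  # 2015-03-19 00:00:00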
Example #2
    def add_pixels(self, tpf):
        tpfdata = tpf[1].read()
        aperture_shape = tpfdata['FLUX'][0].shape
        # Get the pixel coordinates of the corner of the aperture
        col, row = (self.template_tpf_header1['1CRV5P'],
                    self.template_tpf_header1['2CRV5P'])
        height, width = aperture_shape

        # Fill the data
        mask = tpf[2].read() > 0
        idx = self.cadenceno - tpfdata["CADENCENO"][0]

        # When quality flag 65536 is raised, there is no data and the times are NaN.
        if (tpfdata['QUALITY'][idx] & 65536) > 0:
            raise ValueError(
                'Cadence {} does not appear to contain data!'.format(
                    self.cadenceno))

        if self.add_background:
            self.data[row:row+height, col:col+width][mask] = \
                tpfdata['FLUX'][idx][mask] \
                + tpfdata['FLUX_BKG'][idx][mask]
            self.uncert[row:row+height, col:col+width][mask] = \
                np.sqrt(
                    (tpfdata['FLUX_ERR'][idx][mask])**2 +
                    (tpfdata['FLUX_BKG_ERR'][idx][mask])**2
                )
        else:
            self.data[row:row+height, col:col+width][mask] = \
                tpfdata['FLUX'][idx][mask]
            self.uncert[row:row+height, col:col+width][mask] = \
                tpfdata['FLUX_ERR'][idx][mask]

        # If this is the first TPF being added, record the time and calculate DATE-OBS/END
        if self.time is None:
            self.time = tpfdata['TIME'][idx]
            self.quality = tpfdata['QUALITY'][idx]
            frametim = float(self.template_tpf_header1['FRAMETIM'])
            num_frm = float(self.template_tpf_header1['NUM_FRM'])

            # Calculate DATE-OBS from BJD time:
            mjd_start = self.time \
                + float(self.template_tpf_header1['BJDREFI']) \
                - frametim/3600./24./2. * num_frm \
                - 2400000.5
            self.mjdbeg = mjd_start
            starttime = Time(mjd_start, format='mjd')
            starttime = str(starttime.datetime)
            self.dateobs = starttime.replace(' ', 'T') + 'Z'

            # Calculate DATE-END:
            mjd_end = self.time \
                + float(self.template_tpf_header1['BJDREFI']) \
                + frametim/3600./24./2. * num_frm \
                - 2400000.5
            self.mjdend = mjd_end
            endtime = Time(mjd_end, format='mjd')
            endtime = str(endtime.datetime)
            self.dateend = endtime.replace(' ', 'T') + 'Z'
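
The DATE-OBS arithmetic above converts a Kepler-style timestamp (BJD minus the BJDREFI reference) to MJD and then to an ISO string. A minimal sketch with made-up header values; note that Time.isot yields the 'T'-separated form directly, avoiding the str(...).replace(' ', 'T') step:

from astropy.time import Time

# Made-up stand-ins for the TPF header values used above
bjdrefi = 2454833.0              # integer part of the BJD reference epoch
btjd = 2145.123                  # TIME column value (BJD - BJDREFI)
frametim, num_frm = 6.02, 270    # seconds per frame, frames per cadence

half_cadence = frametim / 3600. / 24. / 2. * num_frm     # half a cadence, in days
mjd_start = btjd + bjdrefi - half_cadence - 2400000.5    # JD -> MJD
dateobs = Time(mjd_start, format='mjd').isot + 'Z'       # ISO with 'T' separator
print(dateobs)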
Example #3
def test_timezone_convenience_methods():
    location = EarthLocation(-74.0 * u.deg, 40.7 * u.deg, 0 * u.m)
    obs = Observer(location=location, timezone=pytz.timezone('US/Eastern'))
    t = Time(57100.3, format='mjd')
    assert obs.astropy_time_to_datetime(t).hour == 3

    dt = datetime.datetime(2015, 3, 19, 3, 12)
    assert (obs.datetime_to_astropy_time(dt).datetime ==
            datetime.datetime(2015, 3, 19, 7, 12))

    assert (obs.astropy_time_to_datetime(
        obs.datetime_to_astropy_time(dt)).replace(tzinfo=None) == dt)

    # Test ndarray of times:
    times = t + np.linspace(0, 24, 10) * u.hour
    times_dt_ndarray = times.datetime
    assert all((obs.datetime_to_astropy_time(times_dt_ndarray)).jd ==
               (times + 4 * u.hour).jd)

    # Test list of times:
    times_dt_list = list(times.datetime)
    assert all((obs.datetime_to_astropy_time(times_dt_list)).jd ==
               (times + 4 * u.hour).jd)

    dts = obs.astropy_time_to_datetime(times)
    naive_dts = list(map(lambda t: t.replace(tzinfo=None), dts))
    assert all(naive_dts == times_dt_ndarray - datetime.timedelta(hours=4))
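
The 4-hour offset asserted above is simply US/Eastern daylight time; the same round trip can be checked with pytz alone. A minimal sketch:

import datetime
import pytz

# localize() attaches the zone to a naive datetime (DST-aware), and
# astimezone() converts it, mirroring the convenience methods tested above.
eastern = pytz.timezone('US/Eastern')
naive = datetime.datetime(2015, 3, 19, 3, 12)
utc = eastern.localize(naive).astimezone(pytz.utc)
print(utc)  # 2015-03-19 07:12:00+00:00 (EDT is UTC-4 on this date)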
Example #4
def ant_trange(vis):
    ''' Figure out nominal times for tracking of old EOVSA antennas, and return time
        range in CASA format
    '''
    import numpy as np
    import eovsa_array as ea
    from astropy.time import Time
    from taskinit import ms

    # Get the time range from the visibility file
    ms.open(vis)
    scans = ms.getscansummary()
    ms.close()
    sk = np.sort(list(scans.keys()))
    vistrange = np.array([scans[sk[0]]['0']['BeginTime'],
                          scans[sk[-1]]['0']['EndTime']])

    # Get the Sun transit time, based on the date in the vis file name (must have UDByyyymmdd in the name)
    aa = ea.eovsa_array()
    date = vis.split('UDB')[-1][:8]
    slashdate = date[:4] + '/' + date[4:6] + '/' + date[6:8]
    aa.date = slashdate
    sun = aa.cat['Sun']
    mjd_transit = Time(aa.next_transit(sun).datetime(), format='datetime').mjd
    # Construct timerange limits based on +/- 3h55m from transit time (when all dishes are nominally tracking)
    # and clip the visibility range not to exceed those limits
    mjdrange = np.clip(vistrange, mjd_transit - 0.1632, mjd_transit + 0.1632)
    trange = Time(mjdrange[0], format='mjd').iso[:19] + '~' + \
             Time(mjdrange[1], format='mjd').iso[:19]
    trange = trange.replace('-', '/').replace(' ', '/')
    return trange
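
The closing lines above encode a CASA-style timerange; the reformatting from astropy's ISO output is mechanical. A minimal sketch with made-up MJDs:

from astropy.time import Time

def mjd_pair_to_casa_trange(mjd0, mjd1):
    # Time.iso is 'YYYY-MM-DD HH:MM:SS.sss'; [:19] drops fractional seconds,
    # and CASA wants 'YYYY/MM/DD/HH:MM:SS~YYYY/MM/DD/HH:MM:SS'.
    trange = Time(mjd0, format='mjd').iso[:19] + '~' + Time(mjd1, format='mjd').iso[:19]
    return trange.replace('-', '/').replace(' ', '/')

print(mjd_pair_to_casa_trange(58000.1, 58000.4))
# 2017/09/04/02:24:00~2017/09/04/09:36:00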
Example #5
def read_osc_input(filename, template=False):
    """ self.filename in the form osc-name-ageIndex. E.g. osc-sn2002er-10"""
    osc, objName, ageIdx = filename.split('-')

    def read_json(url):
        response = urlopen(url)
        return json.loads(response.read(), object_pairs_hook=OrderedDict)

    # Redshift
    urlRedshift = "https://api.sne.space/" + objName + "/redshift/value"
    redshift = read_json(urlRedshift)
    redshift = float(redshift[next(iter(redshift))]['redshift'][0][0])

    if template is False:
        # Spectrum
        urlSpectrum = "https://api.sne.space/" + objName + "/spectra/time+data?item={0}".format(
            ageIdx)
        data = read_json(urlSpectrum)
        data = data[next(iter(data))]['spectra'][0][1]
        wave, flux = np.array(list(map(list, zip(*data)))).astype(float)
        return wave, flux, redshift

    elif template is True:
        # Age Max
        urlAgeMax = "https://api.sne.space/" + objName + "/maxdate/value"
        ageMax = read_json(urlAgeMax)
        ageMax = ageMax[next(iter(ageMax))]['maxdate'][0][0]
        ageMax = Time(ageMax.replace('/', '-')).mjd

        # Type
        urlTType = "https://api.sne.space/" + objName + "/claimedtype/value"
        tType = read_json(urlTType)
        tType = tType[next(
            iter(tType))]['claimedtype']  # List of types... choose one [0][0]

        # Spectrum
        urlSpectrum = "https://api.sne.space/" + objName + "/spectra/time+data"
        data = read_json(urlSpectrum)
        data = data[next(iter(data))]['spectra']
        nCols = len(data)  # number of ages

        waves = []
        fluxes = []
        ages = []
        for datum in data:
            age, spectrum = datum
            age = age - ageMax
            ages.append(age)

            # transpose this age's (wave, flux) pairs, not the whole data list
            wave, flux = np.array(list(map(list, zip(*spectrum)))).astype(float)
            wave = wave / (redshift + 1)  # De-redshift spectrum
            waves.append(wave)
            fluxes.append(flux)

        return waves, fluxes, nCols, ages, tType
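
The zip(*...) idiom above transposes a list of (wavelength, flux) pairs into parallel arrays. A minimal sketch:

import numpy as np

# A spectrum stored as [(wave, flux), ...] pairs becomes two parallel arrays.
spectrum = [(4000.0, 1.2), (4001.0, 1.3), (4002.0, 1.1)]
wave, flux = np.array(list(zip(*spectrum)), dtype=float)
print(wave)  # [4000. 4001. 4002.]
print(flux)  # [1.2 1.3 1.1]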
Example #6
    def standardizeMetadata(self):
        run = self.header["RUN"].strip()
        field = self.header["FIELD"].strip()
        filter_name = self.header["COLOUR"].strip()
        chip = self.header["CHIP"]
        sciProg = f"{run}-{field}-{filter_name}-{chip}"

        # TODO: Fix datetimes
        # There is a TIMESYS key, but it is unclear how to generically build a
        # timezone-aware datetime here; astropy Time does not play well with
        # Django, and astrometadata is also broken!
        if "UTC" in self.header["TIMESYS"].upper():
            tzinfo = timezone.utc
        else:
            raise ValueError("Cannot recognize the time scale system in use.")

        jdstart = Time(self.header["JDSTART"], format="jd", scale="utc")
        jdstart = jdstart.utc.datetime
        jdstart = jdstart.replace(tzinfo=tzinfo)

        # Assumes a JDEND key; reading JDSTART again would duplicate the start time.
        jdend = Time(self.header["JDEND"], format="jd", scale="utc")
        jdend = jdend.utc.datetime
        jdend = jdend.replace(tzinfo=tzinfo)

        # TODO: filter out what is the filter standardization here?
        meta = Metadata(
            obs_lon=self.header["LOGITUD"],
            obs_lat=self.header["LATITUD"],
            obs_height=self.header["HEIGHT"],
            datetime_begin=jdstart.isoformat(),
            datetime_end=jdend.isoformat(),
            telescope=self.header["OBSTEL"].strip(),
            instrument=self.header["CAMERA"].strip(),
            science_program=sciProg,
            exposure_duration=self.header["EXPTIME"],
            filter_name=filter_name
        )

        return meta
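
Regarding the TODO above: astropy can produce a timezone-aware datetime directly, since Time.to_datetime accepts a tzinfo object. A minimal sketch with a made-up Julian date:

import datetime
from astropy.time import Time

jd = 2459000.5  # made-up JDSTART value
t = Time(jd, format='jd', scale='utc')
aware = t.to_datetime(timezone=datetime.timezone.utc)  # tz-aware datetime
print(aware.isoformat())  # 2020-05-31T00:00:00+00:00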
Example #7
def set_info(longitude, latitude, date, ut):
    observer = get_observer(longitude, latitude)
    date = dt.strptime(re.split('T| ', date)[0], '%Y-%m-%d')
    date = Time(date.replace(hour=int(ut)))

    sunset = observer.sun_set_time(date).strftime('%d-%m-%Y %H:%M:%S')
    sunrise = observer.sun_rise_time(date).strftime('%d-%m-%Y %H:%M:%S')
    moon_phase = observer.moon_phase(date + 1 * u.day)
    moon_altaz = observer.moon_altaz(date + 1 * u.day)
    moon_alt = str(round(moon_altaz.alt.deg, 0))
    moon_az = str(round(moon_altaz.az.deg, 0))
    lst = observer.local_sidereal_time(date)

    moon_illum_pct = 100 - int(moon_phase.value / np.pi * 100)

    return (sunset, sunrise, moon_illum_pct,
            ", ".join([moon_alt, moon_az]), str(lst))
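
The moon_phase arithmetic above converts a phase angle (0 = full, pi = new) into an illuminated percentage; astroplan also exposes this directly, assuming moon_illumination is available in the installed version. A minimal sketch:

from astroplan import moon_illumination
from astropy.time import Time

t = Time('2015-03-19 03:12')
# moon_illumination returns the illuminated fraction in [0, 1]
print(round(moon_illumination(t) * 100))  # illuminated percentage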
Example #8
def ant_trange(vis):
    ''' Figure out nominal times for tracking of old EOVSA antennas, and return time
        range in CASA format
    '''
    import eovsa_array as ea
    from astropy.time import Time
    # Get the Sun transit time, based on the date in the vis file name (must have UDByyyymmdd in the name)
    aa = ea.eovsa_array()
    date = vis.split('UDB')[-1][:8]
    slashdate = date[:4] + '/' + date[4:6] + '/' + date[6:8]
    aa.date = slashdate
    sun = aa.cat['Sun']
    mjd_transit = Time(aa.next_transit(sun).datetime(), format='datetime').mjd
    # Construct timerange based on +/- 3h55m from transit time (when all dishes are nominally tracking)
    trange = Time(mjd_transit - 0.1632, format='mjd').iso[:19] + '~' + \
             Time(mjd_transit + 0.1632, format='mjd').iso[:19]
    trange = trange.replace('-', '/').replace(' ', '/')
    return trange
Example #9
    def observing_plan(self, date=None, remove_twilight=False, bright_time=False):
        """
        Make a list of the best tiles observable through the night.

        Output: observing_plan.txt
        # observing_ideal observing_start observing_stop fieldID path_to_config_file
        1030 1000 1100 fieldID /observers_files/taipan/YYYYMMDD/tile_pk_HHMMSS.obs_config.json

        observing_ideal = meridian transit time
        observing_start, observing_stop: observing_ideal +/- dt, where dt is determined using weights

        Should times be given in UT?
        What is the json file (e.g. a list of stars with coordinates)?

        TODO: What is the ObsConfig file?
        """
        if date is None:
            datenow = datetime.datetime.now().date()
            date = '%d-%02d-%02d' % (datenow.year, datenow.month, datenow.day)
            datestring = date.replace('-', '')
            date = Time('%s' % date)
        else:
            datestring = date.replace('-', '')

        sun_set = self.observatory.sun_set_time(Time(date)).datetime
        # convert back from datetime to astropy Time
        sunset = self.observatory.datetime_to_astropy_time(sun_set)

        sun_rise = self.observatory.sun_rise_time(Time(date) + TimeDelta(1.0, format='jd')).datetime

        # Remove twilight time, approx. 1.5 hours on each side.
        # Remove for the simulator, but not for the real observing plans.
        if remove_twilight:
            print('No observations during twilight.')
            sun_set += datetime.timedelta(minutes=90.0)
            sun_rise -= datetime.timedelta(minutes=90.0)
            sunset = self.observatory.datetime_to_astropy_time(sun_set)

        f = open(params.params['observing_plan_filename'] + '_%s.dat' % datestring, 'w')

        # UTC times when we want to observe each tile
        times = []
        for i in range(100):
            dt = datetime.timedelta(minutes=i*params.params['TIME_PER_TILE'])
            t = sun_set + dt
            if t < sun_rise:
                times.append(t)
            else:
                break
        
        print('Selecting tiles for', date)
        timezone_correction = TimeDelta(3600.0*11.0, format='sec')  # TODO
        sunset_lt = self.observatory.datetime_to_astropy_time(sun_set) + timezone_correction
        sunrise_lt = self.observatory.datetime_to_astropy_time(sun_rise) + timezone_correction
        dark_time = sun_rise - sun_set
        dark_time = dark_time.seconds
        dark_time_hours = int(dark_time/3600.0)
        dark_time_minutes = (dark_time - float(dark_time_hours)*3600.0)/60.0
        print('Sunset LT %d-%02d-%02d %02d:%02d' % (sunset_lt.value.year, sunset_lt.value.month, sunset_lt.value.day, sunset_lt.value.hour, sunset_lt.value.minute))
        print('Sunrise LT %d-%02d-%02d %02d:%02d' % (sunrise_lt.value.year, sunrise_lt.value.month, sunrise_lt.value.day, sunrise_lt.value.hour, sunrise_lt.value.minute))
        print('Nighttime duration %02d:%02d' % (dark_time_hours, dark_time_minutes))
        print('Number of tiles this night:', len(times))
        print()
        
        # Convert UTC times to LST
        times_lst = [Time(t).utc.sidereal_time('mean', longitude=params.params['LON']).value for t in times]

        time_efficiency = []
        telescope_positions = []  # for testing purposes

        selected_tiles = []
        count = 1
        new_tile_ids_to_be_added_to_list_of_all_observed_stars = set()
        for t, lst in zip(times, times_lst):
            t_start = datetime.datetime.now()
            utc = self.observatory.datetime_to_astropy_time(t)
            self.moon = get_moon(utc)

            # ONLY BRIGHT TIME
            if bright_time:
                is_bright_time = self.check_if_moon_is_above_horizon(moon=self.moon, time=utc)
                if is_bright_time:
                    pass
                else:
                    continue

            try:
                self.ra_current = best_tile.TaipanTile.ra
                self.dec_current = best_tile.TaipanTile.dec
            except (NameError, AttributeError):
                # no tile selected yet: start from the meridian at a default dec
                self.ra_current = lst
                self.dec_current = -70.0  # TODO

            self.local_sidereal_time = lst  # TODO: check that this doesn't violate anything else. Why can't the LST be passed to init_best_tiles?
  
            best_tile = self.find_best_tile()
            
            if best_tile is None:
                continue

            # Update list of observed tiles
            self.observed_tiles.add(best_tile.TaipanTile.field_id)
            new_tile_ids_to_be_added_to_list_of_all_observed_stars.add(best_tile.TaipanTile.field_id)
            
            """
            Code from this point on is only output formatting
            """

            json_filename = create_obs_config_json.create_ObsConfig_json(tile=best_tile, utc=utc)

            local_time = t + datetime.timedelta(hours=11.0)
            
            print('%02d/%02d' % (count, len(times)),
                  'LT=%d-%02d-%02d %02d:%02d:%02d' % (local_time.year, local_time.month, local_time.day, local_time.hour, local_time.minute, local_time.second),
                  'UT=%d-%02d-%02d %02d:%02d:%02d' % (t.year, t.month, t.day, t.hour, t.minute, t.second),
                  'LST=%s' % ('%02.2f' % lst).rjust(5),
                  best_tile)  # , best_tile.meridian_transit_time.datetime

            # Determine the observing window for this tile
            H_amp = best_tile.estimate_best_time_interval_to_observe_tile()
            if H_amp is None:
                todo = True  # TODO: handle tiles with no valid observing interval
                
            # TODO: check if this times are during the night, not e.g. just before sunset or just after sunrise, otherwise limit them within sunset and sunrise time
            observing_start = t - datetime.timedelta(hours=H_amp)
            observing_stop = t + datetime.timedelta(hours=H_amp)
            # Do I insert time NOW or time of meridian crossing?
            observing_ideal = Time(t - datetime.timedelta(hours=best_tile.hour_angle)).utc.sidereal_time(
                'mean', longitude=params.params['LON']).value
            
            
            """
            Print output
            """
            line = '%02d%02d %02d%02d %02d%02d %05d %s' % (
                t.hour, t.minute, observing_start.hour, observing_start.minute,
                observing_stop.hour, observing_stop.minute,
                best_tile.TaipanTile.field_id, json_filename)
            
            #~ self.print_selected_tile_to_json(line)
           
            # TODO: what happens with observing_ideal for tiles at ALT=90? Because there is a limit at 85 degrees.
            f.write(line+'\n')
        
            count += 1
            
            t_end = datetime.datetime.now()
            time_efficiency.append((t_end - t_start).seconds)
            
            # PLOT
            #~ telescope_positions.append([best_tile.TaipanTile.ra, best_tile.TaipanTile.dec, self.moon.ra.value, self.moon.dec.value, best_tile.angular_moon_distance])
            #~ visualization.plot_selected_tile_with_neighbourhood(moon=self.moon, lst=lst, best_tiles=self.best_tiles_to_observe_now, tiles=self.tiles, best_tile=best_tile, i=count-1, ra_current=self.ra_current, dec_current=self.dec_current, telescope_positions=telescope_positions, observed_tile_ids=self.observed_tiles)            
            
        f.close()
        
        manage_list_of_observed_tiles.add_tile_id_internal_to_the_list(new_tile_ids_to_be_added_to_list_of_all_observed_stars)
        
        print('Average time to find the next tile [seconds]:', np.mean(time_efficiency))
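
The UTC-to-LST conversion buried in the loop above is a one-liner in astropy. A minimal sketch with a made-up longitude standing in for params.params['LON']:

import astropy.units as u
from astropy.time import Time

longitude = 149.07 * u.deg  # made-up site longitude (degrees east)
t = Time('2019-01-01 12:00:00', scale='utc')
lst = t.sidereal_time('mean', longitude=longitude)
print(lst.hour)  # local mean sidereal time in decimal hours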
Example #10
def main():

    prefix = 'M8'

    # Keep the two halves of the campsign separate
    # Comment or uncomment to do each half:

    filenames = glob.glob(
        '/Volumes/Work/Field_9/TPFs_part1_superstamp/ktwo200*targ.fits')
    #    filenames = glob.glob('/Volumes/Work/Field_9/TPFs_part2_superstamp/ktwo200*targ.fits')

    midfile = fits.open(filenames[0], mode='readonly', memmap=True)
    cards0 = midfile[0].header.cards
    cards1 = midfile[1].header.cards
    cards2 = midfile[2].header.cards

    time = midfile[1].data.field('TIME')[:] + 2454833.0
    timecorr = midfile[1].data.field('TIMECORR')[:]
    cadenceno = midfile[1].data.field('CADENCENO')[:]
    quality = midfile[1].data.field('QUALITY')[:]
    cosmic_rays = midfile[1].data.field('COSMIC_RAYS')[:]
    pos_corr1 = midfile[1].data.field('POS_CORR1')[:]
    pos_corr2 = midfile[1].data.field('POS_CORR2')[:]

    bigarr = np.zeros([135, 220])
    bigarr[:] = np.nan
    bigarr = bigarr.astype('float32')

    #  Part 1 of Campaign 9
    for i in list(range(0, 669)) + list(range(670, 765)) + list(range(766, 1290)):

        #  Part 2 of Campaign 9
        ##    for i in range(0,2022):

        # only continue if there is valid data at this timestamp
        if (~np.isnan(time[i])
                and len(np.where((midfile[1].data['FLUX'])[i] != 0.0)[0]) != 0
                and len(np.where(
                    (~np.isnan(midfile[1].data['FLUX'])[i]))[0]) != 0):

            # construct output primary extension
            outfile = prefix + '_BJD%.4f' % time[i] + '.fits'
            print(outfile)

            for fn in filenames:
                with fits.open(fn) as f:
                    x = f[1].header['1CRV4P']
                    y = f[1].header['2CRV4P']
                    ch = f[0].header['CHANNEL']
                    mod = f[0].header['MODULE']
                    out = f[0].header['OUTPUT']
                    alldata = f[1].data['FLUX']
                    dim = alldata.shape
                    if (out == 1):
                        bigarr[y - 868 + 10:y - 868 + 10 + dim[1],
                               x - 1002:x - 1002 + dim[2]] = alldata[i]
                    if (out == 2):
                        bigarr[y - 858:y - 858 + dim[1], 1112 - (x + dim[2]) +
                               110:1112 - x + 110] = np.fliplr(alldata[i])

            hdu0 = fits.PrimaryHDU(bigarr)

            # add in primary keywords
            for n in range(len(cards0)):
                try:
                    if cards0[n].keyword not in hdu0.header.keys():
                        hdu0.header[cards0[n].keyword] = (cards0[n].value,
                                                          cards0[n].comment)
                    else:
                        hdu0.header.cards[
                            cards0[n].keyword].comment = cards0[n].comment
                except:
                    pass

            # add additional keywords
            for k in range(len(cards1)):
                if (cards1[k].keyword not in hdu0.header.keys()
                        and cards1[k].keyword[:4] not in [
                            'TTYP', 'TFOR', 'TUNI', 'TDIS', 'TDIM', 'WCAX',
                            '1CTY', '2CTY', '1CRP', '2CRP', '1CRV', '2CRV',
                            '1CUN', '2CUN', '1CDE', '2CDE', '1CTY', '2CTY',
                            '1CDL', '2CDL', '11PC', '12PC', '21PC', '22PC',
                            'WCSN', 'TFIE', 'XTEN', 'EXTN', 'PCOU', 'GCOU',
                            'TNUL', 'INHE'
                        ]):
                    hdu0.header.set(cards1[k].keyword, cards1[k].value,
                                    cards1[k].comment)

            # and a few more keywords:
            for j in range(len(cards2)):
                try:
                    if cards2[j].keyword not in hdu0.header.keys():
                        hdu0.header.set(cards2[j].keyword, cards2[j].value,
                                        cards2[j].comment)
                except:
                    pass

            # pull some additional information out of the TPF headers

            try:
                int_time = cards1['INT_TIME'].value
            except KeyError:
                print('WARNING -- cannot find INT_TIME keyword')
            try:
                frametim = cards1['FRAMETIM'].value
            except KeyError:
                print('WARNING -- cannot find FRAMETIM keyword')
            try:
                num_frm = cards1['NUM_FRM'].value
            except KeyError:
                print('WARNING -- cannot find NUM_FRM keyword')
            try:
                hdu0.header.set('TELAPSE', frametim * num_frm,
                                '[s] elapsed time for exposure')
            except:
                hdu0.header.set('TELAPSE', -999,
                                '[s] elapsed time for exposure')
            try:
                hdu0.header.set('LIVETIME', int_time * num_frm,
                                '[s] TELASPE multiplied by DEADC')
            except:
                hdu0.header.set('LIVETIME', -999,
                                '[s] TELASPE multiplied by DEADC')
            try:
                hdu0.header.set('EXPOSURE', int_time * num_frm,
                                '[s] time on source')
            except:
                hdu0.header.set('EXPOSURE', -999, '[s] time on source')
            try:
                hdu0.header.set('MIDTIME', time[i],
                                '[BJD] mid-time of exposure')
            except:
                hdu0.header.set('MIDTIME', -999, '[BJD] mid-time of exposure')
            try:
                hdu0.header.set('TIMECORR', timecorr[i],
                                '[d] barycenter - timeslice correction')
            except:
                hdu0.header.set('TIMECORR', -999,
                                '[d] barycenter - timeslice correction')
            try:
                hdu0.header.set('CADENCEN', cadenceno[i],
                                'unique cadence number')
            except:
                hdu0.header.set('CADENCEN', -999, 'unique cadence number')
            try:
                hdu0.header.set('QUALITY', quality[i], 'pixel quality flag')
            except:
                hdu0.header.set('QUALITY', -999, 'pixel quality flag')
            try:
                pc1 = str(pos_corr1[i])
                pc2 = str(pos_corr2[i])
                hdu0.header.set('POSCORR1', pc1,
                                '[pix] column position correction')
                hdu0.header.set('POSCORR2', pc2,
                                '[pix] row position correction')
            except:
                hdu0.header.set('POSCORR1', -999,
                                '[pix] column position correction')
                hdu0.header.set('POSCORR2', -999,
                                '[pix] row position correction')


            # Edit and delete some keywords
            hdu0.header['DATE'] = str(datetime.date.today())
            hdu0.header['CREATOR'] = 'Ann Marie Cody'
            hdu0.header.cards['CREATOR'].comment = 'file creator'
            hdu0.header.set('OBJECT', 'M8', 'target')
            hdu0.header['CHANNEL'] = UNDEFINED
            hdu0.header.cards[
                'CHANNEL'].comment = 'double-valued for this superstamp'
            hdu0.header['OUTPUT'] = UNDEFINED
            hdu0.header.cards[
                'OUTPUT'].comment = 'double-valued for this superstamp'

            hdu0.header.remove('PCOUNT')
            hdu0.header.remove('GCOUNT')
            hdu0.header.remove('PROCVER')
            hdu0.header.remove('FILEVER')
            hdu0.header.remove('TIMVERSN')
            hdu0.header.remove('KEPLERID')
            hdu0.header.remove('TTABLEID')
            hdu0.header.remove('DATA_REL')
            hdu0.header.remove('PMRA')
            hdu0.header.remove('PMDEC')
            hdu0.header.remove('PMTOTAL')
            hdu0.header.remove('PARALLAX')
            hdu0.header.remove('GLON')
            hdu0.header.remove('GLAT')
            hdu0.header.remove('GMAG')
            hdu0.header.remove('RMAG')
            hdu0.header.remove('IMAG')
            hdu0.header.remove('ZMAG')
            hdu0.header.remove('JMAG')
            hdu0.header.remove('HMAG')
            hdu0.header.remove('KMAG')
            hdu0.header.remove('KEPMAG')
            hdu0.header.remove('GRCOLOR')
            hdu0.header.remove('JKCOLOR')
            hdu0.header.remove('GKCOLOR')
            hdu0.header.remove('TEFF')
            hdu0.header.remove('LOGG')
            hdu0.header.remove('FEH')
            hdu0.header.remove('EBMINUSV')
            hdu0.header.remove('AV')
            hdu0.header.remove('RADIUS')
            hdu0.header.remove('TMINDEX')
            hdu0.header.remove('EXTEND')
            hdu0.header.remove('NEXTEND')
            hdu0.header.remove('EXTVER')
            hdu0.header.remove('XTENSION')
            hdu0.header.remove('INHERIT')
            hdu0.header.remove('NPIXSAP')
            hdu0.header.remove('NPIXMISS')
            hdu0.header.remove('TIERABSO')
            hdu0.header.remove('LC_START')
            hdu0.header.remove('LC_END')
            hdu0.header.remove('CDPP3_0')
            hdu0.header.remove('CDPP6_0')
            hdu0.header.remove('CDPP12_0')
            hdu0.header.remove('CROWDSAP')
            hdu0.header.remove('FLFRCSAP')
            #       hdu0.header.set('NEXTEND', 1, 'number of extensions')

            hdu0.header[
                'TSTART'] = time[i] - frametim / 3600. / 24. / 2. * num_frm
            hdu0.header[
                'TSTOP'] = time[i] + frametim / 3600. / 24. / 2. * num_frm
            hdu0.header.cards[
                'TSTART'].comment = 'observation start time in BJD'
            hdu0.header.cards[
                'TSTOP'].comment = 'observation stop time in BJD'

            # Calculate TIME-OBS from BJD time:
            temptime = Time(time[i] - frametim / 3600. / 24. / 2. * num_frm -
                            2400000.5 - timecorr[i] +
                            (0.25 + 0.62 *
                             (5 - cards1['TIMSLICE'].value)) / 86400.,
                            format='mjd')
            temptime = str(temptime.datetime)
            hdu0.header['DATE-OBS'] = temptime.replace(' ', 'T') + 'Z'

            # Calculate DATE-END:
            temptime = Time(time[i] + frametim / 3600. / 24. / 2. * num_frm -
                            2400000.5 - timecorr[i] +
                            (0.25 + 0.62 *
                             (5 - cards1['TIMSLICE'].value)) / 86400.,
                            format='mjd')
            temptime = str(temptime.datetime)
            hdu0.header['DATE-END'] = temptime.replace(' ', 'T') + 'Z'

            # write output file
            hdu0.writeto(outfile, checksum=True)
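
The long chain of header.remove() calls above raises KeyError if any keyword is absent; Header.remove takes an ignore_missing flag that makes the cleanup tolerant. A minimal sketch:

from astropy.io import fits

header = fits.Header([('SIMPLE', True), ('KEPMAG', 12.3)])
for key in ('KEPMAG', 'GMAG', 'RMAG'):       # GMAG and RMAG are absent here
    header.remove(key, ignore_missing=True)  # no KeyError either way
print(list(header.keys()))  # ['SIMPLE']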
Example #11
def tab2_BUT_tCLN_param_save():
    with open(CleanID_dir + 'CASA_CLN_args.json', 'w') as fp:
        json.dump(tab2_tCLN_Param_dict, fp)
    tab2_Div_tCLN2.text = '<p>CASA script and arguments config file saved to <b>{}</b>.</p>'.format(
        CleanID_dir)
    timestrs = []
    fits_local = []
    fits_global = []
    # exec() cannot bind local variables in Python 3, so read the parameter
    # values directly from the dict (assumed to hold plain strings/numbers).
    if 'twidth' in tab2_tCLN_Param_dict:
        twidth = int(tab2_tCLN_Param_dict['twidth'])
    else:
        twidth = 1
    if 'workdir' in tab2_tCLN_Param_dict:
        workdir = tab2_tCLN_Param_dict['workdir']
    else:
        workdir = './'
    # os.system('cp {} {}'.format(CleanID_dir + 'CASA_CLN_args.json', workdir))
    os.system('cp {}/DataBrowser/ToClean/script_clean.py {}'.format(
        suncasa_dir, CleanID_dir))

    for ii in range(tab2_ntim):
        iit = int(ii) // twidth * twidth  # floor ii to the start of its twidth block
        t0 = xx[iit] - tab2_dt / 2
        datestr = Time(t0 / 3600. / 24.,
                       format='jd',
                       scale='utc',
                       precision=3,
                       out_subfmt='date').iso
        timestr0 = Time(t0 / 3600. / 24.,
                        format='jd',
                        scale='utc',
                        precision=3,
                        out_subfmt='date_hms').iso
        timestr0 = timestr0.split(' ')[1]
        timestr = datestr.replace("-", "") + 'T' + timestr0.replace(":", "")
        timestrs.append(timestr0)
        fits_local.append(timestr + '.fits')
        fits_global.append(timestr + '.fits')
    timestrs = timestrs * int(tab2_nfreq)
    fits_local = fits_local * int(tab2_nfreq)
    fits_global = fits_global * int(tab2_nfreq)
    freqstrs = ['{:.3f}'.format(ll) for ll in yy]
    dspecDFout = pd.DataFrame({
        'time': xx - xx[0],
        'freq': yy,
        'timestr': timestrs,
        'freqstr': freqstrs,
        'dspec': tab2_spec_plt.flatten(),
        'fits_local': fits_local,
        'fits_global': fits_global
    })
    with open(FS_dspecDF, 'wb') as fp:
        pickle.dump(dspecDFout, fp)
    tab2_Div_tCLN2.text = '<p>CASA script, arguments config file and dspecDF-base saved to <b>{}</b>. '.format(
        CleanID_dir
    ) + 'Click the <b>clean</b> button to clean. When finished, \
        go back to the <b>QLook</b> window, select StrID <b>{}</b> and \
        click the <b>FSview</b> button again.</p>'.format(struct_id[0:-1])
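
The date/time string assembly above (iso output, split, strip separators) can be collapsed with Time.strftime, at the cost of the fractional seconds kept by precision=3. A minimal sketch:

from astropy.time import Time

t0 = 2458000.25  # made-up Julian date
timestr = Time(t0, format='jd', scale='utc').strftime('%Y%m%dT%H%M%S')
print(timestr)  # 20170903T180000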
Example #12
    def download_cutouts(self, desg=None, clean_failed=True,
                         retry_failed=True):
        import os
        from tempfile import mktemp
        import numpy as np
        import astropy.units as u
        from astropy.io import fits
        from astropy.time import Time
        from astropy.wcs import WCS
        from .ztf import IRSA

        path = self.config['cutout path'] + os.path.sep
        os.makedirs(path, exist_ok=True)

        fntemplate = os.path.join(
            '{desg}', '{desg}-{datetime}-{prepost}{rh:.3f}-ztf.fits.gz')

        if desg is None:
            desg_constraint = ''
            parameters = []
        else:
            desg_constraint = ' AND desg=? '
            parameters = [desg]

        if retry_failed:
            sync_constraint = ''
        else:
            sync_constraint = 'AND sci_sync_date IS NULL '
        count = self.db.execute('''
            SELECT count() FROM found
            WHERE sciimg=0
            ''' + sync_constraint + desg_constraint, parameters
                                ).fetchone()[0]

        if count == 0:
            self.logger.info('No cutouts to download.')
            return

        self.logger.info('Downloading {} cutouts.'.format(count))

        rows = self.fetch_iter('''
        SELECT * FROM foundobs
        WHERE sciimg=0
        ''' + sync_constraint + '''
        ''' + desg_constraint, parameters)

        with IRSA(path, self.config.auth) as irsa:
            for row in rows:
                # check if target cutout directory exists
                d = desg2file(row['desg'])
                os.makedirs(path + d, exist_ok=True)

                prepost = 'pre' if row['rdot'] < 0 else 'post'
                sync_date = Time(float(row['obsjd']), format='jd').iso
                t = sync_date.replace('-', '').replace(
                    ':', '').replace(' ', '_')[:15]
                fn = fntemplate.format(
                    desg=d, prepost=prepost, rh=row['rh'],
                    datetime=t)

                if os.path.exists(path + fn):
                    self.logger.error(
                        path + fn +
                        ' exists, but was not expected.  Removing.'
                    )
                    os.unlink(path + fn)

                sciurl = row['url'] + '&size=5arcmin'
                sci_downloaded = self._download_file(
                    irsa, sciurl, path + fn, clean_failed=clean_failed)
                if not sci_downloaded:
                    self.db.execute('''
                    UPDATE found SET
                      sci_sync_date=?,
                      sciimg=0,
                      mskimg=0,
                      scipsf=0,
                      diffimg=0,
                      diffpsf=0
                    WHERE foundid=?
                    ''', (sync_date, row['foundid']))
                    self.db.commit()
                    continue

                updates = {
                    'desg': (row['desg'], 'Target designation'),
                    'obsjd': (row['obsjd'], 'Shutter start time'),
                    'rh': (row['rh'], 'Heliocentric distance, au'),
                    'delta': (row['delta'], 'Observer-target distance, au'),
                    'phase': (row['phase'], 'Sun-target-observer angle, deg'),
                    'rdot': (row['rdot'], 'Heliocentric radial velocity, km/s'),
                    'selong': (row['selong'], 'Solar elongation, deg'),
                    'sangle': (row['sangle'], 'Projected target->Sun position angle, deg'),
                    'vangle': (row['vangle'], 'Projected velocity position angle, deg'),
                    'trueanom': (row['trueanomaly'], 'True anomaly (osculating), deg'),
                    'tmtp': (row['tmtp'], 'T-Tp (osculating), days'),
                    'tgtra': (row['ra'], 'Target RA, deg'),
                    'tgtdec': (row['dec'], 'Target Dec, deg'),
                    'tgtdra': (row['dra'], 'Target RA*cos(dec) rate of change, arcsec/s'),
                    'tgtddec': (row['ddec'], 'Target Dec rate of change, arcsec/s'),
                    'tgtrasig': (row['ra3sig'], 'Target RA 3-sigma uncertainty, arcsec'),
                    'tgtdesig': (row['dec3sig'], 'Target Dec 3-sigma uncertainty, arcsec'),
                    'foundid': (row['foundid'], 'ZChecker DB foundid'),
                }

                maskfn = mktemp(dir='/tmp')
                _url = sciurl.replace('sciimg', 'mskimg')
                mask_downloaded = self._download_file(
                    irsa, _url, maskfn, clean_failed=clean_failed)

                psffn = mktemp(dir='/tmp')
                _url = sciurl.replace('sciimg', 'sciimgdaopsfcent')
                _url = _url[:_url.rfind('?')]
                psf_downloaded = self._download_file(
                    irsa, _url, psffn, clean_failed=True)

                difffn = mktemp(dir='/tmp')
                #_url = sciurl.replace('sciimg.fits', 'scimrefdiffimg.fits.fz')
                # diff_downloaded = self._download_file(
                #    irsa, _url, difffn, clean_failed=True)
                diff_downloaded = False

                diffpsffn = mktemp(dir='/tmp')
                #_url = sciurl.replace('sciimg', 'diffimgpsf')
                # if diff_downloaded:  # no need to DL PSF if diff not DL'ed
                #    diffpsf_downloaded = self._download_file(
                #        irsa, _url, diffpsffn, clean_failed=True)
                # else:
                #    diffpsf_downloaded = False
                diffpsf_downloaded = False

                # update header and add mask and PSF
                with fits.open(path + fn, 'update') as hdu:
                    hdu[0].name = 'sci'

                    wcs = WCS(hdu[0].header)
                    x, y = wcs.all_world2pix(
                        row['ra'] * u.deg, row['dec'] * u.deg, 0)
                    updates['tgtx'] = int(
                        x), 'Target x coordinate, 0-based'
                    updates['tgty'] = int(
                        y), 'Target y coordinate, 0-based'

                    try:
                        hdu[0].header.update(updates)
                    except ValueError as e:
                        self.logger.error(
                            'Error creating FITS header for foundid {}: {}'.format(
                                row['foundid'], str(e)))
                        hdu.close()
                        continue

                    if mask_downloaded:
                        with fits.open(maskfn) as mask:
                            mask[0].name = 'mask'
                            hdu.append(mask[0])

                    if psf_downloaded:
                        with fits.open(psffn) as psf:
                            psf[0].name = 'psf'
                            hdu.append(psf[0])

                    if diff_downloaded:
                        with fits.open(difffn) as diff:
                            diff[0].name = 'diff'
                            hdu.append(diff[0])

                    if diffpsf_downloaded:
                        with fits.open(diffpsffn) as diffpsf:
                            diffpsf[0].name = 'diff_psf'
                            hdu.append(diffpsf[0])

                for f in (maskfn, psffn, difffn, diffpsffn):
                    if os.path.exists(f):
                        os.unlink(f)

                self.db.execute('''
                UPDATE found SET
                  archivefile=?,
                  sci_sync_date=?,
                  sciimg=?,
                  mskimg=?,
                  scipsf=?,
                  diffimg=?,
                  diffpsf=?
                WHERE foundid=?
                ''', (fn, sync_date, sci_downloaded, mask_downloaded,
                      psf_downloaded, diff_downloaded, diffpsf_downloaded,
                      row['foundid']))

                self.db.commit()

                self.logger.info('  [{}] {}'.format(
                    count, os.path.basename(fn)))
                count -= 1
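
The cutout headers above locate the target with all_world2pix, which maps world coordinates onto 0-based pixel coordinates. A minimal sketch against a synthetic TAN projection header:

from astropy.io import fits
from astropy.wcs import WCS

header = fits.Header()
header['CTYPE1'], header['CTYPE2'] = 'RA---TAN', 'DEC--TAN'
header['CRVAL1'], header['CRVAL2'] = 150.0, 2.0       # reference sky position, deg
header['CRPIX1'], header['CRPIX2'] = 50.0, 50.0       # reference pixel (1-based)
header['CDELT1'], header['CDELT2'] = -0.0001, 0.0001  # deg/pixel

wcs = WCS(header)
x, y = wcs.all_world2pix(150.0, 2.0, 0)  # origin=0 -> 0-based pixel coords
print(int(x), int(y))  # 49 49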