Example 1
class BlissScheduler(Scheduler):
    _defaults = odict(Scheduler._defaults.items() + [
        ('tactician','coverage'),
        ('windows',os.path.join(fileio.get_datadir(),"bliss-windows.csv")),
        ('targets',os.path.join(fileio.get_datadir(),"bliss-target-fields.csv")),
    ])
    FieldType = BlissFieldArray
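The subclass above rebuilds `_defaults` by concatenating the parent's entries with its own overrides. A minimal sketch of that pattern, assuming `odict` is `collections.OrderedDict` as in the obztak utilities (under Python 3 the `.items()` view must be wrapped in `list()` before concatenation):

from collections import OrderedDict as odict

base = odict([('tactician', 'coverage'), ('windows', 'maglites-windows.csv')])
child = odict(list(base.items()) + [('windows', 'bliss-windows.csv')])
print(child['windows'])  # 'bliss-windows.csv' -- later entries override earlier ones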
Example 2
    def _load_constraints(self, filename=None):
        """ Load Blanco constraint data """
        if filename is None:
            from obztak.utils import fileio
            filename = os.path.join(fileio.get_datadir(),
                                    'blanco_hour_angle_limits.dat')
        self.constraints = np.recfromtxt(filename, names=True)

        # ADW: This is not very pythonic....
        ha_degrees = np.tile(0., len(self.constraints['HA']))
        for ii in range(0, len(self.constraints['HA'])):
            ha_degrees[ii] = proj.hms2dec(self.constraints['HA'][ii])

        # Buffer to protect us from the chicken
        ha_degrees -= 1.25

        self.ha_degrees = ha_degrees
        return self.ha_degrees
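The loop flagged above as "not very pythonic" converts each HA string to degrees one element at a time. A minimal sketch of the equivalent list-comprehension form, using a hypothetical `hms2dec` stand-in for `proj.hms2dec` (assumed here to convert an HH:MM:SS hour-angle string into degrees):

import numpy as np

def hms2dec(hms):
    """Hypothetical stand-in: convert an HH:MM:SS hour angle to degrees."""
    sign = -1.0 if hms.strip().startswith('-') else 1.0
    h, m, s = (abs(float(x)) for x in hms.split(':'))
    return sign * 15.0 * (h + m / 60.0 + s / 3600.0)

ha_strings = ['-05:30:00', '00:00:00', '05:30:00']
ha_degrees = np.array([hms2dec(ha) for ha in ha_strings]) - 1.25  # same 1.25 deg buffer
print(ha_degrees)  # [-83.75  -1.25  81.25]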
Example 3
def testDither(ra_center, dec_center, infile='target_fields.csv', save=False):

    filename = os.path.join(fileio.get_datadir(),
                            '../scratch/ccd_corners_xy_fill.dat')
    data = eval(''.join(open(filename).readlines()))
    ccd_array = []
    for key in data.keys():
        #ccd_array.append(matplotlib.path.Path(data[key]))
        ccd_array.append(data[key])

    data_targets = fileio.csv2rec(infile)

    fig, ax = pylab.subplots(figsize=(8, 8))

    angsep = obztak.utils.projector.angsep(ra_center, dec_center,
                                           data_targets['RA'],
                                           data_targets['DEC'])
    cut = (angsep < 3.) & (data_targets['FILTER']
                           == obztak.utils.constants.BANDS[0]) & (
                               data_targets['TILING'] <= 3)

    print(np.sum(angsep < 3.))
    print(np.sum(data_targets['FILTER'] == obztak.utils.constants.BANDS[0]))
    print(np.sum(cut))

    for ii in np.nonzero(cut)[0]:
        plotFocalPlane(ccd_array, ra_center, dec_center,
                       data_targets['RA'][ii], data_targets['DEC'][ii], ax)

    pylab.xlim(-1.5, 1.5)
    pylab.ylim(-1.5, 1.5)
    pylab.xlabel('x (deg)', labelpad=20)
    pylab.ylabel('y (deg)')
    pylab.title('(RA, Dec) = (%.3f, %.3f)' % (ra_center, dec_center))

    if save:
        pattern = infile.split('target_fields_')[-1].split('.csv')[0]
        pylab.savefig('dither_ra_%.2f_dec_%.2f_%s.pdf' %
                      (ra_center, dec_center, pattern))
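The CCD-corner file is read with `eval` on its raw contents. If the file holds a plain Python dict literal of corner coordinates (an assumption based on how it is used here), `ast.literal_eval` is a safer way to parse it:

import ast
import os

# Hypothetical path, mirroring the snippet above.
filename = os.path.join('scratch', 'ccd_corners_xy_fill.dat')
with open(filename) as f:
    data = ast.literal_eval(f.read())  # parses a literal dict without executing code
ccd_array = [data[key] for key in data]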
Example 4
    def prepare_fields(self,
                       infile=None,
                       outfile=None,
                       mode='smash_dither',
                       plot=True,
                       smcnod=False):
        """ Create the list of fields to be targeted by this survey.

        Parameters:
        -----------
        infile : File containing all possible field locations.
        outfile: Output file of selected fields
        mode   : Mode for dithering: 'smash_dither', 'smash_rotate', 'decam_dither', 'none'
        plot   : Create an output plot of selected fields.

        Returns:
        --------
        fields : A FieldArray of the selected fields.
        """
        # Import the dither function here...
        #def dither(ra,dec,dx,dy):
        #    return ra,dec

        if mode is None or mode.lower() == 'none':

            def dither(ra, dec, dx, dy):
                return ra, dec

            TILINGS = [(0, 0), (0, 0), (0, 0), (0, 0)]
        elif mode.lower() == 'smash_dither':
            TILINGS = [(0, 0), (1.0, 0.0), (-1.0, 0.0), (0.0, -0.75)]
            dither = self.smash_dither
        elif mode.lower() == 'smash_rotate':
            TILINGS = [(0, 0), (0.75, 0.75), (-0.75, 0.75), (0.0, -0.75)]
            dither = self.smash_rotate
        elif mode.lower() == 'decam_dither':
            TILINGS = [(0., 0.), (8 / 3. * CCD_X, -11 / 3. * CCD_Y),
                       (8 / 3. * CCD_X, 8 / 3. * CCD_Y), (-8 / 3. * CCD_X, 0.)]
            dither = self.decam_dither

        if infile is None:
            infile = os.path.join(fileio.get_datadir(),
                                  'smash_fields_alltiles.txt')
        data = np.recfromtxt(infile, names=True)

        # Apply footprint selection after tiling/dither
        #sel = obztak.utils.projector.footprint(data['RA'],data['DEC'])

        # This is currently a no-op
        smash_id = data['ID']
        ra = data['RA']
        dec = data['DEC']

        nhexes = len(data)
        #ntilings = len(DECAM_DITHERS)
        ntilings = len(TILINGS)
        nbands = len(BANDS)
        nfields = nhexes * nbands * ntilings

        logging.info("Number of hexes: %d" % nhexes)
        logging.info("Number of tilings: %d" % ntilings)
        logging.info("Number of filters: %d" % nbands)

        fields = FieldArray(nfields)
        fields['HEX'] = np.tile(np.repeat(smash_id, nbands), ntilings)
        fields['PRIORITY'].fill(1)
        fields['TILING'] = np.repeat(np.arange(1, ntilings + 1),
                                     nhexes * nbands)
        fields['FILTER'] = np.tile(BANDS, nhexes * ntilings)

        #for i in range(ntilings):
        for i, tiling in enumerate(TILINGS):
            idx0 = i * nhexes * nbands
            idx1 = idx0 + nhexes * nbands
            ra_dither, dec_dither = dither(ra, dec, tiling[0], tiling[1])
            fields['RA'][idx0:idx1] = np.repeat(ra_dither, nbands)
            fields['DEC'][idx0:idx1] = np.repeat(dec_dither, nbands)

        # Apply footprint selection after tiling/dither
        sel = self.footprint(fields['RA'], fields['DEC'])  # NORMAL OPERATION
        if smcnod:
            # Include SMC northern overdensity fields
            sel_smcnod = self.footprintSMCNOD(fields)  # SMCNOD OPERATION
            sel = sel | sel_smcnod
            #sel = sel_smcnod
            fields['PRIORITY'][sel_smcnod] = 99
        #if True:
        #    # Include 'bridge' region between Magellanic Clouds
        #    sel_bridge = self.footprintBridge(fields['RA'],fields['DEC'])
        #    sel = sel | sel_bridge
        sel = sel & (fields['DEC'] > constants.SOUTHERN_REACH)
        fields = fields[sel]

        logging.info("Number of target fields: %d" % len(fields))

        if plot:
            import pylab as plt
            import obztak.utils.ortho

            plt.ion()

            fig, basemap = obztak.utils.ortho.makePlot('2016/2/11 03:00',
                                                       center=(0, -90),
                                                       airmass=False,
                                                       moon=False)

            proj = obztak.utils.ortho.safeProj(basemap, fields['RA'],
                                               fields['DEC'])
            basemap.scatter(*proj,
                            c=fields['TILING'],
                            edgecolor='none',
                            s=50,
                            cmap='Spectral',
                            vmin=0,
                            vmax=len(TILINGS))
            colorbar = plt.colorbar(label='Tiling')

            if outfile:
                outfig = os.path.splitext(outfile)[0] + '.png'
                fig.savefig(outfig, bbox_inches='tight')
            if not sys.flags.interactive:
                plt.show(block=True)

        if outfile: fields.write(outfile)

        return fields
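The bookkeeping above lays out `nfields = nhexes * nbands * ntilings` rows grouped first by tiling, then by hex, then by band. A small sketch of the `np.tile`/`np.repeat` ordering with made-up values:

import numpy as np

hexes = np.array([101, 102])   # 2 hexes
bands = ['g', 'r']             # 2 bands
ntilings = 2

hex_col    = np.tile(np.repeat(hexes, len(bands)), ntilings)
tiling_col = np.repeat(np.arange(1, ntilings + 1), len(hexes) * len(bands))
filter_col = np.tile(bands, len(hexes) * ntilings)

print(hex_col)     # [101 101 102 102 101 101 102 102]
print(tiling_col)  # [1 1 1 1 2 2 2 2]
print(filter_col)  # ['g' 'r' 'g' 'r' 'g' 'r' 'g' 'r']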
Example 5
class Scheduler(object):
    """
    Deal with survey scheduling.
    """
    _defaults = odict([
        #('tactician','coverage'),
        ('windows', os.path.join(fileio.get_datadir(),
                                 "maglites-windows.csv")),
        ('targets',
         os.path.join(fileio.get_datadir(), "maglites-target-fields.csv")),
    ])
    FieldType = FieldArray

    def __init__(self,
                 target_fields=None,
                 windows=None,
                 completed_fields=None):
        self.load_target_fields(target_fields)
        self.load_windows(windows)
        self.load_observed_fields()
        self.load_completed_fields(completed_fields)

        self.scheduled_fields = self.FieldType()
        self.observatory = CTIO()

        self.create_seeing()

    def create_seeing(self, filename=None, mode='qc'):
        import obztak.seeing
        #dirname ='/Users/kadrlica/delve/observing/data/'
        #basename = 'delve_sim_01.csv.gz'
        #filename = os.path.join(dirname,basename)
        if mode == 'dimm':
            self.seeing = obztak.seeing.DimmSeeing(filename=filename)
        elif mode == 'qc':
            self.seeing = obztak.seeing.QcSeeing(filename=filename)
        else:
            self.seeing = obztak.seeing.QcSeeing(filename=filename)

        return self.seeing

    def load_target_fields(self, target_fields=None):
        if target_fields is None:
            target_fields = self._defaults['targets']

        if isinstance(target_fields, basestring):
            self.target_fields = self.FieldType.read(target_fields)
        else:
            self.target_fields = self.FieldType(target_fields)
        return self.target_fields

    def load_windows(self, windows=None):
        """
        Load the set of start and stop times for the observation windows.
        """
        if windows is None:
            windows = self._defaults['windows']
            logging.info("Setting default observing windows:\n %s" % windows)

        if isinstance(windows, basestring):
            windows = fileio.csv2rec(windows)

        self.windows = []
        for start, end in windows:
            self.windows.append([ephem.Date(start), ephem.Date(end)])

        # Sanity check that observation windows are properly sorted
        for ii, (start, end) in enumerate(self.windows):
            msg = 'Observation windows are not properly sorted\n'
            msg += '%s: %s -- %s' % (get_nite(start), datestr(start),
                                     datestr(end))
            if (end < start):
                logging.warn(msg)
            if ii > 0 and (start < self.windows[ii - 1][1]):
                logging.warn(msg)

        logging.debug('Observation Windows:')
        for start, end in self.windows:
            logging.debug('  %s: %s UTC -- %s UTC' %
                          (get_nite(start), datestr(start), datestr(end)))
        logging.debug(30 * '-')
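    # Note: pyephem dates compare and subtract as plain floats (days since
    # 1899/12/31 12:00 UTC), which is what the window sanity checks above
    # rely on. For example:
    #   start, end = ephem.Date('2017/2/21 00:30'), ephem.Date('2017/2/21 09:45')
    #   end > start                   # -> True
    #   (end - start) / ephem.hour    # -> 9.25 (window length in hours)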

    def load_observed_fields(self):
        """
        Load fields from the telemetry database that were already observed.
        """
        try:
            fields = self.FieldType.load_database()
        except Exception as e:
            logging.warn("Failed to load completed exposures from database")
            logging.info(e)
            fields = self.FieldType()
        self.observed_fields = fields
        return self.observed_fields

    def load_completed_fields(self, completed_fields=None):
        """Load completed fields. The default behavior is to load the
        observed_fields as completed_fields. However, if the string
        'None' is passed then return an empty FieldArray.

        Parameters:
        -----------
        completed_fields : Filename, list of filenames, or FieldArray-type object.

        Returns:
        --------
        fields           : FieldArray of the completed fields
        """
        # Deal with 'None' string
        if isinstance(completed_fields, list):
            if completed_fields[0].lower() == 'none':
                self.completed_fields = self.FieldType()
                return self.completed_fields
        elif isinstance(completed_fields, basestring):
            if completed_fields.lower() == 'none':
                self.completed_fields = self.FieldType()
                return self.completed_fields

        self.completed_fields = copy.deepcopy(self.observed_fields)

        if not completed_fields:
            return self.completed_fields

        if isinstance(completed_fields, basestring):
            completed_fields = [completed_fields]

        if isinstance(completed_fields, list):
            fields = self.FieldType()
            for filename in completed_fields:
                fields = fields + self.FieldType.read(filename)

            completed_fields = fields

        new = ~np.in1d(completed_fields.unique_id,
                       self.completed_fields.unique_id)
        new_fields = completed_fields[new]
        self.completed_fields = self.completed_fields + new_fields
        return self.completed_fields
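    # The np.in1d test above appends only fields whose unique_id is not
    # already in completed_fields. Illustrative values:
    #   existing = np.array(['f001', 'f002'])
    #   incoming = np.array(['f002', 'f003'])
    #   incoming[~np.in1d(incoming, existing)]   # -> ['f003']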

    def create_tactician(self, cls=None, mode=None):
        """ Create a tactician in the given mode.

        Parameters:
        -----------
        cls : the tactician class [defaults to survey]
        mode: the tactician mode

        Returns:
        --------
        tac : the tactician
        """
        return tactician_factory(cls=cls, mode=mode)

    def select_field(self, date, mode=None):
        """
        Select field(s) using the survey tactician.

        Parameters:
        -----------
        date       : ephem.Date object
        mode       : Type of tactician to use for selecting field

        Returns:
        --------
        field      : selected field(s) from tactician
        """
        sel = ~np.in1d(self.target_fields['ID'], self.completed_fields['ID'])

        # ADW: Why do we create the tactician each time?
        self.tactician = self.create_tactician(mode=mode)
        self.tactician.set_date(date)
        self.tactician.set_target_fields(self.target_fields[sel])
        self.tactician.set_completed_fields(self.completed_fields)
        self.tactician.fwhm = self.fwhm

        field_select = self.tactician.select_fields()

        logging.debug(str(field_select))

        # For diagnostic purposes
        if False and len(self.scheduled_fields) % 10 == 0:
            weight = self.tactician.weight
            ortho.plotWeight(field_select[-1], self.target_fields,
                             self.tactician.weight)
            raw_input('WAIT')

        if len(field_select) == 0:
            logging.error("No field selected... we've got problems.")
            msg = "date=%s\n" % (datestr(date))
            msg += "index_select=%s, index=%s\n" % (index_select, index)
            msg += "nselected=%s, selection=%s\n" % (cut.sum(),
                                                     cut[index_select])
            msg += "weights=%s" % weight
            logging.info(msg)
            #ortho.plotWeight(self.scheduled_fields[-1], self.target_fields, self.tactician.weight)
            #ortho.plotField(self.scheduled_fields[-1],self.scheduled_fields,options_basemap=dict(date='2017/02/20 05:00:00'))
            raw_input('WAIT')
            import pdb
            pdb.set_trace()
            raise Exception()

        return field_select

    def run(self, tstart=None, tstop=None, clip=False, plot=False, mode=None):
        """
        Schedule a chunk of exposures. This is the loop where the date is incremented.

        Parameters:
        -----------
        tstart : Chunk start time
        tstop  : Chunk end time (may be replaced with chunk length)
        plot   : Plot the chunk (may be removed)

        Returns:
        --------
        fields : Scheduled fields
        """
        # Reset the scheduled fields
        self.scheduled_fields = self.FieldType()

        # If no tstop, run for 90 minutes
        if tstart is None: tstart = ephem.now()
        if tstop is None:
            timedelta = 90 * ephem.minute
            tstop = tstart + timedelta

        # Convert strings into dates
        if isinstance(tstart, basestring):
            tstart = ephem.Date(tstart)
        if isinstance(tstop, basestring):
            tstop = ephem.Date(tstop)

        msg = "\nRun start: %s\n" % datestr(tstart, 4)
        msg += "Run end: %s\n" % datestr(tstop, 4)
        logging.debug(msg)

        msg = "Previously completed fields: %i" % len(self.completed_fields)
        logging.info(msg)

        # This is not safe since tactician is re-created in select_field
        self.tactician = self.create_tactician(mode=mode)
        msg = "Scheduling with '%s' in mode '%s'" % (
            self.tactician.__class__.__name__, self.tactician.mode)
        logging.info(msg)

        self.seeing.set_date(datestr(tstart))
        self.fwhm = self.seeing.get_fwhm(band='i', airmass=1.0)
        logging.info("Predicted i-band zenith fwhm: %.2f arcsec" % self.fwhm)
        logging.debug(self.seeing.raw)

        date = tstart
        latch = True
        while latch:
            logging.debug(' ' + datestr(date, 4))

            # Check to see if in valid observation window
            if self.windows is not None:
                inside = False
                for window in self.windows:
                    if date >= window[0] and date < window[-1]:
                        inside = True
                        break

                if not inside:
                    if clip:
                        break
                    else:
                        msg = 'Date outside of nominal observing windows'
                        logging.warning(msg)

            # Select one (or more) fields from the tactician
            try:
                field_select = self.select_field(date, mode)
            except Exception as e:
                # Only write if error occurred outside observing window
                if not inside:
                    logging.warning(str(e))
                    break
                else:
                    raise (e)

            # Now update the time from the last selected field (note duplication in tactician.select_field)
            fieldtime = field_select[-1][
                'EXPTIME'] * ephem.second + constants.OVERHEAD
            date = ephem.Date(field_select[-1]['DATE']) + fieldtime

            self.completed_fields = self.completed_fields + field_select
            self.scheduled_fields = self.scheduled_fields + field_select

            msg = " %(DATE).19s: id=%(ID)10s, secz=%(AIRMASS).2f, slew=%(SLEW).2f"
            msg += ", moon=%(PHASE).0f%%,%(ALT).0fdeg"
            for i, f in zip(field_select.unique_id, field_select):
                params = dict([('ID', i)] + [(k, f[k]) for k in f.dtype.names])
                params.update({
                    'PHASE': self.tactician.moon.phase,
                    "ALT": np.degrees(self.tactician.moon.alt)
                })
                logging.info(msg % params)

            #if plot: self.plotField(date, field_select)
            if plot:
                ortho.plotField(field_select[:-1], self.target_fields,
                                self.completed_fields)
            if date >= tstop: break

        msg = "Newly scheduled fields: %i" % len(self.scheduled_fields)
        logging.info(msg)

        return self.scheduled_fields

    def schedule_field(self,
                       hex,
                       tiling,
                       band=None,
                       date=None,
                       plot=False,
                       mode=None):
        """
        Schedule a single field at a given time.

        Parameters:
        -----------
        hexid  : the hex ID of the field
        tiling : the tiling number of the field
        band   : The band of the field
        date   : The date/time for observation
        plot   : Plot the output
        mode   : Mode for scheduler tactician

        Returns:
        --------
        field : The scheduled field
        """
        # Probably cleaner to make this its own tactician
        date = ephem.Date(date) if date else ephem.now()

        select = (self.target_fields['HEX'] == hex)
        select &= (self.target_fields['TILING'] == tiling)
        if band is not None:
            select &= (self.target_fields['FILTER'] == band)
        index = np.nonzero(select)[0]

        field = self.target_fields[select]
        nfields = select.sum()
        field['DATE'] = map(datestring, nfields * [date])
        return field

    def schedule_chunk(self,
                       tstart=None,
                       chunk=60,
                       clip=False,
                       plot=False,
                       mode=None):
        """
        Schedule a chunk of exposures.

        Parameters:
        -----------
        tstart : Start time (UTC); if `None`, use `ephem.now()`
        chunk  : Chunk of time to schedule.
        plot   : Dynamically plot each scheduled exposure
        mode   : Mode for scheduler tactician

        Returns:
        --------
        fields : Scheduled fields
        """
        # If no tstop, run for 90 minutes
        if tstart is None: tstart = ephem.now()
        tstop = tstart + chunk * ephem.minute

        return self.run(tstart, tstop, clip, plot, mode)

    def schedule_nite(self,
                      date=None,
                      start=None,
                      chunk=60,
                      clip=False,
                      plot=False,
                      mode=None):
        """
        Schedule a night of observing.

        A `nite` is defined by the day (UTC) at noon local time before
        observing started.

        Parameters:
        -----------
        date  : The date of the nite to schedule
        chunk : The duration of a chunk of exposures (minutes)
        plot  : Dynamically plot the progress after each chunk
        mode  : Mode for scheduler tactician

        Returns:
        --------
        chunks : A list of the chunks generated for the scheduled nite.
        """

        # Create the nite
        nite = get_nite(date)

        # Convert chunk from minutes to days (ephem date units)
        if chunk > 1: chunk = chunk * ephem.minute

        try:
            nites = [get_nite(w[0]) for w in self.windows]
            idx = nites.index(nite)
            winstart, finish = self.windows[idx]
            if start is None:
                start = winstart
            else:
                logging.warn("Over-writing nite start time")
        except (TypeError, ValueError):
            msg = "Requested nite (%s) not found in windows:\n" % nite
            msg += '[' + ', '.join([n for n in nites]) + ']'
            logging.warning(msg)

            start = date
            self.observatory.date = date
            self.observatory.horizon = self.observatory.twilight
            finish = self.observatory.next_rising(ephem.Sun(), use_center=True)
            self.observatory.horizon = '0'

        logging.info("Night start (UTC):  %s" % datestr(start))
        logging.info("Night finish (UTC): %s" % datestr(finish))

        chunks = []
        i = 0
        while start < finish:
            i += 1
            msg = "Scheduling %s -- Chunk %i" % (start, i)
            logging.debug(msg)
            end = start + chunk

            try:
                scheduled_fields = self.run(start,
                                            end,
                                            clip=clip,
                                            plot=False,
                                            mode=mode)
            except ValueError:
                # Write fields even if there is an error
                #chunks.append(self.scheduled_fields)
                break

            if plot:
                field_select = scheduled_fields[-1:]
                bmap = ortho.plotField(field_select, self.target_fields,
                                       self.completed_fields)
                if (raw_input(' ...continue ([y]/n)').lower() == 'n'):
                    import pdb
                    pdb.set_trace()

            chunks.append(scheduled_fields)
            fieldtime = chunks[-1]['EXPTIME'][
                -1] * ephem.second + constants.OVERHEAD
            start = ephem.Date(chunks[-1]['DATE'][-1]) + fieldtime
            #start = end

        if plot: raw_input(' ...finish... ')

        return chunks

    def schedule_survey(self,
                        start=None,
                        end=None,
                        chunk=60,
                        plot=False,
                        mode=None,
                        write=False,
                        dirname=None):
        """
        Schedule the entire survey.

        Parameters:
        -----------
        start : Start of survey (int or str)
        end   : End of survey (int or str)
        chunk : The duration of a chunk of exposures (minutes)
        plot  : Dynamically plot the progress after each night
        mode  : Mode of scheduler tactician

        Returns:
        --------
        scheduled_nites : An ordered dictionary of scheduled nites
        """

        self.scheduled_nites = odict()

        for tstart, tend in self.windows:
            if start is not None and ephem.Date(tstart) < ephem.Date(start):
                continue
            if end is not None and ephem.Date(tend) > ephem.Date(end):
                continue

            #nite = nitestring(tstart)
            nite = get_nite(tstart)

            try:
                chunks = self.schedule_nite(tstart,
                                            chunk,
                                            clip=True,
                                            plot=False,
                                            mode=mode)
            except ValueError as error:
                ortho.plotField(self.completed_fields[-1:], self.target_fields,
                                self.completed_fields)
                raise (error)

            self.scheduled_nites[nite] = chunks

            if write:
                self.write_nite(nite, chunks, dirname=dirname)

            if plot:
                ortho.plotField(self.completed_fields[-1:], self.target_fields,
                                self.completed_fields)
                if (raw_input(' ...continue ([y]/n)').lower() == 'n'):
                    import pdb
                    pdb.set_trace()

        if plot: raw_input(' ...finish... ')
        return self.scheduled_nites

    def write_nite(self, nite, chunks, dirname=None):
        if dirname:
            outdir = os.path.join(dirname, nite)
        else:
            outdir = os.path.join(nite)
        if not os.path.exists(outdir): os.makedirs(outdir)
        outfile = os.path.join(outdir, nite + '.json')
        base, ext = os.path.splitext(outfile)

        for i, chunk in enumerate(chunks):
            if len(chunks) > 1:
                outfile = base + '_%02d' % (i + 1) + ext
            logging.debug("Writing %s..." % outfile)
            chunk.write(outfile)

    def write(self, filename):
        self.scheduled_fields.write(filename)

    @classmethod
    def common_parser(cls):
        """
        Common argument parser for scheduler tools.
        """
        from obztak.utils.parser import Parser, DatetimeAction

        description = __doc__
        parser = Parser(description=description)
        #parser.add_argument('--survey',choices=['obztak','maglites','bliss'],
        #                    default = None, help='choose survey to schedule.')
        parser.add_argument('-p',
                            '--plot',
                            action='store_true',
                            help='create visual output.')
        parser.add_argument('--utc',
                            '--utc-start',
                            dest='utc_start',
                            action=DatetimeAction,
                            help="start time for observation.")
        parser.add_argument('--utc-end',
                            action=DatetimeAction,
                            help="end time for observation.")
        parser.add_argument('-k',
                            '--chunk',
                            default=60.,
                            type=float,
                            help='time chunk (minutes)')
        parser.add_argument('-f',
                            '--fields',
                            default=None,
                            help='all target fields.')
        #parser.add_argument('-m','--mode',default='coverage',
        #                    help='Mode for scheduler tactician.')
        parser.add_argument('-m',
                            '--mode',
                            default=None,
                            help='mode for scheduler tactician.')
        parser.add_argument('-w',
                            '--windows',
                            default=None,
                            help='observation windows.')
        parser.add_argument('-c',
                            '--complete',
                            nargs='?',
                            action='append',
                            help="fields that have been completed.")
        parser.add_argument('-o',
                            '--outfile',
                            default=None,
                            help='save output file of scheduled fields.')
        parser.add_argument('--write-protect',
                            action='store_true',
                            help='write-protect output files')
        return parser

    @classmethod
    def parser(cls):
        return cls.common_parser()

    @classmethod
    def main(cls):
        args = cls.parser().parse_args()
        scheduler = cls(args.fields, args.windows, args.complete)
        scheduler.run(tstart=args.utc_start,
                      tstop=args.utc_end,
                      plot=args.plot)
        if args.outfile:
            scheduler.scheduled_fields.write(args.outfile)

        return scheduler
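A hedged usage sketch of the class above, assuming the default `maglites-windows.csv` and `maglites-target-fields.csv` files listed in `_defaults` are available in the package data directory; the times are illustrative:

sched = Scheduler()                       # default targets, windows, completed fields
fields = sched.run(tstart='2017/2/21 00:30',
                   tstop='2017/2/21 02:00',
                   plot=False)
fields.write('scheduled_fields.csv')

Equivalently, `Scheduler.main()` wires the same `run` call to the command-line options defined in `common_parser`.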
Example 6
    def prepare_fields(self,
                       infile=None,
                       outfile=None,
                       mode='bliss_rotate',
                       plot=True,
                       smcnod=False):
        """Create the list of fields to be targeted by the BLISS survey.

        Selection starts with 3 regions:
        - P9 - Planet 9 region above DES footprint (priority 1)
        - LIGO - Region targeted based on LIGO sensitivity maps (and good for MW)
        - Alfredo - Overlap with Alfredo's eRosita companion survey.

        Fields that have been previously covered by DECam are removed
        from the LIGO and Alfredo footprint regions.

        Parameters:
        -----------
        infile : File containing all possible field locations.
        outfile: Output file of selected fields
        mode   : Mode for dithering. default: 'bliss_rotate'
        plot   : Create an output plot of selected fields.

        Returns:
        --------
        fields : A FieldArray of the selected fields.
        """
        # Import the dither function here...
        #def dither(ra,dec,dx,dy):
        #    return ra,dec

        if mode is None or mode.lower() == 'none':

            def dither(ra, dec, dx, dy):
                return ra, dec

            OFFSETS = TILINGS * [(0, 0)]
        elif mode.lower() == 'smash_dither':
            OFFSETS = [(0, 0), (1.0, 0.0), (-1.0, 0.0), (0.0, -0.75)][:TILINGS]
            dither = self.smash_dither
        elif mode.lower() == 'smash_rotate':
            OFFSETS = [(0, 0), (0.75, 0.75), (-0.75, 0.75),
                       (0.0, -0.75)][:TILINGS]
            dither = self.smash_rotate
        elif mode.lower() == 'decam_dither':
            OFFSETS = [(0., 0.), (8 / 3. * CCD_X, -11 / 3. * CCD_Y),
                       (8 / 3. * CCD_X, 8 / 3. * CCD_Y),
                       (-8 / 3. * CCD_X, 0.)][:TILINGS]
            dither = self.decam_dither
        elif mode.lower() == 'coord_rotate':
            OFFSETS = [(0, 0), (153.0, -17.0), (-1.0, 1.0),
                       (0.0, -1.0)][:TILINGS]
            dither = self.coord_rotate
        elif mode.lower() == 'decals_rotate':
            OFFSETS = [(0, 0), (-0.2917, 0.0833), (-0.5861, 0.1333),
                       (-0.8805, 0.1833)][:TILINGS]
            dither = self.decals_rotate
        elif mode.lower() == 'bliss_rotate':
            OFFSETS = [(0, 0), (8 / 3. * CCD_X, -11 / 3. * CCD_Y),
                       (8 / 3. * CCD_X, 8 / 3. * CCD_Y),
                       (-8 / 3. * CCD_X, 0.)][:TILINGS]
            dither = self.decals_rotate
        else:
            msg = "Unrecognized dither mode: %s" % mode
            raise ValueError(msg)
        logging.info("Dither mode: %s" % mode.lower())

        if infile is None:
            #infile = os.path.join(fileio.get_datadir(),'smash_fields_alltiles.txt')
            #infile = os.path.join(fileio.get_datadir(),'ctr-healpy-32-13131.txt')
            infile = os.path.join(fileio.get_datadir(),
                                  'decam-tiles_obstatus.fits')
        #data = np.recfromtxt(infile, names=True)
        raw_data = fitsio.read(infile)
        data = raw_data[(raw_data['PASS'] == 1)]

        # Apply footprint selection after tiling/dither
        #sel = obztak.utils.projector.footprint(data['RA'],data['DEC'])

        # This is currently a no-op
        decals_id = data['TILEID']
        ra = data['RA']
        dec = data['DEC']

        nhexes = len(data)
        #ntilings = len(DECAM_DITHERS)
        ntilings = TILINGS
        nbands = len(BANDS)
        nfields = nhexes * nbands * ntilings

        logging.info("Number of hexes: %d" % nhexes)
        logging.info("Number of tilings: %d" % ntilings)
        logging.info("Number of filters: %d" % nbands)

        fields = FieldArray(nfields)
        fields['HEX'] = np.tile(np.repeat(decals_id, nbands), ntilings)
        fields['PRIORITY'].fill(1)
        fields['TILING'] = np.repeat(np.arange(1, ntilings + 1),
                                     nhexes * nbands)
        fields['FILTER'] = np.tile(BANDS, nhexes * ntilings)

        #for i in range(ntilings):
        for i, offset in enumerate(OFFSETS):
            idx0 = i * nhexes * nbands
            idx1 = idx0 + nhexes * nbands
            ra_dither, dec_dither = dither(ra, dec, offset[0], offset[1])
            #ra_dither = raw_data[raw_data['PASS'] == i+1]['RA']
            #dec_dither = raw_data[raw_data['PASS'] == i+1]['DEC']
            fields['RA'][idx0:idx1] = np.repeat(ra_dither, nbands)
            fields['DEC'][idx0:idx1] = np.repeat(dec_dither, nbands)

        # Apply footprint selection after tiling/dither
        #sel = self.footprint(fields['RA'],fields['DEC']) # NORMAL OPERATION

        # Apply footprint selection after tiling/dither
        p9 = self.planet9(fields['RA'], fields['DEC'])
        ligo = self.ligo_mw(fields['RA'], fields['DEC'])
        alfredo = self.alfredo(fields['RA'], fields['DEC'])
        p9v2 = self.planet9v2(fields['RA'], fields['DEC'])

        fields['PRIORITY'][p9] = 1
        fields['PRIORITY'][ligo] = 2
        fields['PRIORITY'][alfredo] = 3

        #sel = (p9 | ligo | alfredo)
        sel = (p9 | ligo | alfredo | p9v2)

        # Apply telescope constraints
        sel &= (fields['DEC'] > constants.SOUTHERN_REACH)

        # Apply covered fields
        #sel &= self.uncovered(fields['RA'],fields['DEC'],fields['FILTER'])[0]
        # Apply covered fields (but take everything in P9 region)
        uncovered = self.uncovered(fields['RA'], fields['DEC'],
                                   fields['FILTER'])[0]
        sel &= ((p9v2) | (p9 & (fields['RA'] > 180)) | uncovered)

        fields = fields[sel]

        logging.info("Number of target fields: %d" % len(fields))
        logging.debug("Unique priorities: ", np.unique(fields['PRIORITY']))

        if plot:
            import pylab as plt
            from obztak.utils.ortho import makePlot
            from obztak.utils.ortho import DECamOrtho, DECamMcBride

            kwargs = dict(edgecolor='none',
                          cmap='viridis_r',
                          vmin=0,
                          vmax=ntilings)

            fig, ax = plt.subplots(2, 2, figsize=(16, 9))
            plt.subplots_adjust(wspace=0.01,
                                hspace=0.02,
                                left=0.01,
                                right=0.99,
                                bottom=0.01,
                                top=0.99)
            for i, b in enumerate(BANDS):
                plt.sca(ax.flat[i])
                bmap = DECamMcBride()
                bmap.draw_galaxy()
                bmap.draw_des()
                f = fields[fields['FILTER'] == b]
                bmap.scatter(*bmap.proj(f['RA'], f['DEC']),
                             c=COLORS[b],
                             s=15,
                             **kwargs)

            if outfile:
                outfig = os.path.splitext(outfile)[0] + '_mbt.png'
                plt.savefig(outfig, bbox_inches='tight')

            fig, ax = plt.subplots(2, 2, figsize=(10, 10))
            plt.subplots_adjust(wspace=0.05,
                                hspace=0.05,
                                left=0.05,
                                right=0.95,
                                bottom=0.05,
                                top=0.95)
            for i, b in enumerate(BANDS):
                plt.sca(ax.flat[i])
                bmap = DECamOrtho(date='2017/06/02 03:00')
                bmap.draw_galaxy()
                bmap.draw_des()
                f = fields[fields['FILTER'] == b]
                bmap.scatter(*bmap.proj(f['RA'], f['DEC']),
                             c=COLORS[b],
                             s=50,
                             **kwargs)

            if outfile:
                outfig = os.path.splitext(outfile)[0] + '_ortho.png'
                plt.savefig(outfig, bbox_inches='tight')

            if not sys.flags.interactive:
                plt.show(block=True)

        # Just 3rd tiling
        #fields = fields[fields['TILING'] == 3]

        if outfile: fields.write(outfile)

        return fields
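Minimal sketch of the priority assignment above: the boolean-mask assignments are applied in order, so a field falling in more than one region keeps the last priority written (e.g. a field in both the P9 and LIGO selections ends up with priority 2):

import numpy as np

priority = np.ones(3, dtype=int)
p9   = np.array([True,  True, False])
ligo = np.array([False, True, True])
priority[p9] = 1
priority[ligo] = 2
print(priority)  # [1 2 2]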
Example 7
"""
__author__ = "Alex Drlica-Wagner"
import os
import time

import pylab as plt
import obztak.utils.projector
import numpy as np

from obztak.utils.projector import SphericalRotator
from obztak.utils.projector import angsep, match, footprint
from obztak.utils.ortho import makePlot, safeProj
from obztak.utils.constants import SMASH_POLE
import obztak.utils.fileio as fileio

datadir = fileio.get_datadir()
filename = os.path.join(datadir, 'smash_fields_alltiles.txt')
data = np.recfromtxt(filename, names=True)
d = data


def smash_dither(dx, dy, ra=d['RA'], dec=d['DEC']):
    ra0, dec0 = SMASH_POLE
    # Rotate the pole at SMASH_POLE to (0,-90)
    R1 = SphericalRotator(ra0, 90 + dec0)
    ra1, dec1 = R1.rotate(ra, dec)
    # Dither the offset
    ra2 = ra1 + dx / np.cos(np.radians(dec1))
    dec2 = dec1 + dy
    # Rotate back to the original frame
    return R1.rotate(ra2, dec2, invert=True)
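A small usage sketch for the dither above, assuming `SphericalRotator.rotate` accepts scalars as well as arrays (offsets in degrees):

# Offset every SMASH field centre by (0.75, 0.75) deg in the rotated frame
ra_new, dec_new = smash_dither(0.75, 0.75)

# Or dither a single, hypothetical pointing
ra_one, dec_one = smash_dither(0.75, 0.75, ra=30.0, dec=-75.0)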
Example 8
def makeDither():

    X_CCD = 0.29878  # This is the FITS y-axis
    Y_CCD = 0.14939  # This is the FITS x-axis

    #ra_center, dec_center = 182., -88.0
    ra_center, dec_center = 182., -68.0
    ra_center, dec_center = 178., -80.0
    #ra_center, dec_center = 351.6670, -72.0863
    #ra_center, dec_center = 351.6670, -89.

    pattern = 'alex'
    if pattern == 'none':
        dither_array = []
    elif pattern == 'large':
        dither_array = [[4 * X_CCD / 3., 4. * Y_CCD / 3.],
                        [8. * X_CCD / 3., -11 * Y_CCD / 3.]]
    elif pattern == 'small':
        dither_array = [[1 * X_CCD / 3., 1. * Y_CCD / 3.],
                        [2. * X_CCD / 3., -1 * Y_CCD / 3.]]
    elif pattern == 'alex':
        ### ADW: The pattern suggested is actually in SMASH coordinates not celestial.
        dither_array = [[0.75, 0.75], [-0.75, 0.75]]
    #dither_array = [[4 * X_CCD / 3., 4. * Y_CCD / 3.],
    #                [2. * X_CCD / 3., -4 * Y_CCD / 3.]]
    #dither_array = [[4 * X_CCD / 3., 4. * Y_CCD / 3.]]
    #dither_array = [[5 * X_CCD / 3., 5. * Y_CCD / 3.]]
    #mode = 'single'
    mode = 'fill'
    if mode == 'single':
        angsep_max = 0.
    if mode == 'fill':
        angsep_max = 3.

    # This should use the environment variable MAGLITESDIR to define the path
    datadir = fileio.get_datadir()
    filename = os.path.join(datadir, 'smash_fields_alltiles.txt')
    data_alltiles = np.recfromtxt(filename, names=True)

    filename = os.path.join(datadir, '../scratch/ccd_corners_xy_fill.dat')
    data = eval(''.join(open(filename).readlines()))
    ccd_array = []
    for key in data.keys():
        #ccd_array.append(matplotlib.path.Path(data[key]))
        ccd_array.append(data[key])
    """
    n = 400
    x_mesh, y_mesh = np.meshgrid(np.linspace(-1.1, 1.1, n), np.linspace(-1.1, 1.1, n))

    count = np.zeros([n, n])

    for ii in range(0, len(ccd_array)):
        count += ccd_array[ii].contains_points(zip(x_mesh.flatten(), y_mesh.flatten())).reshape([n, n])

    pylab.figure()
    pylab.pcolor(x_mesh, y_mesh, count)
    """

    fig, ax = pylab.subplots(figsize=(8, 8))

    # Make the collection and add it to the plot.
    #coll = PolyCollection(ccd_array, alpha=0.3, color='red', edgecolors='none')
    #ax.add_collection(coll)

    #plotFocalPlane(ccd_array, ra_center, dec_center, ra_center, dec_center, ax)
    #plotFocalPlane(ccd_array, ra_center, dec_center, ra_center, dec_center + 0.1, ax)

    angsep = obztak.utils.projector.angsep(ra_center, dec_center,
                                           data_alltiles['RA'],
                                           data_alltiles['DEC'])
    for ii in np.nonzero(angsep < (np.min(angsep) + 0.01 + angsep_max))[0]:
        plotFocalPlane(ccd_array, ra_center, dec_center,
                       data_alltiles['RA'][ii], data_alltiles['DEC'][ii], ax)

        for x_dither, y_dither in dither_array:
            ra_dither, dec_dither = applyDither(data_alltiles['RA'][ii],
                                                data_alltiles['DEC'][ii],
                                                x_dither, y_dither)
            plotFocalPlane(ccd_array, ra_center, dec_center,
                           ra_dither, dec_dither, ax)

    pylab.xlim(-1.5, 1.5)
    pylab.ylim(-1.5, 1.5)
    pylab.xlabel('x (deg)', labelpad=20)
    pylab.ylabel('y (deg)')
    pylab.title('(RA, Dec) = (%.3f, %.3f)' % (ra_center, dec_center))
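The dither offsets above are simple fractions of the CCD footprint (X_CCD, Y_CCD in degrees); for example, the 'large' pattern steps by roughly:

X_CCD, Y_CCD = 0.29878, 0.14939
print(8. * X_CCD / 3., -11. * Y_CCD / 3.)   # approx (0.797, -0.548) deg
print(4. * X_CCD / 3., 4. * Y_CCD / 3.)     # approx (0.398, 0.199) deg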