Example #1
from astroquery.eso import Eso

def login(user):
    # Create an ESO archive client and log in, caching the password in the system keyring
    eso = Eso()
    eso.login(user, store_password=True)
    return eso
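A minimal usage sketch (the username is a placeholder; with store_password=True astroquery caches the password in the system keyring):
eso = login("your_eso_username")  # placeholder username
print(eso.list_instruments())     # e.g. list the instruments available for queries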
Example #2
    "2013-01-23",
    "2013-01-26",
    "2013-01-30",
    "2013-02-01",
    "2013-02-06",
    "2013-02-07",
    "2013-02-08",
    "2013-02-11",
    "2013-02-13",
]

destination = join(dirname(__file__), "raw")
print(destination)

dates = [dt.datetime.strptime(d, "%Y-%m-%d") for d in dates]
day = dt.timedelta(days=1)

eso = Eso()
eso.login("awehrhahn")

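# NOTE: the truncated script defines `instrument` further up; a hypothetical
# stand-in value so the loop below runs:
instrument = "HARPS"  # any instrument name accepted by eso.query_main
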
for d in dates:
    filters = {
        "instrument": instrument,
        "dp_cat": "CALIB",
        "stime": str(d - day)[:10],
        "etime": str(d + day)[:10],
    }
    table = eso.query_main(column_filters=filters)
    files = table["Dataset ID"]
    eso.retrieve_data(files, destination=destination, continuation=True)
Example #3
import numpy as np
from numpy import array as nparr

import astropy.units as u
from astropy.coordinates import SkyCoord
from astropy.time import Time
from astroquery.eso import Eso


def wrangle_eso_for_rv_availability(ra, dec):
    """
    Checks via ESO query for available RVs on:
        ['HARPS', 'ESPRESSO', 'FORS2', 'UVES', 'XSHOOTER']

    Possible future expansion: actually retrieve the RVs. (For now, this is
    just used as a flag to let the user know the RVs might exist!)

    Returns tuple of:
        (nan, nan, provenance)
    """
    eso = Eso()
    eso.ROW_LIMIT = 9999

    coord = SkyCoord(ra=ra, dec=dec, unit=(u.deg, u.deg), frame='icrs')
    print('begin ESO search for {}'.format(repr(coord)))

    rastr = (str(coord.ra.to_string(u.hour)).replace('h', ' ').replace(
        'm', ' ').replace('s', ' '))

    decstr = (str(coord.dec.to_string()).replace('d', ' ').replace(
        'm', ' ').replace('s', ' '))

    # search within 10 arcsec of given position
    boxsize = '00 00 10'
    res = eso.query_main(column_filters={
        'ra': rastr,
        'dec': decstr,
        'box': boxsize
    })

    if res is None:
        return np.nan, np.nan, np.nan

    # limit search to the following instruments, in order of preference
    instruments = ['HARPS', 'ESPRESSO', 'FORS2', 'UVES', 'XSHOOTER']
    sel = np.zeros((len(res))).astype(bool)
    for instrument in instruments:
        sel |= (nparr(res['Instrument']) == instrument)
    res = res[sel]

    # exclude unwanted categories (calibration frames)
    badcategories = ['CALIB']
    sel = np.zeros((len(res))).astype(bool)
    for badcategory in badcategories:
        sel |= (nparr(res['Category']) != badcategory)
    res = res[sel]

    if len(res) >= 1:

        # XSHOOTER doesn't seem to provide archival RVs; they would need to be
        # derived from the spectra yourself.
        if np.all(nparr(res['Instrument']) == 'XSHOOTER'):
            return np.nan, np.nan, 'XSHOOTER'

        # ESO observations are embargoed for a year; if every match is still
        # within the proprietary period, just report the instrument.
        nt = Time.now()
        embargo_end = nt.mjd - 365
        if np.all(nparr(res['MJD-OBS']) > embargo_end):
            return np.nan, np.nan, np.unique(res['Instrument'])[0]

        # HARPS provides archival RVs. Downloading them can be done... but for
        # s6+s7, only a few objects are viable.
        if np.all(nparr(res['Instrument']) == 'HARPS'):
            print('WARNING: SKIPPING AUTOMATION OF HARPS ARCHIVAL RV GETTING')
            return np.nan, np.nan, 'HARPS'

    else:
        return np.nan, np.nan, np.nan
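A usage sketch with placeholder coordinates (decimal degrees); the function only flags that RVs may exist, it does not fetch them:
ra, dec = 280.70, -64.87  # placeholder coordinates in decimal degrees
_, _, provenance = wrangle_eso_for_rv_availability(ra, dec)
print('RV provenance: {}'.format(provenance))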
Example #4
import glob
import os
import shutil

import numpy as np
import astropy.units as u
from astropy.io import fits
from astropy.time import Time
from astroquery.eso import Eso


def grab_calfiles():
    workingdir = os.getcwd()
    handler = Eso()

    ### Set your username here!
    USERNAME = ""

    handler.ROW_LIMIT = 10000
    handler.login(USERNAME, store_password=True)

    def mkdir_safe(dirname):
        '''
        Check if a directory exists; if it doesn't, create it, and if it does, clear it out.
        '''
        if os.path.isdir(dirname):
            flist = glob.glob(dirname + "/*")
            for f in flist:
                os.remove(f)
        else:
            os.mkdir(dirname)

    mkdir_safe("flats")
    mkdir_safe("flatdarks")
    mkdir_safe("darks")

    # Read the first FITS in the folder
    filelist = glob.glob("obj/*.fits")
    print(os.getcwd())
    temphdu = fits.open(filelist[0])
    header = temphdu[0].header
    print("FITS header loaded")

    # Extract relevant query params from science frame
    prop_ID = header["HIERARCH ESO OBS PROG ID"]
    date = Time(header["DATE-OBS"])
    sci_exp = header["EXPTIME"]
    # Set the start and end time of the search - this may need manual tweaking to find the right calfiles.
    # Be generous with this window, since flats, darks, and flat-darks all need to be found for the pipeline to run.
    stime = date
    etime = date + 18 * u.hour
    win_size = header["HIERARCH ESO DET WINDOW NY"]
    sci_wav = header["HIERARCH ESO INS WLEN CWLEN"]
    #print(filelist[0], sci_wav, date)

    # Query flat frames - check they match
    print("Querying ESO Archive")
    flat_table = handler.query_instrument("crires",
                                          column_filters={
                                              'stime': stime.value,
                                              'etime': etime.value,
                                              'dp_type': 'FLAT',
                                              'ins_wlen_cwlen': sci_wav
                                          })
    flat_header = handler.get_headers(flat_table["DP.ID"])
    mask = flat_header["HIERARCH ESO DET WINDOW NY"] != win_size
    flat_table = flat_table[~mask]

    #### If the flat exposure times are not all the same, use the highest one (also matched by the flat-darks below)
    #### Download the flat fields
    flat_exp_time = np.max(flat_table["EXPTIME"])
    flat_files = handler.retrieve_data(flat_table["DP.ID"])
    #print(flat_files)

    for f in flat_files:
        shutil.copy(f, "flats")

    #### Grab the dark frames matching the science exposure time
    dark_table = handler.query_instrument("crires",
                                          column_filters={
                                              'stime': stime.value,
                                              'etime': etime.value,
                                              'dp_type': 'DARK',
                                              'exptime': sci_exp
                                          })
    dark_header = handler.get_headers(dark_table['DP.ID'])
    mask = dark_header["HIERARCH ESO DET WINDOW NY"] != win_size
    dark_table = dark_table[~mask]
    dark_files = handler.retrieve_data(dark_table["DP.ID"])

    for d in dark_files:
        shutil.copy(d, "darks")

    #### Grab darks matched to flat fields
    flatdark_table = handler.query_instrument("crires",
                                              column_filters={
                                                  'stime': stime.value,
                                                  'etime': etime.value,
                                                  'dp_type': 'DARK',
                                                  'exptime': flat_exp_time
                                              })
    flatdark_header = handler.get_headers(flatdark_table["DP.ID"])
    mask = flatdark_header["HIERARCH ESO DET WINDOW NY"] != win_size
    flatdark_table = flatdark_table[~mask]
    flatdark_files = handler.retrieve_data(flatdark_table["DP.ID"])

    for d in flatdark_files:
        shutil.copy(d, "flatdarks")

    print("Unpacking and moving!")

    ### Unpack all the files -- several possible commands for this.

    ### For maximum compatibility use "gzip -d *.Z"
    ### pigz is a parallel gzip, but apparently it cannot decompress in parallel.
    ### If you want to use it anyway, "pigz -d *.Z"

    ### For maximum SPEED you could try "ls *.Z | parallel pigz -d" if you have GNU parallel installed.

    os.chdir("flats")
    os.system("pigz -d *.Z")

    os.chdir("../flatdarks")
    os.system("pigz -d *.Z")

    os.chdir("../darks")
    os.system("pigz -d *.Z")

    os.chdir("../")
    print("Calibration selection complete!")
Example #5
    def __init__(self, user):
        self.user = user
        self.eso = Eso()
        self.eso.login(self.user)  # log in to the ESO archive
        self.eso.ROW_LIMIT = -1  # unlimited number of search results
        self.instruments = np.array(['FEROS', 'UVES', 'HARPS', 'ESPRESSO'])
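The fragment above is an __init__ method; a hypothetical usage, assuming the enclosing class is named, say, EsoArchive:
archive = EsoArchive("your_eso_username")  # hypothetical class name and placeholder username
print(archive.instruments)                 # ['FEROS' 'UVES' 'HARPS' 'ESPRESSO']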