from astroquery.eso import Eso


def retrieve_ESO_files(rawpath='/Volumes/passport/w51-apex/raw/',
                       username='******', projids=['098.C-0421',],
                       download=False):

    # don't touch the rest
    Eso.cache_location = rawpath

    Eso.login(username)

    Eso.ROW_LIMIT = 1000000
    #tbl = Eso.query_instrument('apex', pi_coi='ginsburg', cache=False)
    #stbl = tbl[np.char.startswith(tbl['Object'], 'Map') & (tbl['Scan Mode']=='OTF') & (tbl['Number of subscans'] > 10)]
    #programs = set(tbl['ProgId'])
    #projids = set(tbl['APEX Project ID'])

    all_files = []
    for proj in projids:
        tbl = Eso.query_apex_quicklooks(proj, cache=False)
        print(tbl)

        if download:
            files = Eso.retrieve_data(tbl['Product ID'])
            all_files.append(files)

    return all_files
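
A minimal usage sketch for the function above; the cache path and username are placeholders, and the project ID comes from the defaults:

files = retrieve_ESO_files(rawpath='/tmp/apex-raw/',          # hypothetical scratch directory
                           username='your_eso_username',      # replace with a real ESO account
                           projids=['098.C-0421'],
                           download=True)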
Example #2
def retrieve_ESO_files(rawpath='/Volumes/passport/w51-apex/raw/',
                       username='******',
                       projids=[
                           '098.C-0421',
                       ],
                       download=False):

    # don't touch the rest
    Eso.cache_location = rawpath

    Eso.login(username)

    Eso.ROW_LIMIT = 1000000
    #tbl = Eso.query_instrument('apex', pi_coi='ginsburg', cache=False)
    #stbl = tbl[np.char.startswith(tbl['Object'], 'Map') & (tbl['Scan Mode']=='OTF') & (tbl['Number of subscans'] > 10)]
    #programs = set(tbl['ProgId'])
    #projids = set(tbl['APEX Project ID'])

    all_files = []
    for proj in projids:
        tbl = Eso.query_apex_quicklooks(proj, cache=False)
        print(tbl)

        if download:
            files = Eso.retrieve_data(tbl['Product ID'], continuation=True)
            all_files.append(files)

    return all_files
def retrieve_ESO_files(rawpath='/scratch/aginsbur/apex/raw/',
                       username='******', projids=['O-085.F-9311A',
                                                      'E-085.B-0964A',
                                                      'E-093.C-0144A',
                                                      'E-095.C-0242A']):

    # don't touch the rest
    Eso.cache_location = rawpath

    Eso.login(username)

    Eso.ROW_LIMIT = 1000000
    #tbl = Eso.query_instrument('apex', pi_coi='ginsburg', cache=False)
    #stbl = tbl[np.char.startswith(tbl['Object'], 'Map') & (tbl['Scan Mode']=='OTF') & (tbl['Number of subscans'] > 10)]
    #programs = set(tbl['ProgId'])
    #projids = set(tbl['APEX Project ID'])

    for proj in projids:
        tbl = Eso.query_apex_quicklooks(proj)
        print(tbl)
Example #4
def retrieve_ESO_files(rawpath='/scratch/aginsbur/apex/raw/',
                       username='******',
                       projids=[
                           'O-085.F-9311A', 'E-085.B-0964A', 'E-093.C-0144A',
                           'E-095.C-0242A'
                       ]):

    # don't touch the rest
    Eso.cache_location = rawpath

    Eso.login(username)

    Eso.ROW_LIMIT = 1000000
    #tbl = Eso.query_instrument('apex', pi_coi='ginsburg', cache=False)
    #stbl = tbl[np.char.startswith(tbl['Object'], 'Map') & (tbl['Scan Mode']=='OTF') & (tbl['Number of subscans'] > 10)]
    #programs = set(tbl['ProgId'])
    #projids = set(tbl['APEX Project ID'])

    for proj in projids:
        tbl = Eso.query_apex_quicklooks(proj)
        print(tbl)
Example #5
from astroquery.eso import Eso
import shutil

# log in so you can get proprietary data
Eso.login('aginsburg')
# make sure you don't filter out anything
Eso.ROW_LIMIT = 1e6

# List all of your pi/co projects
all_pi_proj = Eso.query_instrument('apex', pi_coi='ginsburg')

# Have a look at the project IDs only
print(set(all_pi_proj['APEX Project ID']))
# set(['E-095.F-9802A-2015', 'E-095.C-0242A-2015', 'E-093.C-0144A-2014'])

# The full project name includes prefix and suffix
full_proj = 'E-095.F-9802A-2015'
proj_id = full_proj[2:-6]

# Then get the APEX quicklook "reduced" data
tbl = Eso.query_apex_quicklooks(prog_id=proj_id)

# and finally, download it
files = Eso.retrieve_data(tbl['Product ID'])

# then move the files to your local directory
for fn in files:
    shutil.move(fn, '.')
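
retrieve_data can also write straight to a destination directory (as the later examples here do), which avoids the copy step; a minimal sketch reusing the same tbl:

files = Eso.retrieve_data(tbl['Product ID'], destination='.')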
Example #6
import logging
import os
import pg
import re
import time
import yaml
from astropy.extern.six import BytesIO, cPickle as pickle
from astropy.table import Table
from astroquery.eso import Eso as ESO
from astroquery.eso.core import _check_response

# Load local catalog of positions.
local_catalog = Table.read("data/HARPS_all.csv")

# Login to ESO.
eso = ESO()
eso.login("andycasey")
eso.ROW_LIMIT = 100000  # Maximum possible number of observations per star

# Connect to the PostgreSQL database.
cwd = os.path.dirname(os.path.realpath(__file__))
with open(os.path.join(cwd, "../db/credentials.yaml"), "r") as fp:
    credentials = yaml.safe_load(fp)
connection = pg.connect(**credentials)


def query_harps_phase3_by_position(ra, dec, **kwargs):
    """
    Query the ESO Phase 3 science archive by position.

    :param ra:
        Right ascension [degrees].
Example #7
def login(user):
    eso = Eso()
    eso.login(user, store_password=True)
    return eso
print("In total we will request {} records in {} requests".format(N, I))

for i in range(I):

    if i < SKIP:
        print("Skipping {}".format(i + 1))
        continue

    print("Starting with batch number {}/{}".format(i + 1, I))

    data = [("dataset", dataset)
            for dataset in records[i * BATCH:(i + 1) * BATCH]]

    # Login to ESO.
    eso = ESO()
    eso.login(ESO_USERNAME)

    prepare_response = eso._session.request(
        "POST", "http://dataportal.eso.org/rh/confirmation", data=data)
    assert prepare_response.ok

    # Additional payload items required for confirmation.
    data += [
        ("requestDescription", ""),
        ("deliveryMediaType",
         "WEB"),  # OR USB_DISK --> Holy shit what the f**k!
        ("requestCommand", "SELECTIVE_HOTFLY"),
        ("submit", "Submit")
    ]

    confirmation_response = eso._session.request(
Example #9
    "2013-01-23",
    "2013-01-26",
    "2013-01-30",
    "2013-02-01",
    "2013-02-06",
    "2013-02-07",
    "2013-02-08",
    "2013-02-11",
    "2013-02-13",
]

destination = join(dirname(__file__), "raw")
print(destination)

dates = [dt.datetime.strptime(d, "%Y-%m-%d") for d in dates]
day = dt.timedelta(days=1)

eso = Eso()
eso.login("awehrhahn")

for d in dates:
    filters = {
        "instrument": instrument,
        "dp_cat": "CALIB",
        "stime": str(d - day)[:10],
        "etime": str(d + day)[:10],
    }
    table = eso.query_main(column_filters=filters)
    files = table["Dataset ID"]
    eso.retrieve_data(files, destination=destination, continuation=True)
Example #10
import glob
import os
import shutil

import numpy as np
from astropy.io import fits
from astropy.time import Time
from astropy import units as u
from astroquery.eso import Eso


def grab_calfiles():
    workingdir = os.getcwd()
    handler = Eso()

    ### Set your username here!
    USERNAME = ""

    handler.ROW_LIMIT = 10000
    handler.login(USERNAME, store_password=True)

    def mkdir_safe(dirname):
        '''
        Check if directory exists - if it doesn't, make it, if it does, clear it out
        '''
        if os.path.isdir(dirname):
            flist = glob.glob(dirname + "/*")
            for f in flist:
                os.remove(f)
        else:
            os.mkdir(dirname)

    mkdir_safe("flats")
    mkdir_safe("flatdarks")
    mkdir_safe("darks")

    # Read the first FITS in the folder
    filelist = glob.glob("obj/*.fits")
    print(os.getcwd())
    temphdu = fits.open(filelist[0])
    header = temphdu[0].header
    print("FITS header loaded")

    # Extract relevant query params from science frame
    prop_ID = header["HIERARCH ESO OBS PROG ID"]
    date = Time(header["DATE-OBS"])
    sci_exp = header["EXPTIME"]
    # Set start and end time of the search - may need to tweak this manually to find the right calfiles.
    # Best to be generous with this window, since the pipeline needs flats, darks, and flat-darks to run.
    stime = date
    etime = date + 18 * u.hour
    win_size = header["HIERARCH ESO DET WINDOW NY"]
    sci_wav = header["HIERARCH ESO INS WLEN CWLEN"]
    #print(filelist[0], sci_wav, date)

    # Query flat frames - check they match
    print("Querying ESO Archive")
    flat_table = handler.query_instrument("crires",
                                          column_filters={
                                              'stime': stime.value,
                                              'etime': etime.value,
                                              'dp_type': 'FLAT',
                                              'ins_wlen_cwlen': sci_wav
                                          })
    flat_header = handler.get_headers(flat_table["DP.ID"])
    mask = flat_header["HIERARCH ESO DET WINDOW NY"] != win_size
    flat_table = flat_table[~mask]

    #### If the flat exposure times are not all the same value, choose the highest one
    #### Download flat fields
    flat_exp_time = np.max(flat_table["EXPTIME"])
    flat_files = handler.retrieve_data(flat_table["DP.ID"])
    #print(flat_files)

    for f in flat_files:
        shutil.copy(f, "flats")

    #### Grab the dark frames matching the science exposure time
    dark_table = handler.query_instrument("crires",
                                          column_filters={
                                              'stime': stime.value,
                                              'etime': etime.value,
                                              'dp_type': 'DARK',
                                              'exptime': sci_exp
                                          })
    dark_header = handler.get_headers(dark_table['DP.ID'])
    mask = dark_header["HIERARCH ESO DET WINDOW NY"] != win_size
    dark_table = dark_table[~mask]
    dark_files = handler.retrieve_data(dark_table["DP.ID"])

    for d in dark_files:
        shutil.copy(d, "darks")

    #### Grab darks matched to flat fields
    flatdark_table = handler.query_instrument("crires",
                                              column_filters={
                                                  'stime': stime.value,
                                                  'etime': etime.value,
                                                  'dp_type': 'DARK',
                                                  'exptime': flat_exp_time
                                              })
    flatdark_header = handler.get_headers(flatdark_table["DP.ID"])
    mask = flatdark_header["HIERARCH ESO DET WINDOW NY"] != win_size
    flatdark_table = flatdark_table[~mask]
    flatdark_files = handler.retrieve_data(flatdark_table["DP.ID"])

    for d in flatdark_files:
        shutil.copy(d, "flatdarks")

    print("Unpacking and moving!")

    ### Unpack all the files -- several possible commands for this.

    ### For maximum compatibility use "gzip -d *.Z"
    ### pigz is a parallel gzip, but apparently it can't decompress in parallel.
    ### If you want to use it despite this, "pigz -d *.Z"

    ### For maximum SPEED you could try "ls *.Z | parallel pigz -d" if you have GNU parallel installed.

    os.chdir("flats")
    os.system("pigz -d *.Z")

    os.chdir("../flatdarks")
    os.system("pigz -d *.Z")

    os.chdir("../darks")
    os.system("pigz -d *.Z")

    os.chdir("../")
    print("Calibration selection complete!")

typemask = [spectral_type_cut(x) for x in spectype]
varmask = [not x for x in varstat]

### Needs to be both not variable and not a GKM/whatever else star
totalmask = np.logical_and(typemask, varmask)

new_subset = cat_subset[totalmask]
print("Filter by spectral_type: %s of %s targets remain" %
      (np.sum(typemask), len(initmask)))

science_frames = new_subset["DP.ID"]

eso = Eso()
eso.login("tkillestein")

### Mask all that have non-512 window
heads = eso.get_headers(science_frames)

goodmsk = heads['HIERARCH ESO DET WINDOW NY'] == 512
print("Filter by windowing: %s of %s targets remain" %
      (np.sum(goodmsk), len(initmask)))

check_calib_list = heads[goodmsk]

final_frames = []
frames_rejected = 0

for head in tqdm(check_calib_list,
                 ascii=True,
Example #12
import numpy as np
from astroquery.eso import Eso


class ESOquery():
    """
    ESO query class
    
    Parameters
    ----------
    user: str
        User name used on the ESO website
        
    Returns
    -------
    """
    def __init__(self, user):
        self.user = user
        self.eso = Eso()
        self.eso.login(self.user)  # log in to ESO
        self.eso.ROW_LIMIT = -1  # unlimited number of search results
        self.instruments = np.array(['FEROS', 'UVES', 'HARPS', 'ESPRESSO'])

    def searchReleaseDate(self, star):
        """
        Searches for the date the data was released
        
        Parameters
        ----------
        star: str
            Name of the star
            
        Returns
        -------
        result: array
            Array with the date of the data release
        """
        search = self.eso.query_main(column_filters={'target': star})
        result = np.array(search['Release_Date'])
        return result

    def searchObservationDate(self, star):
        """
        Searches for the modified Julian Date (JD - 2400000.5) of the start of 
        the observation
        
        Parameters
        ----------
        star: str
            Name of the star
            
        Returns
        -------
        result: array
            Array with the start date of the observations
        """
        search = self.eso.query_main(column_filters={'target': star})
        result = np.array(search['MJD-OBS'])
        return result

    def searchInstruments(self, star):
        """
        Checks which instruments were used for the given star and how many
        observations were made with each
        
        Parameters
        ----------
        star: str
            Name of the star
            
        Returns
        -------
        instrumentDict: dict
            Instruments and number of observations
        """
        searchResult = self.eso.query_main(column_filters={'target': star})
        instruments = np.unique(np.array(searchResult['Instrument']),
                                return_counts=True)
        instrumentDict = dict()
        for i, j in enumerate(instruments[0]):
            instrumentDict[j] = instruments[1][i]
        return instrumentDict

    def _searchAndDownload(self, star, instrument, downloadPath, date, calib):
        """
        Download ESO spectra of a given star; to be used in getStarData()
        
        Parameters
        ----------
        star: str
            Name of the star
        instrument: str
            Instrument we are searching the data for
        downloadPath: str
            Directory where the data is downloaded; if None, the files go to
            the default astroquery download location
        date: float
            Download spectra younger than date (in modified Julian Date)
        calib : str
            Retrieve associated calibration files: 'none' (default), 'raw' for
            raw calibrations, or 'processed' for processed calibrations.
            
        Returns
        -------
        """
        print('Downloading {0} data\n'.format(instrument))
        table = self.eso.query_main(column_filters={
            'instrument': instrument,
            'target': star
        })
        if downloadPath:
            self.eso.retrieve_data(table['Dataset ID'],
                                   destination=downloadPath,
                                   with_calib=calib)
            return 0
        else:
            self.eso.retrieve_data(table['Dataset ID'], with_calib=calib)
        return 0

    def getALLdata(self, star, downloadPath=None, date=None, calib='none'):
        """
        Download ESO spectra of a given star
        
        Parameters
        ----------
        star: str
            Name of the star
        downloadPath: str
            Directory where the data is downloaded
        date: str
            Download only the data past a certain date
        calib : str
            Retrieve associated calibration files: 'none' (default), 'raw' for
            raw calibrations, or 'processed' for processed calibrations.
            
        Returns
        -------
        """
        checkInstruments = self.searchInstruments(star)
        for _, j in enumerate(self.instruments):
            print('\n*** Searching for {0} results ***\n'.format(j))
            if j in checkInstruments:
                self._searchAndDownload(star, j, downloadPath, date, calib)
            else:
                print('No {0} data\n'.format(j))
        print('\n*** Done ***\n')
        return 0

    def getFEROSdata(self, star, downloadPath=None, date=None, calib='none'):
        """
        Download FEROS spectra of a given star
        
        Parameters
        ----------
        star: str
            Name of the star
        downloadPath: str
            Directory where the data is downloaded
        date: float
            Download spectra younger than date (in modified Julian Date)
        calib : str
            Retrieve associated calibration files: 'none' (default), 'raw' for
            raw calibrations, or 'processed' for processed calibrations.
            
        Returns
        -------
        """
        checkInstruments = self.searchInstruments(star)
        esoInst = np.array(['FEROS'])
        for _, j in enumerate(esoInst):
            print('\n*** Searching for {0} results ***\n'.format(j))
            if j in checkInstruments:
                self._searchAndDownload(star, j, downloadPath, date, calib)
            else:
                print('No {0} data\n'.format(j))
        print('\n*** Done ***\n')
        return 0

    def getUVESdata(self, star, downloadPath=None, date=None, calib='none'):
        """
        Download UVES spectra of a given star
        
        Parameters
        ----------
        star: str
            Name of the star
        downloadPath: str
            Directory where the data is downloaded
        date: str
            Download only the data past a certain date
        calib : str
            Retrieve associated calibration files: 'none' (default), 'raw' for
            raw calibrations, or 'processed' for processed calibrations.
            
        Returns
        -------
        """
        checkInstruments = self.searchInstruments(star)
        esoInst = np.array(['UVES'])
        for _, j in enumerate(esoInst):
            print('\n*** Searching for {0} results ***\n'.format(j))
            if j in checkInstruments:
                self._searchAndDownload(star, j, downloadPath, date, calib)
            else:
                print('No {0} data\n'.format(j))
        print('\n*** Done ***\n')
        return 0

    def getHARPSdata(self, star, downloadPath=None, date=None, calib='none'):
        """
        Download HARPS spectra of a given star
        
        Parameters
        ----------
        star: str
            Name of the star
        downloadPath: str
            Directory where the data is downloaded
        date: str
            Download only the data past a certain date
        calib : str
            Retrieve associated calibration files: 'none' (default), 'raw' for
            raw calibrations, or 'processed' for processed calibrations.
            
        Returns
        -------
        """
        checkInstruments = self.searchInstruments(star)
        esoInst = np.array(['HARPS'])
        for _, j in enumerate(esoInst):
            print('\n*** Searching for {0} results ***\n'.format(j))
            if j in checkInstruments:
                self._searchAndDownload(star, j, downloadPath, date, calib)
            else:
                print('No {0} data\n'.format(j))
        print('\n*** Done ***\n')
        return 0

    def getESPRESSOdata(self,
                        star,
                        downloadPath=None,
                        date=None,
                        calib='none'):
        """
        Download ESPRESSO spectra of a given star
        
        Parameters
        ----------
        star: str
            Name of the star
        downloadPath: str
            Directory where the data is downloaded
        date: str
            Download only the data past a certain date
        calib : str
            Retrieve associated calibration files: 'none' (default), 'raw' for
            raw calibrations, or 'processed' for processed calibrations.
            
        Returns
        -------
        """
        checkInstruments = self.searchInstruments(star)
        esoInst = np.array(['ESPRESSO'])
        for _, j in enumerate(esoInst):
            print('\n*** Searching for {0} results ***\n'.format(j))
            if j in checkInstruments:
                self._searchAndDownload(star, j, downloadPath, date, calib)
            else:
                print('No {0} data\n'.format(j))
        print('\n*** Done ***\n')
        return 0
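
A minimal usage sketch for the class above; the username and star name are placeholders:

if __name__ == '__main__':
    query = ESOquery('your_eso_username')       # prompts for the ESO password
    print(query.searchInstruments('HD 10700'))  # dict of instrument -> number of observations
    query.getHARPSdata('HD 10700', downloadPath='./spectra')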