# Imports needed by this example; verification and wxdb are project-local
# modules assumed to be importable.
import datetime
import os
import shutil
import subprocess as sp

import verification
import wxdb


def weather_nightly(utDate='', wxDir='.', dbUpdate=1, log_writer=''):
    '''
    Copies /h (or /s) nightly data to archive location

    @type utDate: string
    @param utDate: UT date of data to copy (defaults to current UT date)
    @type wxDir: string
    @param wxDir: Directory to copy nightly data to (defaults to current directory)
    @type dbUpdate: int
    @param dbUpdate: Set to 0 to skip the koa.koawx database update (defaults to 1)
    @type log_writer: logging.Logger
    @param log_writer: Optional logger for status messages (default is '' for no logging)
    '''

    if utDate == '':
        utDate = datetime.datetime.utcnow().strftime('%Y-%m-%d')

    if not wxDir:
        return

    if log_writer:
        log_writer.info('weather_nightly.py started for {}'.format(utDate))

    verification.verify_date(utDate)

    utDate = utDate.replace('/', '-')
    utDate_split = utDate.split('-')
    year = int(utDate_split[0]) - 2000
    month = utDate_split[1]
    day = utDate_split[2]

    error = 0

    for i in range(1,3):
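        # i = 1, 2 selects the nightly directory for each telescope (K1, K2)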

        # Determine nightly location - /h or /s

        joinSeq = ('/h/nightly', str(i), '/', str(year), '/', month, '/', day)
        nightlyDir = ''.join(joinSeq)
        if not os.path.isdir(nightlyDir):
            nightlyDir = nightlyDir.replace('/h/', '/s/')
            if not os.path.isdir(nightlyDir):
                error = 1
                if log_writer:
                    log_writer.error('weather_nightly.py nightly directory not found for K{}'.format(i))
                if dbUpdate:
                    wxdb.updateWxDb(utDate, f'nightly{i}', 'ERROR', log_writer)
                continue

        joinSeq = (wxDir, '/nightly', str(i))
        wxNewDir = ''.join(joinSeq)

        # Copy the data.  Will also create the new directories.

        if log_writer:
            log_writer.info('weather_nightly.py copying nightly data from {}'.format(nightlyDir))
        shutil.copytree(nightlyDir, wxNewDir)

        # Go through and uncompress any .Z or .gz files

        for (dirpath, dirnames, filenames) in os.walk(wxNewDir):
            for file in filenames:
                cmd = ''
                if file.endswith('.gz'):
                    cmd = 'gunzip'
                if file.endswith('.Z'):
                    cmd = 'uncompress'
                if cmd:
                    sp.run([cmd, dirpath+'/'+file])

        # Update koa.koawx entry

        if dbUpdate: wxdb.updateWxDb(utDate, f'nightly{i}', datetime.datetime.utcnow().strftime('%Y%m%d+%H:%M:%S'), log_writer)

    if log_writer:
        log_writer.info('weather_nightly.py complete for {}'.format(utDate))
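
# A minimal usage sketch for weather_nightly(), not part of the original
# source. It assumes the /h or /s nightly directories are mounted and that
# the target directory is writable; the UT date is illustrative only and
# dbUpdate=0 skips the koa.koawx update.
import logging as lg
lg.basicConfig(level=lg.INFO)
example_logger = lg.getLogger('weather <example>')
weather_nightly('2019-01-01', '/tmp/wx', dbUpdate=0, log_writer=example_logger)
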
Example #2
# Imports needed by this example; verification and wxdb are project-local
# modules, and create_bokeh_plot is assumed to be defined or imported
# elsewhere in the project.
import datetime
import os
import urllib.request

import requests

import verification
import wxdb


def get_dimm_data(utDate='', mdir='.', log_writer=''):
    '''
    Retrieve DIMM, MASS and MASSPRO data from Mauna Kea Weather Center

    @type utDate: string
    @param utDate: UT date of data to retrieve (default is current UT date)
    @type mdir: string
    @param mdir: Directory to write data to (default is current directory)
    @type log_writer: logging.Logger
    @param log_writer: Optional logger for status messages (default is '' for no logging)
    '''

    if log_writer:
        log_writer.info('get_dimm_data.py started for {}'.format(utDate))

    # If no utDate supplied, use the current value

    if utDate == '':
        utDate = datetime.datetime.utcnow().strftime('%Y-%m-%d')

    verification.verify_date(utDate)

    utDate = utDate.replace('/', '-')
    split = utDate.split('-')
    year = split[0]
    month = split[1]
    day = split[2]
    dbDate = utDate
    utDate = utDate.replace('-', '')

    # Create directory

#    joinSeq = (dir, '/massdimm')
#    dir = ''.join(joinSeq)
    if not os.path.exists(mdir):
        os.makedirs(mdir)

    if log_writer:
        log_writer.info('get_dimm_data.py creating massdimm.html')

    joinSeq = (mdir, '/massdimm.html')
    writeFile = ''.join(joinSeq)
    with open(writeFile, 'w') as fp:
        fp.write('<html>\n')
        fp.write('<body>\n')
        fp.write('<title>Mass/Dimm Data</title>\n')

        # Type of data to retrieve

        files = {'dimm', 'mass', 'masspro'}

        # URL to retrieve data from

        url = 'http://mkwc.ifa.hawaii.edu/current/seeing'

        # Get data

        for f in files:

            # Construct URL

            joinSeq = (url, '/', f, '/', utDate, '.', f, '.dat')
            newUrl = ''.join(joinSeq)

            if log_writer:
                log_writer.info('get_dimm_data.py retrieving data from {}'.format(newUrl))

            # Connect to URL

            response = requests.get(newUrl)

            # If page exists, then get data

            if response.status_code == 200:

                # Construct file to write to

                joinSeq = (mdir, '/', utDate, '.mkwc.', f, '.dat')
                writeFile = ''.join(joinSeq)

                # Write data to file

                with open(writeFile, 'w') as fp2:
                    fp2.write(response.text)
                fp.write('<a href="./'+os.path.basename(writeFile)+'">'+os.path.basename(writeFile)+'<p>\n')
            else:
                if log_writer:
                    log_writer.error('get_dimm_data.py no {} data for {}'.format(f, utDate))

        # Get JPG plots

        plots = {
            'CFHT Weather Tower Seeing': 'http://hokukea.soest.hawaii.edu/current/seeing/images/YYYYMMDD.wrf-vs-mkam.timeseries.jpg',
            'CFHT MASS Profile': 'http://hokukea.soest.hawaii.edu/current/seeing/images/YYYYMMDD.massprofile.jpg',
            'CFHT DIMM Seeing Histogram': 'http://hokukea.soest.hawaii.edu/current/seeing/analysis/images/dimmdailyhistogram.jpg',
            'CFHT MASS Seeing Histogram': 'http://hokukea.soest.hawaii.edu/current/seeing/analysis/images/massdailyhistogram.jpg'
        }

        for key, url in plots.items():
            url = url.replace('YYYYMMDD', utDate)
            if log_writer:
                log_writer.info('get_dimm_data.py retrieving {}'.format(url))
            try:
                f = os.path.basename(url)
                if 'analysis' in url:
                    joinSeq = (year, month, day, '.', f)
                    f = ''.join(joinSeq)
                joinSeq = (mdir, '/', f)
                writeFile = ''.join(joinSeq)
                urllib.request.urlretrieve(url, writeFile)
                fp.write('<a href="./'+f+'"><img src="'+f+'" width="750" title="'+f+'"><p>\n')
            except Exception:
                if log_writer:
                    log_writer.error('get_dimm_data.py url does not exist - {}'.format(url))
                sendUrl = ''.join(('cmd=updateWxDb&utdate=', dbDate, '&column=cfht_seeing&value=ERROR'))
                wxdb.updateWxDb(sendUrl, log_writer)

        fp.write('</body>\n')
        fp.write('</html>')

    # Create bokeh plot 

    create_bokeh_plot(utDate, mdir)

    if log_writer:
        log_writer.info('get_dimm_data.py complete for {}'.format(utDate))
    sendUrl = ''.join(('cmd=updateWxDb&utdate=', dbDate, '&column=cfht_seeing&value=', datetime.datetime.utcnow().strftime('%Y%m%d+%H:%M:%S')))
    wxdb.updateWxDb(sendUrl, log_writer)

    for n in ['mass', 'dimm', 'masspro']:
        joinSeq = (mdir, '/', utDate, '.mkwc.', n, '.dat')
        writeFile = ''.join(joinSeq)
        if not os.path.exists(writeFile):
            with open(writeFile, 'w') as fp2:
                fp2.write('No Data')
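
# A minimal usage sketch for get_dimm_data(), not part of the original
# source. It assumes network access to mkwc.ifa.hawaii.edu, that the
# project-local create_bokeh_plot helper is available, and that wxdb can
# reach the koawx database; the UT date is illustrative only.
get_dimm_data('2019-01-01', '/tmp/massdimm')
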
Example #3
# Imports needed by this example; verification is a project-local module.
import datetime
import logging as lg
import os
from sys import argv

import verification

# wxDir is required; the UT date and the -nodb flag are optional

assert len(argv) >= 2, 'Usage: weather.py wxDir [YYYY-MM-DD] [-nodb]'

# Defaults: current UT date, database updates enabled

utDate = datetime.datetime.utcnow().strftime('%Y-%m-%d')
dbUpdate = 1

# Parse UT date from argument list

if len(argv) >= 2:
    wxDir = argv[1]
    if len(argv) >= 3:
        utDate = argv[2].replace('/', '-')
    if len(argv) == 4:
        dbUpdate = 0

# Verify date, will exit if verification fails

verification.verify_date(utDate)

# Setup logging

user = os.getlogin()
joinSeq = ('weather <', user, '>')
writerName = ''.join(joinSeq)
log_writer = lg.getLogger(writerName)
log_writer.setLevel(lg.INFO)

# Create a file handler

joinSeq = (wxDir, '/weather_', utDate.replace('-', ''), '.log')
logFile = ''.join(joinSeq)
log_handler = lg.FileHandler(logFile)
log_handler.setLevel(lg.INFO)
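
# The original example ends here. A sketch of how the handler would
# typically be attached and the pipeline invoked, assuming the functions
# from the other examples are importable here and that the massdimm data
# lives under wxDir (the directory layout is illustrative):
log_handler.setFormatter(lg.Formatter('%(asctime)s %(levelname)s %(message)s'))
log_writer.addHandler(log_handler)

weather_nightly(utDate, wxDir, dbUpdate, log_writer)
get_dimm_data(utDate, wxDir + '/massdimm', log_writer)
skyprobe(utDate, wxDir, log_writer)
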
Example #4
File: skyprobe.py  Project: jmader/weather
# Imports needed by this example; verification and wxdb are project-local
# modules assumed to be importable.
import datetime
import os
import urllib.request

import verification
import wxdb


def skyprobe(utDate='', dir='.', log_writer=''):
    '''
    Retrieve skyprobe image from CFHT

    @type utDate: string
    @param utDate: UT date of data to retrieve (default is current UT date)
    @type dir: string
    @param dir: Directory to write data to (default is current directory)
    @type log_writer: logging.Logger
    @param log_writer: Optional logger for status messages (default is '' for no logging)
    '''

    if log_writer:
        log_writer.info('skyprobe.py started for {}'.format(utDate))

    # If no utDate supplied, use the current value

    if utDate == '':
        utDate = datetime.datetime.utcnow().strftime('%Y-%m-%d')

    verification.verify_date(utDate)

    utDate = utDate.replace('/', '-')
    dbDate = utDate
    utDate = utDate.replace('-', '')

    if log_writer:
        log_writer.info('skyprobe.py gathering data from CFHT')

    # URL to copy

    url = 'http://nenue.cfht.hawaii.edu/Instruments/Elixir/skyprobe/archive/mcal_'
    joinSeq = (url, utDate, '.png')
    url = ''.join(joinSeq)

    # Create directory

    writeDir = dir
    if not os.path.exists(writeDir):
        os.makedirs(writeDir)

    # Construct file to write to

    joinSeq = (dir, '/skyprobe.png')
    writeFile = ''.join(joinSeq)

    try:
        urllib.request.urlretrieve(url, writeFile)
    except Exception:
        if log_writer:
            log_writer.error('skyprobe.py url does not exist - {}'.format(url))
        sendUrl = ''.join(('cmd=updateWxDb&utdate=', dbDate,
                           '&column=skyprobe&value=ERROR'))
        wxdb.updateWxDb(sendUrl, log_writer)
        return

    # Create HTML page

    if log_writer:
        log_writer.info('skyprobe.py creating skyprobe.html')

    joinSeq = (dir, '/skyprobe.html')
    writeFile = ''.join(joinSeq)
    with open(writeFile, 'w') as fp:
        fp.write('<html>\n')
        fp.write('<body>\n')
        fp.write('<title>CFHT SkyProbe</title>\n')
        #        fp.write('<h1>CFHT SkyProbe for ' + utDate + '</h1>\n')
        fp.write(
            '<a href="./skyprobe.png"><img src="./skyprobe.png" title="skyprobe.png"></a>\n'
        )
        fp.write('</body>\n')
        fp.write('</html>')

    # Update koawx entry

    if log_writer:
        log_writer.info('skyprobe.py complete for {}'.format(utDate))
    sendUrl = ''.join(
        ('cmd=updateWxDb&utdate=', dbDate, '&column=skyprobe&value=',
         datetime.datetime.utcnow().strftime('%Y%m%d+%H:%M:%S')))
    wxdb.updateWxDb(sendUrl, log_writer)
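
# A minimal usage sketch for skyprobe(), not part of the original source.
# It assumes network access to nenue.cfht.hawaii.edu and that wxdb can
# reach the koawx database; the UT date is illustrative only.
skyprobe('2019-01-01', '/tmp/skyprobe')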