Example #1
def xml(data_dir, latitude, longitude):
    """
    Generate an XML string from the grib2 data cube

    args:
        data_dir - Directory where grib2 data cube is located
        latitude - Latitude
        longitude - Longitude

    returns - xml string
    """

    import os

    # build and execute command
    cmd = "{degrib_path} {data_dir}/all.ind -DP -pnt {latitude},{longitude} -XML 1 -geoData {data_dir}/geodata".format(
        data_dir = data_dir, latitude = latitude, longitude = longitude, degrib_path = degrib_path)

    utils.info(cmd)
    xml = ""
    for line in os.popen(cmd).readlines():

        xml += line

    # return output
    return xml
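For context, a minimal usage sketch of the helper above; the data directory and coordinates are placeholders, and degrib_path is assumed to be configured at module level as in the surrounding pysky code.

# Hypothetical usage: point the helper at an existing degrib data cube and a
# forecast point; the path and coordinates below are placeholders.
from pysky import grib2

ndfd_xml = grib2.xml('/path/to/data', 44.52, -122.90)
print(ndfd_xml)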
Example #2
def xml(data_dir,
        latitude,
        longitude,
        elements=None,
        product='time-series',
        begin=None,
        end=None):
    """
    Generate an XML string from the grib2 data cube. Arguments are similar to what is
    expected in the NWS NDFD REST API for the ndfdXMLclient.php interface:
    http://graphical.weather.gov/xml/rest.php

    args:
        data_dir - Directory where grib2 data cube is located (required)
        latitude - Latitude (required)
        longitude - Longitude (required)
        elements - List of elements, or None to return all params
        product - time-series or glance
        begin - begin time, or None to mean beginning of available period
        end - end time, or None to mean end of available period

    returns - xml string
    """

    import os

    geodata = geodata_path if geodata_path else data_dir + '/geodata'

    # build and execute command
    cmd = "{degrib_path} {data_dir}/all.ind -DP -pnt {latitude},{longitude} -geoData {geodata}".format(
        degrib_path=degrib_path,
        data_dir=data_dir,
        latitude=latitude,
        longitude=longitude,
        geodata=geodata)
    if product == "time-series":
        cmd += " -XML 1"
        if elements:
            cmd += " -ndfdConven 1 -ndfdVars " + ",".join(elements)
    elif product == "glance":
        cmd += " -XML 2"

    if begin:
        cmd += " -startTime " + begin
    if end:
        cmd += " -endTime " + end

    utils.info(cmd)
    xml = ""
    for line in os.popen(cmd).readlines():

        xml += line

    # return output
    return xml
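A usage sketch for the extended signature; the element names follow the NDFD conventions referenced in the docstring, and the ISO-style begin/end strings are an assumption about what degrib's -startTime/-endTime accept.

# Hypothetical call requesting a small set of NDFD elements over a fixed
# window; element names and the time format are assumptions, not verified here.
ndfd_xml = xml('/path/to/data', 44.52, -122.90,
               elements=['maxt', 'mint', 'pop12'],
               product='time-series',
               begin='2013-07-01T00:00:00',
               end='2013-07-04T00:00:00')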
Example #3
def xml(data_dir, latitude, longitude, elements=None, product='time-series', begin=None, end=None):
    """
    Generate an XML string from the grib2 data cube. Arguments are similar to what is
    expected in the NWS NDFD REST API for the ndfdXMLclient.php interface:
    http://graphical.weather.gov/xml/rest.php

    args:
        data_dir - Directory where grib2 data cube is located (required)
        latitude - Latitude (required)
        longitude - Longitude (required)
        elements - List of elements, or None to return all params
        product - time-series or glance
        begin - begin time, or None to mean beginning of available period
        end - end time, or None to mean end of available period

    returns - xml string
    """

    import os

    geodata = geodata_path if geodata_path else data_dir + '/geodata'

    # build and execute command
    cmd = "{degrib_path} {data_dir}/all.ind -DP -pnt {latitude},{longitude} -geoData {geodata}".format(
        degrib_path=degrib_path, data_dir=data_dir,
        latitude=latitude, longitude=longitude,
        geodata=geodata)
    if product == "time-series":
        cmd += " -XML 1"
        if elements:
            cmd += " -ndfdConven 1 -ndfdVars " + ",".join(elements)
    elif product == "glance":
        cmd += " -XML 2"

    if begin:
        cmd += " -startTime " + begin
    if end:
        cmd += " -endTime " + end

    utils.info(cmd)
    xml = ""
    for line in os.popen(cmd).readlines():

        xml += line

    # return output
    return xml
Example #4
def xml_byday(data_dir, latitude, longitude, format='12 hourly'):
    """
    Generate an XML string from the grib2 data cube. Arguments are similar to what is
    expected in the NWS NDFD REST API for the ndfdBrowserClientByDay.php interface:
    http://graphical.weather.gov/xml/rest.php

    args:
        data_dir - Directory where grib2 data cube is located (required)
        latitude - Latitude (required)
        longitude - Longitude (required)
        format - "12 hourly" or "24 hourly"

    returns - xml string
    """

    import os

    geodata = geodata_path if geodata_path else data_dir + '/geodata'

    # build and execute command
    cmd = "{degrib_path} {data_dir}/all.ind -DP -pnt {latitude},{longitude} -geoData {geodata}".format(
        degrib_path=degrib_path,
        data_dir=data_dir,
        latitude=latitude,
        longitude=longitude,
        geodata=geodata)
    if format == "12 hourly":
        cmd += " -XML 3"
    elif format == "24 hourly":
        cmd += " -XML 4"

    utils.info(cmd)
    xml = ""
    for line in os.popen(cmd).readlines():

        xml += line

    # return output
    return xml
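As a usage sketch (placeholder path and coordinates), the by-day variant only switches between the 12-hourly and 24-hourly XML products.

# Hypothetical call for a 24-hourly summary at a placeholder point.
byday_xml = xml_byday('/path/to/data', 44.52, -122.90, format='24 hourly')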
Example #5
def xml_byday(data_dir, latitude, longitude, format='12 hourly'):
    """
    Generate an XML string from the grib2 data cube. Arguments are similar to what is
    expected in the NWS NDFD REST API for the ndfdBrowserClientByDay.php interface:
    http://graphical.weather.gov/xml/rest.php

    args:
        data_dir - Directory where grib2 data cube is located (required)
        latitude - Latitude (required)
        longitude - Longitude (required)
        format - "12 hourly" or "24 hourly"

    returns - xml string
    """

    import os

    geodata = geodata_path if geodata_path else data_dir + '/geodata'

    # build and execute command
    cmd = "{degrib_path} {data_dir}/all.ind -DP -pnt {latitude},{longitude} -geoData {geodata}".format(
        degrib_path=degrib_path, data_dir=data_dir,
        latitude=latitude, longitude=longitude,
        geodata=geodata)
    if format == "12 hourly":
        cmd += " -XML 3"
    elif format == "24 hourly":
        cmd += " -XML 4"

    utils.info(cmd)
    xml = ""
    for line in os.popen(cmd).readlines():

        xml += line

    # return output
    return xml
Example #6
def get_forecast(latitude, longitude,
                 include_hourly=False, grib2_dir=None):
    """
    Get a forecast for the given latitude and longitude and return a
    json-formatted result

    Args:
        latitude - forecast point latitude
        longitude - forecast point longitude
        include_hourly - flag to include hourly forecast, defaults to false
        grib2_dir - grib2 data directory, if omitted,
            the SOAP web service will be used

    Returns: json-formatted string - see README
    """

    utils.info("Latitude: {0}".format(latitude))
    utils.info("Longitude: {0}".format(longitude))
    if include_hourly:
        utils.info("Include hourly forecast")
    if grib2_dir:
        utils.info("Using grib2 dir: {0}".format(grib2_dir))

    # If grib2 directory is provided, use grib2 files
    if grib2_dir:
        from pysky import grib2
        xml = grib2.xml(grib2_dir, latitude, longitude)
        utils.info(xml)
    # Otherwise, use SOAP web service
    else:
        from pysky import noaa_ws
        xml = noaa_ws.xml(latitude, longitude)
        utils.info(xml)

    # Process the XML and print the json-formatted result
    print(process_xml(xml, include_hourly))  # TODO fix json call
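A minimal usage sketch, assuming a degrib data cube already exists at the placeholder path; omitting grib2_dir would instead route the request through the SOAP web service.

# Hypothetical call using the local grib2 data cube (path is a placeholder).
get_forecast(44.52, -122.90, include_hourly=True, grib2_dir='/path/to/data')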
Example #7
def download(data_dir):
    """
    Download grib2 files to data directory

    args:
        data_dir Directory to store data files
    """
    import urllib, re, os, sys, time, urllib2, dateutil
    from datetime import datetime
    from dateutil import tz
    from dateutil.parser import parse

    files_downloaded = False # whether files have been downloaded

    # Loop over directories that have forecast data files
    for dir in ['VP.001-003','VP.004-007']: # loop over remote directories

        data_subdir = "{0}/{1}".format(data_dir, dir)

        utils.info('\nChecking directory {0}'.format(dir))

        # Create directory if it doesn't exist
        if not os.path.exists(data_subdir):
            os.mkdir(data_subdir)

        # Get directory listing so we can see if we need a newer version
        f = urllib.urlopen("{0}/{1}/ls-l".format(base_url, dir))
        data = f.read()
        lines = data.split("\n")
  
        # Loop over lines in directory listing
        for line in lines:

            # Check file modified date if this is a .bin file
            if line.find(".bin") != -1:

                # Split line to get date and filename
                month, day, rtime, filename = re.split(r"\s+", line)[5:9]

                # Split filename to get noaa param name
                param = filename.split('.')[1]

                # Only download files if we are interested in this parameter
                if param in noaa_params:

                    # Local path and time
                    local_path = "{0}/{1}/{2}".format(data_dir, dir, filename)
                    local_time = os.stat(local_path).st_mtime if os.path.exists(local_path) else 0
                    utils.info("Local: {0} last modified {1}".format(local_path, local_time))

                    # Remote path and time
                    remote_path = "{0}/{1}/{2}".format(base_url, dir, filename)
                    request = urllib2.urlopen(remote_path)
                    last_modified_str = request.info()['Last-Modified']
                    remote_time = _utc2local(parse(last_modified_str))
                    utils.info("Remote: {0} last modified {1}".format(remote_path, remote_time))

                    # If file does not exist or the local file is older than the remote file, download
                    if not os.path.exists(local_path) or local_time < remote_time:
                        utils.info('Downloading remote file {0}'.format(remote_path))
                        _download_file(request, local_path)
                        os.utime(local_path, (remote_time, remote_time))
                        files_downloaded = True
                    # Otherwise, just log some information
                    else:
                        utils.info('Local file is up-to-date, skipping download')
                    
    # Cube data files if any were downloaded
    if files_downloaded:
        cmd = "{degrib} {data_dir}/VP.001-003/*.bin {data_dir}/VP.004-007/*.bin -Data -Index {data_dir}/all.ind -out {data_dir}/all.dat".format(
            degrib = degrib_path,
            data_dir = data_dir
        )
        utils.info(cmd)
        output = ""
        for line in os.popen(cmd).readlines():
    
            output += line
    
        utils.info(output)
    else:
        utils.info('No files downloaded - skipping cube')
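A usage sketch, assuming base_url, noaa_params and degrib_path are configured at module level as the function above expects.

# Hypothetical refresh-then-query cycle; the data directory is a placeholder.
download('/path/to/data')
ndfd_xml = xml('/path/to/data', 44.52, -122.90)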
Example #8
def download(data_dir, new_data_dir=None):
    """
    Download grib2 files to data directory

    args:
        data_dir     Old directory containing existing data files
        new_data_dir Directory to save new data files. This will only be
                     created if files are downloaded. You may remove the
                     old data_dir after calling this function. Pass
                     None to indicate files shall be updated in-place.
    returns:
        True if new files were downloaded, False otherwise
    """
    import urllib2, re, os, sys, time, dateutil, shutil
    from datetime import datetime
    from dateutil import tz
    from dateutil.parser import parse

    if not new_data_dir:
        new_data_dir = data_dir

    files_downloaded = False  # whether files have been downloaded

    files_to_copy = []

    # Loop over directories that have forecast data files
    for dir in ['VP.001-003', 'VP.004-007']:  # loop over remote directories

        new_data_subdir = "{0}/{1}".format(new_data_dir, dir)

        utils.info('\nChecking directory {0}'.format(dir))

        # To save time, first check to see whether the directory listing file
        # itself was updated.
        check_local_path = "{0}/{1}/{2}".format(data_dir, dir, "ls-l")
        save_local_path = "{0}/{1}/{2}".format(new_data_dir, dir, "ls-l")
        ls_local_time = os.stat(check_local_path).st_mtime if os.path.exists(
            check_local_path) else 0
        utils.info("Local: {0} last modified {1}".format(
            check_local_path, ls_local_time))

        ls_request = urllib2.urlopen("{0}/{1}/ls-l".format(base_url, dir))
        last_modified_str = ls_request.info()['Last-Modified']
        ls_remote_time = _utc2local(parse(last_modified_str))
        utils.info("Remote: {0} last modified {1}".format(
            "ls-l", ls_remote_time))

        # If it was, download it and save it to the new directory
        ls_downloaded = False
        if not os.path.exists(
                check_local_path) or ls_local_time < ls_remote_time:
            ls_downloaded = True
            utils.info("Saving new ls-l file")
            if not os.path.exists(new_data_subdir):
                os.makedirs(new_data_subdir)
            _download_file(ls_request, save_local_path)
            os.utime(save_local_path, (ls_remote_time, ls_remote_time))
            files_downloaded = True
            ls_file = save_local_path
        else:  # If not, remember it to be copied into new directory later if needed
            files_to_copy.append((check_local_path, save_local_path))
            ls_file = check_local_path

        # Loop over each file in the directory listing
        for line in open(ls_file):
            # Check file modified date if this is a .bin file
            if line.find(".bin") != -1:

                # Split line to get date and filename
                month, day, rtime, filename = re.split(r"\s+", line)[5:9]

                # Split filename to get noaa param name
                param = filename.split('.')[1]

                # Only download files if we are interested in this parameter
                if noaa_params == 'ALL' or param in noaa_params:

                    # Local path and time
                    check_local_path = "{0}/{1}/{2}".format(
                        data_dir, dir, filename)
                    save_local_path = "{0}/{1}/{2}".format(
                        new_data_dir, dir, filename)

                    if ls_downloaded:  # Only bother checking if we downloaded a new ls file
                        local_time = os.stat(
                            check_local_path).st_mtime if os.path.exists(
                                check_local_path) else 0
                        utils.info("Local: {0} last modified {1}".format(
                            check_local_path, local_time))

                        # Remote path and time
                        remote_path = "{0}/{1}/{2}".format(
                            base_url, dir, filename)
                        request = urllib2.urlopen(remote_path)
                        last_modified_str = request.info()['Last-Modified']
                        remote_time = _utc2local(parse(last_modified_str))
                        utils.info("Remote: {0} last modified {1}".format(
                            remote_path, remote_time))

                        # If file does not exist or the local file is older than the remote file, download
                        if not os.path.exists(
                                check_local_path) or local_time < remote_time:
                            utils.info('Downloading remote file {0}'.format(
                                remote_path))
                            _download_file(request, save_local_path)
                            os.utime(save_local_path,
                                     (remote_time, remote_time))
                            files_downloaded = True
                        else:  # Otherwise, remember this file for if it needs to be copied later
                            files_to_copy.append(
                                (check_local_path, save_local_path))
                            utils.info(
                                'Local file is up-to-date, skipping download')
                    else:  # ls was not downloaded, so just remember the files for later copying if needed
                        files_to_copy.append(
                            (check_local_path, save_local_path))
                        utils.info(
                            'Local file is up-to-date, skipping download')

    # Cube data files if any were downloaded
    if files_downloaded:
        if data_dir != new_data_dir:
            # Copy the files remembered earlier into the new data directory
            for src, dst in files_to_copy:
                dst_dir = os.path.dirname(dst)
                if not os.path.exists(dst_dir):
                    os.makedirs(dst_dir)
                shutil.copy2(src, dst)
                utils.info("copied {0} to {1}".format(src, dst))

        cmd = "{degrib} {data_dir}/VP.001-003/*.bin {data_dir}/VP.004-007/*.bin -Data -Index {data_dir}/all.ind -out {data_dir}/all.dat".format(
            degrib=degrib_path, data_dir=new_data_dir)
        utils.info(cmd)
        output = ""
        for line in os.popen(cmd).readlines():
            output += line
        utils.info(output)
    else:
        utils.info('No files downloaded - skipping cube')
    return files_downloaded
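Since the docstring allows removing the old data_dir after the call, one possible pattern (a sketch, with placeholder paths) is to build the new cube in a separate directory and swap it in only when something was actually downloaded.

import shutil

# Hypothetical swap: only replace the old cube if new files were downloaded;
# new_data_dir is created by download() only in that case.
if download('/path/to/data', new_data_dir='/path/to/data.new'):
    shutil.rmtree('/path/to/data')
    shutil.move('/path/to/data.new', '/path/to/data')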
Example #9
def download(data_dir, new_data_dir=None):
    """
    Download grib2 files to data directory

    args:
        data_dir     Old directory containing existing data files
        new_data_dir Directory to save new data files. This will only be
                     created if files are downloaded. You may remove the
                     old data_dir after calling this function. Pass
                     None to indicate files shall be updated in-place.
    returns:
        True if new files were downloaded, False otherwise
    """
    import urllib2, re, os, sys, time, dateutil, shutil
    from datetime import datetime
    from dateutil import tz
    from dateutil.parser import parse

    if not new_data_dir:
        new_data_dir = data_dir

    files_downloaded = False # whether files have been downloaded

    files_to_copy = []

    # Loop over directories that have forecast data files
    for dir in ['VP.001-003','VP.004-007']: # loop over remote directories

        new_data_subdir = "{0}/{1}".format(new_data_dir, dir)

        utils.info('\nChecking directory {0}'.format(dir))

        # To save time, first check to see whether the directory listing file
        # itself was updated.
        check_local_path = "{0}/{1}/{2}".format(data_dir, dir, "ls-l")
        save_local_path = "{0}/{1}/{2}".format(new_data_dir, dir, "ls-l")
        ls_local_time = os.stat(check_local_path).st_mtime if os.path.exists(check_local_path) else 0
        utils.info("Local: {0} last modified {1}".format(check_local_path, ls_local_time))

        ls_request = urllib2.urlopen("{0}/{1}/ls-l".format(base_url, dir))
        last_modified_str = ls_request.info()['Last-Modified']
        ls_remote_time = _utc2local(parse(last_modified_str))
        utils.info("Remote: {0} last modified {1}".format("ls-l", ls_remote_time))

        # If it was, download it and save it to the new directory
        ls_downloaded = False
        if not os.path.exists(check_local_path) or ls_local_time < ls_remote_time:
            ls_downloaded = True
            utils.info("Saving new ls-l file")
            if not os.path.exists(new_data_subdir):
                os.makedirs(new_data_subdir)
            _download_file(ls_request, save_local_path)
            os.utime(save_local_path, (ls_remote_time, ls_remote_time))
            files_downloaded = True
            ls_file = save_local_path
        else: # If not, remember it to be copied into new directory later if needed
            files_to_copy.append((check_local_path, save_local_path))
            ls_file = check_local_path

        # Loop over each file in the directory listing
        for line in open(ls_file):
            # Check file modified date if this is a .bin file
            if line.find(".bin") != -1:
 
                # Split line to get date and filename
                month, day, rtime, filename = re.split(r"\s+", line)[5:9]
 
                # Split filename to get noaa param name
                param = filename.split('.')[1]
 
                # Only download files if we are interested in this parameter
                if noaa_params == 'ALL' or param in noaa_params:
 
                    # Local path and time
                    check_local_path = "{0}/{1}/{2}".format(data_dir, dir, filename)
                    save_local_path = "{0}/{1}/{2}".format(new_data_dir, dir, filename)

                    if ls_downloaded: # Only bother checking if we downloaded a new ls file
                        local_time = os.stat(check_local_path).st_mtime if os.path.exists(check_local_path) else 0
                        utils.info("Local: {0} last modified {1}".format(check_local_path, local_time))
 
                        # Remote path and time
                        remote_path = "{0}/{1}/{2}".format(base_url, dir, filename)
                        request = urllib2.urlopen(remote_path)
                        last_modified_str = request.info()['Last-Modified']
                        remote_time = _utc2local(parse(last_modified_str))
                        utils.info("Remote: {0} last modified {1}".format(remote_path, remote_time))
 
                        # If file does not exist or the local file is older than the remote file, download
                        if not os.path.exists(check_local_path) or local_time < remote_time:
                            utils.info('Downloading remote file {0}'.format(remote_path))
                            _download_file(request, save_local_path)
                            os.utime(save_local_path, (remote_time, remote_time))
                            files_downloaded = True
                        else: # Otherwise, remember this file for if it needs to be copied later
                            files_to_copy.append((check_local_path, save_local_path))
                            utils.info('Local file is up-to-date, skipping download')
                    else: # ls was not downloaded, so just remember the files for later copying if needed
                        files_to_copy.append((check_local_path, save_local_path))
                        utils.info('Local file is up-to-date, skipping download')
                    
    # Cube data files if any were downloaded
    if files_downloaded:
        if data_dir != new_data_dir:
            # Copy the files remembered earlier into the new data directory
            for src, dst in files_to_copy:
                dst_dir = os.path.dirname(dst)
                if not os.path.exists(dst_dir):
                    os.makedirs(dst_dir)
                shutil.copy2(src, dst)
                utils.info("copied {0} to {1}".format(src, dst))

        cmd = "{degrib} {data_dir}/VP.001-003/*.bin {data_dir}/VP.004-007/*.bin -Data -Index {data_dir}/all.ind -out {data_dir}/all.dat".format(
            degrib = degrib_path,
            data_dir = new_data_dir
        )
        utils.info(cmd)
        output = ""
        for line in os.popen(cmd).readlines():
            output += line
        utils.info(output)
    else:
        utils.info('No files downloaded - skipping cube')
    return files_downloaded
Example #10
def get_forecast(latitude, longitude, include_hourly=False, grib2_dir=None):
    """
    Get a forecast for the given latitude and longitude and return a
    json-formatted result

    Args:
        latitude - forecast point latitude
        longitude - forecast point longitude
        include_hourly - flag to include hourly forecast, defaults to false
        grib2_dir - grib2 data directory, if omitted,
            the SOAP web service will be used

    Returns: json-formatted string - see README
    """

    utils.info("Latitude: {0}".format(latitude))
    utils.info("Longitude: {0}".format(longitude))
    if include_hourly:
        utils.info("Include hourly forecast")
    if grib2_dir:
        utils.info("Using grib2 dir: {0}".format(grib2_dir))

    # If grib2 directory is provided, use grib2 files
    if grib2_dir:
        from pysky import grib2
        xml = grib2.xml(grib2_dir, latitude, longitude)
        utils.info(xml)
    # Otherwise, use SOAP web service
    else:
        from pysky import noaa_ws
        xml = noaa_ws.xml(latitude, longitude)
        utils.info(xml)

    # Process the XML and print the json-formatted result
    print(process_xml(xml, include_hourly))  # TODO fix json call