Example #1
File: hdf5.py Project: ocefpaf/ulmo
def update_site_list(sites=None, state_code=None, huc=None, bounding_box=None, 
        county_code=None, parameter_code=None, site_type=None, service=None, 
        input_file=None, complevel=None, complib=None, autorepack=True, path=None,
        **kwargs):
    """Update cached site information. 

    See ulmo.usgs.nwis.core.get_sites() for a description of the regular
    parameters; only the extra parameters used for caching are listed below.

    Parameters
    ----------
    path : ``None`` or file path
        Path to the hdf5 file to be queried; if ``None``, then the default path
        will be used. If the path is a directory, then multiple hdf5 files will
        be kept so that file sizes remain small for faster repacking.
    input_file : ``None``, file path or file object
        If ``None`` (default), then the NWIS web services will be queried, but
        if a file is passed then this file will be used instead of requesting
        data from the NWIS web services.
    complevel : ``None`` or int {0-9}
        Open the hdf5 file with this level of compression. If ``None``
        (default), then the maximum compression level will be used if a
        compression library can be found. If set to 0, then no compression will
        be used regardless of what complib is set to.
    complib : ``None`` or str {'zlib', 'bzip2', 'lzo', 'blosc'}
        Open the hdf5 file with this type of compression. If ``None``
        (default), then the best compression library available on your system
        will be selected. If the complevel argument is set to 0, then no
        compression will be used.
    autorepack : bool
        Whether or not to automatically repack the hdf5 file after updating.
        There is a tradeoff between performance and disk space here: large
        files take longer to repack but also tend to grow quickly. The default
        of True conserves disk space, since unchecked file growth can quickly
        consume it. If you set this to False, you can manually repack files
        with repack().

    Returns
    -------
    None : ``None``
    """
    sites_store_path = _get_store_path(path, 'sites.h5')

    new_sites = core.get_sites(sites=sites, state_code=state_code, huc=huc, bounding_box=bounding_box, 
        county_code=county_code, parameter_code=parameter_code, site_type=site_type, service=service, 
        input_file=input_file, **kwargs)

    if len(new_sites) == 0:
        return

    comp_kwargs = _compression_kwargs(complevel=complevel, complib=complib)
    with _get_store(sites_store_path, mode='a', **comp_kwargs) as store:
        _update_stored_sites(store, new_sites)

    if autorepack:
        repack(sites_store_path, complevel=complevel, complib=complib)
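
A minimal usage sketch for the example above (hedged: the import path assumes
this function is exposed as ulmo.usgs.nwis.hdf5.update_site_list, per the file
attribution, and that the same module offers a get_sites() reader for the
cached store):

# Hypothetical usage -- the module path and the get_sites() reader are
# assumptions, not confirmed by the code above.
from ulmo.usgs.nwis import hdf5

# Fetch all NWIS sites for Texas and merge them into the default sites.h5
# cache; with autorepack=True (the default) the file is repacked afterwards.
hdf5.update_site_list(state_code='TX')

# Read the cached site metadata back out of the HDF5 store.
sites = hdf5.get_sites()
print(len(sites), 'sites cached')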
Example #2
def update_site_list(sites=None, state_code=None, service=None, path=None,
        input_file=None, complevel=None, complib=None, autorepack=True):
    """Update cached site information.

    Parameters
    ----------
    sites : str, iterable of strings or ``None``
        The site to use or list of sites to use; lists will be joined by a ','.
    state_code : str or ``None``
        Two-letter state code used in stateCd parameter.
    service : {``None``, 'instantaneous', 'iv', 'daily', 'dv'}
        The service to use, either "instantaneous", "daily", or ``None``
        (default).  If set to ``None``, then both services are used.  The
        abbreviations "iv" and "dv" can be used for "instantaneous" and "daily",
        respectively.
    path : ``None`` or file path
        Path to the hdf5 file to be queried; if ``None``, then the default path
        will be used. If the path is a directory, then multiple hdf5 files will
        be kept so that file sizes remain small for faster repacking.
    input_file : ``None``, file path or file object
        If ``None`` (default), then the NWIS web services will be queried, but
        if a file is passed then this file will be used instead of requesting
        data from the NWIS web services.
    complevel : ``None`` or int {0-9}
        Open the hdf5 file with this level of compression. If ``None``
        (default), then the maximum compression level will be used if a
        compression library can be found. If set to 0, then no compression will
        be used regardless of what complib is set to.
    complib : ``None`` or str {'zlib', 'bzip2', 'lzo', 'blosc'}
        Open the hdf5 file with this type of compression. If ``None``
        (default), then the best compression library available on your system
        will be selected. If the complevel argument is set to 0, then no
        compression will be used.
    autorepack : bool
        Whether or not to automatically repack the hdf5 file after updating.
        There is a tradeoff between performance and disk space here: large
        files take longer to repack but also tend to grow quickly. The default
        of True conserves disk space, since unchecked file growth can quickly
        consume it. If you set this to False, you can manually repack files
        with repack().

    Returns
    -------
    None : ``None``
    """
    sites_store_path = _get_store_path(path, 'sites.h5')

    new_sites = core.get_sites(sites=sites, state_code=state_code, service=service,
            input_file=input_file)

    if len(new_sites) == 0:
        return

    comp_kwargs = _compression_kwargs(complevel=complevel, complib=complib)
    with _get_store(sites_store_path, 'a', **comp_kwargs) as store:
        _update_stored_sites(store, new_sites)

    if autorepack:
        repack(sites_store_path, complevel=complevel, complib=complib)
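
A sketch of the compression and repacking knobs (hedged: it assumes
update_site_list and repack are both importable from ulmo.usgs.nwis.hdf5, as
the repack() call inside the function suggests, and the store path below is
made up for illustration):

# Hypothetical batch update -- import path and store location are assumptions.
from ulmo.usgs.nwis.hdf5 import repack, update_site_list

store_path = 'nwis_cache/sites.h5'  # hypothetical cache file

# Skip the automatic repack on each update when batching several updates,
# then repack once at the end to reclaim space left by repeated appends.
for state in ('TX', 'NM', 'OK'):
    update_site_list(state_code=state, path=store_path,
                     complevel=5, complib='zlib', autorepack=False)

repack(store_path, complevel=5, complib='zlib')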