Example No. 1
def unpackFromCache(cache_key, to_directory):
    ''' If the specified cache key exists, unpack the tarball into the
        specified directory, otherwise raise KeyError.
    '''
    if cache_key is None:
        raise KeyError('"None" is never in cache')
    cache_dir = folders.cacheDirectory()
    fsutils.mkDirP(cache_dir)
    path = os.path.join(cache_dir, cache_key)
    logger.debug('attempt to unpack from cache %s -> %s', path, to_directory)
    try:
        unpackFrom(path, to_directory)
        try:
            shutil.copy(path + '.json', os.path.join(to_directory, '.yotta_origin.json'))
        except IOError as e:
            if e.errno == errno.ENOENT:
                pass
            else:
                raise
        cache_logger.debug('unpacked %s from cache into %s', cache_key, to_directory)
        return
    except IOError as e:
        if e.errno == errno.ENOENT:
            cache_logger.debug('%s not in cache', cache_key)
            raise KeyError('not in cache')
Example No. 2
def unpackFromCache(cache_key, to_directory):
    ''' If the specified cache key exists, unpack the tarball into the
        specified directory, otherwise raise KeyError.
    '''
    if cache_key is None:
        raise KeyError('"None" is never in cache')
    cache_dir = folders.cacheDirectory()
    fsutils.mkDirP(cache_dir)
    path = os.path.join(cache_dir, cache_key)
    logger.debug('attempt to unpack from cache %s -> %s', path, to_directory)
    try:
        unpackFrom(path, to_directory)
        try:
            shutil.copy(path + '.json',
                        os.path.join(to_directory, '.yotta_origin.json'))
        except IOError as e:
            if e.errno == errno.ENOENT:
                pass
            else:
                raise
        cache_logger.debug('unpacked %s from cache into %s', cache_key,
                           to_directory)
        return
    except IOError as e:
        if e.errno == errno.ENOENT:
            cache_logger.debug('%s not in cache', cache_key)
            raise KeyError('not in cache')
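The KeyError raised on a miss is the contract callers build on: try the cache first, and fall back to a fresh download only when the key is absent. A minimal sketch of such a caller, assuming the downloadToCache shown in the next example; unpackOrDownload, response and expected_sha256 are hypothetical placeholders:

def unpackOrDownload(cache_key, to_directory, response, expected_sha256):
    # hypothetical helper, not part of yotta: try the cache first, and on a
    # miss download the tarball into the cache before unpacking it
    try:
        unpackFromCache(cache_key, to_directory)
    except KeyError:
        downloadToCache(response, {'sha256': expected_sha256}, cache_key)
        unpackFromCache(cache_key, to_directory)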
Example No. 3
def downloadToCache(stream, hashinfo={}, cache_key=None):
    ''' Download the specified stream to a temporary cache directory, and
        return (path to the downloaded file, cache key).
        If cache_key is None, then a cache key will be generated and returned,
        but you will probably want to remove the cache file yourself (this is
        safe).
    '''
    hash_name  = None
    hash_value = None
    m = None

    if len(hashinfo):
        # check for hashes in preferred order. Currently this is just sha256
        # (which the registry uses). Initial investigations suggest that github
        # doesn't return a header with the hash of the file being downloaded.
        for h in ('sha256',):
            if h in hashinfo:
                hash_name  = h
                hash_value = hashinfo[h]
                m = getattr(hashlib, h)()
                break
        if not hash_name:
            logger.warning('could not find supported hash type in %s', hashinfo)

    if cache_key is None:
        cache_key = '%032x' % random.getrandbits(256)

    cache_dir = folders.cacheDirectory()
    fsutils.mkDirP(cache_dir)
    download_fname = os.path.join(cache_dir, cache_key)

    with _openExclusively(download_fname) as f:
        f.seek(0)
        for chunk in stream.iter_content(1024):
            f.write(chunk)
            if hash_name:
                m.update(chunk)

        if hash_name:
            calculated_hash = m.hexdigest()
            logger.debug(
                'calculated %s hash: %s check against: %s' % (
                    hash_name, calculated_hash, hash_value
                )
            )
            if hash_value and (hash_value != calculated_hash):
                raise Exception('Hash verification failed.')
        logger.debug('wrote tarfile of size: %s to %s', f.tell(), download_fname)
        f.truncate()

    return (download_fname, cache_key)
Example No. 4
def downloadToCache(stream, hashinfo={}, cache_key=None):
    ''' Download the specified stream to a temporary cache directory, and
        return (path to the downloaded file, cache key).
        If cache_key is None, then a cache key will be generated and returned,
        but you will probably want to remove the cache file yourself (this is
        safe).
    '''
    hash_name = None
    hash_value = None
    m = None

    if len(hashinfo):
        # check for hashes in preferred order. Currently this is just sha256
        # (which the registry uses). Initial investigations suggest that github
        # doesn't return a header with the hash of the file being downloaded.
        for h in ('sha256', ):
            if h in hashinfo:
                hash_name = h
                hash_value = hashinfo[h]
                m = getattr(hashlib, h)()
                break
        if not hash_name:
            logger.warning('could not find supported hash type in %s',
                           hashinfo)

    if cache_key is None:
        cache_key = '%032x' % random.getrandbits(256)

    cache_dir = folders.cacheDirectory()
    fsutils.mkDirP(cache_dir)
    download_fname = os.path.join(cache_dir, cache_key)

    with _openExclusively(download_fname) as f:
        f.seek(0)
        for chunk in stream.iter_content(1024):
            f.write(chunk)
            if hash_name:
                m.update(chunk)

        if hash_name:
            calculated_hash = m.hexdigest()
            logger.debug('calculated %s hash: %s check against: %s' %
                         (hash_name, calculated_hash, hash_value))
            if hash_value and (hash_value != calculated_hash):
                raise Exception('Hash verification failed.')
        logger.debug('wrote tarfile of size: %s to %s', f.tell(),
                     download_fname)
        f.truncate()

    return (download_fname, cache_key)
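The hash check above is computed incrementally: each chunk is fed to the hashlib object as it is written, so the downloaded file never has to be re-read for verification. A standalone sketch of that pattern (illustrative only, not yotta code; writeWithHash is a made-up name):

import hashlib

def writeWithHash(chunks, out_file, expected_sha256=None):
    # update the digest chunk by chunk while writing, compare once at the end
    digest = hashlib.sha256()
    for chunk in chunks:
        out_file.write(chunk)
        digest.update(chunk)
    if expected_sha256 and digest.hexdigest() != expected_sha256:
        raise Exception('Hash verification failed.')
    return digest.hexdigest()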
Example No. 5
def pruneCache():
    ''' Prune the cache '''
    cache_dir = folders.cacheDirectory()
    def fullpath(f):
        return os.path.join(cache_dir, f)
    # ensure cache exists
    fsutils.mkDirP(cache_dir)
    for f in sorted(
            [f for f in os.listdir(cache_dir) if os.path.isfile(fullpath(f))],
            key = lambda f: os.stat(fullpath(f)).st_mtime
        )[Max_Cached_Modules:]:
        cache_logger.debug('cleaning up cache file %s', f)
        fsutils.rmF(fullpath(f))
    cache_logger.debug('cache pruned to %s items', Max_Cached_Modules)
Example No. 6
def pruneCache():
    ''' Prune the cache '''
    cache_dir = folders.cacheDirectory()

    def fullpath(f):
        return os.path.join(cache_dir, f)

    # ensure cache exists
    fsutils.mkDirP(cache_dir)
    for f in sorted(
        [f for f in os.listdir(cache_dir) if os.path.isfile(fullpath(f))],
            key=lambda f: os.stat(fullpath(f)).st_mtime)[Max_Cached_Modules:]:
        cache_logger.debug('cleaning up cache file %s', f)
        fsutils.rmF(fullpath(f))
    cache_logger.debug('cache pruned to %s items', Max_Cached_Modules)
Example No. 7
def unpackFromCache(cache_key, to_directory):
    ''' If the specified cache key exists, unpack the tarball into the
        specified directory, otherwise raise KeyError.
    '''
    if cache_key is None:
        raise KeyError('"None" is never in cache')
    cache_dir = folders.cacheDirectory()
    fsutils.mkDirP(cache_dir)
    path = os.path.join(cache_dir, cache_key)
    try:
        unpackFrom(path, to_directory)
        cache_logger.debug('unpacked %s from cache into %s', cache_key, to_directory)
        return
    except IOError as e:
        if e.errno == errno.ENOENT:
            cache_logger.debug('%s not in cache', cache_key)
            raise KeyError('not in cache')
Example No. 8
def unpackFromCache(cache_key, to_directory):
    ''' If the specified cache key exists, unpack the tarball into the
        specified directory, otherwise raise KeyError.
    '''
    if cache_key is None:
        raise KeyError('"None" is never in cache')
    cache_dir = folders.cacheDirectory()
    fsutils.mkDirP(cache_dir)
    path = os.path.join(cache_dir, cache_key)
    try:
        unpackFrom(path, to_directory)
        cache_logger.debug('unpacked %s from cache into %s', cache_key,
                           to_directory)
        return
    except IOError as e:
        if e.errno == errno.ENOENT:
            cache_logger.debug('%s not in cache', cache_key)
            raise KeyError('not in cache')
Example No. 9
def pruneCache():
    ''' Prune the cache '''
    cache_dir = folders.cacheDirectory()
    def fullpath(f):
        return os.path.join(cache_dir, f)
    # ensure cache exists
    fsutils.mkDirP(cache_dir)
    max_cached_modules = getMaxCachedModules()
    for f in sorted(
            [f for f in os.listdir(cache_dir) if
                os.path.isfile(fullpath(f)) and not f.endswith('.json')
            ],
            key = lambda f: os.stat(fullpath(f)).st_mtime,
            reverse = True
        )[max_cached_modules:]:
        cache_logger.debug('cleaning up cache file %s', f)
        removeFromCache(f)
    cache_logger.debug('cache pruned to %s items', max_cached_modules)
Example No. 10
def pruneCache():
    ''' Prune the cache '''
    cache_dir = folders.cacheDirectory()

    def fullpath(f):
        return os.path.join(cache_dir, f)

    # ensure cache exists
    fsutils.mkDirP(cache_dir)
    max_cached_modules = getMaxCachedModules()
    for f in sorted([
            f for f in os.listdir(cache_dir)
            if os.path.isfile(fullpath(f)) and not f.endswith('.json')
    ],
                    key=lambda f: os.stat(fullpath(f)).st_mtime,
                    reverse=True)[max_cached_modules:]:
        cache_logger.debug('cleaning up cache file %s', f)
        removeFromCache(f)
    cache_logger.debug('cache pruned to %s items', max_cached_modules)
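Unlike the variant in Examples No. 5 and 6, these two sort with reverse=True and skip the .json origin sidecars, so the slice past max_cached_modules selects the least recently modified tarballs for removal (with the earlier ascending sort it is the newest files that fall past the cutoff). A tiny illustration of the selection logic, using made-up names and mtimes:

entries = {'a.tar': 100, 'b.tar': 300, 'c.tar': 200, 'd.tar': 400}  # name -> fake mtime
keep = 2
newest_first = sorted(entries, key=lambda name: entries[name], reverse=True)
print(newest_first[keep:])  # ['c.tar', 'a.tar'] -- the two oldest entries would be pruned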
Example No. 11
def downloadToCache(stream, hashinfo={}, cache_key=None, origin_info=dict()):
    ''' Download the specified stream to a temporary cache directory, and
        returns a cache key that can be used to access/remove the file.
        If cache_key is None, then a cache key will be generated and returned.
        You will probably want to use removeFromCache(cache_key) to remove it.
    '''
    hash_name  = None
    hash_value = None
    m = None

    if len(hashinfo):
        # check for hashes in preferred order. Currently this is just sha256
        # (which the registry uses). Initial investigations suggest that github
        # doesn't return a header with the hash of the file being downloaded.
        for h in ('sha256',):
            if h in hashinfo:
                hash_name  = h
                hash_value = hashinfo[h]
                m = getattr(hashlib, h)()
                break
        if not hash_name:
            logger.warning('could not find supported hash type in %s', hashinfo)

    if cache_key is None:
        cache_key = '%032x' % random.getrandbits(256)

    cache_dir = folders.cacheDirectory()
    fsutils.mkDirP(cache_dir)
    cache_as = os.path.join(cache_dir, cache_key)
    file_size = 0

    (download_file, download_fname) = tempfile.mkstemp(dir=cache_dir)
    with os.fdopen(download_file, 'wb') as f:
        f.seek(0)
        for chunk in stream.iter_content(4096):
            f.write(chunk)
            if hash_name:
                m.update(chunk)

        if hash_name:
            calculated_hash = m.hexdigest()
            logger.debug(
                'calculated %s hash: %s check against: %s' % (
                    hash_name, calculated_hash, hash_value
                )
            )
            if hash_value and (hash_value != calculated_hash):
                raise Exception('Hash verification failed.')
        file_size = f.tell()
        logger.debug('wrote tarfile of size: %s to %s', file_size, download_fname)
        f.truncate()
    try:
        os.rename(download_fname, cache_as)
        extended_origin_info = {
            'hash': hashinfo,
            'size': file_size
        }
        extended_origin_info.update(origin_info)
        ordered_json.dump(cache_as + '.json', extended_origin_info)
    except OSError as e:
        if e.errno == errno.ENOENT:
            # if we failed, it's because the file already exists (probably
            # because another process got there first), so just rm our
            # temporary file and continue
            cache_logger.debug('another process downloaded %s first', cache_key)
            fsutils.rmF(download_fname)
        else:
            raise

    return cache_key
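This later version also makes the cache write atomic: the stream goes into a tempfile.mkstemp file inside the cache directory and is only renamed to its final cache_key name once it is complete, so a concurrent reader never sees a half-downloaded tarball, and an origin-info .json sidecar is written alongside it. A stripped-down sketch of the write-then-rename pattern (an illustration under those assumptions, not yotta code):

import os
import tempfile

def atomicWrite(directory, final_name, data):
    # write to a temporary file in the same directory, then rename into place
    fd, tmp_path = tempfile.mkstemp(dir=directory)
    with os.fdopen(fd, 'wb') as f:
        f.write(data)
    os.rename(tmp_path, os.path.join(directory, final_name))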
Example No. 12
def removeFromCache(cache_key):
    f = os.path.join(folders.cacheDirectory(), cache_key)
    fsutils.rmF(f)
    # remove any metadata too, if it exists
    fsutils.rmF(f + '.json')
Example No. 13
def downloadToCache(stream, hashinfo={}, cache_key=None, origin_info=dict()):
    ''' Download the specified stream to a temporary cache directory, and
        returns a cache key that can be used to access/remove the file.
        If cache_key is None, then a cache key will be generated and returned.
        You will probably want to use removeFromCache(cache_key) to remove it.
    '''
    hash_name = None
    hash_value = None
    m = None

    if len(hashinfo):
        # check for hashes in preferred order. Currently this is just sha256
        # (which the registry uses). Initial investigations suggest that github
        # doesn't return a header with the hash of the file being downloaded.
        for h in ('sha256', ):
            if h in hashinfo:
                hash_name = h
                hash_value = hashinfo[h]
                m = getattr(hashlib, h)()
                break
        if not hash_name:
            logger.warning('could not find supported hash type in %s',
                           hashinfo)

    if cache_key is None:
        cache_key = '%032x' % random.getrandbits(256)

    cache_dir = folders.cacheDirectory()
    fsutils.mkDirP(cache_dir)
    cache_as = os.path.join(cache_dir, cache_key)
    file_size = 0

    (download_file, download_fname) = tempfile.mkstemp(dir=cache_dir)
    with os.fdopen(download_file, 'wb') as f:
        f.seek(0)
        for chunk in stream.iter_content(4096):
            f.write(chunk)
            if hash_name:
                m.update(chunk)

        if hash_name:
            calculated_hash = m.hexdigest()
            logger.debug('calculated %s hash: %s check against: %s' %
                         (hash_name, calculated_hash, hash_value))
            if hash_value and (hash_value != calculated_hash):
                raise Exception('Hash verification failed.')
        file_size = f.tell()
        logger.debug('wrote tarfile of size: %s to %s', file_size,
                     download_fname)
        f.truncate()
    try:
        os.rename(download_fname, cache_as)
        extended_origin_info = {'hash': hashinfo, 'size': file_size}
        extended_origin_info.update(origin_info)
        ordered_json.dump(cache_as + '.json', extended_origin_info)
    except OSError as e:
        if e.errno == errno.ENOENT:
            # if we failed, it's because the file already exists (probably
            # because another process got there first), so just rm our
            # temporary file and continue
            cache_logger.debug('another process downloaded %s first',
                               cache_key)
            fsutils.rmF(download_fname)
        else:
            raise

    return cache_key
Example No. 14
def removeFromCache(cache_key):
    f = os.path.join(folders.cacheDirectory(), cache_key)
    fsutils.rmF(f)
    # remove any metadata too, if it exists
    fsutils.rmF(f + '.json')
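Taken together, the later API is meant to be used roughly as its docstrings suggest: download into the cache (letting downloadToCache generate the key), unpack, then clean up with removeFromCache. A hypothetical end-to-end sketch built from the functions shown above; response is assumed to be an HTTP response streamed with requests (stream=True) and sha256 is a placeholder value:

def fetchAndUnpack(response, sha256, to_directory):
    # hypothetical flow, not yotta code
    cache_key = downloadToCache(response, {'sha256': sha256})
    try:
        unpackFromCache(cache_key, to_directory)
    finally:
        # the key was generated for us, so we remove the cached tarball ourselves
        removeFromCache(cache_key)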