Example #1
def unpackFromCache(cache_key, to_directory):
    ''' If the specified cache key exists, unpack the tarball into the
        specified directory, otherwise raise KeyError.
    '''
    if cache_key is None:
        raise KeyError('"None" is never in cache')
    cache_dir = folders.cacheDirectory()
    fsutils.mkDirP(cache_dir)
    path = os.path.join(cache_dir, cache_key)
    logger.debug('attempt to unpack from cache %s -> %s', path, to_directory)
    try:
        unpackFrom(path, to_directory)
        try:
            shutil.copy(path + '.json', os.path.join(to_directory, '.yotta_origin.json'))
        except IOError as e:
            if e.errno == errno.ENOENT:
                pass
            else:
                raise
        cache_logger.debug('unpacked %s from cache into %s', cache_key, to_directory)
        return
    except IOError as e:
        if e.errno == errno.ENOENT:
            cache_logger.debug('%s not in cache', cache_key)
            raise KeyError('not in cache')
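
The KeyError contract above makes a cache-first install pattern straightforward: try to unpack from the cache, and fall back to a download only on a miss. A minimal usage sketch; the helper name below is hypothetical and the import path is an assumption about where these functions live in yotta:

# assumed import path, for illustration only
from yotta.lib.access_common import unpackFromCache

def tryInstallFromCache(cache_key, to_directory):
    # hypothetical helper: True on a cache hit, False on a miss (the caller
    # is then expected to download the component itself)
    try:
        unpackFromCache(cache_key, to_directory)
        return True
    except KeyError:
        # also covers the NotInCache subclass raised by the later revisions
        # of unpackFromCache shown further down
        return False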
Example #2
def pruneCache():
    ''' Prune the cache '''
    cache_dir = folders.cacheDirectory()

    def fullpath(f):
        return os.path.join(cache_dir, f)

    def getMTimeSafe(f):
        # it's possible that another process removed the file before we stat
        # it, handle this gracefully
        try:
            return os.stat(f).st_mtime
        except FileNotFoundError:
            # treat the vanished file as freshly modified
            # (time.clock() was removed in Python 3.8; use time.time())
            import time
            return time.time()

    # ensure cache exists
    fsutils.mkDirP(cache_dir)
    max_cached_modules = getMaxCachedModules()
    for f in sorted([
            f for f in os.listdir(cache_dir) if os.path.isfile(fullpath(f))
            and not f.endswith('.json') and not f.endswith('.locked')
    ],
                    key=lambda f: getMTimeSafe(fullpath(f)),
                    reverse=True)[max_cached_modules:]:
        cache_logger.debug('cleaning up cache file %s', f)
        removeFromCache(f)
    cache_logger.debug('cache pruned to %s items', max_cached_modules)
Example #3
def unpackFromCache(cache_key, to_directory):
    ''' If the specified cache key exists, unpack the tarball into the
        specified directory, otherwise raise NotInCache (a KeyError subclass).
    '''
    if cache_key is None:
        raise NotInCache('"None" is never in cache')
    cache_dir = folders.cacheDirectory()
    fsutils.mkDirP(cache_dir)
    path = os.path.join(cache_dir, cache_key)
    logger.debug('attempt to unpack from cache %s -> %s', path, to_directory)
    try:
        unpackFrom(path, to_directory)
        try:
            shutil.copy(path + '.json',
                        os.path.join(to_directory, '.yotta_origin.json'))
        except IOError as e:
            if e.errno == errno.ENOENT:
                pass
            else:
                raise
        cache_logger.debug('unpacked %s from cache into %s', cache_key,
                           to_directory)
        return
    except IOError as e:
        if e.errno == errno.ENOENT:
            cache_logger.debug('%s not in cache', cache_key)
            raise NotInCache('not in cache')
Example #4
def pruneCache():
    ''' Prune the cache '''
    cache_dir = folders.cacheDirectory()
    def fullpath(f):
        return os.path.join(cache_dir, f)
    def getMTimeSafe(f):
        # it's possible that another process removed the file before we stat
        # it, handle this gracefully
        try:
            return os.stat(f).st_mtime
        except FileNotFoundError:
            # treat the vanished file as freshly modified
            # (time.clock() was removed in Python 3.8; use time.time())
            import time
            return time.time()
    # ensure cache exists
    fsutils.mkDirP(cache_dir)
    max_cached_modules = getMaxCachedModules()
    for f in sorted(
            [f for f in os.listdir(cache_dir) if
                os.path.isfile(fullpath(f)) and not f.endswith('.json')
            ],
            key = lambda f: getMTimeSafe(fullpath(f)),
            reverse = True
        )[max_cached_modules:]:
        cache_logger.debug('cleaning up cache file %s', f)
        removeFromCache(f)
    cache_logger.debug('cache pruned to %s items', max_cached_modules)
Example #5
def removeFromCache(cache_key):
    f = os.path.join(folders.cacheDirectory(), cache_key)
    try:
        fsutils.rmF(f)
        # remove any metadata too, if it exists
        fsutils.rmF(f + '.json')
    except OSError as e:
        # if we failed to remove either file, then it might be because another
        # instance of yotta is using it, so just skip it this time.
        pass
Example #6
def _downloadToCache(stream, hashinfo={}, origin_info=dict()):
    ''' Download the specified stream to a temporary cache directory, and
        return a cache key that can be used to access/remove the file.
        After downloading, either remove the file with removeFromCache(cache_key)
        or move it to a known key with _moveCachedFile.
    '''
    hash_name = None
    hash_value = None
    m = None

    if len(hashinfo):
        # check for hashes in preferred order. Currently this is just sha256
        # (which the registry uses). Initial investigations suggest that github
        # doesn't return a header with the hash of the file being downloaded.
        for h in ('sha256', ):
            if h in hashinfo:
                hash_name = h
                hash_value = hashinfo[h]
                m = getattr(hashlib, h)()
                break
        if not hash_name:
            logger.warning('could not find supported hash type in %s',
                           hashinfo)

    cache_dir = folders.cacheDirectory()
    fsutils.mkDirP(cache_dir)
    file_size = 0

    (download_file, download_fname) = tempfile.mkstemp(dir=cache_dir,
                                                       suffix='.locked')

    with os.fdopen(download_file, 'wb') as f:
        f.seek(0)
        for chunk in stream.iter_content(4096):
            f.write(chunk)
            if hash_name:
                m.update(chunk)

        if hash_name:
            calculated_hash = m.hexdigest()
            logger.debug('calculated %s hash: %s check against: %s' %
                         (hash_name, calculated_hash, hash_value))
            if hash_value and (hash_value != calculated_hash):
                raise Exception('Hash verification failed.')
        file_size = f.tell()
        logger.debug('wrote tarfile of size: %s to %s', file_size,
                     download_fname)
        f.truncate()

    extended_origin_info = {'hash': hashinfo, 'size': file_size}
    extended_origin_info.update(origin_info)
    ordered_json.dump(download_fname + '.json', extended_origin_info)
    return os.path.basename(download_fname)
Example #7
def pruneCache():
    ''' Prune the cache '''
    cache_dir = folders.cacheDirectory()
    def fullpath(f):
        return os.path.join(cache_dir, f)
    # ensure cache exists
    fsutils.mkDirP(cache_dir)
    max_cached_modules = getMaxCachedModules()
    for f in sorted(
            [f for f in os.listdir(cache_dir) if
                os.path.isfile(fullpath(f)) and not f.endswith('.json')
            ],
            key = lambda f: os.stat(fullpath(f)).st_mtime,
            reverse = True
        )[max_cached_modules:]:
        cache_logger.debug('cleaning up cache file %s', f)
        removeFromCache(f)
    cache_logger.debug('cache pruned to %s items', max_cached_modules)
Example #8
def _moveCachedFile(from_key, to_key):
    ''' Move a file atomically within the cache: used to make cached files
        available at known keys, so they can be used by other processes.
    '''
    cache_dir = folders.cacheDirectory()
    from_path = os.path.join(cache_dir, from_key)
    to_path = os.path.join(cache_dir, to_key)
    try:
        os.rename(from_path, to_path)
        # if moving the actual file was successful, then try to move the
        # metadata:
        os.rename(from_path + '.json', to_path + '.json')
    except Exception as e:
        # if the source doesn't exist, or the destination already exists,
        # remove the source file instead.
        # windows error 183 == file already exists
        # (be careful not to use WindowsError on non-windows platforms as it
        # isn't defined)
        if (isinstance(e, OSError) and e.errno == errno.ENOENT) or \
           (isinstance(e, getattr(__builtins__, "WindowsError", type(None))) and e.errno == 183):
            fsutils.rmF(from_path)
        else:
            raise
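
Taken together, _downloadToCache and _moveCachedFile implement a two-step publish: download under a random temporary key, then rename the result to the key other yotta processes will look for, carrying the .json origin metadata along. A sketch of that composition; the wrapper name is hypothetical, the stream is assumed to be a requests response opened with stream=True, and the import path is an assumption:

# assumed imports; the yotta module path is an assumption
import requests
from yotta.lib.access_common import _downloadToCache, _moveCachedFile, removeFromCache

def fetchToKnownKey(url, final_key, hashinfo=None):
    # hypothetical wrapper: download into a temporary '.locked' cache entry,
    # then publish it under final_key
    response = requests.get(url, stream=True)
    response.raise_for_status()
    temporary_key = _downloadToCache(response, hashinfo or {}, {'url': url})
    try:
        _moveCachedFile(temporary_key, final_key)
    except Exception:
        # publishing failed for a reason other than "someone beat us to it":
        # discard the temporary download before re-raising
        removeFromCache(temporary_key)
        raise
    return final_key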
Example #9
def unpackFromCache(cache_key, to_directory):
    ''' If the specified cache key exists, unpack the tarball into the
        specified directory, otherwise raise NotInCache (a KeyError subclass).
    '''
    if cache_key is None:
        raise NotInCache('"None" is never in cache')

    cache_key = _encodeCacheKey(cache_key)

    cache_dir = folders.cacheDirectory()
    fsutils.mkDirP(cache_dir)
    path = os.path.join(cache_dir, cache_key)
    logger.debug('attempt to unpack from cache %s -> %s', path, to_directory)
    try:
        unpackFrom(path, to_directory)
        try:
            shutil.copy(path + '.json',
                        os.path.join(to_directory, '.yotta_origin.json'))
        except IOError as e:
            if e.errno == errno.ENOENT:
                pass
            else:
                raise
        cache_logger.debug('unpacked %s from cache into %s', cache_key,
                           to_directory)
        return
    except IOError as e:
        if e.errno == errno.ENOENT:
            cache_logger.debug('%s not in cache', cache_key)
            raise NotInCache('not in cache')
    except OSError as e:
        if e.errno == errno.ENOTEMPTY:
            logger.error(
                'directory %s was not empty: probably simultaneous invocation of yotta! It is likely that downloaded sources are corrupted.',
                to_directory
            )
        else:
            raise
Example #10
def downloadToCache(stream, hashinfo={}, cache_key=None, origin_info=dict()):
    ''' Download the specified stream to a temporary cache directory, and
        return a cache key that can be used to access/remove the file.
        If cache_key is None, then a cache key will be generated and returned.
        You will probably want to use removeFromCache(cache_key) to remove it.
    '''
    hash_name  = None
    hash_value = None
    m = None

    if len(hashinfo):
        # check for hashes in preferred order. Currently this is just sha256
        # (which the registry uses). Initial investigations suggest that github
        # doesn't return a header with the hash of the file being downloaded.
        for h in ('sha256',):
            if h in hashinfo:
                hash_name  = h
                hash_value = hashinfo[h]
                m = getattr(hashlib, h)()
                break
        if not hash_name:
            logger.warning('could not find supported hash type in %s', hashinfo)

    if cache_key is None:
        cache_key = '%032x' % random.getrandbits(256)

    cache_dir = folders.cacheDirectory()
    fsutils.mkDirP(cache_dir)
    cache_as = os.path.join(cache_dir, cache_key)
    file_size = 0

    (download_file, download_fname) = tempfile.mkstemp(dir=cache_dir)
    with os.fdopen(download_file, 'wb') as f:
        f.seek(0)
        for chunk in stream.iter_content(4096):
            f.write(chunk)
            if hash_name:
                m.update(chunk)

        if hash_name:
            calculated_hash = m.hexdigest()
            logger.debug(
                'calculated %s hash: %s check against: %s' % (
                    hash_name, calculated_hash, hash_value
                )
            )
            if hash_value and (hash_value != calculated_hash):
                raise Exception('Hash verification failed.')
        file_size = f.tell()
        logger.debug('wrote tarfile of size: %s to %s', file_size, download_fname)
        f.truncate()
    try:
        os.rename(download_fname, cache_as)
        extended_origin_info = {
            'hash': hashinfo,
            'size': file_size
        }
        extended_origin_info.update(origin_info)
        ordered_json.dump(cache_as + '.json', extended_origin_info)
    except OSError as e:
        if e.errno == errno.ENOENT:
            # if we failed, it's most likely because another process got
            # there first, so just rm our temporary file and continue
            cache_logger.debug('another process downloaded %s first', cache_key)
            fsutils.rmF(download_fname)
        else:
            raise

    return cache_key
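
The public downloadToCache pairs naturally with unpackFromCache from the earlier examples: fetch the tarball into the cache under a caller-chosen key, then unpack it where it is needed, leaving the cached copy for pruneCache to evict later. A minimal sketch; the wrapper name, URL handling and origin metadata are illustrative, and the import path is an assumption:

# assumed imports; the yotta module path is an assumption
import requests
from yotta.lib.access_common import downloadToCache, unpackFromCache

def downloadAndUnpack(url, cache_key, to_directory, sha256=None):
    # hypothetical helper: download a tarball into the cache, verify the
    # sha256 if one is supplied, then unpack it into to_directory
    response = requests.get(url, stream=True)
    response.raise_for_status()
    hashinfo = {'sha256': sha256} if sha256 else {}
    key = downloadToCache(response, hashinfo, cache_key, {'url': url})
    unpackFromCache(key, to_directory)
    # the tarball stays cached for future installs; pruneCache() evicts the
    # oldest entries once the configured limit is exceeded
    return key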
Example #11
def removeFromCache(cache_key):
    f = os.path.join(folders.cacheDirectory(), cache_key)
    fsutils.rmF(f)
    # remove any metadata too, if it exists
    fsutils.rmF(f + '.json')
Example #12
def downloadToCache(stream, hashinfo={}, cache_key=None, origin_info=dict()):
    ''' Download the specified stream to a temporary cache directory, and
        return a cache key that can be used to access/remove the file.
        If cache_key is None, then a cache key will be generated and returned.
        You will probably want to use removeFromCache(cache_key) to remove it.
    '''
    hash_name = None
    hash_value = None
    m = None

    if len(hashinfo):
        # check for hashes in preferred order. Currently this is just sha256
        # (which the registry uses). Initial investigations suggest that github
        # doesn't return a header with the hash of the file being downloaded.
        for h in ('sha256', ):
            if h in hashinfo:
                hash_name = h
                hash_value = hashinfo[h]
                m = getattr(hashlib, h)()
                break
        if not hash_name:
            logger.warning('could not find supported hash type in %s',
                           hashinfo)

    if cache_key is None:
        cache_key = '%032x' % random.getrandbits(256)

    cache_dir = folders.cacheDirectory()
    fsutils.mkDirP(cache_dir)
    cache_as = os.path.join(cache_dir, cache_key)
    file_size = 0

    (download_file, download_fname) = tempfile.mkstemp(dir=cache_dir)
    with os.fdopen(download_file, 'wb') as f:
        f.seek(0)
        for chunk in stream.iter_content(4096):
            f.write(chunk)
            if hash_name:
                m.update(chunk)

        if hash_name:
            calculated_hash = m.hexdigest()
            logger.debug('calculated %s hash: %s check against: %s' %
                         (hash_name, calculated_hash, hash_value))
            if hash_value and (hash_value != calculated_hash):
                raise Exception('Hash verification failed.')
        file_size = f.tell()
        logger.debug('wrote tarfile of size: %s to %s', file_size,
                     download_fname)
        f.truncate()
    try:
        os.rename(download_fname, cache_as)
        extended_origin_info = {'hash': hashinfo, 'size': file_size}
        extended_origin_info.update(origin_info)
        ordered_json.dump(cache_as + '.json', extended_origin_info)
    except Exception as e:
        # windows error 183 == file already exists
        # (be careful not to use WindowsError on non-windows platforms as it
        # isn't defined)
        if (isinstance(e, OSError) and e.errno == errno.ENOENT) or \
           (isinstance(e, getattr(__builtins__, "WindowsError", type(None))) and e.errno == 183):
            # if we failed, it's because the file already exists (probably
            # because another process got there first), so just rm our
            # temporary file and continue
            cache_logger.debug('another process downloaded %s first',
                               cache_key)
            fsutils.rmF(download_fname)
        else:
            raise

    return cache_key