Example 1
File: pack.py Project: Timmmm/yotta
def writeDescription(self):
    ''' Write the current (possibly modified) component description to a
        package description file in the component directory.
    '''
    ordered_json.dump(os.path.join(self.path, self.description_filename), self.description)
    if self.vcs:
        self.vcs.markForCommit(self.description_filename)
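Every example in this listing funnels through ordered_json.dump(path, data), which takes the destination path first and the object to serialise second. As a rough mental model only (a hedged sketch, not yotta's actual implementation; dump_ordered_json and the 'module.json' path are illustrative names), the helper behaves like a key-order-preserving json.dump wrapper:

import json
from collections import OrderedDict

def dump_ordered_json(path, obj):
    """Hypothetical stand-in for yotta's ordered_json.dump: write obj as JSON
    to path, keeping the key order the caller supplied."""
    # json.dump never reorders keys unless sort_keys=True is passed, so an
    # OrderedDict (or insertion-ordered dict) survives the round trip.
    with open(path, 'w') as f:
        json.dump(obj, f, indent=2, separators=(',', ': '))
        f.write('\n')

# Usage mirroring writeDescription() above (file name is illustrative only):
description = OrderedDict([('name', 'my-component'), ('version', '0.1.0')])
dump_ordered_json('module.json', description)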
Example 2
def _downloadToCache(stream, hashinfo={}, origin_info=dict()):
    ''' Download the specified stream to a temporary cache directory, and
        returns a cache key that can be used to access/remove the file.
        You should use either removeFromCache(cache_key) or _moveCachedFile to
        move the downloaded file to a known key after downloading.
    '''
    hash_name = None
    hash_value = None
    m = None

    if len(hashinfo):
        # check for hashes in preferred order. Currently this is just sha256
        # (which the registry uses). Initial investigations suggest that github
        # doesn't return a header with the hash of the file being downloaded.
        for h in ('sha256', ):
            if h in hashinfo:
                hash_name = h
                hash_value = hashinfo[h]
                m = getattr(hashlib, h)()
                break
        if not hash_name:
            logger.warning('could not find supported hash type in %s',
                           hashinfo)

    cache_dir = folders.cacheDirectory()
    fsutils.mkDirP(cache_dir)
    file_size = 0

    (download_file, download_fname) = tempfile.mkstemp(dir=cache_dir,
                                                       suffix='.locked')

    with os.fdopen(download_file, 'wb') as f:
        f.seek(0)
        for chunk in stream.iter_content(4096):
            f.write(chunk)
            if hash_name:
                m.update(chunk)

        if hash_name:
            calculated_hash = m.hexdigest()
            logger.debug('calculated %s hash: %s check against: %s' %
                         (hash_name, calculated_hash, hash_value))
            if hash_value and (hash_value != calculated_hash):
                raise Exception('Hash verification failed.')
        file_size = f.tell()
        logger.debug('wrote tarfile of size: %s to %s', file_size,
                     download_fname)
        f.truncate()

    extended_origin_info = {'hash': hashinfo, 'size': file_size}
    extended_origin_info.update(origin_info)
    ordered_json.dump(download_fname + '.json', extended_origin_info)
    return os.path.basename(download_fname)
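Besides the cached payload itself, _downloadToCache writes an origin-info sidecar next to it: a '<cache name>.json' file holding the hash info and file size merged with whatever was passed as origin_info. A hedged sketch of reading that sidecar back, using only the standard json module (read_cache_sidecar is a hypothetical helper; the path layout is taken from the code above):

import json
import os

def read_cache_sidecar(cache_dir, cache_name):
    """Read the '<cache_name>.json' metadata written next to a cached
    download; returns the dict that _downloadToCache dumped there."""
    sidecar_path = os.path.join(cache_dir, cache_name + '.json')
    with open(sidecar_path) as f:
        return json.load(f)

# e.g. info = read_cache_sidecar(folders.cacheDirectory(), cache_name)
# info looks like {'hash': {'sha256': '...'}, 'size': 12345, ...origin_info}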
Example 3
def write(self, filename=None):
    # fsutils, , misc filesystem utils, internal
    from yotta.lib import fsutils
    if filename is None:
        filename, data = self._firstConfig()
    elif filename in self.configs:
        data = self.configs[filename]
    else:
        raise ValueError('No such file.')
    dirname = os.path.normpath(os.path.dirname(filename))
    logging.debug('write settings to "%s" (will ensure directory "%s" exists)', filename, dirname)
    try:
        fsutils.mkDirP(dirname)
        ordered_json.dump(filename, data)
    except OSError as e:
        logging.error('Failed to save user settings to %s/%s, please check that the path exists and is writable.', dirname, filename)
Example 4
def write(self, filename=None):
    # fsutils, , misc filesystem utils, internal
    from yotta.lib import fsutils
    if filename is None:
        filename, data = self._firstConfig()
    elif filename in self.configs:
        data = self.configs[filename]
    else:
        raise ValueError('No such file.')
    dirname = os.path.normpath(os.path.dirname(filename))
    logging.debug(
        'write settings to "%s" (will ensure directory "%s" exists)',
        filename, dirname)
    try:
        fsutils.mkDirP(dirname)
        ordered_json.dump(filename, data)
    except OSError as e:
        logging.error(
            'Failed to save user settings to %s/%s, please check that the path exists and is writable.',
            dirname, filename)
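The write() method above leans on two yotta helpers: fsutils.mkDirP (an mkdir -p equivalent, judging by its name and usage) and ordered_json.dump. The same ensure-directory-then-write pattern in plain standard-library terms, as a hedged illustration (write_settings and the call below are hypothetical, not part of yotta):

import json
import os

def write_settings(filename, data):
    """Stdlib-only sketch of the pattern used by write(): make sure the
    parent directory exists, then serialise the settings as JSON."""
    dirname = os.path.normpath(os.path.dirname(filename))
    os.makedirs(dirname, exist_ok=True)  # rough equivalent of fsutils.mkDirP
    with open(filename, 'w') as f:
        json.dump(data, f, indent=2)

# Illustrative call with a made-up path and payload:
# write_settings('some/config/path.json', {'minversion': '0.0.1'})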
Example 5
def downloadToCache(stream, hashinfo={}, cache_key=None, origin_info=dict()):
    ''' Download the specified stream to a temporary cache directory, and
        returns a cache key that can be used to access/remove the file.
        If cache_key is None, then a cache key will be generated and returned.
        You will probably want to use removeFromCache(cache_key) to remove it.
    '''
    hash_name  = None
    hash_value = None
    m = None

    if len(hashinfo):
        # check for hashes in preferred order. Currently this is just sha256
        # (which the registry uses). Initial investigations suggest that github
        # doesn't return a header with the hash of the file being downloaded.
        for h in ('sha256',):
            if h in hashinfo:
                hash_name  = h
                hash_value = hashinfo[h]
                m = getattr(hashlib, h)()
                break
        if not hash_name:
            logger.warning('could not find supported hash type in %s', hashinfo)

    if cache_key is None:
        cache_key = '%032x' % random.getrandbits(256)

    cache_dir = folders.cacheDirectory()
    fsutils.mkDirP(cache_dir)
    cache_as = os.path.join(cache_dir, cache_key)
    file_size = 0

    (download_file, download_fname) = tempfile.mkstemp(dir=cache_dir)
    with os.fdopen(download_file, 'wb') as f:
        f.seek(0)
        for chunk in stream.iter_content(4096):
            f.write(chunk)
            if hash_name:
                m.update(chunk)

        if hash_name:
            calculated_hash = m.hexdigest()
            logger.debug(
                'calculated %s hash: %s check against: %s' % (
                    hash_name, calculated_hash, hash_value
                )
            )
            if hash_value and (hash_value != calculated_hash):
                raise Exception('Hash verification failed.')
        file_size = f.tell()
        logger.debug('wrote tarfile of size: %s to %s', file_size, download_fname)
        f.truncate()
    try:
        os.rename(download_fname, cache_as)
        extended_origin_info = {
            'hash': hashinfo,
            'size': file_size
        }
        extended_origin_info.update(origin_info)
        ordered_json.dump(cache_as + '.json', extended_origin_info)
    except OSError as e:
        if e.errno == errno.ENOENT:
            # if we failed, it's because the file already exists (probably
            # because another process got there first), so just rm our
            # temporary file and continue
            cache_logger.debug('another process downloaded %s first', cache_key)
            fsutils.rmF(download_fname)
        else:
            raise

    return cache_key
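The call to stream.iter_content(4096) implies the caller hands in a requests-style streaming response. A hedged usage sketch of the function above (the requests library, the URL, and the expected digest are illustrative assumptions, not taken from yotta; the placeholder digest would obviously fail verification against real content):

import requests

# Stream a tarball and let downloadToCache verify its sha256 while writing.
response = requests.get('https://example.com/some-component.tar.gz', stream=True)
response.raise_for_status()

cache_key = downloadToCache(
    response,
    hashinfo={'sha256': 'expected-hex-digest-goes-here'},  # illustrative value
    origin_info={'url': response.url}
)
# ...use the cached file under the returned key, then clean up:
# removeFromCache(cache_key)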
Example 6
def downloadToCache(stream, hashinfo={}, cache_key=None, origin_info=dict()):
    ''' Download the specified stream to a temporary cache directory, and
        returns a cache key that can be used to access/remove the file.
        If cache_key is None, then a cache key will be generated and returned.
        You will probably want to use removeFromCache(cache_key) to remove it.
    '''
    hash_name = None
    hash_value = None
    m = None

    if len(hashinfo):
        # check for hashes in preferred order. Currently this is just sha256
        # (which the registry uses). Initial investigations suggest that github
        # doesn't return a header with the hash of the file being downloaded.
        for h in ('sha256', ):
            if h in hashinfo:
                hash_name = h
                hash_value = hashinfo[h]
                m = getattr(hashlib, h)()
                break
        if not hash_name:
            logger.warning('could not find supported hash type in %s',
                           hashinfo)

    if cache_key is None:
        cache_key = '%032x' % random.getrandbits(256)

    cache_dir = folders.cacheDirectory()
    fsutils.mkDirP(cache_dir)
    cache_as = os.path.join(cache_dir, cache_key)
    file_size = 0

    (download_file, download_fname) = tempfile.mkstemp(dir=cache_dir)
    with os.fdopen(download_file, 'wb') as f:
        f.seek(0)
        for chunk in stream.iter_content(4096):
            f.write(chunk)
            if hash_name:
                m.update(chunk)

        if hash_name:
            calculated_hash = m.hexdigest()
            logger.debug('calculated %s hash: %s check against: %s' %
                         (hash_name, calculated_hash, hash_value))
            if hash_value and (hash_value != calculated_hash):
                raise Exception('Hash verification failed.')
        file_size = f.tell()
        logger.debug('wrote tarfile of size: %s to %s', file_size,
                     download_fname)
        f.truncate()
    try:
        os.rename(download_fname, cache_as)
        extended_origin_info = {'hash': hashinfo, 'size': file_size}
        extended_origin_info.update(origin_info)
        ordered_json.dump(cache_as + '.json', extended_origin_info)
    except Exception as e:
        # windows error 183 == file already exists
        # (be careful not to use WindowsError on non-windows platforms as it
        # isn't defined)
        if (isinstance(e, OSError) and e.errno == errno.ENOENT) or \
           (isinstance(e, getattr(__builtins__, "WindowsError", type(None))) and e.errno == 183):
            # if we failed, it's because the file already exists (probably
            # because another process got there first), so just rm our
            # temporary file and continue
            cache_logger.debug('another process downloaded %s first',
                               cache_key)
            fsutils.rmF(download_fname)
        else:
            raise

    return cache_key
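The hash check in both downloadToCache variants is the standard incremental hashlib pattern: feed each chunk to the digest as it is written and compare hexdigest() at the end. The same idea in isolation, as a minimal self-contained sketch (sha256_of_chunks is a hypothetical helper):

import hashlib

def sha256_of_chunks(chunks):
    """Compute the sha256 hex digest of an iterable of byte chunks, the same
    way downloadToCache updates its digest per 4096-byte chunk."""
    m = hashlib.sha256()
    for chunk in chunks:
        m.update(chunk)
    return m.hexdigest()

# Chunked hashing gives the same digest as hashing the whole payload at once.
payload = b'example payload'
expected = hashlib.sha256(payload).hexdigest()
actual = sha256_of_chunks([payload[:7], payload[7:]])
if actual != expected:
    raise Exception('Hash verification failed.')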