Example #1
File: pack.py  Project: ntoll/yotta (identical code appears in the geky/yotta fork)
 def writeDescription(self):
     ''' Write the current (possibly modified) component description to a
         package description file in the component directory.
     '''
     ordered_json.dump(os.path.join(self.path, self.description_filename), self.description)
     if self.vcs:
         self.vcs.markForCommit(self.description_filename)
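Every example on this page writes its JSON through the same helper, ordered_json.dump(path, data). yotta's actual implementation is not reproduced here; a minimal sketch, assuming the helper simply wraps the standard json module (the name dump and the argument order are taken from the call sites above, everything else is an assumption):

import json

def dump(filename, obj):
    # hypothetical stand-in for ordered_json.dump: write obj to filename
    # as indented JSON; dicts keep insertion order in Python 3.7+, so the
    # on-disk key order follows the object's own ordering
    with open(filename, 'w') as f:
        json.dump(obj, f, indent=2)
        f.write('\n')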
Example #2
 def write(self, filename=None):
     ''' Write the named config file (or the first one, if no filename is
         given) back to disk, creating its directory first if necessary.
     '''
     if filename is None:
         filename, data = self._firstConfig()
     elif filename in self.configs:
         data = self.configs[filename]
     else:
         raise ValueError('No such file.')
     dirname = os.path.dirname(filename)
     fsutils.mkDirP(dirname)
     ordered_json.dump(filename, data)
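Both this method and the variant in the next example rely on a _firstConfig helper that is not shown. A plausible sketch, assuming self.configs is an ordered mapping from filename to settings data (the names are inferred from the call site, not taken from yotta's source):

 def _firstConfig(self):
     # hypothetical reconstruction: return the (filename, data) pair for
     # the first known config file, mirroring how write() unpacks it
     for filename, data in self.configs.items():
         return filename, data
     raise ValueError('No configs loaded.')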
Example #3
 def write(self, filename=None):
     if filename is None:
         filename, data = self._firstConfig()
     elif filename in self.configs:
         data = self.configs[filename]
     else:
         raise ValueError('No such file.')
     dirname = os.path.normpath(os.path.dirname(filename))
     logging.debug('write settings to "%s" (will ensure directory "%s" exists)', filename, dirname)
     try:
         fsutils.mkDirP(dirname)
         ordered_json.dump(filename, data)
     except OSError as e:
         logging.error('Failed to save user settings to %s (%s), please check that the path exists and is writable.', filename, e)
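Either variant is called the same way; a hypothetical usage, assuming a settings object whose configs mapping is already populated (the path is a placeholder):

settings.write()                                  # write back the first config file
settings.write('/home/user/.yotta/config.json')   # write one specific, known file

Passing a filename that is not in self.configs raises ValueError rather than silently creating an empty file.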
Example #4
def downloadToCache(stream, hashinfo={}, cache_key=None, origin_info=dict()):
    ''' Download the specified stream to a temporary cache directory, and
        return a cache key that can be used to access/remove the file.
        If cache_key is None, then a cache key will be generated and returned.
        You will probably want to use removeFromCache(cache_key) to remove it.
    '''
    hash_name  = None
    hash_value = None
    m = None

    if len(hashinfo):
        # check for hashes in preferred order. Currently this is just sha256
        # (which the registry uses). Initial investigations suggest that github
        # doesn't return a header with the hash of the file being downloaded.
        for h in ('sha256',):
            if h in hashinfo:
                hash_name  = h
                hash_value = hashinfo[h]
                m = getattr(hashlib, h)()
                break
        if not hash_name:
            logger.warning('could not find supported hash type in %s', hashinfo)

    if cache_key is None:
        cache_key = '%032x' % random.getrandbits(256)

    cache_dir = folders.cacheDirectory()
    fsutils.mkDirP(cache_dir)
    cache_as = os.path.join(cache_dir, cache_key)
    file_size = 0

    (download_file, download_fname) = tempfile.mkstemp(dir=cache_dir)
    with os.fdopen(download_file, 'wb') as f:
        f.seek(0)
        for chunk in stream.iter_content(4096):
            f.write(chunk)
            if hash_name:
                m.update(chunk)

        if hash_name:
            calculated_hash = m.hexdigest()
            logger.debug('calculated %s hash: %s check against: %s',
                         hash_name, calculated_hash, hash_value)
            if hash_value and (hash_value != calculated_hash):
                raise Exception('Hash verification failed.')
        file_size = f.tell()
        logger.debug('wrote tarfile of size: %s to %s', file_size, download_fname)
        f.truncate()
    try:
        os.rename(download_fname, cache_as)
        extended_origin_info = {
            'hash': hashinfo,
            'size': file_size
        }
        extended_origin_info.update(origin_info)
        ordered_json.dump(cache_as + '.json', extended_origin_info)
    except OSError as e:
        if e.errno == errno.ENOENT:
            # the rename failed because a path involved no longer exists,
            # most likely because another process finished this download
            # first; remove our temporary file and continue
            cache_logger.debug('another process downloaded %s first', cache_key)
            fsutils.rmF(download_fname)
        else:
            raise

    return cache_key
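The mkstemp-then-os.rename sequence is what makes the cache safe under concurrency: the entry appears in the cache directory atomically, so other processes never observe a partially written file. The stream argument only needs an iter_content(chunk_size) method, which a streaming requests response provides. A hedged usage sketch (the URL is a placeholder, and removeFromCache comes from the docstring rather than from code shown here):

import os
import requests

url = 'https://example.com/package.tar.gz'   # placeholder
response = requests.get(url, stream=True)
response.raise_for_status()

cache_key = downloadToCache(response, origin_info={'url': url})
try:
    path = os.path.join(folders.cacheDirectory(), cache_key)
    # ... read or unpack the cached tarball at `path` ...
finally:
    removeFromCache(cache_key)   # clean up the entry once it is no longer needed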