def _hash_check(file_, absfile, macros, remove=True):
    failed = False
    hash = sources.get_hash(file_.lower(), macros)
    if hash is not None:
        hash = hash.split()
        if len(hash) != 2:
            raise error.internal('invalid hash format: %s' % (file_))
        try:
            hashlib_algorithms = hashlib.algorithms
        except:
            hashlib_algorithms = [
                'md5', 'sha1', 'sha224', 'sha256', 'sha384', 'sha512'
            ]
        if hash[0] not in hashlib_algorithms:
            raise error.general('invalid hash algorithm for %s: %s' %
                                (file_, hash[0]))
        if hash[0] in ['md5', 'sha1']:
            raise error.general('hash: %s: insecure: %s' % (file_, hash[0]))
        hasher = None
        _in = None
        try:
            hasher = hashlib.new(hash[0])
            _in = open(path.host(absfile), 'rb')
            hasher.update(_in.read())
        except IOError as err:
            log.notice('hash: %s: read error: %s' % (file_, str(err)))
            failed = True
        except:
            msg = 'hash: %s: error' % (file_)
            log.stderr(msg)
            log.notice(msg)
            if _in is not None:
                _in.close()
            raise
        if _in is not None:
            _in.close()
        hash_hex = hasher.hexdigest()
        hash_base64 = base64.b64encode(hasher.digest()).decode('utf-8')
        log.output('checksums: %s: (hex: %s) (b64: %s) => %s' %
                   (file_, hash_hex, hash_base64, hash[1]))
        if hash_hex != hash[1] and hash_base64 != hash[1]:
            log.warning('checksum error: %s' % (file_))
            failed = True
        if failed and remove:
            log.warning('removing: %s' % (file_))
            if path.exists(absfile):
                try:
                    os.remove(path.host(absfile))
                except IOError as err:
                    raise error.general('hash: %s: remove: %s' %
                                        (absfile, str(err)))
                except:
                    raise error.general('hash: %s: remove error' % (file_))
        if hasher is not None:
            del hasher
    else:
        if version.released():
            raise error.general('%s: no hash found in released RSB' % (file_))
        log.warning('%s: no hash found' % (file_))
    return not failed
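For reference, the hash specification consumed by _hash_check() is a two-field string of the form '<algorithm> <digest>', and the digest may be given in either hex or base64 encoding. The sketch below shows the same comparison using only hashlib and base64; the verify_checksum() name and its arguments are illustrative and not part of the RSB code.

import base64
import hashlib

def verify_checksum(filename, spec):
    # spec looks like 'sha512 <hex-or-base64-digest>'.
    algorithm, digest = spec.split()
    hasher = hashlib.new(algorithm)
    with open(filename, 'rb') as f:
        hasher.update(f.read())
    # Accept either digest encoding, as _hash_check() does.
    return digest in (hasher.hexdigest(),
                      base64.b64encode(hasher.digest()).decode('utf-8'))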
 def introduction(self, name, intro_text = None):
     now = datetime.datetime.now().ctime()
     self.formatter.introduction(name, now, intro_text)
     if version.released():
         self.release_status()
     else:
         self.git_status()
def get_file(url, local, opts, config):
    if local is None:
        raise error.general('source/patch path invalid')
    if not path.isdir(path.dirname(local)) and not opts.download_disabled():
        log.notice('Creating source directory: %s' % \
                       (os.path.relpath(path.host(path.dirname(local)))))
    log.output('making dir: %s' % (path.host(path.dirname(local))))
    if _do_download(opts):
        path.mkdir(path.dirname(local))
    if not path.exists(local) and opts.download_disabled():
        raise error.general('source not found: %s' % (path.host(local)))
    #
    # Check if a URL has been provided on the command line. If the package is
    # released, add the RTEMS release URL to the URL bases.
    #
    url_bases = opts.urls()
    if version.released():
        rtems_release_url = config.macros.expand('%{rtems_release_url}/%{rsb_version}/sources')
        log.trace('release url: %s' % (rtems_release_url))
        #
        # If the URL being fetched is under the release path do not add the
        # sources release path because it is already there.
        #
        if not url.startswith(rtems_release_url):
            if url_bases is None:
                url_bases = [rtems_release_url]
            else:
                url_bases.append(rtems_release_url)
    urls = []
    if url_bases is not None:
        #
        # Split up the URL we are being asked to download.
        #
        url_path = urlparse.urlsplit(url)[2]
        slash = url_path.rfind('/')
        if slash < 0:
            url_file = url_path
        else:
            url_file = url_path[slash + 1:]
        log.trace('url_file: %s' % (url_file))
        for base in url_bases:
            if base[-1:] != '/':
                base += '/'
            next_url = urlparse.urljoin(base, url_file)
            log.trace('url: %s' % (next_url))
            urls.append(next_url)
    urls += url.split()
    log.trace('_url: %s -> %s' % (','.join(urls), local))
    for url in urls:
        for dl in downloaders:
            if url.startswith(dl):
                if downloaders[dl](url, local, config, opts):
                    return
    if _do_download(opts):
        raise error.general('downloading %s: all paths have failed, giving up' % (url))
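Only the file name component of the requested URL is reused when trying the configured URL bases; each base is joined with that name. A minimal sketch of that behaviour with Python 3's urllib.parse (urlparse in the snippet above is presumably a Python 2/3 compatibility import); the URLs are made-up examples.

from urllib import parse as urllib_parse

url = 'https://example.org/pub/gcc/gcc-13.2.0.tar.xz'     # hypothetical source URL
url_bases = ['https://mirror.example.net/rsb/sources']    # hypothetical mirror list

url_file = urllib_parse.urlsplit(url).path.split('/')[-1] # 'gcc-13.2.0.tar.xz'
urls = [urllib_parse.urljoin(base if base.endswith('/') else base + '/', url_file)
        for base in url_bases]
urls += url.split()
# urls now lists every location get_file() would try, in order:
#   https://mirror.example.net/rsb/sources/gcc-13.2.0.tar.xz
#   https://example.org/pub/gcc/gcc-13.2.0.tar.xz
print(urls)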
Example #5
def _hash_check(file_, absfile, macros, remove = True):
    failed = False
    hash = sources.get_hash(file_.lower(), macros)
    if hash is not None:
        hash = hash.split()
        if len(hash) != 2:
            raise error.internal('invalid hash format: %s' % (file_))
        try:
            hashlib_algorithms = hashlib.algorithms
        except:
            hashlib_algorithms = ['md5', 'sha1', 'sha224', 'sha256', 'sha384', 'sha512']
        if hash[0] not in hashlib_algorithms:
            raise error.general('invalid hash algorithm for %s: %s' % (file_, hash[0]))
        if hash[0] in ['md5', 'sha1']:
            raise error.general('hash: %s: insecure: %s' % (file_, hash[0]))
        hasher = None
        _in = None
        try:
            hasher = hashlib.new(hash[0])
            _in = open(path.host(absfile), 'rb')
            hasher.update(_in.read())
        except IOError as err:
            log.notice('hash: %s: read error: %s' % (file_, str(err)))
            failed = True
        except:
            msg = 'hash: %s: error' % (file_)
            log.stderr(msg)
            log.notice(msg)
            if _in is not None:
                _in.close()
            raise
        if _in is not None:
            _in.close()
        log.output('checksums: %s: %s => %s' % (file_, hasher.hexdigest(), hash[1]))
        if hasher.hexdigest() != hash[1]:
            log.warning('checksum error: %s' % (file_))
            failed = True
        if failed and remove:
            log.warning('removing: %s' % (file_))
            if path.exists(absfile):
                try:
                    os.remove(path.host(absfile))
                except IOError as err:
                    raise error.general('hash: %s: remove: %s' % (absfile, str(err)))
                except:
                    raise error.general('hash: %s: remove error' % (file_))
        if hasher is not None:
            del hasher
    else:
        if version.released():
            raise error.general('%s: no hash found in released RSB' % (file_))
        log.warning('%s: no hash found' % (file_))
    return not failed
Example #6
 def sb_released(self):
     if version.released():
         self.defaults['rsb_released'] = '1'
     self.defaults['rsb_version'] = version.str()
Example #7
def get_file(url, local, opts, config):
    if local is None:
        raise error.general('source/patch path invalid')
    if not path.isdir(path.dirname(local)) and not opts.download_disabled():
        log.notice('Creating source directory: %s' % \
                       (os.path.relpath(path.host(path.dirname(local)))))
    log.output('making dir: %s' % (path.host(path.dirname(local))))
    if _do_download(opts):
        path.mkdir(path.dirname(local))
    if not path.exists(local) and opts.download_disabled():
        raise error.general('source not found: %s' % (path.host(local)))
    #
    # Check if a URL has been provided on the command line. If the package is
    # released, add the RTEMS release URL unless overridden by the command
    # line option --with-release-url. The variant --without-release-url can
    # override the released check.
    #
    url_bases = opts.urls()
    try:
        rtems_release_url_value = config.macros.expand('%{rtems_release_url}/%{rsb_version}/sources')
    except:
        rtems_release_url_value = None
        log.output('RTEMS release URL could not be expanded')
    rtems_release_url = None
    if version.released() and rtems_release_url_value:
        rtems_release_url = rtems_release_url_value
    with_rel_url = opts.with_arg('release-url')
    if with_rel_url[1] == 'not-found':
        if config.defined('without_release_url'):
            with_rel_url = ('without_release-url', 'yes')
    if with_rel_url[0] == 'with_release-url':
        if with_rel_url[1] == 'yes':
            if rtems_release_url_value is None:
                raise error.general('no valid release URL')
            rtems_release_url = rtems_release_url_value
        elif with_rel_url[1] == 'no':
            pass
        else:
            rtems_release_url = with_rel_url[1]
    elif with_rel_url[0] == 'without_release-url' and with_rel_url[1] == 'yes':
        rtems_release_url = None
    if rtems_release_url is not None:
        log.trace('release url: %s' % (rtems_release_url))
        #
        # If the URL being fetched is under the release path do not add the
        # sources release path because it is already there.
        #
        if not url.startswith(rtems_release_url):
            if url_bases is None:
                url_bases = [rtems_release_url]
            else:
                url_bases.append(rtems_release_url)
    urls = []
    if url_bases is not None:
        #
        # Split up the URL we are being asked to download.
        #
        url_path = urllib_parse.urlsplit(url)[2]
        slash = url_path.rfind('/')
        if slash < 0:
            url_file = url_path
        else:
            url_file = url_path[slash + 1:]
        log.trace('url_file: %s' % (url_file))
        for base in url_bases:
            if base[-1:] != '/':
                base += '/'
            next_url = urllib_parse.urljoin(base, url_file)
            log.trace('url: %s' % (next_url))
            urls.append(next_url)
    urls += url.split()
    log.trace('_url: %s -> %s' % (','.join(urls), local))
    for url in urls:
        for dl in downloaders:
            if url.startswith(dl):
                if downloaders[dl](url, local, config, opts):
                    return
    if _do_download(opts):
        raise error.general('downloading %s: all paths have failed, giving up' % (url))
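The --with-release-url/--without-release-url handling above reduces to a small decision table. Below is a sketch of that resolution as a pure function; the (name, value) tuple shape follows what opts.with_arg('release-url') returns in this snippet, but the function itself and its error are illustrative, not RSB API.

def resolve_release_url(with_rel_url, without_release_url_defined,
                        released, release_url_value):
    # Default: use the RTEMS release URL only for a released RSB.
    release_url = release_url_value if released else None
    # A defined %{without_release_url} macro behaves like --without-release-url.
    if with_rel_url[1] == 'not-found' and without_release_url_defined:
        with_rel_url = ('without_release-url', 'yes')
    if with_rel_url[0] == 'with_release-url':
        if with_rel_url[1] == 'yes':
            if release_url_value is None:
                raise ValueError('no valid release URL')
            release_url = release_url_value
        elif with_rel_url[1] != 'no':
            release_url = with_rel_url[1]    # an explicit URL was supplied
    elif with_rel_url[0] == 'without_release-url' and with_rel_url[1] == 'yes':
        release_url = None
    return release_url

# e.g. resolve_release_url(('with_release-url', 'yes'), False, True,
#                          'https://example.org/6.1/sources')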

def _local_path(source, pathkey, config):
    for p in config.define(pathkey).split(':'):
        local = path.join(path.abspath(p), source['file'])
        if source['local'] is None:
            source['local_prefix'] = path.abspath(p)
            source['local'] = local
        if path.exists(local):
            source['local_prefix'] = path.abspath(p)
            source['local'] = local
            _hash_check(source['file'], local, config.macros)
            break
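_local_path() walks a colon-separated search path, keeps the first directory as the default location, and switches to (and hash-checks) the first directory that already holds the file. A minimal standalone equivalent with os.path, without the hash check; the names are illustrative only.

import os

def find_local(search_path, filename):
    local = None
    for p in search_path.split(':'):
        candidate = os.path.join(os.path.abspath(p), filename)
        if local is None:
            local = candidate         # default location if no copy exists yet
        if os.path.exists(candidate):
            return candidate          # first existing copy wins
    return local

# e.g. find_local('sources:patches', 'gcc-13.2.0.tar.xz')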