def _http_downloader(url, local, config, opts):
    # Fetch url over HTTP(S) into the local file; a pre-existing local file is
    # treated as already downloaded. Python 2 code ('except IOError, err',
    # urllib2/urlparse).
    # NOTE(review): this chunk appears truncated -- no _in/_out close and no
    # final return are visible; confirm against the full source.
    if path.exists(local):
        return True
    #
    # Hack for GitHub.
    #
    if url.startswith('https://api.github.com'):
        # GitHub API URLs need the tarball/<version> suffix to fetch a source
        # archive rather than API metadata.
        url = urlparse.urljoin(url, config.expand('tarball/%{version}'))
    log.notice('download: %s -> %s' % (url, os.path.relpath(path.host(local))))
    failed = False
    if not opts.dry_run():
        _in = None
        _out = None
        try:
            _in = urllib2.urlopen(url)
            _out = open(path.host(local), 'wb')
            _out.write(_in.read())
        except IOError, err:
            log.notice('download: %s: error: %s' % (url, str(err)))
            # Remove any partial download so a retry starts clean.
            if path.exists(local):
                os.remove(path.host(local))
            failed = True
        except ValueError, err:
            log.notice('download: %s: error: %s' % (url, str(err)))
            if path.exists(local):
                os.remove(path.host(local))
            failed = True
def get_file(url, local, opts, config):
    """Download the source/patch at url into local.

    Command-line URL bases (opts.urls()) are tried first, then the URL
    itself. Raises error.general when local is invalid, downloading is
    disabled and the file is missing, or every download path fails.
    """
    if local is None:
        raise error.general("source/patch path invalid")
    if not path.isdir(path.dirname(local)) and not opts.download_disabled():
        log.notice("Creating source directory: %s" %
                   (os.path.relpath(path.host(path.dirname(local)))))
        log.output("making dir: %s" % (path.host(path.dirname(local))))
        if not opts.dry_run():
            path.mkdir(path.dirname(local))
    if not path.exists(local) and opts.download_disabled():
        raise error.general("source not found: %s" % (path.host(local)))
    #
    # Check if a URL has been provided on the command line.
    #
    url_bases = opts.urls()
    urls = []
    if url_bases is not None:
        # The file component of the URL is the same for every base, so
        # compute it once instead of per base (was recomputed in the loop).
        url_path = urlparse.urlsplit(url)[2]
        slash = url_path.rfind("/")
        if slash < 0:
            url_file = url_path
        else:
            url_file = url_path[slash + 1:]
        for base in url_bases:
            if base[-1:] != "/":
                base += "/"
            urls.append(urlparse.urljoin(base, url_file))
    urls += url.split()
    log.trace("_url: %s -> %s" % (",".join(urls), local))
    for url in urls:
        for dl in downloaders:
            if url.startswith(dl):
                if downloaders[dl](url, local, config, opts):
                    return
    if not opts.dry_run():
        raise error.general("downloading %s: all paths have failed, giving up" % (url))
def _hash_check(file_, absfile, macros, remove=True):
    # Verify absfile against the hash recorded for file_ in the sources
    # database. The recorded value may match either the hex digest or the
    # base64 digest. Returns True when the check passes (or no hash exists
    # on an unreleased RSB); a failed check optionally removes the file.
    failed = False
    hash = sources.get_hash(file_.lower(), macros)
    if hash is not None:
        hash = hash.split()
        if len(hash) != 2:
            raise error.internal('invalid hash format: %s' % (file_))
        try:
            # hashlib.algorithms only exists on some Python versions.
            hashlib_algorithms = hashlib.algorithms
        except:
            hashlib_algorithms = [ 'md5', 'sha1', 'sha224', 'sha256', 'sha384', 'sha512' ]
        if hash[0] not in hashlib_algorithms:
            raise error.general('invalid hash algorithm for %s: %s' % (file_, hash[0]))
        # md5 and sha1 are rejected outright as too weak.
        if hash[0] in ['md5', 'sha1']:
            raise error.general('hash: %s: insecure: %s' % (file_, hash[0]))
        hasher = None
        _in = None
        try:
            hasher = hashlib.new(hash[0])
            _in = open(path.host(absfile), 'rb')
            hasher.update(_in.read())
        except IOError as err:
            log.notice('hash: %s: read error: %s' % (file_, str(err)))
            failed = True
        except:
            msg = 'hash: %s: error' % (file_)
            log.stderr(msg)
            log.notice(msg)
            if _in is not None:
                _in.close()
            raise
        if _in is not None:
            _in.close()
        hash_hex = hasher.hexdigest()
        hash_base64 = base64.b64encode(hasher.digest()).decode('utf-8')
        log.output('checksums: %s: (hex: %s) (b64: %s) => %s' % (file_,
                                                                 hash_hex,
                                                                 hash_base64,
                                                                 hash[1]))
        # Accept the recorded hash in either hex or base64 form.
        if hash_hex != hash[1] and hash_base64 != hash[1]:
            log.warning('checksum error: %s' % (file_))
            failed = True
        if failed and remove:
            log.warning('removing: %s' % (file_))
            if path.exists(absfile):
                try:
                    os.remove(path.host(absfile))
                except IOError as err:
                    raise error.general('hash: %s: remove: %s' % (absfile, str(err)))
                except:
                    raise error.general('hash: %s: remove error' % (file_))
        if hasher is not None:
            del hasher
    else:
        # No recorded hash: fatal on a released RSB, a warning otherwise.
        if version.released():
            raise error.general('%s: no hash found in released RSB' % (file_))
        log.warning('%s: no hash found' % (file_))
    return not failed
def write(self, name, check_for_errors=False):
    """Write the accumulated script body to name and make it executable.

    check_for_errors is accepted for interface compatibility but unused
    here. Raises error.general on any I/O failure.
    """
    try:
        # 'with' guarantees the file is closed even if write() fails; the
        # original leaked the handle on a write error. 'except IOError, err'
        # was also Python 2 only syntax.
        with open(path.host(name), "w") as s:
            s.write("\n".join(self.body))
        # rwx for the owner, r-x for group and others.
        os.chmod(path.host(name),
                 stat.S_IRWXU | stat.S_IRGRP | stat.S_IXGRP |
                 stat.S_IROTH | stat.S_IXOTH)
    except IOError as err:
        raise error.general("creating script: " + name)
def report(self, _config, _build, opts, macros, format=None):
    # Generate a build report for _config unless reporting is disabled.
    # The format comes from the argument, then --report-format, then 'text'.
    # A separate text report is generated for mailing when --mail is given.
    if len(_build.main_package().name()) > 0 \
       and not _build.macros.get('%{_disable_reporting}') \
       and (not _build.opts.get_arg('--no-report') \
            or _build.opts.get_arg('--mail')):
        if format is None:
            format = _build.opts.get_arg('--report-format')
            if format is not None:
                if len(format) != 2:
                    raise error.general(
                        'invalid report format option: %s' % ('='.join(format)))
                format = format[1]
        if format is None:
            format = 'text'
        # Map the report format to an output file extension.
        if format == 'text':
            ext = '.txt'
        elif format == 'asciidoc':
            ext = '.txt'
        elif format == 'html':
            ext = '.html'
        elif format == 'xml':
            ext = '.xml'
        elif format == 'ini':
            ext = '.ini'
        else:
            raise error.general('invalid report format: %s' % (format))
        buildroot = _build.config.abspath('%{buildroot}')
        prefix = _build.macros.expand('%{_prefix}')
        name = _build.main_package().name() + ext
        log.notice('reporting: %s -> %s' % (_config, name))
        if not _build.opts.get_arg('--no-report'):
            outpath = path.host(
                path.join(buildroot, prefix, 'share', 'rtems', 'rsb'))
            if not _build.opts.dry_run():
                outname = path.host(path.join(outpath, name))
            else:
                outname = None
            # Copies of opts/macros keep report generation from mutating the
            # caller's state.
            r = reports.report(format, self.configs,
                               copy.copy(opts), copy.copy(macros))
            r.introduction(_build.config.file_name())
            r.generate(_build.config.file_name())
            r.epilogue(_build.config.file_name())
            if not _build.opts.dry_run():
                _build.mkdir(outpath)
                r.write(outname)
            del r
        if _build.opts.get_arg('--mail'):
            # The mailed report is always plain text.
            r = reports.report('text', self.configs,
                               copy.copy(opts), copy.copy(macros))
            r.introduction(_build.config.file_name())
            r.generate(_build.config.file_name())
            r.epilogue(_build.config.file_name())
            self.write_mail_report(r.out)
            del r
def get_file(url, local, opts, config):
    # Download the source/patch at url into local. URL bases from the
    # command line (and, for released RSBs, the RTEMS release URL) are
    # tried before the URL itself.
    if local is None:
        raise error.general('source/patch path invalid')
    if not path.isdir(path.dirname(local)) and not opts.download_disabled():
        log.notice('Creating source directory: %s' % \
                   (os.path.relpath(path.host(path.dirname(local)))))
        log.output('making dir: %s' % (path.host(path.dirname(local))))
        if _do_download(opts):
            path.mkdir(path.dirname(local))
    if not path.exists(local) and opts.download_disabled():
        raise error.general('source not found: %s' % (path.host(local)))
    #
    # Check if a URL has been provided on the command line. If the package is
    # release push to the start the RTEMS URL.
    #
    url_bases = opts.urls()
    if version.released():
        rtems_release_url = config.macros.expand(
            '%{rtems_release_url}/%{rsb_version}/sources')
        log.trace('release url: %s' % (rtems_release_url))
        #
        # If the URL being fetched is under the release path do not add the
        # sources release path because it is already there.
        #
        if not url.startswith(rtems_release_url):
            if url_bases is None:
                url_bases = [rtems_release_url]
            else:
                url_bases.append(rtems_release_url)
    urls = []
    if url_bases is not None:
        #
        # Split up the URL we are being asked to download.
        #
        url_path = urlparse.urlsplit(url)[2]
        slash = url_path.rfind('/')
        if slash < 0:
            url_file = url_path
        else:
            url_file = url_path[slash + 1:]
        log.trace('url_file: %s' %(url_file))
        for base in url_bases:
            if base[-1:] != '/':
                base += '/'
            next_url = urlparse.urljoin(base, url_file)
            log.trace('url: %s' %(next_url))
            urls.append(next_url)
    urls += url.split()
    log.trace('_url: %s -> %s' % (','.join(urls), local))
    # Try each candidate URL with the downloader registered for its scheme.
    for url in urls:
        for dl in downloaders:
            if url.startswith(dl):
                if downloaders[dl](url, local, config, opts):
                    return
    if _do_download(opts):
        raise error.general('downloading %s: all paths have failed, giving up' % (url))
def write(self, name, check_for_errors=False):
    """Write the script body to name and mark it executable.

    check_for_errors is accepted for interface compatibility but unused.
    Raises error.general on any I/O failure.
    """
    try:
        # 'with' closes the file even when write() fails; the original
        # leaked the handle on a write error and used the Python 2 only
        # 'except IOError, err' syntax.
        with open(path.host(name), 'w') as s:
            s.write('\n'.join(self.body))
        # rwx for the owner, r-x for group and others.
        os.chmod(path.host(name),
                 stat.S_IRWXU |
                 stat.S_IRGRP | stat.S_IXGRP |
                 stat.S_IROTH | stat.S_IXOTH)
    except IOError as err:
        raise error.general('creating script: ' + name)
def report(self, _config, _build, opts, macros, format = None, mail = None):
    # Generate a build report for _config unless reporting is disabled.
    # format: explicit format, else --report-format, else 'text'.
    # mail: when truthy a plain-text copy is also mailed.
    if len(_build.main_package().name()) > 0 \
       and not _build.macros.get('%{_disable_reporting}') \
       and (not _build.opts.get_arg('--no-report') \
            or _build.opts.get_arg('--mail')):
        if format is None:
            format = _build.opts.get_arg('--report-format')
            if format is not None:
                if len(format) != 2:
                    raise error.general('invalid report format option: %s' % \
                                        ('='.join(format)))
                format = format[1]
        if format is None:
            format = 'text'
        # Map the report format to an output file extension.
        if format == 'text':
            ext = '.txt'
        elif format == 'asciidoc':
            ext = '.txt'
        elif format == 'html':
            ext = '.html'
        elif format == 'xml':
            ext = '.xml'
        elif format == 'ini':
            ext = '.ini'
        else:
            raise error.general('invalid report format: %s' % (format))
        buildroot = _build.config.abspath('%{buildroot}')
        prefix = _build.macros.expand('%{_prefix}')
        name = _build.main_package().name() + ext
        log.notice('reporting: %s -> %s' % (_config, name))
        if not _build.opts.get_arg('--no-report'):
            outpath = path.host(path.join(buildroot, prefix,
                                          'share', 'rtems', 'rsb'))
            if not _build.opts.dry_run():
                outname = path.host(path.join(outpath, name))
            else:
                outname = None
            # Copies of opts/macros keep report generation from mutating
            # the caller's state.
            r = reports.report(format, self.configs,
                               copy.copy(opts), copy.copy(macros))
            r.introduction(_build.config.file_name())
            r.generate(_build.config.file_name())
            r.epilogue(_build.config.file_name())
            if not _build.opts.dry_run():
                _build.mkdir(outpath)
                r.write(outname)
            del r
        if mail:
            # The mailed report is always plain text.
            r = reports.report('text', self.configs,
                               copy.copy(opts), copy.copy(macros))
            r.introduction(_build.config.file_name())
            r.generate(_build.config.file_name())
            r.epilogue(_build.config.file_name())
            self.write_mail_report(r.get_output())
            del r
def _hash_check(file_, absfile, macros, remove = True):
    # Verify absfile against the hex hash recorded for file_ in the sources
    # database. Returns True on success (or when no hash exists on an
    # unreleased RSB); a failed check optionally removes the file.
    failed = False
    hash = sources.get_hash(file_.lower(), macros)
    if hash is not None:
        hash = hash.split()
        if len(hash) != 2:
            raise error.internal('invalid hash format: %s' % (file_))
        try:
            # hashlib.algorithms only exists on some Python versions.
            hashlib_algorithms = hashlib.algorithms
        except:
            hashlib_algorithms = ['md5', 'sha1', 'sha224', 'sha256', 'sha384', 'sha512']
        if hash[0] not in hashlib_algorithms:
            raise error.general('invalid hash algorithm for %s: %s' % (file_, hash[0]))
        # md5 and sha1 are rejected outright as too weak.
        if hash[0] in ['md5', 'sha1']:
            raise error.general('hash: %s: insecure: %s' % (file_, hash[0]))
        hasher = None
        _in = None
        try:
            hasher = hashlib.new(hash[0])
            _in = open(path.host(absfile), 'rb')
            hasher.update(_in.read())
        except IOError as err:
            log.notice('hash: %s: read error: %s' % (file_, str(err)))
            failed = True
        except:
            msg = 'hash: %s: error' % (file_)
            log.stderr(msg)
            log.notice(msg)
            if _in is not None:
                _in.close()
            raise
        if _in is not None:
            _in.close()
        log.output('checksums: %s: %s => %s' % (file_, hasher.hexdigest(), hash[1]))
        if hasher.hexdigest() != hash[1]:
            log.warning('checksum error: %s' % (file_))
            failed = True
        if failed and remove:
            log.warning('removing: %s' % (file_))
            if path.exists(absfile):
                try:
                    os.remove(path.host(absfile))
                except IOError as err:
                    raise error.general('hash: %s: remove: %s' % (absfile, str(err)))
                except:
                    raise error.general('hash: %s: remove error' % (file_))
        if hasher is not None:
            del hasher
    else:
        # No recorded hash: fatal on a released RSB, a warning otherwise.
        if version.released():
            raise error.general('%s: no hash found in released RSB' % (file_))
        log.warning('%s: no hash found' % (file_))
    return not failed
def load(self, name):
    """Parse the first existing macro file named by the colon-separated
    expansion of name.

    Records the file in self.files on success; raises error.general when
    no candidate can be opened.
    """
    names = self.expand(name).split(":")
    for n in names:
        if path.exists(n):
            try:
                # 'with' closes the handle even if parse() raises; the
                # original leaked it in that case.
                with open(path.host(n), "r") as mc:
                    macros = self.parse(mc)
                self.files += [n]
                return
            except IOError as err:
                # Unreadable candidate: fall through to the next name.
                pass
    raise error.general("opening macro file: %s" % (path.host(self.expand(name))))
def report(self, _config, _build, opts, macros, format=None):
    # Generate a build report for _config unless reporting is disabled.
    # The format comes from the argument, then --report-format, then "text".
    # A separate plain-text report is generated for mailing when --mail is set.
    if (
        len(_build.main_package().name()) > 0
        and not _build.macros.get("%{_disable_reporting}")
        and (not _build.opts.get_arg("--no-report") or _build.opts.get_arg("--mail"))
    ):
        if format is None:
            format = _build.opts.get_arg("--report-format")
            if format is not None:
                if len(format) != 2:
                    raise error.general("invalid report format option: %s" % ("=".join(format)))
                format = format[1]
        if format is None:
            format = "text"
        # Map the report format to an output file extension.
        if format == "text":
            ext = ".txt"
        elif format == "asciidoc":
            ext = ".txt"
        elif format == "html":
            ext = ".html"
        elif format == "xml":
            ext = ".xml"
        elif format == "ini":
            ext = ".ini"
        else:
            raise error.general("invalid report format: %s" % (format))
        buildroot = _build.config.abspath("%{buildroot}")
        prefix = _build.macros.expand("%{_prefix}")
        name = _build.main_package().name() + ext
        log.notice("reporting: %s -> %s" % (_config, name))
        if not _build.opts.get_arg("--no-report"):
            outpath = path.host(path.join(buildroot, prefix, "share", "rtems", "rsb"))
            if not _build.opts.dry_run():
                outname = path.host(path.join(outpath, name))
            else:
                outname = None
            # Copies of opts/macros keep report generation from mutating
            # the caller's state.
            r = reports.report(format, self.configs, copy.copy(opts), copy.copy(macros))
            r.introduction(_build.config.file_name())
            r.generate(_build.config.file_name())
            r.epilogue(_build.config.file_name())
            if not _build.opts.dry_run():
                _build.mkdir(outpath)
                r.write(outname)
            del r
        if _build.opts.get_arg("--mail"):
            # The mailed report is always plain text.
            r = reports.report("text", self.configs, copy.copy(opts), copy.copy(macros))
            r.introduction(_build.config.file_name())
            r.generate(_build.config.file_name())
            r.epilogue(_build.config.file_name())
            self.write_mail_report(r.out)
            del r
def report(self, _config, _build):
    # Generate a build report when mailing is requested and reporting is
    # enabled. Defaults to HTML when no --report-format is given.
    if not _build.opts.get_arg('--no-report') \
       and not _build.macros.get('%{_disable_reporting}') \
       and _build.opts.get_arg('--mail'):
        format = _build.opts.get_arg('--report-format')
        if format is None:
            format = 'html'
            ext = '.html'
        else:
            if len(format) != 2:
                raise error.general('invalid report format option: %s' %
                                    ('='.join(format)))
            # Map the requested format to an output extension.
            if format[1] == 'text':
                format = 'text'
                ext = '.txt'
            elif format[1] == 'asciidoc':
                format = 'asciidoc'
                ext = '.txt'
            elif format[1] == 'html':
                format = 'html'
                ext = '.html'
            else:
                raise error.general('invalid report format: %s' % (format[1]))
        buildroot = _build.config.abspath('%{buildroot}')
        prefix = _build.macros.expand('%{_prefix}')
        name = _build.main_package().name() + ext
        log.notice('reporting: %s -> %s' % (_config, name))
        if not _build.opts.get_arg('--no-report'):
            outpath = path.host(
                path.join(buildroot, prefix, 'share', 'rtems-source-builder'))
            outname = path.host(path.join(outpath, name))
            r = reports.report(format, self.configs, _build.opts, _build.macros)
            r.setup()
            r.introduction(_build.config.file_name())
            r.config(_build.config, _build.opts, _build.macros)
            if not _build.opts.dry_run():
                _build.mkdir(outpath)
                r.write(outname)
            del r
        if not _build.macros.get('%{_disable_reporting}') \
           and _build.opts.get_arg('--mail'):
            # The mailed report is always plain text.
            r = reports.report('text', self.configs, _build.opts, _build.macros)
            r.setup()
            r.introduction(_build.config.file_name())
            r.config(_build.config, _build.opts, _build.macros)
            self.write_mail_report(r.out)
            del r
def runner(self): import subprocess # # Support Python 2.6 # if "check_output" not in dir(subprocess): def f(*popenargs, **kwargs): if 'stdout' in kwargs: raise ValueError( 'stdout argument not allowed, it will be overridden.') process = subprocess.Popen(stdout=subprocess.PIPE, *popenargs, **kwargs) output, unused_err = process.communicate() retcode = process.poll() if retcode: cmd = kwargs.get("args") if cmd is None: cmd = popenargs[0] raise subprocess.CalledProcessError(retcode, cmd) return output subprocess.check_output = f self.start_time = datetime.datetime.now() self.exit_code = 0 try: try: if os.name == 'nt': cmd = ['sh', '-c'] + self.cmd else: cmd = self.cmd self.output = subprocess.check_output(cmd, cwd=path.host(self.cwd)) except subprocess.CalledProcessError as cpe: self.exit_code = cpe.returncode self.output = cpe.output except OSError as ose: raise error.general('bootstrap failed: %s in %s: %s' % \ (' '.join(cmd), path.host(self.cwd), (str(ose)))) except KeyboardInterrupt: pass except: raise except: self.result = sys.exc_info() self.end_time = datetime.datetime.now()
def load(self, name):
    """Parse the first existing macro file named by the colon-separated
    expansion of name.

    Records the file in self.files on success; raises error.general when
    no candidate can be opened.
    """
    names = self.expand(name).split(':')
    for n in names:
        if path.exists(n):
            try:
                # 'with' closes the handle even if parse() raises; the
                # original leaked it in that case.
                with open(path.host(n), 'r') as mc:
                    macros = self.parse(mc)
                self.files += [n]
                return
            except IOError as err:
                # Unreadable candidate: fall through to the next name.
                pass
    raise error.general('opening macro file: %s' % \
                        (path.host(self.expand(name))))
def load(self, name):
    """Parse the first existing macro file named by the colon-separated
    expansion of name.

    Records the file in self.files on success; raises error.general when
    no candidate can be opened.
    """
    names = self.expand(name).split(':')
    for n in names:
        log.trace('opening: %s' % (n))
        if path.exists(n):
            try:
                # 'with' closes the handle even if parse() raises; the
                # original leaked it in that case.
                with open(path.host(n), 'r') as mc:
                    macros = self.parse(mc)
                self.files += [n]
                return
            except IOError as err:
                # Unreadable candidate: fall through to the next name.
                pass
    raise error.general('opening macro file: %s' % \
                        (path.host(self.expand(name))))
def get_config_files(self, config):
    """Resolve config to a list of shell paths, globbing '*'/'?' patterns
    across the colon-separated %{_configdir} directories."""
    #
    # Convert to shell paths and return shell paths.
    #
    # @fixme should this use a passed in set of defaults and not
    #        not the initial set of values ?
    #
    config = path.shell(config)
    if '*' in config or '?' in config:
        print(config)  # fixed: Python 2 'print config' statement
        configdir = path.dirname(config)
        configbase = path.basename(config)
        if len(configbase) == 0:
            configbase = '*'
        if not configbase.endswith('.cfg'):
            configbase = configbase + '.cfg'
        if len(configdir) == 0:
            configdir = self.macros.expand(self.defaults['_configdir'])
        configs = []
        for cp in configdir.split(':'):
            hostconfigdir = path.host(cp)
            for f in glob.glob(os.path.join(hostconfigdir, configbase)):
                # NOTE(review): if path.shell(f) returns a str this extends
                # configs character-by-character; presumably it returns a
                # list here -- confirm against path.shell's contract.
                configs += path.shell(f)
    else:
        configs = [config]
    return configs
def run():
    # Command-line entry point for the set builder: parse options, check the
    # host, then build each requested build set. Exits the process with a
    # non-zero code on failure.
    import sys
    ec = 0
    setbuilder_error = False
    try:
        optargs = {
            '--list-configs': 'List available configurations',
            '--list-bsets': 'List available build sets',
            '--list-deps': 'List the dependent files.',
            '--bset-tar-file': 'Create a build set tar file',
            '--pkg-tar-files': 'Create package tar files',
            '--no-report': 'Do not create a package report.',
            '--report-format': 'The report format (text, html, asciidoc).'
        }
        mailer.append_options(optargs)
        opts = options.load(sys.argv, optargs)
        log.notice('RTEMS Source Builder - Set Builder, %s' % (version.str()))
        opts.log_info()
        if not check.host_setup(opts):
            raise error.general('host build environment is not set up correctly')
        configs = build.get_configs(opts)
        # deps is a list only when dependency listing was requested.
        if opts.get_arg('--list-deps'):
            deps = []
        else:
            deps = None
        if not list_bset_cfg_files(opts, configs):
            prefix = opts.defaults.expand('%{_prefix}')
            if opts.canadian_cross():
                opts.disable_install()
            if not opts.dry_run() and \
               not opts.canadian_cross() and \
               not opts.no_install() and \
               not path.ispathwritable(prefix):
                raise error.general('prefix is not writable: %s' %
                                    (path.host(prefix)))
            for bset in opts.params():
                # setbuilder_error marks that the buildset itself reported
                # the failure, so the handlers below avoid double-logging.
                setbuilder_error = True
                b = buildset(bset, configs, opts)
                b.build(deps)
                b = None
                setbuilder_error = False
        if deps is not None:
            c = 0
            for d in sorted(set(deps)):
                c += 1
                print('dep[%d]: %s' % (c, d))
    except error.general as gerr:
        if not setbuilder_error:
            log.stderr(str(gerr))
        log.stderr('Build FAILED')
        ec = 1
    except error.internal as ierr:
        if not setbuilder_error:
            log.stderr(str(ierr))
        log.stderr('Internal Build FAILED')
        ec = 1
    except error.exit as eerr:
        pass
    except KeyboardInterrupt:
        log.notice('abort: user terminated')
        ec = 1
    sys.exit(ec)
def _git_downloader(url, local, config, opts):
    """Clone the git repository named by url into local and apply any
    '?key=value' options (branch, pull, fetch, reset). Always returns True."""
    rlp = os.path.relpath(path.host(local))
    parts = url.split('?')
    base_url = parts[0]
    options = parts[1:]
    repo = git.repo(local, opts, config.macros)
    # A missing or invalid checkout means we clone first.
    if not repo.valid():
        log.notice('git: clone: %s -> %s' % (base_url, rlp))
        if not opts.dry_run():
            repo.clone(base_url, local)
    # Apply each URL option in order.
    for option in options:
        fields = option.split('=')
        key = fields[0]
        if key == 'branch':
            log.notice('git: checkout: %s => %s' % (base_url, fields[1]))
            if not opts.dry_run():
                repo.checkout(fields[1])
        elif key == 'pull':
            log.notice('git: pull: %s' % (base_url))
            if not opts.dry_run():
                repo.pull()
        elif key == 'fetch':
            log.notice('git: fetch: %s -> %s' % (base_url, rlp))
            if not opts.dry_run():
                repo.fetch()
        elif key == 'reset':
            # An optional value becomes a --<value> flag for git reset.
            arg = ['--%s' % (fields[1])] if len(fields) > 1 else []
            log.notice('git: reset: %s' % (base_url))
            if not opts.dry_run():
                repo.reset(arg)
    return True
def get_config_files(self, config):
    # Resolve config to a list of shell paths, globbing '*'/'?' patterns
    # across the colon-separated %{_configdir} directories.
    #
    # Convert to shell paths and return shell paths.
    #
    # @fixme should this use a passed in set of defaults and not
    #        not the initial set of values ?
    #
    config = path.shell(config)
    if '*' in config or '?' in config:
        print(config)
        configdir = path.dirname(config)
        configbase = path.basename(config)
        if len(configbase) == 0:
            configbase = '*'
        if not configbase.endswith('.cfg'):
            configbase = configbase + '.cfg'
        if len(configdir) == 0:
            configdir = self.macros.expand(self.defaults['_configdir'])
        configs = []
        for cp in configdir.split(':'):
            hostconfigdir = path.host(cp)
            for f in glob.glob(os.path.join(hostconfigdir, configbase)):
                # NOTE(review): if path.shell(f) returns a str this extends
                # configs character-by-character; presumably it returns a
                # list here -- confirm against path.shell's contract.
                configs += path.shell(f)
    else:
        configs = [config]
    return configs
def _hash_check(file_, absfile, macros, remove=True):
    # Verify absfile against the hash recorded for file_ (Python 2 variant:
    # 'except IOError, err').
    # NOTE(review): this chunk is truncated -- the trailing 'except:' has no
    # body here; confirm against the full source.
    failed = False
    hash = sources.get_hash(file_.lower(), macros)
    if hash is not None:
        hash = hash.split()
        if len(hash) != 2:
            raise error.internal('invalid hash format: %s' % (file_))
        try:
            # hashlib.algorithms only exists on some Python versions.
            hashlib_algorithms = hashlib.algorithms
        except:
            hashlib_algorithms = [ 'md5', 'sha1', 'sha224', 'sha256', 'sha384', 'sha512' ]
        if hash[0] not in hashlib_algorithms:
            raise error.general('invalid hash algorithm for %s: %s' % (file_, hash[0]))
        hasher = None
        _in = None
        try:
            hasher = hashlib.new(hash[0])
            _in = open(path.host(absfile), 'rb')
            hasher.update(_in.read())
        except IOError, err:
            log.notice('hash: %s: read error: %s' % (file_, str(err)))
            failed = True
        except:
def parse(self, bset):
    # Locate and open the named build set file, searching the
    # %{_configdir} directories when the bare name does not exist.
    # NOTE(review): this chunk appears truncated -- the opened file is never
    # read or closed in the visible code; confirm against the full source.
    def _clean(line):
        # Strip the trailing newline and any '#' comment, then whitespace.
        # NOTE(review): 'line[1:b]' also drops the first character --
        # presumably this should be 'line[0:b]'; confirm upstream.
        line = line[0:-1]
        b = line.find('#')
        if b >= 0:
            line = line[1:b]
        return line.strip()
    bsetname = bset
    if not path.exists(bsetname):
        for cp in self.macros.expand('%{_configdir}').split(':'):
            configdir = path.abspath(cp)
            bsetname = path.join(configdir, bset)
            if path.exists(bsetname):
                break
            bsetname = None
    if bsetname is None:
        raise error.general('no build set file found: %s' % (bset))
    try:
        log.trace('_bset: %s: open: %s' % (self.bset, bsetname))
        bset = open(path.host(bsetname), 'r')
    except IOError, err:
        raise error.general('error opening bset file: %s' % (bsetname))
def run(self, command, shell_opts = '', cwd = None):
    """Execute command via the configured build shell.

    Raises error.general when the command exits non-zero.
    """
    executor = execute.capture_execution(log = log.default,
                                         dump = self.opts.quiet())
    # Expand the shell wrapper and options around the command.
    full_cmd = self.config.expand('%{___build_shell} -ex ' +
                                  shell_opts + ' ' + command)
    log.output('run: ' + full_cmd)
    status, proc, output = executor.shell(full_cmd, cwd = path.host(cwd))
    if status != 0:
        raise error.general('shell cmd failed: %s' % (full_cmd))
def _collect(path_, file):
    """Walk path_ and return shell paths of every file whose name is file."""
    found = []
    for root, dirs, files in os.walk(path.host(path_), topdown = True):
        # Collect matches in this directory in one pass.
        found += [path.shell(path.join(root, name))
                  for name in files if name == file]
    return found
def _collect(path_, file):
    """Walk path_ and return shell paths of every file whose name is file."""
    matches = []
    for root, dirs, files in os.walk(path.host(path_), topdown=True):
        for entry in files:
            if entry != file:
                continue
            # Record the match as a shell path.
            matches += [path.shell(path.join(root, entry))]
    return matches
def _cvs_downloader(url, local, config, opts):
    # Check out or update the CVS module described by url into local.
    # URL options after '?': module, src-prefix, tag, date, update, reset.
    # Always returns True.
    rlp = os.path.relpath(path.host(local))
    us = url.split('?')
    module = None
    tag = None
    date = None
    src_prefix = None
    # First pass: gather checkout parameters.
    for a in us[1:]:
        _as = a.split('=')
        if _as[0] == 'module':
            if len(_as) != 2:
                raise error.general('invalid cvs module: %s' % (a))
            module = _as[1]
        elif _as[0] == 'src-prefix':
            if len(_as) != 2:
                raise error.general('invalid cvs src-prefix: %s' % (a))
            src_prefix = _as[1]
        elif _as[0] == 'tag':
            if len(_as) != 2:
                raise error.general('invalid cvs tag: %s' % (a))
            tag = _as[1]
        elif _as[0] == 'date':
            if len(_as) != 2:
                raise error.general('invalid cvs date: %s' % (a))
            date = _as[1]
    repo = cvs.repo(local, opts, config.macros, src_prefix)
    if not repo.valid():
        if not path.isdir(local):
            log.notice('Creating source directory: %s' % \
                       (os.path.relpath(path.host(local))))
            if _do_download(opts):
                path.mkdir(local)
            log.notice('cvs: checkout: %s -> %s' % (us[0], rlp))
            if _do_download(opts):
                # us[0][6:] strips the 'cvs://' scheme prefix.
                repo.checkout(':%s' % (us[0][6:]), module, tag, date)
    # Second pass: apply post-checkout actions.
    for a in us[1:]:
        _as = a.split('=')
        if _as[0] == 'update':
            log.notice('cvs: update: %s' % (us[0]))
            if _do_download(opts):
                repo.update()
        elif _as[0] == 'reset':
            log.notice('cvs: reset: %s' % (us[0]))
            if _do_download(opts):
                repo.reset()
    return True
def run(self, command, shell_opts="", cwd=None):
    """Execute command via the configured build shell.

    Logs the failing command and raises error.general on a non-zero exit.
    """
    executor = execute.capture_execution(log=log.default, dump=self.opts.quiet())
    # Expand the shell wrapper and options around the command.
    expanded = self.config.expand("%{___build_shell} -ex " + shell_opts + " " + command)
    log.output("run: " + expanded)
    status, proc, output = executor.shell(expanded, cwd=path.host(cwd))
    if status != 0:
        log.output("shell cmd failed: %s" % (expanded))
        raise error.general("building %s" % (self.macros["buildname"]))
def _grep(file, pattern):
    """Scan file for lines matching the regex pattern.

    Raises error.general when the file cannot be read.
    NOTE(review): 'matches' is unused in the visible code -- the original
    presumably ended with 'return True in matches'; confirm upstream.
    """
    rege = re.compile(pattern)
    try:
        # 'with' closes the handle on any failure; the original used the
        # Python 2 only 'except IOError, err' syntax and could leak it.
        with open(path.host(file), 'r') as f:
            # 'is not None' instead of '!= None' for the identity test.
            matches = [rege.match(l) is not None for l in f.readlines()]
    except IOError as err:
        raise error.general('reading: %s' % (file))
def run(self, command, shell_opts='', cwd=None):
    """Execute command via the configured build shell.

    Raises error.general when the command exits non-zero.
    """
    capture = execute.capture_execution(log=log.default, dump=self.opts.quiet())
    # Expand the shell wrapper and options around the command.
    shell_cmd = self.config.expand('%{___build_shell} -ex ' + shell_opts + ' ' + command)
    log.output('run: ' + shell_cmd)
    status, proc, output = capture.shell(shell_cmd, cwd=path.host(cwd))
    if status != 0:
        raise error.general('shell cmd failed: %s' % (shell_cmd))
def _grep(file, pattern):
    """Scan file for lines matching the regex pattern.

    Raises error.general when the file cannot be read.
    NOTE(review): 'matches' is unused in the visible code -- the original
    presumably ended with 'return True in matches'; confirm upstream.
    """
    rege = re.compile(pattern)
    try:
        # 'with' closes the handle on any failure; the original used the
        # Python 2 only 'except IOError, err' syntax and could leak it.
        with open(path.host(file), 'r') as f:
            # 'is not None' instead of '!= None' for the identity test.
            matches = [rege.match(l) is not None for l in f.readlines()]
    except IOError as err:
        raise error.general('error reading: %s' % (file))
def write(self, name, check_for_errors = False):
    """Write the script body to name and mark it executable.

    check_for_errors is accepted for interface compatibility but unused.
    Raises error.general on any I/O failure.
    """
    try:
        # 'with' guarantees the handle is closed on every path; the original
        # leaked it on the IOError -> error.general path because only the
        # bare 'except' clause closed the file.
        with open(path.host(name), 'w') as s:
            s.write('\n'.join(self.body))
        # rwx for the owner, r-x for group and others.
        os.chmod(path.host(name),
                 stat.S_IRWXU |
                 stat.S_IRGRP | stat.S_IXGRP |
                 stat.S_IROTH | stat.S_IXOTH)
    except IOError as err:
        raise error.general('creating script: ' + name)
def report(self, _config, _build):
    # Generate a build report when mailing is requested and reporting is
    # enabled. Defaults to HTML when no --report-format is given.
    if not _build.opts.get_arg('--no-report') \
       and not _build.macros.get('%{_disable_reporting}') \
       and _build.opts.get_arg('--mail'):
        format = _build.opts.get_arg('--report-format')
        if format is None:
            format = 'html'
            ext = '.html'
        else:
            if len(format) != 2:
                raise error.general('invalid report format option: %s' %
                                    ('='.join(format)))
            # Map the requested format to an output extension.
            if format[1] == 'text':
                format = 'text'
                ext = '.txt'
            elif format[1] == 'asciidoc':
                format = 'asciidoc'
                ext = '.txt'
            elif format[1] == 'html':
                format = 'html'
                ext = '.html'
            else:
                raise error.general('invalid report format: %s' % (format[1]))
        buildroot = _build.config.abspath('%{buildroot}')
        prefix = _build.macros.expand('%{_prefix}')
        name = _build.main_package().name() + ext
        log.notice('reporting: %s -> %s' % (_config, name))
        if not _build.opts.get_arg('--no-report'):
            outpath = path.host(path.join(buildroot, prefix,
                                          'share', 'rtems-source-builder'))
            outname = path.host(path.join(outpath, name))
            r = reports.report(format, self.configs, _build.opts, _build.macros)
            r.setup()
            r.introduction(_build.config.file_name())
            r.config(_build.config, _build.opts, _build.macros)
            if not _build.opts.dry_run():
                _build.mkdir(outpath)
                r.write(outname)
            del r
        if not _build.macros.get('%{_disable_reporting}') \
           and _build.opts.get_arg('--mail'):
            # The mailed report is always plain text.
            r = reports.report('text', self.configs, _build.opts, _build.macros)
            r.setup()
            r.introduction(_build.config.file_name())
            r.config(_build.config, _build.opts, _build.macros)
            self.write_mail_report(r.out)
            del r
def _cvs_downloader(url, local, config, opts):
    # Check out or update the CVS module described by url into local.
    # URL options after '?': module, src-prefix, tag, date, update, reset.
    # Always returns True.
    rlp = os.path.relpath(path.host(local))
    us = url.split("?")
    module = None
    tag = None
    date = None
    src_prefix = None
    # First pass: gather checkout parameters.
    for a in us[1:]:
        _as = a.split("=")
        if _as[0] == "module":
            if len(_as) != 2:
                raise error.general("invalid cvs module: %s" % (a))
            module = _as[1]
        elif _as[0] == "src-prefix":
            if len(_as) != 2:
                raise error.general("invalid cvs src-prefix: %s" % (a))
            src_prefix = _as[1]
        elif _as[0] == "tag":
            if len(_as) != 2:
                raise error.general("invalid cvs tag: %s" % (a))
            tag = _as[1]
        elif _as[0] == "date":
            if len(_as) != 2:
                raise error.general("invalid cvs date: %s" % (a))
            date = _as[1]
    repo = cvs.repo(local, opts, config.macros, src_prefix)
    if not repo.valid():
        if not path.isdir(local):
            log.notice("Creating source directory: %s" %
                       (os.path.relpath(path.host(local))))
            if not opts.dry_run():
                path.mkdir(local)
            log.notice("cvs: checkout: %s -> %s" % (us[0], rlp))
            if not opts.dry_run():
                # us[0][6:] strips the 'cvs://' scheme prefix.
                repo.checkout(":%s" % (us[0][6:]), module, tag, date)
    # Second pass: apply post-checkout actions.
    for a in us[1:]:
        _as = a.split("=")
        if _as[0] == "update":
            log.notice("cvs: update: %s" % (us[0]))
            if not opts.dry_run():
                repo.update()
        elif _as[0] == "reset":
            log.notice("cvs: reset: %s" % (us[0]))
            if not opts.dry_run():
                repo.reset()
    return True
def runner(self): import subprocess # # Support Python 2.6 # if "check_output" not in dir(subprocess): def f(*popenargs, **kwargs): if 'stdout' in kwargs: raise ValueError('stdout argument not allowed, it will be overridden.') process = subprocess.Popen(stdout=subprocess.PIPE, *popenargs, **kwargs) output, unused_err = process.communicate() retcode = process.poll() if retcode: cmd = kwargs.get("args") if cmd is None: cmd = popenargs[0] raise subprocess.CalledProcessError(retcode, cmd) return output subprocess.check_output = f self.start_time = datetime.datetime.now() self.exit_code = 0 try: try: if os.name == 'nt': cmd = ['sh', '-c'] + self.cmd else: cmd = self.cmd self.output = subprocess.check_output(cmd, cwd = path.host(self.cwd)) except subprocess.CalledProcessError as cpe: self.exit_code = cpe.returncode self.output = cpe.output except OSError as ose: raise error.general('bootstrap failed: %s in %s: %s' % \ (' '.join(cmd), path.host(self.cwd), (str(ose)))) except KeyboardInterrupt: pass except: raise except: self.result = sys.exc_info() self.end_time = datetime.datetime.now()
def write(self, name):
    """Post-process the report into self.out and, when name is given,
    write it to that file.

    Raises error.general on an I/O failure.
    """
    self.out = self.formatter.post_process()
    if name is not None:
        try:
            # 'with' closes the handle even if write() fails; the original
            # leaked it on a write error.
            with open(path.host(name), "w") as o:
                o.write(self.out)
        except IOError as err:
            raise error.general('writing output file: %s: %s' % (name, err))
def post_process(self):
    """Write the captured ampolish3 output to the preinstall file.

    Raises error.general when ampolish3 failed or the file cannot be
    written.
    """
    if self.command is not None:
        if self.command.exit_code != 0:
            raise error.general('error: ampolish3: %s' % (' '.join(self.command.cmd)))
        try:
            # 'with' closes the handle even on a write failure (the
            # original leaked it); writelines replaces the manual loop.
            with open(path.host(self.preinstall), 'w') as p:
                p.writelines(self.command.output)
        except IOError as err:
            raise error.general('writing: %s' % (self.preinstall))
def run():
    # Command-line entry point for the set builder (Python 2 variant:
    # 'print' statement and 'except error.general, gerr').
    # NOTE(review): this chunk appears truncated -- the other exception
    # handlers and sys.exit(ec) are not visible; confirm against the full
    # source.
    import sys
    ec = 0
    setbuilder_error = False
    try:
        optargs = {
            "--list-configs": "List available configurations",
            "--list-bsets": "List available build sets",
            "--list-deps": "List the dependent files.",
            "--bset-tar-file": "Create a build set tar file",
            "--pkg-tar-files": "Create package tar files",
            "--no-report": "Do not create a package report.",
            "--report-format": "The report format (text, html, asciidoc).",
        }
        mailer.append_options(optargs)
        opts = options.load(sys.argv, optargs)
        log.notice("RTEMS Source Builder - Set Builder, %s" % (version.str()))
        opts.log_info()
        if not check.host_setup(opts):
            raise error.general("host build environment is not set up correctly")
        configs = build.get_configs(opts)
        # deps is a list only when dependency listing was requested.
        if opts.get_arg("--list-deps"):
            deps = []
        else:
            deps = None
        if not list_bset_cfg_files(opts, configs):
            prefix = opts.defaults.expand("%{_prefix}")
            if opts.canadian_cross():
                opts.disable_install()
            if (
                not opts.dry_run()
                and not opts.canadian_cross()
                and not opts.no_install()
                and not path.ispathwritable(prefix)
            ):
                raise error.general("prefix is not writable: %s" % (path.host(prefix)))
            for bset in opts.params():
                # setbuilder_error marks that the buildset itself reported
                # the failure, so the handler below avoids double-logging.
                setbuilder_error = True
                b = buildset(bset, configs, opts)
                b.build(deps)
                b = None
                setbuilder_error = False
        if deps is not None:
            c = 0
            for d in sorted(set(deps)):
                c += 1
                print "dep[%d]: %s" % (c, d)
    except error.general, gerr:
        if not setbuilder_error:
            log.stderr(str(gerr))
        log.stderr("Build FAILED")
        ec = 1
def post_process(self):
    """Write the captured ampolish3 output to the preinstall file.

    Raises error.general when ampolish3 failed or the file cannot be
    written.
    """
    if self.command is not None:
        if self.command.exit_code != 0:
            raise error.general('error: ampolish3: %s' % (' '.join(self.command.cmd)))
        try:
            # 'with' closes the handle even on a write failure (the
            # original leaked it and used the Python 2 only
            # 'except IOError, err' syntax).
            with open(path.host(self.preinstall), 'w') as p:
                p.writelines(self.command.output)
        except IOError as err:
            raise error.general('error writing: %s' % (self.preinstall))
def bset_tar(self, _build):
    """Create the build set tarball when requested (or when Canadian
    cross building) and packaging is not disabled."""
    tardir = _build.config.expand('%{_tardir}')
    wanted = self.opts.get_arg('--bset-tar-file') or self.opts.canadian_cross()
    # Guard clauses replace the original nested condition.
    if not wanted or _build.macros.get('%{_disable_packaging}'):
        return
    path.mkdir(tardir)
    tar = path.join(tardir,
                    _build.config.expand('%s.tar.bz2' %
                                         (_build.main_package().name())))
    log.notice('tarball: %s' % (os.path.relpath(path.host(tar))))
    if self.opts.dry_run():
        return
    tmproot = _build.config.expand('%{_tmproot}')
    # Archive the staged tree with tar piped through bzip2.
    cmd = _build.config.expand('"cd ' + tmproot + \
                               ' && %{__tar} -cf - . | %{__bzip2} > ' + tar + '"')
    _build.run(cmd, shell_opts = '-c', cwd = tmproot)
def load(self, name):
    # Parse the first existing macro file named by the colon-separated
    # expansion of name (Python 2 variant: 'except IOError, err').
    # NOTE(review): this chunk appears truncated -- the final
    # 'raise error.general(...)' for the no-file case is not visible;
    # confirm against the full source.
    names = self.expand(name).split(':')
    for n in names:
        if path.exists(n):
            try:
                mc = open(path.host(n), 'r')
                macros = self.parse(mc)
                mc.close()
                self.files += [n]
                return
            except IOError, err:
                # Unreadable candidate: fall through to the next name.
                pass
def bset_tar(self, _build):
    """Create the build set tarball when --bset-tar-file is given and
    packaging is not disabled."""
    tardir = _build.config.expand('%{_tardir}')
    # Guard clauses replace the original nested condition.
    if not self.opts.get_arg('--bset-tar-file') \
       or _build.macros.get('%{_disable_packaging}'):
        return
    path.mkdir(tardir)
    tar = path.join(tardir,
                    _build.config.expand('%s.tar.bz2' % (self.bset_pkg)))
    log.notice('tarball: %s' % (os.path.relpath(path.host(tar))))
    if self.opts.dry_run():
        return
    tmproot = _build.config.expand('%{_tmproot}')
    # Archive the staged tree with tar piped through bzip2.
    cmd = _build.config.expand("'cd " + tmproot + \
                               " && %{__tar} -cf - . | %{__bzip2} > " + tar + "'")
    _build.run(cmd, shell_opts = '-c', cwd = tmproot)
def bset_tar(self, _build):
    # Tar up the build set staging area as %{_tardir}/<bset>.tar.bz2.
    # Triggered by --bset-tar-file or a Canadian cross build, and
    # skipped when packaging is disabled for this build.
    tardir = _build.config.expand("%{_tardir}")
    requested = self.opts.get_arg("--bset-tar-file") or self.opts.canadian_cross()
    if not requested or _build.macros.get("%{_disable_packaging}"):
        return
    path.mkdir(tardir)
    tarball = path.join(tardir, _build.config.expand("%s.tar.bz2" % (self.bset_pkg)))
    log.notice("tarball: %s" % (os.path.relpath(path.host(tarball))))
    if self.opts.dry_run():
        return
    tmproot = _build.config.expand("%{_tmproot}")
    pipeline = _build.config.expand(
        "'cd " + tmproot + " && %{__tar} -cf - . | %{__bzip2} > " + tarball + "'"
    )
    _build.run(pipeline, shell_opts="-c", cwd=tmproot)
def _run(self, args, check = False):
    # Spawn git with the given arguments, using the repository path as
    # the working directory when it exists. Returns (exit_code,
    # output); with check=True a non-zero exit code raises via
    # _git_exit_code().
    runner = execute.capture_execution()
    cwd = self.path if path.exists(self.path) else None
    cmd = [self.git] + args
    log.trace('cmd: (%s) %s' % (str(cwd), ' '.join(cmd)))
    exit_code, proc, output = runner.spawn(cmd, cwd = path.host(cwd))
    log.trace(output)
    if check:
        self._git_exit_code(exit_code)
    return exit_code, output
def _run(self, args, check = False, cwd = None):
    # Run a cvs command (compressed, quiet) and capture its output.
    # The working directory defaults to the checked-out module path
    # and must exist. Returns (exit_code, output); with check=True a
    # cvs failure raises via _cvs_exit_code().
    runner = execute.capture_execution()
    if cwd is None:
        cwd = path.join(self.path, self.prefix)
    if not path.exists(cwd):
        raise error.general('cvs path needs to exist: %s' % (cwd))
    command = [self.cvs, '-z', '9', '-q'] + args
    log.output('cmd: (%s) %s' % (str(cwd), ' '.join(command)))
    ec, proc, out = runner.spawn(command, cwd = path.host(cwd))
    log.trace(out)
    if check:
        self._cvs_exit_code(command, ec, out)
    return ec, out
def bset_tar(self, _build):
    # Archive the build set staging tree into a bzip2 tarball when
    # requested on the command line and packaging is enabled.
    tar_dir = _build.config.expand('%{_tardir}')
    if not self.opts.get_arg('--bset-tar-file'):
        return
    if _build.macros.get('%{_disable_packaging}'):
        return
    path.mkdir(tar_dir)
    tar_file = path.join(tar_dir,
                         _build.config.expand('%s.tar.bz2' % (self.bset_pkg)))
    log.notice('tarball: %s' % (os.path.relpath(path.host(tar_file))))
    if self.opts.dry_run():
        return
    tmproot = _build.config.expand('%{_tmproot}')
    tar_cmd = _build.config.expand("'cd " + tmproot +
                                   " && %{__tar} -cf - . | %{__bzip2} > " +
                                   tar_file + "'")
    _build.run(tar_cmd, shell_opts='-c', cwd=tmproot)
def _run(self, args, check=False):
    # Run git with args and capture its output. The repository
    # directory is used as the working directory when present.
    # Returns (exit_code, output); check=True raises on failure.
    executor = execute.capture_execution()
    if path.exists(self.path):
        where = self.path
    else:
        where = None
    command = [self.git] + args
    log.trace('cmd: (%s) %s' % (str(where), ' '.join(command)))
    ec, proc, out = executor.spawn(command, cwd=path.host(where))
    log.trace(out)
    if check:
        self._git_exit_code(ec)
    return ec, out
def post_process(self):
    """Check the autoreconf result and create stamp-h.in if required.

    When the package's Makefile.am references stamp-h.in an empty
    timestamp file is written so configure does not regenerate it.
    Raises error.general when autoreconf failed or the stamp file
    cannot be written.
    """
    if self.command is not None:
        if self.command.exit_code != 0:
            raise error.general('error: autoreconf: %s' % (' '.join(self.command.cmd)))
        makefile = path.join(self.cwd, 'Makefile.am')
        if path.exists(makefile):
            # Raw string: the pattern is a regex and the '\.' escape
            # must reach _grep untouched (same bytes as before, but no
            # invalid-escape warning on modern Python).
            if _grep(makefile, r'stamp-h\.in'):
                stamp_h = path.join(self.cwd, 'stamp-h.in')
                try:
                    # 'with' closes the file even if the write fails;
                    # the original leaked the handle on error.
                    with open(path.host(stamp_h), 'w') as t:
                        t.write('timestamp')
                except IOError as err:
                    raise error.general('error writing: %s' % (stamp_h))
def get_file(url, local, opts, config):
    # Fetch a source or patch file to the local path, trying every
    # user supplied URL base (with the file name appended) before the
    # URL(s) from the configuration. Raises error.general when the
    # file cannot be obtained and downloading is enabled.
    if local is None:
        raise error.general('source/patch path invalid')
    local_dir = path.dirname(local)
    if not path.isdir(local_dir) and not opts.download_disabled():
        log.notice('Creating source directory: %s' % \
                   (os.path.relpath(path.host(local_dir))))
        log.output('making dir: %s' % (path.host(local_dir)))
        if _do_download(opts):
            path.mkdir(local_dir)
    if not path.exists(local) and opts.download_disabled():
        raise error.general('source not found: %s' % (path.host(local)))
    #
    # Build the candidate URL list: command line URL bases first, then
    # the configured URL(s).
    #
    url_bases = opts.urls()
    urls = []
    if url_bases is not None:
        # The file name part of the URL, shared by every base.
        url_path = urlparse.urlsplit(url)[2]
        slash = url_path.rfind('/')
        url_file = url_path if slash < 0 else url_path[slash + 1:]
        for base in url_bases:
            if base[-1:] != '/':
                base += '/'
            urls.append(urlparse.urljoin(base, url_file))
    urls += url.split()
    log.trace('_url: %s -> %s' % (','.join(urls), local))
    for url in urls:
        for dl in downloaders:
            if url.startswith(dl):
                if downloaders[dl](url, local, config, opts):
                    return
    if _do_download(opts):
        raise error.general(
            'downloading %s: all paths have failed, giving up' % (url))
def _load_released_version_config():
    '''Local worker to load the released VERSION configuration file.

    Returns (path, parser) for the first VERSION file found, or
    (None, None) when none exists. Raises error.general when the file
    cannot be parsed.
    '''
    top = _top()
    for ver in [path.join(top, 'VERSION'), path.join('..', 'VERSION')]:
        if path.exists(ver):
            try:
                # Python 3: ConfigParser already has the old "safe"
                # semantics; SafeConfigParser was deprecated in 3.2
                # and removed in 3.12.
                import configparser
                v = configparser.ConfigParser()
            except ImportError:
                # Python 2 fallback.
                import ConfigParser as configparser
                v = configparser.SafeConfigParser()
            try:
                v.read(path.host(ver))
            except Exception as e:
                raise error.general('Invalid version config format: %s: %s' % (ver,
                                                                               e))
            return ver, v
    return None, None
def get_configs(opts):
    # Collect the configuration search paths and every .cfg/.bset file
    # found beneath them. Returns a dict with 'paths' (the host paths
    # searched) and 'files' (sorted, duplicate-free relative paths).
    def _scan(base, exts):
        # Walk base and return matching files relative to it.
        matches = tuple(exts)
        found = []
        for root, dirs, files in os.walk(base):
            rel = root[len(base) + 1:]
            for name in files:
                if name.endswith(matches):
                    found.append(path.join(rel, name))
        return found
    configs = {'paths': [], 'files': []}
    for cp in opts.defaults.expand('%{_configdir}').split(':'):
        host_path = path.host(path.abspath(cp))
        configs['paths'].append(host_path)
        configs['files'].extend(_scan(host_path, ['.cfg', '.bset']))
    configs['files'] = sorted(set(configs['files']))
    return configs
def run():
    # Command line entry point for the RTEMS Source Builder set
    # builder: load the options, verify the host environment and
    # build every build set named on the command line. Exits with a
    # non-zero status on a build failure.
    import sys
    try:
        # Extra command line options this tool accepts.
        optargs = {
            '--list-configs': 'List available configurations',
            '--list-bsets': 'List available build sets',
            '--list-deps': 'List the dependent files.',
            '--bset-tar-file': 'Create a build set tar file',
            '--pkg-tar-files': 'Create package tar files',
            '--no-report': 'Do not create a package report.',
            '--report-format': 'The report format (text, html, asciidoc).'
        }
        mailer.append_options(optargs)
        opts = options.load(sys.argv, optargs)
        log.notice('RTEMS Source Builder - Set Builder, v%s' % (version.str()))
        opts.log_info()
        if not check.host_setup(opts):
            raise error.general(
                'host build environment is not set up correctly')
        configs = build.get_configs(opts)
        # Collect dependencies only when --list-deps was given.
        if opts.get_arg('--list-deps'):
            deps = []
        else:
            deps = None
        if not list_bset_cfg_files(opts, configs):
            prefix = opts.defaults.expand('%{_prefix}')
            # The prefix must be writable unless this is a dry run or
            # installing is disabled.
            if not opts.dry_run() and not opts.no_install() and \
               not path.ispathwritable(prefix):
                raise error.general('prefix is not writable: %s' % (path.host(prefix)))
            for bset in opts.params():
                b = buildset(bset, configs, opts)
                b.build(deps)
                del b
        if deps is not None:
            c = 0
            for d in sorted(set(deps)):
                c += 1
                print 'dep[%d]: %s' % (c, d)
    except error.general, gerr:
        log.notice(str(gerr))
        print >> sys.stderr, 'Build FAILED'
        sys.exit(1)
def _git_downloader(url, local, config, opts):
    # Clone or update a git repository. The URL has the form
    # <repo>?<op>[=<value>]?... and each '?' separated operation
    # (branch/checkout, pull, submodule, fetch, reset) is applied to
    # the local clone in order. A dry run logs the operations without
    # touching the repository. Always returns True.
    rlp = os.path.relpath(path.host(local))
    # us[0] is the repository URL, us[1:] the operations.
    us = url.split('?')
    repo = git.repo(local, opts, config.macros)
    if not repo.valid():
        log.notice('git: clone: %s -> %s' % (us[0], rlp))
        if not opts.dry_run():
            repo.clone(us[0], local)
    else:
        # An existing clone is cleaned and forced back to master
        # before the operations are applied.
        repo.clean(['-f', '-d'])
        repo.reset('--hard')
        repo.checkout('master')
    for a in us[1:]:
        _as = a.split('=')
        if _as[0] == 'branch' or _as[0] == 'checkout':
            if len(_as) != 2:
                raise error.general('invalid git branch/checkout: %s' % (_as))
            log.notice('git: checkout: %s => %s' % (us[0], _as[1]))
            if not opts.dry_run():
                repo.checkout(_as[1])
        elif _as[0] == 'pull':
            log.notice('git: pull: %s' % (us[0]))
            if not opts.dry_run():
                repo.pull()
        elif _as[0] == 'submodule':
            if len(_as) != 2:
                raise error.general('invalid git submodule: %s' % (_as))
            log.notice('git: submodule: %s <= %s' % (us[0], _as[1]))
            if not opts.dry_run():
                repo.submodule(_as[1])
        elif _as[0] == 'fetch':
            log.notice('git: fetch: %s -> %s' % (us[0], rlp))
            if not opts.dry_run():
                repo.fetch()
        elif _as[0] == 'reset':
            # Optional reset mode, e.g. reset=hard -> 'git reset --hard'.
            arg = []
            if len(_as) > 1:
                arg = ['--%s' % (_as[1])]
            log.notice('git: reset: %s' % (us[0]))
            if not opts.dry_run():
                repo.reset(arg)
    return True
def bspopts(self):
    """Generate acinclude.m4 when configure uses RTEMS_CHECK_BSPDIR.

    Collects every bsp_specs file under the source tree and writes an
    AC_DEFUN that maps each BSP directory to an AC_CONFIG_SUBDIRS
    entry. Raises error.general when acinclude.m4 cannot be written.
    """
    if _grep(self.configure, 'RTEMS_CHECK_BSPDIR'):
        bsp_specs = _collect(self.cwd, 'bsp_specs')
        try:
            acinclude = path.join(self.cwd, 'acinclude.m4')
            # 'with' guarantees the file is closed even if a write
            # raises; the original leaked the handle on error.
            with open(path.host(acinclude), 'w') as b:
                b.write('# RTEMS_CHECK_BSPDIR(RTEMS_BSP_FAMILY)' + os.linesep)
                b.write('AC_DEFUN([RTEMS_CHECK_BSPDIR],' + os.linesep)
                b.write('[' + os.linesep)
                b.write(' case "$1" in' + os.linesep)
                for bs in sorted(bsp_specs):
                    # BSP directory relative to the source tree root.
                    bsp_dir = path.dirname(bs)[len(self.cwd) + 1:]
                    b.write(' %s )%s' % (bsp_dir, os.linesep))
                    b.write(' AC_CONFIG_SUBDIRS([%s]);;%s' % (bsp_dir, os.linesep))
                b.write(' *)' + os.linesep)
                b.write(' AC_MSG_ERROR([Invalid BSP]);;' + os.linesep)
                b.write(' esac' + os.linesep)
                b.write('])' + os.linesep)
        except IOError as err:
            raise error.general('error writing: %s' % (acinclude))