def host_setup(opts):
    """Basic sanity check of the host build environment.

    Every default macro carrying a check constraint is expanded and
    verified: executables and directories must exist and triplets must
    be well formed.

    :param opts: the options object holding the defaults macro table.
    :returns: True when every check passes, False otherwise.
    :raises error.general: when a default is malformed or names an
        unknown check type.
    """
    checks = { 'none':    _check_none,
               'triplet': _check_triplet,
               'dir':     _check_dir,
               'exe':     _check_exe }
    sane = True
    for d in opts.defaults.keys():
        try:
            (test, constraint, value) = opts.defaults.get(d)
        # Narrowed from a bare 'except:' so control-flow exceptions such
        # as KeyboardInterrupt are not swallowed; only unpack failures
        # are translated into a configuration error.
        except (TypeError, ValueError):
            if opts.defaults.get(d) is None:
                raise error.general('invalid default: %s: not found' % (d))
            else:
                raise error.general('invalid default: %s [%r]' % \
                                    (d, opts.defaults.get(d)))
        if test != 'none':
            value = opts.defaults.expand(value)
            if test not in checks:
                raise error.general('invalid check test: %s [%r]' % \
                                    (test, opts.defaults.get(d)))
            ok = checks[test](opts, d, value, constraint)
            if ok:
                tag = ' '
            else:
                tag = '*'
            log.trace('%c %15s: %r -> "%s"' % \
                      (tag, d, opts.defaults.get(d), value))
            if sane and not ok:
                sane = False
    return sane
def post_process(self):
    """Complete option processing.

    Configures the default log, derives the SMP make flags from the
    host CPU count and loads user macro files, including any private
    macros named by RSB_MACROS or found in ~/.rsb_macros.
    """
    # Logging is configured before anything else can report progress.
    log.default = log.log(self.logfiles())
    if self.trace():
        log.tracing = True
    if self.quiet():
        log.quiet = True
    # Derive the make -j flags from the host CPU count.
    if '_ncpus' not in self.defaults:
        raise error.general('host number of CPUs not set')
    job_count = self.jobs(self.defaults['_ncpus'])
    self.defaults['_smp_mflags'] = \
        '-j %d' % (job_count) if job_count > 1 else self.defaults['nil']
    # User supplied macro files; all must exist before any are loaded.
    user_macro_files = self.user_macros()
    if user_macro_files:
        present = path.exists(user_macro_files)
        if False in present:
            raise error.general('macro file not found: %s' % \
                                (user_macro_files[present.index(False)]))
        for macro_file in user_macro_files:
            self.defaults.load(macro_file)
    # Private macros: the RSB_MACROS environment variable first, then a
    # ~/.rsb_macros file if the user has one.
    if 'RSB_MACROS' in os.environ:
        if path.exists(os.environ['RSB_MACROS']):
            self.defaults.load(os.environ['RSB_MACROS'])
    if 'HOME' in os.environ:
        rsb_macros = path.join(os.environ['HOME'], '.rsb_macros')
        if path.exists(rsb_macros):
            self.defaults.load(rsb_macros)
def __init__(self, opts, prefix, arch_bsp):
    """Locate the Makefile.inc for an RTEMS BSP under a prefix and run
    make on the rtemsconfig.mk support makefile to extract the BSP's
    configuration.

    :param opts: the command line options object.
    :param prefix: the RTEMS installation prefix to search.
    :param arch_bsp: the BSP name, either 'bsp' or 'arch/bsp'.
    :raises error.general: when the prefix, the BSP or its
        configuration cannot be found.
    """
    self.opts = opts
    self.prefix = prefix
    if not path.exists(prefix):
        raise error.general('RTEMS prefix path not found: %s' % (prefix))
    self.makefile_inc = None
    # Accept 'arch/bsp' or a bare 'bsp' name.
    if '/' in arch_bsp:
        arch, bsp = arch_bsp.split('/', 1)
    else:
        arch = None
        bsp = arch_bsp
    makefile_incs = _collect(prefix, 'Makefile.inc')
    for mi in makefile_incs:
        found = True
        if arch is not None and arch not in mi:
            found = False
        if bsp not in mi:
            found = False
        if found:
            self.makefile_inc = mi
            break
    if self.makefile_inc is None:
        raise error.general('RTEMS BSP not found: %s' % (arch_bsp))
    if not path.exists(self.makefile_inc):
        raise error.general('RTEMS BSP configuration not found: %s: %s' % \
                            (arch_bsp, self.makefile_inc))
    # Fix: '-f' and the makefile path were adjacent string literals and
    # were implicitly concatenated into one argument; they are separate
    # make arguments.
    self.command = command(opts, ['%{__make}',
                                  '-f', '%{_sbdir}/sb/rtemsconfig.mk',
                                  'makefile_inc=%s' % (self.makefile_inc)])
    self.command.run()
    self.parse(self.command.output)
def parse_url(url, pathkey, config, opts, file_override = None):
    """Split a source URL into the parts the download code needs.

    :param url: the URL to parse.
    :param pathkey: the macro key naming the local download directory.
    :param config: the configuration object.
    :param opts: the command line options object.
    :param file_override: optional local file name to use instead of
        the one derived from the URL.
    :returns: a dict with 'url', 'path', 'file', 'name', 'ext',
        'options', 'local', 'script' and, when a parser matches, 'type'.
    :raises error.general: when the URL has no protocol prefix or the
        override file name contains path characters.
    """
    source = {}
    source['url'] = url
    source['options'] = []
    colon = url.find(':')
    if url[colon + 1:colon + 3] != '//':
        # Fix: error message typo 'malforned' corrected.
        raise error.general('malformed URL (no protocol prefix): %s' % (url))
    source['path'] = url[:colon + 3] + path.dirname(url[colon + 3:])
    if file_override is None:
        source['file'] = path.basename(url)
    else:
        bad_chars = [c for c in ['/', '\\', '?', '*'] if c in file_override]
        if len(bad_chars) > 0:
            raise error.general('bad characters in file name: %s' % (file_override))
        log.output('download: file-override: %s' % (file_override))
        source['file'] = file_override
        source['options'] += ['file-override']
    source['name'], source['ext'] = path.splitext(source['file'])
    # Treat '.tar.<comp>' as a single compound extension.
    if source['name'].endswith('.tar'):
        source['name'] = source['name'][:-4]
        source['ext'] = '.tar' + source['ext']
    #
    # Get the file. Checks the local source directory first.
    #
    source['local'] = None
    for p in parsers:
        if url.startswith(p):
            source['type'] = p
            if parsers[p](source, pathkey, config, opts):
                break
    source['script'] = ''
    return source
def directive(self, _package, name):
    """Look up directive 'name' in package '_package'.

    :returns: the directive's value.
    :raises error.general: when the package or the directive is
        unknown.
    """
    if _package not in self._packages:
        raise error.general('package "' + _package + '" not found')
    directives = self._packages[_package].directives
    if name not in directives:
        raise error.general('directive "' + name + \
                            '" not found in package "' + _package + '"')
    return directives[name]
def get_file(url, local, opts, config):
    """Download a source or patch file to a local path.

    Any user supplied URL bases are tried first, then each URL from the
    configuration, handing off to the downloader matching the scheme.

    :raises error.general: when the local path is invalid, downloads
        are disabled and the file is missing, or every path fails.
    """
    if local is None:
        raise error.general("source/patch path invalid")
    if not path.isdir(path.dirname(local)) and not opts.download_disabled():
        log.notice("Creating source directory: %s" %
                   (os.path.relpath(path.host(path.dirname(local)))))
        log.output("making dir: %s" % (path.host(path.dirname(local))))
        if not opts.dry_run():
            path.mkdir(path.dirname(local))
    if not path.exists(local) and opts.download_disabled():
        raise error.general("source not found: %s" % (path.host(local)))
    #
    # Check if a URL has been provided on the command line.
    #
    url_bases = opts.urls()
    urls = []
    if url_bases is not None:
        # Fix: the file part of the URL is the same for every base so
        # split it out once instead of once per base (matches the later
        # version of this function).
        url_path = urlparse.urlsplit(url)[2]
        slash = url_path.rfind("/")
        if slash < 0:
            url_file = url_path
        else:
            url_file = url_path[slash + 1:]
        for base in url_bases:
            if base[-1:] != "/":
                base += "/"
            urls.append(urlparse.urljoin(base, url_file))
    urls += url.split()
    log.trace("_url: %s -> %s" % (",".join(urls), local))
    for url in urls:
        for dl in downloaders:
            if url.startswith(dl):
                if downloaders[dl](url, local, config, opts):
                    return
    if not opts.dry_run():
        raise error.general("downloading %s: all paths have failed, giving up" % (url))
def set_host_details(self, host, opts, macros):
    """Set the host specific defaults and macros for a host profile.

    Copies the profile's macros into both the options defaults and the
    macro table, mirrors the build settings into the host settings and
    locates a working C and C++ compiler.

    :raises error.general: when the host is unknown or no compiler is
        found.
    """
    if host not in host_profiles:
        raise error.general('invalid host: ' + host)
    for m in host_profiles[host]:
        opts.defaults[m] = host_profiles[host][m]
        macros[m] = host_profiles[host][m]
    macros_to_copy = [('%{_build}',        '%{_host}'),
                      ('%{_build_alias}',  '%{_host_alias}'),
                      ('%{_build_arch}',   '%{_host_arch}'),
                      ('%{_build_cpu}',    '%{_host_cpu}'),
                      ('%{_build_os}',     '%{_host_os}'),
                      ('%{_build_vendor}', '%{_host_vendor}')]
    for m in macros_to_copy:
        opts.defaults[m[0]] = opts.defaults[m[1]]
        macros[m[0]] = macros[m[1]]
    #
    # Look for a valid cc and cxx.
    #
    for cc in ['/usr/bin/cc', '/usr/bin/clang', '/usr/bin/gcc']:
        if check.check_exe(cc, cc):
            opts.defaults['__cc'] = cc
            macros['__cc'] = cc
            break
    if not macros.defined('__cc'):
        raise error.general('no valid cc found')
    for cxx in ['/usr/bin/c++', '/usr/bin/clang++', '/usr/bin/g++']:
        if check.check_exe(cxx, cxx):
            opts.defaults['__cxx'] = cxx
            macros['__cxx'] = cxx
            # Fix: stop at the first C++ compiler found; without the
            # break a later compiler in the list silently overrode an
            # earlier match, unlike the cc search above.
            break
    if not macros.defined('__cxx'):
        raise error.general('no valid c++ found')
def parse(self, bset):
    """Open a build set file, searching %{_configdir} when the name is
    not directly a path.

    :param bset: the build set file name.
    :raises error.general: when the file cannot be found or opened.
    """
    def _clean(line):
        # Drop the trailing newline, strip a '#' comment and any
        # surrounding whitespace.
        line = line[0:-1]
        b = line.find('#')
        if b >= 0:
            # Fix: keep everything before the comment; the previous
            # 'line[1:b]' wrongly discarded the first character of the
            # line.
            line = line[:b]
        return line.strip()

    bsetname = bset
    if not path.exists(bsetname):
        for cp in self.macros.expand('%{_configdir}').split(':'):
            configdir = path.abspath(cp)
            bsetname = path.join(configdir, bset)
            if path.exists(bsetname):
                break
            bsetname = None
    if bsetname is None:
        raise error.general('no build set file found: %s' % (bset))
    try:
        log.trace('_bset: %s: open: %s' % (self.bset, bsetname))
        bset = open(path.host(bsetname), 'r')
    # Fix: Python 3 'except ... as' syntax to match the rest of the
    # code base.
    except IOError as err:
        raise error.general('error opening bset file: %s' % (bsetname))
def run():
    # Command line entry point for the RTEMS Source Builder set builder.
    # Builds each build set named on the command line, optionally
    # collecting the dependent files, and exits with a non-zero code on
    # failure.
    import sys
    ec = 0
    setbuilder_error = False
    try:
        # Extra command line options this tool understands.
        optargs = { '--list-configs':  'List available configurations',
                    '--list-bsets':    'List available build sets',
                    '--list-deps':     'List the dependent files.',
                    '--bset-tar-file': 'Create a build set tar file',
                    '--pkg-tar-files': 'Create package tar files',
                    '--no-report':     'Do not create a package report.',
                    '--report-format': 'The report format (text, html, asciidoc).' }
        mailer.append_options(optargs)
        opts = options.load(sys.argv, optargs)
        log.notice('RTEMS Source Builder - Set Builder, %s' % (version.str()))
        opts.log_info()
        if not check.host_setup(opts):
            raise error.general('host build environment is not set up correctly')
        configs = build.get_configs(opts)
        # Collect dependencies only when asked for.
        if opts.get_arg('--list-deps'):
            deps = []
        else:
            deps = None
        if not list_bset_cfg_files(opts, configs):
            prefix = opts.defaults.expand('%{_prefix}')
            if opts.canadian_cross():
                opts.disable_install()
            # The prefix must be writable unless nothing will be
            # installed there.
            if not opts.dry_run() and \
               not opts.canadian_cross() and \
               not opts.no_install() and \
               not path.ispathwritable(prefix):
                raise error.general('prefix is not writable: %s' % (path.host(prefix)))
            for bset in opts.params():
                # Flag while a build set runs so its own error report is
                # not duplicated by the handlers below.
                setbuilder_error = True
                b = buildset(bset, configs, opts)
                b.build(deps)
                b = None
                setbuilder_error = False
        if deps is not None:
            c = 0
            for d in sorted(set(deps)):
                c += 1
                print('dep[%d]: %s' % (c, d))
    except error.general as gerr:
        if not setbuilder_error:
            log.stderr(str(gerr))
        log.stderr('Build FAILED')
        ec = 1
    except error.internal as ierr:
        if not setbuilder_error:
            log.stderr(str(ierr))
        log.stderr('Internal Build FAILED')
        ec = 1
    except error.exit as eerr:
        pass
    except KeyboardInterrupt:
        log.notice('abort: user terminated')
        ec = 1
    sys.exit(ec)
def open(self, command, capture = True, shell = False,
         cwd = None, env = None,
         stdin = None, stdout = None, stderr = None,
         timeout = None):
    """Open a command with arguments. Provide the arguments as a list or
    a string.

    Returns an (exit code, Popen object) pair. When capture is False the
    exit code is 0 and the caller owns the still-running process;
    otherwise the process output is driven through self.capture and the
    real exit code is returned. An OSError while spawning is mapped to
    its errno as the exit code.
    """
    # Log what is about to run when verbose; a list command is joined
    # into a single printable string.
    if self.verbose:
        s = command
        if type(command) is list:
            def add(x, y):
                return x + ' ' + str(y)
            s = functools.reduce(add, command, '')[1:]
        what = 'spawn'
        if shell:
            what = 'shell'
        log.output(what + ': ' + s)
    if self.output is None:
        raise error.general('capture needs an output handler')
    # When running under a shell prepend the shell executable to the
    # argument list.
    if shell and self.shell_exe:
        command = arg_list(command)
        command[:0] = self.shell_exe
    # Default the standard streams to pipes where this object drives
    # them.
    if not stdin and self.input:
        stdin = subprocess.PIPE
    if not stdout:
        stdout = subprocess.PIPE
    if not stderr:
        stderr = subprocess.PIPE
    proc = None
    if cwd is None:
        cwd = self.path
    if env is None:
        env = self.environment
    try:
        # Work around a problem on Windows with commands that
        # have a '.' and no extension. Windows needs the full
        # command name.
        if sys.platform == "win32" and type(command) is list:
            if command[0].find('.') >= 0:
                r, e = os.path.splitext(command[0])
                if e not in ['.exe', '.com', '.bat']:
                    command[0] = command[0] + '.exe'
        log.trace('exe: %s' % (command))
        proc = subprocess.Popen(command, shell = shell,
                                cwd = cwd, env = env,
                                stdin = stdin, stdout = stdout,
                                stderr = stderr,
                                close_fds = False)
        if not capture:
            return (0, proc)
        if self.output is None:
            raise error.general('capture needs an output handler')
        exit_code = self.capture(proc, command, timeout)
        if self.verbose:
            log.output('exit: ' + str(exit_code))
    except OSError as ose:
        # Spawn failures are reported through the exit code rather than
        # raised.
        exit_code = ose.errno
        if self.verbose:
            log.output('exit: ' + str(ose))
    return (exit_code, proc)
def source(self, name):
    """Return the list of sources for a named source group.

    Macro defined sources are merged in as these may be overridden by
    user loaded macros. Each source is downloaded and its unpack script
    is filled in.

    :param name: the source group name, i.e. the 'source-<name>' map.
    :returns: a list of source dicts from download.parse_url with a
        'script' entry added.
    :raises error.general: on a missing or malformed source definition.
    :raises error.internal: when a mapped macro cannot be read back.
    """
    _map = 'source-%s' % (name)
    src_keys = [s for s in self.macros.map_keys(_map) if s != 'setup']
    if len(src_keys) == 0:
        raise error.general('no source set: %s (%s)' % (name, _map))
    srcs = []
    for s in src_keys:
        sm = self.macros.get(s, globals = False, maps = _map)
        if sm is None:
            raise error.internal('source macro not found: %s in %s (%s)' % \
                                 (s, name, _map))
        opts = []
        url = []
        # Leading '-' words are options, the rest form the URL.
        for sp in sm[2].split():
            if len(url) == 0 and sp[0] == '-':
                opts += [sp]
            else:
                url += [sp]
        if len(url) == 0:
            # Fix: this message (and the two below) referenced an
            # undefined name 'args' which raised NameError instead of
            # the intended error; report the offending value instead.
            raise error.general('source URL not found: %s' % (sm[2]))
        #
        # Look for --rsb-file as an option we use as a local file name.
        # This can be used if a URL has no reasonable file name the
        # download URL parser can figure out.
        #
        file_override = None
        if len(opts) > 0:
            for o in opts:
                if o.startswith('--rsb-file'):
                    os_ = o.split('=')
                    if len(os_) != 2:
                        raise error.general('invalid --rsb-file option: %s' % (o))
                    if os_[0] != '--rsb-file':
                        raise error.general('invalid --rsb-file option: %s' % (o))
                    file_override = os_[1]
            opts = [o for o in opts if not o.startswith('--rsb-')]
        url = self.config.expand(' '.join(url))
        src = download.parse_url(url, '_sourcedir', self.config, self.opts,
                                 file_override)
        download.get_file(src['url'], src['local'], self.opts, self.config)
        if 'symlink' in src:
            sname = name.replace('-', '_')
            src['script'] = '%%{__ln_s} %s ${source_dir_%s}' % (src['symlink'], sname)
        elif 'compressed' in src:
            #
            # Zip files unpack as well so do not use tar.
            #
            src['script'] = '%s %s' % (src['compressed'], src['local'])
            if src['compressed-type'] != 'zip':
                src['script'] += ' | %{__tar_extract} -'
        else:
            src['script'] = '%%{__tar_extract} %s' % (src['local'])
        srcs += [src]
    return srcs
def _lo_int(self, opt, macro, value): if value is None: raise error.general('option requires a value: %s' % (opt)) try: num = int(value) except: raise error.general('option conversion to int failed: %s' % (opt)) self.opts[opt[2:]] = value self.defaults[macro] = value
def patch_setup(self, package, args):
    """Process a patch setup directive for a package.

    Each patch macro in the 'patch-<name>' map is parsed for options
    and a URL, downloaded, and the shell command that applies it is
    appended to the build script.

    :param package: the package the patches belong to.
    :param args: the directive arguments; args[1] is the patch name and
        the remainder are the default patch options.
    :raises error.general: on a missing URL or bad --rsb-file option.
    :raises error.internal: when a mapped macro cannot be read back.
    """
    name = args[1]
    args = args[2:]
    _map = 'patch-%s' % (name)
    default_opts = ' '.join(args)
    patch_keys = [p for p in self.macros.map_keys(_map) if p != 'setup']
    # NOTE(review): 'patches' is never appended to or read — it appears
    # unused.
    patches = []
    for p in patch_keys:
        pm = self.macros.get(p, globals = False, maps = _map)
        if pm is None:
            raise error.internal('patch macro not found: %s in %s (%s)' % \
                                 (p, name, _map))
        opts = []
        url = []
        # Words starting with '-' before the URL are patch options.
        for pp in pm[2].split():
            if len(url) == 0 and pp[0] == '-':
                opts += [pp]
            else:
                url += [pp]
        if len(url) == 0:
            raise error.general('patch URL not found: %s' % (' '.join(args)))
        #
        # Look for --rsb-file as an option we use as a local file name.
        # This can be used if a URL has no reasonable file name the
        # download URL parser can figure out.
        #
        file_override = None
        if len(opts) > 0:
            for o in opts:
                if o.startswith('--rsb-file'):
                    os_ = o.split('=')
                    if len(os_) != 2:
                        raise error.general('invalid --rsb-file option: %s' % (' '.join(args)))
                    if os_[0] != '--rsb-file':
                        raise error.general('invalid --rsb-file option: %s' % (' '.join(args)))
                    file_override = os_[1]
            opts = [o for o in opts if not o.startswith('--rsb-')]
        # Fall back to the directive's default options when the macro
        # supplied none.
        if len(opts) == 0:
            opts = default_opts
        else:
            opts = ' '.join(opts)
        opts = self.config.expand(opts)
        url = self.config.expand(' '.join(url))
        #
        # Parse the URL first in the source builder's patch directory.
        #
        patch = download.parse_url(url, '_patchdir', self.config, self.opts, file_override)
        #
        # Download the patch
        #
        download.get_file(patch['url'], patch['local'], self.opts, self.config)
        # Pipe the patch through its decompressor when compressed, else
        # cat it straight into patch.
        if 'compressed' in patch:
            patch['script'] = patch['compressed'] + ' ' + patch['local']
        else:
            patch['script'] = '%{__cat} ' + patch['local']
        patch['script'] += ' | %%{__patch} %s' % (opts)
        self.script.append(self.config.expand(patch['script']))
def post_process(self, logfile = True):
    """Complete option processing.

    Sets up logging, validates the host and build settings, applies the
    default prefix, expands the regression option, derives the make job
    count and loads user macro files.

    :param logfile: when True use the configured log files for the
        default log.
    :raises error.general: on a missing host/build setting or a missing
        macro file.
    """
    # Handle the log first.
    logctrl = self.parse_args('--without-log')
    if logctrl is None:
        if logfile:
            logfiles = self.logfiles()
        else:
            logfiles = None
        log.default = log.log(streams = logfiles)
    if self.trace():
        log.tracing = True
    if self.quiet():
        log.quiet = True
    # Must have a host
    if self.defaults['_host'] == self.defaults['nil']:
        raise error.general('--host not set')
    # Must have a build
    if self.defaults['_build'] == self.defaults['nil']:
        raise error.general('--build not set')
    # Default prefix
    prefix = self.parse_args('--prefix')
    if prefix is None:
        value = path.join(self.defaults['_prefix'], 'rtems',
                          str(self.defaults['rtems_version']))
        self.opts['prefix'] = value
        self.defaults['_prefix'] = value
    # Manage the regression option. Regression runs never install, keep
    # going on errors and always clean so every pass starts fresh.
    if self.opts['regression'] != '0':
        self.opts['no-install'] = '1'
        self.defaults['_no_install'] = '1'
        self.opts['keep-going'] = '1'
        self.defaults['_keep_going'] = '1'
        self.opts['always-clean'] = '1'
        self.defaults['_always_clean'] = '1'
    # Handle the jobs for make
    if '_ncpus' not in self.defaults:
        raise error.general('host number of CPUs not set')
    ncpus = self.jobs(self.defaults['_ncpus'])
    if ncpus > 1:
        self.defaults['_smp_mflags'] = '-j %d' % (ncpus)
    else:
        self.defaults['_smp_mflags'] = self.defaults['nil']
    # Load user macro files; all must exist before any are loaded.
    um = self.user_macros()
    if um:
        checked = path.exists(um)
        if False in checked:
            raise error.general('macro file not found: %s' % \
                                (um[checked.index(False)]))
        for m in um:
            self.defaults.load(m)
    # Check if the user has a private set of macros to load
    if 'RSB_MACROS' in os.environ:
        if path.exists(os.environ['RSB_MACROS']):
            self.defaults.load(os.environ['RSB_MACROS'])
    if 'HOME' in os.environ:
        rsb_macros = path.join(os.environ['HOME'], '.rsb_macros')
        if path.exists(rsb_macros):
            self.defaults.load(rsb_macros)
def git_version(self):
    """Return the git version as a tuple of ints.

    Accepts three or four component version numbers.

    :raises error.general: when the version output cannot be parsed.
    """
    ec, output = self._run(['--version'], True)
    words = output.split()
    if len(words) < 3:
        raise error.general('invalid version string from git: %s' % (output))
    parts = words[2].split('.')
    if len(parts) not in [3, 4]:
        raise error.general('invalid version number from git: %s' % (words[2]))
    return tuple(int(part) for part in parts)
def git_version(self):
    """Return the git version as a tuple of ints.

    Generalized to accept three component versions (e.g. '2.30.1') as
    well as four component ones; the previous code required exactly
    four components and failed on common git releases. Four component
    inputs still return the same four element tuple.

    :raises error.general: when the version output cannot be parsed.
    """
    ec, output = self._run(['--version'], True)
    gvs = output.split()
    if len(gvs) < 3:
        raise error.general('invalid version string from git: %s' % (output))
    vs = gvs[2].split('.')
    if len(vs) not in [3, 4]:
        raise error.general('invalid version number from git: %s' % (gvs[2]))
    return tuple(map(int, vs))
def get_file(url, local, opts, config):
    """Download a source or patch file to a local path.

    User supplied URL bases are tried first; for released versions the
    RTEMS release sources URL is appended as a fallback. Each candidate
    URL is handed to the downloader matching its scheme.

    :raises error.general: when the local path is invalid, downloads
        are disabled and the file is missing, or every path fails.
    """
    if local is None:
        raise error.general('source/patch path invalid')
    if not path.isdir(path.dirname(local)) and not opts.download_disabled():
        log.notice('Creating source directory: %s' % \
                   (os.path.relpath(path.host(path.dirname(local)))))
        log.output('making dir: %s' % (path.host(path.dirname(local))))
        if _do_download(opts):
            path.mkdir(path.dirname(local))
    if not path.exists(local) and opts.download_disabled():
        raise error.general('source not found: %s' % (path.host(local)))
    #
    # Check if a URL has been provided on the command line. If the package is
    # release push to the start the RTEMS URL.
    #
    url_bases = opts.urls()
    if version.released():
        rtems_release_url = config.macros.expand('%{rtems_release_url}/%{rsb_version}/sources')
        log.trace('release url: %s' % (rtems_release_url))
        #
        # If the URL being fetched is under the release path do not add the
        # sources release path because it is already there.
        #
        if not url.startswith(rtems_release_url):
            if url_bases is None:
                url_bases = [rtems_release_url]
            else:
                url_bases.append(rtems_release_url)
    urls = []
    if url_bases is not None:
        #
        # Split up the URL we are being asked to download.
        #
        url_path = urlparse.urlsplit(url)[2]
        slash = url_path.rfind('/')
        if slash < 0:
            url_file = url_path
        else:
            url_file = url_path[slash + 1:]
        log.trace('url_file: %s' %(url_file))
        for base in url_bases:
            if base[-1:] != '/':
                base += '/'
            next_url = urlparse.urljoin(base, url_file)
            log.trace('url: %s' %(next_url))
            urls.append(next_url)
    urls += url.split()
    log.trace('_url: %s -> %s' % (','.join(urls), local))
    # First downloader whose scheme matches and succeeds wins.
    for url in urls:
        for dl in downloaders:
            if url.startswith(dl):
                if downloaders[dl](url, local, config, opts):
                    return
    if _do_download(opts):
        raise error.general('downloading %s: all paths have failed, giving up' % (url))
def run():
    """Command line entry point for the set builder.

    Builds each build set named on the command line, optionally
    collecting the dependent files.
    """
    import sys
    ec = 0
    setbuilder_error = False
    try:
        optargs = {
            "--list-configs": "List available configurations",
            "--list-bsets": "List available build sets",
            "--list-deps": "List the dependent files.",
            "--bset-tar-file": "Create a build set tar file",
            "--pkg-tar-files": "Create package tar files",
            "--no-report": "Do not create a package report.",
            "--report-format": "The report format (text, html, asciidoc).",
        }
        mailer.append_options(optargs)
        opts = options.load(sys.argv, optargs)
        log.notice("RTEMS Source Builder - Set Builder, %s" % (version.str()))
        opts.log_info()
        if not check.host_setup(opts):
            raise error.general("host build environment is not set up correctly")
        configs = build.get_configs(opts)
        if opts.get_arg("--list-deps"):
            deps = []
        else:
            deps = None
        if not list_bset_cfg_files(opts, configs):
            prefix = opts.defaults.expand("%{_prefix}")
            if opts.canadian_cross():
                opts.disable_install()
            if (
                not opts.dry_run()
                and not opts.canadian_cross()
                and not opts.no_install()
                and not path.ispathwritable(prefix)
            ):
                raise error.general("prefix is not writable: %s" % (path.host(prefix)))
            for bset in opts.params():
                # Flag while a build set runs so its own error report is
                # not duplicated below.
                setbuilder_error = True
                b = buildset(bset, configs, opts)
                b.build(deps)
                b = None
                setbuilder_error = False
        if deps is not None:
            c = 0
            for d in sorted(set(deps)):
                c += 1
                # Fix: Python 3 print function instead of the Python 2
                # print statement.
                print("dep[%d]: %s" % (c, d))
    # Fix: Python 3 'except ... as' syntax to match the rest of the
    # code base.
    except error.general as gerr:
        if not setbuilder_error:
            log.stderr(str(gerr))
        log.stderr("Build FAILED")
        ec = 1
def _http_parser(source, pathkey, config, opts):
    """Parse an http/https source, rewriting gitweb.cgi patch URLs and
    recording the decompressor matching the file extension.

    :raises error.general: on a malformed gitweb.cgi request.
    """
    #
    # Hack for gitweb.cgi patch downloads. We rewrite the various fields.
    #
    if 'gitweb.cgi' in source['url']:
        url = source['url']
        if '?' not in url:
            raise error.general('invalid gitweb.cgi request: %s' % (url))
        req = url.split('?')[1]
        if len(req) == 0:
            raise error.general('invalid gitweb.cgi request: %s' % (url))
        #
        # The gitweb.cgi request should have:
        #    p=<what>
        #    a=patch
        #    h=<hash>
        # so extract the p and h parts to make the local name.
        #
        p = None
        a = None
        h = None
        for field in req.split(';'):
            if '=' not in field:
                raise error.general('invalid gitweb.cgi path: %s' % (url))
            key, val = field.split('=')[0], field.split('=')[1]
            if key == 'p':
                p = val.replace('.', '-')
            elif key == 'a':
                a = val
            elif key == 'h':
                h = val
        if p is None or h is None:
            raise error.general('gitweb.cgi path missing p or h: %s' % (url))
        source['file'] = '%s-%s.patch' % (p, h)
    #
    # Check local path
    #
    _local_path(source, pathkey, config)
    #
    # Is the file compressed ?
    #
    last_ext = source['ext'].split('.')[-1]
    decompressors = { 'gz':  ('gzip',  '%{__gzip} -dc'),
                      'bz2': ('bzip2', '%{__bzip2} -dc'),
                      'zip': ('zip',   '%{__unzip} -u'),
                      'xz':  ('xz',    '%{__xz} -dc') }
    if last_ext in decompressors:
        ctype, cmd = decompressors[last_ext]
        source['compressed-type'] = ctype
        source['compressed'] = cmd
def post_process(self):
    """Write the ampolish3 generated preinstall script.

    :raises error.general: when ampolish3 failed or the preinstall file
        cannot be written.
    """
    if self.command is not None:
        if self.command.exit_code != 0:
            raise error.general('error: ampolish3: %s' % (' '.join(self.command.cmd)))
        try:
            p = open(path.host(self.preinstall), 'w')
            for l in self.command.output:
                p.write(l)
            p.close()
        # Fix: Python 3 'except ... as' syntax to match the rest of the
        # code base.
        except IOError as err:
            raise error.general('error writing: %s' % (self.preinstall))
def send(self, to_addr, subject, body):
    """Send a plain text mail message via the configured SMTP host.

    :param to_addr: the recipient address.
    :param subject: the message subject line.
    :param body: the message body text.
    :raises error.general: when the SMTP exchange fails.
    """
    from_addr = self.from_address()
    headers = "From: %s\r\nTo: %s\r\nSubject: %s\r\n\r\n" % \
        (from_addr, to_addr, subject)
    message = headers + body
    try:
        session = smtplib.SMTP(self.smtp_host())
        session.sendmail(from_addr, [to_addr], message)
    except (smtplib.SMTPException, socket.error) as se:
        raise error.general('sending mail: %s' % (str(se)))
def source_setup(self, package, args):
    """Process a source setup directive for a package.

    Parses the setup options, fetches each source in the named source
    group and appends the shell commands that unpack it to the build
    script.

    :param package: the package the setup belongs to.
    :param args: the directive arguments; args[1] is the source name.
    :raises error.general: on bad options or when no source name can be
        determined.
    """
    log.output('source setup: %s: %s' % (package.name(), ' '.join(args)))
    setup_name = args[1]
    args = args[1:]
    try:
        opts, args = getopt.getopt(args[1:], 'qDcn:ba')
    except getopt.GetoptError as ge:
        raise error.general('source setup error: %s' % str(ge))
    quiet = False
    unpack_before_chdir = True
    delete_before_unpack = True
    create_dir = False
    deleted_dir = False
    created_dir = False
    changed_dir = False
    opt_name = None
    for o in opts:
        if o[0] == '-q':
            quiet = True
        elif o[0] == '-D':
            delete_before_unpack = False
        elif o[0] == '-c':
            create_dir = True
        elif o[0] == '-n':
            opt_name = o[1]
        elif o[0] == '-b':
            unpack_before_chdir = True
        elif o[0] == '-a':
            unpack_before_chdir = False
    name = None
    for source in self.source(setup_name):
        if name is None:
            if opt_name is None:
                if source:
                    opt_name = source['name']
                else:
                    # Fix: the message referenced an undefined name
                    # 'source_tag'; report the setup name instead.
                    raise error.general('setup source tag not found: %s' % (setup_name))
            # Fix: 'name' was only assigned when -n supplied it, so a
            # name derived from the source left 'name' as None and the
            # script lines below failed; always latch it here.
            name = opt_name
        self.script.append(self.config.expand('cd %{_builddir}'))
        if not deleted_dir and delete_before_unpack:
            self.script.append(self.config.expand('%{__rm} -rf ' + name))
            deleted_dir = True
        if not created_dir and create_dir:
            self.script.append(self.config.expand('%{__mkdir_p} ' + name))
            created_dir = True
        if not changed_dir and (not unpack_before_chdir or create_dir):
            self.script.append(self.config.expand('cd ' + name))
            changed_dir = True
        self.script.append(self.config.expand(source['script']))
    if not changed_dir and (unpack_before_chdir and not create_dir):
        self.script.append(self.config.expand('cd ' + name))
        changed_dir = True
    self.script.append(self.config.expand('%{__setup_post}'))
def setup(self):
    """Load the asciidoc API when producing HTML output.

    :raises error.general: when asciidoc is not installed or fails to
        initialise.
    """
    if self.format == "html":
        try:
            import asciidocapi
        # Fix: narrowed from a bare 'except:' to the actual failure so
        # control-flow exceptions are not swallowed.
        except ImportError:
            raise error.general("installation error: no asciidocapi found")
        asciidoc_py = self._sbpath(options.basepath, "asciidoc", "asciidoc.py")
        try:
            self.asciidoc = asciidocapi.AsciiDocAPI(asciidoc_py)
        # Deliberately broad: AsciiDocAPI can fail in several ways, but
        # KeyboardInterrupt and friends still propagate.
        except Exception:
            raise error.general("application error: asciidocapi failed")
def __init__(self):
    """Initialise the HTML formatter and load the asciidoc API.

    :raises error.general: when asciidoc is missing or fails to
        initialise.
    """
    super(html_formatter, self).__init__()
    try:
        import asciidocapi
    # Fix: narrowed from a bare 'except:' to the actual failure so
    # control-flow exceptions are not swallowed.
    except ImportError:
        raise error.general('installation error: no asciidocapi found')
    asciidoc_py = _make_path(self.sbpath, options.basepath, 'asciidoc', 'asciidoc.py')
    try:
        self.asciidoc = asciidocapi.AsciiDocAPI(asciidoc_py)
    # Deliberately broad: AsciiDocAPI can fail in several ways, but
    # KeyboardInterrupt and friends still propagate.
    except Exception:
        raise error.general('application error: asciidocapi failed')
def run(args):
    """Command line entry point for the report generator.

    :param args: the command line arguments.
    """
    try:
        optargs = { '--list-bsets':   'List available build sets',
                    '--list-configs': 'List available configurations',
                    '--format':       'Output format (text, html, asciidoc, ini)',
                    '--output':       'File name to output the report' }
        opts = options.load(args, optargs)
        if opts.get_arg('--output') and len(opts.params()) > 1:
            raise error.general('--output can only be used with a single config')
        # Fix: Python 3 print function instead of the Python 2 print
        # statement.
        print('RTEMS Source Builder, Reporter v%s' % (version.str()))
        opts.log_info()
        if not check.host_setup(opts):
            log.warning('forcing build with known host setup problems')
        configs = build.get_configs(opts)
        if not setbuilder.list_bset_cfg_files(opts, configs):
            output = opts.get_arg('--output')
            if output is not None:
                output = output[1]
            format = 'text'
            ext = '.txt'
            format_opt = opts.get_arg('--format')
            if format_opt:
                if len(format_opt) != 2:
                    raise error.general('invalid format option: %s' % ('='.join(format_opt)))
                if format_opt[1] == 'text':
                    pass
                elif format_opt[1] == 'asciidoc':
                    format = 'asciidoc'
                    ext = '.txt'
                elif format_opt[1] == 'html':
                    format = 'html'
                    ext = '.html'
                elif format_opt[1] == 'ini':
                    format = 'ini'
                    ext = '.ini'
                else:
                    raise error.general('invalid format: %s' % (format_opt[1]))
            r = report(format, configs, opts)
            for _config in opts.params():
                if output is None:
                    outname = path.splitext(_config)[0] + ext
                    outname = outname.replace('/', '-')
                else:
                    outname = output
                config = build.find_config(_config, configs)
                if config is None:
                    # Fix: the message referenced an undefined name
                    # 'inname'; report the config being looked up.
                    raise error.general('config file not found: %s' % (_config))
                r.create(config, outname)
            del r
        else:
            # NOTE(review): 'config' is not bound on this path; confirm
            # this branch is reachable and what it should report.
            raise error.general('invalid config type: %s' % (config))
    # Fix: Python 3 'except ... as' syntax and print function.
    except error.general as gerr:
        print(gerr)
        sys.exit(1)
def setup(self):
    """Load the asciidoc API when producing HTML output.

    :raises error.general: when asciidoc is not installed or fails to
        initialise.
    """
    if self.is_html():
        try:
            import asciidocapi
        # Fix: narrowed from a bare 'except:' to the actual failure so
        # control-flow exceptions are not swallowed.
        except ImportError:
            raise error.general('installation error: no asciidocapi found')
        asciidoc_py = self._sbpath(options.basepath, 'asciidoc', 'asciidoc.py')
        try:
            self.asciidoc = asciidocapi.AsciiDocAPI(asciidoc_py)
        # Deliberately broad: AsciiDocAPI can fail in several ways, but
        # KeyboardInterrupt and friends still propagate.
        except Exception:
            raise error.general('application error: asciidocapi failed')
def copy_tree(src, dst):
    """Recursively copy the tree at src to dst.

    Symbolic links are re-created rather than followed; an existing
    destination link that points elsewhere is removed first.

    :raises error.general: when a copy fails.
    """
    trace = False
    hsrc = host(src)
    hdst = host(dst)
    if os.path.exists(hsrc):
        names = os.listdir(hsrc)
    else:
        names = []
    if trace:
        # Fix: Python 3 print function instead of the Python 2 print
        # statement.
        print('path.copy_tree:')
        print(' src: %s' % (src))
        print(' hsrc: %s' % (hsrc))
        print(' dst: %s' % (dst))
        print(' hdst: %s' % (hdst))
        print(' names: %r' % (names))
    if not os.path.isdir(hdst):
        if trace:
            print(' mkdir: %s' % (hdst))
        os.makedirs(hdst)
    for name in names:
        srcname = host(os.path.join(hsrc, name))
        dstname = host(os.path.join(hdst, name))
        try:
            if os.path.islink(srcname):
                linkto = os.readlink(srcname)
                if os.path.exists(dstname):
                    if os.path.islink(dstname):
                        dstlinkto = os.readlink(dstname)
                        if linkto != dstlinkto:
                            log.warning('copying tree: link does not match: %s -> %s' % \
                                        (dstname, dstlinkto))
                            os.remove(dstname)
                    else:
                        log.warning('copying tree: destination is not a link: %s' % \
                                    (dstname))
                        os.remove(dstname)
                else:
                    os.symlink(linkto, dstname)
            elif os.path.isdir(srcname):
                copy_tree(srcname, dstname)
            else:
                shutil.copy2(host(srcname), host(dstname))
        # Fix: Python 3 'except ... as' syntax to match the rest of the
        # code base.
        except shutil.Error as err:
            raise error.general('copying tree: %s -> %s: %s' % \
                                (hsrc, hdst, str(err)))
        except EnvironmentError as why:
            raise error.general('copying tree: %s -> %s: %s' % \
                                (srcname, dstname, str(why)))
def run(args):
    """Command line entry point for the report generator.

    :param args: the command line arguments.
    """
    try:
        optargs = {
            "--list-bsets": "List available build sets",
            "--list-configs": "List available configurations",
            "--format": "Output format (text, html, asciidoc)",
            "--output": "File name to output the report",
        }
        opts = options.load(args, optargs)
        if opts.get_arg("--output") and len(opts.params()) > 1:
            raise error.general("--output can only be used with a single config")
        # Fix: Python 3 print function instead of the Python 2 print
        # statement.
        print("RTEMS Source Builder, Reporter v%s" % (version.str()))
        opts.log_info()
        if not check.host_setup(opts):
            log.warning("forcing build with known host setup problems")
        configs = build.get_configs(opts)
        if not setbuilder.list_bset_cfg_files(opts, configs):
            output = opts.get_arg("--output")
            if output is not None:
                output = output[1]
            format = "text"
            ext = ".txt"
            format_opt = opts.get_arg("--format")
            if format_opt:
                if len(format_opt) != 2:
                    raise error.general("invalid format option: %s" % ("=".join(format_opt)))
                if format_opt[1] == "text":
                    pass
                elif format_opt[1] == "asciidoc":
                    format = "asciidoc"
                    ext = ".txt"
                elif format_opt[1] == "html":
                    format = "html"
                    ext = ".html"
                else:
                    raise error.general("invalid format: %s" % (format_opt[1]))
            r = report(format, configs, opts)
            for _config in opts.params():
                if output is None:
                    outname = path.splitext(_config)[0] + ext
                    outname = outname.replace("/", "-")
                else:
                    outname = output
                config = build.find_config(_config, configs)
                if config is None:
                    # Fix: the message referenced an undefined name
                    # 'inname'; report the config being looked up.
                    raise error.general("config file not found: %s" % (_config))
                r.create(config, outname)
            del r
        else:
            # NOTE(review): 'config' is not bound on this path; confirm
            # this branch is reachable and what it should report.
            raise error.general("invalid config type: %s" % (config))
    # Fix: Python 3 'except ... as' syntax and print function.
    except error.general as gerr:
        print(gerr)
        sys.exit(1)
def cvs_version(self):
    """Return the cvs client version as a (major, minor, rev) tuple.

    :raises error.general: when the version output cannot be parsed.
    """
    ec, output = self._run(['--version'], True)
    lines = output.split('\n')
    if len(lines) < 12:
        raise error.general('invalid version string from cvs: %s' % (output))
    banner = lines[0].split(' ')
    if len(banner) != 6:
        raise error.general('invalid version number from cvs: %s' % (lines[0]))
    version_parts = banner[4].split('.')
    if len(version_parts) < 3:
        raise error.general('invalid version number from cvs: %s' % (banner[4]))
    major, minor, rev = (int(version_parts[0]),
                         int(version_parts[1]),
                         int(version_parts[2]))
    return (major, minor, rev)
def _hash_check(file_, absfile, macros, remove = True):
    """Check a downloaded file against its configured checksum.

    :param file_: the file name used to look up the hash entry.
    :param absfile: the absolute path of the file to hash.
    :param macros: the macro table holding the expected hashes.
    :param remove: when True delete the file if the checksum fails.
    :returns: True when the checksum matches, or when no hash is
        configured and this is not a released RSB; False otherwise.
    :raises error.internal: on a malformed hash entry.
    :raises error.general: on an insecure or unknown hash algorithm, a
        failed removal, or a missing hash in a released RSB.
    """
    failed = False
    hash = sources.get_hash(file_.lower(), macros)
    if hash is not None:
        # The entry is '<algorithm> <hexdigest>'.
        hash = hash.split()
        if len(hash) != 2:
            raise error.internal('invalid hash format: %s' % (file_))
        try:
            hashlib_algorithms = hashlib.algorithms
        except:
            # hashlib.algorithms is not present on newer Pythons; fall
            # back to a fixed list.
            hashlib_algorithms = ['md5', 'sha1', 'sha224', 'sha256', 'sha384', 'sha512']
        if hash[0] not in hashlib_algorithms:
            raise error.general('invalid hash algorithm for %s: %s' % (file_, hash[0]))
        # Weak digests are rejected outright.
        if hash[0] in ['md5', 'sha1']:
            raise error.general('hash: %s: insecure: %s' % (file_, hash[0]))
        hasher = None
        _in = None
        try:
            hasher = hashlib.new(hash[0])
            _in = open(path.host(absfile), 'rb')
            hasher.update(_in.read())
        except IOError as err:
            log.notice('hash: %s: read error: %s' % (file_, str(err)))
            failed = True
        except:
            # Close the file before letting an unexpected error
            # propagate.
            msg = 'hash: %s: error' % (file_)
            log.stderr(msg)
            log.notice(msg)
            if _in is not None:
                _in.close()
            raise
        if _in is not None:
            _in.close()
        log.output('checksums: %s: %s => %s' % (file_, hasher.hexdigest(), hash[1]))
        if hasher.hexdigest() != hash[1]:
            log.warning('checksum error: %s' % (file_))
            failed = True
        # A failed check optionally removes the bad download so the
        # next attempt fetches it again.
        if failed and remove:
            log.warning('removing: %s' % (file_))
            if path.exists(absfile):
                try:
                    os.remove(path.host(absfile))
                except IOError as err:
                    raise error.general('hash: %s: remove: %s' % (absfile, str(err)))
                except:
                    raise error.general('hash: %s: remove error' % (file_))
        if hasher is not None:
            del hasher
    else:
        # No hash configured: hard error for releases, warning
        # otherwise.
        if version.released():
            raise error.general('%s: no hash found in released RSB' % (file_))
        log.warning('%s: no hash found' % (file_))
    return not failed
def run(args=sys.argv):
    """Command line entry point for 'rtems-get-sources'.

    Parses the command line, optionally cleans the download directory,
    then downloads the sources referenced by each requested build set for
    every host profile. Exits the process with 0 on success, 1 on error.
    """
    ec = 0
    get_sources_error = True
    try:
        #
        # The RSB options support cannot be used because it loads the defaults
        # for the host which we cannot do here.
        #
        description = 'RTEMS Get Sources downloads all the source a build set '
        description += 'references for all hosts.'
        argsp = argparse.ArgumentParser(prog='rtems-get-sources',
                                        description=description)
        argsp.add_argument('--rtems-version',
                           help='Set the RTEMS version.',
                           type=str,
                           default=version.version())
        argsp.add_argument('--list-hosts',
                           help='List the hosts.',
                           action='store_true')
        # Fix: the help text previously said 'List the hosts.' for this
        # option too.
        argsp.add_argument('--list-bsets',
                           help='List the build sets.',
                           action='store_true')
        argsp.add_argument('--download-dir',
                           help='Download directory.',
                           type=str)
        argsp.add_argument('--clean',
                           help='Clean the download directory.',
                           action='store_true')
        argsp.add_argument('--tar',
                           help='Create a tarball of all the source.',
                           action='store_true')
        argsp.add_argument('--log',
                           help='Log file.',
                           type=str,
                           default=log_default())
        argsp.add_argument('--trace',
                           help='Enable trace logging for debugging.',
                           action='store_true')
        argsp.add_argument('bsets', nargs='*', help='Build sets.')
        # args[0] is the wrapper and args[1] the command name, so the
        # options proper start at args[2].
        argopts = argsp.parse_args(args[2:])
        load_log(argopts.log)
        log.notice('RTEMS Source Builder - Get Sources, %s' % (version.str()))
        log.tracing = argopts.trace
        opts = load_options(args, argopts)
        configs = build.get_configs(opts)
        if argopts.list_bsets:
            list_bset_files(opts, configs)
        else:
            if argopts.clean:
                if argopts.download_dir is None:
                    raise error.general(
                        'cleaning of the default download directories is not supported'
                    )
                if path.exists(argopts.download_dir):
                    log.notice('Cleaning source directory: %s' %
                               (argopts.download_dir))
                    path.removeall(argopts.download_dir)
            if len(argopts.bsets) == 0:
                raise error.general('no build sets provided on the command line')
            for bset in argopts.bsets:
                # buildset.build reports its own errors; only errors raised
                # while creating the buildset need reporting here.
                get_sources_error = True
                b = buildset(bset, configs, opts)
                get_sources_error = False
                for host in host_profiles:
                    b.build(host)
                b = None
    except error.general as gerr:
        if get_sources_error:
            log.stderr(str(gerr))
        log.stderr('Build FAILED')
        ec = 1
    except error.internal as ierr:
        if get_sources_error:
            log.stderr(str(ierr))
        log.stderr('Internal Build FAILED')
        ec = 1
    except error.exit:
        pass
    except KeyboardInterrupt:
        log.notice('abort: user terminated')
        ec = 1
    except:
        # Propagate unexpected exceptions with a full traceback. The
        # statements that previously followed this re-raise were
        # unreachable and have been removed.
        raise
    sys.exit(ec)
def source(self, name):
    """Return the list of source descriptors for source set 'name'.

    Merges in any macro defined sources as these may be overridden by
    user loaded macros. Each returned descriptor includes the download
    URL, the local file name and a shell 'script' fragment to unpack it.
    Raises error.general when the source set or its URL is missing.
    """
    _map = 'source-%s' % (name)
    src_keys = [s for s in self.macros.map_keys(_map) if s != 'setup']
    if len(src_keys) == 0:
        raise error.general('no source set: %s (%s)' % (name, _map))
    srcs = []
    for s in src_keys:
        sm = self.macros.get(s, globals=False, maps=_map)
        if sm is None:
            raise error.internal('source macro not found: %s in %s (%s)' % \
                                 (s, name, _map))
        opts = []
        url = []
        # Leading '-' words are options; everything else is the URL.
        for sp in sm[2].split():
            if len(url) == 0 and sp[0] == '-':
                opts += [sp]
            else:
                url += [sp]
        if len(url) == 0:
            # Fix: this and the option errors below previously formatted
            # the undefined name 'args' which raised NameError.
            raise error.general('source URL not found: %s' % (sm[2]))
        #
        # Look for --rsb-file as an option we use as a local file name.
        # This can be used if a URL has no reasonable file name the
        # download URL parser can figure out.
        #
        file_override = None
        if len(opts) > 0:
            for o in opts:
                if o.startswith('--rsb-file'):
                    os_ = o.split('=')
                    if len(os_) != 2:
                        raise error.general('invalid --rsb-file option: %s' % (o))
                    if os_[0] != '--rsb-file':
                        raise error.general('invalid --rsb-file option: %s' % (o))
                    file_override = os_[1]
            opts = [o for o in opts if not o.startswith('--rsb-')]
        url = self.config.expand(' '.join(url))
        src = download.parse_url(url, '_sourcedir', self.config, self.opts,
                                 file_override)
        download.get_file(src['url'], src['local'], self.opts, self.config)
        if 'symlink' in src:
            # Macro names use '_' where the source name has '-'.
            sname = name.replace('-', '_')
            src['script'] = '%%{__ln_s} %s ${source_dir_%s}' % (src['symlink'],
                                                                sname)
        elif 'compressed' in src:
            #
            # Zip files unpack as well so do not use tar.
            #
            src['script'] = '%s %s' % (src['compressed'], src['local'])
            if src['compressed-type'] != 'zip':
                src['script'] += ' | %{__tar_extract} -'
        else:
            src['script'] = '%%{__tar_extract} %s' % (src['local'])
        srcs += [src]
    return srcs
def err(msg):
    # Report a configuration error prefixed with the package name; the
    # 'package' and 'error' names come from the enclosing scope.
    raise error.general('%s: %s' % (package, msg))
def _expand(self, s):
    """Expand all '%...' macros in the string 's' and return the result.

    Repeatedly splits 's' into macro references and substitutes them
    until no further expansion happens, then applies shell expansion.
    Handles the special forms %{expand:...}, %{with ...}, %{echo ...},
    %{defined ...}, %{!defined ...}, %{path ...}, %{pkgconfig ...} and
    the conditionals %{?name:...} / %{!?name:...}. Raises error.general
    if expansion loops more than 500 times.
    """
    expand_count = 0
    expanded = True
    while expanded:
        expand_count += 1
        if expand_count > 500:
            raise error.general('macro expand looping: %s' % (s))
        expanded = False
        ms = self._macro_split(s)
        for m in ms:
            mn = m
            #
            # A macro can be '%{macro}' or '%macro'. Turn the later into
            # the former.
            #
            show_warning = True
            if mn[1] != '{':
                # for/else: the else runs only when no ignore pattern
                # matched the bare '%name' form.
                for r in self._ignore:
                    if r.match(mn) is not None:
                        mn = None
                        break
                else:
                    mn = self._label(mn[1:])
                    show_warning = False
            elif m.startswith('%{expand'):
                colon = m.find(':')
                if colon < 8:
                    log.warning('malformed expand macro, no colon found')
                else:
                    # Expand the body then substitute it as a label.
                    e = self._expand(m[colon + 1:-1].strip())
                    s = s.replace(m, self._label(e))
                    expanded = True
                    mn = None
            elif m.startswith('%{with '):
                #
                # Change the ' ' to '_' because the macros have no spaces.
                #
                n = self._label('with_' + m[7:-1].strip())
                if n in self.macros:
                    s = s.replace(m, '1')
                else:
                    s = s.replace(m, '0')
                expanded = True
                mn = None
            elif m.startswith('%{echo'):
                if not m.endswith('}'):
                    log.warning("malformed conditional macro '%s'" % (m))
                    mn = None
                else:
                    # Echo consumes the whole string; nothing is left to
                    # expand after logging the message.
                    e = self._expand(m[6:-1].strip())
                    log.notice('%s' % (self._name_line_msg(e)))
                    s = ''
                    expanded = True
                    mn = None
            elif m.startswith('%{defined'):
                n = self._label(m[9:-1].strip())
                if n in self.macros:
                    s = s.replace(m, '1')
                else:
                    s = s.replace(m, '0')
                expanded = True
                mn = None
            elif m.startswith('%{!defined'):
                n = self._label(m[10:-1].strip())
                if n in self.macros:
                    s = s.replace(m, '0')
                else:
                    s = s.replace(m, '1')
                expanded = True
                mn = None
            elif m.startswith('%{path '):
                # Form: %{path prepend|postpend <path>}
                pl = m[7:-1].strip().split()
                ok = False
                if len(pl) == 2:
                    ok = True
                    epl = []
                    for p in pl[1:]:
                        epl += [self._expand(p)]
                    p = ' '.join(epl)
                    if pl[0].lower() == 'prepend':
                        if len(self.macros['_pathprepend']):
                            self.macros['_pathprepend'] = \
                                '%s:%s' % (p, self.macros['_pathprepend'])
                        else:
                            self.macros['_pathprepend'] = p
                    elif pl[0].lower() == 'postpend':
                        # NOTE(review): both branches update '_pathprepend',
                        # only the join order differs — presumably
                        # intentional; confirm there is no '_pathpostpend'.
                        if len(self.macros['_pathprepend']):
                            self.macros['_pathprepend'] = \
                                '%s:%s' % (self.macros['_pathprepend'], p)
                        else:
                            self.macros['_pathprepend'] = p
                    else:
                        ok = False
                if ok:
                    s = s.replace(m, '')
                else:
                    self._error('path error: %s' % (' '.join(pl)))
                mn = None
            elif m.startswith('%{pkgconfig '):
                pcl = m[11:-1].strip().split()
                if len(pcl):
                    epcl = []
                    for pc in pcl:
                        epcl += [self._expand(pc)]
                    ps = self._pkgconfig(epcl)
                    s = s.replace(m, ps)
                    expanded = True
                else:
                    self._error('pkgconfig error: %s' % (m[11:-1].strip()))
                mn = None
            elif m.startswith('%{?') or m.startswith('%{!?'):
                # Conditionals: %{?name:value} / %{!?name:value}.
                if m[2] == '!':
                    start = 4
                else:
                    start = 3
                colon = m[start:].find(':')
                if colon < 0:
                    if not m.endswith('}'):
                        log.warning("malformed conditional macro '%s'" % (m))
                        mn = None
                    else:
                        mn = self._label(m[start:-1])
                else:
                    mn = self._label(m[start:start + colon])
                if mn:
                    if m.startswith('%{?'):
                        istrue = False
                        if mn in self.macros:
                            # If defined and 0 or '' then it is false.
                            istrue = _check_bool(self.macros[mn])
                            if istrue is None:
                                istrue = _check_nil(self.macros[mn])
                        if colon >= 0 and istrue:
                            s = s.replace(m, m[start + colon + 1:-1])
                            expanded = True
                            mn = None
                        elif not istrue:
                            mn = '%{nil}'
                    else:
                        isfalse = True
                        if mn in self.macros:
                            istrue = _check_bool(self.macros[mn])
                            if istrue is None or istrue == True:
                                isfalse = False
                        if colon >= 0 and isfalse:
                            s = s.replace(m, m[start + colon + 1:-1])
                            expanded = True
                            mn = None
                        else:
                            mn = '%{nil}'
            if mn:
                # Plain macro reference: substitute its value if defined.
                if mn.lower() in self.macros:
                    s = s.replace(m, self.macros[mn.lower()])
                    expanded = True
                elif show_warning:
                    self._error("macro '%s' not found" % (mn))
    return self._shell(s)
def run():
    """Command line entry point for the RSB set builder.

    Loads the options and host defaults, optionally sets up a mail
    report, then builds each build set given on the command line. Exits
    the process with 0 on success, 1 on error.
    """
    import sys
    ec = 0
    setbuilder_error = False
    mail = None
    try:
        # Fix: '--list-configs' appeared twice in this literal; the first
        # entry was silently discarded by the dict.
        optargs = {
            '--list-bsets': 'List available build sets',
            '--list-configs': 'List available configuration files.',
            '--list-deps': 'List the dependent files.',
            '--bset-tar-file': 'Create a build set tar file',
            '--pkg-tar-files': 'Create package tar files',
            '--no-report': 'Do not create a package report.',
            '--report-format': 'The report format (text, html, asciidoc).'
        }
        mailer.append_options(optargs)
        opts = options.load(sys.argv, optargs)
        if opts.get_arg('--mail'):
            mail = {
                'mail': mailer.mail(opts),
                'output': log_capture(),
                'log': '',
                'reports': [],
                'failure': None
            }
            to_addr = opts.get_arg('--mail-to')
            if to_addr is not None:
                mail['to'] = to_addr[1]
            else:
                mail['to'] = macro_expand(opts.defaults, '%{_mail_tools_to}')
            mail['from'] = mail['mail'].from_address()
        log.notice('RTEMS Source Builder - Set Builder, %s' % (version.str()))
        opts.log_info()
        if not check.host_setup(opts):
            raise error.general('host build environment is not set up correctly')
        if mail:
            # Capture the start-up output and the host details for the
            # mail report header.
            mail['header'] = os.linesep.join(mail['output'].get()) + os.linesep
            mail['header'] += os.linesep
            mail['header'] += 'Host: ' + reports.platform('compact') + os.linesep
            indent = ' '
            for l in textwrap.wrap(reports.platform('extended'),
                                   width=80 - len(indent)):
                mail['header'] += indent + l + os.linesep
        configs = build.get_configs(opts)
        # A non-None deps list switches the run into dependency listing.
        if opts.get_arg('--list-deps'):
            deps = []
        else:
            deps = None
        if not list_bset_cfg_files(opts, configs):
            prefix = macro_expand(opts.defaults, '%{_prefix}')
            if opts.canadian_cross():
                opts.disable_install()
            if not opts.dry_run() and \
               not opts.canadian_cross() and \
               not opts.no_install() and \
               not path.ispathwritable(prefix):
                raise error.general('prefix is not writable: %s' %
                                    (path.host(prefix)))
            for bset in opts.params():
                # buildset.build reports its own errors so suppress the
                # duplicate message in the handlers below.
                setbuilder_error = True
                b = buildset(bset, configs, opts)
                b.build(deps, mail=mail)
                b = None
                setbuilder_error = False
        if deps is not None:
            for c, d in enumerate(sorted(set(deps)), start=1):
                print('dep[%d]: %s' % (c, d))
    except error.general as gerr:
        if not setbuilder_error:
            log.stderr(str(gerr))
        log.stderr('Build FAILED')
        ec = 1
    except error.internal as ierr:
        if not setbuilder_error:
            log.stderr(str(ierr))
        log.stderr('Internal Build FAILED')
        ec = 1
    except error.exit:
        pass
    except KeyboardInterrupt:
        log.notice('abort: user terminated')
        ec = 1
    except:
        # Propagate unexpected exceptions with a full traceback. The
        # statements that previously followed this re-raise were
        # unreachable and have been removed.
        raise
    sys.exit(ec)
def parse(self, bset):
    """Parse a build set (.bset) file and return its list of configs.

    Resolves 'bset' against the configured %{_configdir} search path,
    then processes package, macro (%define/%undefine), %include, %patch,
    %source and %hash lines. Raises error.general on any parse error.
    """

    def _clean(line):
        # Strip the trailing newline and anything after a '#' comment.
        line = line[0:-1]
        b = line.find('#')
        if b >= 0:
            # Fix: was line[1:b] which also dropped the first character
            # of any line carrying an inline comment.
            line = line[0:b]
        return line.strip()

    #
    # Locate the build set file on the config search path.
    #
    bsetname = bset
    if not path.exists(bsetname):
        for cp in macro_expand(self.macros, '%{_configdir}').split(':'):
            configdir = path.abspath(cp)
            bsetname = path.join(configdir, bset)
            if path.exists(bsetname):
                break
            bsetname = None
        if bsetname is None:
            raise error.general('no build set file found: %s' % (bset))
    try:
        log.trace('_bset: : %s: open: %s' % (self.bset, bsetname))
        bset = open(path.host(bsetname), 'r')
    except IOError as err:
        raise error.general('error opening bset file: %s' % (bsetname))
    configs = []
    try:
        lc = 0
        for l in bset:
            lc += 1
            l = _clean(l)
            if len(l) == 0:
                continue
            log.trace('_bset: : %s: %03d: %s' % (self.bset, lc, l))
            ls = l.split()
            if ls[0][-1] == ':' and ls[0][:-1] == 'package':
                self.bset_pkg = ls[1].strip()
                self.macros['package'] = self.bset_pkg
            elif ls[0][0] == '%':

                def err(msg):
                    # Parse error reporter passed to the sources module.
                    raise error.general('%s:%d: %s' % (self.bset, lc, msg))

                if ls[0] == '%define':
                    if len(ls) > 2:
                        self.macros.define(
                            ls[1].strip(),
                            ' '.join([f.strip() for f in ls[2:]]))
                    else:
                        self.macros.define(ls[1].strip())
                elif ls[0] == '%undefine':
                    if len(ls) > 2:
                        # Fix: '%%' escapes the literal; the unescaped
                        # '%u' made this format raise TypeError.
                        raise error.general('%s:%d: %%undefine requires ' \
                                            'just the name' % (self.bset, lc))
                    self.macros.undefine(ls[1].strip())
                elif ls[0] == '%include':
                    configs += self.parse(ls[1].strip())
                elif ls[0] in ['%patch', '%source']:
                    sources.process(ls[0][1:], ls[1:], self.macros, err)
                elif ls[0] == '%hash':
                    sources.hash(ls[1:], self.macros, err)
            else:
                # Anything else names a configuration file to build.
                l = l.strip()
                c = build.find_config(l, self.configs)
                if c is None:
                    raise error.general('%s:%d: cannot find file: %s' %
                                        (self.bset, lc, l))
                configs += [c]
    except:
        bset.close()
        raise
    bset.close()
    return configs
def __init__(self, base_path=None, argv=None, optargs=None, defaults=None, long_opts=None, long_opts_help=None, command_path='', log_default=None): if argv is None: return global basepath if long_opts == None: long_opts = {} basepath = base_path if log_default is not None and type(log_default) is not list: raise error.general('log default is a list') self.log_default = log_default if defaults is None: defaults = macros.macros() self.long_opts = { # key macro handler param defs init '--jobs': ('_jobs', self._lo_jobs, True, 'default', True), '--log': ('_logfile', self._lo_string, True, None, False), '--macros': ('_macros', self._lo_string, True, None, False), '--force': ('_force', self._lo_bool, False, '0', True), '--quiet': ('_quiet', self._lo_bool, False, '0', True), '--trace': ('_trace', self._lo_bool, False, '0', True), '--dry-run': ('_dry_run', self._lo_bool, False, '0', True), '--warn-all': ('_warn_all', self._lo_bool, False, '0', True), '--no-clean': ('_no_clean', self._lo_bool, False, '0', True), '--keep-going': ('_keep_going', self._lo_bool, False, '0', True), '--always-clean': ('_always_clean', self._lo_bool, False, '0', True), '--no-install': ('_no_install', self._lo_bool, False, '0', True), '--help': (None, self._lo_help, False, None, False) } self.long_opts_help = { '--force': 'Force the build to proceed', '--quiet': 'Quiet output (not used)', '--trace': 'Trace the execution', '--dry-run': 'Do everything but actually run the build', '--warn-all': 'Generate warnings', '--no-clean': 'Do not clean up the build tree', '--always-clean': 'Always clean the build tree, even with an error', '--keep-going': 'Do not stop on an error.', '--jobs=[0..n,none,half,full]': 'Run with specified number of jobs, default: num CPUs.', '--macros file[,file]': 'Macro format files to load after the defaults', '--log file': 'Log file where all build output is written to' } self.opts = {'params': []} self.command_path = command_path self.command_name = path.basename(argv[0]) 
self.argv = argv self.args = argv[1:] self.optargs = optargs self.defaults = defaults for lo in self.long_opts: self.opts[lo[2:]] = self.long_opts[lo][3] if self.long_opts[lo][4]: self.defaults[self.long_opts[lo][0]] = ('none', 'none', self.long_opts[lo][3]) for lo in long_opts: if lo in self.long_opts: raise error.general('suplicate option: %s' % (lo)) self.opts[lo[2:]] = long_opts[lo][3] if long_opts[lo][4]: self.defaults[long_opts[lo][0]] = ('none', 'none', long_opts[lo][3]) if long_opts[lo][1] == 'int': handler = self._lo_int elif long_opts[lo][1] == 'string': handler = self._lo_string elif long_opts[lo][1] == 'path': handler = self._lo_path elif long_opts[lo][1] == 'jobs': handler = self._lo_jobs elif long_opts[lo][1] == 'bool': handler = self._lo_bool elif long_opts[lo][1] == 'triplet': handler = self._lo_triplets else: raise error.general('invalid option handler: %s: %s' % (lo, long_opts[lo][1])) self.long_opts[lo] = (long_opts[lo][0], handler, long_opts[lo][2], long_opts[lo][3], long_opts[lo][4]) if long_opts_help is not None: if lo not in long_opts_help: raise error.general('no help for option: %s' % (lo)) self.long_opts_help[lo] = long_opts_help[lo]
def load(self, name):
    """Open and parse the configuration file 'name'.

    Resolves the name (adding a '.cfg' extension and searching the
    %{_configdir} path when needed), then feeds the file through the
    parser, dispatching package, control, directive and data records.
    Supports recursive %include. Raises error.general when the file
    cannot be found or opened.
    """

    def common_end(left, right):
        # Return the longest common suffix of the two strings.
        end = ''
        while len(left) and len(right):
            if left[-1] != right[-1]:
                return end
            end = left[-1] + end
            left = left[:-1]
            right = right[:-1]
        return end

    if self.load_depth == 0:
        # First (outermost) load: reset state and create the package.
        self._reset(name)
        self._packages[self.package] = package(self.package,
                                               self.define('%{_arch}'), self)
    self.load_depth += 1
    save_name = self.name
    save_lc = self.lc
    #
    # Locate the config file. Expand any macros then add the
    # extension. Check if the file exists, therefore directly
    # referenced. If not see if the file contains ':' or the path
    # separator. If it does split the path else use the standard config dir
    # path in the defaults.
    #
    exname = self.expand(name)
    #
    # Macro could add an extension.
    #
    if exname.endswith('.cfg'):
        configname = exname
    else:
        configname = '%s.cfg' % (exname)
        name = '%s.cfg' % (name)
    if ':' in configname:
        cfgname = path.basename(configname)
    else:
        cfgname = common_end(configname, name)
    if not path.exists(configname):
        if ':' in configname:
            configdirs = path.dirname(configname).split(':')
        else:
            configdirs = self.define('_configdir').split(':')
        for cp in configdirs:
            configname = path.join(path.abspath(cp), cfgname)
            if path.exists(configname):
                break
            configname = None
        if configname is None:
            raise error.general('no config file found: %s' % (cfgname))
    try:
        log.trace('config: %s: _open: %s' % (self.name, path.host(configname)))
        config = open(path.host(configname), 'r')
    except IOError as err:
        raise error.general('error opening config file: %s' %
                            (path.host(configname)))
    self.configpath += [configname]
    self._includes += [configname]
    self.name = self._relative_path(configname)
    self.lc = 0
    try:
        dir = None
        info = None
        data = []
        # Pull parse records until the '%end' control record.
        while True:
            r = self._parse(config, dir, info)
            if r[0] == 'package':
                dir, info, data = self._process_package(r, dir, info, data)
            elif r[0] == 'control':
                if r[1] == '%end':
                    break
                log.warning("unexpected '%s'" % (r[1]))
            elif r[0] == 'directive':
                if r[1] == '%include':
                    # Recurse into the included configuration.
                    self.load(r[2][0])
                    continue
                dir, info, data = self._process_directive(r, dir, info, data)
            elif r[0] == 'data':
                dir, info, data = self._process_data(r, dir, info, data)
            else:
                self._error("%d: invalid parse state: '%s" % (self.lc, r[0]))
        if dir is not None:
            self._directive_extend(dir, data)
    except:
        config.close()
        raise
    config.close()
    # Restore the state of the including file.
    self.name = save_name
    self.lc = save_lc
    self.load_depth -= 1
def set_output(self, output):
    """Reject attempts to replace the output handler.

    Output capture is fixed for the lifetime of this object, so any
    override attempt is an error. Raises error.general unconditionally.
    """
    # Fix: message typo 'overrided' corrected.
    raise error.general('output capture cannot be overridden')
def macro(self, name):
    """Return the value of macro 'name'; raise error.general if undefined."""
    if name not in self.macros:
        raise error.general('macro "%s" not found' % (name))
    return self.macros[name]
def load(args, optargs=None, defaults='%{_sbdir}/defaults.mc', logfile=True):
    """ Copy the defaults, get the host specific values and merge them
    overriding any matching defaults, then create an options object to
    handle the command line merging in any command line overrides. Finally
    post process the command line.

    Returns the fully processed command_line object. Raises error.general
    when the host type is unsupported or its support module fails to load.
    """
    # Module level host type flags are updated for the detected host.
    global host_windows
    global host_posix
    #
    # Adjust the args to remove the wrapper.
    #
    args = args[1:]
    #
    # The path to this command.
    #
    command_path = path.dirname(path.abspath(args[0]))
    if len(command_path) == 0:
        command_path = '.'
    #
    # The command line contains the base defaults object all build objects copy
    # and modify by loading a configuration.
    #
    o = command_line(args, optargs,
                     macros.macros(name=defaults, sbdir=command_path),
                     command_path)
    overrides = None
    if os.name == 'nt':
        try:
            import windows
            overrides = windows.load()
            host_windows = True
            host_posix = False
        except:
            raise error.general('failed to load Windows host support')
    elif os.name == 'posix':
        # Select the host support module from the uname system name.
        uname = os.uname()
        try:
            if uname[0].startswith('MINGW64_NT'):
                import windows
                overrides = windows.load()
                host_windows = True
            elif uname[0].startswith('CYGWIN_NT'):
                import windows
                overrides = windows.load()
            elif uname[0] == 'Darwin':
                import darwin
                overrides = darwin.load()
            elif uname[0] == 'FreeBSD':
                import freebsd
                overrides = freebsd.load()
            elif uname[0] == 'NetBSD':
                import netbsd
                overrides = netbsd.load()
            elif uname[0] == 'Linux':
                import linux
                overrides = linux.load()
            elif uname[0] == 'SunOS':
                import solaris
                overrides = solaris.load()
        except error.general as ge:
            raise error.general('failed to load %s host support: %s' %
                                (uname[0], ge))
        except:
            raise error.general('failed to load %s host support' % (uname[0]))
    else:
        raise error.general('unsupported host type; please add')
    if overrides is None:
        raise error.general('no hosts defaults found; please add')
    # Host values override the matching defaults.
    for k in overrides:
        o.defaults[k] = overrides[k]
    o.sb_released()
    o.sb_git()
    o.rtems_options()
    o.pre_process()
    o.process()
    o.post_process(logfile)
    #
    # Load the release settings
    #
    version.load_release_settings(o.defaults)
    return o
def _lo_bool(self, opt, macro, value): if value is not None: raise error.general('option does not take a value: %s' % (opt)) self.opts[opt[2:]] = '1' self.defaults[macro] = '1'
def build(self, host, nesting_count=0):
    """Build every configuration in this build set for the given host.

    Nested .bset entries recurse with an incremented nesting count;
    .cfg entries are built directly. The PATH environment variable is
    restored and the elapsed time logged whether or not the build
    succeeds. Raises error.general on build failures.
    """
    build_error = False
    nesting_count += 1
    log.trace('_bset: %s for %s: make' % (self.bset, host))
    log.notice('Build Set: %s for %s' % (self.bset, host))
    mail_subject = '%s on %s' % (self.bset, self.macros.expand('%{_host}'))
    # Saved so any PATH changes made during the build can be undone.
    current_path = os.environ['PATH']
    start = datetime.datetime.now()
    have_errors = False
    try:
        configs = self.load()
        log.trace('_bset: %s: configs: %s' % (self.bset, ','.join(configs)))
        sizes_valid = False
        builds = []
        for s in range(0, len(configs)):
            b = None
            try:
                #
                # Each section of the build set gets a separate set of
                # macros so we do not contaminate one configuration with
                # another.
                #
                opts = copy.copy(self.opts)
                macros = copy.copy(self.macros)
                self.set_host_details(host, opts, macros)
                if configs[s].endswith('.bset'):
                    # Nested build set: recurse.
                    log.trace('_bset: == %2d %s' % (nesting_count + 1, '=' * 75))
                    bs = buildset(configs[s], self.configs, opts, macros)
                    bs.build(host, nesting_count)
                    del bs
                elif configs[s].endswith('.cfg'):
                    log.trace('_bset: -- %2d %s' % (nesting_count + 1, '-' * 75))
                    try:
                        b = build.build(configs[s], False, opts, macros)
                    except:
                        build_error = True
                        raise
                    self.build_package(configs[s], b)
                    builds += [b]
                    #
                    # Dump post build macros.
                    #
                    log.trace('_bset: macros post-build')
                    log.trace(str(macros))
                else:
                    raise error.general('invalid config type: %s' % (configs[s]))
            except error.general as gerr:
                have_errors = True
                # Record the first failing package for reporting.
                if b is not None:
                    if self.build_failure is None:
                        self.build_failure = b.name()
                raise
        #
        # Clear out the builds ...
        #
        for b in builds:
            del b
    except error.general as gerr:
        # A failure inside build.build already reported itself.
        if not build_error:
            log.stderr(str(gerr))
        raise
    except KeyboardInterrupt:
        raise
    except:
        self.build_failure = 'RSB general failure'
        raise
    finally:
        end = datetime.datetime.now()
        os.environ['PATH'] = current_path
        build_time = str(end - start)
        log.notice('Build Set: Time %s' % (build_time))
def err(msg):
    # Raise a parse error prefixed with the build set file name and the
    # current line number; 'self' and 'lc' come from the enclosing scope.
    raise error.general('%s:%d: %s' % (self.bset, lc, msg))
def find(self, name):
    """Return the configuration matching 'name' (case-insensitive).

    Raises error.general when the name is unknown or has been filtered
    out via bsp_config.filter_out.
    """
    _keys = list(self.keys())
    nl = name.lower()
    if nl in _keys and nl not in bsp_config.filter_out:
        # Fix: previously indexed the key list with the name string
        # (self.configs[_keys[nl]]) which raised TypeError; index the
        # configurations mapping directly with the lower case name.
        return self.configs[nl]
    raise error.general('invalid configuration: %s' % (name))
def _git_exit_code(self, ec, cmd, output): if ec: log.notice('git: cmd: ' + ' '.join(cmd)) log.notice('git: output: ' + output) raise error.general('git command failed (%s): %d' % (self.git, ec))
def parse(self, lines):
    """Parse macro definition lines and merge them into self.macros.

    Lines have the form ``name: attrib, attrib, 'value'`` with single or
    triple quoted values, ``[section]`` lines to switch the macro map and
    ``%include file`` directives. Raises error.general on malformed input.
    """

    def _clean(l):
        if '#' in l:
            l = l[:l.index('#')]
        if '\r' in l:
            # Fix: truncate at the carriage return; the previous code
            # used l.index('r') which cut the line at the first 'r'.
            l = l[:l.index('\r')]
        if '\n' in l:
            l = l[:l.index('\n')]
        return l.strip()

    trace_me = False
    if trace_me:
        print('[[[[]]]] parsing macros')
    macros = {'global': {}}
    map = 'global'
    lc = 0
    state = 'key'
    token = ''
    macro = []
    for l in lines:
        lc += 1
        if len(l) == 0:
            continue
        l = self._unicode_to_str(l)
        l_remaining = l
        for c in l:
            if trace_me:
                print(']]]]]]]] c:%s(%d) s:%s t:"%s" m:%r M:%s' % \
                      (c, ord(c), state, token, macro, map))
            l_remaining = l_remaining[1:]
            # Fix: all string comparisons now use '=='/'in'; the original
            # used 'is' which only worked via CPython string interning.
            if c == '#' and not state.startswith('value'):
                break
            if c == '\n' or c == '\r':
                if not (state == 'key' and len(token) == 0) and \
                   not state.startswith('value-multiline'):
                    raise error.general('malformed macro line:%d: %s' % (lc, l))
            if state == 'key':
                if c not in string.whitespace:
                    if c == '[':
                        state = 'map'
                    elif c == '%':
                        state = 'directive'
                    elif c == ':':
                        macro += [token]
                        token = ''
                        state = 'attribs'
                    elif c == '#':
                        break
                    else:
                        token += c
            elif state == 'map':
                if c == ']':
                    if token not in macros:
                        macros[token] = {}
                    map = token
                    token = ''
                    state = 'key'
                elif c in string.printable and c not in string.whitespace:
                    token += c
                else:
                    raise error.general('invalid macro map:%d: %s' % (lc, l))
            elif state == 'directive':
                if c in string.whitespace:
                    if token == 'include':
                        self.load(_clean(l_remaining))
                        token = ''
                        state = 'key'
                        break
                elif c in string.printable and c not in string.whitespace:
                    token += c
                else:
                    raise error.general('invalid macro directive:%d: %s' % (lc, l))
            elif state == 'include':
                # NOTE: this state is never entered; kept for parity with
                # the original implementation (comparison fixed to 'in').
                if c in string.whitespace:
                    if token == 'include':
                        state = 'include'
                elif c in string.printable and c not in string.whitespace:
                    token += c
                else:
                    raise error.general('invalid macro directive:%d: %s' % (lc, l))
            elif state == 'attribs':
                if c not in string.whitespace:
                    if c == ',':
                        macro += [token]
                        token = ''
                        if len(macro) == 3:
                            state = 'value-start'
                    else:
                        token += c
            elif state == 'value-start':
                if c == "'":
                    state = 'value-line-start'
            elif state == 'value-line-start':
                if c == "'":
                    state = 'value-multiline-start'
                else:
                    state = 'value-line'
                    token += c
            elif state == 'value-multiline-start':
                if c == "'":
                    state = 'value-multiline'
                else:
                    # '' followed by anything else is an empty value.
                    macro += [token]
                    state = 'macro'
            elif state == 'value-line':
                if c == "'":
                    macro += [token]
                    state = 'macro'
                else:
                    token += c
            elif state == 'value-multiline':
                if c == "'":
                    state = 'value-multiline-end'
                else:
                    token += c
            elif state == 'value-multiline-end':
                if c == "'":
                    state = 'value-multiline-end-end'
                else:
                    state = 'value-multiline'
                    token += "'" + c
            elif state == 'value-multiline-end-end':
                if c == "'":
                    macro += [token]
                    state = 'macro'
                else:
                    state = 'value-multiline'
                    token += "''" + c
            else:
                raise error.internal('bad state: %s' % (state))
            # A completed macro is committed and the machine reset.
            if state == 'macro':
                macros[map][self._unicode_to_str(macro[0].lower())] = \
                    (self._unicode_to_str(macro[1]),
                     self._unicode_to_str(macro[2]),
                     self._unicode_to_str(macro[3]))
                macro = []
                token = ''
                state = 'key'
    # Merge the parsed maps into the instance's macros.
    for m in macros:
        if m not in self.macros:
            self.macros[m] = {}
        for mm in macros[m]:
            self.macros[m][mm] = macros[m][mm]
os.remove(path.host(local)) failed = True except: msg = 'download: %s: error' % (url) log.stderr(msd) log.notice(msg) if _out is not None: _out.close() raise if _out is not None: _out.close() if _in is not None: del _in if not failed: if not path.isfile(local): raise error.general('source is not a file: %s' % (path.host(local))) return not failed def _git_downloader(url, local, config, opts): rlp = os.path.relpath(path.host(local)) us = url.split('?') repo = git.repo(local, opts, config.macros) if not repo.valid(): log.notice('git: clone: %s -> %s' % (us[0], rlp)) if not opts.dry_run(): repo.clone(us[0], local) for a in us[1:]: _as = a.split('=') if _as[0] == 'branch': log.notice('git: checkout: %s => %s' % (us[0], _as[1]))
def build(self, deps=None, nesting_count=0, mail=None):
    """Build all configurations in this build set, installing or staging.

    When 'deps' is a list the run only collects dependent files instead
    of building. Nested .bset files recurse and stage their installed
    files; the outermost set installs staged files into the prefix.
    'mail' optionally accumulates a report. PATH is restored and timing
    logged regardless of outcome.
    """
    build_error = False
    nesting_count += 1
    if self.mail_active(mail, nesting_count):
        # Fresh mail state for the outermost build.
        mail['output'].clear()
        mail['log'] = ''
        mail['reports'] = []
        mail['failure'] = None
    log.trace('_bset: %2d: %s: make' % (nesting_count, self.bset))
    log.notice('Build Set: %s' % (self.bset))
    current_path = os.environ['PATH']
    start = datetime.datetime.now()
    mail_report = False
    have_errors = False
    interrupted = False
    #
    # If this is the outter most buildset it's files are installed. Nested
    # build sets staged their installed file. The staged files are install
    # when the outtter most build finishes.
    #
    if nesting_count != 1:
        if self.installing():
            self.macros['install_mode'] = 'staging'
    #
    # Only the outter build set can have staging to install. Get the staging
    # root via the config because it could require a valid config.
    #
    have_staging = False
    try:
        configs = self.load()
        log.trace('_bset: %2d: %s: configs: %s' %
                  (nesting_count, self.bset, ', '.join(configs)))
        if nesting_count == 1 and len(configs) > 1:
            #
            # Prepend staging areas, bin directory to the
            # path. Lets the later package depend on the earlier
            # ones.
            #
            pathprepend = ['%{stagingroot}/bin'] + \
                macro_expand(self.macros, '%{_pathprepend}').split(':')
            pathprepend = [pp for pp in pathprepend if len(pp)]
            if len(pathprepend) == 1:
                self.macros['_pathprepend'] = pathprepend[0]
            else:
                self.macros['_pathprepend'] = ':'.join(pathprepend)
        sizes_valid = False
        builds = []
        for s in range(0, len(configs)):
            b = None
            try:
                #
                # Each section of the build set gets a separate set of
                # macros so we do not contaminate one configuration with
                # another.
                #
                opts = copy.copy(self.opts)
                macros = copy.copy(self.macros)
                if configs[s].endswith('.bset'):
                    # Nested build set: recurse; its installs are staged.
                    log.trace('_bset: %2d: %s %s' %
                              (nesting_count, configs[s],
                               '=' * (74 - len(configs[s]))))
                    bs = buildset(configs[s], self.configs, opts, macros)
                    bs.build(deps, nesting_count, mail)
                    if self.installing():
                        have_staging = True
                    del bs
                elif configs[s].endswith('.cfg'):
                    if mail:
                        mail_report = True
                    log.trace('_bset: %2d: %s %s' %
                              (nesting_count, configs[s],
                               '=' * (74 - len(configs[s]))))
                    try:
                        b = build.build(configs[s],
                                        self.opts.get_arg('--pkg-tar-files'),
                                        opts, macros)
                    except:
                        build_error = True
                        raise
                    if b.macros.get('%{_disable_reporting}'):
                        mail_report = False
                    if deps is None:
                        self.build_package(configs[s], b)
                        self.report(configs[s], b, copy.copy(self.opts),
                                    copy.copy(self.macros), mail=mail)
                        # Always produce an XML report.
                        self.report(configs[s], b, copy.copy(self.opts),
                                    copy.copy(self.macros), format='xml',
                                    mail=mail)
                        if s == len(configs) - 1 and not have_errors:
                            self.bset_tar(b)
                    else:
                        # Dependency listing run: just collect includes.
                        deps += b.config.includes()
                    builds += [b]
                    #
                    # Dump post build macros.
                    #
                    log.trace('_bset: : macros post-build')
                    log.trace(str(b.macros))
                else:
                    raise error.general('invalid config type: %s' % (configs[s]))
            except error.general as gerr:
                have_errors = True
                if b is not None:
                    if self.build_failure is None:
                        self.build_failure = b.name()
                    self.write_mail_header('')
                    self.write_mail_header('= ' * 40)
                    self.write_mail_header('Build FAILED: %s' % (b.name()))
                    self.write_mail_header('- ' * 40)
                    self.write_mail_header(str(log.default))
                    self.write_mail_header('- ' * 40)
                    if self.opts.keep_going():
                        log.notice(str(gerr))
                        if self.opts.always_clean():
                            builds += [b]
                    else:
                        raise
                else:
                    raise
        #
        # Installing or staging ...
        #
        log.trace('_bset: %2d: %s: deps:%r no-install:%r' % \
                  (nesting_count, self.install_mode(), deps is None,
                   self.opts.no_install()))
        log.trace('_bset: %2d: %s: builds: %s' % \
                  (nesting_count, self.install_mode(),
                   ', '.join([b.name() for b in builds])))
        if deps is None and not self.opts.no_install() and not have_errors:
            for b in builds:
                log.trace('_bset: : %s: %r' %
                          (self.install_mode(), b.installable()))
                if b.installable():
                    prefix = b.config.expand('%{_prefix}')
                    buildroot = path.join(b.config.expand('%{buildroot}'),
                                          prefix)
                    # When staging, install under the staging root instead.
                    if self.staging():
                        prefix = b.config.expand('%{stagingroot}')
                    self.install(self.install_mode(), b.name(), buildroot,
                                 prefix)
        #
        # Sizes ...
        #
        if len(builds) > 1:
            size_build = 0
            size_installed = 0
            size_build_max = 0
            for b in builds:
                s = b.get_build_size()
                size_build += s
                if s > size_build_max:
                    size_build_max = s
                size_installed += b.get_installed_size()
            size_sources = 0
            for p in builds[0].config.expand('%{_sourcedir}').split(':'):
                size_sources += path.get_size(p)
            size_patches = 0
            for p in builds[0].config.expand('%{_patchdir}').split(':'):
                size_patches += path.get_size(p)
            size_total = size_sources + size_patches + size_installed
            build_max_size_human = build.humanize_number(
                size_build_max + size_installed, 'B')
            build_total_size_human = build.humanize_number(size_total, 'B')
            build_sources_size_human = build.humanize_number(size_sources, 'B')
            build_patches_size_human = build.humanize_number(size_patches, 'B')
            build_installed_size_human = build.humanize_number(
                size_installed, 'B')
            build_size = 'usage: %s' % (build_max_size_human)
            build_size += ' total: %s' % (build_total_size_human)
            build_size += ' (sources: %s' % (build_sources_size_human)
            build_size += ', patches: %s' % (build_patches_size_human)
            build_size += ', installed %s)' % (build_installed_size_human)
            sizes_valid = True
        #
        # Cleaning ...
        #
        if deps is None and \
           (not self.opts.no_clean() or self.opts.always_clean()):
            for b in builds:
                if not b.disabled():
                    log.notice('cleaning: %s' % (b.name()))
                    b.cleanup()
        #
        # Log the build size message
        #
        if len(builds) > 1:
            log.notice('Build Sizes: %s' % (build_size))
        #
        # Clear out the builds ...
        #
        for b in builds:
            del b
        #
        # If builds have been staged install into the finaly prefix.
        #
        if have_staging and not self.opts.no_install() and not have_errors:
            stagingroot = macro_expand(self.macros, '%{stagingroot}')
            have_stagingroot = path.exists(stagingroot)
            log.trace('_bset: %2d: install staging, present: %s' % \
                      (nesting_count, have_stagingroot))
            if have_stagingroot:
                prefix = macro_expand(self.macros, '%{_prefix}')
                self.install(self.install_mode(), self.bset, stagingroot,
                             prefix)
                staging_size = path.get_size(stagingroot)
                if not self.opts.no_clean() or self.opts.always_clean():
                    log.notice('clean staging: %s' % (self.bset))
                    log.trace('removing: %s' % (stagingroot))
                    if not self.opts.dry_run():
                        if path.exists(stagingroot):
                            path.removeall(stagingroot)
                log.notice('Staging Size: %s' % \
                           (build.humanize_number(staging_size, 'B')))
    except error.general as gerr:
        if not build_error:
            log.stderr(str(gerr))
        raise
    except KeyboardInterrupt:
        interrupted = True
        raise
    except:
        self.build_failure = 'RSB general failure'
        interrupted = True
        raise
    finally:
        # Restore the environment, log timing and send any mail report.
        end = datetime.datetime.now()
        os.environ['PATH'] = current_path
        build_time = str(end - start)
        if self.mail_single_report() and nesting_count == 1:
            mail_report = True
        if interrupted or self.macros.defined('mail_disable'):
            mail_report = False
        if mail_report and mail is not None:
            if self.installing():
                self.write_mail_header('Build Time: %s' % (build_time), True)
                self.write_mail_header('', True)
                self.write_mail_header(mail['header'], True)
                self.write_mail_header('')
                log.notice('Mailing report: %s' % (mail['to']))
                mail['log'] += self.get_mail_header()
                if sizes_valid:
                    mail['log'] += 'Sizes' + os.linesep
                    mail['log'] += '=====' + os.linesep + os.linesep
                    mail['log'] += \
                        'Maximum build usage: ' + build_max_size_human + os.linesep
                    mail['log'] += \
                        'Total size: ' + build_total_size_human + os.linesep
                    mail['log'] += \
                        'Installed : ' + build_installed_size_human + os.linesep
                    mail['log'] += 'Sources: ' + build_sources_size_human + os.linesep
                    mail['log'] += 'Patches: ' + build_patches_size_human + os.linesep
                mail['log'] += os.linesep
                mail['log'] += 'Output' + os.linesep
                mail['log'] += '======' + os.linesep + os.linesep
                mail['log'] += os.linesep.join(mail['output'].get())
                mail['log'] += os.linesep + os.linesep
                mail['log'] += 'Report' + os.linesep
                mail['log'] += '======' + os.linesep + os.linesep
                mail['reports'] += [self.get_mail_report()]
                if self.build_failure is not None:
                    mail['failure'] = self.build_failure
                if self.mail_active(mail, nesting_count):
                    self.mail_send(mail)
        log.notice('Build Set: Time %s' % (build_time))
def build(self, deps=None, nesting_count=0): build_error = False nesting_count += 1 log.trace('_bset: %s: make' % (self.bset)) log.notice('Build Set: %s' % (self.bset)) if self.opts.get_arg('--mail'): mail_report_subject = '%s %s' % (self.bset, self.macros.expand('%{_host}')) current_path = os.environ['PATH'] start = datetime.datetime.now() mail_report = False have_errors = False try: configs = self.load() log.trace('_bset: %s: configs: %s' % (self.bset, ','.join(configs))) builds = [] for s in range(0, len(configs)): b = None try: # # Each section of the build set gets a separate set of # macros so we do not contaminate one configuration with # another. # opts = copy.copy(self.opts) macros = copy.copy(self.macros) if configs[s].endswith('.bset'): log.trace('_bset: == %2d %s' % (nesting_count + 1, '=' * 75)) bs = buildset(configs[s], self.configs, opts, macros) bs.build(deps, nesting_count) del bs elif configs[s].endswith('.cfg'): mail_report = self.opts.get_arg('--mail') log.trace('_bset: -- %2d %s' % (nesting_count + 1, '-' * 75)) try: b = build.build( configs[s], self.opts.get_arg('--pkg-tar-files'), opts, macros) except: build_error = True raise if b.macros.get('%{_disable_reporting}'): mail_report = False if deps is None: self.build_package(configs[s], b) self.report(configs[s], b, copy.copy(self.opts), copy.copy(self.macros)) # Always product an XML report. 
self.report(configs[s], b, copy.copy(self.opts), copy.copy(self.macros), format='xml') if s == len(configs) - 1 and not have_errors: self.bset_tar(b) else: deps += b.config.includes() builds += [b] else: raise error.general('invalid config type: %s' % (configs[s])) except error.general, gerr: have_errors = True if b is not None: if self.build_failure is None: self.build_failure = b.name() self.write_mail_header('') self.write_mail_header('= ' * 40) self.write_mail_header('Build FAILED: %s' % (b.name())) self.write_mail_header('- ' * 40) self.write_mail_header(str(log.default)) self.write_mail_header('- ' * 40) if self.opts.keep_going(): log.notice(str(gerr)) if self.opts.always_clean(): builds += [b] else: raise else: raise # # Installing ... # log.trace('_bset: installing: deps:%r no-install:%r' % \ (deps is None, self.opts.no_install())) if deps is None \ and not self.opts.no_install() \ and not have_errors: for b in builds: log.trace('_bset: installing: %r' % b.installable()) if b.installable(): self.install(b.name(), b.config.expand('%{buildroot}'), b.config.expand('%{_prefix}')) if deps is None and \ (not self.opts.no_clean() or self.opts.always_clean()): for b in builds: if not b.disabled(): log.notice('cleaning: %s' % (b.name())) b.cleanup() for b in builds: del b
def patch_setup(self, package, args):
    """Handle a %patch setup directive for a named patch group.

    For every patch macro registered in the 'patch-<name>' macro map,
    split the macro body into patch options and a URL, download the
    patch and append the shell command that applies it to the build
    script.

    package: the active package the directive belongs to (not used here,
             part of the common directive signature).
    args:    directive words; args[1] is the patch group name, the
             remaining words are the default patch options (e.g. -p1).

    Raises error.internal if a mapped patch macro cannot be read and
    error.general on a missing URL or a malformed --rsb-file option.
    """
    name = args[1]
    args = args[2:]
    _map = 'patch-%s' % (name)
    # Options given on the %patch setup line are the fallback for any
    # patch entry that carries no options of its own.
    default_opts = ' '.join(args)
    # Every key in the map except the 'setup' control key names a patch.
    patch_keys = [p for p in self.macros.map_keys(_map) if p != 'setup']
    # NOTE(review): 'patches' is never appended to or read in this method.
    patches = []
    for p in patch_keys:
        pm = self.macros.get(p, globals=False, maps=_map)
        if pm is None:
            raise error.internal('patch macro not found: %s in %s (%s)' % \
                                 (p, name, _map))
        opts = []
        url = []
        # Leading '-' words are patch options; once the first non-option
        # word is seen everything (including later '-' words) is URL text.
        for pp in pm[2].split():
            if len(url) == 0 and pp[0] == '-':
                opts += [pp]
            else:
                url += [pp]
        if len(url) == 0:
            raise error.general('patch URL not found: %s' % (' '.join(args)))
        #
        # Look for --rsb-file as an option we use as a local file name.
        # This can be used if a URL has no reasonable file name the
        # download URL parser can figure out.
        #
        file_override = None
        if len(opts) > 0:
            for o in opts:
                if o.startswith('--rsb-file'):
                    os_ = o.split('=')
                    if len(os_) != 2:
                        raise error.general(
                            'invalid --rsb-file option: %s' % (' '.join(args)))
                    if os_[0] != '--rsb-file':
                        raise error.general(
                            'invalid --rsb-file option: %s' % (' '.join(args)))
                    file_override = os_[1]
            # Internal --rsb-* options must never reach the patch command.
            opts = [o for o in opts if not o.startswith('--rsb-')]
        if len(opts) == 0:
            opts = default_opts
        else:
            opts = ' '.join(opts)
        opts = self.config.expand(opts)
        url = self.config.expand(' '.join(url))
        #
        # Parse the URL first in the source builder's patch directory.
        #
        patch = download.parse_url(url, '_patchdir', self.config,
                                   self.opts, file_override)
        #
        # Download the patch
        #
        download.get_file(patch['url'], patch['local'], self.opts,
                          self.config)
        # A compressed patch is decompressed on the fly by the recorded
        # decompressor command; otherwise cat the local file.
        if 'compressed' in patch:
            patch['script'] = patch['compressed'] + ' ' + patch['local']
        else:
            patch['script'] = '%{__cat} ' + patch['local']
        # '%%' keeps the %{__patch} macro literal through this %-format.
        patch['script'] += ' | %%{__patch} %s' % (opts)
        self.script.append(self.config.expand(patch['script']))
def _lo_string(self, opt, macro, value): if value is None: raise error.general('option requires a value: %s' % (opt)) self.opts[opt[2:]] = value self.defaults[macro] = value
class buildset:
    """Build a set builds a set of packages."""

    def __init__(self, bset, _configs, opts, macros=None):
        """Create a build set for the named bset file.

        bset:     the build set file name.
        _configs: the configuration search data.
        opts:     the command line options; its defaults seed the macros
                  when no macro table is passed in.
        """
        log.trace('_bset: %s: init' % (bset))
        self.configs = _configs
        self.opts = opts
        if macros is None:
            self.macros = copy.copy(opts.defaults)
        else:
            self.macros = copy.copy(macros)
        self.bset = bset
        _target = self.macros.expand('%{_target}')
        # Prefer the target triplet for the package prefix; fall back to
        # the host when building natively.
        if len(_target):
            pkg_prefix = _target
        else:
            pkg_prefix = self.macros.expand('%{_host}')
        self.bset_pkg = '%s-%s-set' % (pkg_prefix, self.bset)
        self.mail_header = ''
        self.mail_report = ''
        self.build_failure = None

    def write_mail_header(self, text, prepend=False):
        """Append (or prepend) text to the mail header, ensuring it is
        newline terminated."""
        # Fix: the original 'or' chain was a tautology (a character can
        # never equal both '\n' and '\r') so a separator was always
        # appended; only add one when the text does not already end in a
        # newline.
        if len(text) == 0 or text[-1] not in '\n\r':
            text += os.linesep
        if prepend:
            self.mail_header = text + self.mail_header
        else:
            self.mail_header += text

    def write_mail_report(self, text, prepend=False):
        """Append (or prepend) text to the mail report, ensuring it is
        newline terminated."""
        # Same tautology fix as write_mail_header.
        if len(text) == 0 or text[-1] not in '\n\r':
            text += os.linesep
        if prepend:
            self.mail_report = text + self.mail_report
        else:
            self.mail_report += text

    def copy(self, src, dst):
        """Copy the src tree to dst unless this is a dry run."""
        log.output('copy: %s => %s' % (path.host(src), path.host(dst)))
        if not self.opts.dry_run():
            path.copy_tree(src, dst)

    def report(self, _config, _build, opts, macros, format=None):
        """Generate a report for a built configuration.

        Writes the report into the build root's share/rtems/rsb directory
        (unless --no-report or a dry run) and, when mailing is enabled,
        renders a text report into the mail report buffer.
        """
        if len(_build.main_package().name()) > 0 \
           and not _build.macros.get('%{_disable_reporting}') \
           and (not _build.opts.get_arg('--no-report')
                or _build.opts.get_arg('--mail')):
            if format is None:
                format = _build.opts.get_arg('--report-format')
                if format is not None:
                    if len(format) != 2:
                        raise error.general('invalid report format option: %s' % \
                                            ('='.join(format)))
                    format = format[1]
            if format is None:
                format = 'text'
            # Map the report format to the output file extension.
            if format == 'text':
                ext = '.txt'
            elif format == 'asciidoc':
                ext = '.txt'
            elif format == 'html':
                ext = '.html'
            elif format == 'xml':
                ext = '.xml'
            elif format == 'ini':
                ext = '.ini'
            else:
                raise error.general('invalid report format: %s' % (format))
            buildroot = _build.config.abspath('%{buildroot}')
            prefix = _build.macros.expand('%{_prefix}')
            name = _build.main_package().name() + ext
            log.notice('reporting: %s -> %s' % (_config, name))
            if not _build.opts.get_arg('--no-report'):
                outpath = path.host(
                    path.join(buildroot, prefix, 'share', 'rtems', 'rsb'))
                if not _build.opts.dry_run():
                    outname = path.host(path.join(outpath, name))
                else:
                    outname = None
                r = reports.report(format, self.configs,
                                   copy.copy(opts), copy.copy(macros))
                r.introduction(_build.config.file_name())
                r.generate(_build.config.file_name())
                r.epilogue(_build.config.file_name())
                if not _build.opts.dry_run():
                    _build.mkdir(outpath)
                    r.write(outname)
                del r
            if _build.opts.get_arg('--mail'):
                r = reports.report('text', self.configs,
                                   copy.copy(opts), copy.copy(macros))
                r.introduction(_build.config.file_name())
                r.generate(_build.config.file_name())
                r.epilogue(_build.config.file_name())
                self.write_mail_report(r.out)
                del r

    def root_copy(self, src, dst):
        """Collect a build root into the staging/tmp root."""
        what = '%s -> %s' % \
            (os.path.relpath(path.host(src)), os.path.relpath(path.host(dst)))
        log.trace('_bset: %s: collecting: %s' % (self.bset, what))
        self.copy(src, dst)

    def install(self, name, buildroot, prefix):
        """Install a built package from its build root to the prefix."""
        dst = prefix
        src = path.join(buildroot, prefix)
        log.notice('installing: %s -> %s' % (name, path.host(dst)))
        self.copy(src, dst)

    def canadian_cross(self, _build):
        """Run a Canadian cross first pass: build for the build machine by
        temporarily mapping the host macros onto the build macros, then
        restore the original macros."""
        log.trace('_bset: Cxc for build machine: _build => _host')
        macros_to_copy = [('%{_host}', '%{_build}'),
                          ('%{_host_alias}', '%{_build_alias}'),
                          ('%{_host_arch}', '%{_build_arch}'),
                          ('%{_host_cpu}', '%{_build_cpu}'),
                          ('%{_host_os}', '%{_build_os}'),
                          ('%{_host_vendor}', '%{_build_vendor}'),
                          ('%{_tmproot}', '%{_tmpcxcroot}'),
                          ('%{buildroot}', '%{buildcxcroot}'),
                          ('%{_builddir}', '%{_buildcxcdir}')]
        cxc_macros = _build.copy_init_macros()
        for m in macros_to_copy:
            log.trace('_bset: Cxc: %s <= %s' % (m[0], cxc_macros[m[1]]))
            cxc_macros[m[0]] = cxc_macros[m[1]]
        _build.set_macros(cxc_macros)
        _build.reload()
        _build.make()
        if not _build.macros.get('%{_disable_collecting}'):
            self.root_copy(_build.config.expand('%{buildroot}'),
                           _build.config.expand('%{_tmproot}'))
        # Restore the original macros for the real (host) build pass.
        _build.set_macros(_build.copy_init_macros())
        _build.reload()

    def build_package(self, _config, _build):
        """Build one package, performing the Canadian cross first pass
        when required, and collect the result unless disabled."""
        if not _build.disabled():
            if _build.canadian_cross():
                self.canadian_cross(_build)
            _build.make()
            if not _build.macros.get('%{_disable_collecting}'):
                self.root_copy(_build.config.expand('%{buildroot}'),
                               _build.config.expand('%{_tmproot}'))

    def bset_tar(self, _build):
        """Tar up the collected tmp root of the build set when packaging
        is enabled (via --bset-tar-file or a Canadian cross build)."""
        tardir = _build.config.expand('%{_tardir}')
        if (self.opts.get_arg('--bset-tar-file') or self.opts.canadian_cross()) \
           and not _build.macros.get('%{_disable_packaging}'):
            path.mkdir(tardir)
            tar = path.join(
                tardir, _build.config.expand('%s.tar.bz2' % (self.bset_pkg)))
            log.notice('tarball: %s' % (os.path.relpath(path.host(tar))))
            if not self.opts.dry_run():
                tmproot = _build.config.expand('%{_tmproot}')
                cmd = _build.config.expand("'cd " + tmproot + \
                    " && %{__tar} -cf - . | %{__bzip2} > " + tar + "'")
                _build.run(cmd, shell_opts='-c', cwd=tmproot)

    def parse(self, bset):
        """Parse a build set (.bset) file and return the list of
        configuration files it names, recursing into %include'd sets.

        Raises error.general if the file cannot be found or a referenced
        configuration does not exist.
        """
        def _clean(line):
            # Strip the trailing newline and anything after a '#' comment.
            line = line[0:-1]
            b = line.find('#')
            if b >= 0:
                # Fix: was line[1:b] which silently dropped the first
                # character of every line containing a comment.
                line = line[0:b]
            return line.strip()

        bsetname = bset
        # Search the configuration directories when the name is not a
        # direct path.
        if not path.exists(bsetname):
            for cp in self.macros.expand('%{_configdir}').split(':'):
                configdir = path.abspath(cp)
                bsetname = path.join(configdir, bset)
                if path.exists(bsetname):
                    break
                bsetname = None
            if bsetname is None:
                raise error.general('no build set file found: %s' % (bset))
        try:
            log.trace('_bset: %s: open: %s' % (self.bset, bsetname))
            bset = open(path.host(bsetname), 'r')
        except IOError:
            raise error.general('error opening bset file: %s' % (bsetname))
        configs = []
        try:
            lc = 0
            for l in bset:
                lc += 1
                l = _clean(l)
                if len(l) == 0:
                    continue
                log.trace('_bset: %s: %03d: %s' % (self.bset, lc, l))
                ls = l.split()
                if ls[0][-1] == ':' and ls[0][:-1] == 'package':
                    self.bset_pkg = self.macros.expand(ls[1].strip())
                    self.macros['package'] = self.bset_pkg
                elif ls[0][0] == '%':
                    def err(msg):
                        raise error.general('%s:%d: %s' % (self.bset, lc, msg))
                    if ls[0] == '%define':
                        if len(ls) > 2:
                            self.macros.define(
                                ls[1].strip(),
                                ' '.join([f.strip() for f in ls[2:]]))
                        else:
                            self.macros.define(ls[1].strip())
                    elif ls[0] == '%undefine':
                        # NOTE(review): a bare '%undefine' with no name
                        # slips past this check and fails on ls[1] below;
                        # condition kept as found — confirm intent.
                        if len(ls) > 2:
                            # Fix: '%%undefine' — the unescaped '%u' made
                            # this % format raise a TypeError at runtime.
                            raise error.general('%s:%d: %%undefine requires just the name' % \
                                                (self.bset, lc))
                        self.macros.undefine(ls[1].strip())
                    elif ls[0] == '%include':
                        configs += self.parse(ls[1].strip())
                    elif ls[0] in ['%patch', '%source']:
                        sources.process(ls[0][1:], ls[1:], self.macros, err)
                    elif ls[0] == '%hash':
                        sources.hash(ls[1:], self.macros, err)
                else:
                    l = l.strip()
                    c = build.find_config(l, self.configs)
                    if c is None:
                        raise error.general('%s:%d: cannot find file: %s'
                                            % (self.bset, lc, l))
                    configs += [c]
        except:
            # Close the file before re-raising any parse error.
            bset.close()
            raise
        bset.close()
        return configs
def open(self, command, capture=True, shell=False,
         cwd=None, env=None,
         stdin=None, stdout=None, stderr=None, timeout=None):
    """Open a command with arguments. Provide the arguments as a list or
    a string.

    Returns (exit_code, proc). When capture is False the command is left
    running and (0, proc) is returned immediately after spawning.
    Raises error.general when no output handler is set.
    """
    if self.verbose:
        s = command
        if type(command) is list:
            def add(x, y):
                return x + ' ' + str(y)
            s = functools.reduce(add, command, '')[1:]
        what = 'spawn'
        if shell:
            what = 'shell'
        log.output(what + ': ' + s)
    if self.output is None:
        raise error.general('capture needs an output handler')
    if shell and self.shell_exe:
        command = arg_list(command)
        command[:0] = self.shell_exe
    if not stdin and self.input:
        stdin = subprocess.PIPE
    if not stdout:
        stdout = subprocess.PIPE
    if not stderr:
        stderr = subprocess.PIPE
    proc = None
    if cwd is None:
        cwd = self.path
    if env is None:
        env = self.environment
    try:
        # Work around a problem on Windows with commands that
        # have a '.' and no extension. Windows needs the full
        # command name.
        if sys.platform == "win32" and type(command) is list:
            if command[0].find('.') >= 0:
                r, e = os.path.splitext(command[0])
                if e not in ['.exe', '.com', '.bat']:
                    command[0] = command[0] + '.exe'
        log.trace('exe: %s' % (command))
        proc = subprocess.Popen(command, shell=shell,
                                cwd=cwd, env=env,
                                stdin=stdin, stdout=stdout,
                                stderr=stderr)
        if not capture:
            return (0, proc)
        # Fix: removed a second 'self.output is None' check that sat here;
        # the handler is already validated before spawning, so the
        # duplicate could never trigger.
        exit_code = self.capture(proc, command, timeout)
        if self.verbose:
            log.output('exit: ' + str(exit_code))
    except OSError as ose:
        # Report the spawn failure through the exit code.
        exit_code = ose.errno
        if self.verbose:
            log.output('exit: ' + str(ose))
    return (exit_code, proc)
def load():
    """Return the default macro definitions for a Windows host.

    Determines the host/build triplets and tool names from the
    environment (native Windows, MSYS2 or Cygwin Python) and, on MSYS2,
    locates a mingw python2 for use with GDB.
    Raises error.general for unsupported Python builds or a broken MSYS2
    installation.
    """
    # Default to the native Windows Python.
    uname = 'win32'
    if 'PROCESSOR_ARCHITECTURE' in os.environ:
        if os.environ['PROCESSOR_ARCHITECTURE'] == 'AMD64':
            hosttype = 'x86_64'
            machsize = '64'
        else:
            hosttype = 'i686'
            machsize = '32'
    else:
        hosttype = 'x86_64'
        machsize = '32'
    uname = 'mingw32'
    machine = 'w%s' % (machsize)
    # Set the C/C++ compilers we want to use.
    cc = '%s-%s-%s-gcc' % (hosttype, machine, uname)
    cxx = '%s-%s-%s-g++' % (hosttype, machine, uname)
    # See if this is actually MSYS2/Cygwin Python
    if os.name == 'posix':
        _uname = os.uname()
        if _uname[0].startswith('MINGW'):
            pass
        elif _uname[0].startswith('CYGWIN'):
            hosttype = _uname[4]
            uname = 'cygwin'
            machine = 'pc'
            cc = 'gcc'
            cxx = 'g++'
        else:
            raise error.general('invalid POSIX python for Windows')
    host_triple = '%s-%s-%s' % (hosttype, machine, uname)
    build_triple = '%s-%s-%s' % (hosttype, machine, uname)
    if 'NUMBER_OF_PROCESSORS' in os.environ:
        ncpus = os.environ['NUMBER_OF_PROCESSORS']
    else:
        ncpus = '1'
    if 'MSYSTEM' in os.environ:
        # Fix: pop with a default — under MSYS2 the variable may be
        # absent and a bare pop() would raise KeyError.
        os.environ.pop('NUMBER_OF_PROCESSORS', None)
    # NOTE(review): uname is a string here ('mingw32' or 'cygwin'), so
    # uname[2] is a single character used as the build OS version —
    # looks wrong, but kept as found; confirm the intended source.
    version = uname[2]
    defines = {
        '_ncpus':            ('none',    'none',     ncpus),
        '_os':               ('none',    'none',     'win32'),
        '_build':            ('triplet', 'required', build_triple),
        '_build_vendor':     ('none',    'none',     'microsoft'),
        '_build_os':         ('none',    'none',     'win32'),
        '_build_os_version': ('none',    'none',     version),
        '_build_cpu':        ('none',    'none',     hosttype),
        '_build_alias':      ('none',    'none',     '%{nil}'),
        '_build_arch':       ('none',    'none',     hosttype),
        '_host':             ('triplet', 'required', host_triple),
        '_host_vendor':      ('none',    'none',     'microsoft'),
        '_host_os':          ('none',    'none',     'win32'),
        '_host_cpu':         ('none',    'none',     hosttype),
        '_host_alias':       ('none',    'none',     '%{nil}'),
        '_host_arch':        ('none',    'none',     hosttype),
        '_usr':              ('dir',     'optional', '/opt/local'),
        '_var':              ('dir',     'optional', '/opt/local/var'),
        '__bash':            ('exe',     'required', 'bash'),
        '__bzip2':           ('exe',     'required', 'bzip2'),
        '__bison':           ('exe',     'required', 'bison'),
        '__cat':             ('exe',     'required', 'cat'),
        '__cc':              ('exe',     'required', cc),
        '__chgrp':           ('exe',     'required', 'chgrp'),
        '__chmod':           ('exe',     'required', 'chmod'),
        '__chown':           ('exe',     'required', 'chown'),
        '__cp':              ('exe',     'required', 'cp'),
        '__cvs':             ('exe',     'optional', 'cvs'),
        '__cxx':             ('exe',     'required', cxx),
        '__flex':            ('exe',     'required', 'flex'),
        '__git':             ('exe',     'required', 'git'),
        '__grep':            ('exe',     'required', 'grep'),
        '__gzip':            ('exe',     'required', 'gzip'),
        '__id':              ('exe',     'required', 'id'),
        '__install':         ('exe',     'required', 'install'),
        '__install_info':    ('exe',     'required', 'install-info'),
        '__ld':              ('exe',     'required', 'ld'),
        '__ldconfig':        ('exe',     'none',     ''),
        '__makeinfo':        ('exe',     'required', 'makeinfo'),
        '__mkdir':           ('exe',     'required', 'mkdir'),
        '__mv':              ('exe',     'required', 'mv'),
        # Fix: the dict literal listed '__nm' twice; one entry removed.
        '__nm':              ('exe',     'required', 'nm'),
        '__objcopy':         ('exe',     'required', 'objcopy'),
        '__objdump':         ('exe',     'required', 'objdump'),
        '__patch':           ('exe',     'required', 'patch'),
        '__patch_bin':       ('exe',     'required', 'patch'),
        '__rm':              ('exe',     'required', 'rm'),
        '__sed':             ('exe',     'required', 'sed'),
        '__sh':              ('exe',     'required', 'sh'),
        '__tar':             ('exe',     'required', 'bsdtar'),
        '__touch':           ('exe',     'required', 'touch'),
        '__unzip':           ('exe',     'required', 'unzip'),
        '__xz':              ('exe',     'required', 'xz'),
        '_buildshell':       ('exe',     'required', '%{__sh}'),
        '___setup_shell':    ('exe',     'required', '%{__sh}')
    }
    #
    # Locate a suitable python to use with GDB. Python Windows is more
    # complicated than most hosts. There are 7 possible pythons on Windows and
    # we can use only 4 which are split on machine size. The types are:
    #
    #  1. Python27 - python.org, cannot use cause built with MSVC.
    #  2. Python35 - python.org, cannot use cause built with MSVC.
    #  3. MSYS/Python - MSYS2, cannot use cause it is a MSYS executable.
    #  4. W64/Python2 - Ok if machsize is 64
    #  5. W64/Python3 - gdb-7.9 needs python2.
    #  6. W64/Python2 - Ok if machsize is 32
    #  7. W64/Python3 - gdb-7.9 needs python2.
    #
    if sys.platform == 'win32' and 'MSC' in sys.version:
        raise error.general(
            'python.org Pythons are built with MSC and cannot be linked with GDB'
        )
    #
    # Search the MSYS2 install tree for a suitable python.
    #
    if sys.platform == 'msys':
        e = execute.capture_execution()
        exit_code, proc, output = e.shell("sh -c mount")
        if exit_code != 0:
            raise error.general('cannot get MSYS mount points')
        install_point = None
        for l in output.split('\n'):
            if ' on / ' in l:
                install_point = l.split()[0]
                break
        if install_point is None:
            raise error.general('cannot locate MSYS root mount point')
        if install_point[1] != ':':
            raise error.general('invalid MSYS root mount point: %s' %
                                install_point)
        # Convert 'C:...' into the MSYS '/C...' path form.
        install_point = '/%s%s' % (install_point[0], install_point[2:])
        bin_dir = '/mingw%s/bin' % (machsize)
        bin_list = os.listdir(bin_dir)
        exe = None
        for python in ['python2.exe']:
            for f in bin_list:
                if f == python:
                    exe = install_point + os.path.join(bin_dir, f)
                    break
            if exe is not None:
                break
        if exe is None:
            raise error.general(
                'no valid python found; you need a mingw%s python2 installed' %
                (machsize))
        defines['with_python_path'] = exe
    return defines
elif o[0] == '-c': create_dir = True elif o[0] == '-n': opt_name = o[1] elif o[0] == '-b': unpack_before_chdir = True elif o[0] == '-a': unpack_before_chdir = False name = None for source in self.source(setup_name): if name is None: if opt_name is None: if source: opt_name = source['name'] else: raise error.general('setup source tag not found: %d' % (source_tag)) else: name = opt_name self.script.append(self.config.expand('cd %{_builddir}')) if not deleted_dir and delete_before_unpack: self.script.append(self.config.expand('%{__rm} -rf ' + name)) deleted_dir = True if not created_dir and create_dir: self.script.append(self.config.expand('%{__mkdir_p} ' + name)) created_dir = True if not changed_dir and (not unpack_before_chdir or create_dir): self.script.append(self.config.expand('cd ' + name)) changed_dir = True self.script.append(self.config.expand(source['script'])) if not changed_dir and (unpack_before_chdir and not create_dir): self.script.append(self.config.expand('cd ' + name))
def load():
    """Return the default macro definitions for a FreeBSD host.

    Queries sysctl for the CPU count, derives the host triplet from
    uname and selects the toolchain commands appropriate for the
    detected FreeBSD major version.
    Raises error.general when no usable C or C++ compiler is found.
    """
    uname = os.uname()
    sysctl = '/sbin/sysctl '
    e = execute.capture_execution()
    exit_code, proc, output = e.shell(sysctl + 'hw.ncpu')
    if exit_code == 0:
        ncpus = output.split(' ')[1].strip()
    else:
        ncpus = '1'
    if uname[4] == 'amd64':
        cpu = 'x86_64'
    else:
        cpu = uname[4]
    version = uname[2]
    # Strip any '-RELEASE'/'-STABLE' style suffix from the version.
    if version.find('-') > 0:
        version = version.split('-')[0]
    defines = {
        '_ncpus':           ('none',    'none',     ncpus),
        '_os':              ('none',    'none',     'freebsd'),
        '_host':            ('triplet', 'required', cpu + '-freebsd' + version),
        '_host_vendor':     ('none',    'none',     'pc'),
        '_host_os':         ('none',    'none',     'freebsd'),
        '_host_os_version': ('none',    'none',     version),
        '_host_cpu':        ('none',    'none',     cpu),
        '_host_alias':      ('none',    'none',     '%{nil}'),
        '_host_arch':       ('none',    'none',     cpu),
        '_usr':             ('dir',     'required', '/usr/local'),
        '_var':             ('dir',     'optional', '/usr/local/var'),
        '__bash':           ('exe',     'optional', '/usr/local/bin/bash'),
        '__bison':          ('exe',     'required', '/usr/local/bin/bison'),
        '__git':            ('exe',     'required', '/usr/local/bin/git'),
        '__svn':            ('exe',     'required', '/usr/local/bin/svn'),
        '__unzip':          ('exe',     'optional', '/usr/local/bin/unzip'),
        '__xz':             ('exe',     'optional', '/usr/bin/xz'),
        '__make':           ('exe',     'required', 'gmake'),
        '__patch_opts':     ('none',    'none',     '-E')
    }
    # The build machine is the host machine.
    defines['_build'] = defines['_host']
    defines['_build_vendor'] = defines['_host_vendor']
    defines['_build_os'] = defines['_host_os']
    defines['_build_cpu'] = defines['_host_cpu']
    defines['_build_alias'] = defines['_host_alias']
    defines['_build_arch'] = defines['_host_arch']
    # FreeBSD 10 and above no longer have /usr/bin/cvs, but it can (e.g.) be
    # installed to /usr/local/bin/cvs through the devel/cvs port
    fb_version = int(float(version))
    if fb_version >= 10:
        #
        # FreeBSD has switched to clang plus gcc. On 10.0 cc is gcc based and
        # clang is provided however it is not building binutils-2.24.
        #
        cc = '/usr/bin/cc'
        if check.check_exe(cc, cc):
            defines['__cc'] = cc
        else:
            cc = '/usr/bin/clang'
            if not check.check_exe(cc, cc):
                raise error.general('no valid cc found')
            # NOTE(review): the clang fallback never assigns
            # defines['__cc']; kept as found — confirm whether the
            # default 'cc' macro is intended in this case.
        cxx = '/usr/bin/c++'
        if check.check_exe(cxx, cxx):
            defines['__cxx'] = cxx
        else:
            cxx = '/usr/bin/clang++'
            # Fix: the check was inverted and raised 'no valid c++ found'
            # exactly when clang++ WAS present.
            if not check.check_exe(cxx, cxx):
                raise error.general('no valid c++ found')
        #
        # Assume the compiler is clang and so we need to increase the
        # bracket depth to build the gcc ARM compiler.
        #
        defines['build_cflags'] = '-O2 -pipe -fbracket-depth=1024'
        defines['build_cxxflags'] = '-O2 -pipe -fbracket-depth=1024'
        cvs = 'cvs'
        if check.check_exe(cvs, cvs):
            defines['__cvs'] = cvs
        #
        # Fix the mess iconv is on FreeBSD 10.0.
        #
        defines['iconv_includes'] = ('none', 'none',
                                     '-I/usr/local/include -L/usr/local/lib')
        #
        # On 11.0+ makeinfo and install-info have moved to /usr/local/...
        #
        if fb_version >= 11:
            defines['__install_info'] = ('exe', 'optional',
                                         '/usr/local/bin/install-info')
            defines['__makeinfo'] = ('exe', 'required',
                                     '/usr/local/bin/makeinfo')
    else:
        # Older FreeBSD: use a gcc from the ports collection, newest first.
        for gv in ['49', '48', '47']:
            gcc = '%s-portbld-freebsd%s-gcc%s' % (cpu, version, gv)
            if check.check_exe(gcc, gcc):
                defines['__cc'] = gcc
                break
        for gv in ['49', '48', '47']:
            gxx = '%s-portbld-freebsd%s-g++%s' % (cpu, version, gv)
            if check.check_exe(gxx, gxx):
                defines['__cxx'] = gxx
                break
    return defines
def _git_exit_code(self, ec): if ec: raise error.general('git command failed (%s): %d' % (self.git, ec))
def _lo_path(self, opt, macro, value):
    """Handle a long option taking a path value.

    The path is made absolute, stored under the option name (without the
    leading '--') and mirrored into the defaults table under the given
    macro name. Raises error.general when no value was supplied.
    """
    if value is None:
        raise error.general('option requires a path: %s' % (opt))
    resolved = path.abspath(value)
    key = opt[2:]
    self.defaults[macro] = resolved
    self.opts[key] = resolved
def set_shell(self, execute):
    """Set the shell to execute when issuing a shell command.

    execute: the shell command line; its first word must be an existing
             executable file.
    Raises error.general when the shell executable cannot be found.
    """
    args = arg_list(execute)
    if len(args) == 0 or not os.path.isfile(args[0]):
        # Fix: the message read 'could find shell' — the opposite of the
        # failure being reported.
        raise error.general('could not find shell: ' + execute)
    self.shell_exe = args