def generate(name, opts, header=None, footer=None):
    """Write a build error report to the file 'name'.

    The report holds the RSB banner, the optional header, the option
    info, the RSB git identity (when the repo is valid), the host
    identification, the tail of the build log and the optional footer.
    '/' in 'name' is mapped to '-' so the report lands in the current
    directory. Any failure to write the report is logged and re-raised.
    """
    r = ['RTEMS Tools Project - Source Builder Error Report']
    if header:
        r += [' %s' % (header)]
    r += [opts.info()]
    if opts.defaults.get_value('%{_sbgit_valid}') == '1':
        r += [' %s/%s' % (opts.defaults.get_value('%{_sbgit_remotes}'),
                          opts.defaults.get_value('%{_sbgit_id}'))]
    else:
        r += [' RSB: not a valid repo']
    # os.uname() does not exist on Windows; emit a fixed label there
    # instead of crashing while trying to report an error.
    if os.name == 'nt':
        r += [' Windows']
    else:
        r += [' %s' % (' '.join(os.uname()))]
    r += ['Tail of the build log:']
    r += log.tail()
    if footer:
        r += [footer]
    try:
        name = name.replace('/', '-')
        # 'with' guarantees the handle is closed even when write()
        # fails; the original leaked the file object on a write error.
        with open(name, 'w') as report:
            report.write(os.linesep.join(r))
        log.notice(' See error report: %s' % (name))
    except:
        log.stderr('error: failure to create error report')
        raise
def _hash_check(file_, absfile, macros, remove=True):
    # Verify the checksum of the downloaded file 'absfile' against the
    # hash registered for 'file_' (via sources.get_hash). The registered
    # value is 'algorithm digest' where the digest may be either hex or
    # base64. Returns True when the check passes (or when no hash is
    # registered on an unreleased RSB), False when it fails. A failing
    # file is deleted when remove is True so a retry can re-download it.
    failed = False
    hash = sources.get_hash(file_.lower(), macros)
    if hash is not None:
        hash = hash.split()
        if len(hash) != 2:
            raise error.internal('invalid hash format: %s' % (file_))
        # hashlib.algorithms is not present on every Python version;
        # fall back to a fixed list of the common algorithms.
        try:
            hashlib_algorithms = hashlib.algorithms
        except:
            hashlib_algorithms = [
                'md5', 'sha1', 'sha224', 'sha256', 'sha384', 'sha512'
            ]
        if hash[0] not in hashlib_algorithms:
            raise error.general('invalid hash algorithm for %s: %s' %
                                (file_, hash[0]))
        # md5 and sha1 are rejected outright as insecure.
        if hash[0] in ['md5', 'sha1']:
            raise error.general('hash: %s: insecure: %s' % (file_, hash[0]))
        hasher = None
        _in = None
        try:
            hasher = hashlib.new(hash[0])
            _in = open(path.host(absfile), 'rb')
            # NOTE: reads the whole file into memory in one pass.
            hasher.update(_in.read())
        except IOError as err:
            # Read failure marks the check failed but is not fatal here.
            log.notice('hash: %s: read error: %s' % (file_, str(err)))
            failed = True
        except:
            # Unexpected error: close the handle and propagate.
            msg = 'hash: %s: error' % (file_)
            log.stderr(msg)
            log.notice(msg)
            if _in is not None:
                _in.close()
            raise
        if _in is not None:
            _in.close()
        # Accept a match against either the hex or base64 digest form.
        hash_hex = hasher.hexdigest()
        hash_base64 = base64.b64encode(hasher.digest()).decode('utf-8')
        log.output('checksums: %s: (hex: %s) (b64: %s) => %s' %
                   (file_, hash_hex, hash_base64, hash[1]))
        if hash_hex != hash[1] and hash_base64 != hash[1]:
            log.warning('checksum error: %s' % (file_))
            failed = True
        if failed and remove:
            log.warning('removing: %s' % (file_))
            if path.exists(absfile):
                try:
                    os.remove(path.host(absfile))
                except IOError as err:
                    raise error.general('hash: %s: remove: %s' %
                                        (absfile, str(err)))
                except:
                    raise error.general('hash: %s: remove error' % (file_))
        if hasher is not None:
            del hasher
    else:
        # No hash registered: fatal on a released RSB, otherwise warn.
        if version.released():
            raise error.general('%s: no hash found in released RSB' % (file_))
        log.warning('%s: no hash found' % (file_))
    return not failed
def generate(name, opts, header=None, footer=None):
    """Create an error report file unless reporting is disabled.

    The --with-error-report/--without-error-report option pair decides
    whether a report is produced; when enabled the report is written to
    'name' (with '/' mapped to '-') and its location is logged. Write
    failures are logged and re-raised.
    """
    label, result = opts.with_arg('error-report')
    enabled = (label.startswith('without_') and result != 'yes') or \
              (label.startswith('with_') and result != 'no')
    if not enabled:
        return
    report = ['RTEMS Tools Project - Source Builder Error Report']
    if header:
        report.append(' %s' % (header))
    report.append(opts.info())
    if opts.defaults.get_value('%{_sbgit_valid}') == '1':
        report.append(' %s/%s' % (opts.defaults.get_value('%{_sbgit_remotes}'),
                                  opts.defaults.get_value('%{_sbgit_id}')))
    else:
        report.append(' RSB: not a valid repo')
    # No os.uname() on Windows.
    if os.name == 'nt':
        report.append(' Windows')
    else:
        report.append(' %s' % (' '.join(os.uname())))
    report.append('Tail of the build log:')
    report.extend(log.tail())
    if footer:
        report.append(footer)
    try:
        name = name.replace('/', '-')
        with open(name, 'w') as out:
            out.write(os.linesep.join(report))
        log.notice(' See error report: %s' % (name))
    except:
        log.stderr('error: failure to create error report')
        raise
def run(args):
    """Command-line entry point: build each configuration file in args.

    With --list-configs the available configurations are printed instead
    of building. Build failures log 'Build FAILED' and set the exit
    code. Uses Python 3 print()/except-as syntax; the original used the
    Python-2-only forms which are syntax errors on Python 3.
    """
    ec = 0
    try:
        optargs = {'--list-configs': 'List available configurations'}
        opts = options.load(args, optargs)
        log.notice('RTEMS Source Builder, Package Builder v%s' % (version.str()))
        if not check.host_setup(opts):
            if not opts.force():
                raise error.general('host build environment is not set up' +
                                    ' correctly (use --force to proceed)')
            log.notice('warning: forcing build with known host setup problems')
        if opts.get_arg('--list-configs'):
            configs = get_configs(opts)
            for p in configs['paths']:
                print('Examining: %s' % (os.path.relpath(p)))
            for c in configs['files']:
                if c.endswith('.cfg'):
                    print(' %s' % (c))
        else:
            # Build every configuration named on the command line.
            for config_file in opts.config_files():
                b = build(config_file, True, opts)
                b.make()
                b = None
    except error.general as gerr:
        log.stderr('Build FAILED')
        ec = 1
def generate(name, opts, header=None, footer=None):
    """Produce the build error report file 'name' when enabled.

    Report production is governed by the error-report with/without
    option pair; when it resolves to disabled nothing is written.
    """
    label, result = opts.with_arg('error-report')
    if (label.startswith('without_') and result != 'yes') or \
       (label.startswith('with_') and result != 'no'):
        lines = ['RTEMS Tools Project - Source Builder Error Report']
        if header:
            lines.append(' %s' % (header))
        lines.append(opts.info())
        valid_repo = opts.defaults.get_value('%{_sbgit_valid}') == '1'
        if valid_repo:
            remotes = opts.defaults.get_value('%{_sbgit_remotes}')
            commit = opts.defaults.get_value('%{_sbgit_id}')
            lines.append(' %s/%s' % (remotes, commit))
        else:
            lines.append(' RSB: not a valid repo')
        # Windows has no os.uname().
        if os.name == 'nt':
            lines.append(' Windows')
        else:
            lines.append(' %s' % (' '.join(os.uname())))
        lines.append('Tail of the build log:')
        lines.extend(log.tail())
        if footer:
            lines.append(footer)
        try:
            name = name.replace('/', '-')
            with open(name, 'w') as out:
                out.write(os.linesep.join(lines))
            log.notice(' See error report: %s' % (name))
        except:
            log.stderr('error: failure to create error report')
            raise
def run(args):
    """Build every configuration file given on the command line.

    --list-configs prints the available configurations instead of
    building. Failures log 'Build FAILED' and set the exit code.
    Converted to Python 3 print()/except-as syntax; the original
    Python-2-only forms do not parse on Python 3.
    """
    ec = 0
    try:
        optargs = {'--list-configs': 'List available configurations'}
        opts = options.load(args, optargs)
        log.notice('RTEMS Source Builder, Package Builder v%s' % (version.str()))
        if not check.host_setup(opts):
            if not opts.force():
                raise error.general('host build environment is not set up' +
                                    ' correctly (use --force to proceed)')
            log.notice('warning: forcing build with known host setup problems')
        if opts.get_arg('--list-configs'):
            configs = get_configs(opts)
            for p in configs['paths']:
                print('Examining: %s' % (os.path.relpath(p)))
            for c in configs['files']:
                if c.endswith('.cfg'):
                    print(' %s' % (c))
        else:
            for config_file in opts.config_files():
                b = build(config_file, True, opts)
                b.make()
                b = None
    except error.general as gerr:
        log.stderr('Build FAILED')
        ec = 1
def cmd_define(WORD, g):
    """ Print dictionary info for word name or synonyms. """
    # Returns 1 on missing input or no results; None otherwise.
    if not WORD:
        return 1
    syn, syns = syn_or_syns(WORD)
    if not syn and not syns:
        log.stderr('{yellow}No results{default}')
        return 1
    # d: indent depth, defs: running definition counter.
    d, defs = 1, 0
    if syn:
        # assumes WORD is 'word.pos.sense' when a specific sense matched
        # (syn truthy); the 3-way unpack raises otherwise -- TODO confirm
        # syn_or_syns guarantees that shape.
        WORD, _, d = WORD.split('.')
        d = int(d)
    # XXX: maybe move word outside, together with short origin and
    # pronunciation as most including oldest dictionary typographies do
    for i, s in enumerate(syns):
        defs += 1
        # Number every definition after the first.
        if i != 0:
            print("%s(%i)" % (WORD.upper(), defs), end='. ')
        else:
            print(WORD.upper(), end='. ')
        print_short_def(s, w=WORD)
        print((' '*INDENT*(1+d))+position_label(s)+' '+s.definition())
        if s.examples():
            print()
            for e in s.examples():
                print((' '*INDENT*(2+d))+'"%s"'%(e,))
        # TODO: how to get at derived forms?
        #for a in s.entailments() + s.similar_tos() + s.also_sees():
        #    print((' '*INDENT*(1+d))+', '.join(a.name()))
    # Trailing blank line only when several synsets were printed.
    if len(syns)>1:
        print()
def cmd_examples(WORD, g):
    """ List soft definitions with examples for word name or synonyms.
        Skip synonyms without examples.

        Returns 1 on missing input or no results; None otherwise.
    """
    if not WORD:
        return 1
    syn, syns = syn_or_syns(WORD)
    if not syn and not syns:
        # NOTE(review): sibling commands close this message with
        # '{default}' rather than '{blue}' -- confirm which is intended.
        log.stderr('{yellow}No results{blue}')
        return 1
    d = 0
    if g.head:
        if syn:
            WORD = wn_sense(WORD, syn)
        log.stdout('{green}'+WORD+'{blue}')
        d = 1
    for i, s in enumerate(syns):
        if not s.examples():
            continue
        print_short_def(s, d=d, i=i, w=WORD)
        print()
        for e in s.examples():
            print((' '*INDENT*(1+d))+'"%s"'%(e,))
        # A specific sense prints no separator. The original set i to
        # None inside the example loop and then crashed with a TypeError
        # on 'i + 1' here; guard on syn instead.
        if not syn and i+1 < len(syns):
            print()
def _process_data(self, results, directive, info, data):
    # Process the body lines of a parsed configuration fragment.
    # 'results[1]' holds the raw lines; '%error' aborts the parse,
    # '%warning' logs. Outside a directive, lines are macro-expanded and
    # split on the tag separator into an info name/value pair; inside a
    # directive they are collected verbatim.
    # Returns the (possibly updated) (directive, info, data) triple.
    new_data = []
    for l in results[1]:
        if l.startswith('%error'):
            l = self._expand(l)
            # 7 == len('%error '); the remainder is the message.
            raise error.general('config error: %s' % (l[7:]))
        elif l.startswith('%warning'):
            l = self._expand(l)
            # 9 == len('%warning ').
            log.stderr('warning: %s' % (l[9:]))
            log.warning(l[9:])
        if not directive:
            l = self._expand(l)
            # Split once on the tag separator: 'name: value'.
            ls = self.tags.split(l, 1)
            log.trace('config: %s: _tag: %s %s' % (self.name, l, ls))
            if len(ls) > 1:
                info = ls[0].lower()
                if info[-1] == ':':
                    info = info[:-1]
                info_data = ls[1].strip()
            else:
                # No tag on this line; it continues the current info.
                info_data = ls[0].strip()
            if info is not None:
                self._info_append(info, info_data)
            else:
                log.warning("invalid format: '%s'" % (info_data[:-1]))
        else:
            # Directive body lines are kept unexpanded here; expansion
            # presumably happens later -- TODO confirm against callers.
            log.trace('config: %s: _data: %s %s' % (self.name, l, new_data))
            new_data.append(l)
    return (directive, info, data + new_data)
def run(args):
    """Build each configuration file named in args.

    --list-configs lists the available configurations instead of
    building. Build failures log and set the exit code. Rewritten with
    Python 3 print()/except-as syntax; the original Python-2-only forms
    are syntax errors on Python 3.
    """
    ec = 0
    try:
        optargs = {"--list-configs": "List available configurations"}
        opts = options.load(args, optargs)
        log.notice("RTEMS Source Builder, Package Builder v%s" % (version.str()))
        if not check.host_setup(opts):
            if not opts.force():
                raise error.general("host build environment is not set up" +
                                    " correctly (use --force to proceed)")
            log.notice("warning: forcing build with known host setup problems")
        if opts.get_arg("--list-configs"):
            configs = get_configs(opts)
            for p in configs["paths"]:
                print("Examining: %s" % (os.path.relpath(p)))
            for c in configs["files"]:
                if c.endswith(".cfg"):
                    print(" %s" % (c))
        else:
            for config_file in opts.config_files():
                b = build(config_file, True, opts)
                b.make()
                b = None
    except error.general as gerr:
        log.stderr("Build FAILED")
        ec = 1
def _process_data(self, results, directive, info, data):
    # Process the body lines of a parsed configuration fragment.
    # '%error' aborts the parse, '%warning' logs. Outside a directive,
    # lines become info name/value pairs; inside a directive they are
    # macro-expanded and appended to the directive's data.
    # Returns the updated (directive, info, data) triple.
    new_data = []
    for l in results[1]:
        if l.startswith('%error'):
            l = self._expand(l)
            # 7 == len('%error ').
            raise error.general('config error: %s' % (l[7:]))
        elif l.startswith('%warning'):
            l = self._expand(l)
            # 9 == len('%warning ').
            log.stderr('warning: %s' % (l[9:]))
            log.warning(l[9:])
        if not directive:
            l = self._expand(l)
            # Split once on the tag separator: 'name: value'.
            ls = self.tags.split(l, 1)
            log.trace('config: %s: _tag: %s %s' % (self.name, l, ls))
            if len(ls) > 1:
                info = ls[0].lower()
                if info[-1] == ':':
                    info = info[:-1]
                info_data = ls[1].strip()
            else:
                # No tag; the line continues the current info entry.
                info_data = ls[0].strip()
            if info is not None:
                self._info_append(info, info_data)
            else:
                log.warning("invalid format: '%s'" % (info_data[:-1]))
        else:
            # Directive body lines are macro-expanded before being kept.
            l = self._expand(l)
            log.trace('config: %s: _data: %s %s' % (self.name, l, new_data))
            new_data.append(l)
    return (directive, info, data + new_data)
def _error(self, msg):
    """Log a configuration error and switch the build to dry-run.

    The error is written to stderr and the output log, the in_error
    flag is raised, and (once only) the options are flipped to dry-run
    so the rest of the configuration can still be scanned.
    """
    text = 'error: %s' % (self._name_line_msg(msg))
    log.stderr(text)
    log.output(text)
    self.in_error = True
    if self.opts.dry_run():
        return
    log.stderr('warning: switched to dry run due to errors')
    self.opts.set_dry_run()
def _error(self, msg):
    # Report a configuration error. When not already in dry-run mode
    # and --keep-going is set, log the error, flag the build as in
    # error and switch to dry-run so the remaining configuration can
    # still be scanned. The error is then raised for the caller.
    # NOTE(review): reconstructed from a whitespace-mangled source; the
    # final raise is taken to be unconditional -- confirm the original
    # nesting.
    if not self.opts.dry_run():
        if self.opts.keep_going():
            err = 'error: %s' % (self._name_line_msg(msg))
            log.stderr(err)
            log.output(err)
            self.in_error = True
            log.stderr('warning: switched to dry run due to errors')
            self.opts.set_dry_run()
    raise error.general(self._name_line_msg(msg))
def run():
    """Command-line entry point for the set builder.

    Parses the options, checks the host environment and builds every
    build set named on the command line, optionally collecting and
    printing the dependent files (--list-deps). Converted to Python 3
    print()/except-as syntax; the Python-2-only originals do not parse
    on Python 3.
    """
    import sys
    ec = 0
    # True while a buildset is being constructed/built; suppresses a
    # duplicate error print (the buildset logs its own failure).
    setbuilder_error = False
    try:
        optargs = {
            "--list-configs": "List available configurations",
            "--list-bsets": "List available build sets",
            "--list-deps": "List the dependent files.",
            "--bset-tar-file": "Create a build set tar file",
            "--pkg-tar-files": "Create package tar files",
            "--no-report": "Do not create a package report.",
            "--report-format": "The report format (text, html, asciidoc).",
        }
        mailer.append_options(optargs)
        opts = options.load(sys.argv, optargs)
        log.notice("RTEMS Source Builder - Set Builder, %s" % (version.str()))
        opts.log_info()
        if not check.host_setup(opts):
            raise error.general("host build environment is not set up correctly")
        configs = build.get_configs(opts)
        # deps is a list only when dependencies are being collected.
        if opts.get_arg("--list-deps"):
            deps = []
        else:
            deps = None
        if not list_bset_cfg_files(opts, configs):
            prefix = opts.defaults.expand("%{_prefix}")
            if opts.canadian_cross():
                opts.disable_install()
            # Only an installing, non-dry-run, non-Cxc build needs a
            # writable prefix.
            if (not opts.dry_run() and not opts.canadian_cross()
                    and not opts.no_install()
                    and not path.ispathwritable(prefix)):
                raise error.general("prefix is not writable: %s" % (path.host(prefix)))
            for bset in opts.params():
                setbuilder_error = True
                b = buildset(bset, configs, opts)
                b.build(deps)
                b = None
                setbuilder_error = False
        # NOTE(review): reconstructed from whitespace-mangled source;
        # the dependency listing is taken to follow the build loop.
        if deps is not None:
            for c, d in enumerate(sorted(set(deps)), 1):
                print("dep[%d]: %s" % (c, d))
    except error.general as gerr:
        if not setbuilder_error:
            log.stderr(str(gerr))
        log.stderr("Build FAILED")
        ec = 1
def run():
    """Set builder entry point: build the build sets given in sys.argv.

    Optionally lists configurations/build sets or collects dependency
    information (--list-deps). Uses Python 3 print()/except-as syntax;
    the original Python-2-only forms are syntax errors on Python 3.
    """
    import sys
    ec = 0
    # True while a buildset is in flight; avoids double error printing.
    setbuilder_error = False
    try:
        optargs = {
            '--list-configs': 'List available configurations',
            '--list-bsets': 'List available build sets',
            '--list-deps': 'List the dependent files.',
            '--bset-tar-file': 'Create a build set tar file',
            '--pkg-tar-files': 'Create package tar files',
            '--no-report': 'Do not create a package report.',
            '--report-format': 'The report format (text, html, asciidoc).'
        }
        mailer.append_options(optargs)
        opts = options.load(sys.argv, optargs)
        log.notice('RTEMS Source Builder - Set Builder, %s' % (version.str()))
        opts.log_info()
        if not check.host_setup(opts):
            raise error.general(
                'host build environment is not set up correctly')
        configs = build.get_configs(opts)
        if opts.get_arg('--list-deps'):
            deps = []
        else:
            deps = None
        if not list_bset_cfg_files(opts, configs):
            prefix = opts.defaults.expand('%{_prefix}')
            if opts.canadian_cross():
                opts.disable_install()
            if not opts.dry_run() and \
               not opts.canadian_cross() and \
               not opts.no_install() and \
               not path.ispathwritable(prefix):
                raise error.general('prefix is not writable: %s' %
                                    (path.host(prefix)))
            for bset in opts.params():
                setbuilder_error = True
                b = buildset(bset, configs, opts)
                b.build(deps)
                b = None
                setbuilder_error = False
        # NOTE(review): reconstructed from whitespace-mangled source;
        # the dependency listing is taken to follow the build loop.
        if deps is not None:
            for c, d in enumerate(sorted(set(deps)), 1):
                print('dep[%d]: %s' % (c, d))
    except error.general as gerr:
        if not setbuilder_error:
            log.stderr(str(gerr))
        log.stderr('Build FAILED')
        ec = 1
def _hash_check(file_, absfile, macros, remove = True):
    # Verify the hex checksum of 'absfile' against the hash registered
    # for 'file_' ('algorithm digest'). Returns True on success (or when
    # no hash is registered on an unreleased RSB), False on failure; a
    # failing file is removed when 'remove' is True.
    failed = False
    hash = sources.get_hash(file_.lower(), macros)
    if hash is not None:
        hash = hash.split()
        if len(hash) != 2:
            raise error.internal('invalid hash format: %s' % (file_))
        # hashlib.algorithms is version-dependent; fall back to a fixed
        # list of the common algorithms.
        try:
            hashlib_algorithms = hashlib.algorithms
        except:
            hashlib_algorithms = ['md5', 'sha1', 'sha224', 'sha256',
                                  'sha384', 'sha512']
        if hash[0] not in hashlib_algorithms:
            raise error.general('invalid hash algorithm for %s: %s' %
                                (file_, hash[0]))
        # Reject insecure algorithms outright.
        if hash[0] in ['md5', 'sha1']:
            raise error.general('hash: %s: insecure: %s' % (file_, hash[0]))
        hasher = None
        _in = None
        try:
            hasher = hashlib.new(hash[0])
            _in = open(path.host(absfile), 'rb')
            # NOTE: reads the whole file into memory in one pass.
            hasher.update(_in.read())
        except IOError as err:
            log.notice('hash: %s: read error: %s' % (file_, str(err)))
            failed = True
        except:
            # Unexpected error: close the handle and propagate.
            msg = 'hash: %s: error' % (file_)
            log.stderr(msg)
            log.notice(msg)
            if _in is not None:
                _in.close()
            raise
        if _in is not None:
            _in.close()
        log.output('checksums: %s: %s => %s' % (file_,
                                                hasher.hexdigest(), hash[1]))
        if hasher.hexdigest() != hash[1]:
            log.warning('checksum error: %s' % (file_))
            failed = True
        if failed and remove:
            log.warning('removing: %s' % (file_))
            if path.exists(absfile):
                try:
                    os.remove(path.host(absfile))
                except IOError as err:
                    raise error.general('hash: %s: remove: %s' %
                                        (absfile, str(err)))
                except:
                    raise error.general('hash: %s: remove error' % (file_))
        if hasher is not None:
            del hasher
    else:
        # No hash registered: fatal on a released RSB, otherwise warn.
        if version.released():
            raise error.general('%s: no hash found in released RSB' % (file_))
        log.warning('%s: no hash found' % (file_))
    return not failed
def cmd_positions(WORD, g):
    """ Print positions that given word name or synonym appears in
        speech: adj, adj-sat, adv, noun and/or verb.

        Returns 1 on missing input or no results; None otherwise.
    """
    if not WORD:
        return 1
    syn, syns = syn_or_syns(WORD)
    if not syn and not syns:
        log.stderr('{yellow}No results{default}')
        return 1
    # Collect the distinct position labels across all synsets. The
    # original referenced 'poss' before assignment (NameError) and tried
    # to add a set to a string (TypeError).
    poss = set()
    for s in syns:
        poss.add(position_label(s))
    print(" ".join(poss))
def make(self):
    # Build the main package: prepare, generate the build and clean
    # scripts, run them and record the sizes. A disabled package only
    # logs; a dry run generates but never writes or executes the
    # scripts. Failures are logged, reported via _generate_report_() and
    # re-raised.
    package = self.main_package()
    if package.disabled():
        log.notice('package: nothing to build')
    else:
        try:
            name = package.name()
            # Canadian-cross builds are labelled '(Cxc)' in the log.
            if self.canadian_cross():
                cxc_label = '(Cxc) '
            else:
                cxc_label = ''
            log.notice('package: %s%s' % (cxc_label, name))
            log.trace('---- macro maps %s' % ('-' * 55))
            log.trace('%s' % (str(self.config.macros)))
            log.trace('-' * 70)
            self.script_build.reset()
            self.script_build.append(
                self.config.expand('%{___build_template}'))
            self.script_build.append('echo "=> ' + name + ': BUILD"')
            self.prep(package)
            self.build_package(package)
            if not self.opts.dry_run():
                self.builddir()
                # Write the generated build and clean shell scripts into
                # the build directory, then run them in turn.
                build_sn = path.join(self.config.expand('%{_builddir}'),
                                     'do-build')
                log.output('write script: ' + build_sn)
                self.script_build.write(build_sn)
                clean_sn = path.join(self.config.expand('%{_builddir}'),
                                     'do-clean')
                log.output('write script: ' + clean_sn)
                self.script_clean.write(clean_sn)
                log.notice('building: %s%s' % (cxc_label, name))
                self.run(build_sn)
                self.sizes(package)
                log.notice('cleaning: %s%s' % (cxc_label, name))
                self.run(clean_sn)
        except error.general as gerr:
            log.notice(str(gerr))
            log.stderr('Build FAILED')
            self._generate_report_('Build: %s' % (gerr))
            raise
        except error.internal as ierr:
            log.notice(str(ierr))
            log.stderr('Internal Build FAILED')
            self._generate_report_('Build: %s' % (ierr))
            raise
        except:
            raise
    # A dry run still produces a report so the user gets the log tail.
    if self.opts.dry_run():
        self._generate_report_('Build: dry run (no actual error)',
                               'Build: dry run (no actual error)')
def run():
    # Set builder entry point. Parses the options, checks the host
    # environment and builds each build set named on the command line;
    # exits the process with 0 on success, 1 on failure or interrupt.
    import sys
    ec = 0
    # True while a buildset is being constructed/built; used so the
    # error is not printed twice (the buildset logs its own failures).
    setbuilder_error = False
    try:
        optargs = {
            '--list-configs': 'List available configurations',
            '--list-bsets': 'List available build sets',
            '--list-deps': 'List the dependent files.',
            '--bset-tar-file': 'Create a build set tar file',
            '--pkg-tar-files': 'Create package tar files',
            '--no-report': 'Do not create a package report.',
            '--report-format': 'The report format (text, html, asciidoc).'
        }
        mailer.append_options(optargs)
        opts = options.load(sys.argv, optargs)
        log.notice('RTEMS Source Builder - Set Builder, %s' % (version.str()))
        opts.log_info()
        if not check.host_setup(opts):
            raise error.general('host build environment is not set up correctly')
        configs = build.get_configs(opts)
        # deps is a list only when dependency collection was requested.
        if opts.get_arg('--list-deps'):
            deps = []
        else:
            deps = None
        if not list_bset_cfg_files(opts, configs):
            prefix = opts.defaults.expand('%{_prefix}')
            if opts.canadian_cross():
                opts.disable_install()
            # Only an installing, non-dry-run, non-Cxc build needs a
            # writable prefix.
            if not opts.dry_run() and \
               not opts.canadian_cross() and \
               not opts.no_install() and \
               not path.ispathwritable(prefix):
                raise error.general('prefix is not writable: %s' %
                                    (path.host(prefix)))
            for bset in opts.params():
                setbuilder_error = True
                b = buildset(bset, configs, opts)
                b.build(deps)
                b = None
                setbuilder_error = False
        # NOTE(review): reconstructed from a whitespace-mangled source;
        # the dependency listing is taken to follow the build loop --
        # confirm the original nesting.
        if deps is not None:
            c = 0
            for d in sorted(set(deps)):
                c += 1
                print('dep[%d]: %s' % (c, d))
    except error.general as gerr:
        if not setbuilder_error:
            log.stderr(str(gerr))
        log.stderr('Build FAILED')
        ec = 1
    except error.internal as ierr:
        if not setbuilder_error:
            log.stderr(str(ierr))
        log.stderr('Internal Build FAILED')
        ec = 1
    except error.exit as eerr:
        pass
    except KeyboardInterrupt:
        log.notice('abort: user terminated')
        ec = 1
    sys.exit(ec)
def make(self):
    # Build the main package of this configuration. Prepares the
    # sources, generates the do-build/do-clean scripts and, unless this
    # is a dry run, writes and executes them and records the package
    # sizes. Disabled packages only log. Build errors are logged,
    # reported via _generate_report_() and re-raised.
    package = self.main_package()
    if package.disabled():
        log.notice('package: nothing to build')
    else:
        try:
            name = package.name()
            # '(Cxc)' marks canadian-cross builds in the log output.
            if self.canadian_cross():
                cxc_label = '(Cxc) '
            else:
                cxc_label = ''
            log.notice('package: %s%s' % (cxc_label, name))
            log.trace('---- macro maps %s' % ('-' * 55))
            log.trace('%s' % (str(self.config.macros)))
            log.trace('-' * 70)
            self.script_build.reset()
            self.script_build.append(self.config.expand('%{___build_template}'))
            self.script_build.append('echo "=> ' + name + ': BUILD"')
            self.prep(package)
            self.build_package(package)
            if not self.opts.dry_run():
                self.builddir()
                build_sn = path.join(self.config.expand('%{_builddir}'),
                                     'do-build')
                log.output('write script: ' + build_sn)
                self.script_build.write(build_sn)
                clean_sn = path.join(self.config.expand('%{_builddir}'),
                                     'do-clean')
                log.output('write script: ' + clean_sn)
                self.script_clean.write(clean_sn)
                log.notice('building: %s%s' % (cxc_label, name))
                self.run(build_sn)
                self.sizes(package)
                log.notice('cleaning: %s%s' % (cxc_label, name))
                self.run(clean_sn)
        except error.general as gerr:
            log.notice(str(gerr))
            log.stderr('Build FAILED')
            self._generate_report_('Build: %s' % (gerr))
            raise
        except error.internal as ierr:
            log.notice(str(ierr))
            log.stderr('Internal Build FAILED')
            self._generate_report_('Build: %s' % (ierr))
            raise
        except:
            raise
    # A dry run still produces a report so the user sees the log tail.
    if self.opts.dry_run():
        self._generate_report_('Build: dry run (no actual error)',
                               'Build: dry run (no actual error)')
def __init__(self, name, create_tar_files, opts, macros=None):
    """Load the build configuration 'name' and prepare it for building.

    macros defaults to the option defaults (via set_macros). A failure
    while reading the configuration is logged as a failed build and
    re-raised. Uses the Python 3 'except ... as' form; the original
    'except X, e' is a syntax error on Python 3.
    """
    try:
        self.opts = opts
        self.init_name = name
        self.init_macros = macros
        self.config = None
        self.create_tar_files = create_tar_files
        log.notice('config: ' + name)
        self.set_macros(macros)
        self.config = config.file(name, opts, self.macros)
        self.script = script()
        # Shorten the build name on hosts that need it (see _name_).
        self.macros['buildname'] = self._name_(self.macros['name'])
    except error.general as gerr:
        log.notice(str(gerr))
        log.stderr('Build FAILED')
        raise
def __init__(self, name, create_tar_files, opts, macros = None):
    """Load the build configuration 'name' and prepare it for building.

    When macros is None the option defaults are used. Failures while
    reading the configuration are logged as a failed build and
    re-raised. Uses the Python 3 'except ... as' form; the original
    'except X, e' is a syntax error on Python 3.
    """
    try:
        self.opts = opts
        # Fall back to the option defaults when no macros are supplied.
        if macros is None:
            self.macros = opts.defaults
        else:
            self.macros = macros
        self.create_tar_files = create_tar_files
        log.notice('config: ' + name)
        self.config = config.file(name, opts, self.macros)
        self.script = script()
        self.macros['buildname'] = self._name_(self.macros['name'])
    except error.general as gerr:
        log.notice(str(gerr))
        log.stderr('Build FAILED')
        raise
def cmd_path(WORD, g):
    """ Print each synset of the word with its hypernym chain indented
        beneath it.

        Returns 1 on missing input or no results; None otherwise.
    """
    if not WORD:
        return 1
    syn, syns = syn_or_syns(WORD)
    if not syn and not syns:
        log.stderr('{yellow}No results{default}')
        return 1
    d_ = 0
    if g.head:
        if syn:
            WORD = wn_sense(WORD, syn)
        log.stdout('{green}'+WORD+'{blue}')
        d_ += 1
    for i, syn in enumerate(syns):
        print_short_def(syn, w=WORD, d=d_, i=i)
        # Walk up the hypernym chain, indenting one level per step.
        # (The dead 'pass#print(x)' statement from the original was
        # removed.)
        for d, x in traverse_hypernyms(syn, d_+1):
            print_short_def(x, w=WORD, d=d)
def make(self):
    """Build the main package: prep, generate the build script, run it.

    Disabled packages only log; a dry run generates the script but
    never writes or executes it. Failures are logged, reported via
    _generate_report_() and re-raised. Uses Python 3 'except ... as'
    handlers; the original 'except X, e' form is a syntax error on
    Python 3.
    """
    package = self.main_package()
    if package.disabled():
        log.notice('package: nothing to build')
    else:
        try:
            name = package.name()
            if self.canadian_cross():
                log.notice('package: (Cxc) %s' % (name))
            else:
                log.notice('package: %s' % (name))
            log.trace('---- macro maps %s' % ('-' * 55))
            log.trace('%s' % (str(self.config.macros)))
            log.trace('-' * 70)
            self.script.reset()
            self.script.append(self.config.expand('%{___build_template}'))
            self.script.append('echo "=> ' + name + ':"')
            self.prep(package)
            self.build_package(package)
            if not self.opts.dry_run():
                self.builddir()
                # Write the generated shell script and execute it.
                sn = path.join(self.config.expand('%{_builddir}'), 'doit')
                log.output('write script: ' + sn)
                self.script.write(sn)
                if self.canadian_cross():
                    log.notice('building: (Cxc) %s' % (name))
                else:
                    log.notice('building: %s' % (name))
                self.run(sn)
        except error.general as gerr:
            log.notice(str(gerr))
            log.stderr('Build FAILED')
            self._generate_report_('Build: %s' % (gerr))
            raise
        except error.internal as ierr:
            log.notice(str(ierr))
            log.stderr('Internal Build FAILED')
            self._generate_report_('Build: %s' % (ierr))
            raise
def __init__(self, name, create_tar_files, opts, macros = None):
    """Construct a build for the configuration file 'name'.

    The configuration is loaded immediately; errors raised while
    loading are logged as a failed build and propagated unchanged.
    """
    def _fail(exc, banner):
        # Log the exception and the failure banner.
        log.notice(str(exc))
        log.stderr(banner)
    try:
        self.opts = opts
        self.init_name = name
        self.init_macros = macros
        self.config = None
        self.create_tar_files = create_tar_files
        log.notice('config: ' + name)
        self.set_macros(macros)
        self.config = config.file(name, opts, self.macros)
        self.script = script()
        self.macros['buildname'] = self._name_(self.macros['name'])
    except error.general as e:
        _fail(e, 'Build FAILED')
        raise
    except error.internal as e:
        _fail(e, 'Internal Build FAILED')
        raise
    except:
        raise
class build:
    """Build a package given a config file."""

    def _name_(self, name):
        #
        # If on Windows use shorter names to keep the build paths.
        # The short name keeps the first character of each '-'
        # separated component.
        #
        if options.host_windows:
            buildname = ''
            add = True
            for c in name:
                if c == '-':
                    add = True
                elif add:
                    buildname += c
                    add = False
            return buildname
        else:
            return name

    def _generate_report_(self, header, footer=None):
        # Create the rsb-report file for this package.
        # NOTE(review): this guard fires only for
        # ('without_error_report', 'no'); sibling versions of this check
        # use a broader condition -- confirm the intended polarity of
        # the with/without test.
        label, result = self.opts.with_arg('error-report')
        if label.startswith('without') and result == 'no':
            ereport.generate('rsb-report-%s.txt' % self.macros['name'],
                             self.opts, header, footer)

    def __init__(self, name, create_tar_files, opts, macros=None):
        # Load the configuration 'name'; failures are logged as a
        # failed build and re-raised.
        try:
            self.opts = opts
            self.init_name = name
            self.init_macros = macros
            self.config = None
            self.create_tar_files = create_tar_files
            log.notice('config: ' + name)
            self.set_macros(macros)
            self.config = config.file(name, opts, self.macros)
            self.script = script()
            self.macros['buildname'] = self._name_(self.macros['name'])
        except error.general, gerr:
            log.notice(str(gerr))
            log.stderr('Build FAILED')
            raise
        except error.internal, ierr:
            log.notice(str(ierr))
            log.stderr('Internal Build FAILED')
            raise
if deps is None and not self.opts.no_install() and not have_errors: for b in builds: log.trace("_bset: installing: %r" % b.installable()) if b.installable(): self.install(b.name(), b.config.expand("%{buildroot}"), b.config.expand("%{_prefix}")) if deps is None and (not self.opts.no_clean() or self.opts.always_clean()): for b in builds: if not b.disabled(): log.notice("cleaning: %s" % (b.name())) b.cleanup() for b in builds: del b except error.general, gerr: if not build_error: log.stderr(str(gerr)) raise except KeyboardInterrupt: mail_report = False raise except: self.build_failure = "RSB general failure" raise finally: end = datetime.datetime.now() os.environ["PATH"] = current_path build_time = str(end - start) if mail_report: to_addr = self.opts.get_arg("--mail-to") if to_addr is not None: to_addr = to_addr[1]
raise error.general("host build environment is not set up" + " correctly (use --force to proceed)") log.notice("warning: forcing build with known host setup problems") if opts.get_arg("--list-configs"): configs = get_configs(opts) for p in configs["paths"]: print "Examining: %s" % (os.path.relpath(p)) for c in configs["files"]: if c.endswith(".cfg"): print " %s" % (c) else: for config_file in opts.config_files(): b = build(config_file, True, opts) b.make() b = None except error.general, gerr: log.stderr("Build FAILED") ec = 1 except error.internal, ierr: log.stderr("Internal Build FAILED") ec = 1 except error.exit, eerr: pass except KeyboardInterrupt: log.notice("abort: user terminated") ec = 1 sys.exit(ec) if __name__ == "__main__": run(sys.argv)
raise error.general('host build environment is not set up' + ' correctly (use --force to proceed)') log.notice('warning: forcing build with known host setup problems') if opts.get_arg('--list-configs'): configs = get_configs(opts) for p in configs['paths']: print 'Examining: %s' % (os.path.relpath(p)) for c in configs['files']: if c.endswith('.cfg'): print ' %s' % (c) else: for config_file in opts.config_files(): b = build(config_file, True, opts) b.make() b = None except error.general, gerr: log.stderr('Build FAILED') ec = 1 except error.internal, ierr: log.stderr('Internal Build FAILED') ec = 1 except error.exit, eerr: pass except KeyboardInterrupt: log.notice('abort: user terminated') ec = 1 sys.exit(ec) if __name__ == "__main__": run(sys.argv)
def run(args=sys.argv):
    """Entry point for the get-sources command.

    Parses its own argparse options (the RSB options support cannot be
    used because it loads the defaults for the host, which cannot be
    done here), then downloads the sources every named build set
    references for each host profile. Exits the process with 0 on
    success, 1 on failure or interrupt.
    """
    ec = 0
    # True while a buildset is being constructed; controls whether the
    # exception text is printed here or was already logged.
    get_sources_error = True
    try:
        description = 'RTEMS Get Sources downloads all the source a build set '
        description += 'references for all hosts.'
        argsp = argparse.ArgumentParser(prog='rtems-get-sources',
                                        description=description)
        argsp.add_argument('--rtems-version',
                           help='Set the RTEMS version.',
                           type=str,
                           default=version.version())
        argsp.add_argument('--list-hosts',
                           help='List the hosts.',
                           action='store_true')
        # Fixed copy-pasted help text: this option lists build sets,
        # not hosts.
        argsp.add_argument('--list-bsets',
                           help='List the build sets.',
                           action='store_true')
        argsp.add_argument('--download-dir',
                           help='Download directory.',
                           type=str)
        argsp.add_argument('--clean',
                           help='Clean the download directory.',
                           action='store_true')
        argsp.add_argument('--tar',
                           help='Create a tarball of all the source.',
                           action='store_true')
        argsp.add_argument('--log',
                           help='Log file.',
                           type=str,
                           default=log_default())
        argsp.add_argument('--trace',
                           help='Enable trace logging for debugging.',
                           action='store_true')
        argsp.add_argument('bsets', nargs='*', help='Build sets.')
        # NOTE(review): args[2:] skips argv[1] as well as the program
        # name -- presumably the wrapper inserts an extra leading
        # argument; confirm against the caller.
        argopts = argsp.parse_args(args[2:])
        load_log(argopts.log)
        log.notice('RTEMS Source Builder - Get Sources, %s' % (version.str()))
        log.tracing = argopts.trace
        opts = load_options(args, argopts)
        configs = build.get_configs(opts)
        if argopts.list_bsets:
            list_bset_files(opts, configs)
        else:
            if argopts.clean:
                if argopts.download_dir is None:
                    raise error.general(
                        'cleaning of the default download directories is not supported'
                    )
                if path.exists(argopts.download_dir):
                    log.notice('Cleaning source directory: %s' %
                               (argopts.download_dir))
                    path.removeall(argopts.download_dir)
            if len(argopts.bsets) == 0:
                raise error.general(
                    'no build sets provided on the command line')
            for bset in argopts.bsets:
                get_sources_error = True
                b = buildset(bset, configs, opts)
                get_sources_error = False
                for host in host_profiles:
                    b.build(host)
                b = None
    except error.general as gerr:
        if get_sources_error:
            log.stderr(str(gerr))
        log.stderr('Build FAILED')
        ec = 1
    except error.internal as ierr:
        if get_sources_error:
            log.stderr(str(ierr))
        log.stderr('Internal Build FAILED')
        ec = 1
    except error.exit:
        pass
    except KeyboardInterrupt:
        log.notice('abort: user terminated')
        ec = 1
    except:
        # Re-raise truly unexpected exceptions so the traceback is
        # visible. (The original had log/ec statements after this
        # 'raise'; they were unreachable and have been removed.)
        raise
    sys.exit(ec)
    def build(self, deps = None, nesting_count = 0, mail = None):
        """Build every configuration in this build set.

        Recurses into nested `.bset` files and builds `.cfg` files via
        build.build(). When `deps` is a list, only dependencies are
        collected (no install); when None, packages are built, reported
        and installed. A mail report is assembled and sent from the
        `finally` clause when `mail` is given.

        :param deps: None to build/install, or a list to collect includes.
        :param nesting_count: recursion depth of nested build sets.
        :param mail: optional dict with 'output', 'header', 'to', 'mail'.
        """
        build_error = False
        nesting_count += 1
        if mail:
            mail['output'].clear()
        log.trace('_bset: %s: make' % (self.bset))
        log.notice('Build Set: %s' % (self.bset))
        mail_subject = '%s on %s' % (self.bset,
                                     self.macros.expand('%{_host}'))
        # PATH is restored in the finally clause below.
        current_path = os.environ['PATH']
        start = datetime.datetime.now()
        mail_report = False
        have_errors = False
        # NOTE(review): mail['output'] was already cleared above; this
        # second clear looks redundant — confirm before removing.
        if mail:
            mail['output'].clear()
        try:
            configs = self.load()
            log.trace('_bset: %s: configs: %s' % (self.bset,
                                                  ','.join(configs)))
            sizes_valid = False
            builds = []
            for s in range(0, len(configs)):
                b = None
                try:
                    #
                    # Each section of the build set gets a separate set of
                    # macros so we do not contaminate one configuration with
                    # another.
                    #
                    opts = copy.copy(self.opts)
                    macros = copy.copy(self.macros)
                    if configs[s].endswith('.bset'):
                        log.trace('_bset: == %2d %s' % (nesting_count + 1,
                                                        '=' * 75))
                        bs = buildset(configs[s], self.configs, opts, macros)
                        bs.build(deps, nesting_count, mail)
                        del bs
                    elif configs[s].endswith('.cfg'):
                        if mail:
                            mail_report = True
                        log.trace('_bset: -- %2d %s' % (nesting_count + 1,
                                                        '-' * 75))
                        try:
                            b = build.build(configs[s],
                                            self.opts.get_arg('--pkg-tar-files'),
                                            opts,
                                            macros)
                        except:
                            # Mark so the outer handler does not log twice.
                            build_error = True
                            raise
                        if b.macros.get('%{_disable_reporting}'):
                            mail_report = False
                        if deps is None:
                            self.build_package(configs[s], b)
                            self.report(configs[s], b,
                                        copy.copy(self.opts),
                                        copy.copy(self.macros),
                                        mail = mail)
                            # Always produce an XML report.
                            self.report(configs[s], b,
                                        copy.copy(self.opts),
                                        copy.copy(self.macros),
                                        format = 'xml',
                                        mail = mail)
                            # Tar the build set once the last config builds.
                            if s == len(configs) - 1 and not have_errors:
                                self.bset_tar(b)
                        else:
                            deps += b.config.includes()
                        builds += [b]
                        #
                        # Dump post build macros.
                        #
                        log.trace('_bset: macros post-build')
                        log.trace(str(macros))
                    else:
                        raise error.general('invalid config type: %s' %
                                            (configs[s]))
                except error.general as gerr:
                    have_errors = True
                    if b is not None:
                        if self.build_failure is None:
                            self.build_failure = b.name()
                        self.write_mail_header('')
                        self.write_mail_header('= ' * 40)
                        self.write_mail_header('Build FAILED: %s' % (b.name()))
                        self.write_mail_header('- ' * 40)
                        self.write_mail_header(str(log.default))
                        self.write_mail_header('- ' * 40)
                        if self.opts.keep_going():
                            log.notice(str(gerr))
                            if self.opts.always_clean():
                                builds += [b]
                        else:
                            raise
                    else:
                        raise
            #
            # Installing ...
            #
            log.trace('_bset: installing: deps:%r no-install:%r' % \
                      (deps is None, self.opts.no_install()))
            if deps is None \
               and not self.opts.no_install() \
               and not have_errors:
                for b in builds:
                    log.trace('_bset: installing: %r' % b.installable())
                    if b.installable():
                        self.install(b.name(),
                                     b.config.expand('%{buildroot}'),
                                     b.config.expand('%{_prefix}'))
            #
            # Sizes ... (only meaningful when more than one package built)
            #
            if len(builds) > 1:
                size_build = 0
                size_installed = 0
                size_build_max = 0
                for b in builds:
                    s = b.get_build_size()
                    size_build += s
                    if s > size_build_max:
                        size_build_max = s
                    size_installed += b.get_installed_size()
                size_sources = 0
                for p in builds[0].config.expand('%{_sourcedir}').split(':'):
                    size_sources += path.get_size(p)
                size_patches = 0
                for p in builds[0].config.expand('%{_patchdir}').split(':'):
                    size_patches += path.get_size(p)
                size_total = size_sources + size_patches + size_installed
                build_max_size_human = build.humanize_number(
                    size_build_max + size_installed, 'B')
                build_total_size_human = build.humanize_number(size_total, 'B')
                build_sources_size_human = build.humanize_number(size_sources, 'B')
                build_patches_size_human = build.humanize_number(size_patches, 'B')
                build_installed_size_human = build.humanize_number(size_installed, 'B')
                build_size = 'usage: %s' % (build_max_size_human)
                build_size += ' total: %s' % (build_total_size_human)
                build_size += ' (sources: %s' % (build_sources_size_human)
                build_size += ', patches: %s' % (build_patches_size_human)
                build_size += ', installed %s)' % (build_installed_size_human)
                # Guards use of the *_human locals in the finally clause.
                sizes_valid = True
            #
            # Cleaning ...
            #
            if deps is None and \
               (not self.opts.no_clean() or self.opts.always_clean()):
                for b in builds:
                    if not b.disabled():
                        log.notice('cleaning: %s' % (b.name()))
                        b.cleanup()
            #
            # Log the build size message
            #
            if len(builds) > 1:
                log.notice('Build Sizes: %s' % (build_size))
            #
            # Clear out the builds ...
            #
            for b in builds:
                del b
        except error.general as gerr:
            if not build_error:
                log.stderr(str(gerr))
            raise
        except KeyboardInterrupt:
            mail_report = False
            raise
        except:
            self.build_failure = 'RSB general failure'
            raise
        finally:
            end = datetime.datetime.now()
            os.environ['PATH'] = current_path
            build_time = str(end - start)
            if mail_report and not self.macros.defined('mail_disable'):
                self.write_mail_header('Build Time: %s' % (build_time), True)
                self.write_mail_header('', True)
                if self.build_failure is not None:
                    mail_subject = 'FAILED %s (%s)' % \
                        (mail_subject, self.build_failure)
                else:
                    mail_subject = 'PASSED %s' % (mail_subject)
                mail_subject = 'Build %s: %s' % (reports.platform(mode = 'system'),
                                                 mail_subject)
                self.write_mail_header(mail['header'], True)
                self.write_mail_header('')
                log.notice('Mailing report: %s' % (mail['to']))
                body = self.get_mail_header()
                body += 'Sizes' + os.linesep
                body += '=====' + os.linesep + os.linesep
                if sizes_valid:
                    body += 'Maximum build usage: ' + build_max_size_human + os.linesep
                    body += 'Total size: ' + build_total_size_human + os.linesep
                    body += 'Installed : ' + build_installed_size_human + os.linesep
                    body += 'Sources: ' + build_sources_size_human + os.linesep
                    body += 'Patches: ' + build_patches_size_human + os.linesep
                else:
                    body += 'No packages built'
                body += os.linesep
                body += 'Output' + os.linesep
                body += '======' + os.linesep + os.linesep
                body += os.linesep.join(mail['output'].get())
                body += os.linesep + os.linesep
                body += 'Report' + os.linesep
                body += '======' + os.linesep + os.linesep
                body += self.get_mail_report()
                # NOTE(review): this reads the loop-local `opts`, which is
                # unbound if no config was processed; other versions use
                # self.opts.dry_run() here — confirm and align.
                if not opts.dry_run():
                    mail['mail'].send(mail['to'], mail_subject, body)
        log.notice('Build Set: Time %s' % (build_time))
_in = urllib2.urlopen(url) _out = open(path.host(local), 'wb') _out.write(_in.read()) except IOError, err: log.notice('download: %s: error: %s' % (url, str(err))) if path.exists(local): os.remove(path.host(local)) failed = True except ValueError, err: log.notice('download: %s: error: %s' % (url, str(err))) if path.exists(local): os.remove(path.host(local)) failed = True except: msg = 'download: %s: error' % (url) log.stderr(msd) log.notice(msg) if _out is not None: _out.close() raise if _out is not None: _out.close() if _in is not None: del _in if not failed: if not path.isfile(local): raise error.general('source is not a file: %s' % (path.host(local))) return not failed
' correctly (use --force to proceed)') log.notice('warning: forcing build with known host setup problems') if opts.get_arg('--list-configs'): configs = get_configs(opts) for p in configs['paths']: print 'Examining: %s' % (os.path.relpath(p)) for c in configs['files']: if c.endswith('.cfg'): print ' %s' % (c) else: for config_file in opts.config_files(): b = build(config_file, True, opts) b.make() b = None except error.general, gerr: log.stderr('Build FAILED') ec = 1 except error.internal, ierr: log.stderr('Internal Build FAILED') ec = 1 except error.exit, eerr: pass except KeyboardInterrupt: log.notice('abort: user terminated') ec = 1 sys.exit(ec) if __name__ == "__main__": run(sys.argv)
    def build(self, deps=None, nesting_count=0, mail=None):
        """Build every configuration in this build set, with staging.

        Nested `.bset` files are built recursively; their packages are
        installed into a staging root and moved to the final prefix by
        the outermost (nesting_count == 1) build set. `.cfg` files are
        built via build.build(). Mail report state is accumulated in
        the `mail` dict and sent when this is the mail-active level.

        :param deps: None to build/install, or a list to collect includes.
        :param nesting_count: recursion depth; 1 for the outermost set.
        :param mail: optional dict carrying report state across levels.
        """
        build_error = False
        nesting_count += 1
        if self.mail_active(mail, nesting_count):
            mail['output'].clear()
            mail['log'] = ''
            mail['reports'] = []
            mail['failure'] = None
        log.trace('_bset: %2d: %s: make' % (nesting_count, self.bset))
        log.notice('Build Set: %s' % (self.bset))
        # PATH is restored in the finally clause below.
        current_path = os.environ['PATH']
        start = datetime.datetime.now()
        mail_report = False
        have_errors = False
        interrupted = False
        #
        # If this is the outer most buildset its files are installed. Nested
        # build sets stage their installed files. The staged files are
        # installed when the outer most build finishes.
        #
        if nesting_count != 1:
            if self.installing():
                self.macros['install_mode'] = 'staging'
        #
        # Only the outer build set can have staging to install. Get the staging
        # root via the config because it could require a valid config.
        #
        have_staging = False
        try:
            configs = self.load()
            log.trace('_bset: %2d: %s: configs: %s' %
                      (nesting_count, self.bset, ', '.join(configs)))
            if nesting_count == 1 and len(configs) > 1:
                #
                # Prepend staging areas, bin directory to the
                # path. Lets the later package depend on the earlier
                # ones.
                #
                pathprepend = ['%{stagingroot}/bin'] + \
                    macro_expand(self.macros, '%{_pathprepend}').split(':')
                pathprepend = [pp for pp in pathprepend if len(pp)]
                if len(pathprepend) == 1:
                    self.macros['_pathprepend'] = pathprepend[0]
                else:
                    self.macros['_pathprepend'] = ':'.join(pathprepend)
            sizes_valid = False
            builds = []
            for s in range(0, len(configs)):
                b = None
                try:
                    #
                    # Each section of the build set gets a separate set of
                    # macros so we do not contaminate one configuration with
                    # another.
                    #
                    opts = copy.copy(self.opts)
                    macros = copy.copy(self.macros)
                    if configs[s].endswith('.bset'):
                        log.trace('_bset: %2d: %s %s' %
                                  (nesting_count, configs[s],
                                   '=' * (74 - len(configs[s]))))
                        bs = buildset(configs[s], self.configs, opts, macros)
                        bs.build(deps, nesting_count, mail)
                        if self.installing():
                            have_staging = True
                        del bs
                    elif configs[s].endswith('.cfg'):
                        if mail:
                            mail_report = True
                        log.trace('_bset: %2d: %s %s' %
                                  (nesting_count, configs[s],
                                   '=' * (74 - len(configs[s]))))
                        try:
                            b = build.build(
                                configs[s],
                                self.opts.get_arg('--pkg-tar-files'),
                                opts, macros)
                        except:
                            # Mark so the outer handler does not log twice.
                            build_error = True
                            raise
                        if b.macros.get('%{_disable_reporting}'):
                            mail_report = False
                        if deps is None:
                            self.build_package(configs[s], b)
                            self.report(configs[s], b,
                                        copy.copy(self.opts),
                                        copy.copy(self.macros),
                                        mail=mail)
                            # Always produce an XML report.
                            self.report(configs[s], b,
                                        copy.copy(self.opts),
                                        copy.copy(self.macros),
                                        format='xml',
                                        mail=mail)
                            # Tar the build set once the last config builds.
                            if s == len(configs) - 1 and not have_errors:
                                self.bset_tar(b)
                        else:
                            deps += b.config.includes()
                        builds += [b]
                        #
                        # Dump post build macros.
                        #
                        log.trace('_bset: : macros post-build')
                        log.trace(str(b.macros))
                    else:
                        raise error.general('invalid config type: %s' %
                                            (configs[s]))
                except error.general as gerr:
                    have_errors = True
                    if b is not None:
                        if self.build_failure is None:
                            self.build_failure = b.name()
                        self.write_mail_header('')
                        self.write_mail_header('= ' * 40)
                        self.write_mail_header('Build FAILED: %s' % (b.name()))
                        self.write_mail_header('- ' * 40)
                        self.write_mail_header(str(log.default))
                        self.write_mail_header('- ' * 40)
                        if self.opts.keep_going():
                            log.notice(str(gerr))
                            if self.opts.always_clean():
                                builds += [b]
                        else:
                            raise
                    else:
                        raise
            #
            # Installing or staging ...
            #
            log.trace('_bset: %2d: %s: deps:%r no-install:%r' % \
                      (nesting_count, self.install_mode(),
                       deps is None, self.opts.no_install()))
            log.trace('_bset: %2d: %s: builds: %s' % \
                      (nesting_count, self.install_mode(),
                       ', '.join([b.name() for b in builds])))
            if deps is None and not self.opts.no_install() and not have_errors:
                for b in builds:
                    log.trace('_bset: : %s: %r' %
                              (self.install_mode(), b.installable()))
                    if b.installable():
                        prefix = b.config.expand('%{_prefix}')
                        buildroot = path.join(b.config.expand('%{buildroot}'),
                                              prefix)
                        # Staged packages install under the staging root.
                        if self.staging():
                            prefix = b.config.expand('%{stagingroot}')
                        self.install(self.install_mode(),
                                     b.name(), buildroot, prefix)
            #
            # Sizes ... (only meaningful when more than one package built)
            #
            if len(builds) > 1:
                size_build = 0
                size_installed = 0
                size_build_max = 0
                for b in builds:
                    s = b.get_build_size()
                    size_build += s
                    if s > size_build_max:
                        size_build_max = s
                    size_installed += b.get_installed_size()
                size_sources = 0
                for p in builds[0].config.expand('%{_sourcedir}').split(':'):
                    size_sources += path.get_size(p)
                size_patches = 0
                for p in builds[0].config.expand('%{_patchdir}').split(':'):
                    size_patches += path.get_size(p)
                size_total = size_sources + size_patches + size_installed
                build_max_size_human = build.humanize_number(
                    size_build_max + size_installed, 'B')
                build_total_size_human = build.humanize_number(size_total, 'B')
                build_sources_size_human = build.humanize_number(
                    size_sources, 'B')
                build_patches_size_human = build.humanize_number(
                    size_patches, 'B')
                build_installed_size_human = build.humanize_number(
                    size_installed, 'B')
                build_size = 'usage: %s' % (build_max_size_human)
                build_size += ' total: %s' % (build_total_size_human)
                build_size += ' (sources: %s' % (build_sources_size_human)
                build_size += ', patches: %s' % (build_patches_size_human)
                build_size += ', installed %s)' % (build_installed_size_human)
                # Guards use of the *_human locals in the finally clause.
                sizes_valid = True
            #
            # Cleaning ...
            #
            if deps is None and \
               (not self.opts.no_clean() or self.opts.always_clean()):
                for b in builds:
                    if not b.disabled():
                        log.notice('cleaning: %s' % (b.name()))
                        b.cleanup()
            #
            # Log the build size message
            #
            if len(builds) > 1:
                log.notice('Build Sizes: %s' % (build_size))
            #
            # Clear out the builds ...
            #
            for b in builds:
                del b
            #
            # If builds have been staged install into the final prefix.
            #
            if have_staging and not self.opts.no_install() and not have_errors:
                stagingroot = macro_expand(self.macros, '%{stagingroot}')
                have_stagingroot = path.exists(stagingroot)
                log.trace('_bset: %2d: install staging, present: %s' % \
                          (nesting_count, have_stagingroot))
                if have_stagingroot:
                    prefix = macro_expand(self.macros, '%{_prefix}')
                    self.install(self.install_mode(),
                                 self.bset, stagingroot, prefix)
                    staging_size = path.get_size(stagingroot)
                    if not self.opts.no_clean() or self.opts.always_clean():
                        log.notice('clean staging: %s' % (self.bset))
                        log.trace('removing: %s' % (stagingroot))
                        if not self.opts.dry_run():
                            if path.exists(stagingroot):
                                path.removeall(stagingroot)
                    log.notice('Staging Size: %s' % \
                               (build.humanize_number(staging_size, 'B')))
        except error.general as gerr:
            if not build_error:
                log.stderr(str(gerr))
            raise
        except KeyboardInterrupt:
            interrupted = True
            raise
        except:
            self.build_failure = 'RSB general failure'
            interrupted = True
            raise
        finally:
            end = datetime.datetime.now()
            os.environ['PATH'] = current_path
            build_time = str(end - start)
            if self.mail_single_report() and nesting_count == 1:
                mail_report = True
            if interrupted or self.macros.defined('mail_disable'):
                mail_report = False
            if mail_report and mail is not None:
                if self.installing():
                    self.write_mail_header('Build Time: %s' % (build_time), True)
                    self.write_mail_header('', True)
                    self.write_mail_header(mail['header'], True)
                    self.write_mail_header('')
                    log.notice('Mailing report: %s' % (mail['to']))
                    mail['log'] += self.get_mail_header()
                    if sizes_valid:
                        mail['log'] += 'Sizes' + os.linesep
                        mail['log'] += '=====' + os.linesep + os.linesep
                        mail['log'] += \
                            'Maximum build usage: ' + build_max_size_human + os.linesep
                        mail['log'] += \
                            'Total size: ' + build_total_size_human + os.linesep
                        mail['log'] += \
                            'Installed : ' + build_installed_size_human + os.linesep
                        mail[
                            'log'] += 'Sources: ' + build_sources_size_human + os.linesep
                        mail[
                            'log'] += 'Patches: ' + build_patches_size_human + os.linesep
                    mail['log'] += os.linesep
                    mail['log'] += 'Output' + os.linesep
                    mail['log'] += '======' + os.linesep + os.linesep
                    mail['log'] += os.linesep.join(mail['output'].get())
                    mail['log'] += os.linesep + os.linesep
                    mail['log'] += 'Report' + os.linesep
                    mail['log'] += '======' + os.linesep + os.linesep
                    mail['reports'] += [self.get_mail_report()]
                    if self.build_failure is not None:
                        mail['failure'] = self.build_failure
                if self.mail_active(mail, nesting_count):
                    try:
                        self.mail_send(mail)
                    except error.general as gerr:
                        log.notice('Mail Send Failure: %s' % (gerr))
        log.notice('Build Set: Time %s' % (build_time))
def build(self, host, nesting_count = 0): build_error = False nesting_count += 1 log.trace('_bset: %s for %s: make' % (self.bset, host)) log.notice('Build Set: %s for %s' % (self.bset, host)) mail_subject = '%s on %s' % (self.bset, self.macros.expand('%{_host}')) current_path = os.environ['PATH'] start = datetime.datetime.now() have_errors = False try: configs = self.load() log.trace('_bset: %s: configs: %s' % (self.bset, ','.join(configs))) sizes_valid = False builds = [] for s in range(0, len(configs)): b = None try: # # Each section of the build set gets a separate set of # macros so we do not contaminate one configuration with # another. # opts = copy.copy(self.opts) macros = copy.copy(self.macros) self.set_host_details(host, opts, macros) if configs[s].endswith('.bset'): log.trace('_bset: == %2d %s' % (nesting_count + 1, '=' * 75)) bs = buildset(configs[s], self.configs, opts, macros) bs.build(host, nesting_count) del bs elif configs[s].endswith('.cfg'): log.trace('_bset: -- %2d %s' % (nesting_count + 1, '-' * 75)) try: b = build.build(configs[s], False, opts, macros) except: build_error = True raise self.build_package(configs[s], b) builds += [b] # # Dump post build macros. # log.trace('_bset: macros post-build') log.trace(str(macros)) else: raise error.general('invalid config type: %s' % (configs[s])) except error.general as gerr: have_errors = True if b is not None: if self.build_failure is None: self.build_failure = b.name() raise # # Clear out the builds ... # for b in builds: del b except error.general as gerr: if not build_error: log.stderr(str(gerr)) raise except KeyboardInterrupt: raise except: self.build_failure = 'RSB general failure' raise finally: end = datetime.datetime.now() os.environ['PATH'] = current_path build_time = str(end - start) log.notice('Build Set: Time %s' % (build_time))
if not b.canadian_cross() \ and not b.disabled() \ and not b.macros.get('%{_disable_installing}'): self.install(b.name(), b.config.expand('%{buildroot}'), b.config.expand('%{_prefix}')) if deps is None and \ (not self.opts.no_clean() or self.opts.always_clean()): for b in builds: if not b.disabled(): log.notice('cleaning: %s' % (b.name())) b.cleanup() for b in builds: del b except error.general, gerr: if not build_error: log.stderr(str(gerr)) raise except KeyboardInterrupt: mail_report = False raise except: self.build_failure = 'RSB general failure' raise finally: end = datetime.datetime.now() os.environ['PATH'] = current_path build_time = str(end - start) if mail_report: to_addr = self.opts.get_arg('--mail-to') if to_addr is not None: to_addr = to_addr[1]
    def build(self, host, nesting_count=0):
        """Build every configuration in this build set for one host.

        Recurses into nested `.bset` files and builds `.cfg` files via
        build.build() with the host specific options and macros applied.

        :param host: host profile to build for.
        :param nesting_count: recursion depth of nested build sets.
        :raises error.general: on any build failure; the first failing
                package name is recorded in self.build_failure.
        """
        build_error = False
        nesting_count += 1
        log.trace('_bset: %s for %s: make' % (self.bset, host))
        log.notice('Build Set: %s for %s' % (self.bset, host))
        # NOTE(review): mail_subject, sizes_valid and have_errors are
        # assigned but never read in this version — candidates for removal.
        mail_subject = '%s on %s' % (self.bset,
                                     self.macros.expand('%{_host}'))
        # PATH is restored in the finally clause below.
        current_path = os.environ['PATH']
        start = datetime.datetime.now()
        have_errors = False
        try:
            configs = self.load()
            log.trace('_bset: %s: configs: %s' % (self.bset,
                                                  ','.join(configs)))
            sizes_valid = False
            builds = []
            for s in range(0, len(configs)):
                b = None
                try:
                    #
                    # Each section of the build set gets a separate set of
                    # macros so we do not contaminate one configuration with
                    # another.
                    #
                    opts = copy.copy(self.opts)
                    macros = copy.copy(self.macros)
                    self.set_host_details(host, opts, macros)
                    if configs[s].endswith('.bset'):
                        log.trace('_bset: == %2d %s' % (nesting_count + 1,
                                                        '=' * 75))
                        bs = buildset(configs[s], self.configs, opts, macros)
                        bs.build(host, nesting_count)
                        del bs
                    elif configs[s].endswith('.cfg'):
                        log.trace('_bset: -- %2d %s' % (nesting_count + 1,
                                                        '-' * 75))
                        try:
                            b = build.build(configs[s], False, opts, macros)
                        except:
                            # Mark so the outer handler does not log twice.
                            build_error = True
                            raise
                        self.build_package(configs[s], b)
                        builds += [b]
                        #
                        # Dump post build macros.
                        #
                        log.trace('_bset: macros post-build')
                        log.trace(str(macros))
                    else:
                        raise error.general('invalid config type: %s' %
                                            (configs[s]))
                except error.general as gerr:
                    have_errors = True
                    # Record the first failing package then propagate.
                    if b is not None:
                        if self.build_failure is None:
                            self.build_failure = b.name()
                    raise
            #
            # Clear out the builds ...
            #
            for b in builds:
                del b
        except error.general as gerr:
            if not build_error:
                log.stderr(str(gerr))
            raise
        except KeyboardInterrupt:
            raise
        except:
            self.build_failure = 'RSB general failure'
            raise
        finally:
            end = datetime.datetime.now()
            os.environ['PATH'] = current_path
            build_time = str(end - start)
            log.notice('Build Set: Time %s' % (build_time))
_in = urllib2.urlopen(url) _out = open(path.host(local), 'wb') _out.write(_in.read()) except IOError, err: log.notice('download: %s: error: %s' % (url, str(err))) if path.exists(local): os.remove(path.host(local)) failed = True except ValueError, err: log.notice('download: %s: error: %s' % (url, str(err))) if path.exists(local): os.remove(path.host(local)) failed = True except: msg = 'download: %s: error' % (url) log.stderr(msd) log.notice(msg) if _out is not None: _out.close() raise if _out is not None: _out.close() if _in is not None: del _in if not failed: if not path.isfile(local): raise error.general('source is not a file: %s' % (path.host(local))) return not failed def _git_downloader(url, local, config, opts): rlp = os.path.relpath(path.host(local))
def _http_downloader(url, local, config, opts):
    """Download `url` to `local` over HTTP(S) with progress output.

    Skips the download when the local file already exists. Rewrites
    GitHub API URLs to the tarball form, follows redirects, reads in
    256 KiB chunks while printing a progress line, and verifies the
    downloaded file's checksum via _hash_check().

    :param url: remote URL to fetch.
    :param local: local file path to write.
    :param config: config object used for macro expansion and hashes.
    :param opts: options; _do_download(opts) gates the actual transfer.
    :return: True when the file is available, False when the download
             failed with an IO/Value error.
    :raises error.general: when the result is not a file or the
            checksum does not match.
    """
    if path.exists(local):
        return True
    #
    # Hack for GitHub.
    #
    if url.startswith('https://api.github.com'):
        url = urllib_parse.urljoin(url, config.expand('tarball/%{version}'))
    dst = os.path.relpath(path.host(local))
    log.output('download: %s -> %s' % (url, dst))
    log.notice('download: %s -> %s' % (_sensible_url(url, len(dst)), dst))
    failed = False
    if _do_download(opts):
        _in = None
        _out = None
        _length = None
        _have = 0
        _chunk_size = 256 * 1024
        _chunk = None
        # NOTE(review): _last_percent is never updated after this initial
        # value, so the percent branch always runs; the _last_msg check
        # below is what actually dedups output — confirm intent.
        _last_percent = 200.0
        _last_msg = ''
        _have_status_output = False
        _url = url
        try:
            try:
                _in = None
                _ssl_context = None
                # See #2656
                _req = urllib_request.Request(_url)
                _req.add_header('User-Agent', 'Wget/1.16.3 (freebsd10.1)')
                # Try an unverified SSL context first; fall back to the
                # default context when the ssl module or call fails.
                try:
                    import ssl
                    _ssl_context = ssl._create_unverified_context()
                    _in = urllib_request.urlopen(_req, context = _ssl_context)
                except:
                    _ssl_context = None
                if _ssl_context is None:
                    _in = urllib_request.urlopen(_req)
                if _url != _in.geturl():
                    _url = _in.geturl()
                    log.output(' redirect: %s' % (_url))
                    log.notice(' redirect: %s' % (_sensible_url(_url)))
                _out = open(path.host(local), 'wb')
                # Content-Length is optional; without it no percentage
                # is shown.
                try:
                    _length = int(_in.info()['Content-Length'].strip())
                except:
                    pass
                while True:
                    _msg = '\rdownloading: %s - %s ' % (dst,
                                                        _humanize_bytes(_have))
                    if _length:
                        _percent = round((float(_have) / _length) * 100, 2)
                        if _percent != _last_percent:
                            _msg += 'of %s (%0.0f%%) ' % \
                                (_humanize_bytes(_length), _percent)
                    if _msg != _last_msg:
                        extras = (len(_last_msg) - len(_msg))
                        log.stdout_raw('%s%s' % (_msg,
                                                 ' ' * extras + '\b' * extras))
                        _last_msg = _msg
                        _have_status_output = True
                    _chunk = _in.read(_chunk_size)
                    if not _chunk:
                        break
                    _out.write(_chunk)
                    _have += len(_chunk)
                log.stdout_raw('\n\r')
            except:
                # Terminate the progress line before propagating.
                if _have_status_output:
                    log.stdout_raw('\n\r')
                raise
        except IOError as err:
            log.notice('download: %s: error: %s' % (_sensible_url(_url),
                                                    str(err)))
            if path.exists(local):
                os.remove(path.host(local))
            failed = True
        except ValueError as err:
            log.notice('download: %s: error: %s' % (_sensible_url(_url),
                                                    str(err)))
            if path.exists(local):
                os.remove(path.host(local))
            failed = True
        except:
            msg = 'download: %s: error' % (_sensible_url(_url))
            log.stderr(msg)
            log.notice(msg)
            if _in is not None:
                _in.close()
            if _out is not None:
                _out.close()
            raise
        if _out is not None:
            _out.close()
        if _in is not None:
            _in.close()
            del _in
        if not failed:
            if not path.isfile(local):
                raise error.general('source is not a file: %s' %
                                    (path.host(local)))
            if not _hash_check(path.basename(local), local,
                               config.macros, False):
                raise error.general('checksum failure file: %s' % (dst))
    return not failed
    def build(self, deps=None, nesting_count=0):
        """Build every configuration in this build set.

        Recurses into nested `.bset` files and builds `.cfg` files via
        build.build(). When `deps` is a list only dependencies are
        collected; when None, packages are built, reported, installed
        and cleaned. A mail report is sent from the finally clause when
        the --mail option is set.

        :param deps: None to build/install, or a list to collect includes.
        :param nesting_count: recursion depth of nested build sets.
        """
        build_error = False
        nesting_count += 1
        log.trace('_bset: %s: make' % (self.bset))
        log.notice('Build Set: %s' % (self.bset))
        if self.opts.get_arg('--mail'):
            mail_report_subject = '%s %s' % (self.bset,
                                             self.macros.expand('%{_host}'))
        # PATH is restored in the finally clause below.
        current_path = os.environ['PATH']
        start = datetime.datetime.now()
        mail_report = False
        have_errors = False
        try:
            configs = self.load()
            log.trace('_bset: %s: configs: %s' % (self.bset,
                                                  ','.join(configs)))
            builds = []
            for s in range(0, len(configs)):
                b = None
                try:
                    #
                    # Each section of the build set gets a separate set of
                    # macros so we do not contaminate one configuration with
                    # another.
                    #
                    opts = copy.copy(self.opts)
                    macros = copy.copy(self.macros)
                    if configs[s].endswith('.bset'):
                        log.trace('_bset: == %2d %s' % (nesting_count + 1,
                                                        '=' * 75))
                        bs = buildset(configs[s], self.configs, opts, macros)
                        bs.build(deps, nesting_count)
                        del bs
                    elif configs[s].endswith('.cfg'):
                        mail_report = self.opts.get_arg('--mail')
                        log.trace('_bset: -- %2d %s' % (nesting_count + 1,
                                                        '-' * 75))
                        try:
                            b = build.build(
                                configs[s],
                                self.opts.get_arg('--pkg-tar-files'),
                                opts, macros)
                        except:
                            # Mark so the outer handler does not log twice.
                            build_error = True
                            raise
                        if b.macros.get('%{_disable_reporting}'):
                            mail_report = False
                        if deps is None:
                            self.build_package(configs[s], b)
                            self.report(configs[s], b,
                                        copy.copy(self.opts),
                                        copy.copy(self.macros))
                            # Always produce an XML report.
                            self.report(configs[s], b,
                                        copy.copy(self.opts),
                                        copy.copy(self.macros),
                                        format='xml')
                            # Tar the build set once the last config builds.
                            if s == len(configs) - 1 and not have_errors:
                                self.bset_tar(b)
                        else:
                            deps += b.config.includes()
                        builds += [b]
                        #
                        # Dump post build macros.
                        #
                        log.trace('_bset: macros post-build')
                        log.trace(str(macros))
                    else:
                        raise error.general('invalid config type: %s' %
                                            (configs[s]))
                except error.general as gerr:
                    have_errors = True
                    if b is not None:
                        if self.build_failure is None:
                            self.build_failure = b.name()
                        self.write_mail_header('')
                        self.write_mail_header('= ' * 40)
                        self.write_mail_header('Build FAILED: %s' % (b.name()))
                        self.write_mail_header('- ' * 40)
                        self.write_mail_header(str(log.default))
                        self.write_mail_header('- ' * 40)
                        if self.opts.keep_going():
                            log.notice(str(gerr))
                            if self.opts.always_clean():
                                builds += [b]
                        else:
                            raise
                    else:
                        raise
            #
            # Installing ...
            #
            log.trace('_bset: installing: deps:%r no-install:%r' % \
                      (deps is None, self.opts.no_install()))
            if deps is None \
               and not self.opts.no_install() \
               and not have_errors:
                for b in builds:
                    log.trace('_bset: installing: %r' % b.installable())
                    if b.installable():
                        self.install(b.name(),
                                     b.config.expand('%{buildroot}'),
                                     b.config.expand('%{_prefix}'))
            if deps is None and \
               (not self.opts.no_clean() or self.opts.always_clean()):
                for b in builds:
                    if not b.disabled():
                        log.notice('cleaning: %s' % (b.name()))
                        b.cleanup()
            #
            # Clear out the builds ...
            #
            for b in builds:
                del b
        except error.general as gerr:
            if not build_error:
                log.stderr(str(gerr))
            raise
        except KeyboardInterrupt:
            mail_report = False
            raise
        except:
            self.build_failure = 'RSB general failure'
            raise
        finally:
            end = datetime.datetime.now()
            os.environ['PATH'] = current_path
            build_time = str(end - start)
            if mail_report:
                to_addr = self.opts.get_arg('--mail-to')
                if to_addr is not None:
                    to_addr = to_addr[1]
                else:
                    to_addr = self.macros.expand('%{_mail_tools_to}')
                log.notice('Mailing report: %s' % (to_addr))
                self.write_mail_header('Build Time %s' % (build_time), True)
                self.write_mail_header('')
                m = mailer.mail(self.opts)
                if self.build_failure is not None:
                    mail_report_subject = 'Build: FAILED %s (%s)' %\
                        (mail_report_subject, self.build_failure)
                    # NOTE(review): pass_fail is assigned but never read.
                    pass_fail = 'FAILED'
                else:
                    mail_report_subject = 'Build: PASSED %s' % (
                        mail_report_subject)
                if not self.opts.dry_run():
                    m.send(to_addr, mail_report_subject,
                           self.mail_header + self.mail_report)
        log.notice('Build Set: Time %s' % (build_time))
] if hash[0] not in hashlib_algorithms: raise error.general('invalid hash algorithm for %s: %s' % (file_, hash[0])) hasher = None _in = None try: hasher = hashlib.new(hash[0]) _in = open(path.host(absfile), 'rb') hasher.update(_in.read()) except IOError, err: log.notice('hash: %s: read error: %s' % (file_, str(err))) failed = True except: msg = 'hash: %s: error' % (file_) log.stderr(msg) log.notice(msg) if _in is not None: _in.close() raise if _in is not None: _in.close() log.output('checksums: %s: %s => %s' % (file_, hasher.hexdigest(), hash[1])) if hasher.hexdigest() != hash[1]: log.warning('checksum error: %s' % (file_)) failed = True if failed and remove: log.warning('removing: %s' % (file_)) if path.exists(absfile): try:
def run():
    """Command line entry point for the RSB set builder.

    Loads the options and build set configurations, optionally wires up
    a mail report, then builds every build set named on the command
    line. Exits the process with 0 on success and 1 on failure.
    """
    import sys
    ec = 0
    # True while an error would already have been reported by
    # buildset.build(); used to avoid double logging below.
    setbuilder_error = False
    mail = None
    try:
        # Fix: the dict had '--list-configs' twice; the later entry won,
        # so keep only that one.
        optargs = {
            '--list-bsets': 'List available build sets',
            '--list-configs': 'List available configuration files.',
            '--list-deps': 'List the dependent files.',
            '--bset-tar-file': 'Create a build set tar file',
            '--pkg-tar-files': 'Create package tar files',
            '--no-report': 'Do not create a package report.',
            '--report-format': 'The report format (text, html, asciidoc).'
        }
        mailer.append_options(optargs)
        opts = options.load(sys.argv, optargs)
        if opts.get_arg('--mail'):
            mail = {'mail': mailer.mail(opts), 'output': log_capture()}
            to_addr = opts.get_arg('--mail-to')
            if to_addr is not None:
                mail['to'] = to_addr[1]
            else:
                mail['to'] = opts.defaults.expand('%{_mail_tools_to}')
            mail['from'] = mail['mail'].from_address()
        log.notice('RTEMS Source Builder - Set Builder, %s' % (version.str()))
        opts.log_info()
        if not check.host_setup(opts):
            raise error.general(
                'host build environment is not set up correctly')
        if mail:
            # The mail header captures the start-up log and host details.
            mail['header'] = os.linesep.join(mail['output'].get()) + os.linesep
            mail['header'] += os.linesep
            mail['header'] += 'Host: ' + reports.platform(
                'compact') + os.linesep
            indent = ' '
            for l in textwrap.wrap(reports.platform('extended'),
                                   width=80 - len(indent)):
                mail['header'] += indent + l + os.linesep
        configs = build.get_configs(opts)
        if opts.get_arg('--list-deps'):
            deps = []
        else:
            deps = None
        if not list_bset_cfg_files(opts, configs):
            prefix = opts.defaults.expand('%{_prefix}')
            if opts.canadian_cross():
                opts.disable_install()
            if not opts.dry_run() and \
               not opts.canadian_cross() and \
               not opts.no_install() and \
               not path.ispathwritable(prefix):
                raise error.general('prefix is not writable: %s' %
                                    (path.host(prefix)))
            for bset in opts.params():
                setbuilder_error = True
                b = buildset(bset, configs, opts)
                b.build(deps, mail=mail)
                b = None
                setbuilder_error = False
        if deps is not None:
            c = 0
            for d in sorted(set(deps)):
                c += 1
                print('dep[%d]: %s' % (c, d))
    except error.general as gerr:
        if not setbuilder_error:
            log.stderr(str(gerr))
        log.stderr('Build FAILED')
        ec = 1
    except error.internal as ierr:
        if not setbuilder_error:
            log.stderr(str(ierr))
        log.stderr('Internal Build FAILED')
        ec = 1
    except error.exit as eerr:
        pass
    except KeyboardInterrupt:
        log.notice('abort: user terminated')
        ec = 1
    except:
        # Fix: removed statements after the re-raise that could never run.
        raise
    sys.exit(ec)
def run():
    """Command-line entry point for the RTEMS Source Builder set builder.

    NOTE(review): this file contains two definitions of run(); at import
    time the later definition wins — confirm the duplication is
    intentional (the file looks like a concatenation of versions).

    Loads the command line options, optionally prepares a mail report,
    checks the host environment, then builds every build set named on
    the command line.  Exits with 0 on success and 1 on failure.
    """
    import sys
    ec = 0
    setbuilder_error = False
    mail = None
    try:
        # Extra options this tool adds on top of the common RSB options.
        # A duplicate '--list-configs' key previously appeared here; dict
        # literals keep only the last value, so a single entry suffices.
        optargs = {
            '--list-configs': 'List available configuration files.',
            '--list-bsets': 'List available build sets',
            '--list-deps': 'List the dependent files.',
            '--bset-tar-file': 'Create a build set tar file',
            '--pkg-tar-files': 'Create package tar files',
            '--no-report': 'Do not create a package report.',
            '--report-format': 'The report format (text, html, asciidoc).'
        }
        mailer.append_options(optargs)
        opts = options.load(sys.argv, optargs)
        if opts.get_arg('--mail'):
            # Capture the log so the mail body can include the output.
            mail = { 'mail' : mailer.mail(opts), 'output': log_capture() }
            to_addr = opts.get_arg('--mail-to')
            if to_addr is not None:
                mail['to'] = to_addr[1]
            else:
                mail['to'] = opts.defaults.expand('%{_mail_tools_to}')
            mail['from'] = mail['mail'].from_address()
        log.notice('RTEMS Source Builder - Set Builder, %s' % (version.str()))
        opts.log_info()
        if not check.host_setup(opts):
            raise error.general('host build environment is not set up correctly')
        if mail:
            # Seed the mail header with the captured output and host details.
            mail['header'] = os.linesep.join(mail['output'].get()) + os.linesep
            mail['header'] += os.linesep
            mail['header'] += 'Host: ' + reports.platform('compact') + os.linesep
            indent = ' '
            for l in textwrap.wrap(reports.platform('extended'),
                                   width = 80 - len(indent)):
                mail['header'] += indent + l + os.linesep
        configs = build.get_configs(opts)
        # '--list-deps' switches the run into a dependency scan: deps
        # collects included files instead of packages being built.
        if opts.get_arg('--list-deps'):
            deps = []
        else:
            deps = None
        if not list_bset_cfg_files(opts, configs):
            prefix = opts.defaults.expand('%{_prefix}')
            if opts.canadian_cross():
                opts.disable_install()
            if not opts.dry_run() and \
               not opts.canadian_cross() and \
               not opts.no_install() and \
               not path.ispathwritable(prefix):
                raise error.general('prefix is not writable: %s' % (path.host(prefix)))
            for bset in opts.params():
                # Flag that any error raised now comes from inside a build
                # set so the handlers below do not log it a second time.
                setbuilder_error = True
                b = buildset(bset, configs, opts)
                b.build(deps, mail = mail)
                b = None
                setbuilder_error = False
        if deps is not None:
            c = 0
            for d in sorted(set(deps)):
                c += 1
                print('dep[%d]: %s' % (c, d))
    except error.general as gerr:
        if not setbuilder_error:
            log.stderr(str(gerr))
        log.stderr('Build FAILED')
        ec = 1
    except error.internal as ierr:
        if not setbuilder_error:
            log.stderr(str(ierr))
        log.stderr('Internal Build FAILED')
        ec = 1
    except error.exit as eerr:
        pass
    except KeyboardInterrupt:
        log.notice('abort: user terminated')
        ec = 1
    except:
        # Re-raise unknown exceptions for a full traceback.  The
        # 'abort: unknown error' statements that used to follow this
        # raise were unreachable and have been removed.
        raise
    sys.exit(ec)
# NOTE(review): fragment of an older buildset.build(); the enclosing
# 'def', the 'if' this first condition continues, and the 'try' matched
# by the handlers below are outside this view.
# Install each build that is enabled and not marked non-installing.
and not b.disabled() \
and not b.macros.get('%{_disable_installing}'):
    self.install(b.name(),
                 b.config.expand('%{buildroot}'),
                 b.config.expand('%{_prefix}'))
# Clean each enabled build unless cleaning is suppressed.
if deps is None and \
   (not self.opts.no_clean() or self.opts.always_clean()):
    for b in builds:
        if not b.disabled():
            log.notice('cleaning: %s' % (b.name()))
            b.cleanup()
# Drop references to the build objects.
for b in builds:
    del b
# NOTE(review): Python 2 'except X, e:' syntax below; a SyntaxError
# under Python 3 — presumably a stale copy of the code. Verify.
except error.general, gerr:
    if not build_error:
        log.stderr(str(gerr))
    raise
except KeyboardInterrupt:
    mail_report = False
    raise
except:
    self.build_failure = 'RSB general failure'
    raise
finally:
    # Restore the PATH modified during the build and note the duration.
    end = datetime.datetime.now()
    os.environ['PATH'] = current_path
    build_time = str(end - start)
    if mail_report:
        to_addr = self.opts.get_arg('--mail-to')
        if to_addr is not None:
            to_addr = to_addr[1]
def run(args = sys.argv):
    """Command-line entry point for RTEMS Get Sources.

    Parses its own argparse options (the common RSB options loader is
    not usable here because it loads host defaults), then downloads the
    sources each named build set references for every host profile.
    Exits the process with 0 on success and 1 on failure.

    :param args: the argument vector; defaults to sys.argv at import time.
    """
    ec = 0
    get_sources_error = True
    try:
        #
        # The RSB options support cannot be used because it loads the defaults
        # for the host which we cannot do here.
        #
        description = 'RTEMS Get Sources downloads all the source a build set '
        description += 'references for all hosts.'
        argsp = argparse.ArgumentParser(prog = 'rtems-get-sources',
                                        description = description)
        argsp.add_argument('--rtems-version',
                           help = 'Set the RTEMS version.',
                           type = str,
                           default = version.version())
        argsp.add_argument('--list-hosts',
                           help = 'List the hosts.',
                           action = 'store_true')
        # Fixed copy-and-paste error: this option previously reused the
        # '--list-hosts' help text ('List the hosts.').
        argsp.add_argument('--list-bsets',
                           help = 'List the build sets.',
                           action = 'store_true')
        argsp.add_argument('--download-dir',
                           help = 'Download directory.',
                           type = str)
        argsp.add_argument('--clean',
                           help = 'Clean the download directory.',
                           action = 'store_true')
        argsp.add_argument('--tar',
                           help = 'Create a tarball of all the source.',
                           action = 'store_true')
        argsp.add_argument('--log',
                           help = 'Log file.',
                           type = str,
                           default = log_default())
        argsp.add_argument('--trace',
                           help = 'Enable trace logging for debugging.',
                           action = 'store_true')
        argsp.add_argument('bsets', nargs='*', help = 'Build sets.')
        # NOTE(review): args[2:] skips the first two argv entries;
        # confirm the wrapper inserts an extra leading argument.
        argopts = argsp.parse_args(args[2:])
        load_log(argopts.log)
        log.notice('RTEMS Source Builder - Get Sources, %s' % (version.str()))
        log.tracing = argopts.trace
        opts = load_options(args, argopts)
        configs = build.get_configs(opts)
        if argopts.list_bsets:
            list_bset_files(opts, configs)
        else:
            if argopts.clean:
                if argopts.download_dir is None:
                    raise error.general('cleaning of the default download directories is not supported')
                if path.exists(argopts.download_dir):
                    log.notice('Cleaning source directory: %s' % (argopts.download_dir))
                    path.removeall(argopts.download_dir)
            if len(argopts.bsets) == 0:
                raise error.general('no build sets provided on the command line')
            for bset in argopts.bsets:
                # Flag that any error raised now comes from constructing
                # the build set rather than from downloading.
                get_sources_error = True
                b = buildset(bset, configs, opts)
                get_sources_error = False
                for host in host_profiles:
                    b.build(host)
                b = None
    except error.general as gerr:
        if get_sources_error:
            log.stderr(str(gerr))
        log.stderr('Build FAILED')
        ec = 1
    except error.internal as ierr:
        if get_sources_error:
            log.stderr(str(ierr))
        log.stderr('Internal Build FAILED')
        ec = 1
    except error.exit as eerr:
        pass
    except KeyboardInterrupt:
        log.notice('abort: user terminated')
        ec = 1
    except:
        # Re-raise unknown exceptions for a full traceback; the
        # statements that used to follow this raise were unreachable
        # and have been removed.
        raise
    sys.exit(ec)
    def build(self, deps=None, nesting_count=0, mail=None):
        """Build every configuration named by this build set.

        :param deps: ``None`` to build; a list to collect included config
                     files into instead of building (dependency listing).
        :param nesting_count: recursion depth of nested build sets; used
                              only to shape the trace output.
        :param mail: optional dict with the mailer, a log capture, and
                     the to/from addresses; when set a report is mailed.
        """
        build_error = False
        nesting_count += 1
        if mail:
            mail['output'].clear()
        log.trace('_bset: %s: make' % (self.bset))
        log.notice('Build Set: %s' % (self.bset))
        mail_subject = '%s on %s' % (self.bset,
                                     self.macros.expand('%{_host}'))
        # The build can modify PATH; remember it so it can be restored.
        current_path = os.environ['PATH']
        start = datetime.datetime.now()
        mail_report = False
        have_errors = False
        if mail:
            # NOTE(review): the capture was already cleared above; this
            # second clear appears redundant — confirm.
            mail['output'].clear()
        try:
            configs = self.load()
            log.trace('_bset: %s: configs: %s' % (self.bset,
                                                  ','.join(configs)))
            sizes_valid = False
            builds = []
            for s in range(0, len(configs)):
                b = None
                try:
                    #
                    # Each section of the build set gets a separate set of
                    # macros so we do not contaminate one configuration with
                    # another.
                    #
                    opts = copy.copy(self.opts)
                    macros = copy.copy(self.macros)
                    if configs[s].endswith('.bset'):
                        # A nested build set: recurse.
                        log.trace('_bset: == %2d %s' % (nesting_count + 1,
                                                        '=' * 75))
                        bs = buildset(configs[s], self.configs, opts, macros)
                        bs.build(deps, nesting_count, mail)
                        del bs
                    elif configs[s].endswith('.cfg'):
                        if mail:
                            mail_report = True
                        log.trace('_bset: -- %2d %s' % (nesting_count + 1,
                                                        '-' * 75))
                        try:
                            b = build.build(configs[s],
                                            self.opts.get_arg('--pkg-tar-files'),
                                            opts,
                                            macros)
                        except:
                            build_error = True
                            raise
                        if b.macros.get('%{_disable_reporting}'):
                            mail_report = False
                        if deps is None:
                            self.build_package(configs[s], b)
                            self.report(configs[s], b,
                                        copy.copy(self.opts),
                                        copy.copy(self.macros),
                                        mail=mail)
                            # Always produce an XML report.
                            self.report(configs[s], b,
                                        copy.copy(self.opts),
                                        copy.copy(self.macros),
                                        format='xml',
                                        mail=mail)
                            if s == len(configs) - 1 and not have_errors:
                                self.bset_tar(b)
                        else:
                            # Dependency scan: record the includes only.
                            deps += b.config.includes()
                        builds += [b]
                        #
                        # Dump post build macros.
                        #
                        log.trace('_bset: macros post-build')
                        log.trace(str(macros))
                    else:
                        raise error.general('invalid config type: %s' %
                                            (configs[s]))
                except error.general as gerr:
                    have_errors = True
                    if b is not None:
                        if self.build_failure is None:
                            self.build_failure = b.name()
                        self.write_mail_header('')
                        self.write_mail_header('= ' * 40)
                        self.write_mail_header('Build FAILED: %s' % (b.name()))
                        self.write_mail_header('- ' * 40)
                        self.write_mail_header(str(log.default))
                        self.write_mail_header('- ' * 40)
                        if self.opts.keep_going():
                            log.notice(str(gerr))
                            if self.opts.always_clean():
                                builds += [b]
                        else:
                            raise
                    else:
                        raise
            #
            # Installing ...
            #
            log.trace('_bset: installing: deps:%r no-install:%r' % \
                      (deps is None, self.opts.no_install()))
            if deps is None \
               and not self.opts.no_install() \
               and not have_errors:
                for b in builds:
                    log.trace('_bset: installing: %r' % b.installable())
                    if b.installable():
                        self.install(b.name(),
                                     b.config.expand('%{buildroot}'),
                                     b.config.expand('%{_prefix}'))
            #
            # Sizes ...
            #
            if len(builds) > 1:
                size_build = 0
                size_installed = 0
                size_build_max = 0
                for b in builds:
                    s = b.get_build_size()
                    size_build += s
                    if s > size_build_max:
                        size_build_max = s
                    size_installed += b.get_installed_size()
                size_sources = 0
                for p in builds[0].config.expand('%{_sourcedir}').split(':'):
                    size_sources += path.get_size(p)
                size_patches = 0
                for p in builds[0].config.expand('%{_patchdir}').split(':'):
                    size_patches += path.get_size(p)
                size_total = size_sources + size_patches + size_installed
                build_max_size_human = build.humanize_number(
                    size_build_max + size_installed, 'B')
                build_total_size_human = build.humanize_number(size_total, 'B')
                build_sources_size_human = build.humanize_number(
                    size_sources, 'B')
                build_patches_size_human = build.humanize_number(
                    size_patches, 'B')
                build_installed_size_human = build.humanize_number(
                    size_installed, 'B')
                build_size = 'usage: %s' % (build_max_size_human)
                build_size += ' total: %s' % (build_total_size_human)
                build_size += ' (sources: %s' % (build_sources_size_human)
                build_size += ', patches: %s' % (build_patches_size_human)
                build_size += ', installed %s)' % (build_installed_size_human)
                sizes_valid = True
            #
            # Cleaning ...
            #
            if deps is None and \
               (not self.opts.no_clean() or self.opts.always_clean()):
                for b in builds:
                    if not b.disabled():
                        log.notice('cleaning: %s' % (b.name()))
                        b.cleanup()
            #
            # Log the build size message
            #
            if len(builds) > 1:
                log.notice('Build Sizes: %s' % (build_size))
            #
            # Clear out the builds ...
            #
            for b in builds:
                del b
        except error.general as gerr:
            if not build_error:
                log.stderr(str(gerr))
            raise
        except KeyboardInterrupt:
            mail_report = False
            raise
        except:
            self.build_failure = 'RSB general failure'
            raise
        finally:
            # Always restore PATH and, when enabled, mail the report.
            end = datetime.datetime.now()
            os.environ['PATH'] = current_path
            build_time = str(end - start)
            if mail_report and not self.macros.defined('mail_disable'):
                self.write_mail_header('Build Time: %s' % (build_time), True)
                self.write_mail_header('', True)
                if self.build_failure is not None:
                    mail_subject = 'FAILED %s (%s)' % \
                        (mail_subject, self.build_failure)
                else:
                    mail_subject = 'PASSED %s' % (mail_subject)
                mail_subject = 'Build %s: %s' % (reports.platform(
                    mode='system'), mail_subject)
                self.write_mail_header(mail['header'], True)
                self.write_mail_header('')
                log.notice('Mailing report: %s' % (mail['to']))
                body = self.get_mail_header()
                body += 'Sizes' + os.linesep
                body += '=====' + os.linesep + os.linesep
                if sizes_valid:
                    body += 'Maximum build usage: ' + build_max_size_human \
                        + os.linesep
                    body += 'Total size: ' + build_total_size_human + os.linesep
                    body += 'Installed : ' + build_installed_size_human \
                        + os.linesep
                    body += 'Sources: ' + build_sources_size_human + os.linesep
                    body += 'Patches: ' + build_patches_size_human + os.linesep
                else:
                    body += 'No packages built'
                body += os.linesep
                body += 'Output' + os.linesep
                body += '======' + os.linesep + os.linesep
                body += os.linesep.join(mail['output'].get())
                body += os.linesep + os.linesep
                body += 'Report' + os.linesep
                body += '======' + os.linesep + os.linesep
                body += self.get_mail_report()
                # NOTE(review): uses the per-config 'opts' copy from the
                # loop, not self.opts; unbound when configs is empty —
                # confirm this should not be self.opts.dry_run().
                if not opts.dry_run():
                    mail['mail'].send(mail['to'], mail_subject, body)
            log.notice('Build Set: Time %s' % (build_time))
# NOTE(review): fragment of an older _hash_check(); the 'try' matched by
# the first 'except' and the tail of the final 'try' are outside this
# view.  Compare with the current _hash_check() earlier in the file.
except:
    # Older Pythons without hashlib.algorithms: assume the usual set.
    hashlib_algorithms = ['md5', 'sha1', 'sha224', 'sha256', 'sha384', 'sha512']
if hash[0] not in hashlib_algorithms:
    raise error.general('invalid hash algorithm for %s: %s' % (file_, hash[0]))
hasher = None
_in = None
try:
    # Hash the entire file in one read.
    hasher = hashlib.new(hash[0])
    _in = open(path.host(absfile), 'rb')
    hasher.update(_in.read())
# NOTE(review): Python 2 'except X, e:' syntax below; a SyntaxError
# under Python 3 — presumably a stale copy of the code. Verify.
except IOError, err:
    log.notice('hash: %s: read error: %s' % (file_, str(err)))
    failed = True
except:
    msg = 'hash: %s: error' % (file_)
    log.stderr(msg)
    log.notice(msg)
    if _in is not None:
        _in.close()
    raise
if _in is not None:
    _in.close()
# Compare the computed hex digest with the recorded one.
log.output('checksums: %s: %s => %s' % (file_, hasher.hexdigest(), hash[1]))
if hasher.hexdigest() != hash[1]:
    log.warning('checksum error: %s' % (file_))
    failed = True
if failed and remove:
    # Remove the corrupt download so a retry fetches it again.
    log.warning('removing: %s' % (file_))
    if path.exists(absfile):
        try:
            os.remove(path.host(absfile))
    def build(self, deps = None, nesting_count = 0):
        """Build every configuration named by this build set.

        Older variant without the mail dict; mailing is driven directly
        by the '--mail' option.

        :param deps: ``None`` to build; a list to collect included config
                     files into instead of building (dependency listing).
        :param nesting_count: recursion depth of nested build sets; used
                              only to shape the trace output.
        """
        build_error = False
        nesting_count += 1
        log.trace('_bset: %s: make' % (self.bset))
        log.notice('Build Set: %s' % (self.bset))
        if self.opts.get_arg('--mail'):
            mail_report_subject = '%s %s' % (self.bset,
                                             self.macros.expand('%{_host}'))
        # The build can modify PATH; remember it so it can be restored.
        current_path = os.environ['PATH']
        start = datetime.datetime.now()
        mail_report = False
        have_errors = False
        try:
            configs = self.load()
            log.trace('_bset: %s: configs: %s' % (self.bset,
                                                  ','.join(configs)))
            builds = []
            for s in range(0, len(configs)):
                b = None
                try:
                    #
                    # Each section of the build set gets a separate set of
                    # macros so we do not contaminate one configuration with
                    # another.
                    #
                    opts = copy.copy(self.opts)
                    macros = copy.copy(self.macros)
                    if configs[s].endswith('.bset'):
                        # A nested build set: recurse.
                        log.trace('_bset: == %2d %s' % (nesting_count + 1,
                                                        '=' * 75))
                        bs = buildset(configs[s], self.configs, opts, macros)
                        bs.build(deps, nesting_count)
                        del bs
                    elif configs[s].endswith('.cfg'):
                        mail_report = self.opts.get_arg('--mail')
                        log.trace('_bset: -- %2d %s' % (nesting_count + 1,
                                                        '-' * 75))
                        try:
                            b = build.build(configs[s],
                                            self.opts.get_arg('--pkg-tar-files'),
                                            opts,
                                            macros)
                        except:
                            build_error = True
                            raise
                        if b.macros.get('%{_disable_reporting}'):
                            mail_report = False
                        if deps is None:
                            self.build_package(configs[s], b)
                            self.report(configs[s], b,
                                        copy.copy(self.opts),
                                        copy.copy(self.macros))
                            # Always produce an XML report.
                            self.report(configs[s], b,
                                        copy.copy(self.opts),
                                        copy.copy(self.macros),
                                        format = 'xml')
                            if s == len(configs) - 1 and not have_errors:
                                self.bset_tar(b)
                        else:
                            # Dependency scan: record the includes only.
                            deps += b.config.includes()
                        builds += [b]
                        #
                        # Dump post build macros.
                        #
                        log.trace('_bset: macros post-build')
                        log.trace(str(macros))
                    else:
                        raise error.general('invalid config type: %s' %
                                            (configs[s]))
                except error.general as gerr:
                    have_errors = True
                    if b is not None:
                        if self.build_failure is None:
                            self.build_failure = b.name()
                        self.write_mail_header('')
                        self.write_mail_header('= ' * 40)
                        self.write_mail_header('Build FAILED: %s' % (b.name()))
                        self.write_mail_header('- ' * 40)
                        self.write_mail_header(str(log.default))
                        self.write_mail_header('- ' * 40)
                        if self.opts.keep_going():
                            log.notice(str(gerr))
                            if self.opts.always_clean():
                                builds += [b]
                        else:
                            raise
                    else:
                        raise
            #
            # Installing ...
            #
            log.trace('_bset: installing: deps:%r no-install:%r' % \
                      (deps is None, self.opts.no_install()))
            if deps is None \
               and not self.opts.no_install() \
               and not have_errors:
                for b in builds:
                    log.trace('_bset: installing: %r' % b.installable())
                    if b.installable():
                        self.install(b.name(),
                                     b.config.expand('%{buildroot}'),
                                     b.config.expand('%{_prefix}'))
            # Clean each enabled build unless cleaning is suppressed.
            if deps is None and \
               (not self.opts.no_clean() or self.opts.always_clean()):
                for b in builds:
                    if not b.disabled():
                        log.notice('cleaning: %s' % (b.name()))
                        b.cleanup()
            # Drop references to the build objects.
            for b in builds:
                del b
        except error.general as gerr:
            if not build_error:
                log.stderr(str(gerr))
            raise
        except KeyboardInterrupt:
            mail_report = False
            raise
        except:
            self.build_failure = 'RSB general failure'
            raise
        finally:
            # Always restore PATH and, when enabled, mail the report.
            end = datetime.datetime.now()
            os.environ['PATH'] = current_path
            build_time = str(end - start)
            if mail_report:
                to_addr = self.opts.get_arg('--mail-to')
                if to_addr is not None:
                    to_addr = to_addr[1]
                else:
                    to_addr = self.macros.expand('%{_mail_tools_to}')
                log.notice('Mailing report: %s' % (to_addr))
                self.write_mail_header('Build Time %s' % (build_time), True)
                self.write_mail_header('')
                m = mailer.mail(self.opts)
                if self.build_failure is not None:
                    mail_report_subject = 'Build: FAILED %s (%s)' %\
                        (mail_report_subject, self.build_failure)
                    # NOTE(review): pass_fail is assigned but not used in
                    # this method — confirm it can be removed.
                    pass_fail = 'FAILED'
                else:
                    mail_report_subject = 'Build: PASSED %s' % (mail_report_subject)
                if not self.opts.dry_run():
                    m.send(to_addr, mail_report_subject,
                           self.mail_header + self.mail_report)
            log.notice('Build Set: Time %s' % (build_time))