def mark_stable(data):
    """Find the single release marked as 'testing' and make it 'stable'."""
    doc = minidom.parseString(data)
    testing = []
    all_impls = doc.documentElement.getElementsByTagNameNS(
        namespaces.XMLNS_IFACE, 'implementation')
    for x in all_impls:
        if get_stability(x) == 'testing':
            testing.append((get_version(x), x))
    if len(testing) == 0:
        raise Exception('No implementations are currently "testing"!')
    testing = sorted(testing, key=lambda x: x[0])
    highest_version = testing[-1][0]
    latest_testing = [
        impl for version, impl in testing if version == highest_version
    ]
    if len(latest_testing) < len(testing):
        warn(
            "Multiple 'testing' versions - changing %d (of %d) with version %s",
            len(latest_testing), len(testing),
            model.format_version(highest_version))
    for impl in latest_testing:
        impl.setAttribute('stability', 'stable')
    return doc.toxml('utf-8')
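# Minimal usage sketch (not part of the original source): mark_stable() takes the
# raw XML of a feed and returns the rewritten XML with the newest 'testing'
# implementation(s) promoted to 'stable'. The feed content below is invented
# purely for illustration; real feeds use full implementation ids and digests.
#
#     SAMPLE_FEED = b'''<?xml version="1.0"?>
#     <interface xmlns="http://zero-install.sourceforge.net/2004/injector/interface">
#       <name>demo</name>
#       <group>
#         <implementation id="v1.1" version="1.1" stability="testing"/>
#         <implementation id="v1.0" version="1.0" stability="stable"/>
#       </group>
#     </interface>'''
#
#     new_xml = mark_stable(SAMPLE_FEED)   # the version 1.1 entry becomes stability="stable"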
def graduation_check(feeds, feeds_dir):
    # Warn about releases that are still 'testing' a while after release
    now = time.time()

    def age(impl):
        released = impl.metadata.get('released', None)
        if not released:
            return 0
        released_time = time.mktime(time.strptime(released, '%Y-%m-%d'))
        return now - released_time

    shown_header = False
    for feed in feeds:
        with open(feed.source_path, 'rb') as stream:
            zfeed = model.ZeroInstallFeed(qdom.parse(stream))
        if zfeed.implementations:
            # Find the latest version number (note that there may be several
            # implementations with this version number)
            latest_version = max(impl.version for impl in zfeed.implementations.values())
            testing_impls = [impl for impl in zfeed.implementations.values()
                             if impl.version == latest_version and
                                impl.upstream_stability == model.stability_levels['testing'] and
                                age(impl) > TIME_TO_GRADUATE]
            if testing_impls:
                if not shown_header:
                    print("Releases which are still marked as 'testing' after {days} days:".format(
                        days = TIME_TO_GRADUATE / DAY))
                    shown_header = True
                print("- {name} v{version}, {age} days ({path})".format(
                    age = int(age(testing_impls[0]) / DAY),
                    name = zfeed.get_name(),
                    path = os.path.relpath(feed.source_path, feeds_dir),
                    version = model.format_version(latest_version)))
def ask_if_previous_still_testing(master_doc, new_version):
    new_version_parsed = model.parse_version(new_version)
    xml = master_doc.toxml(encoding = 'utf-8')
    master = model.ZeroInstallFeed(qdom.parse(BytesIO(xml)))

    previous_versions = [impl.version for impl in master.implementations.values()
                         if impl.version < new_version_parsed]
    if not previous_versions:
        return
    previous_version = max(previous_versions)

    # (all the <implementation>s with this version number)
    previous_testing_impls = [impl for impl in master.implementations.values()
                              if impl.version == previous_version and
                                 impl.upstream_stability == model.testing]
    if not previous_testing_impls:
        return

    print("The previous release, version {version}, is still marked as 'testing'. Set to stable?".format(
        version = model.format_version(previous_version)))
    if get_choice(['Yes', 'No']) != 'Yes':
        return

    ids_to_change = frozenset(impl.id for impl in previous_testing_impls)

    for impl in master_doc.getElementsByTagNameNS(XMLNS_IFACE, 'implementation'):
        if impl.getAttribute('id') in ids_to_change:
            impl.setAttribute('stability', 'stable')
def get_previous_release(this_version):
    """Return the highest numbered version in the master feed before this_version.
    @return: version, or None if there wasn't one"""
    parsed_release_version = model.parse_version(this_version)

    versions = [model.parse_version(version) for version in scm.get_tagged_versions()]
    versions = [version for version in versions if version < parsed_release_version]

    if versions:
        return model.format_version(max(versions))
    return None
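# Illustrative sketch (an assumption, not from the original source): if
# scm.get_tagged_versions() returns the tags ['0.9', '1.0', '1.1'], then
# get_previous_release('1.1') would return '1.0', while get_previous_release('0.9')
# would return None because no tagged version sorts before it.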
def suggest_release_version(snapshot_version):
    """Given a snapshot version, suggest a suitable release version.
    >>> suggest_release_version('1.0-pre')
    '1.0'
    >>> suggest_release_version('0.9-post')
    '0.10'
    >>> suggest_release_version('3')
    Traceback (most recent call last):
        ...
    SafeException: Version '3' is not a snapshot version (should end in -pre or -post)
    """
    version = model.parse_version(snapshot_version)
    mod = version[-1]
    if mod == 0:
        raise SafeException("Version '%s' is not a snapshot version (should end in -pre or -post)" % snapshot_version)
    if mod > 0:
        # -post, so increment the number
        version[-2][-1] += 1
    version[-1] = 0  # Remove the modifier
    return model.format_version(version)
def write_sample_feed(buildenv, master_feed, src_impl):
    path = buildenv.local_iface_file

    old_path = os.path.join(buildenv.metadir, buildenv.iface_name + '.xml')
    if os.path.exists(old_path):
        warn("Removing old %s file: use %s instead now", old_path, path)
        os.unlink(old_path)

    impl = minidom.getDOMImplementation()
    XMLNS_IFACE = namespaces.XMLNS_IFACE
    doc = impl.createDocument(XMLNS_IFACE, "interface", None)

    root = doc.documentElement
    root.setAttributeNS(XMLNS_NAMESPACE, 'xmlns', XMLNS_IFACE)
    prefixes = Prefixes(XMLNS_IFACE)

    def addSimple(parent, name, text=None):
        elem = doc.createElementNS(XMLNS_IFACE, name)
        parent.appendChild(doc.createTextNode('\n' + ' ' * (1 + depth(parent))))
        parent.appendChild(elem)
        if text:
            elem.appendChild(doc.createTextNode(text))
        return elem

    def close(element):
        element.appendChild(doc.createTextNode('\n' + ' ' * depth(element)))

    addSimple(root, 'name', master_feed.name)
    addSimple(root, 'summary', master_feed.summary)
    addSimple(root, 'description', master_feed.description)
    feed_for = addSimple(root, 'feed-for')
    feed_for.setAttributeNS(None, 'interface', find_feed_for(master_feed))

    group = addSimple(root, 'group')
    main = src_impl.attrs.get(XMLNS_0COMPILE + ' binary-main', None)
    if main:
        group.setAttributeNS(None, 'main', main)

    lib_mappings = src_impl.attrs.get(XMLNS_0COMPILE + ' binary-lib-mappings', None)
    if lib_mappings:
        prefixes.setAttributeNS(group, XMLNS_0COMPILE, 'lib-mappings', lib_mappings)

    for d in src_impl.dependencies:
        if parse_bool(d.metadata.get(XMLNS_0COMPILE + ' include-binary', 'false')):
            requires = d.qdom.toDOM(doc, prefixes)
            requires.removeAttributeNS(XMLNS_0COMPILE, 'include-binary')
            group.appendChild(requires)

    set_arch = True

    impl_elem = addSimple(group, 'implementation')
    impl_template = buildenv.get_binary_template()
    if impl_template:
        arm_if_0install_attrs(impl_template)

        # Copy attributes from template
        for fullname, value in impl_template.attrs.iteritems():
            if fullname == 'arch':
                set_arch = False
                if value == '*-*':
                    continue
            if ' ' in fullname:
                ns, localName = fullname.split(' ', 1)
            else:
                ns, localName = None, fullname
            prefixes.setAttributeNS(impl_elem, ns, localName, value)

        # Copy child nodes
        for child in impl_template.childNodes:
            impl_elem.appendChild(child.toDOM(doc, prefixes))
        if impl_template.content:
            impl_elem.appendChild(doc.createTextNode(impl_template.content))

        for version_elem in itertools.chain(
                impl_elem.getElementsByTagName('version'),
        ):
            pin_components = version_elem.getAttributeNS(XMLNS_0COMPILE, "pin-components")
            if pin_components:
                pin_components = int(pin_components)
                iface = version_elem.parentNode.getAttribute("interface")
                assert iface
                dep_impl = buildenv.chosen_impl(iface)
                impl_version = model.parse_version(dep_impl.attrs.get('version'))
                pinned_components = [impl_version[0][:pin_components]]
                # (for -pre versions)
                min_version = min(pinned_components, impl_version)
                # clone and increment
                max_version = [pinned_components[0][:]]
                max_version[0][-1] += 1
                version_elem.setAttribute("not-before", model.format_version(min_version))
                version_elem.setAttribute("before", model.format_version(max_version))

    if set_arch:
        group.setAttributeNS(None, 'arch', buildenv.target_arch)

    impl_elem.setAttributeNS(None, 'version', src_impl.version)

    if not impl_elem.hasAttribute('license'):
        license = src_impl.attrs.get('license')
        if license:
            impl_elem.setAttributeNS(None, 'license', license)

    version_modifier = buildenv.version_modifier
    if version_modifier:
        impl_elem.setAttributeNS(None, 'version-modifier', version_modifier)

    impl_elem.setAttributeNS(None, 'id', '..')
    impl_elem.setAttributeNS(None, 'released', time.strftime('%Y-%m-%d'))

    close(group)
    close(root)

    for ns, prefix in prefixes.prefixes.items():
        root.setAttributeNS(XMLNS_NAMESPACE, 'xmlns:' + prefix, ns)

    stream = codecs.open(path, 'w', encoding='utf-8')
    try:
        doc.writexml(stream)
    finally:
        stream.close()
def do_build_internal(options, args):
    """build-internal"""
    # If a sandbox is being used, we're in it now.
    import getpass, socket

    buildenv = BuildEnv()
    sels = buildenv.get_selections()

    builddir = os.path.realpath('build')
    ensure_dir(buildenv.metadir)

    build_env_xml = join(buildenv.metadir, 'build-environment.xml')

    buildenv_doc = sels.toDOM()

    # Create build-environment.xml file
    root = buildenv_doc.documentElement
    info = buildenv_doc.createElementNS(XMLNS_0COMPILE, 'build-info')
    root.appendChild(info)
    info.setAttributeNS(None, 'time', time.strftime('%Y-%m-%d %H:%M').strip())
    info.setAttributeNS(None, 'host', socket.getfqdn())
    info.setAttributeNS(None, 'user', getpass.getuser())
    info.setAttributeNS(None, 'arch', '%s-%s' % (uname[0], uname[4]))
    stream = file(build_env_xml, 'w')
    buildenv_doc.writexml(stream, addindent=" ", newl="\n")
    stream.close()

    # Create local binary interface file.
    # We use the main feed for the interface as the template for the name,
    # summary, etc (note: this is not necessarily the feed that contained
    # the source code).
    master_feed = iface_cache.get_feed(buildenv.interface)
    src_impl = buildenv.chosen_impl(buildenv.interface)
    write_sample_feed(buildenv, master_feed, src_impl)

    # Check 0compile is new enough
    min_version = model.parse_version(
        src_impl.attrs.get(XMLNS_0COMPILE + ' min-version', None))
    if min_version and min_version > model.parse_version(__main__.version):
        raise SafeException(
            "%s-%s requires 0compile >= %s, but we are only version %s" %
            (master_feed.get_name(), src_impl.version,
             model.format_version(min_version), __main__.version))

    # Create the patch
    patch_file = join(buildenv.metadir, 'from-%s.patch' % src_impl.version)
    if buildenv.user_srcdir:
        with open(patch_file, 'w') as stream:
            # (ignore errors; will already be shown on stderr)
            try:
                subprocess.call(["diff", "-urN", buildenv.orig_srcdir, 'src'],
                                stdout=stream)
            except OSError as ex:
                print >> sys.stderr, "WARNING: Failed to run 'diff': ", ex
        if os.path.getsize(patch_file) == 0:
            os.unlink(patch_file)
    elif os.path.exists(patch_file):
        os.unlink(patch_file)

    env('BUILDDIR', builddir)
    env('DISTDIR', buildenv.distdir)
    env('SRCDIR', buildenv.user_srcdir or buildenv.orig_srcdir)
    env('BINARYFEED', buildenv.local_iface_file)
    os.chdir(builddir)
    print "cd", builddir

    setup = CompileSetup(iface_cache.stores, sels)
    setup.prepare_env()

    # These mappings are needed when mixing Zero Install -dev packages with
    # native package binaries.
    mappings = {}
    for impl in sels.selections.values():
        # Add mappings that have been set explicitly...
        new_mappings = impl.attrs.get(XMLNS_0COMPILE + ' lib-mappings', '')
        if new_mappings:
            new_mappings = new_mappings.split(' ')
            for mapping in new_mappings:
                assert ':' in mapping, "lib-mappings missing ':' in '%s' from '%s'" % (
                    mapping, impl.feed)
                name, major_version = mapping.split(':', 1)
                assert '/' not in mapping, "lib-mappings '%s' contains a / in the version number (from '%s')!" % (
                    mapping, impl.feed)
                if sys.platform == 'darwin':
                    mappings[name] = 'lib%s.%s.dylib' % (name, major_version)
                else:
                    mappings[name] = 'lib%s.so.%s' % (name, major_version)
        # Auto-detect required mappings where possible...
        # (if the -dev package is native, the symlinks will be OK)
        if not is_package_impl(impl):
            impl_path = lookup(impl)
            for libdirname in ['lib', 'usr/lib', 'lib64', 'usr/lib64']:
                libdir = os.path.join(impl_path, libdirname)
                if os.path.isdir(libdir):
                    find_broken_version_symlinks(libdir, mappings)

    if mappings:
        set_up_mappings(mappings)

    overrides_dir = os.path.join(os.environ['TMPDIR'], PKG_CONFIG_OVERRIDES)
    if os.path.isdir(overrides_dir):
        add_overrides = model.EnvironmentBinding('PKG_CONFIG_PATH', PKG_CONFIG_OVERRIDES)
        do_env_binding(add_overrides, os.environ['TMPDIR'])

    # Some programs want to put temporary build files in the source directory.
    # Make a copy of the source if needed.
    dup_src_type = src_impl.attrs.get(XMLNS_0COMPILE + ' dup-src', None)
    if dup_src_type == 'true':
        dup_src(copy_file)
        env('SRCDIR', builddir)
    elif dup_src_type:
        raise Exception("Unknown dup-src value '%s'" % dup_src_type)

    if options.shell:
        spawn_and_check(find_in_path('cmd' if os.name == 'nt' else 'sh'), [])
    else:
        command = sels.commands[0].qdom.attrs.get('shell-command', None)
        if command is None:
            # New style <command>
            prog_args = setup.build_command(sels.interface, sels.command) + args
        else:
            # Old style shell-command='...'
            if os.name == 'nt':
                prog_args = [os.environ['0COMPILE_BASH'], '-eux', '-c', command] + args
            else:
                prog_args = ['/bin/sh', '-c', command + ' "$@"', '-'] + args
            assert len(sels.commands) == 1

        # Remove any existing log files
        for log in ['build.log', 'build-success.log', 'build-failure.log']:
            if os.path.exists(log):
                os.unlink(log)

        # Run the command, copying output to a new log
        with open('build.log', 'w') as log:
            print >> log, "Build log for %s-%s" % (master_feed.get_name(), src_impl.version)
            print >> log, "\nBuilt using 0compile-%s" % __main__.version
            print >> log, "\nBuild system: " + ', '.join(uname)
            print >> log, "\n%s:\n" % ENV_FILE
            with open(os.path.join(os.pardir, ENV_FILE)) as properties_file:
                shutil.copyfileobj(properties_file, log)

            log.write('\n')

            if os.path.exists(patch_file):
                print >> log, "\nPatched with:\n"
                shutil.copyfileobj(file(patch_file), log)
                log.write('\n')

            if command:
                print "Executing: " + command, args
                print >> log, "Executing: " + command, args
            else:
                print "Executing: " + str(prog_args)
                print >> log, "Executing: " + str(prog_args)

            # Tee the output to the console and to the log
            child = subprocess.Popen(prog_args,
                                     stdout=subprocess.PIPE,
                                     stderr=subprocess.STDOUT)
            while True:
                data = os.read(child.stdout.fileno(), 100)
                if not data:
                    break
                sys.stdout.write(data)
                log.write(data)
            status = child.wait()
            failure = None
            if status == 0:
                print >> log, "Build successful"
                shorten_dynamic_library_install_names()
                fixup_generated_pkgconfig_files()
                remove_la_files()
            elif status > 0:
                failure = "Build failed with exit code %d" % status
            else:
                failure = "Build failure: exited due to signal %d" % (-status)
            if failure:
                print >> log, failure

        if failure:
            os.rename('build.log', 'build-failure.log')
            raise SafeException("Command '%s': %s" % (prog_args, failure))
        else:
            os.rename('build.log', 'build-success.log')
def pv(v):
    parsed = model.parse_version(v)
    assert model.format_version(parsed) == v
    return parsed
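# Intent of pv() (interpretation, not from the original source): a test helper that
# parses a version string and asserts that it is in canonical form, i.e. that
# model.format_version(model.parse_version(v)) round-trips back to v. For example,
# pv('1.0-pre') should succeed, while a non-canonical spelling would trip the assert.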
def recursive_build(self, iface_uri, version = None):
    """Build an implementation of iface_uri and register it as a feed.
    @param version: the version to build, or None to build any version
    @type version: str
    """
    r = requirements.Requirements(iface_uri)
    r.source = True
    r.command = 'compile'

    d = driver.Driver(self.config, r)
    iface = self.config.iface_cache.get_interface(iface_uri)
    d.solver.record_details = True
    if version:
        d.solver.extra_restrictions[iface] = [model.VersionRestriction(model.parse_version(version))]

    # For testing...
    #p.target_arch = arch.Architecture(os_ranks = {'FreeBSD': 0, None: 1}, machine_ranks = {'i386': 0, None: 1, 'newbuild': 2})

    while True:
        self.heading(iface_uri)
        self.note("\nSelecting versions for %s..." % iface.get_name())
        solved = d.solve_with_downloads()
        if solved:
            yield solved
            tasks.check(solved)

        if not d.solver.ready:
            self.print_details(d.solver)
            raise d.solver.get_failure_reason()
        self.note("Selection done.")

        self.note("\nPlan:\n")
        self.pretty_print_plan(d.solver, r.interface_uri)
        self.note('')

        needed = []
        for dep_iface, dep_impl in d.solver.selections.iteritems():
            if dep_impl.id.startswith('0compile='):
                if not needed:
                    self.note("Build dependencies that need to be compiled first:\n")
                self.note("- {iface} {version}".format(iface = dep_iface.uri,
                                                       version = model.format_version(dep_impl.version)))
                needed.append((dep_iface, dep_impl))

        if not needed:
            self.note("No dependencies need compiling... compile %s itself..." % iface.get_name())
            build = self.compile_and_register(d.solver.selections,
                    # force the interface in the recursive case
                    iface_uri if iface_uri != self.iface_uri else None)
            yield build
            tasks.check(build)
            return

        # Compile the first missing build dependency...
        dep_iface, dep_impl = needed[0]

        self.note("")

        #details = d.solver.details[self.config.iface_cache.get_interface(dep_iface.uri)]
        #for de in details:
        #    print de

        build = self.recursive_build(dep_iface.uri, dep_impl.get_version())
        yield build
        tasks.check(build)
def mark_stable(data):
    """Find the single release marked as 'testing' and make it 'stable'."""
    doc = minidom.parseString(data)
    testing = []
    all_impls = doc.documentElement.getElementsByTagNameNS(namespaces.XMLNS_IFACE, 'implementation')
    for x in all_impls:
        if get_stability(x) == 'testing':
            testing.append((get_version(x), x))
    if len(testing) == 0:
        raise Exception('No implementations are currently "testing"!')
    testing = sorted(testing)
    highest_version = testing[-1][0]
    latest_testing = [impl for version, impl in testing if version == highest_version]
    if len(latest_testing) < len(testing):
        warn("Multiple 'testing' versions - changing %d (of %d) with version %s",
             len(latest_testing), len(testing), model.format_version(highest_version))
    for impl in latest_testing:
        impl.setAttribute('stability', 'stable')
    return doc.toxml()
def do_update(config, messages = None):
    feeds, files = build.build_public_feeds(config)

    files += [f.public_rel_path for f in feeds]

    files += catalog.write_catalog(config, feeds)

    feeds_dir = abspath('feeds')
    os.chdir('public')

    # Add default styles, if missing
    resources_dir = join('resources')
    if not os.path.isdir(resources_dir):
        os.mkdir(resources_dir)
    for resource in ['catalog.xsl', 'catalog.css', 'feed.xsl', 'feed.css']:
        target = join('resources', resource)
        files.append(target)
        if not os.path.exists(target):
            with open(join(config.default_resources, resource), 'rt') as stream:
                data = stream.read()
            data = data.replace('@REPOSITORY_BASE_URL@', config.REPOSITORY_BASE_URL)
            with open(target, 'wt') as stream:
                stream.write(data)

    if not messages:
        messages = ['0repo update']

    config.upload_public_dir(files, message = ', '.join(messages))

    out = subprocess.check_output(['git', 'status', '--porcelain'], cwd = feeds_dir).strip('\n')
    if out:
        print("Note: you have uncommitted changes in {feeds}:".format(feeds = feeds_dir))
        print(out)
        print("Run 'git commit -a' from that directory to save your changes.")

    # Warn about releases that are still 'testing' a while after release
    now = time.time()

    def age(impl):
        released = impl.metadata.get('released', None)
        if not released:
            return 0
        released_time = time.mktime(time.strptime(released, '%Y-%m-%d'))
        return now - released_time

    shown_header = False
    for feed in feeds:
        with open(feed.source_path, 'rb') as stream:
            zfeed = model.ZeroInstallFeed(qdom.parse(stream))
        if zfeed.implementations:
            # Find the latest version number (note that there may be several
            # implementations with this version number)
            latest_version = max(impl.version for impl in zfeed.implementations.values())
            testing_impls = [impl for impl in zfeed.implementations.values()
                             if impl.version == latest_version and
                                impl.upstream_stability == model.stability_levels['testing'] and
                                age(impl) > TIME_TO_GRADUATE]
            if testing_impls:
                if not shown_header:
                    print("Releases which are still marked as 'testing' after {days} days:".format(
                        days = TIME_TO_GRADUATE / DAY))
                    shown_header = True
                print("- {name} v{version}, {age} days ({path})".format(
                    age = int(age(testing_impls[0]) / DAY),
                    name = zfeed.get_name(),
                    path = os.path.relpath(feed.source_path, feeds_dir),
                    version = model.format_version(latest_version)))
def do_build_internal(options, args):
    """build-internal"""
    # If a sandbox is being used, we're in it now.
    import getpass, socket

    buildenv = BuildEnv()
    sels = buildenv.get_selections()

    builddir = os.path.realpath('build')
    ensure_dir(buildenv.metadir)

    build_env_xml = join(buildenv.metadir, 'build-environment.xml')

    buildenv_doc = sels.toDOM()

    # Create build-environment.xml file
    root = buildenv_doc.documentElement
    info = buildenv_doc.createElementNS(XMLNS_0COMPILE, 'build-info')
    root.appendChild(info)
    info.setAttributeNS(None, 'time', time.strftime('%Y-%m-%d %H:%M').strip())
    info.setAttributeNS(None, 'host', socket.getfqdn())
    info.setAttributeNS(None, 'user', getpass.getuser())
    info.setAttributeNS(None, 'arch', '%s-%s' % (uname[0], uname[4]))
    stream = file(build_env_xml, 'w')
    buildenv_doc.writexml(stream, addindent="  ", newl="\n")
    stream.close()

    # Create local binary interface file.
    # We use the main feed for the interface as the template for the name,
    # summary, etc (note: this is not necessarily the feed that contained
    # the source code).
    master_feed = iface_cache.get_feed(buildenv.interface)
    src_impl = buildenv.chosen_impl(buildenv.interface)
    write_sample_feed(buildenv, master_feed, src_impl)

    # Check 0compile is new enough
    min_version = model.parse_version(src_impl.attrs.get(XMLNS_0COMPILE + ' min-version', None))
    if min_version and min_version > model.parse_version(__main__.version):
        raise SafeException("%s-%s requires 0compile >= %s, but we are only version %s" % (
            master_feed.get_name(), src_impl.version,
            model.format_version(min_version), __main__.version))

    # Create the patch
    patch_file = join(buildenv.metadir, 'from-%s.patch' % src_impl.version)
    if buildenv.user_srcdir:
        with open(patch_file, 'w') as stream:
            # (ignore errors; will already be shown on stderr)
            try:
                subprocess.call(["diff", "-urN", buildenv.orig_srcdir, 'src'], stdout = stream)
            except OSError as ex:
                print >>sys.stderr, "WARNING: Failed to run 'diff': ", ex
        if os.path.getsize(patch_file) == 0:
            os.unlink(patch_file)
    elif os.path.exists(patch_file):
        os.unlink(patch_file)

    env('BUILDDIR', builddir)
    env('DISTDIR', buildenv.distdir)
    env('SRCDIR', buildenv.user_srcdir or buildenv.orig_srcdir)
    env('BINARYFEED', buildenv.local_iface_file)
    os.chdir(builddir)
    print "cd", builddir

    setup = CompileSetup(iface_cache.stores, sels)
    setup.prepare_env()

    # These mappings are needed when mixing Zero Install -dev packages with
    # native package binaries.
    mappings = {}
    for impl in sels.selections.values():
        # Add mappings that have been set explicitly...
        new_mappings = impl.attrs.get(XMLNS_0COMPILE + ' lib-mappings', '')
        if new_mappings:
            new_mappings = new_mappings.split(' ')
            for mapping in new_mappings:
                assert ':' in mapping, "lib-mappings missing ':' in '%s' from '%s'" % (mapping, impl.feed)
                name, major_version = mapping.split(':', 1)
                assert '/' not in mapping, "lib-mappings '%s' contains a / in the version number (from '%s')!" % (mapping, impl.feed)
                if sys.platform == 'darwin':
                    mappings[name] = 'lib%s.%s.dylib' % (name, major_version)
                else:
                    mappings[name] = 'lib%s.so.%s' % (name, major_version)
        # Auto-detect required mappings where possible...
        # (if the -dev package is native, the symlinks will be OK)
        if not is_package_impl(impl):
            impl_path = lookup(impl)
            for libdirname in ['lib', 'usr/lib', 'lib64', 'usr/lib64']:
                libdir = os.path.join(impl_path, libdirname)
                if os.path.isdir(libdir):
                    find_broken_version_symlinks(libdir, mappings)

    if mappings:
        set_up_mappings(mappings)

    overrides_dir = os.path.join(os.environ['TMPDIR'], PKG_CONFIG_OVERRIDES)
    if os.path.isdir(overrides_dir):
        add_overrides = model.EnvironmentBinding('PKG_CONFIG_PATH', PKG_CONFIG_OVERRIDES)
        do_env_binding(add_overrides, os.environ['TMPDIR'])

    # Some programs want to put temporary build files in the source directory.
    # Make a copy of the source if needed.
    dup_src_type = src_impl.attrs.get(XMLNS_0COMPILE + ' dup-src', None)
    if dup_src_type == 'true':
        dup_src(shutil.copy2)
        env('SRCDIR', builddir)
    elif dup_src_type:
        raise Exception("Unknown dup-src value '%s'" % dup_src_type)

    if options.shell:
        spawn_and_check(find_in_path('cmd' if os.name == 'nt' else 'sh'), [])
    else:
        command = sels.commands[0].qdom.attrs.get('shell-command', None)
        if command is None:
            # New style <command>
            prog_args = setup.build_command(sels.interface, sels.command) + args
        else:
            # Old style shell-command='...'
            if os.name == 'nt':
                prog_args = ['cmd', '/c', command] + args
            else:
                prog_args = ['/bin/sh', '-c', command + ' "$@"', '-'] + args
            assert len(sels.commands) == 1

        # Remove any existing log files
        for log in ['build.log', 'build-success.log', 'build-failure.log']:
            if os.path.exists(log):
                os.unlink(log)

        # Run the command, copying output to a new log
        with open('build.log', 'w') as log:
            print >>log, "Build log for %s-%s" % (master_feed.get_name(), src_impl.version)
            print >>log, "\nBuilt using 0compile-%s" % __main__.version
            print >>log, "\nBuild system: " + ', '.join(uname)
            print >>log, "\n%s:\n" % ENV_FILE
            with open(os.path.join(os.pardir, ENV_FILE)) as properties_file:
                shutil.copyfileobj(properties_file, log)

            log.write('\n')

            if os.path.exists(patch_file):
                print >>log, "\nPatched with:\n"
                shutil.copyfileobj(file(patch_file), log)
                log.write('\n')

            if command:
                print "Executing: " + command, args
                print >>log, "Executing: " + command, args
            else:
                print "Executing: " + str(prog_args)
                print >>log, "Executing: " + str(prog_args)

            # Tee the output to the console and to the log
            child = subprocess.Popen(prog_args, stdout = subprocess.PIPE, stderr = subprocess.STDOUT)
            while True:
                data = os.read(child.stdout.fileno(), 100)
                if not data:
                    break
                sys.stdout.write(data)
                log.write(data)
            status = child.wait()
            failure = None
            if status == 0:
                print >>log, "Build successful"
                shorten_dynamic_library_install_names()
                fixup_generated_pkgconfig_files()
                remove_la_files()
            elif status > 0:
                failure = "Build failed with exit code %d" % status
            else:
                failure = "Build failure: exited due to signal %d" % (-status)
            if failure:
                print >>log, failure

        if failure:
            os.rename('build.log', 'build-failure.log')
            raise SafeException("Command '%s': %s" % (prog_args, failure))
        else:
            os.rename('build.log', 'build-success.log')