def run(self, args): logger.debug("loading " + args.autobuild_filename) platform=common.get_current_platform() if args.clean_only: logger.info("packaging with --clean-only required") if args.check_license: logger.warning("The --skip-license-check option is deprecated; it now has no effect") if args.results_file and os.path.exists(args.results_file): if args.dry_run: logger.info("would have removed previous results: %s" % args.results_file) else: logger.debug("clearing previous results: %s" % args.results_file) os.remove(args.results_file) config = configfile.ConfigurationDescription(args.autobuild_filename) build_dirs = common.select_directories(args, config, "build", "packaging", lambda cnf: config.get_build_directory(cnf, platform)) if not build_dirs: build_dirs = [config.get_build_directory(None, platform)] is_clean = True for build_dir in build_dirs: package(config, build_dir, platform, archive_filename=args.archive_filename, archive_format=args.archive_format, clean_only=args.clean_only, results_file=args.results_file, dry_run=args.dry_run)
def run(self, args):
    config = configfile.ConfigurationDescription(args.config_file)
    arg_dict = _process_key_value_arguments(args.argument)

    args.func(config, arg_dict, args.delete)

    if not args.dry_run and args.subparser_name != 'print':
        config.save()
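# Illustration only: _process_key_value_arguments() above is assumed to turn
# repeated KEY=VALUE command-line tokens into a dict for args.func(). The
# sketch below shows that assumed behavior; the name _example_process_key_value_arguments
# and its exact error handling are hypothetical, not the shipped helper.
def _example_process_key_value_arguments(arguments):
    # Turn ["name=foo", "version=1.0"] into {"name": "foo", "version": "1.0"},
    # logging and skipping any token without an '=' separator.
    dictionary = {}
    for argument in arguments:
        try:
            key, value = argument.split('=', 1)
        except ValueError:
            logger.error("argument '%s' is not in key=value form" % argument)
            continue
        dictionary[key] = value
    return dictionary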
def run(self, args):
    platform = common.get_current_platform()
    # sets id (even if not specified),
    # and stores it in the AUTOBUILD_BUILD_ID environment variable
    common.establish_build_id(args.build_id)
    config = configfile.ConfigurationDescription(args.config_file)
    package_errors = configfile.check_package_attributes(config)
    if package_errors:
        raise ConfigurationError("%s\n in configuration %s"
                                 % (package_errors, args.config_file))
    current_directory = os.getcwd()
    try:
        build_configurations = common.select_configurations(args, config, "configuring for")
        if not build_configurations:
            logger.error("no applicable configurations found.\n"
                         "did you remember to mark a configuration as default?\n"
                         "autobuild cowardly refuses to do nothing!")

        for build_configuration in build_configurations:
            # Get enriched environment based on the current configuration
            environment = get_enriched_environment(build_configuration.name)
            # then get a copy of the config specific to this build configuration
            bconfig = config.copy()
            # and expand its $variables according to the environment.
            bconfig.expand_platform_vars(environment)
            # Re-fetch the build configuration so we have its expansions.
            build_configuration = bconfig.get_build_configuration(
                build_configuration.name, platform_name=platform)
            build_directory = bconfig.make_build_directory(
                build_configuration, platform=platform, dry_run=args.dry_run)
            if not args.dry_run:
                logger.debug("configuring in %s" % build_directory)
                os.chdir(build_directory)
            else:
                logger.info("configuring in %s" % build_directory)

            result = _configure_a_configuration(bconfig, build_configuration,
                                                args.additional_options, args.dry_run,
                                                environment=environment)
            if result != 0:
                raise ConfigurationError("default configuration returned %d" % result)
    finally:
        os.chdir(current_directory)
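# Illustration only: expand_platform_vars() above, paired with
# get_enriched_environment(), substitutes $variables in the per-configuration
# settings using values derived from the selected build configuration. The
# standalone sketch below shows that style of substitution on a single string;
# the real method walks the whole configuration, and the helper name here is
# made up for illustration.
import string

def _example_expand_vars(value, environment):
    # Expand "$AUTOBUILD_ADDRSIZE"-style references from the environment dict,
    # leaving any unknown variables untouched (safe_substitute).
    return string.Template(value).safe_substitute(environment)

# e.g. _example_expand_vars("build-vc120-$AUTOBUILD_ADDRSIZE",
#                           {"AUTOBUILD_ADDRSIZE": "64"})
# -> "build-vc120-64"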
def run(self, args):
    config = configfile.ConfigurationDescription(args.config_file)

    if args.command == 'add':
        add(config, args.name, args.archive, args.argument)
    elif args.command == 'edit':
        edit(config, args.name, args.archive, args.argument)
    elif args.command == 'remove':
        remove(config, args.name)
    elif args.command == 'print':
        print_installable(config, args.name)
    else:
        raise InstallablesError('unknown command %s' % args.command)

    if not args.dry_run and args.command != 'print':
        config.save()
def run(self, args): platform = common.get_current_platform() logger.debug("uninstalling for platform " + platform) installed_filename = args.installed_filename if os.path.isabs(installed_filename): installed_filenames = [installed_filename] else: # This logic handles the (usual) case when installed_filename is # relative to install_dir. Therefore we must figure out install_dir. # write packages into 'packages' subdir of build directory by default config = configfile.ConfigurationDescription(args.install_filename) # establish a build directory so that the install directory is relative to it build_configurations = common.select_configurations( args, config, "uninstalling for") if not build_configurations: logger.error( "no applicable configurations found.\n" "did you remember to mark a configuration as default?\n" "autobuild cowardly refuses to do nothing!") for build_configuration in build_configurations: # Get enriched environment based on the current configuration environment = get_enriched_environment( build_configuration.name) # then get a copy of the config specific to this build # configuration bconfig = config.copy() # and expand its $variables according to the environment. bconfig.expand_platform_vars(environment) # Re-fetch the build configuration so we have its expansions. build_configuration = bconfig.get_build_configuration( build_configuration.name, platform_name=platform) build_directory = bconfig.get_build_directory( build_configuration, platform_name=platform) logger.debug("build directory: %s" % build_directory) installed_filenames = \ [os.path.realpath(os.path.join(install_dir, installed_filename)) for install_dir in common.select_directories(args, config, "install", "uninstalling", lambda cnf: os.path.join(build_directory, "packages"))] logger.debug("installed filenames: %s" % installed_filenames) for installed_filename in installed_filenames: uninstall_packages(args, installed_filename, args.package, args.dry_run)
def run(self, args):
    platform = common.get_current_platform()
    config = configfile.ConfigurationDescription(args.config_file)

    if args.command == 'add':
        for p in args.pattern:
            add(config, platform, p)
    elif args.command == 'remove':
        for p in args.pattern:
            remove(config, platform, p)
    elif args.command == 'clear':
        clear(config, platform)
    elif args.command == 'print':
        print_manifest(config, platform)
    else:
        raise ManifestError('unknown command %s' % args.command)

    if not args.dry_run:
        config.save()
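# Usage illustration: the subcommand names below come directly from the
# args.command checks above (add/remove/clear/print) and the patterns map to
# args.pattern; exact option spellings elsewhere in the CLI are not shown here.
#
#   autobuild manifest add "*.dll" "include/*.h"   # add patterns for the current platform
#   autobuild manifest remove "*.dll"              # drop a previously added pattern
#   autobuild manifest clear                       # empty the platform's manifest
#   autobuild manifest print                       # show the current manifest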
def run(self, args):
    UTF8Writer = codecs.getwriter('utf8')
    sys.stdout = UTF8Writer(sys.stdout)

    platform = common.get_current_platform()
    logger.debug("installing platform " + platform)

    # load the list of packages to install
    logger.debug("loading " + args.install_filename)
    config = configfile.ConfigurationDescription(args.install_filename)

    # establish a build directory so that the install directory is relative to it
    build_configurations = common.select_configurations(args, config, "installing for")
    if not build_configurations:
        logger.error("no applicable configurations found.\n"
                     "did you remember to mark a configuration as default?\n"
                     "autobuild cowardly refuses to do nothing!")

    for build_configuration in build_configurations:
        # Get enriched environment based on the current configuration
        environment = get_enriched_environment(build_configuration.name)
        # then get a copy of the config specific to this build configuration
        bconfig = config.copy()
        # and expand its $variables according to the environment.
        bconfig.expand_platform_vars(environment)
        # Re-fetch the build configuration so we have its expansions.
        build_configuration = bconfig.get_build_configuration(
            build_configuration.name, platform_name=platform)
        build_directory = bconfig.get_build_directory(
            build_configuration, platform_name=platform)

        # write packages into 'packages' subdir of build directory
        install_dirs = \
            common.select_directories(args, bconfig, "install", "installing packages for",
                                      lambda cnf: os.path.join(
                                          bconfig.make_build_directory(
                                              cnf, platform=platform, dry_run=args.dry_run),
                                          "packages"))

        # get the absolute paths to the install dir and installed-packages.xml file
        for install_dir in install_dirs:
            install_dir = os.path.realpath(install_dir)
            install_packages(args, bconfig, install_dir, platform, args.package)
def run(self, args):
    config = configfile.ConfigurationDescription(args.config_file)
    configfile.pretty_print(config)
def run(self, args):
    platform = common.get_current_platform()
    metadata = None
    incomplete = ''

    if not args.source_file:
        # no file specified, so assume we are in a build tree and find the
        # metadata in the current build directory
        logger.info("searching for metadata in the current build tree")
        config_filename = args.config_filename
        config = configfile.ConfigurationDescription(config_filename)
        metadata_file = os.path.join(config.get_build_directory(args.configuration, platform),
                                     configfile.PACKAGE_METADATA_FILE)
        if not os.path.exists(metadata_file):
            logger.warning("No complete metadata file found; attempting to use partial data from installed files")
            # get the absolute path to the installed-packages.xml file
            args.all = False
            args.configurations = args.configuration
            install_dirs = common.select_directories(args, config, "install", "getting installed packages",
                                                     lambda cnf: os.path.join(
                                                         config.get_build_directory(cnf, platform),
                                                         "packages"))
            installed_pathname = os.path.join(os.path.realpath(install_dirs[0]), args.installed_filename)
            if os.path.exists(installed_pathname):
                # dummy up a metadata object, but don't create the file
                metadata = configfile.MetadataDescription()
                # use the package description from the configuration
                metadata.package_description = config.package_description
                metadata.add_dependencies(installed_pathname)
                incomplete = ' (possibly incomplete)'
            else:
                raise GraphError("No metadata found in current directory")
        else:
            metadata = configfile.MetadataDescription(path=metadata_file)
    elif args.source_file.endswith(".xml"):
        # the specified file is an xml file; assume it is a metadata file
        logger.info("searching for metadata in autobuild package metadata file %s" % args.source_file)
        metadata = configfile.MetadataDescription(path=args.source_file)
        if not metadata:
            raise GraphError("No metadata found in '%s'" % args.source_file)
    else:
        # assume that the file is a package archive and try to get metadata from it
        logger.info("searching for metadata in autobuild package file %s" % args.source_file)
        metadata_stream = extract_metadata_from_package(args.source_file, configfile.PACKAGE_METADATA_FILE)
        if metadata_stream is not None:
            metadata = configfile.MetadataDescription(stream=metadata_stream)
        if not metadata:
            raise GraphError("No metadata found in archive '%s'" % args.source_file)

    if metadata:
        graph = pydot.Dot(label=metadata['package_description']['name'] + incomplete
                          + ' dependencies for ' + platform,
                          graph_type='digraph')
        graph.set('overlap', 'false')
        graph.set('splines', 'true')
        graph.set('scale', '2')
        graph.set('smoothType', 'spring')
        graph.set('labelloc', 'top')
        graph.set('labeljust', 'center')
        graph.set_node_defaults(shape='box')

        def add_depends(graph, pkg):
            name = pkg['package_description']['name']
            # get_node can return a single Node instance, a list of Nodes, or None
            got = graph.get_node(name)
            try:
                pkg_node = got if got is None or isinstance(got, pydot.Node) else got[0]
            except IndexError:
                # some versions of pydot may return an empty list instead of None
                pkg_node = None
            if pkg_node is None:
                logger.debug(" graph adding package %s" % name)
                # can't use the dict .get to supply an empty string default for these,
                # because the value in the dict is None.
                pkg_version = pkg['package_description']['version'] if pkg['package_description']['version'] else ""
                pkg_build_id = pkg['build_id'] if pkg['build_id'] else ""
                # create the new node with name, version, and build id
                pkg_node = pydot.Node(name, label="%s\\n%s\\n%s" % (name, pkg_version, pkg_build_id))
                if 'dirty' in pkg and (pkg['dirty'] == 'True' or pkg['dirty'] is True):
                    logger.debug(" setting %s dirty: %s"
                                 % (name, ("missing" if 'dirty' not in pkg else "explicit")))
                    pkg_node.set_shape('ellipse')
                    pkg_node.set_style('dashed')
                graph.add_node(pkg_node)
                if 'dependencies' in pkg:
                    for dep_pkg in pkg['dependencies'].itervalues():
                        dep_name = dep_pkg['package_description']['name']
                        dep_node = add_depends(graph, dep_pkg)
                        logger.debug(" graph adding dependency %s -> %s" % (dep_name, name))
                        edge = pydot.Edge(dep_name, name)
                        if 'dirty' in dep_pkg and (dep_pkg['dirty'] == 'True' or dep_pkg['dirty'] is True):
                            edge.set_style('dashed')
                        graph.add_edge(edge)
            return pkg_node

        root = add_depends(graph, metadata)
        root.set_root('true')
        root.set_shape('octagon')

        if args.dot_file:
            try:
                dot_file = open(args.dot_file, 'wb')
            except IOError as err:
                raise GraphError("Unable to open dot file %s: %s" % (args.dot_file, err))
            dot_file.write(graph.to_string())
            dot_file.close()

        if args.display or args.graph_file:
            if args.graph_file:
                graph_file = args.graph_file
            else:
                graph_file = os.path.join(tempfile.gettempdir(),
                                          metadata['package_description']['name']
                                          + "_graph_" + args.graph_type + '.png')
            logger.info("writing %s" % graph_file)
            graph.write_png(graph_file, prog=args.graph_type)

            if args.display and not args.graph_file:
                webbrowser.open('file:' + graph_file)
        else:
            print "%s" % graph.to_string()
    else:
        raise GraphError("No metadata found")
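# Illustration only: extract_metadata_from_package(), used above, is expected
# to pull the package metadata member (configfile.PACKAGE_METADATA_FILE) out
# of a package archive and return it as a readable stream, or None if absent.
# The sketch below shows one way that could work for tar-based archives; the
# function name, tar-only handling, and lack of error reporting are
# assumptions, not the shipped implementation.
import os
import tarfile

def _example_extract_metadata(archive_path, metadata_name):
    # Open the archive ('r' mode lets tarfile auto-detect gzip/bzip2
    # compression) and return a stream for the first member whose basename
    # matches the requested metadata file name.
    tar = tarfile.open(archive_path, 'r')
    for member in tar.getmembers():
        if os.path.basename(member.name) == metadata_name:
            return tar.extractfile(member)
    return None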
def run(self, args):
    platform = common.get_current_platform()
    # sets id (even if not specified),
    # and stores it in the AUTOBUILD_BUILD_ID environment variable
    build_id = common.establish_build_id(args.build_id)
    config = configfile.ConfigurationDescription(args.config_file)
    package_errors = \
        configfile.check_package_attributes(config,
                                            additional_requirements=['version_file'])
    if package_errors:
        # Now that we've deprecated hard-coded version and started
        # requiring version_file instead, provide an explanation when it's
        # missing, instead of confounding a longtime autobuild user with
        # failure to meet a brand-new requirement.
        # Recall that package_errors is a str that also has an attrs
        # attribute. Only emit the verbose message if version_file is
        # actually one of the problematic attributes, the config file
        # had to be converted from an earlier file format, and the
        # original file format version predates version_file.
        # (A missing orig_ver attribute means a current autobuild.xml, which
        # is why we pass get() a default value that bypasses verbose.)
        # (version_file was introduced at AUTOBUILD_CONFIG_VERSION 1.3)
        if "version_file" in package_errors.attrs \
        and common.get_version_tuple(config.get("orig_ver", "1.3")) < (1, 3):
            verbose = """
New requirement: instead of stating a particular version number in the
%(xml)s file, we now require you to configure a version_file attribute.
This should be the path (relative to the build_directory) of a small text
file containing only the package version string. Freezing the version
number into %(xml)s means we often forget to update it there. Reading the
version number from a separate text file allows your build script to create
that file from data available in the package. version_file need not be in
the manifest; it's used only by 'autobuild build' to create package metadata.
""" % dict(xml=configfile.AUTOBUILD_CONFIG_FILE)
        else:
            verbose = ""
        # Now, regardless of the value of 'verbose', show the message.
        raise BuildError(''.join((package_errors,
                                  "\n in configuration ", args.config_file,
                                  verbose)))

    current_directory = os.getcwd()
    if args.clean_only:
        logger.info("building with --clean-only required")
    try:
        configure_first = not args.do_not_configure
        build_configurations = common.select_configurations(args, config, "building for")
        if not build_configurations:
            logger.error("no applicable configurations found.\n"
                         "did you remember to mark a configuration as default?\n"
                         "autobuild cowardly refuses to do nothing!")

        for build_configuration in build_configurations:
            # Get enriched environment based on the current configuration
            environment = get_enriched_environment(build_configuration.name)
            # then get a copy of the config specific to this build configuration
            bconfig = config.copy()
            # and expand its $variables according to the environment.
            bconfig.expand_platform_vars(environment)
            # Re-fetch the build configuration so we have its expansions.
            build_configuration = bconfig.get_build_configuration(
                build_configuration.name, platform_name=platform)
            build_directory = bconfig.make_build_directory(
                build_configuration, platform=platform, dry_run=args.dry_run)
            if not args.dry_run:
                logger.debug("building in %s" % build_directory)
                os.chdir(build_directory)
            else:
                logger.info("building in %s" % build_directory)

            if configure_first:
                result = _configure_a_configuration(bconfig, build_configuration,
                                                    args.build_extra_arguments, args.dry_run,
                                                    environment=environment)
                if result != 0:
                    raise BuildError("configuring default configuration returned %d" % result)

            result = _build_a_configuration(bconfig, build_configuration,
                                            platform_name=platform,
                                            extra_arguments=args.build_extra_arguments,
                                            dry_run=args.dry_run,
                                            environment=environment)

            # always make a clean copy of the build metadata, regardless of result
            metadata_file_name = configfile.PACKAGE_METADATA_FILE
            logger.debug("metadata file name: %s" % metadata_file_name)
            if os.path.exists(metadata_file_name):
                if not args.dry_run:
                    os.unlink(metadata_file_name)
                else:
                    logger.info("would have replaced %s" % metadata_file_name)

            if result != 0:
                raise BuildError("building configuration %s returned %d"
                                 % (build_configuration, result))

            # Create the metadata record for inclusion in the package
            metadata_file = configfile.MetadataDescription(path=metadata_file_name,
                                                           create_quietly=True)
            # COPY the package description from the configuration: we're
            # going to convert it to metadata format.
            metadata_file.package_description = \
                configfile.PackageDescription(bconfig.package_description)
            # A metadata package_description has a version attribute
            # instead of a version_file attribute.
            metadata_file.package_description.version = \
                metadata_file.package_description.read_version_file(build_directory)
            del metadata_file.package_description["version_file"]
            logger.info("built %s version %s"
                        % (metadata_file.package_description.name,
                           metadata_file.package_description.version))
            metadata_file.package_description.platforms = None  # omit data on platform configurations
            metadata_file.platform = platform
            metadata_file.configuration = build_configuration.name
            metadata_file.build_id = build_id

            # get the record of any installed packages
            logger.debug("installed files in " + args.installed_filename)
            # SL-773: This if/else partly replicates
            # common.select_directories() because our build_directory
            # comes from bconfig, which has been $-expanded.
            # The former select_directories() call produced (e.g.)
            # build-vc120-$AUTOBUILD_ADDRSIZE, which didn't exist.
            if args.select_dir:
                install_dir = args.select_dir
                logger.debug("specified metadata directory: {}".format(install_dir))
            else:
                # packages were written into 'packages' subdir of build directory by default
                install_dir = os.path.join(build_directory, "packages")
                logger.debug("metadata in build subdirectory: {}".format(install_dir))

            # load the list of already installed packages
            installed_pathname = os.path.realpath(
                os.path.join(install_dir, args.installed_filename))
            if os.path.exists(installed_pathname):
                metadata_file.add_dependencies(installed_pathname)
            else:
                logger.debug("no installed files found (%s)" % installed_pathname)

            if args.clean_only and metadata_file.dirty:
                raise BuildError(
                    "Build depends on local or legacy installables\n"
                    "  use 'autobuild install --list-dirty' to see problem packages\n"
                    "  rerun without --clean-only to allow building anyway")
            if not args.dry_run:
                metadata_file.save()
    finally:
        os.chdir(current_directory)
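# Illustration only: the verbose message above describes version_file as a
# small text file, relative to the build directory, containing only the
# package version string. read_version_file() presumably just reads and strips
# that file; the sketch below works under that assumption, and its name and
# error handling are hypothetical rather than the actual method.
def _example_read_version_file(package_description, build_directory):
    # Resolve version_file relative to the build directory and return the
    # stripped contents as the package version string.
    version_path = os.path.join(build_directory, package_description["version_file"])
    with open(version_path, 'r') as version_file:
        version = version_file.read().strip()
    if not version:
        raise BuildError("version file %s is empty" % version_path)
    return version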