def get_metadata_from_package(package_file, package=None):
    metadata_file_name = configfile.PACKAGE_METADATA_FILE
    metadata_file = extract_metadata_from_package(package_file,
                                                  metadata_file_name)
    if not metadata_file:
        logger.warning(
            "WARNING: Archive '%s' does not contain metadata; build will be marked as dirty"
            % os.path.basename(package_file))
        # Create a dummy metadata description for a package whose archive does not have one
        metadata = configfile.MetadataDescription()
        # split_tarname() returns a sequence like:
        # ("/some/path", ["boost", "1.39.0", "darwin", "20100222a"], ".tar.bz2")
        ignore_dir, from_name, ignore_ext = common.split_tarname(package_file)
        metadata.platform = from_name[2]
        metadata.build_id = from_name[3]
        metadata.configuration = 'unknown'
        if package is not None:
            if from_name[0] != package.name:
                raise InstallError("configured package name '%s' does not match name from archive '%s'" \
                                   % (package.name, from_name[0]))
            metadata.archive = package['platforms'][
                metadata.platform]['archive']
            metadata.package_description = package.copy()
        else:
            metadata.archive = configfile.ArchiveDescription()
            metadata.package_description = configfile.PackageDescription({})
        metadata.package_description.version = from_name[1]
        metadata.package_description.name = from_name[0]
        del metadata.package_description['platforms']
        metadata.dirty = True
    else:
        metadata = configfile.MetadataDescription(stream=metadata_file)
    return metadata
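

# A minimal, self-contained sketch (not autobuild code) of the filename-derived
# fallback above: it splits a conventional "<name>-<version>-<platform>-<build_id>"
# tarball name the way split_tarname()'s documented return value suggests. The
# helper name and the example archive path are assumptions for illustration only.
def _sketch_fields_from_tarname(package_file):
    import os  # local import keeps the sketch self-contained
    base = os.path.basename(package_file)
    for ext in ('.tar.bz2', '.tar.gz', '.tar.xz', '.zip'):
        if base.endswith(ext):
            base = base[:-len(ext)]
            break
    name, version, platform, build_id = base.split('-', 3)
    return dict(name=name, version=version, platform=platform, build_id=build_id)

# _sketch_fields_from_tarname("/some/path/boost-1.39.0-darwin-20100222a.tar.bz2")
# -> {'name': 'boost', 'version': '1.39.0', 'platform': 'darwin', 'build_id': '20100222a'}
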
def uninstall(package_name, installed_config):
    """
    Uninstall specified package_name: remove related files and delete
    package_name from the installed_config ConfigurationDescription.

    The modified installed_config is saved before this function returns.
    """
    try:
        # Retrieve this package's installed metadata (a MetadataDescription),
        # removing its entry from installed_config at the same time.
        package = configfile.MetadataDescription(parsed_llsd=installed_config.dependencies.pop(package_name))
    except KeyError:
        # If the package has never yet been installed, we're good.
        logger.debug("%s not installed, no uninstall needed" % package_name)
        return

    logger.info("uninstalling %s version %s" % (package_name, package.package_description.version))
    clean_files(os.path.join(common.get_current_build_dir(), package.install_dir), package.manifest)
    installed_config.save()
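

# Illustrative usage sketch (comments only; the file name and package name are
# assumptions, not autobuild defaults): a caller removes one package and relies
# on uninstall() itself saving the updated configuration, as noted above.
#
#   installed = configfile.ConfigurationDescription("installed-packages.xml")
#   uninstall("boost", installed)
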
def package(config, build_directory, platform_name, archive_filename=None, archive_format=None, clean_only=False, results_file=None, dry_run=False):
    """
    Create an archive for the given platform.
    Returns True if the archive is not dirty, False if it is
    """
    if not config.package_description:
        raise PackageError("no package description")
    package_description = config.package_description
    if not package_description.name:
        raise PackageError("no package name specified in configuration")
    if not package_description.license:
        raise PackageError("no license specified in configuration")
    ##  autobuild.xml's version_file is validated by build subcommand.
    ##  By this time we should only have to validate metadata package version;
    ##  this happens a few lines down, after reading metadata_file.
    if not os.path.isdir(build_directory):
        raise PackageError("build directory %s is not a directory" % build_directory)
    logger.info("packaging from %s" % build_directory)
    platform_description = config.get_platform(platform_name)
    files, missing = _get_file_list(platform_description, build_directory)
    if platform_name != common.PLATFORM_COMMON:
        try:
            common_files, common_missing = _get_file_list(config.get_platform(common.PLATFORM_COMMON), build_directory)
            files |= common_files
            missing.extend(common_missing)
        except configfile.ConfigurationError:
            pass  # We don't have a common platform defined, that is ok.
    if missing:
        raise PackageError("No files matched manifest specifiers:\n"+'\n'.join(missing))

    # add the manifest files to the metadata file (list does not include itself)
    metadata_file_name = configfile.PACKAGE_METADATA_FILE
    logger.debug("metadata file name: %s" % metadata_file_name)
    metadata_file_path = os.path.abspath(os.path.join(build_directory, metadata_file_name))
    metadata_file = configfile.MetadataDescription(path=metadata_file_path)
    if metadata_file.dirty:
        if clean_only:
            raise PackageError("Package depends on local or legacy installables\n"
                               "  use 'autobuild install --list-dirty' to see problem packages\n"
                               "  rerun without --clean-only to allow packaging anyway")
        else:
            logger.warning("WARNING: package depends on local or legacy installables\n"
                           "  use 'autobuild install --list-dirty' to see problem packages")
    if not getattr(metadata_file.package_description, 'version', None):
        raise PackageError("no version in metadata package_description -- "
                           "please verify %s version_file and rerun build" %
                           os.path.basename(config.path))
    if package_description.license_file:
        if package_description.license_file not in files:
            files.add(package_description.license_file)
    if 'source_directory' in metadata_file.package_description:
        del metadata_file.package_description['source_directory']
    disallowed_paths = [path for path in files if ".." in path or os.path.isabs(path)]
    if disallowed_paths:
        raise PackageError("Absolute paths or paths with parent directory elements are not allowed:\n  "
                           + "\n  ".join(sorted(disallowed_paths)) + "\n")
    metadata_file.manifest = files
    if metadata_file.build_id:
        build_id = metadata_file.build_id
    else:
        raise PackageError("no build_id in metadata - rerun build\n"
                           "  you may specify (--id <id>) or let it default to the date")
    if metadata_file.platform != platform_name:
        raise PackageError("build platform (%s) does not match current platform (%s)"
                           % (metadata_file.platform, platform_name))

    # printing unconditionally on stdout for backward compatibility
    # the Linden Lab build scripts no longer rely on this
    # (they use the --results-file option instead)
    print "packing %s" % package_description.name

    results = None
    results_dict = None
    if not dry_run:
        if results_file:
            try:
                results = open(results_file, 'wb')
            except IOError as err:
                raise PackageError("Unable to open results file %s:\n%s" % (results_file, err))

            results_dict = {"autobuild_package_name": package_description.name,
                            "autobuild_package_version": getattr(metadata_file.package_description, 'version', None),
                            "autobuild_package_clean": ("false" if metadata_file.dirty else "true"),
                            "autobuild_package_metadata": metadata_file_path}
        metadata_file.save()

    # add the metadata file name to the list of files _after_ putting that list in the metadata
    files.add(metadata_file_name)

    config_directory = os.path.dirname(config.path)
    if not archive_filename:
        tardir = config_directory
        tarname = _generate_archive_name(metadata_file.package_description, build_id, platform_name)
        tarfilename = os.path.join(tardir, tarname)
    elif os.path.isabs(archive_filename):
        tarfilename = archive_filename
    else:
        tarfilename = os.path.abspath(os.path.join(config_directory, archive_filename))
    logger.debug(tarfilename)
    if dry_run:
        for f in files:
            logger.info('would have added: ' + f)
    else:
        archive_description = platform_description.archive
        format = _determine_archive_format(archive_format, archive_description)
        if format in ('txz', 'tbz2', 'tgz'):
            _create_tarfile(tarfilename, format, build_directory, files, results, results_dict)
        elif format == 'zip':
            _create_zip_archive(tarfilename + '.zip', build_directory, files, results, results_dict)
        else:
            raise PackageError("archive format %s is not supported" % format)
    if not dry_run and results:
        results.close()
    return not metadata_file.dirty
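

# Illustrative call sketch (comments only; the paths and platform value are
# assumptions for demonstration): packaging a finished build into the default
# archive name next to the configuration file.
#
#   config = configfile.ConfigurationDescription("autobuild.xml")
#   clean = package(config,
#                   build_directory="build-darwin-64",
#                   platform_name="darwin64",
#                   dry_run=True)   # dry_run only logs what would be archived
#   # 'clean' is True when the package metadata is not marked dirty
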
    def run(self, args):
        platform=common.get_current_platform()
        metadata = None
        incomplete = ''
        if not args.source_file:
            # no file specified, so assume we are in a build tree and find the 
            # metadata in the current build directory
            logger.info("searching for metadata in the current build tree")
            config_filename = args.config_filename
            config = configfile.ConfigurationDescription(config_filename)
            metadata_file = os.path.join(config.get_build_directory(args.configuration, platform), configfile.PACKAGE_METADATA_FILE)
            if not os.path.exists(metadata_file):
                logger.warning("No complete metadata file found; attempting to use partial data from installed files")
                # get the absolute path to the installed-packages.xml file
                args.all = False
                args.configurations = args.configuration
                install_dirs = common.select_directories(args, config, "install", "getting installed packages",
                                                         lambda cnf:
                                                         os.path.join(config.get_build_directory(cnf, platform), "packages"))
                installed_pathname = os.path.join(os.path.realpath(install_dirs[0]), args.installed_filename)
                if os.path.exists(installed_pathname):
                    # dummy up a metadata object, but don't create the file
                    metadata = configfile.MetadataDescription()
                    # use the package description from the configuration
                    metadata.package_description = config.package_description
                    metadata.add_dependencies(installed_pathname)
                    incomplete = ' (possibly incomplete)'
                else:
                    raise GraphError("No metadata found in current directory")
            else:
                metadata = configfile.MetadataDescription(path=metadata_file)
        elif args.source_file.endswith(".xml"):
            # the specified file is an xml file; assume it is a metadata file
            logger.info("searching for metadata in autobuild package metadata file %s" % args.source_file)
            metadata = configfile.MetadataDescription(path=args.source_file)
            if not metadata:
                raise GraphError("No metadata found in '%s'" % args.source_file)
        else:
            # assume that the file is a package archive and try to get metadata from it
            logger.info("searching for metadata in autobuild package file %s" % args.source_file)
            metadata_stream = extract_metadata_from_package(args.source_file, configfile.PACKAGE_METADATA_FILE)
            if metadata_stream is not None:
                metadata = configfile.MetadataDescription(stream=metadata_stream)
                if not metadata:
                    raise GraphError("No metadata found in archive '%s'" % args.file)
            
        if metadata:
            graph = pydot.Dot(label=metadata['package_description']['name']+incomplete+' dependencies for '+platform, graph_type='digraph')
            graph.set('overlap', 'false')
            graph.set('splines', 'true')
            graph.set('scale', '2')
            graph.set('smoothType', 'spring')
            graph.set('labelloc', 'top')
            graph.set('labeljust', 'center')

            graph.set_node_defaults(shape='box')

            def add_depends(graph, pkg):
                name = pkg['package_description']['name']
                got = graph.get_node(name) # can return a single Node instance, a list of Nodes, or None 
                try:
                    pkg_node = got if got is None or isinstance(got, pydot.Node) else got[0]
                except IndexError: # some versions of pydot may return an empty list instead of None
                    pkg_node = None
                if pkg_node is None:
                    logger.debug(" graph adding package %s" % name)
                    # can't use the dict .get to supply an empty string default for these, 
                    # because the value in the dict is None.
                    pkg_version = pkg['package_description']['version'] if pkg['package_description']['version'] else ""
                    pkg_build_id = pkg['build_id'] if pkg['build_id'] else ""
                    # create the new node with name, version, and build id
                    pkg_node = pydot.Node(name, label="%s\\n%s\\n%s" % (name, pkg_version, pkg_build_id))
                    if 'dirty' in pkg and (pkg['dirty'] == 'True' or pkg['dirty'] is True):
                        logger.debug(" setting %s dirty: %s" % (name, ("missing" if 'dirty' not in pkg else "explicit")))
                        pkg_node.set_shape('ellipse')
                        pkg_node.set_style('dashed')
                    graph.add_node(pkg_node)
                    if 'dependencies' in pkg:
                        for dep_pkg in pkg['dependencies'].itervalues():
                            dep_name = dep_pkg['package_description']['name']
                            dep_node = add_depends(graph, dep_pkg)
                            logger.debug(" graph adding dependency %s -> %s" % (dep_name, name))
                            edge = pydot.Edge(dep_name, name)
                            if 'dirty' in dep_pkg and (dep_pkg['dirty'] == 'True' or dep_pkg['dirty'] is True):
                                edge.set_style('dashed')
                            graph.add_edge(edge)
                return pkg_node

            root = add_depends(graph, metadata)
            root.set_root('true')
            root.set_shape('octagon')

            if args.dot_file:
                try:
                    dot_file = open(args.dot_file, 'wb')
                except IOError as err:
                    raise GraphError("Unable to open dot file %s: %s" % (args.dot_file, err))
                dot_file.write(graph.to_string())
                dot_file.close()
                
            if args.display or args.graph_file:
                if args.graph_file:
                    graph_file = args.graph_file
                else:
                    graph_file = os.path.join(tempfile.gettempdir(), 
                                              metadata['package_description']['name'] + "_graph_" 
                                              + args.graph_type + '.png')
                logger.info("writing %s" % graph_file)
                graph.write_png(graph_file, prog=args.graph_type)
                if args.display and not args.graph_file:
                    webbrowser.open('file:'+graph_file)
            else:
                print "%s" % graph.to_string()

        else:
            raise GraphError("No metadata found")
def _get_new_metadata(config, args_name, args_archive, arguments):
    # Get any name/value pairs from the command line
    key_values = _dict_from_key_value_arguments(arguments)

    if args_archive and 'url' in key_values:
        raise InstallablesError("--archive (%s) and url (%s) may not both be specified" \
                                % (args_archive, key_values['url']))
    if args_archive:
        archive_path = args_archive.strip()
    elif 'url' in key_values:
        archive_path = key_values.pop('url')
    else:
        archive_path = None
    archive_file = None
    if archive_path:
        if _is_uri(archive_path):
            archive_url = archive_path
        else:
            archive_url = 'file://' + config.absolute_path(archive_path)
        archive_file = get_package_file(
            args_name,
            archive_url,
            hash_algorithm=key_values.get('hash_algorithm', 'md5'),
            expected_hash=key_values.get('hash', None))
        if archive_file:
            metadata = get_metadata_from_package(archive_file)
            metadata.archive = configfile.ArchiveDescription()
            metadata.archive.url = archive_url
            if 'hash' not in key_values:
                logger.warning("No hash specified, computing from %s" %
                               archive_file)
                metadata.archive['hash'] = common.compute_md5(archive_file)
                metadata.archive['hash_algorithm'] = 'md5'

    if archive_file is None:
        logger.warning(
            "Archive not downloaded; some integrity checks may not work")
        metadata = configfile.MetadataDescription(create_quietly=True)
        metadata.package_description = configfile.PackageDescription(
            dict(name=args_name))
        metadata.archive = configfile.ArchiveDescription()
        metadata.archive.url = archive_path

    package_name = _check_name(args_name, key_values, metadata)
    if metadata.package_description['name'] is None:
        metadata.package_description['name'] = package_name

    for description_key in _PACKAGE_ATTRIBUTES:
        if description_key in key_values:
            logger.warning(
                "specifying '%s' in the installable is no longer required\n  if it is in the package metadata"
                % description_key)
            if description_key in metadata.package_description \
              and metadata.package_description[description_key] is not None \
              and key_values[description_key] != metadata.package_description[description_key]:
                raise InstallablesError("command line %s (%s) does not match archive %s (%s)" \
                                        % (description_key, key_values[description_key],
                                           description_key, metadata.package_description[description_key]))
            else:
                metadata.package_description[description_key] = key_values.pop(
                    description_key)

    for archive_key in _ARCHIVE_ATTRIBUTES:
        if archive_key in key_values:
            if archive_key in metadata.archive \
              and metadata.archive[archive_key] \
              and key_values[archive_key] != metadata.archive[archive_key]:
                raise InstallablesError("command line %s (%s) does not match archive %s (%s)" \
                                        % (archive_key, key_values[archive_key],
                                           archive_key, metadata.archive[archive_key]))
            else:
                metadata.archive[archive_key] = key_values.pop(archive_key)

    if 'platform' in key_values:
        if 'platform' in metadata \
          and metadata['platform'] is not None \
          and key_values['platform'] != metadata['platform'] \
          and metadata['platform'] != common.PLATFORM_COMMON:
            raise InstallablesError("specified platform '%s' does not match archive platform '%s'" \
                                    % ( key_values['platform'], metadata['platform']))
        else:
            platform = key_values.pop('platform')
    else:
        if 'platform' in metadata \
          and metadata['platform'] is not None:
            platform = metadata['platform']
        else:
            raise InstallablesError("Unspecified platform")

    platform_description = configfile.PlatformDescription()
    platform_description.name = platform
    platform_description.archive = metadata.archive.copy()

    _warn_unused(key_values)
    return (metadata, platform_description)
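

# Illustrative sketch (comments only; the package name, url, and hash values are
# assumptions) of how command-line name=value pairs reach _get_new_metadata():
#
#   metadata, platform_description = _get_new_metadata(
#       config,
#       args_name='boost',
#       args_archive=None,
#       arguments=['platform=darwin',
#                  'hash=d41d8cd98f00b204e9800998ecf8427e',
#                  'url=http://example.com/boost-1.39.0-darwin-20100222a.tar.bz2'])
#
# platform_description.name comes from the 'platform' pair (or from the archive
# metadata), and platform_description.archive carries the url and hash.
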
    def run(self, args):
        platform = common.get_current_platform()
        # establish_build_id() sets the build id (even if none was specified)
        # and stores it in the AUTOBUILD_BUILD_ID environment variable
        build_id = common.establish_build_id(args.build_id)
        config = configfile.ConfigurationDescription(args.config_file)
        package_errors = \
            configfile.check_package_attributes(config,
                                                additional_requirements=['version_file'])
        if package_errors:
            # Now that we've deprecated hard-coded version and started
            # requiring version_file instead, provide an explanation when it's
            # missing, instead of confounding a longtime autobuild user with
            # failure to meet a brand-new requirement.
            # Recall that package_errors is a str that also has an attrs
            # attribute. Only emit the verbose message if version_file is
            # actually one of the problematic attributes, and the config file
            # had to be converted from an earlier file format, and the
            # original file format version predates version_file.
            # (missing orig_ver attribute means a current autobuild.xml, which
            # is why we pass get() a default value that bypasses verbose)
            # (version_file was introduced at AUTOBUILD_CONFIG_VERSION 1.3)
            if "version_file" in package_errors.attrs \
            and common.get_version_tuple(config.get("orig_ver", "1.3")) < (1, 3):
                verbose = """
New requirement: instead of stating a particular version number in the %(xml)s
file, we now require you to configure a version_file attribute. This should be
the path (relative to the build_directory) of a small text file containing
only the package version string. Freezing the version number into %(xml)s
means we often forget to update it there. Reading the version number from a
separate text file allows your build script to create that file from data
available in the package. version_file need not be in the manifest; it's used
only by 'autobuild build' to create package metadata.
""" % dict(xml=configfile.AUTOBUILD_CONFIG_FILE)
            else:
                verbose = ""
            # Now, regardless of the value of 'verbose', show the message.
            raise BuildError(''.join(
                (package_errors, "\n    in configuration ", args.config_file,
                 verbose)))
        current_directory = os.getcwd()
        if args.clean_only:
            logger.info("building with --clean-only required")
        try:
            configure_first = not args.do_not_configure
            build_configurations = common.select_configurations(
                args, config, "building for")
            if not build_configurations:
                logger.error(
                    "no applicable configurations found.\n"
                    "did you remember to mark a configuration as default?\n"
                    "autobuild cowardly refuses to do nothing!")

            for build_configuration in build_configurations:
                # Get enriched environment based on the current configuration
                environment = get_enriched_environment(
                    build_configuration.name)
                # then get a copy of the config specific to this build
                # configuration
                bconfig = config.copy()
                # and expand its $variables according to the environment.
                bconfig.expand_platform_vars(environment)
                # Re-fetch the build configuration so we have its expansions.
                build_configuration = bconfig.get_build_configuration(
                    build_configuration.name, platform_name=platform)
                build_directory = bconfig.make_build_directory(
                    build_configuration,
                    platform=platform,
                    dry_run=args.dry_run)
                if not args.dry_run:
                    logger.debug("building in %s" % build_directory)
                    os.chdir(build_directory)
                else:
                    logger.info("building in %s" % build_directory)

                if configure_first:
                    result = _configure_a_configuration(
                        bconfig,
                        build_configuration,
                        args.build_extra_arguments,
                        args.dry_run,
                        environment=environment)
                    if result != 0:
                        raise BuildError(
                            "configuring default configuration returned %d" %
                            result)
                result = _build_a_configuration(
                    bconfig,
                    build_configuration,
                    platform_name=platform,
                    extra_arguments=args.build_extra_arguments,
                    dry_run=args.dry_run,
                    environment=environment)
                # always make clean copy of the build metadata regardless of result
                metadata_file_name = configfile.PACKAGE_METADATA_FILE
                logger.debug("metadata file name: %s" % metadata_file_name)
                if os.path.exists(metadata_file_name):
                    if not args.dry_run:
                        os.unlink(metadata_file_name)
                    else:
                        logger.info("would have replaced %s" %
                                    metadata_file_name)
                if result != 0:
                    raise BuildError("building configuration %s returned %d" %
                                     (build_configuration, result))

                # Create the metadata record for inclusion in the package
                metadata_file = configfile.MetadataDescription(
                    path=metadata_file_name, create_quietly=True)
                # COPY the package description from the configuration: we're
                # going to convert it to metadata format.
                metadata_file.package_description = \
                    configfile.PackageDescription(bconfig.package_description)
                # A metadata package_description has a version attribute
                # instead of a version_file attribute.
                metadata_file.package_description.version = \
                    metadata_file.package_description.read_version_file(build_directory)
                del metadata_file.package_description["version_file"]
                logger.info("built %s version %s" %
                            (metadata_file.package_description.name,
                             metadata_file.package_description.version))
                metadata_file.package_description.platforms = None  # omit data on platform configurations
                metadata_file.platform = platform
                metadata_file.configuration = build_configuration.name
                metadata_file.build_id = build_id
                # get the record of any installed packages
                logger.debug("installed files in " + args.installed_filename)

                # SL-773: This if/else partly replicates
                # common.select_directories() because our build_directory
                # comes from bconfig, which has been $-expanded.
                # The former select_directories() call produced (e.g.)
                # build-vc120-$AUTOBUILD_ADDRSIZE, which didn't exist.
                if args.select_dir:
                    install_dir = args.select_dir
                    logger.debug(
                        "specified metadata directory: {}".format(install_dir))
                else:
                    # packages were written into 'packages' subdir of build directory by default
                    install_dir = os.path.join(build_directory, "packages")
                    logger.debug("metadata in build subdirectory: {}".format(
                        install_dir))

                # load the list of already installed packages
                installed_pathname = os.path.realpath(
                    os.path.join(install_dir, args.installed_filename))
                if os.path.exists(installed_pathname):
                    metadata_file.add_dependencies(installed_pathname)
                else:
                    logger.debug("no installed files found (%s)" %
                                 installed_pathname)
                if args.clean_only and metadata_file.dirty:
                    raise BuildError(
                        "Build depends on local or legacy installables\n"
                        "  use 'autobuild install --list-dirty' to see problem packages\n"
                        "  rerun without --clean-only to allow building anyway")
                if not args.dry_run:
                    metadata_file.save()
        finally:
            os.chdir(current_directory)
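
# Summary sketch (comments only) of what the metadata saved above carries after
# a successful non-dry-run build, per the assignments in run():
#   package_description.name / .version  (version read via read_version_file())
#   platform, configuration, build_id
#   dependencies  (merged from the installed-packages file when one is found)
#   dirty         (true when the build depends on local or legacy installables)
# The on-disk layout is produced by configfile.MetadataDescription.save() and
# is not reproduced here.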