Example #1
def _main(parser, opts):
    """The "main" main function so we can trace/profile."""
    # Initialize the logger before anything else.
    log_level = opts.log_level
    if log_level is None:
        if opts.debug:
            log_level = 'debug'
        elif opts.verbose:
            log_level = 'info'
        else:
            log_level = 'notice'
    log.setup_logging(log_level,
                      output=opts.log_file,
                      debug=opts.debug,
                      color=opts.color)

    # Parse the command line options.
    myconfigs = opts.configs
    if not myconfigs:
        myconfigs = [DEFAULT_CONFIG_FILE]
    myspecfile = opts.file
    mycmdline = opts.cli[:]

    if opts.snapshot:
        mycmdline.append('target=snapshot')
        mycmdline.append('version_stamp=' + opts.snapshot)

    conf_values['DEBUG'] = opts.debug
    conf_values['VERBOSE'] = opts.debug or opts.verbose

    options = set()
    if opts.fetchonly:
        options.add('fetch')
    if opts.purge:
        options.add('purge')
    if opts.purgeonly:
        options.add('purgeonly')
    if opts.purgetmponly:
        options.add('purgetmponly')
    if opts.clear_autoresume:
        options.add('clear-autoresume')

    # Make sure we have some work before moving further.
    if not myspecfile and not mycmdline:
        parser.error('please specify one of either -f or -C or -s')

    # made it this far so start by outputting our version info
    version()
    # import configuration file and import our main module using those settings
    parse_config(myconfigs)

    conf_values["options"].update(options)
    log.notice('conf_values[options] = %s', conf_values['options'])

    # initialize our contents generator
    contents_map = ContentsMap(CONTENTS_DEFINITIONS,
                               comp_prog=conf_values['comp_prog'],
                               decomp_opt=conf_values['decomp_opt'],
                               list_xattrs_opt=conf_values['list_xattrs_opt'])
    conf_values["contents_map"] = contents_map

    # initialize our hash generator
    hash_map = HashMap(HASH_DEFINITIONS)
    conf_values["hash_map"] = hash_map

    # initialize our (de)compression definitions
    conf_values['decompress_definitions'] = DECOMPRESS_DEFINITIONS
    conf_values['compress_definitions'] = COMPRESS_DEFINITIONS
    # TODO add capability to config/spec new definitions

    # Start checking that digests are valid now that hash_map is initialized
    if "digests" in conf_values:
        digests = set(conf_values['digests'].split())
        valid_digests = set(HASH_DEFINITIONS.keys())

        # Use the magic keyword "auto" to use all algos that are available.
        skip_missing = False
        if 'auto' in digests:
            skip_missing = True
            digests.remove('auto')
            if not digests:
                digests = set(valid_digests)

        # First validate all the requested digests are valid keys.
        if digests - valid_digests:
            log.critical(
                'These are not valid digest entries:\n'
                '%s\n'
                'Valid digest entries:\n'
                '%s', ', '.join(digests - valid_digests),
                ', '.join(sorted(valid_digests)))

        # Then check for any programs that the hash func requires.
        for digest in list(digests):  # iterate over a copy; entries may be removed below
            try:
                process.find_binary(hash_map.hash_map[digest].cmd)
            except process.CommandNotFound:
                # In auto mode, just ignore missing support.
                if skip_missing:
                    digests.remove(digest)
                    continue
                log.critical(
                    'The "%s" binary needed by digest "%s" was not found. '
                    'It needs to be in your system path.',
                    hash_map.hash_map[digest].cmd, digest)

        # Now reload the config with our updated value.
        conf_values['digests'] = ' '.join(digests)

    if "hash_function" in conf_values:
        if conf_values["hash_function"] not in HASH_DEFINITIONS:
            log.critical(
                '%s is not a valid hash_function entry\n'
                'Valid hash_function entries:\n'
                '%s', conf_values["hash_function"], HASH_DEFINITIONS.keys())
        try:
            process.find_binary(
                hash_map.hash_map[conf_values["hash_function"]].cmd)
        except process.CommandNotFound:
            log.critical(
                'The "%s" binary needed by hash_function "%s" was not found. '
                'It needs to be in your system path.',
                hash_map.hash_map[conf_values['hash_function']].cmd,
                conf_values['hash_function'])

    # detect GNU sed
    for sed in ('/usr/bin/gsed', '/bin/sed', '/usr/bin/sed'):
        if os.path.exists(sed):
            conf_values["sed"] = sed
            break

    addlargs = {}

    if myspecfile:
        log.notice("Processing spec file: %s", myspecfile)
        spec = catalyst.config.SpecParser(myspecfile)
        addlargs.update(spec.get_values())

    if mycmdline:
        try:
            cmdline = catalyst.config.ConfigParser()
            cmdline.parse_lines(mycmdline)
            addlargs.update(cmdline.get_values())
        except CatalystError:
            log.critical('Could not parse commandline')

    if "target" not in addlargs:
        raise CatalystError("Required value \"target\" not specified.")

    if os.getuid() != 0:
        # catalyst cannot be run as a normal user due to chroots, mounts, etc
        log.critical('This script requires root privileges to operate')

    # Namespaces aren't supported on *BSDs at the moment. So let's check
    # whether we're on Linux.
    if os.uname().sysname in ["Linux", "linux"]:
        # Start off by creating unique namespaces to run in.  Would be nice to
        # use pid & user namespaces, but snakeoil's namespace module has signal
        # transfer issues (CTRL+C doesn't propagate), and user namespaces need
        # more work due to Gentoo build process (uses sudo/root/portage).
        namespaces.simple_unshare(mount=True,
                                  uts=True,
                                  ipc=True,
                                  pid=False,
                                  net=False,
                                  user=False,
                                  hostname='catalyst')

    # everything is setup, so the build is a go
    try:
        success = build_target(addlargs)
    except KeyboardInterrupt:
        log.critical('Catalyst build aborted due to user interrupt (Ctrl-C)')
    if not success:
        sys.exit(2)
    sys.exit(0)
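
The "auto" digest handling above can be exercised on its own. The sketch below is a standalone approximation, not catalyst code: shutil.which stands in for snakeoil's process.find_binary, and the plain dict mapping digest names to helper binaries stands in for HASH_DEFINITIONS.

import shutil

def resolve_digests(requested, definitions):
    """Expand the magic 'auto' keyword and drop digests whose helper binary is missing."""
    digests = set(requested)
    valid = set(definitions)
    skip_missing = 'auto' in digests
    if skip_missing:
        digests.discard('auto')
        if not digests:
            digests = set(valid)
    invalid = digests - valid
    if invalid:
        raise ValueError('invalid digest entries: %s' % ', '.join(sorted(invalid)))
    for digest in list(digests):   # iterate over a copy; the set may shrink
        cmd = definitions[digest]  # hypothetical mapping: digest name -> helper binary
        if shutil.which(cmd) is None:
            if skip_missing:
                digests.discard(digest)
                continue
            raise ValueError('binary %r needed by digest %r was not found' % (cmd, digest))
    return digests

# e.g. resolve_digests(['auto'], {'sha512': 'sha512sum', 'blake2b': 'b2sum'})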
Example #2
def _main(parser, opts):
    """The "main" main function so we can trace/profile."""
    # Initialize the logger before anything else.
    log_level = opts.log_level
    if log_level is None:
        if opts.debug:
            log_level = 'debug'
        elif opts.verbose:
            log_level = 'info'
        else:
            log_level = 'notice'
    log.setup_logging(log_level,
                      output=opts.log_file,
                      debug=opts.debug,
                      color=opts.color)

    # Parse the command line options.
    myconfigs = opts.configs
    if not myconfigs:
        myconfigs = [DEFAULT_CONFIG_FILE]
    myspecfile = opts.file

    mycmdline = list()
    if opts.snapshot:
        mycmdline.append('target: snapshot')
        mycmdline.append('snapshot_treeish: ' + opts.snapshot)

    conf_values['DEBUG'] = opts.debug
    conf_values['VERBOSE'] = opts.debug or opts.verbose

    options = []
    if opts.fetchonly:
        options.append('fetch')
    if opts.purge:
        options.append('purge')
    if opts.purgeonly:
        options.append('purgeonly')
    if opts.purgetmponly:
        options.append('purgetmponly')
    if opts.clear_autoresume:
        options.append('clear-autoresume')
    if opts.enter_chroot:
        options.append('enter-chroot')

    # Make sure we have some work before moving further.
    if not myspecfile and not mycmdline:
        parser.error('please specify one of either -f or -C or -s')

    # made it this far so start by outputting our version info
    version()
    # import configuration file and import our main module using those settings
    parse_config(myconfigs)

    conf_values["options"].extend(options)
    log.notice('conf_values[options] = %s', conf_values['options'])

    # initialize our contents generator
    contents_map = ContentsMap(CONTENTS_DEFINITIONS,
                               comp_prog=conf_values['comp_prog'],
                               decomp_opt=conf_values['decomp_opt'],
                               list_xattrs_opt=conf_values['list_xattrs_opt'])
    conf_values["contents_map"] = contents_map

    # initialize our (de)compression definitions
    conf_values['decompress_definitions'] = DECOMPRESS_DEFINITIONS
    conf_values['compress_definitions'] = COMPRESS_DEFINITIONS
    # TODO add capability to config/spec new definitions

    if "digests" in conf_values:
        valid_digests = hashlib.algorithms_available
        digests = set(conf_values['digests'].split())
        conf_values['digests'] = digests

        # First validate all the requested digests are valid keys.
        if digests - valid_digests:
            raise CatalystError('These are not valid digest entries:\n%s\n'
                                'Valid digest entries:\n%s' %
                                (', '.join(sorted(digests - valid_digests)),
                                 ', '.join(sorted(valid_digests))))

    addlargs = {}

    if myspecfile:
        log.notice("Processing spec file: %s", myspecfile)
        spec = catalyst.config.SpecParser(myspecfile)
        addlargs.update(spec.get_values())

    if mycmdline:
        try:
            cmdline = catalyst.config.SpecParser()
            cmdline.parse_lines(mycmdline)
            addlargs.update(cmdline.get_values())
        except CatalystError:
            log.critical('Could not parse commandline')

    if "target" not in addlargs:
        raise CatalystError("Required value \"target\" not specified.")

    if os.getuid() != 0:
        # catalyst cannot be run as a normal user due to chroots, mounts, etc
        log.critical('This script requires root privileges to operate')

    # Start off by creating unique namespaces to run in.  Would be nice to
    # use pid & user namespaces, but snakeoil's namespace module has signal
    # transfer issues (CTRL+C doesn't propagate), and user namespaces need
    # more work due to Gentoo build process (uses sudo/root/portage).
    with namespace(uts=True, ipc=True, hostname='catalyst'):
        # everything is setup, so the build is a go
        try:
            success = build_target(addlargs)
        except KeyboardInterrupt:
            log.critical(
                'Catalyst build aborted due to user interrupt (Ctrl-C)')

    if not success:
        sys.exit(2)
    sys.exit(0)
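
Example #2 validates the requested digests directly against hashlib.algorithms_available instead of a catalyst-specific HASH_DEFINITIONS table. A minimal standalone sketch of that check, with ValueError standing in for CatalystError purely for illustration:

import hashlib

def validate_digests(requested):
    """Reject any digest name that this Python's hashlib cannot provide."""
    digests = set(requested)
    valid = hashlib.algorithms_available
    invalid = digests - valid
    if invalid:
        raise ValueError('These are not valid digest entries: %s\n'
                         'Valid digest entries: %s'
                         % (', '.join(sorted(invalid)), ', '.join(sorted(valid))))
    return digests

# e.g. validate_digests(['sha256', 'blake2b']) -> {'sha256', 'blake2b'}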
Example #3
def main(argv):
	parser = get_parser()
	opts = parser.parse_args(argv)

	# Initialize the logger before anything else.
	log_level = opts.log_level
	if log_level is None:
		if opts.debug:
			log_level = 'debug'
		elif opts.verbose:
			log_level = 'info'
		else:
			log_level = 'notice'
	log.setup_logging(log_level, output=opts.log_file, debug=opts.debug,
		color=opts.color)

	# Parse the command line options.
	myconfigs = opts.configs
	if not myconfigs:
		myconfigs = [DEFAULT_CONFIG_FILE]
	myspecfile = opts.file
	mycmdline = opts.cli[:]

	if opts.snapshot:
		mycmdline.append('target=snapshot')
		mycmdline.append('version_stamp=' + opts.snapshot)

	conf_values['DEBUG'] = opts.debug
	conf_values['VERBOSE'] = opts.debug or opts.verbose

	options = set()
	if opts.fetchonly:
		options.add('fetch')
	if opts.purge:
		options.add('purge')
	if opts.purgeonly:
		options.add('purgeonly')
	if opts.purgetmponly:
		options.add('purgetmponly')
	if opts.clear_autoresume:
		options.add('clear-autoresume')

	# Make sure we have some work before moving further.
	if not myspecfile and not mycmdline:
		parser.error('please specify one of either -f or -C or -s')

	# made it this far so start by outputting our version info
	version()
	# import configuration file and import our main module using those settings
	parse_config(myconfigs)

	conf_values["options"].update(options)
	log.debug('conf_values[options] = %s', conf_values['options'])

	# initialize our contents generator
	contents_map = ContentsMap(CONTENTS_DEFINITIONS)
	conf_values["contents_map"] = contents_map

	# initialize our hash generator
	hash_map = HashMap(HASH_DEFINITIONS)
	conf_values["hash_map"] = hash_map

	# initialize our (de)compression definitions
	conf_values['decompress_definitions'] = DECOMPRESS_DEFINITIONS
	conf_values['compress_definitions'] = COMPRESS_DEFINITIONS
	# TODO add capability to config/spec new definitions

	# Start checking that digests are valid now that hash_map is initialized
	if "digests" in conf_values:
		digests = set(conf_values['digests'].split())
		valid_digests = set(HASH_DEFINITIONS.keys())

		# Use the magic keyword "auto" to use all algos that are available.
		skip_missing = False
		if 'auto' in digests:
			skip_missing = True
			digests.remove('auto')
			if not digests:
				digests = set(valid_digests)

		# First validate all the requested digests are valid keys.
		if digests - valid_digests:
			log.critical(
				'These are not valid digest entries:\n'
				'%s\n'
				'Valid digest entries:\n'
				'%s',
				', '.join(digests - valid_digests),
				', '.join(sorted(valid_digests)))

		# Then check for any programs that the hash func requires.
		for digest in list(digests):  # iterate over a copy; entries may be removed below
			try:
				process.find_binary(hash_map.hash_map[digest].cmd)
			except process.CommandNotFound:
				# In auto mode, just ignore missing support.
				if skip_missing:
					digests.remove(digest)
					continue
				log.critical(
					'The "%s" binary needed by digest "%s" was not found. '
					'It needs to be in your system path.',
					hash_map.hash_map[digest].cmd, digest)

		# Now reload the config with our updated value.
		conf_values['digests'] = ' '.join(digests)

	if "hash_function" in conf_values:
		if conf_values["hash_function"] not in HASH_DEFINITIONS:
			log.critical(
				'%s is not a valid hash_function entry\n'
				'Valid hash_function entries:\n'
				'%s', conf_values["hash_function"], HASH_DEFINITIONS.keys())
		try:
			process.find_binary(hash_map.hash_map[conf_values["hash_function"]].cmd)
		except process.CommandNotFound:
			log.critical(
				'The "%s" binary needed by hash_function "%s" was not found. '
				'It needs to be in your system path.',
				hash_map.hash_map[conf_values['hash_function']].cmd,
				conf_values['hash_function'])

	addlargs = {}

	if myspecfile:
		spec = catalyst.config.SpecParser(myspecfile)
		addlargs.update(spec.get_values())

	if mycmdline:
		try:
			cmdline = catalyst.config.ConfigParser()
			cmdline.parse_lines(mycmdline)
			addlargs.update(cmdline.get_values())
		except CatalystError:
			log.critical('Could not parse commandline')

	if "target" not in addlargs:
		raise CatalystError("Required value \"target\" not specified.")

	if os.getuid() != 0:
		# catalyst cannot be run as a normal user due to chroots, mounts, etc
		log.critical('This script requires root privileges to operate')

	# everything is setup, so the build is a go
	try:
		success = build_target(addlargs)
	except KeyboardInterrupt:
		log.critical('Catalyst build aborted due to user interrupt (Ctrl-C)')
	if not success:
		sys.exit(2)
	sys.exit(0)
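
Example #3 feeds key=value entries such as 'target=snapshot' from the command line into addlargs via catalyst.config.ConfigParser. The helper below is only an illustrative approximation of that key=value parsing, not the real parser:

def parse_keyvalue_lines(lines):
    """Turn 'key=value' strings into a dict, roughly as the command-line path does."""
    values = {}
    for line in lines:
        key, sep, value = line.partition('=')
        if not sep:
            raise ValueError('malformed entry: %r' % line)
        values[key.strip()] = value.strip()
    return values

# e.g. parse_keyvalue_lines(['target=snapshot', 'version_stamp=20240101'])
# -> {'target': 'snapshot', 'version_stamp': '20240101'}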