Example #1
    def __init__(self, log_file_path=None, verbose=False, rotate_size=0):
        logger = logging.getLogger()
        logger.setLevel(logging.DEBUG)

        sh = logging.StreamHandler(sys.stdout)
        if verbose:
            sh.setLevel(logging.DEBUG)
        else:
            sh.setLevel(logging.INFO)
        sh.setFormatter(logging.Formatter('%(message)s'))
        logger.addHandler(sh)

        if log_file_path:
            log_dir = os.path.dirname(log_file_path)
            utils.create_directory(log_dir)

            logger.info("Logs available at %s" % log_file_path)

            rfh = logging.handlers.RotatingFileHandler(log_file_path,
                                                       maxBytes=rotate_size,
                                                       backupCount=1)
            rfh.setLevel(logging.DEBUG)
            rfh.setFormatter(
                logging.Formatter(
                    '%(asctime)s | %(levelname)s | %(name)s: %(message)s'))
            logger.addHandler(rfh)
Example #2
    def _save(self):
        utils.create_directory(self.result_dir)
        latest_dir = os.path.join(os.path.dirname(self.result_dir),
                                  LATEST_SYMLINK_NAME)
        utils.force_symlink(self.timestamp, latest_dir)

        iso_dir = "/%s/%s/iso" % (self.version, self.arch)
        iso_files = "%s/*" % iso_dir

        LOG.info("Saving ISO files %s at %s" % (iso_files, self.result_dir))
        self.mock.run_command("--copyout %s %s" % (iso_files, self.result_dir))
Example #3
def set_handler_file(logger_name, logpath):

    logformat = '%(asctime)s %(process)d %(levelname)-10s %(module)s %(funcName)-4s %(message)s'

    utils.create_directory(os.path.dirname(logpath))
    handler = logging.FileHandler(logpath)
    handler.set_name('%s-file' % logger_name)

    handler.setLevel(logging.DEBUG)
    handler.setFormatter(logging.Formatter(logformat))

    return handler
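
The handler returned by set_handler_file() still has to be attached to a logger by the caller. A minimal usage sketch, assuming the root logger and a hypothetical logger name and log path (neither appears in the snippet above):

import logging

# Hypothetical name and path, for illustration only.
logger = logging.getLogger()
logger.setLevel(logging.DEBUG)
logger.addHandler(set_handler_file('builds', '/var/log/host-os/builds.log'))
logger.debug("File logging enabled")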
Example #4
    def __init__(self, name, force_rebuild=True):
        """
        Create a new package instance.

        Args:
            name: package name
            force_rebuild: whether to force the rebuild of the package
                in case its build results are already up-to-date
        """
        self.name = name
        self.clone_url = None
        self.download_source = None
        self.install_dependencies = []
        self.build_dependencies = []
        self.build_cache_dir = os.path.join(BUILD_CACHE_DIR, self.name)
        self.sources = []
        self.repository = None
        self.build_files = None
        self.download_build_files = []
        utils.create_directory(PACKAGES_REPOS_TARGET_PATH)
        self.lock_file_path = os.path.join(PACKAGES_REPOS_TARGET_PATH,
                                           self.name + ".lock")
        self.force_rebuild = force_rebuild

        # Dependency packages may be present in these directories in older
        # versions of the package metadata. This keeps compatibility.
        OLD_DEPENDENCIES_DIRS = ["build_dependencies", "dependencies"]
        PACKAGES_DIRS = [""] + OLD_DEPENDENCIES_DIRS
        versions_repo_url = CONF.get('common').get(
            'packages_metadata_repo_url')
        versions_repo_name = os.path.basename(
            os.path.splitext(versions_repo_url)[0])
        versions_repo_target_path = os.path.join(PACKAGES_REPOS_TARGET_PATH,
                                                 versions_repo_name)
        for rel_packages_dir in PACKAGES_DIRS:
            packages_dir = os.path.join(versions_repo_target_path,
                                        rel_packages_dir)
            package_dir = os.path.join(packages_dir, self.name)
            package_file = os.path.join(package_dir, self.name + ".yaml")
            if os.path.isfile(package_file):
                self.package_dir = package_dir
                self.package_file = package_file
                break
        else:
            raise exception.PackageDescriptorError(
                "Failed to find %s's YAML descriptor" % self.name)

        self._load()
Example #5
    def _save(self):
        utils.create_directory(self.result_dir)
        latest_dir = os.path.join(os.path.dirname(self.result_dir), LATEST_DIR)
        utils.force_symlink(self.timestamp, latest_dir)

        iso_file = "%s-DVD-%s-%s.iso" % (self.distro, self.arch, self.version)
        checksum_file = ("%s-%s-%s-CHECKSUM" %
                         (self.distro, self.version, self.arch))
        iso_dir = "/%s/%s/iso" % (self.version, self.arch)
        iso_path = os.path.join(iso_dir, iso_file)
        checksum_path = os.path.join(iso_dir, checksum_file)
        chroot_files = "%s %s" % (iso_path, checksum_path)

        LOG.info("Saving ISO %s and checksum %s at %s" %
                 (iso_file, checksum_file, self.result_dir))
        self._run_mock_command("--copyout %s %s" %
                               (chroot_files, self.result_dir))
Example #6
    def create_repository(self):
        """
        Create yum repository in build results directory.
        """
        result_dir = CONF.get('common').get('result_dir')
        build_results_dir = os.path.join(
            result_dir, 'packages', self.timestamp)
        utils.run_command("createrepo %s" % build_results_dir)

        repo_config_dir = os.path.join(result_dir, "repository_config")
        utils.create_directory(repo_config_dir)
        repo_config_path = os.path.join(
            repo_config_dir, self.timestamp + ".repo")
        with open(repo_config_path, "w") as repo_config:
            repo_config.write(YUM_REPO_CONFIG_TEMPLATE.format(
                timestamp=self.timestamp,
                repo_path=os.path.abspath(build_results_dir)))
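
YUM_REPO_CONFIG_TEMPLATE is referenced above but not shown. A plausible sketch, assuming the standard yum .repo file format and reusing the repository naming seen in the later examples; the exact template is an assumption:

# Hypothetical template; the real constant is not part of the snippet above.
YUM_REPO_CONFIG_TEMPLATE = """\
[host-os-local-repo-{timestamp}]
name=OpenPOWER Host OS local repository built at {timestamp}
baseurl=file://{repo_path}
enabled=1
gpgcheck=0
"""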
Example #7
    def __init__(self, config):
        self.config = config
        self.work_dir = self.config.get('work_dir')
        self.timestamp = datetime.datetime.now().isoformat()
        self.result_dir = os.path.join(self.config.get('result_dir'),
                                       'iso', self.timestamp)
        self.distro = self.config.get("iso_name")
        self.version = (self.config.get("iso_version")
                        or datetime.date.today().strftime("%y%m%d"))
        (_, _, self.arch) = distro_utils.detect_distribution()
        self.pungi_binary = self.config.get('pungi_binary') or "pungi"
        self.pungi_args = self.config.get('pungi_args') or ""
        self.build_iso = self.config.get('iso')
        self.build_install_tree = self.config.get('install_tree')

        self._init_mock()

        utils.create_directory(self.result_dir)
Example #8
    def __init__(self, config):
        self.config = config
        self.work_dir = self.config.get('work_dir')
        self.timestamp = datetime.datetime.now().isoformat()
        self.result_dir = os.path.join(self.config.get('result_dir'), 'iso',
                                       self.timestamp)
        self.distro = self.config.get("iso_name")
        self.version = (self.config.get("iso_version")
                        or datetime.date.today().strftime("%y%m%d"))
        (_, _, self.arch) = distro_utils.detect_distribution()
        self.pungi_binary = self.config.get('pungi_binary') or "pungi"
        self.pungi_args = self.config.get('pungi_args') or ""
        self.build_iso = self.config.get('iso')
        self.build_install_tree = self.config.get('install_tree')

        self._init_mock()

        utils.create_directory(self.result_dir)
Example #9
    def create_repository(self):
        """
        Create yum repository in build results directory.
        """
        result_dir = CONF.get('result_dir')
        build_results_dir = os.path.join(
            result_dir, 'packages', self.timestamp)
        yum_repository.create_repository(build_results_dir)

        repo_short_name = "host-os-local-repo-{timestamp}".format(**vars(self))
        repo_long_name = ("OpenPOWER Host OS local repository built at "
                          "{timestamp}".format(**vars(self)))
        repo_url = "file://" + os.path.abspath(build_results_dir)
        repo_config = yum_repository.create_repository_config(
            repo_short_name, repo_long_name, repo_url)

        repo_config_dir = os.path.join(result_dir, "repository_config")
        utils.create_directory(repo_config_dir)
        repo_config_path = os.path.join(
            repo_config_dir, self.timestamp + ".repo")
        with open(repo_config_path, "w") as repo_config_file:
            repo_config_file.write(repo_config)
Example #10
    def create_repository(self):
        """
        Create yum repository in build results directory.
        """
        result_dir = CONF.get('result_dir')
        build_results_dir = os.path.join(result_dir, 'packages',
                                         self.timestamp)
        yum_repository.create_repository(build_results_dir)

        repo_short_name = "host-os-local-repo-{timestamp}".format(**vars(self))
        repo_long_name = ("OpenPOWER Host OS local repository built at "
                          "{timestamp}".format(**vars(self)))
        repo_url = "file://" + os.path.abspath(build_results_dir)
        repo_config = yum_repository.create_repository_config(
            repo_short_name, repo_long_name, repo_url)

        repo_config_dir = os.path.join(result_dir, "repository_config")
        utils.create_directory(repo_config_dir)
        repo_config_path = os.path.join(repo_config_dir,
                                        self.timestamp + ".repo")
        with open(repo_config_path, "w") as repo_config_file:
            repo_config_file.write(repo_config)
Example #11
    def __init__(self, log_file_path=None, verbose=False, rotate_size=0):
        logger = logging.getLogger()
        logger.setLevel(logging.DEBUG)

        sh = logging.StreamHandler(sys.stdout)
        if verbose:
            sh.setLevel(logging.DEBUG)
        else:
            sh.setLevel(logging.INFO)
        sh.setFormatter(logging.Formatter('%(message)s'))
        logger.addHandler(sh)

        if log_file_path:
            log_dir = os.path.dirname(log_file_path)
            utils.create_directory(log_dir)

            logger.info("Logs available at %s" % log_file_path)

            rfh = logging.handlers.RotatingFileHandler(
                log_file_path, maxBytes=rotate_size, backupCount=1)
            rfh.setLevel(logging.DEBUG)
            rfh.setFormatter(logging.Formatter(
                '%(asctime)s | %(levelname)s | %(name)s: %(message)s'))
            logger.addHandler(rfh)
Example #12
    def __init__(self, name, force_rebuild=True):
        """
        Create a new package instance.

        Args:
            name: package name
            force_rebuild: whether to force the rebuild of the package
                in case its build results are already up-to-date
        """
        self.name = name
        self.clone_url = None
        self.download_source = None
        self.install_dependencies = []
        self.build_dependencies = []
        self.build_cache_dir = os.path.join(BUILD_CACHE_DIR, self.name)
        self.sources = []
        self.repository = None
        self.build_files = None
        self.download_build_files = []
        utils.create_directory(PACKAGES_REPOS_TARGET_PATH)
        self.lock_file_path = os.path.join(
            PACKAGES_REPOS_TARGET_PATH, self.name + ".lock")
        self.force_rebuild = force_rebuild
        self.built = False

        # Dependency packages may be present in these directories in older
        # versions of the package metadata. This keeps compatibility.
        OLD_DEPENDENCIES_DIRS = ["build_dependencies", "dependencies"]
        PACKAGES_DIRS = [""] + OLD_DEPENDENCIES_DIRS
        versions_repo = get_versions_repository(CONF)

        for rel_packages_dir in PACKAGES_DIRS:
            packages_dir = os.path.join(
                versions_repo.working_tree_dir, rel_packages_dir)
            package_dir = os.path.join(packages_dir, self.name)
            package_file = os.path.join(package_dir, self.name + ".yaml")
            if os.path.isfile(package_file):
                self.package_dir = package_dir
                self.package_file = package_file
                break
        else:
            raise exception.PackageDescriptorError(
                "Failed to find %s's YAML descriptor" % self.name)

        # load package metadata YAML file
        self._load()

        # get global config information which may override package YAML
        # Package option fields are separated by `#` character and the expected
        # format can be one of the following:
        #
        #   package_name#repo_url#branch_name#revision_id
        #   package_name#repo_url#reference
        #   package_name##reference
        #   package_name
        packages_options = CONF.get('packages') or []
        for package_option in packages_options:
            package_parts = package_option.split("#")
            if package_parts[0] == name:
                # cancel if there are no sources
                if not self.sources:
                    break
                # assume that the first source is the main one and that a source
                # override targets it
                source_type = list(self.sources[0].keys())[0]
                main_source = self.sources[0][source_type]
                if len(package_parts) == 1:
                    pass
                elif len(package_parts) == 2:
                    if package_parts[1]:
                        main_source["src"] = package_parts[1]
                elif len(package_parts) == 3:
                    if package_parts[1]:
                        main_source["src"] = package_parts[1]
                    if package_parts[2]:
                       main_source["commit_id"] = package_parts[2]
                elif len(package_parts) == 4:
                    if package_parts[1]:
                        main_source["src"] = package_parts[1]
                    if package_parts[2]:
                       main_source["branch"] = package_parts[2]
                    if package_parts[3]:
                       main_source["commit_id"] = package_parts[3]
                else:
                    raise "Too many parts (%s) in package option %s" % (
                        len(package_parts), package_option)
                break
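
The `#`-separated override format documented in the comments above can be illustrated with a purely hypothetical entry (package name, URL, branch and commit are made up):

# Hypothetical package option following the documented format:
#   package_name#repo_url#branch_name#revision_id
package_option = "kernel#https://example.com/kernel.git#stable#a1b2c3d"
name, src, branch, commit_id = package_option.split("#")
# With four parts, the code above overrides the main source's "src",
# "branch" and "commit_id" fields with these values.
print(name, src, branch, commit_id)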
Example #13
    with open(REQUIRED_PACKAGES_FILE_PATH) as f:
        required_packages = f.read().splitlines()
    missing_packages = [
        p for p in required_packages if not is_package_installed(p)
    ]
    if missing_packages:
        print("Following packages should be installed before running this "
              "script: %s" % ", ".join(missing_packages))
        sys.exit(MISSING_PACKAGES_ERROR)

    if os.getuid() == 0:
        print("Please, do not run this command as root.")
        sys.exit(TOO_MUCH_PRIVILEGE_ERROR)

    if subcommand in MOCK_REQUIRED_SUBCOMANDS:
        mock_users = grp.getgrnam('mock').gr_mem
        user = os.environ['USER']
        if user not in mock_users:
            print("User must be in mock group, please run "
                  "'sudo usermod -a -G mock $(whoami)'")
            sys.exit(INSUFFICIENT_PRIVILEGE_ERROR)

    create_directory(CONF.get('work_dir'))

    return_code = 0
    try:
        SUBCOMMANDS[subcommand].run(CONF)
    except exception.BaseException as exc:
        LOG.exception("Command %s failed." % subcommand)
        return_code = exc.error_code
    sys.exit(return_code)
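
is_package_installed() is called by this snippet but never defined in it. A minimal sketch under the assumption of an RPM-based system (consistent with the mock and createrepo tooling used elsewhere on this page), where `rpm -q` exits non-zero for a missing package:

import os
import subprocess

def is_package_installed(package_name):
    # Hypothetical helper, for illustration only.
    with open(os.devnull, 'w') as devnull:
        return subprocess.call(['rpm', '-q', package_name],
                               stdout=devnull, stderr=devnull) == 0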
Example #14
    def __init__(self, name, force_rebuild=True):
        """
        Create a new package instance.

        Args:
            name: package name
            force_rebuild: whether to force the rebuild of the package
                in case its build results are already up-to-date
        """
        self.name = name
        self.clone_url = None
        self.download_source = None
        self.install_dependencies = []
        self.build_dependencies = []
        self.build_cache_dir = os.path.join(BUILD_CACHE_DIR, self.name)
        self.sources = []
        self.repository = None
        self.build_files = None
        self.download_build_files = []
        utils.create_directory(PACKAGES_REPOS_TARGET_PATH)
        self.lock_file_path = os.path.join(PACKAGES_REPOS_TARGET_PATH,
                                           self.name + ".lock")
        self.force_rebuild = force_rebuild

        # Dependency packages may be present in these directories in older
        # versions of the package metadata. This keeps compatibility.
        OLD_DEPENDENCIES_DIRS = ["build_dependencies", "dependencies"]
        PACKAGES_DIRS = [""] + OLD_DEPENDENCIES_DIRS
        versions_repo_url = CONF.get('common').get(
            'packages_metadata_repo_url')
        versions_repo_name = os.path.basename(
            os.path.splitext(versions_repo_url)[0])
        versions_repo_target_path = os.path.join(PACKAGES_REPOS_TARGET_PATH,
                                                 versions_repo_name)
        for rel_packages_dir in PACKAGES_DIRS:
            packages_dir = os.path.join(versions_repo_target_path,
                                        rel_packages_dir)
            package_dir = os.path.join(packages_dir, self.name)
            package_file = os.path.join(package_dir, self.name + ".yaml")
            if os.path.isfile(package_file):
                self.package_dir = package_dir
                self.package_file = package_file
                break
        else:
            raise exception.PackageDescriptorError(
                "Failed to find %s's YAML descriptor" % self.name)

        # load package metadata YAML file
        self._load()

        # get global config information which may override package YAML
        # Package option fields are separated by `#` character and the expected
        # format can be one of the following:
        #
        #   package_name#repo_url#branch_name#revision_id
        #   package_name#repo_url#reference
        #   package_name##reference
        #   package_name
        packages_options = CONF.get('build_packages').get('packages') or []
        for package_option in packages_options:
            package_parts = package_option.split("#")
            if package_parts[0] == name:
                # cancel if there are no sources
                if not self.sources:
                    break
                # assume that the first source is the main one and that a source
                # override targets it
                source_type = list(self.sources[0].keys())[0]
                main_source = self.sources[0][source_type]
                if len(package_parts) == 1:
                    pass
                elif len(package_parts) == 2:
                    if package_parts[1]:
                        main_source["src"] = package_parts[1]
                elif len(package_parts) == 3:
                    if package_parts[1]:
                        main_source["src"] = package_parts[1]
                    if package_parts[2]:
                        main_source["commit_id"] = package_parts[2]
                elif len(package_parts) == 4:
                    if package_parts[1]:
                        main_source["src"] = package_parts[1]
                    if package_parts[2]:
                        main_source["branch"] = package_parts[2]
                    if package_parts[3]:
                        main_source["commit_id"] = package_parts[3]
                else:
                    raise "Too many parts (%s) in package option %s" % (
                        len(package_parts), package_option)
                break
Example #15
    with open(REQUIRED_PACKAGES_FILE_PATH) as f:
        required_packages = f.read().splitlines()
    missing_packages = [p for p in required_packages
        if not is_package_installed(p)]
    if missing_packages:
        print("Following packages should be installed before running this "
              "script: %s" % ", ".join(missing_packages))
        sys.exit(MISSING_PACKAGES_ERROR)

    if os.getuid() == 0:
        print("Please, do not run this command as root.")
        sys.exit(TOO_MUCH_PRIVILEGE_ERROR)

    if subcommand in MOCK_REQUIRED_SUBCOMANDS:
        mock_users = grp.getgrnam('mock').gr_mem
        user = os.environ['USER']
        if user not in mock_users:
            print("User must be in mock group, please run "
                  "'sudo usermod -a -G mock $(whoami)'")
            sys.exit(INSUFFICIENT_PRIVILEGE_ERROR)

    create_directory(CONF.get('work_dir'))

    return_code = 0
    try:
        SUBCOMMANDS[subcommand].run(CONF)
    except exception.BaseException as exc:
        LOG.exception("Command %s failed." % subcommand)
        return_code = exc.error_code
    sys.exit(return_code)
Example #16
    with open(REQUIRED_PACKAGES_FILE_PATH) as f:
        required_packages = f.read().splitlines()
    missing_packages = [p for p in required_packages
        if not is_package_installed(p)]
    if missing_packages:
        print("Following packages should be installed before running this "
              "script: %s" % ", ".join(missing_packages))
        sys.exit(MISSING_PACKAGES_ERROR)

    if os.getuid() == 0:
        print("Please, do not run this command as root.")
        sys.exit(TOO_MUCH_PRIVILEGE_ERROR)

    if subcommand in MOCK_REQUIRED_SUBCOMANDS:
        mock_users = grp.getgrnam('mock').gr_mem
        user = os.environ['USER']
        if user not in mock_users:
            print("User must be in mock group, please run "
                  "'sudo usermod -a -G mock $(whoami)'")
            sys.exit(INSUFFICIENT_PRIVILEGE_ERROR)

    create_directory(CONF.get('common').get('work_dir'))

    return_code = 0
    try:
        SUBCOMMANDS[subcommand].run(CONF)
    except exception.BaseException as exc:
        LOG.exception("Command %s failed." % subcommand)
        return_code = exc.error_code
    sys.exit(return_code)
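
Every snippet on this page relies on a create_directory() helper whose implementation is not shown. A minimal sketch, assuming it only needs to create the directory (including missing parents) and tolerate one that already exists:

import errno
import os

def create_directory(path):
    # Sketch only; the project's real helper may differ.
    try:
        os.makedirs(path)
    except OSError as exc:
        if exc.errno != errno.EEXIST:
            raise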