def prepare_playback(self, file_path, fullscreen=True):
    """Stage the HTML harness in /tmp and start HTML5 video playback.

    @param file_path: The path to the video file to play.
    @param fullscreen: If True, switch the display to fullscreen.
    """
    # Stage the html file under /tmp so the browser can reach it.
    utils.get_file(os.path.join(self.bindir, 'video.html'),
                   '/tmp/playback_video.html')
    html_path = 'file:///tmp/playback_video.html'
    tab = self._resource._browser.tabs.New()
    tab.Navigate(html_path)
    self._player = native_html5_player.NativeHtml5Player(
            tab=tab,
            full_url=html_path,
            video_id='video',
            video_src_path=file_path)
    self._player.load_video()
    if fullscreen:
        self._display_facade.set_fullscreen(True)
def auto_kernel(job, path, subdir, tmp_dir, build_dir, leave=False):
    """
    Create a kernel object, dynamically selecting the appropriate class to use
    based on the path provided.

    @param job: The job object this kernel belongs to.
    @param path: Path to a kernel tarball/rpm, a '.list' file naming several
            kernels, or a bare rpm name to look up in the repositories.
    @param subdir: Results subdirectory for the kernel object.
    @param tmp_dir: Scratch directory used for downloads.
    @param build_dir: Directory the kernel will be built in.
    @param leave: Passed through to the source-kernel constructor.
    @return: An rpm_kernel_vendor object for rpm kernels, otherwise a
            source kernel object.
    @raise error.TestError: If more than one non-rpm kernel is supplied.
    """
    kernel_paths = [preprocess_path(path)]
    if kernel_paths[0].endswith('.list'):
        # Fetch the list of packages to install
        kernel_list = os.path.join(tmp_dir, 'kernel.list')
        utils.get_file(kernel_paths[0], kernel_list)
        # Read and close the list file explicitly; the original
        # open(...).readlines() leaked the handle to the GC.
        list_file = open(kernel_list)
        try:
            kernel_paths = [p.strip() for p in list_file.readlines()]
        finally:
            list_file.close()

    if kernel_paths[0].endswith('.rpm'):
        rpm_paths = []
        for kernel_path in kernel_paths:
            if os.path.exists(kernel_path):
                rpm_paths.append(kernel_path)
            else:
                # Fetch the rpm into the job's packages directory and pass
                # it to rpm_kernel.
                rpm_name = os.path.basename(kernel_path)

                # If the preprocessed path (kernel_path) is only a name then
                # search for the kernel in all the repositories, else fetch
                # the kernel from that specific path.
                job.pkgmgr.fetch_pkg(rpm_name,
                                     os.path.join(job.pkgdir, rpm_name),
                                     repo_url=os.path.dirname(kernel_path))

                rpm_paths.append(os.path.join(job.pkgdir, rpm_name))
        return rpm_kernel_vendor(job, rpm_paths, subdir)
    else:
        if len(kernel_paths) > 1:
            raise error.TestError("don't know what to do with more than "
                                  "one non-rpm kernel file")
        return kernel(job, kernel_paths[0], subdir, tmp_dir, build_dir,
                      leave)
def setup(setup_dir):
    """Stage the arc_camera3_test binary from the sysroot into ./bin."""
    binary = 'arc_camera3_test'
    source_dir = os.path.join(os.environ['SYSROOT'], 'usr', 'bin')
    target_dir = os.path.join(os.getcwd(), 'bin')
    os.mkdir(target_dir)
    utils.get_file(os.path.join(source_dir, binary),
                   os.path.join(target_dir, binary))
def run_once(self, options='', testlist=''): """ Passes the appropriate parameters to the testsuite. # Usage: $0 [options] [testlist] # check options # -raw test raw (default) # -cow test cow # -qcow test qcow # -qcow2 test qcow2 # -vpc test vpc # -vmdk test vmdk # -xdiff graphical mode diff # -nocache use O_DIRECT on backing file # -misalign misalign memory allocations # -n show me, do not run tests # -T output timestamps # -r randomize test order # # testlist options # -g group[,group...] include tests from these groups # -x group[,group...] exclude tests from these groups # NNN include test NNN # NNN-NNN include test range (eg. 012-021) @param qemu_path: Optional qemu install path. @param options: Options accepted by the testsuite. @param testlist: List of tests that will be executed (by default, all testcases will be executed). """ os.chdir(self.srcdir) test_dir = os.path.join(self.srcdir, "scratch") if not os.path.exists(test_dir): os.mkdir(test_dir) cmd = "./check" if options: cmd += " " + options if testlist: cmd += " " + testlist try: try: result = utils.system(cmd) except error.CmdError, e: failed_cases = re.findall("Failures: (\d+)", str(e)) for num in failed_cases: failed_name = num + ".out.bad" src = os.path.join(self.srcdir, failed_name) dest = os.path.join(self.resultsdir, failed_name) utils.get_file(src, dest) if failed_cases: e_msg = ("Qemu-iotests failed. Failed cases: %s" % failed_cases) else: e_msg = "Qemu-iotests failed" raise error.TestFail(e_msg) finally: src = os.path.join(self.srcdir, "check.log") dest = os.path.join(self.resultsdir, "check.log") utils.get_file(src, dest)
def setup(tarball, topdir):
    """Download (if needed), build and install MySQL 5.0.45 under topdir."""
    src_dir = os.path.join(topdir, 'src')
    if not os.path.exists(tarball):
        utils.get_file('http://mirror.x10.com/mirror/mysql/Downloads/MySQL-5.0/mysql-5.0.45.tar.gz',
                       tarball)
    utils.extract_tarball_to_dir(tarball, 'src')
    os.chdir(src_dir)
    utils.configure('--prefix=%s/mysql --enable-thread-safe-client' % topdir)
    utils.make('-j %d' % utils.count_cpus())
    utils.make('install')

    # MySQL doesn't create this directory on its own; database logs and
    # files live here.
    try:
        os.mkdir(topdir + '/mysql/var')
    except Exception:
        pass

    # Initialize the database.
    utils.system('%s/mysql/bin/mysql_install_db' % topdir)
    os.chdir(topdir)
def setup(tarball, topdir):
    """Fetch, build and install the MySQL 5.0.45 dependency into topdir."""
    build_src = os.path.join(topdir, 'src')
    if not os.path.exists(tarball):
        mysql_url = ('http://mirror.x10.com/mirror/mysql/Downloads/'
                     'MySQL-5.0/mysql-5.0.45.tar.gz')
        utils.get_file(mysql_url, tarball)
    utils.extract_tarball_to_dir(tarball, 'src')
    os.chdir(build_src)
    utils.configure('--prefix=%s/mysql --enable-thread-safe-client' % topdir)
    utils.make('-j %d' % utils.count_cpus())
    utils.make('install')

    # MySQL will not create its var directory itself; this is where the
    # database logs and files are created.
    try:
        os.mkdir(topdir + '/mysql/var')
    except Exception:
        pass

    # Initialize the database.
    utils.system('%s/mysql/bin/mysql_install_db' % topdir)
    os.chdir(topdir)
def fetch_and_patch(self):
    """Check out the configured git branch and apply any local patches."""
    if not self.repo:
        return
    virt_utils.get_git_branch(self.repo, self.branch, self.srcdir,
                              self.commit, self.lbranch)
    os.chdir(self.srcdir)
    for patch_url in self.patches:
        patch_name = os.path.basename(patch_url)
        utils.get_file(patch_url, os.path.join(self.srcdir, patch_name))
        utils.system('patch -p1 < %s' % patch_name)
def get_binaries(test, url=None):
    """Fetch and unpack Ceph binary tarball."""
    # Default to the gitbuilder artifact matching this machine's arch.
    machine = os.uname()[4]
    default_url = ('http://ceph.newdream.net/gitbuilder/output/ref/'
                   'origin_master/ceph.{machine}.tgz').format(machine=machine)
    if url is None:
        url = default_url
    tarball = os.path.join(test.tmpdir, 'ceph-bin.tgz')
    utils.get_file(url, tarball)
    utils.system('tar xzf {tarball} -C {bindir}'.format(tarball=tarball,
                                                        bindir=test.bindir))
    log.info('Finished unpacking binary tarball in: %s', test.bindir)
def setup(tarball, topdir):
    """Build and install PostgreSQL 8.3.1 into topdir/pgsql."""
    src_dir = os.path.join(topdir, "src")
    if not os.path.exists(tarball):
        utils.get_file("ftp://ftp.postgresql.org/pub/source/v8.3.1/"
                       "postgresql-8.3.1.tar.bz2", tarball)
    utils.extract_tarball_to_dir(tarball, "src")
    os.chdir(src_dir)
    utils.configure("--without-readline --without-zlib --enable-debug "
                    "--prefix=%s/pgsql" % topdir)
    utils.make("-j %d" % utils.count_cpus())
    utils.make("install")
    os.chdir(topdir)
def __init__(self, job, build_dir, config_dir, orig_file, overrides, defconfig=False, name=None, make=None): self.build_dir = build_dir self.config_dir = config_dir # 1. Get original config file self.build_config = build_dir + '/.config' if (orig_file == '' and not defconfig and not make): # use user default set = job.config_get("kernel.default_config_set") defconf = None if set and name: defconf = config_by_name(name, set) if not defconf: defconf = job.config_get("kernel.default_config") if defconf: orig_file = defconf if (orig_file == '' and not make and defconfig): # use defconfig make = 'defconfig' if (orig_file == '' and make): # use the config command print "kernel_config: using " + make + " to configure kernel" os.chdir(build_dir) make_return = utils.system('make %s > /dev/null' % make) self.config_record(make) if (make_return): raise error.TestError('make % failed' % make) else: print "kernel_config: using " + orig_file + \ " to configure kernel" self.orig_config = config_dir + '/config.orig' utils.get_file(orig_file, self.orig_config) self.update_config(self.orig_config, self.orig_config + '.new') diff_configs(self.orig_config, self.orig_config + '.new') # 2. Apply overrides if overrides: print "kernel_config: using " + overrides + \ " to re-configure kernel" self.over_config = config_dir + '/config.over' overrides_local = self.over_config + '.changes' utils.get_file(overrides, overrides_local) apply_overrides(self.build_config, overrides_local, self.over_config) self.update_config(self.over_config, self.over_config + '.new') diff_configs(self.over_config, self.over_config + '.new') else: self.over_config = self.orig_config
def setup(tarball, topdir):
    """Build and install PostgreSQL 8.3.1 under topdir/pgsql."""
    source_dir = os.path.join(topdir, 'src')
    if not os.path.exists(tarball):
        pg_url = ('ftp://ftp.postgresql.org/pub/source/v8.3.1/'
                  'postgresql-8.3.1.tar.bz2')
        utils.get_file(pg_url, tarball)
    utils.extract_tarball_to_dir(tarball, 'src')
    os.chdir(source_dir)
    utils.system('./configure --without-readline --without-zlib '
                 '--enable-debug --prefix=%s/pgsql' % topdir)
    utils.system('make -j %d' % utils.count_cpus())
    utils.system('make install')
    os.chdir(topdir)
def setup(tarball, topdir):
    """Fetch, build and install libnet under topdir/libnet."""
    src_dir = os.path.join(topdir, 'src')
    if not os.path.exists(tarball):
        utils.get_file('http://www.packetfactory.net/libnet/dist/libnet.tar.gz',
                       tarball)
    utils.extract_tarball_to_dir(tarball, 'src')
    os.chdir(src_dir)
    utils.configure('--prefix=%s/libnet' % topdir)
    utils.make()
    utils.make('install')
    os.chdir(topdir)
def setup(tarball, topdir):
    """Download, build and install libnet into topdir/libnet."""
    build_dir = os.path.join(topdir, 'src')
    if not os.path.exists(tarball):
        libnet_url = 'http://www.packetfactory.net/libnet/dist/libnet.tar.gz'
        utils.get_file(libnet_url, tarball)
    utils.extract_tarball_to_dir(tarball, 'src')
    os.chdir(build_dir)
    utils.system('./configure --prefix=%s/libnet' % topdir)
    utils.system('make')
    utils.system('make install')
    os.chdir(topdir)
def get_binaries(test, url=None):
    """Fetch and unpack Ceph binary tarball."""
    arch = os.uname()[4]
    if url is None:
        # No explicit URL: use the per-architecture gitbuilder artifact.
        url = ('http://ceph.newdream.net/gitbuilder/output/ref/'
               'origin_master/ceph.{machine}.tgz').format(machine=arch)
    tarball = os.path.join(test.tmpdir, 'ceph-bin.tgz')
    utils.get_file(url, tarball)
    untar_cmd = 'tar xzf {tarball} -C {bindir}'.format(tarball=tarball,
                                                       bindir=test.bindir)
    utils.system(untar_cmd)
    log.info('Finished unpacking binary tarball in: %s', test.bindir)
def get_patches(self, patches): """fetch the patches to the local src_dir""" local_patches = [] for patch in patches: dest = os.path.join(self.src_dir, os.path.basename(patch)) # FIXME: this isn't unique. Append something to it # like wget does if it's not there? print "get_file %s %s %s %s" % (patch, dest, self.src_dir, os.path.basename(patch)) utils.get_file(patch, dest) # probably safer to use the command, not python library md5sum = utils.system_output('md5sum ' + dest).split()[0] local_patches.append((patch, dest, md5sum)) return local_patches
def setup(tarball, topdir):
    """Build and install PostgreSQL 8.3.1 (archive mirror) into topdir."""
    src_dir = os.path.join(topdir, 'src')
    if not os.path.exists(tarball):
        utils.get_file('ftp://ftp-archives.postgresql.org/pub/source/v8.3.1/'
                       'postgresql-8.3.1.tar.bz2', tarball)
    utils.extract_tarball_to_dir(tarball, 'src')
    os.chdir(src_dir)
    utils.configure('--without-readline --without-zlib --enable-debug '
                    '--prefix=%s/pgsql' % topdir)
    utils.make('-j %d' % utils.count_cpus())
    utils.make('install')
    os.chdir(topdir)
def setup(tarball, topdir):
    """Fetch, build and install pgpool-II against the pgsql dep."""
    # FIXME - Waiting to be able to specify dependency.
    #self.job.setup_dep(['pgsql'])
    src_dir = os.path.join(topdir, 'src')
    if not os.path.exists(tarball):
        pgpool_url = ('http://pgfoundry.org/frs/download.php/1083/'
                      'pgpool-II-1.0.1.tar.gz')
        utils.get_file(pgpool_url, tarball)
    utils.extract_tarball_to_dir(tarball, 'src')
    os.chdir(src_dir)
    # FIXEME - Waiting to be able to use self.autodir instead of
    # os.environ['AUTODIR']
    utils.system('./configure --prefix=%s/pgpool --with-pgsql=%s/deps/pgsql/pgsql'
                 % (topdir, os.environ['AUTODIR']))
    utils.system('make -j %d' % utils.count_cpus())
    utils.system('make install')
    os.chdir(topdir)
def get_kernel_tree(self, base_tree): """Extract/link base_tree to self.build_dir""" # if base_tree is a dir, assume uncompressed kernel if os.path.isdir(base_tree): print 'Symlinking existing kernel source' os.symlink(base_tree, self.build_dir) # otherwise, extract tarball else: os.chdir(os.path.dirname(self.src_dir)) # Figure out local destination for tarball tarball = os.path.join(self.src_dir, os.path.basename(base_tree)) utils.get_file(base_tree, tarball) print 'Extracting kernel tarball:', tarball, '...' utils.extract_tarball_to_dir(tarball, self.build_dir)
def __init__(self, job, build_dir, config_dir, orig_file, overrides, defconfig = False, name = None, make = None): self.build_dir = build_dir self.config_dir = config_dir # 1. Get original config file self.build_config = build_dir + '/.config' if (orig_file == '' and not defconfig and not make): # use user default set = job.config_get("kernel.default_config_set") defconf = None if set and name: defconf = config_by_name(name, set) if not defconf: defconf = job.config_get("kernel.default_config") if defconf: orig_file = defconf if (orig_file == '' and not make and defconfig): # use defconfig make = 'defconfig' if (orig_file == '' and make): # use the config command print "kernel_config: using " + make + " to configure kernel" os.chdir(build_dir) make_return = utils.system('make %s > /dev/null' % make) self.config_record(make) if (make_return): raise error.TestError('make % failed' % make) else: print "kernel_config: using " + orig_file + \ " to configure kernel" self.orig_config = config_dir + '/config.orig' utils.get_file(orig_file, self.orig_config) self.update_config(self.orig_config, self.orig_config+'.new') diff_configs(self.orig_config, self.orig_config+'.new') # 2. Apply overrides if overrides: print "kernel_config: using " + overrides + \ " to re-configure kernel" self.over_config = config_dir + '/config.over' overrides_local = self.over_config + '.changes' utils.get_file(overrides, overrides_local) apply_overrides(self.build_config, overrides_local, self.over_config) self.update_config(self.over_config, self.over_config+'.new') diff_configs(self.over_config, self.over_config+'.new') else: self.over_config = self.orig_config
def postprocess_iteration(self):
    """
    Copies the log files to the results dir.
    """
    src = os.path.join(self.srcdir, "check.log")
    dest = os.path.join(self.resultsdir, "qemu_iotests.log")
    # BUG FIX: these progress messages were logged at CRITICAL, which is
    # reserved for fatal conditions; routine copies belong at debug level.
    logging.debug('First copy')
    utils.get_file(src, dest)

    # copy the failed cases' log file to self.resultsdir
    if self.failed_cases:
        for num in self.failed_cases:
            failed_name = num + ".out.bad"
            src = os.path.join(self.srcdir, failed_name)
            dest = os.path.join(self.resultsdir, failed_name)
            logging.debug('Second copy')
            utils.get_file(src, dest)
def setup(tarball, topdir):
    """Download, build and install pgpool-II linked against the pgsql dep."""
    # FIXME - Waiting to be able to specify dependency.
    #self.job.setup_dep(['pgsql'])
    build_src = os.path.join(topdir, 'src')
    if not os.path.exists(tarball):
        utils.get_file('http://pgfoundry.org/frs/download.php/1083/'
                       'pgpool-II-1.0.1.tar.gz', tarball)
    utils.extract_tarball_to_dir(tarball, 'src')
    os.chdir(build_src)
    # FIXEME - Waiting to be able to use self.autodir instead of
    # os.environ['AUTODIR']
    utils.configure('--prefix=%s/pgpool --with-pgsql=%s/deps/pgsql/pgsql'
                    % (topdir, os.environ['AUTODIR']))
    utils.make('-j %d' % utils.count_cpus())
    utils.make('install')
    os.chdir(topdir)
def get_kernel_tree(self, base_tree): """Extract/link base_tree to self.build_dir""" # if base_tree is a dir, assume uncompressed kernel if os.path.isdir(base_tree): print 'Symlinking existing kernel source' if os.path.islink(self.build_dir): os.remove(self.build_dir) os.symlink(base_tree, self.build_dir) # otherwise, extract tarball else: os.chdir(os.path.dirname(self.src_dir)) # Figure out local destination for tarball tarball = os.path.join(self.src_dir, os.path.basename(base_tree.split(';')[0])) utils.get_file(base_tree, tarball) print 'Extracting kernel tarball:', tarball, '...' utils.extract_tarball_to_dir(tarball, self.build_dir)
def setup(topdir):
    """Download TDL library tarball and unpack to src/, then install
    remaining files/ into src/.

    @param topdir: The directory of this deps.
    """
    archive = 'tdl-0.0.2.tar.gz'
    src_dir = os.path.join(topdir, 'src')
    files_dir = os.path.join(topdir, 'files')
    archive_path = os.path.join(files_dir, archive)
    # Start from a clean src/ tree every time.
    shutil.rmtree(src_dir, ignore_errors=True)
    if not os.path.exists(archive_path):
        utils.get_file('http://github.com/greggman/tdl/archive/0.0.2.tar.gz',
                       archive_path)
    os.mkdir(src_dir)
    utils.extract_tarball_to_dir(archive_path, src_dir)
    os.chdir(src_dir)
    shutil.copy(os.path.join(files_dir, 'WebGLClear.html'), src_dir)
def __init__(self, test, params):
    """
    Initialize class parameters and retrieve code from git repositories.

    @param test: kvm test object.
    @param params: Dictionary with test parameters.
    @raise error.TestError: If the userspace git repository is not set.
    """
    super(GitInstaller, self).__init__(test, params)

    kernel_repo = params.get("git_repo")
    user_repo = params.get("user_git_repo")
    kmod_repo = params.get("kmod_repo")
    test_repo = params.get("test_git_repo")

    kernel_branch = params.get("kernel_branch", "master")
    user_branch = params.get("user_branch", "master")
    kmod_branch = params.get("kmod_branch", "master")
    test_branch = params.get("test_branch", "master")

    kernel_lbranch = params.get("kernel_lbranch", "master")
    user_lbranch = params.get("user_lbranch", "master")
    kmod_lbranch = params.get("kmod_lbranch", "master")
    test_lbranch = params.get("test_lbranch", "master")

    kernel_commit = params.get("kernel_commit", None)
    user_commit = params.get("user_commit", None)
    kmod_commit = params.get("kmod_commit", None)
    test_commit = params.get("test_commit", None)

    # NOTE(security): eval() executes arbitrary code from the config file;
    # acceptable only because test params come from trusted configs.
    kernel_patches = eval(params.get("kernel_patches", "[]"))
    user_patches = eval(params.get("user_patches", "[]"))
    # BUG FIX: this previously read "user_patches", so kmod patching
    # silently reused the userspace patch list.
    kmod_patches = eval(params.get("kmod_patches", "[]"))

    if not user_repo:
        message = "KVM user git repository path not specified"
        logging.error(message)
        raise error.TestError(message)

    userspace_srcdir = os.path.join(self.srcdir, "kvm_userspace")
    kvm_utils.get_git_branch(user_repo, user_branch, userspace_srcdir,
                             user_commit, user_lbranch)
    self.userspace_srcdir = userspace_srcdir

    if user_patches:
        os.chdir(self.userspace_srcdir)
        for patch in user_patches:
            utils.get_file(patch, os.path.join(self.userspace_srcdir,
                                               os.path.basename(patch)))
            # BUG FIX: feed the patch through stdin; without '<', patch
            # treats the file name as the file to modify and blocks
            # waiting for a diff on stdin.
            utils.system('patch -p1 < %s' % os.path.basename(patch))

    if test_repo:
        test_srcdir = os.path.join(self.srcdir, "kvm-unit-tests")
        kvm_utils.get_git_branch(test_repo, test_branch, test_srcdir,
                                 test_commit, test_lbranch)
        unittest_cfg = os.path.join(test_srcdir, 'x86', 'unittests.cfg')
        self.test_srcdir = test_srcdir
    else:
        unittest_cfg = os.path.join(userspace_srcdir, 'kvm', 'test', 'x86',
                                    'unittests.cfg')

    self.unittest_cfg = None
    if os.path.isfile(unittest_cfg):
        self.unittest_cfg = unittest_cfg

    if kernel_repo:
        kernel_srcdir = os.path.join(self.srcdir, "kvm")
        kvm_utils.get_git_branch(kernel_repo, kernel_branch, kernel_srcdir,
                                 kernel_commit, kernel_lbranch)
        self.kernel_srcdir = kernel_srcdir
        if kernel_patches:
            os.chdir(self.kernel_srcdir)
            for patch in kernel_patches:
                # BUG FIX: download into the kernel tree being patched,
                # not the userspace tree.
                utils.get_file(patch,
                               os.path.join(self.kernel_srcdir,
                                            os.path.basename(patch)))
                utils.system('patch -p1 < %s' % os.path.basename(patch))
    else:
        self.kernel_srcdir = None

    if kmod_repo:
        kmod_srcdir = os.path.join(self.srcdir, "kvm_kmod")
        kvm_utils.get_git_branch(kmod_repo, kmod_branch, kmod_srcdir,
                                 kmod_commit, kmod_lbranch)
        self.kmod_srcdir = kmod_srcdir
        if kmod_patches:
            os.chdir(self.kmod_srcdir)
            for patch in kmod_patches:
                # BUG FIX: download into the kmod tree being patched.
                utils.get_file(patch,
                               os.path.join(self.kmod_srcdir,
                                            os.path.basename(patch)))
                utils.system('patch -p1 < %s' % os.path.basename(patch))
    else:
        self.kmod_srcdir = None

    configure_script = os.path.join(self.userspace_srcdir, 'configure')
    self.configure_options = check_configure_options(configure_script)
def _get_packages(self):
    """Download every package in pkg_path_list into the source directory."""
    for pkg_path in self.pkg_path_list:
        destination = os.path.join(self.srcdir, os.path.basename(pkg_path))
        utils.get_file(pkg_path, destination)
def _pull_code(self):
    """
    Retrieves code from git repositories.

    @raise error.TestError: If the userspace git repository is not set.
    """
    params = self.params

    kernel_repo = params.get("git_repo")
    user_repo = params.get("user_git_repo")
    kmod_repo = params.get("kmod_repo")

    kernel_branch = params.get("kernel_branch", "master")
    user_branch = params.get("user_branch", "master")
    kmod_branch = params.get("kmod_branch", "master")

    kernel_lbranch = params.get("kernel_lbranch", "master")
    user_lbranch = params.get("user_lbranch", "master")
    kmod_lbranch = params.get("kmod_lbranch", "master")

    kernel_commit = params.get("kernel_commit", None)
    user_commit = params.get("user_commit", None)
    kmod_commit = params.get("kmod_commit", None)

    # NOTE(security): eval() executes arbitrary code from the config;
    # tolerable only because test params come from trusted configs.
    kernel_patches = eval(params.get("kernel_patches", "[]"))
    user_patches = eval(params.get("user_patches", "[]"))
    # BUG FIX: previously fetched "user_patches" here too, so the kmod
    # tree was patched with the userspace patch list.
    kmod_patches = eval(params.get("kmod_patches", "[]"))

    if not user_repo:
        message = "KVM user git repository path not specified"
        logging.error(message)
        raise error.TestError(message)

    userspace_srcdir = os.path.join(self.srcdir, "kvm_userspace")
    kvm_utils.get_git_branch(user_repo, user_branch, userspace_srcdir,
                             user_commit, user_lbranch)
    self.userspace_srcdir = userspace_srcdir

    if user_patches:
        os.chdir(self.userspace_srcdir)
        for patch in user_patches:
            utils.get_file(patch,
                           os.path.join(self.userspace_srcdir,
                                        os.path.basename(patch)))
            # BUG FIX: redirect the patch into stdin; without '<', patch
            # interprets the name as the file to modify and blocks.
            utils.system('patch -p1 < %s' % os.path.basename(patch))

    if kernel_repo:
        kernel_srcdir = os.path.join(self.srcdir, "kvm")
        kvm_utils.get_git_branch(kernel_repo, kernel_branch, kernel_srcdir,
                                 kernel_commit, kernel_lbranch)
        self.kernel_srcdir = kernel_srcdir
        if kernel_patches:
            os.chdir(self.kernel_srcdir)
            for patch in kernel_patches:
                # BUG FIX: download into the kernel tree being patched,
                # not the userspace tree.
                utils.get_file(patch,
                               os.path.join(self.kernel_srcdir,
                                            os.path.basename(patch)))
                utils.system('patch -p1 < %s' % os.path.basename(patch))
    else:
        self.kernel_srcdir = None

    if kmod_repo:
        kmod_srcdir = os.path.join(self.srcdir, "kvm_kmod")
        kvm_utils.get_git_branch(kmod_repo, kmod_branch, kmod_srcdir,
                                 kmod_commit, kmod_lbranch)
        self.kmod_srcdir = kmod_srcdir
        if kmod_patches:
            os.chdir(self.kmod_srcdir)
            for patch in kmod_patches:
                # BUG FIX: download into the kmod tree being patched.
                utils.get_file(patch,
                               os.path.join(self.kmod_srcdir,
                                            os.path.basename(patch)))
                utils.system('patch -p1 < %s' % os.path.basename(patch))
    else:
        self.kmod_srcdir = None

    configure_script = os.path.join(self.userspace_srcdir, 'configure')
    self.configure_options = check_configure_options(configure_script)
def _pull_code(self):
    """
    Retrieves code from git repositories.

    @raise error.TestError: If the userspace git repository is not set.
    """
    params = self.params

    kernel_repo = params.get("git_repo")
    user_repo = params.get("user_git_repo")
    kmod_repo = params.get("kmod_repo")

    kernel_branch = params.get("kernel_branch", "master")
    user_branch = params.get("user_branch", "master")
    kmod_branch = params.get("kmod_branch", "master")

    kernel_lbranch = params.get("kernel_lbranch", "master")
    user_lbranch = params.get("user_lbranch", "master")
    kmod_lbranch = params.get("kmod_lbranch", "master")

    kernel_commit = params.get("kernel_commit", None)
    user_commit = params.get("user_commit", None)
    kmod_commit = params.get("kmod_commit", None)

    # NOTE(security): eval() executes arbitrary code from the config;
    # tolerable only because test params come from trusted configs.
    kernel_patches = eval(params.get("kernel_patches", "[]"))
    user_patches = eval(params.get("user_patches", "[]"))
    # BUG FIX: previously this also read "user_patches", so kmod patching
    # reused the userspace patch list.
    kmod_patches = eval(params.get("kmod_patches", "[]"))

    if not user_repo:
        message = "KVM user git repository path not specified"
        logging.error(message)
        raise error.TestError(message)

    userspace_srcdir = os.path.join(self.srcdir, "kvm_userspace")
    kvm_utils.get_git_branch(user_repo, user_branch, userspace_srcdir,
                             user_commit, user_lbranch)
    self.userspace_srcdir = userspace_srcdir

    if user_patches:
        os.chdir(self.userspace_srcdir)
        for patch in user_patches:
            utils.get_file(patch, os.path.join(self.userspace_srcdir,
                                               os.path.basename(patch)))
            # BUG FIX: apply the patch from stdin; without '<', patch
            # treats the file name as the file to modify and blocks.
            utils.system('patch -p1 < %s' % os.path.basename(patch))

    if kernel_repo:
        kernel_srcdir = os.path.join(self.srcdir, "kvm")
        kvm_utils.get_git_branch(kernel_repo, kernel_branch, kernel_srcdir,
                                 kernel_commit, kernel_lbranch)
        self.kernel_srcdir = kernel_srcdir
        if kernel_patches:
            os.chdir(self.kernel_srcdir)
            for patch in kernel_patches:
                # BUG FIX: download into the kernel tree being patched,
                # not the userspace tree.
                utils.get_file(patch, os.path.join(self.kernel_srcdir,
                                                   os.path.basename(patch)))
                utils.system('patch -p1 < %s' % os.path.basename(patch))
    else:
        self.kernel_srcdir = None

    if kmod_repo:
        kmod_srcdir = os.path.join(self.srcdir, "kvm_kmod")
        kvm_utils.get_git_branch(kmod_repo, kmod_branch, kmod_srcdir,
                                 kmod_commit, kmod_lbranch)
        self.kmod_srcdir = kmod_srcdir
        if kmod_patches:
            os.chdir(self.kmod_srcdir)
            for patch in kmod_patches:
                # BUG FIX: download into the kmod tree being patched.
                utils.get_file(patch, os.path.join(self.kmod_srcdir,
                                                   os.path.basename(patch)))
                utils.system('patch -p1 < %s' % os.path.basename(patch))
    else:
        self.kmod_srcdir = None

    configure_script = os.path.join(self.userspace_srcdir, 'configure')
    self.configure_options = check_configure_options(configure_script)
def get(self, src_package, dst_dir, rfilter=None, tag=None, build=None,
        arch=None):
    """
    Download a list of packages from the build system.

    This will download all packages originated from source package [package]
    with given [tag] or [build] for the architecture reported by the
    machine.

    @param src_package: Source package name.
    @param dst_dir: Destination directory for the downloaded packages.
    @param rfilter: Regexp filter, only download the packages that match
            that particular filter.
    @param tag: Build system tag.
    @param build: Build system ID.
    @param arch: Package arch. Useful when you want to download noarch
            packages.

    @return: List of paths with the downloaded rpm packages.
    @raise ValueError: If neither tag nor build is given, if the tag/build
            is unknown, or if no packages match the requested arch.
    """
    if build and build.isdigit():
        build = int(build)

    if tag and build:
        logging.info("Both tag and build parameters provided, ignoring tag "
                     "parameter...")

    if not tag and not build:
        raise ValueError("Koji install selected but neither koji_tag "
                         "nor koji_build parameters provided. Please "
                         "provide an appropriate tag or build name.")

    if not build:
        builds = self.session.listTagged(tag, latest=True, inherit=True,
                                         package=src_package)
        if not builds:
            raise ValueError("Tag %s has no builds of %s" %
                             (tag, src_package))
        info = builds[0]
    else:
        info = self.session.getBuild(build)

    if info is None:
        raise ValueError('No such brew/koji build: %s' % build)

    if arch is None:
        arch = utils.get_arch()

    rpms = self.session.listRPMs(buildID=info['id'], arches=arch)
    if not rpms:
        # BUG FIX: the format args must be a tuple; the original applied
        # '%' to arch alone and passed buildLabel() as a second ValueError
        # argument, raising TypeError instead of this message.
        raise ValueError("No %s packages available for %s" %
                         (arch, koji.buildLabel(info)))

    # Compile the filter once instead of once per rpm.
    filter_regexp = None
    if rfilter:
        filter_regexp = re.compile(rfilter, re.IGNORECASE)

    rpm_paths = []
    for rpm in rpms:
        rpm_name = koji.pathinfo.rpm(rpm)
        url = ("%s/%s/%s/%s/%s" % (self.koji_options['pkgurl'],
                                   info['package_name'],
                                   info['version'], info['release'],
                                   rpm_name))
        if filter_regexp:
            download = bool(filter_regexp.match(os.path.basename(rpm_name)))
        else:
            download = True
        if download:
            r = utils.get_file(url,
                               os.path.join(dst_dir, os.path.basename(url)))
            rpm_paths.append(r)

    return rpm_paths