Example 1
def get_platform_objs():
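	"""Return non-exclusive buildconf objects for every known platform, sorted for the web index."""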

	platforms = get_platforms()

	plat_objs = []
	for platform in platforms:
		plat_objs.append( packaging.buildconf(platform, exclusive=False) )

	return _web_index_sort(plat_objs)
Example 2
def get_platform_objs():

    platforms = get_platforms()

    plat_objs = []
    for platform in platforms:
        plat_objs.append(packaging.buildconf(platform, exclusive=False))

    return _web_index_sort(plat_objs)
Example 3
def find_base_distro(pack_name, distro_name):
    """Look at all the build hosts to see which gives the same distro_root"""

    plat_obj = packaging.buildconf(distro_name, exclusive=False)
    pack_obj = packaging.package(plat_obj, pack_name, bundle_obj=bundle_obj)

    target_base_path = pack_obj.package_relpath

    base_distro = ""
    for p in pack_obj.get_info_var("BUILD_HOSTS"):
        plat2 = packaging.buildconf(p, exclusive=False)
        pack2 = packaging.package(plat2, pack_name, bundle_obj=bundle_obj)

        if pack2.package_relpath == target_base_path:
            base_distro = p
            #print "Found base distro for %s,%s: %s" % ( pack_name, distro_name, p)
            break

    return base_distro
Example 4
def find_base_distro(pack_name, distro_name):
	"""Look at all the build hosts to see which gives the same distro_root"""

	plat_obj = packaging.buildconf(distro_name, exclusive=False)
	pack_obj = packaging.package(plat_obj, pack_name, bundle_obj=bundle_obj)

	target_base_path = pack_obj.package_relpath

	base_distro = ""
	for p in pack_obj.get_info_var("BUILD_HOSTS"):
		plat2 = packaging.buildconf(p, exclusive=False)
		pack2 = packaging.package(plat2, pack_name, bundle_obj=bundle_obj)

		if pack2.package_relpath == target_base_path:
			base_distro = p
			#print "Found base distro for %s,%s: %s" % ( pack_name, distro_name, p)
			break

	return base_distro
Example 5
def clean_distro_builds(distro_name):
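	"""Remove old build trees for distro_name's packages, keeping only the newest num_builds versions."""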

	# Reference global var instead of defining in our own scope
	global remove_noarch

	print "Removing packages for: " + distro_name
	conf_obj = packaging.buildconf(distro_name, exclusive=False)
	for p in packages:
		# fake out symlink errors by using 'inside_jail'
		pobj = packaging.package(conf_obj, p, HEAD_or_RELEASE=HEAD_or_RELEASE, source_basepath=source_basepath, package_basepath=package_basepath)

		if pobj.destroot == 'noarch' and not remove_noarch:
			continue

		for i in pobj.get_versions(fail_on_missing=False)[:-num_builds]:
			path = os.path.join(package_basepath, pobj.destroot, pobj.name, i)
			print "Removing: " + path
			shutil.rmtree(path)

	# Only remove these once
	remove_noarch = False
Example 6
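# Read the distro index page template from the release repository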
fd = open(os.path.join(config.release_repo_root, 'website', 'distro-index'))
template = fd.readlines()
fd.close()

version = bundle_conf.info['archive_version']
package_src_url = os.path.basename(package_src_dir)

if not package_src_url:
    print "Invalid package_src_dir, make sure it doesn't end with a slash"
    sys.exit(1)

# Go here so the rpm file globbings look right
os.chdir(package_src_dir)
for distro_conf in distros:

    build_conf = packaging.buildconf(os.path.basename(distro_conf),
                                     exclusive=False)
    print "*** Generating pages for: %s" % build_conf.name

    # Skip the distros that use zip packaging system
    if utils.get_dict_var('USE_ZIP_PKG', build_conf.info): continue

    distro_out_dir = os.path.join(output_dir, url_prefix, build_conf.name)
    out_file = distro_out_dir + os.sep + 'index.html'
    arc_out_file = os.path.join(output_dir, 'archive', version, 'download',
                                build_conf.name, 'index.html')

    distutils.dir_util.mkpath(os.path.dirname(out_file))
    distutils.dir_util.mkpath(os.path.dirname(arc_out_file))

    out = open(out_file, 'w')
    arc_out = open(arc_out_file, 'w')
Example 7
#!/usr/bin/env python


import sys

sys.path.append('..')

import packaging

conf = packaging.buildconf('sunos-8-sparc')
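# Build configuration object for the sunos-8-sparc platform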

shell_code = """
for i in `ls /tmp` ; do echo $i ; echo "manual" ; done

if [ "test" == "test2" ] ; then
	echo "no match!"
fi

if [ "test2" == "test2" ] ; then
	echo "match!"
fi


"""

python_code = """

import os

#for i in range(0, 10):
#	print i
Example 8
    return base_distro


# TODO: how to make sure we avoid the inprogress builds
#  which can be done by looking at the xml metadata for the build step...
# Gather rpms for this bundle
if include_packages:
    for plat in config.sd_latest_build_distros:

        # Start with a fresh bundle so one distro doesn't affect another
        bundle_obj2 = packaging.bundle(bundle_name=bundle_name)
        if validated:
            bundle_obj2.force_version_map()

        plat_obj = packaging.buildconf(plat, exclusive=False)
        if not plat_obj.get_info_var('USE_ZIP_PKG') or include_zip:
            print plat_obj.info['distro']
            # Add external dependencies for this distro
            extern = 'external_packages' + os.sep + plat_obj.info['distro']
            if os.path.exists(extern): dirs.append(extern)

            for pack in packages_in_repo:

                #  if we're doing validated builds
                # figure out which version this platform/package should have in the bundle
                if validated:

                    #  find the distro that builds for the current pack for the current distro
                    base_distro = find_base_distro(pack, plat)
                    if not base_distro:
Example 9
#!/usr/bin/env python

import sys

sys.path.append('..')

import packaging

conf = packaging.buildconf('sunos-8-sparc')

shell_code = """
for i in `ls /tmp` ; do echo $i ; echo "manual" ; done

if [ "test" == "test2" ] ; then
	echo "no match!"
fi

if [ "test2" == "test2" ] ; then
	echo "match!"
fi


"""

python_code = """

import os

#for i in range(0, 10):
#	print i
#	print os.environ['test']
Example 10
	return base_distro


# TODO: how to make sure we avoid the inprogress builds
#  which can be done by looking at the xml metadata for the build step...
# Gather rpms for this bundle
if include_packages:
	for plat in config.sd_latest_build_distros:

		# Start with a fresh bundle so one distro doesn't affect another
		bundle_obj2 = packaging.bundle(bundle_name=bundle_name)
		if validated:
			bundle_obj2.force_version_map()

		plat_obj = packaging.buildconf(plat, exclusive=False)
		if not plat_obj.get_info_var('USE_ZIP_PKG') or include_zip:
			print plat_obj.info['distro']
			# Add external dependencies for this distro
			extern = 'external_packages' + os.sep + plat_obj.info['distro']
			if os.path.exists(extern): dirs.append(extern)

			for pack in packages_in_repo:

				#  if we're doing validated builds
				# figure out which version this platform/package should have in the bundle
				if validated:

					#  find the distro that builds for the current pack for the current distro
					base_distro = find_base_distro(pack, plat)
					if not base_distro:
Example 11
fd = open(os.path.join(config.release_repo_root, 'website', 'distro-index'))
template = fd.readlines()
fd.close()

version = bundle_conf.info['archive_version']
package_src_url = os.path.basename(package_src_dir)

if not package_src_url:
	print "Invalid package_src_dir, make sure it doesn't end with a slash"
	sys.exit(1)

# Go here so the rpm file globbings look right
os.chdir(package_src_dir)
for distro_conf in distros:

	build_conf = packaging.buildconf(os.path.basename(distro_conf), exclusive=False)
	print "*** Generating pages for: %s" % build_conf.name

	# Skip the distros that use zip packaging system
	if utils.get_dict_var('USE_ZIP_PKG', build_conf.info): continue

	distro_out_dir = os.path.join(output_dir, url_prefix, build_conf.name)
	out_file = distro_out_dir + os.sep + 'index.html'
	arc_out_file = os.path.join(output_dir, 'archive',  version, 'download', build_conf.name, 'index.html')

	distutils.dir_util.mkpath(os.path.dirname(out_file))
	distutils.dir_util.mkpath(os.path.dirname(arc_out_file))

	out = open(out_file, 'w')
	arc_out = open(arc_out_file, 'w')
Example 12
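# Load the bundle configuration and derive the download URL prefix for this bundle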
bundle_conf = packaging.bundle(bundle_name=bundle)
url_prefix = 'download-' + bundle_conf.info['bundle_urlname']

base_dir = output_dir + os.sep + url_prefix
distutils.dir_util.mkpath(base_dir)
os.chdir(base_dir)

# Load up packages to include in repository (packages_in_repo)
execfile(os.path.join(config.release_repo_root, 'website', 'repo-config', 'config.py') )

# TODO: maybe we should generate the repo data for all repo types for all distros... ?  That might be just confusing...

# Create hard links to real packages to use in repo
for distro in distros:

	distro_obj = packaging.buildconf(distro, exclusive=False)

	# TODO: Come up with repo system for zip system
	if utils.get_dict_var('USE_ZIP_PKG', distro_obj.info):
		pass

	# Only non-zip distros and valid distros for this package
	else: 

		if not os.path.exists(distro_obj.name):
			distutils.dir_util.mkpath(distro_obj.name)
		else:
			# TODO: Better way to do this?
			# Clean up all directories
			for dir in os.listdir(distro_obj.name):
				full_dir = distro_obj.name + os.sep + dir
Example 13
base_dir = output_dir + os.sep + url_prefix
distutils.dir_util.mkpath(base_dir)
os.chdir(base_dir)

# Load up packages to include in repository (packages_in_repo)
execfile(
    os.path.join(config.release_repo_root, 'website', 'repo-config',
                 'config.py'))

# TODO: maybe we should generate the repo data for all repo types for all distros... ?  That might be just confusing...

# Create hard links to real packages to use in repo
for distro in distros:

    distro_obj = packaging.buildconf(distro, exclusive=False)

    # TODO: Come up with repo system for zip system
    if utils.get_dict_var('USE_ZIP_PKG', distro_obj.info):
        pass

    # Only non-zip distros and valid distros for this package
    else:

        if not os.path.exists(distro_obj.name):
            distutils.dir_util.mkpath(distro_obj.name)
        else:
            # TODO: Better way to do this?
            # Clean up all directories
            for dir in os.listdir(distro_obj.name):
                full_dir = distro_obj.name + os.sep + dir