Code example #1
File: build.py  Project: lovelydb/release
def get_package_objs(honor_disable_webview=False):
    """Returns two lists: packages, noarch_packages."""

    packages = get_packages()

    pack_objs = []
    noarch_pack_objs = []

    for package in packages:
        # Don't try to create the dirs and links because this code is run by the web server
        pack_obj = packaging.package("", package, create_dirs=False)

        if honor_disable_webview and pack_obj.get_info_var("disable_webview"):
            continue

        # Handle normal package
        # Add workaround for packages that build on multiple platforms, but are noarch (currently mono-basic)
        if pack_obj.info['get_destroot'].find(
                'noarch') == -1 or pack_obj.get_info_var('web_ignore_noarch'):
            pack_objs.append(pack_obj)
        # It's noarch
        else:
            noarch_pack_objs.append(pack_obj)

    return _web_index_sort(pack_objs), _web_index_sort(noarch_pack_objs)
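A minimal usage sketch for the function above (hypothetical caller; assumes build.py is importable and that package objects expose a name attribute, as the other examples on this page suggest):

import build

# Hypothetical usage -- not from the original source.
packs, noarch_packs = build.get_package_objs(honor_disable_webview=True)
for pack_obj in packs + noarch_packs:
    print pack_obj.name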
Code example #2
def clean_sources():

	for p in packages:
		pobj = packaging.package("", p, HEAD_or_RELEASE=HEAD_or_RELEASE, source_basepath=source_basepath, package_basepath=package_basepath)
		for i in pobj.get_source_files()[:-num_builds]:
			path = os.path.join(source_basepath,  pobj.name, i)
			print "Removing: " + path
			os.unlink(path)
Code example #3
File: sync-bundle.py  Project: lovelydb/release
def find_base_distro(pack_name, distro_name):
    """Look at all the build hosts to see which gives the same distro_root"""

    plat_obj = packaging.buildconf(distro_name, exclusive=False)
    pack_obj = packaging.package(plat_obj, pack_name, bundle_obj=bundle_obj)

    target_base_path = pack_obj.package_relpath

    base_distro = ""
    for p in pack_obj.get_info_var("BUILD_HOSTS"):
        plat2 = packaging.buildconf(p, exclusive=False)
        pack2 = packaging.package(plat2, pack_name, bundle_obj=bundle_obj)

        if pack2.package_relpath == target_base_path:
            base_distro = p
            #print "Found base distro for %s,%s: %s" % ( pack_name, distro_name, p)
            break

    return base_distro
Code example #4
File: sync-bundle.py  Project: akoeplinger/release
def find_base_distro(pack_name, distro_name):
	"""Look at all the build hosts to see which gives the same distro_root"""

	plat_obj = packaging.buildconf(distro_name, exclusive=False)
	pack_obj = packaging.package(plat_obj, pack_name, bundle_obj=bundle_obj)

	target_base_path = pack_obj.package_relpath

	base_distro = ""
	for p in pack_obj.get_info_var("BUILD_HOSTS"):
		plat2 = packaging.buildconf(p, exclusive=False)
		pack2 = packaging.package(plat2, pack_name, bundle_obj=bundle_obj)

		if pack2.package_relpath == target_base_path:
			base_distro = p
			#print "Found base distro for %s,%s: %s" % ( pack_name, distro_name, p)
			break

	return base_distro
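A minimal usage sketch for find_base_distro (the package and distro names below are illustrative assumptions, not from the source):

# Hypothetical call with illustrative arguments.
base = find_base_distro('mono', 'sles-9-i586')
if base:
    print "Base distro: " + base
else:
    print "No build host shares the target package_relpath"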
Code example #5
File: packaging.py  Project: akoeplinger/release
	def get_dep_files(self, build_deps=False, recommend_deps=False, source_deps=False, zip_runtime_deps=False):
		files = []

		url_dest = config.packaging_dir + os.sep + 'external_zip_pkg' + os.sep + self.package_env.name

		deps = []
		if build_deps:
			deps += self.get_mono_deps()
		if recommend_deps:
			deps += self.get_mono_recommend_deps()
		if source_deps:
			deps += self.get_mono_source_deps()

		for dep in deps:
			# Get files for mono deps
			# Woah, total cheat here, I imported packaging, and am using it!
			package = packaging.package(self.package_env, dep, HEAD_or_RELEASE=self.HEAD_or_RELEASE)

			# If this is a recommended package, don't fail if missing
			if self.get_mono_recommend_deps().count(package.name):
				fail_flag = False
			else:
				fail_flag = True

			files += package.get_files(fail_on_missing=fail_flag)

			# Get url files
			urls = package.get_distro_zip_deps()
			if zip_runtime_deps:
				urls += package.get_distro_zip_runtime_deps()
			for url in urls:
				files += [ url_dest + os.sep + os.path.basename(url) ]
				utils.get_url(url, url_dest)

			# Get rpm deps
			urls += self.get_rpm_deps()

		# Get url files
		urls = self.get_distro_zip_deps()
		if zip_runtime_deps:
			urls += self.get_distro_zip_runtime_deps()
		urls += self.get_rpm_deps()
		for url in urls:
			files += [ url_dest + os.sep + os.path.basename(url) ]
			utils.get_url(url, url_dest)

		return utils.remove_list_duplicates(files)
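A minimal usage sketch, assuming pack is a package object built as in the other examples on this page (the flag values are illustrative):

# Hypothetical call; pack is assumed to exist already.
files = pack.get_dep_files(build_deps=True, recommend_deps=True, zip_runtime_deps=True)
for f in files:
    print f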
Code example #6
File: buildserver.py  Project: akoeplinger/release
    def load_info(self):

        # reload list of packages (allows us to update the list without restarting the daemon)
        reload(config)

        self.td_active = config.td_active

        self.max_poll_interval = config.td_max_poll_interval
        self.network_error_interval = config.td_network_error_interval

        self.num_sequential = config.td_num_sequential

        self.sequential = config.td_sequential

        self.src_repo = src_repo_utils.svn(config.MONO_ROOT, key_file=config.src_key_file)
        self.distfiles = datastore.source_file_repo()

        self.pack_objs = {}
        for pack in config.td_packages:
            self.pack_objs[pack] = packaging.package("", pack)
Code example #7
def clean_distro_builds(distro_name):

	# Reference global var instead of defining in our own scope
	global remove_noarch

	print "Removing packages for: " + distro_name
	conf_obj = packaging.buildconf(distro_name, exclusive=False)
	for p in packages:
		# fake out symlink errors by using 'inside_jail'
		pobj = packaging.package(conf_obj, p, HEAD_or_RELEASE=HEAD_or_RELEASE, source_basepath=source_basepath, package_basepath=package_basepath)

		if pobj.destroot == 'noarch' and not remove_noarch:
			continue

		for i in pobj.get_versions(fail_on_missing=False)[:-num_builds]:
			path = os.path.join(package_basepath, pobj.destroot, pobj.name, i)
			print "Removing: " + path
			shutil.rmtree(path)

	# Only remove these once
	remove_noarch = False
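The retention logic above assumes get_versions returns versions sorted oldest-first, so the [:-num_builds] slice selects everything except the newest num_builds entries. A quick illustration with made-up values:

# Illustrative values only.
versions = ['1.1.10', '1.1.11', '1.1.12', '1.1.13']  # assumed oldest-first
num_builds = 2
print versions[:-num_builds]  # ['1.1.10', '1.1.11'] -- these get removed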
Code example #8
File: build.py  Project: akoeplinger/release
def get_package_objs(honor_disable_webview=False):
	"""Returns two lists: packages, noarch_packages."""

	packages = get_packages()

	pack_objs = []
	noarch_pack_objs = []

	for package in packages:
		# Don't try to create the dirs and links because this code is run by the web server
		pack_obj = packaging.package("", package, create_dirs=False)

		if honor_disable_webview and pack_obj.get_info_var("disable_webview"):
			continue

		# Handle normal package
		# Add workaround for packages that build on multiple platforms, but are noarch (currently mono-basic)
		if pack_obj.info['get_destroot'].find('noarch') == -1 or pack_obj.get_info_var('web_ignore_noarch'):
			pack_objs.append(pack_obj)
		# It's noarch
		else:
			noarch_pack_objs.append(pack_obj)

	return _web_index_sort(pack_objs), _web_index_sort(noarch_pack_objs)
Code example #9
File: pack_obj_test.py  Project: lovelydb/release
#!/usr/bin/env python

import sys

import pdb

sys.path += [".."]

import packaging
pack = packaging.package("", 'mono-1.1.7')

pdb.set_trace()

print pack.info['web_index']
Code example #10
File: buildserver.py  Project: akoeplinger/release
    def run(self):

        distro = self.distro
        scheduler_log.log("%s:\tStarting scheduler\n" % (distro))

        while not self.cancelled():

            packages_to_build = []
            for pack_def in config.sd_latest_build_packages:
                pack_obj = packaging.package("", pack_def)
                if pack_obj.valid_build_platform(distro):
                    packages_to_build.append(pack_def)

            num_started_builds = 0
            start_time = utils.get_time()

            # Build each package for this jail
            for package_name in packages_to_build:

                # Skip builds so we can exit
                if self.cancelled():
                    continue

                # Check to see what the latest tarball is
                # The src_file_repo class is not threadsafe, so provide a mutex here
                tarball_lock.acquire()
                try:
                    tarball_filename = tarballs.get_latest_tarball("HEAD", package_name)
                except:
                    # catch this in case the filename is being edited by hand
                    tarball_filename = ""
                tarball_lock.release()

                if not tarball_filename:
                    # scheduler_log.log("%s:\t*** Error getting latest tarball (%s) (Probably doesn't exist...)!!!\n" % (distro, package_name) )
                    pass

                else:

                    # print "Latest tarball: " + tarball_filename

                    # Get version
                    version, ext = version_re.search(tarball_filename).groups()

                    info = datastore.build_info("HEAD", distro, package_name, version)

                    # Build if the build doesn't exist already
                    if not info.exists:
                        command = "cd %s; ./build --suppress_output %s %s %s" % (
                            config.packaging_dir,
                            distro,
                            package_name,
                            version,
                        )
                        scheduler_log.log("%s:\t%s\n" % (distro, command))

                        num_started_builds += 1
                        # TODO: hmm... is this not blocking?  Seems this code continues before being able to run tests?
                        (code, output) = utils.launch_process(command, print_output=0)
                        # Testing...
                        # code = 2

                        # Is the jail busy?  if so, just repeat this loop (and select a new tarball if a newer one exists)
                        # Hmm... this really shouldn't happen, as much at least
                        if code == 2:
                            # scheduler_log.log("%s:\tJail is busy or offline... will retry again (%s)\n" % (distro, package_name) )
                            num_started_builds -= 1

                        if code == 5:
                            scheduler_log.log(
                                "%s:\tbuild info is missing, but packages exist... ?? will retry again (%s)\n"
                                % (distro, package_name)
                            )
                            num_started_builds -= 1
                    else:
                        # scheduler_log.log("%s:\tSkipping existing build (%s, %s)\n" % (distro, package_name, version) )
                        pass

            time_duration = utils.time_duration_asc(start_time, utils.get_time()) * 60
            if num_started_builds == 0 and time_duration < config.sd_wakeup_interval:
                # scheduler_log.log("%s:\tSleeping %d seconds...\n" % (distro, config.sd_wakeup_interval - time_duration) )
                time.sleep(config.sd_wakeup_interval - time_duration)

        # Exiting because we've been removed from the configuration
        scheduler_log.log("%s:\tExiting upon user request...\n" % distro)
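The scheduler relies on a version_re defined elsewhere in buildserver.py and not shown in this fragment. A plausible sketch, assuming tarball names like mono-1.1.13.tar.gz (an assumption, not the original definition):

import re

# Assumed pattern -- captures the version number and the compression extension.
version_re = re.compile(r"-(\d[\d.]*)\.tar\.(gz|bz2)$")

version, ext = version_re.search("mono-1.1.13.tar.gz").groups()
print version, ext  # 1.1.13 gz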
Code example #11
File: __init__.py  Project: timothyxchen/VacaEase
def init(location, start_date, end_date, preference):
    print("Application Start...")
    # get number of days
    start_dt = dt.datetime.strptime(start_date, "%Y-%m-%d").date()
    end_dt = dt.datetime.strptime(end_date, "%Y-%m-%d").date()
    num_days = (end_dt - start_dt).days
    print("Number of days: {}".format(num_days))

    # scrape data
    restaurant_data, hotel_data, attraction_data, weather_data = scrap_data(
        location, start_date, end_date, preference)

    # filter data
    # print("###################################################")
    print("Sorting data with preference 【{}】 and filtering...".format(
        preference))
    hotel_filtered, restaurant_filtered, attraction_filtered = data_filter(
        restaurant_data, hotel_data, attraction_data, preference, num_days)
    # print("\n\n#########filtered#########")
    # with pd.option_context('display.max_rows', None, 'display.max_columns', None, 'expand_frame_repr', False):
    #     print(hotel_filtered)
    #     print(restaurant_filtered)
    #     print(attraction_filtered)

    # calculate distances between POIs and package them to bundles
    print("\n\nCalculating Distance...")
    attraction_restaurant_package = cd.package_att_rest(
        attraction_filtered, restaurant_filtered)
    attraction_restaurant_package_filtered = [
        item for item in attraction_restaurant_package if int(item[2]) <= 20000
    ]

    # package items to actual routes
    print("Calculating distances between each POI ...")
    routes = pg.package(hotel_filtered, attraction_restaurant_package_filtered,
                        num_days)

    # transform name df to address df
    route_address = route_to_address(routes, hotel_filtered,
                                     attraction_filtered, restaurant_filtered)
    route_distance = cd.calculate_each_route(route_address)
    print("\n\n#########route_distance#########")
    with pd.option_context('display.max_rows', None, 'display.max_columns',
                           None, 'expand_frame_repr', False,
                           'display.max_colwidth', -1):
        print(route_distance)

    # replace name in route with actual POI contents
    print("\n\n#########Final_route_outputs#########")
    routes = to_route_output(routes, hotel_filtered, attraction_filtered,
                             restaurant_filtered)

    with pd.option_context('display.max_rows', None, 'display.max_columns',
                           None, 'display.max_colwidth', -1):
        print(routes)

    print("\n\n#########Weather_outputs#########")
    weather_output = weather_data[["Description", "High / Low"]]
    with pd.option_context('display.max_rows', None, 'display.max_columns',
                           None, 'display.max_colwidth', -1):
        print(weather_output)

    export_plot(route_distance, routes)

    return routes, route_distance, weather_output
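A hypothetical invocation of init (location, dates, and preference are illustrative; the dates must match the %Y-%m-%d format parsed above):

# Illustrative values only.
routes, route_distance, weather_output = init(
    "Boston", "2021-06-01", "2021-06-04", "food")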
Code example #12
File: step_test.py  Project: lovelydb/release
#!/usr/bin/env python

import sys

sys.path += [ '../pyutils' ]

import packaging

env = packaging.buildenv('sles-9-i586')
#env = packaging.buildenv('win-4-i386')
env = packaging.buildenv('macos-10-ppc')

pack = packaging.package(env, 'mono-1.1.13')

print pack.info['POSTBUILD_STEP_NAME1']
print pack.info['POSTBUILD_STEP1']

env.ssh.print_command=1
environment = {}
environment['HEAD_or_RELEASE'] = "HEAD"

pack.info['POSTBUILD_STEP1'] = """

python -c 'print "hey"'

"""


env.ssh.execute(pack.info['POSTBUILD_STEP1'], env=environment)

Code example #13
#!/usr/bin/env python

import sys

sys.path += [".."]

import packaging
import shell_parse

#env = packaging.buildenv('sles-9-x86_64')
env = packaging.buildenv('win-4-i386')
print env.info

#pack = packaging.package(env, 'gecko-sharp-2.0')
pack = packaging.package(env, 'mono')
print pack.info

print "---------------------------------------------------------------------"

pack_def = shell_parse.parse_file('../../packaging/defs/libgdiplus')

print pack_def['macos_10_ppc_ZIP_BUILD']

print "---------------------------------------------------------------------"
Code example #14
File: mk-repos.py  Project: lovelydb/release
        if not os.path.exists(distro_obj.name):
            distutils.dir_util.mkpath(distro_obj.name)
        else:
            # TODO: Better way to do this?
            # Clean up all directories
            for dir in os.listdir(distro_obj.name):
                full_dir = distro_obj.name + os.sep + dir
                if os.path.isdir(full_dir):
                    shutil.rmtree(full_dir)

        rpms = []
        # Get rpms for this distro
        for pack in packages_in_repo:
            pack_obj = packaging.package(distro_obj,
                                         os.path.basename(pack),
                                         bundle_obj=bundle_conf,
                                         package_basepath=package_src_dir)

            # Only if package is valid on this distro
            if pack_obj.valid_use_platform(distro_obj.name):
                rpms += pack_obj.get_files(fail_on_missing=False)

        # Get external rpms
        for rpm_file in glob.glob(
                os.path.join(package_src_dir, '..', 'external_packages',
                             distro_obj.name, "*.rpm")):
            # Skip source rpms
            if not re.compile('\.src\.rpm').search(rpm_file):
                rpms.append(os.path.normpath(rpm_file))

        # Create hard links for all rpms
Code example #15
distutils.dir_util.mkpath(os.path.join(output_dir, "archive", bundle_conf.info['archive_version'], 'sources'))

out = open(os.path.join(output_dir, 'sources-' + bundle_conf.info['bundle_urlname'], 'index.html'), 'w')
arc_out = open(os.path.join(output_dir, 'archive', bundle_conf.info['archive_version'], 'sources', 'index.html'), 'w')

fd = open(os.path.join(config.release_repo_root, 'website', 'sources-index'))
template = fd.readlines()
fd.close()

for line in template:
	line_items = line.split()
	if line_items and line_items[0] == "#":
		args = line_items[1:]
		tarballs = []
		for pack in args:
			pack_obj = packaging.package("", pack, bundle_obj=bundle_conf, source_basepath=sources_dir)
			try:
				source_file = pack_obj.get_source_file()
			except IndexError:
				# TODO: Sort of a hack...
				# There's no source for this module
				source_file = ''
			# Skip if there is no source for this bundle
			if source_file:
				tarballs.append(source_file)

		print tarballs

		out.write("<ul>\n")
		arc_out.write("<ul>\n")
		for i in tarballs:
Code example #16
File: pack_obj_test.py  Project: akoeplinger/release
#!/usr/bin/env python

import sys

import pdb

sys.path += [ ".." ]

import packaging
pack = packaging.package("", 'mono-1.1.7')

pdb.set_trace()

print pack.info['web_index']


Code example #17
	arc_out_file = os.path.join(output_dir, 'archive',  version, 'download', build_conf.name, 'index.html')

	distutils.dir_util.mkpath(os.path.dirname(out_file))
	distutils.dir_util.mkpath(os.path.dirname(arc_out_file))

	out = open(out_file, 'w')
	arc_out = open(arc_out_file, 'w')

	for line in template:	
		line_segs = line.split()
		if line_segs and line_segs[0] == "#":
			ARGS = line_segs[2:]
			RPMS = []
			SPECS = []
			for package in ARGS:
				pack_obj = packaging.package(build_conf, package, bundle_obj=bundle_conf, package_basepath=package_src_dir)

				if not pack_obj.valid_use_platform(build_conf.name):
					continue

				# probably won't ever want to post zip files ... ?
				RPMS += pack_obj.get_files_relpath(ext=["rpm"], fail_on_missing=False)

				# Remove src.rpms in case we saved them (which we usually don't)
				# (build a filtered list instead of removing items while iterating)
				RPMS = [i for i in RPMS if not re.compile(".*\.src\.rpm").search(i)]

				SPECS += pack_obj.get_files_relpath(ext="spec", fail_on_missing=False)

			if len(RPMS) == 0:
Code example #18
File: sync-bundle.py  Project: lovelydb/release
                        build_info = datastore.build_info(
                            bundle_obj2.HEAD_or_RELEASE(), base_distro, pack,
                            ver)

                        # check for a build that passed all tests
                        if build_info.get_state() == "success":
                            target_ver = ver
                            print "Found validated build for %s: %s" % (
                                pack, target_ver)
                            break

                    if target_ver:
                        bundle_obj2.add_version(pack, target_ver)

                pack_obj = packaging.package(plat_obj,
                                             pack,
                                             bundle_obj=bundle_obj2)

                # Ignore versioning from external sources (which we don't build svn versions of)
                old_version_map_exists = pack_obj.bundle_obj.version_map_exists
                if pack_obj.get_info_var("EXTERNAL_SOURCE"):
                    pack_obj.bundle_obj.ignore_version_map()

                if pack_obj.valid_use_platform(plat_obj.info['distro']):
                    rpms += pack_obj.get_files(fail_on_missing=fail_on_missing)

                # Restore version_map_exists
                pack_obj.bundle_obj.version_map_exists = old_version_map_exists

# Gather sources
for pack in build.get_packages():
Code example #19
#!/usr/bin/env python

import sys
import pdb

sys.path.append("../pyutils")

import packaging

bundle_conf = packaging.bundle(bundle_name='1.1.13')

for i in ['gtk-sharp', 'gtk-sharp-2.0', 'gtk-sharp-2.8']:
    env = packaging.buildenv('suse-101-i586')
    pack = packaging.package(
        env,
        i,
        bundle_obj=bundle_conf,
        source_basepath='/var/www/mono-website/go-mono/sources',
        package_basepath='/var/www/mono-website/go-mono/download')

    print "\n".join(pack.get_files())

    print pack.get_source_file()
Code example #20
File: sync-bundle.py  Project: akoeplinger/release
					target_ver = ""
					for ver in versions:

						build_info = datastore.build_info(bundle_obj2.HEAD_or_RELEASE(), base_distro, pack, ver)

						# check for a build that passed all tests
						if build_info.get_state() == "success":
							target_ver = ver
							print "Found validated build for %s: %s" % (pack, target_ver)
							break

					if target_ver:
						bundle_obj2.add_version(pack, target_ver)

				pack_obj = packaging.package(plat_obj, pack, bundle_obj=bundle_obj2)

				# Ignore versioning from external sources (which we don't build svn versions of)
				old_version_map_exists = pack_obj.bundle_obj.version_map_exists
				if pack_obj.get_info_var("EXTERNAL_SOURCE"):
					pack_obj.bundle_obj.ignore_version_map()

				if pack_obj.valid_use_platform(plat_obj.info['distro']):
					rpms += pack_obj.get_files(fail_on_missing=fail_on_missing)

				# Restore version_map_exists
				pack_obj.bundle_obj.version_map_exists = old_version_map_exists

# Gather sources
for pack in build.get_packages():
	pack_obj = packaging.package("", pack, bundle_obj=bundle_obj, create_dirs=False)
Code example #21
File: mk-repos.py  Project: akoeplinger/release
	else: 

		if not os.path.exists(distro_obj.name):
			distutils.dir_util.mkpath(distro_obj.name)
		else:
			# TODO: Better way to do this?
			# Clean up all directories
			for dir in os.listdir(distro_obj.name):
				full_dir = distro_obj.name + os.sep + dir
				if os.path.isdir(full_dir):
					shutil.rmtree(full_dir)

		rpms = []
		# Get rpms for this distro
		for pack in packages_in_repo:
			pack_obj = packaging.package(distro_obj, os.path.basename(pack), bundle_obj=bundle_conf, package_basepath=package_src_dir)

			# Only if package is valid on this distro
			if pack_obj.valid_use_platform(distro_obj.name):
				rpms += pack_obj.get_files(fail_on_missing=False)

		# Get external rpms
		for rpm_file in glob.glob(os.path.join(package_src_dir, '..',  'external_packages', distro_obj.name, "*.rpm")):
			# Skip source rpms
			if not re.compile('\.src\.rpm').search(rpm_file):
				rpms.append(os.path.normpath(rpm_file))

		# Create hard links for all rpms
		for file in rpms:
			arch_dir = distro_obj.name + os.sep + rpm_utils.rpm_query('ARCH', file)
			if not os.path.exists(arch_dir):  distutils.dir_util.mkpath(arch_dir)