def get_missing(distro, os_platform, arch, repo=SHADOW_REPO, lock_version=True):
    """Compute which stacks of *distro* have no deb in *repo* for (os_platform, arch).

    :param distro: distro object providing ``release_name``, ``stacks`` and
      ``released_stacks`` mappings
    :param os_platform: OS platform codename (e.g. 'lucid')
    :param arch: debian architecture (e.g. 'amd64', 'i386')
    :param repo: repository URL checked via ``deb_in_repo``
    :param lock_version: if True, only the exact stack version counts as
      present; otherwise any debianized version of the stack matches
    :returns: tuple of four sets of stack names:
      ``(missing_primary, missing_dep, missing_excluded, missing_excluded_dep)``
    """
    distro_name = distro.release_name
    # Load the list of exclusions
    excludes_uri = "https://code.ros.org/svn/release/trunk/distros/%s.excludes" % (distro_name)
    excludes = ExclusionList(excludes_uri, distro_name, os_platform, arch)
    # Find all the deps in the distro for this stack
    deps = compute_deps(distro, 'ALL')
    # These stacks are not actually relased, so we treat them as implicitly excluded
    missing_primary = set(distro.stacks.keys()) - set(distro.released_stacks.keys())
    missing_dep = set()
    missing_excluded = set(distro.stacks.keys()) - set(distro.released_stacks.keys())
    missing_excluded_dep = set()
    # Build the deps in order
    for (sn, sv) in deps:
        if not sv:
            # No version released at all: primary-missing by definition.
            missing_primary.add(sn)
            continue
        deb_name = "ros-%s-%s" % (distro_name, debianize_name(sn))
        if lock_version:
            deb_version = debianize_version(sv, '\w*', os_platform)
        else:
            # regex matching any debian-ized ROS version string
            deb_version = '[0-9.]*-[st][0-9]+~[a-z]+'
        if not deb_in_repo(repo, deb_name, deb_version, os_platform, arch, use_regex=True):
            try:
                si = load_info(sn, sv)
                depends = set(si['depends'])
            except Exception:
                # FIX: was a bare `except:` which also swallowed
                # KeyboardInterrupt/SystemExit; narrowed to Exception.
                # stack is missing, including its info
                depends = set()
            # subtract any depends that aren't in the distro b/c of catkin dry/wet line
            to_remove = [d for d in depends if not d in distro.stacks]
            for d in to_remove:
                depends.remove(d)
            if excludes.check(sn):
                missing_excluded.add(sn)
                missing_primary.add(sn)
            elif depends.isdisjoint(missing_primary.union(missing_dep)):
                # none of its deps are missing, so this stack itself is the problem
                missing_primary.add(sn)
            else:
                # missing only because something it depends on is missing
                missing_dep.add(sn)
            # NOTE(review): reconstructed placement — this check appears to apply
            # to every missing stack, not only the missing_dep branch; confirm
            # against upstream history.
            if not depends.isdisjoint(missing_excluded.union(missing_excluded_dep)):
                missing_excluded_dep.add(sn)
        else:
            pass
            #print "IN", sn
    # excluded stacks must not also be reported as plainly missing
    missing_primary -= missing_excluded
    missing_dep -= missing_excluded_dep
    return missing_primary, missing_dep, missing_excluded, missing_excluded_dep
def get_buildable(deps, distro_name, os_platform, arch, requested_stack_name, force):
    """Return the first (stack_name, stack_version) in *deps* that needs building.

    A stack needs building when its deb is not yet in the repo, or when *force*
    is set and the stack is the explicitly requested one. Returns ``None``
    (implicitly) when nothing needs to be built.
    """
    # have to recalculate buildable after each build as invalidation
    # may have occurred. We examine in order to minimize retreading.
    cache = {}  # fresh Packages cache each time through
    for sn, sv in deps:
        deb_name = "ros-%s-%s" % (distro_name, debianize_name(sn))
        deb_version = debianize_version(sv, "\w*", os_platform)
        in_repo = deb_in_repo(deb_name, deb_version, os_platform, arch, cache)
        if not in_repo:
            debug("selecting [%s] because [%s, %s] not in repo" % (sn, deb_name, deb_version))
            return sn, sv
        elif force and sn == requested_stack_name:
            debug("forcing build of %s" % (requested_stack_name))
            # FIX: this branch previously only logged and fell through, so a
            # forced rebuild of an already-uploaded stack was never selected.
            return sn, sv
def get_buildable(deps, distro_name, os_platform, arch, requested_stack_name, force):
    """Return the first (stack_name, stack_version) in *deps* that needs building.

    A stack needs building when its deb is not yet in the repo, or when *force*
    is set and the stack is the explicitly requested one. Returns ``None``
    (implicitly) when nothing needs to be built.
    """
    # have to recalculate buildable after each build as invalidation
    # may have occurred. We examine in order to minimize retreading.
    cache = {}  #fresh Packages cache each time through
    for sn, sv in deps:
        deb_name = "ros-%s-%s" % (distro_name, debianize_name(sn))
        deb_version = debianize_version(sv, '\w*', os_platform)
        in_repo = deb_in_repo(deb_name, deb_version, os_platform, arch, cache)
        if not in_repo:
            debug("selecting [%s] because [%s, %s] not in repo" % (sn, deb_name, deb_version))
            return sn, sv
        elif force and sn == requested_stack_name:
            debug("forcing build of %s" % (requested_stack_name))
            # FIX: this branch previously only logged and fell through, so a
            # forced rebuild of an already-uploaded stack was never selected.
            return sn, sv
def do_deb_build(distro_name, stack_name, stack_version, os_platform, arch, staging_dir, noupload, interactive, repo_fqdn):
    """Build, verify and (optionally) upload the deb for one stack via pbuilder.

    Downloads the pre-generated .dsc/.tar.gz source package into *staging_dir*,
    runs pbuilder inside the per-platform chroot tarball, checks that exactly
    one matching .deb was produced, verifies it installs cleanly inside the
    chroot, then (unless *noupload*) invalidates stale debs and uploads the
    .changes file to *repo_fqdn*.

    :returns: 0 on success, 1 if the upload failed
    :raises InternalBuildFailure: if the chroot tarball is missing or no
      matching deb was generated
    """
    debug("Actually trying to build %s-%s..." % (stack_name, stack_version))
    # Per-platform/arch pbuilder chroot tarball; must have been created beforehand.
    distro_tgz = os.path.join('/var/cache/pbuilder', "%s-%s-%s.tgz" % (os_platform, arch, TGZ_VERSION))
    deb_name = "ros-%s-%s" % (distro_name, debianize_name(stack_name))
    deb_version = debianize_version(stack_version, '0', os_platform)
    ros_file = "%s-%s" % (stack_name, stack_version)
    deb_file = "%s_%s" % (deb_name, deb_version)
    conf_file = os.path.join(os.path.dirname(rosdeb.__file__), 'pbuilder.conf')
    # Make sure the distro chroot exists
    if not os.path.exists(distro_tgz):
        raise InternalBuildFailure("%s does not exist." % (distro_tgz))
    # Download deb and tar.gz files:
    dsc_name = '%s.dsc' % (deb_file)
    tar_gz_name = '%s.tar.gz' % (deb_file)
    (dsc_file, tar_gz_file) = download_files(stack_name, stack_version, staging_dir, [dsc_name, tar_gz_name])
    # Create hook and results directories
    hook_dir = os.path.join(staging_dir, 'hooks')
    results_dir = os.path.join(staging_dir, 'results')
    build_dir = os.path.join(staging_dir, 'pbuilder')
    if not os.path.exists(hook_dir):
        os.makedirs(hook_dir)
    if not os.path.exists(results_dir):
        os.makedirs(results_dir)
    if not os.path.exists(build_dir):
        os.makedirs(build_dir)
    # Hook script which will download our tar.bz2 into environment
    # (pbuilder runs hook scripts according to their letter prefix, see pbuilder(8))
    p = os.path.join(hook_dir, 'A50fetch')
    with open(p, 'w') as f:
        f.write("""#!/bin/sh
set -o errexit
apt-get install ca-certificates -y # not in default ubuntu anymore
wget https://code.ros.org/svn/release/download/stacks/%(stack_name)s/%(stack_name)s-%(stack_version)s/%(stack_name)s-%(stack_version)s.tar.bz2 -O /tmp/buildd/%(stack_name)s-%(stack_version)s.tar.bz2
rosdep update
chown -R pbuilder /tmp/buildd/.ros
su pbuilder -c "rosdep resolve gtest"
su pbuilder -c "cp -r /tmp/buildd/.ros /tmp"
""" % locals())
    os.chmod(p, stat.S_IRWXU)  # hook must be executable for pbuilder to run it
    # Hook script which makes sure we have updated our apt cache
    p = os.path.join(hook_dir, 'D50update')
    with open(p, 'w') as f:
        f.write("""#!/bin/bash
set -o errexit
apt-get update
apt-get install -y python-rosdep
rosdep init""" % locals())
    os.chmod(p, stat.S_IRWXU)
    if interactive:
        # Hook scripts to make us interactive:
        p = os.path.join(hook_dir, 'B50interactive')
        with open(p, 'w') as f:
            f.write("""#!/bin/bash
echo "Entering interactive environment.  Exit when done to continue pbuilder operation."
export ROS_DESTDIR=/tmp/buildd/%(deb_name)s-%(stack_version)s/debian/%(deb_name)s
source /tmp/buildd/%(deb_name)s-%(stack_version)s/setup_deb.sh
roscd %(stack_name)s
bash </dev/tty
echo "Resuming pbuilder"
""" % locals())
        os.chmod(p, stat.S_IRWXU)
        # Hook scripts to make us interactive:
        p = os.path.join(hook_dir, 'C50interactive')
        with open(p, 'w') as f:
            f.write("""#!/bin/bash
echo "Entering interactive environment.  Exit when done to continue pbuilder operation."
export ROS_DESTDIR=/tmp/buildd/%(deb_name)s-%(stack_version)s/debian/%(deb_name)s
source /tmp/buildd/%(deb_name)s-%(stack_version)s/setup_deb.sh
roscd %(stack_name)s
bash </dev/tty
echo "Resuming pbuilder"
""" % locals())
        os.chmod(p, stat.S_IRWXU)
    # setarch is only needed when building for a "foreign" personality (e.g. i386 on amd64)
    if arch == 'amd64' or arch == 'armel' or arch == 'armhf':
        archcmd = []
    else:
        archcmd = ['setarch', arch]
    # Actually build the deb.  This results in the deb being located in results_dir
    debug("starting pbuilder build of %s-%s" % (stack_name, stack_version))
    subprocess.check_call(archcmd + ['sudo', 'pbuilder', '--build', '--basetgz', distro_tgz, '--configfile', conf_file, '--hookdir', hook_dir, '--buildresult', results_dir, '--binary-arch', '--buildplace', build_dir, dsc_file], stderr=subprocess.STDOUT)
    # Set up an RE to look for the debian file and find the build_version
    deb_version_wild = debianize_version(stack_version, '(\w*)', os_platform)
    deb_file_wild = "%s_%s_%s\.deb" % (deb_name, deb_version_wild, arch)
    build_version = None
    # Extract the version number we just built:
    files = os.listdir(results_dir)
    for f in files:
        M = re.match(deb_file_wild, f)
        if M:
            build_version = M.group(1)
    if not build_version:
        raise InternalBuildFailure("No deb-file generated matching template: %s" % deb_file_wild)
    deb_version_final = debianize_version(stack_version, build_version, os_platform)
    deb_file_final = "%s_%s" % (deb_name, deb_version_final)
    # Build a package db if we have to
    debug("starting package db build of %s-%s" % (stack_name, stack_version))
    subprocess.check_call(['bash', '-c', 'cd %(staging_dir)s && dpkg-scanpackages . > %(results_dir)s/Packages' % locals()])
    # Script to execute for deb verification: install the freshly built deb
    # from the local Packages index inside the chroot.
    # TODO: Add code to run all the unit-tests for the deb!
    verify_script = os.path.join(staging_dir, 'verify_script.sh')
    with open(verify_script, 'w') as f:
        f.write("""#!/bin/sh
set -o errexit
echo "deb file:%(staging_dir)s results/" > /etc/apt/sources.list.d/pbuild.list
apt-get update
apt-get install %(deb_name)s=%(deb_version_final)s -y --force-yes
dpkg -l %(deb_name)s
""" % locals())
    os.chmod(verify_script, stat.S_IRWXU)
    debug("starting verify script for %s-%s" % (stack_name, stack_version))
    subprocess.check_call(archcmd + ['sudo', 'pbuilder', '--execute', '--basetgz', distro_tgz, '--configfile', conf_file, '--bindmounts', results_dir, '--buildplace', build_dir, verify_script], stderr=subprocess.STDOUT)
    # Upload the debs to the server
    base_files = ['%s_%s.changes' % (deb_file, arch)]  # , "%s_%s.deb"%(deb_file_final, arch)
    files = [os.path.join(results_dir, x) for x in base_files]
    print "Generated debian change files: %s" % files
    if not noupload:
        # remove stale versions of this deb (and presumably downstream debs) first
        invalidate_debs(deb_name, os_platform, arch, repo_fqdn)
        if not upload_debs(files, distro_name, os_platform, arch, repo_fqdn):
            print "Upload of debs failed!!!"
            return 1
    return 0
def do_deb_build(distro_name, stack_name, stack_version, os_platform, arch, staging_dir, noupload, interactive, repo_fqdn):
    """Build, verify and (optionally) upload the deb for one stack via pbuilder.

    Downloads the pre-generated .dsc/.tar.gz source package into *staging_dir*,
    runs pbuilder inside the per-platform chroot tarball, checks that exactly
    one matching .deb was produced, verifies it installs cleanly inside the
    chroot, then (unless *noupload*) invalidates stale debs and uploads the
    .changes file to *repo_fqdn*.

    :returns: 0 on success, 1 if the upload failed
    :raises InternalBuildFailure: if the chroot tarball is missing or no
      matching deb was generated
    """
    debug("Actually trying to build %s-%s..." % (stack_name, stack_version))
    # Per-platform/arch pbuilder chroot tarball; must have been created beforehand.
    distro_tgz = os.path.join(
        '/var/cache/pbuilder', "%s-%s-%s.tgz" % (os_platform, arch, TGZ_VERSION))
    deb_name = "ros-%s-%s" % (distro_name, debianize_name(stack_name))
    deb_version = debianize_version(stack_version, '0', os_platform)
    ros_file = "%s-%s" % (stack_name, stack_version)
    deb_file = "%s_%s" % (deb_name, deb_version)
    conf_file = os.path.join(os.path.dirname(rosdeb.__file__), 'pbuilder.conf')
    # Make sure the distro chroot exists
    if not os.path.exists(distro_tgz):
        raise InternalBuildFailure("%s does not exist." % (distro_tgz))
    # Download deb and tar.gz files:
    dsc_name = '%s.dsc' % (deb_file)
    tar_gz_name = '%s.tar.gz' % (deb_file)
    (dsc_file, tar_gz_file) = download_files(stack_name, stack_version, staging_dir,
                                             [dsc_name, tar_gz_name])
    # Create hook and results directories
    hook_dir = os.path.join(staging_dir, 'hooks')
    results_dir = os.path.join(staging_dir, 'results')
    build_dir = os.path.join(staging_dir, 'pbuilder')
    if not os.path.exists(hook_dir):
        os.makedirs(hook_dir)
    if not os.path.exists(results_dir):
        os.makedirs(results_dir)
    if not os.path.exists(build_dir):
        os.makedirs(build_dir)
    # Hook script which will download our tar.bz2 into environment
    # (pbuilder runs hook scripts according to their letter prefix, see pbuilder(8))
    p = os.path.join(hook_dir, 'A50fetch')
    with open(p, 'w') as f:
        f.write("""#!/bin/sh
set -o errexit
apt-get install ca-certificates -y # not in default ubuntu anymore
wget https://code.ros.org/svn/release/download/stacks/%(stack_name)s/%(stack_name)s-%(stack_version)s/%(stack_name)s-%(stack_version)s.tar.bz2 -O /tmp/buildd/%(stack_name)s-%(stack_version)s.tar.bz2
rosdep update
chown -R pbuilder /tmp/buildd/.ros
su pbuilder -c "rosdep resolve gtest"
su pbuilder -c "cp -r /tmp/buildd/.ros /tmp"
""" % locals())
    os.chmod(p, stat.S_IRWXU)  # hook must be executable for pbuilder to run it
    # Hook script which makes sure we have updated our apt cache
    p = os.path.join(hook_dir, 'D50update')
    with open(p, 'w') as f:
        f.write("""#!/bin/bash
set -o errexit
apt-get update
apt-get install -y python-rosdep
rosdep init""" % locals())
    os.chmod(p, stat.S_IRWXU)
    if interactive:
        # Hook scripts to make us interactive:
        p = os.path.join(hook_dir, 'B50interactive')
        with open(p, 'w') as f:
            f.write("""#!/bin/bash
echo "Entering interactive environment.  Exit when done to continue pbuilder operation."
export ROS_DESTDIR=/tmp/buildd/%(deb_name)s-%(stack_version)s/debian/%(deb_name)s
source /tmp/buildd/%(deb_name)s-%(stack_version)s/setup_deb.sh
roscd %(stack_name)s
bash </dev/tty
echo "Resuming pbuilder"
""" % locals())
        os.chmod(p, stat.S_IRWXU)
        # Hook scripts to make us interactive:
        p = os.path.join(hook_dir, 'C50interactive')
        with open(p, 'w') as f:
            f.write("""#!/bin/bash
echo "Entering interactive environment.  Exit when done to continue pbuilder operation."
export ROS_DESTDIR=/tmp/buildd/%(deb_name)s-%(stack_version)s/debian/%(deb_name)s
source /tmp/buildd/%(deb_name)s-%(stack_version)s/setup_deb.sh
roscd %(stack_name)s
bash </dev/tty
echo "Resuming pbuilder"
""" % locals())
        os.chmod(p, stat.S_IRWXU)
    # setarch is only needed when building for a "foreign" personality (e.g. i386 on amd64)
    if arch == 'amd64' or arch == 'armel' or arch == 'armhf':
        archcmd = []
    else:
        archcmd = ['setarch', arch]
    # Actually build the deb.  This results in the deb being located in results_dir
    debug("starting pbuilder build of %s-%s" % (stack_name, stack_version))
    subprocess.check_call(archcmd + [
        'sudo', 'pbuilder', '--build', '--basetgz', distro_tgz, '--configfile',
        conf_file, '--hookdir', hook_dir, '--buildresult', results_dir,
        '--binary-arch', '--buildplace', build_dir, dsc_file
    ], stderr=subprocess.STDOUT)
    # Set up an RE to look for the debian file and find the build_version
    deb_version_wild = debianize_version(stack_version, '(\w*)', os_platform)
    deb_file_wild = "%s_%s_%s\.deb" % (deb_name, deb_version_wild, arch)
    build_version = None
    # Extract the version number we just built:
    files = os.listdir(results_dir)
    for f in files:
        M = re.match(deb_file_wild, f)
        if M:
            build_version = M.group(1)
    if not build_version:
        raise InternalBuildFailure(
            "No deb-file generated matching template: %s" % deb_file_wild)
    deb_version_final = debianize_version(stack_version, build_version, os_platform)
    deb_file_final = "%s_%s" % (deb_name, deb_version_final)
    # Build a package db if we have to
    debug("starting package db build of %s-%s" % (stack_name, stack_version))
    subprocess.check_call([
        'bash', '-c',
        'cd %(staging_dir)s && dpkg-scanpackages . > %(results_dir)s/Packages' % locals()
    ])
    # Script to execute for deb verification: install the freshly built deb
    # from the local Packages index inside the chroot.
    # TODO: Add code to run all the unit-tests for the deb!
    verify_script = os.path.join(staging_dir, 'verify_script.sh')
    with open(verify_script, 'w') as f:
        f.write("""#!/bin/sh
set -o errexit
echo "deb file:%(staging_dir)s results/" > /etc/apt/sources.list.d/pbuild.list
apt-get update
apt-get install %(deb_name)s=%(deb_version_final)s -y --force-yes
dpkg -l %(deb_name)s
""" % locals())
    os.chmod(verify_script, stat.S_IRWXU)
    debug("starting verify script for %s-%s" % (stack_name, stack_version))
    subprocess.check_call(archcmd + [
        'sudo', 'pbuilder', '--execute', '--basetgz', distro_tgz, '--configfile',
        conf_file, '--bindmounts', results_dir, '--buildplace', build_dir,
        verify_script
    ], stderr=subprocess.STDOUT)
    # Upload the debs to the server
    base_files = ['%s_%s.changes' % (deb_file, arch)
                  ]  # , "%s_%s.deb"%(deb_file_final, arch)
    files = [os.path.join(results_dir, x) for x in base_files]
    print "Generated debian change files: %s" % files
    if not noupload:
        # remove stale versions of this deb (and presumably downstream debs) first
        invalidate_debs(deb_name, os_platform, arch, repo_fqdn)
        if not upload_debs(files, distro_name, os_platform, arch, repo_fqdn):
            print "Upload of debs failed!!!"
            return 1
    return 0
def get_missing(distro, os_platform, arch, repo=SHADOW_REPO, lock_version=True):
    """Compute which stacks of *distro* have no deb in *repo* for (os_platform, arch).

    :param distro: distro object providing ``release_name``, ``stacks`` and
      ``released_stacks`` mappings
    :param os_platform: OS platform codename (e.g. 'lucid')
    :param arch: debian architecture (e.g. 'amd64', 'i386')
    :param repo: repository URL checked via ``deb_in_repo``
    :param lock_version: if True, only the exact stack version counts as
      present; otherwise any debianized version of the stack matches
    :returns: tuple of four sets of stack names:
      ``(missing_primary, missing_dep, missing_excluded, missing_excluded_dep)``
    """
    distro_name = distro.release_name
    # Load the list of exclusions
    excludes_uri = "https://code.ros.org/svn/release/trunk/distros/%s.excludes" % (
        distro_name)
    excludes = ExclusionList(excludes_uri, distro_name, os_platform, arch)
    # Find all the deps in the distro for this stack
    deps = compute_deps(distro, 'ALL')
    # These stacks are not actually relased, so we treat them as implicitly excluded
    missing_primary = set(distro.stacks.keys()) - set(
        distro.released_stacks.keys())
    missing_dep = set()
    missing_excluded = set(distro.stacks.keys()) - set(
        distro.released_stacks.keys())
    missing_excluded_dep = set()
    # Build the deps in order
    for (sn, sv) in deps:
        if not sv:
            # No version released at all: primary-missing by definition.
            missing_primary.add(sn)
            continue
        deb_name = "ros-%s-%s" % (distro_name, debianize_name(sn))
        if lock_version:
            deb_version = debianize_version(sv, '\w*', os_platform)
        else:
            # regex matching any debian-ized ROS version string
            deb_version = '[0-9.]*-[st][0-9]+~[a-z]+'
        if not deb_in_repo(
                repo, deb_name, deb_version, os_platform, arch, use_regex=True):
            try:
                si = load_info(sn, sv)
                depends = set(si['depends'])
            except Exception:
                # FIX: was a bare `except:` which also swallowed
                # KeyboardInterrupt/SystemExit; narrowed to Exception.
                # stack is missing, including its info
                depends = set()
            # subtract any depends that aren't in the distro b/c of catkin dry/wet line
            to_remove = [d for d in depends if not d in distro.stacks]
            for d in to_remove:
                depends.remove(d)
            if excludes.check(sn):
                missing_excluded.add(sn)
                missing_primary.add(sn)
            elif depends.isdisjoint(missing_primary.union(missing_dep)):
                # none of its deps are missing, so this stack itself is the problem
                missing_primary.add(sn)
            else:
                # missing only because something it depends on is missing
                missing_dep.add(sn)
            # NOTE(review): reconstructed placement — this check appears to apply
            # to every missing stack, not only the missing_dep branch; confirm
            # against upstream history.
            if not depends.isdisjoint(
                    missing_excluded.union(missing_excluded_dep)):
                missing_excluded_dep.add(sn)
        else:
            pass
            #print "IN", sn
    # excluded stacks must not also be reported as plainly missing
    missing_primary -= missing_excluded
    missing_dep -= missing_excluded_dep
    return missing_primary, missing_dep, missing_excluded, missing_excluded_dep
def do_deb_build(distro_name, stack_name, stack_version, os_platform, arch, staging_dir, noupload, interactive):
    """Build, verify and (optionally) upload the deb for one stack via pbuilder.

    Older variant: uploads via scp to the ros-shadow incoming directory and
    processes the queue remotely with reprepro over ssh, instead of the
    invalidate_debs/upload_debs helpers.

    :raises InternalBuildFailure: if the chroot tarball is missing, no matching
      deb was generated, or the remote upload script fails
    """
    debug("Actually trying to build %s-%s..." % (stack_name, stack_version))
    # Per-platform/arch pbuilder chroot tarball; must have been created beforehand.
    distro_tgz = os.path.join(
        '/var/cache/pbuilder', "%s-%s-%s.tgz" % (os_platform, arch, TGZ_VERSION))
    # shared apt cache so repeated builds don't re-download packages
    cache_dir = '/home/rosbuild/aptcache/%s-%s' % (os_platform, arch)
    deb_name = "ros-%s-%s" % (distro_name, debianize_name(stack_name))
    deb_version = debianize_version(stack_version, '0', os_platform)
    ros_file = "%s-%s" % (stack_name, stack_version)
    deb_file = "%s_%s" % (deb_name, deb_version)
    conf_file = os.path.join(roslib.packages.get_pkg_dir('rosdeb'), 'config',
                             'pbuilder.conf')
    # Make sure the distro chroot exists
    if not os.path.exists(distro_tgz):
        raise InternalBuildFailure("%s does not exist." % (distro_tgz))
    # Download deb and tar.gz files:
    dsc_name = '%s.dsc' % (deb_file)
    tar_gz_name = '%s.tar.gz' % (deb_file)
    (dsc_file, tar_gz_file) = download_files(stack_name, stack_version, staging_dir,
                                             [dsc_name, tar_gz_name])
    # Create hook and results directories
    hook_dir = os.path.join(staging_dir, 'hooks')
    results_dir = os.path.join(staging_dir, 'results')
    build_dir = os.path.join(staging_dir, 'pbuilder')
    if not os.path.exists(cache_dir):
        os.makedirs(cache_dir)
    if not os.path.exists(hook_dir):
        os.makedirs(hook_dir)
    if not os.path.exists(results_dir):
        os.makedirs(results_dir)
    if not os.path.exists(build_dir):
        os.makedirs(build_dir)
    # Hook script which will download our tar.bz2 into environment
    # (pbuilder runs hook scripts according to their letter prefix, see pbuilder(8))
    p = os.path.join(hook_dir, 'A50fetch')
    with open(p, 'w') as f:
        f.write("""#!/bin/sh
set -o errexit
wget https://code.ros.org/svn/release/download/stacks/%(stack_name)s/%(stack_name)s-%(stack_version)s/%(stack_name)s-%(stack_version)s.tar.bz2 -O /tmp/buildd/%(stack_name)s-%(stack_version)s.tar.bz2""" % locals())
    os.chmod(p, stat.S_IRWXU)  # hook must be executable for pbuilder to run it
    # Hook script which makes sure we have updated our apt cache
    p = os.path.join(hook_dir, 'D50update')
    with open(p, 'w') as f:
        f.write("""#!/bin/sh
set -o errexit
apt-get update""" % locals())
    os.chmod(p, stat.S_IRWXU)
    if interactive:
        # Hook scripts to make us interactive:
        p = os.path.join(hook_dir, 'B50interactive')
        with open(p, 'w') as f:
            f.write("""#!/bin/sh
echo "Entering interactive environment.  Exit when done to continue pbuilder operation."
export ROS_DESTDIR=/tmp/buildd/%(deb_name)s-%(stack_version)s/debian/%(deb_name)s
source /tmp/buildd/%(deb_name)s-%(stack_version)s/setup_deb.sh
roscd %(stack_name)s
bash </dev/tty
echo "Resuming pbuilder"
""" % locals())
        os.chmod(p, stat.S_IRWXU)
        # Hook scripts to make us interactive:
        p = os.path.join(hook_dir, 'C50interactive')
        with open(p, 'w') as f:
            f.write("""#!/bin/sh
echo "Entering interactive environment.  Exit when done to continue pbuilder operation."
export ROS_DESTDIR=/tmp/buildd/%(deb_name)s-%(stack_version)s/debian/%(deb_name)s
source /tmp/buildd/%(deb_name)s-%(stack_version)s/setup_deb.sh
roscd %(stack_name)s
bash </dev/tty
echo "Resuming pbuilder"
""" % locals())
        os.chmod(p, stat.S_IRWXU)
    # setarch is only needed when building for a "foreign" personality (e.g. i386 on amd64)
    if arch == 'amd64' or arch == 'armel':
        archcmd = []
    else:
        archcmd = ['setarch', arch]
    # Actually build the deb.  This results in the deb being located in results_dir
    debug("starting pbuilder build of %s-%s" % (stack_name, stack_version))
    subprocess.check_call(archcmd + [
        'sudo', 'pbuilder', '--build', '--basetgz', distro_tgz, '--configfile',
        conf_file, '--hookdir', hook_dir, '--buildresult', results_dir,
        '--binary-arch', '--buildplace', build_dir, '--aptcache', cache_dir,
        dsc_file
    ], stderr=subprocess.STDOUT)
    # Set up an RE to look for the debian file and find the build_version
    deb_version_wild = debianize_version(stack_version, '(\w*)', os_platform)
    deb_file_wild = "%s_%s_%s\.deb" % (deb_name, deb_version_wild, arch)
    build_version = None
    # Extract the version number we just built:
    files = os.listdir(results_dir)
    for f in files:
        M = re.match(deb_file_wild, f)
        if M:
            build_version = M.group(1)
    if not build_version:
        raise InternalBuildFailure(
            "No deb-file generated matching template: %s" % deb_file_wild)
    deb_version_final = debianize_version(stack_version, build_version, os_platform)
    deb_file_final = "%s_%s" % (deb_name, deb_version_final)
    # Build a package db if we have to
    debug("starting package db build of %s-%s" % (stack_name, stack_version))
    subprocess.check_call([
        'bash', '-c',
        'cd %(staging_dir)s && dpkg-scanpackages . > %(results_dir)s/Packages' % locals()
    ])
    # Script to execute for deb verification: install the freshly built deb
    # from the local Packages index inside the chroot.
    # TODO: Add code to run all the unit-tests for the deb!
    verify_script = os.path.join(staging_dir, 'verify_script.sh')
    with open(verify_script, 'w') as f:
        f.write("""#!/bin/sh
set -o errexit
echo "deb file:%(staging_dir)s results/" > /etc/apt/sources.list.d/pbuild.list
apt-get update
apt-get install %(deb_name)s=%(deb_version_final)s -y --force-yes
dpkg -l %(deb_name)s
""" % locals())
    os.chmod(verify_script, stat.S_IRWXU)
    debug("starting verify script for %s-%s" % (stack_name, stack_version))
    subprocess.check_call(archcmd + [
        'sudo', 'pbuilder', '--execute', '--basetgz', distro_tgz, '--configfile',
        conf_file, '--bindmounts', results_dir, '--buildplace', build_dir,
        '--aptcache', cache_dir, verify_script
    ], stderr=subprocess.STDOUT)
    if not noupload:
        # Upload the debs to the server
        base_files = [
            '%s_%s.changes' % (deb_file, arch),
            "%s_%s.deb" % (deb_file_final, arch)
        ]
        files = [os.path.join(results_dir, x) for x in base_files]
        debug("uploading debs for %s-%s to %s" % (stack_name, stack_version,
                                                  REPO_HOSTNAME))
        cmd = ['scp'] + files + [
            '%s:/var/packages/ros-shadow/ubuntu/incoming/%s' % (REPO_LOGIN,
                                                                os_platform)
        ]
        debug(' '.join(cmd))
        subprocess.check_call(cmd, stderr=subprocess.STDOUT)
        debug("upload complete")
        # Assemble string for moving all files from incoming to queue (while lock is being held)
        move_str = '\n'.join([
            'mv ' + os.path.join('/var/packages/ros-shadow/ubuntu/incoming',
                                 os_platform, x) + ' ' +
            os.path.join('/var/packages/ros-shadow/ubuntu/queue', os_platform, x)
            for x in base_files
        ])
        # This script moves files into queue directory, removes all dependent debs, removes the existing deb, and then processes the incoming files
        # The remote command copies our script from stdin into a temp file and runs it.
        remote_cmd = "TMPFILE=`mktemp` || exit 1 && cat > ${TMPFILE} && chmod +x ${TMPFILE} && ${TMPFILE}; ret=${?}; rm ${TMPFILE}; exit ${ret}"
        debug("running remote command [%s]" % (remote_cmd))
        run_script = subprocess.Popen(['ssh', REPO_LOGIN, remote_cmd],
                                      stdin=subprocess.PIPE,
                                      stdout=subprocess.PIPE,
                                      stderr=subprocess.STDOUT)
        debug("getting depends to prepare invalidate script")
        invalidate = [deb_name] + get_depends(deb_name, os_platform, arch)
        debug("invalidating pre-existing and downstream: %s" % (invalidate))
        # NOTE: relies on Python 2 list-comprehension variable leakage so that
        # deb_name_x is visible to the %(...)s locals() substitution.
        invalidate_cmds = [
            "reprepro -b /var/packages/ros-shadow/ubuntu -V -A %(arch)s removefilter %(os_platform)s 'Package (==%(deb_name_x)s)'"
            % locals() for deb_name_x in invalidate
        ]
        invalidate_str = "\n".join(invalidate_cmds)
        script_content = """
#!/bin/bash
set -o errexit
(
flock 200
# Move from incoming to queue
%(move_str)s
# Remove all debs that depend on this package
%(invalidate_str)s
# Load it into the repo
reprepro -b /var/packages/ros-shadow/ubuntu -V processincoming %(os_platform)s
) 200>/var/lock/ros-shadow.lock
""" % locals()
        #Actually run script and check result
        (o, e) = run_script.communicate(script_content)
        debug("waiting for invalidation script")
        res = run_script.wait()
        debug("invalidation script result: %s" % o)
        if res != 0:
            raise InternalBuildFailure("Could not run upload script:\n%s\n%s" % (o, e))
        # The cache is no longer valid, we clear it so that we won't skip debs that have been invalidated
        # NOTE(review): reconstructed indentation places this inside the
        # noupload guard (repo only changed when we uploaded) — confirm.
        rosdeb.repo._Packages_cache = {}
def do_deb_build(distro_name, stack_name, stack_version, os_platform, arch, staging_dir, noupload, interactive):
    """Build, verify and (optionally) upload the deb for one stack via pbuilder.

    Older variant: uploads via scp to the ros-shadow incoming directory and
    processes the queue remotely with reprepro over ssh, instead of the
    invalidate_debs/upload_debs helpers.

    :raises InternalBuildFailure: if the chroot tarball is missing, no matching
      deb was generated, or the remote upload script fails
    """
    debug("Actually trying to build %s-%s..." % (stack_name, stack_version))
    # Per-platform/arch pbuilder chroot tarball; must have been created beforehand.
    distro_tgz = os.path.join("/var/cache/pbuilder",
                              "%s-%s-%s.tgz" % (os_platform, arch, TGZ_VERSION))
    # shared apt cache so repeated builds don't re-download packages
    cache_dir = "/home/rosbuild/aptcache/%s-%s" % (os_platform, arch)
    deb_name = "ros-%s-%s" % (distro_name, debianize_name(stack_name))
    deb_version = debianize_version(stack_version, "0", os_platform)
    ros_file = "%s-%s" % (stack_name, stack_version)
    deb_file = "%s_%s" % (deb_name, deb_version)
    conf_file = os.path.join(roslib.packages.get_pkg_dir("rosdeb"), "config",
                             "pbuilder.conf")
    # Make sure the distro chroot exists
    if not os.path.exists(distro_tgz):
        raise InternalBuildFailure("%s does not exist." % (distro_tgz))
    # Download deb and tar.gz files:
    dsc_name = "%s.dsc" % (deb_file)
    tar_gz_name = "%s.tar.gz" % (deb_file)
    (dsc_file, tar_gz_file) = download_files(stack_name, stack_version, staging_dir,
                                             [dsc_name, tar_gz_name])
    # Create hook and results directories
    hook_dir = os.path.join(staging_dir, "hooks")
    results_dir = os.path.join(staging_dir, "results")
    build_dir = os.path.join(staging_dir, "pbuilder")
    if not os.path.exists(cache_dir):
        os.makedirs(cache_dir)
    if not os.path.exists(hook_dir):
        os.makedirs(hook_dir)
    if not os.path.exists(results_dir):
        os.makedirs(results_dir)
    if not os.path.exists(build_dir):
        os.makedirs(build_dir)
    # Hook script which will download our tar.bz2 into environment
    # (pbuilder runs hook scripts according to their letter prefix, see pbuilder(8))
    p = os.path.join(hook_dir, "A50fetch")
    with open(p, "w") as f:
        f.write(
            """#!/bin/sh
set -o errexit
wget https://code.ros.org/svn/release/download/stacks/%(stack_name)s/%(stack_name)s-%(stack_version)s/%(stack_name)s-%(stack_version)s.tar.bz2 -O /tmp/buildd/%(stack_name)s-%(stack_version)s.tar.bz2"""
            % locals()
        )
    os.chmod(p, stat.S_IRWXU)  # hook must be executable for pbuilder to run it
    # Hook script which makes sure we have updated our apt cache
    p = os.path.join(hook_dir, "D50update")
    with open(p, "w") as f:
        f.write(
            """#!/bin/sh
set -o errexit
apt-get update"""
            % locals()
        )
    os.chmod(p, stat.S_IRWXU)
    if interactive:
        # Hook scripts to make us interactive:
        p = os.path.join(hook_dir, "B50interactive")
        with open(p, "w") as f:
            f.write(
                """#!/bin/sh
echo "Entering interactive environment.  Exit when done to continue pbuilder operation."
export ROS_DESTDIR=/tmp/buildd/%(deb_name)s-%(stack_version)s/debian/%(deb_name)s
source /tmp/buildd/%(deb_name)s-%(stack_version)s/setup_deb.sh
roscd %(stack_name)s
bash </dev/tty
echo "Resuming pbuilder"
"""
                % locals()
            )
        os.chmod(p, stat.S_IRWXU)
        # Hook scripts to make us interactive:
        p = os.path.join(hook_dir, "C50interactive")
        with open(p, "w") as f:
            f.write(
                """#!/bin/sh
echo "Entering interactive environment.  Exit when done to continue pbuilder operation."
export ROS_DESTDIR=/tmp/buildd/%(deb_name)s-%(stack_version)s/debian/%(deb_name)s
source /tmp/buildd/%(deb_name)s-%(stack_version)s/setup_deb.sh
roscd %(stack_name)s
bash </dev/tty
echo "Resuming pbuilder"
"""
                % locals()
            )
        os.chmod(p, stat.S_IRWXU)
    # setarch is only needed when building for a "foreign" personality (e.g. i386 on amd64)
    if arch == "amd64" or arch == "armel":
        archcmd = []
    else:
        archcmd = ["setarch", arch]
    # Actually build the deb.  This results in the deb being located in results_dir
    debug("starting pbuilder build of %s-%s" % (stack_name, stack_version))
    subprocess.check_call(
        archcmd
        + [
            "sudo",
            "pbuilder",
            "--build",
            "--basetgz",
            distro_tgz,
            "--configfile",
            conf_file,
            "--hookdir",
            hook_dir,
            "--buildresult",
            results_dir,
            "--binary-arch",
            "--buildplace",
            build_dir,
            "--aptcache",
            cache_dir,
            dsc_file,
        ],
        stderr=subprocess.STDOUT,
    )
    # Set up an RE to look for the debian file and find the build_version
    deb_version_wild = debianize_version(stack_version, "(\w*)", os_platform)
    deb_file_wild = "%s_%s_%s\.deb" % (deb_name, deb_version_wild, arch)
    build_version = None
    # Extract the version number we just built:
    files = os.listdir(results_dir)
    for f in files:
        M = re.match(deb_file_wild, f)
        if M:
            build_version = M.group(1)
    if not build_version:
        raise InternalBuildFailure("No deb-file generated matching template: %s" % deb_file_wild)
    deb_version_final = debianize_version(stack_version, build_version, os_platform)
    deb_file_final = "%s_%s" % (deb_name, deb_version_final)
    # Build a package db if we have to
    debug("starting package db build of %s-%s" % (stack_name, stack_version))
    subprocess.check_call(
        ["bash", "-c", "cd %(staging_dir)s && dpkg-scanpackages . > %(results_dir)s/Packages" % locals()]
    )
    # Script to execute for deb verification: install the freshly built deb
    # from the local Packages index inside the chroot.
    # TODO: Add code to run all the unit-tests for the deb!
    verify_script = os.path.join(staging_dir, "verify_script.sh")
    with open(verify_script, "w") as f:
        f.write(
            """#!/bin/sh
set -o errexit
echo "deb file:%(staging_dir)s results/" > /etc/apt/sources.list.d/pbuild.list
apt-get update
apt-get install %(deb_name)s=%(deb_version_final)s -y --force-yes
dpkg -l %(deb_name)s
"""
            % locals()
        )
    os.chmod(verify_script, stat.S_IRWXU)
    debug("starting verify script for %s-%s" % (stack_name, stack_version))
    subprocess.check_call(
        archcmd
        + [
            "sudo",
            "pbuilder",
            "--execute",
            "--basetgz",
            distro_tgz,
            "--configfile",
            conf_file,
            "--bindmounts",
            results_dir,
            "--buildplace",
            build_dir,
            "--aptcache",
            cache_dir,
            verify_script,
        ],
        stderr=subprocess.STDOUT,
    )
    if not noupload:
        # Upload the debs to the server
        base_files = ["%s_%s.changes" % (deb_file, arch), "%s_%s.deb" % (deb_file_final, arch)]
        files = [os.path.join(results_dir, x) for x in base_files]
        debug("uploading debs for %s-%s to %s" % (stack_name, stack_version, REPO_HOSTNAME))
        cmd = ["scp"] + files + ["%s:/var/packages/ros-shadow/ubuntu/incoming/%s" % (REPO_LOGIN, os_platform)]
        debug(" ".join(cmd))
        subprocess.check_call(cmd, stderr=subprocess.STDOUT)
        debug("upload complete")
        # Assemble string for moving all files from incoming to queue (while lock is being held)
        move_str = "\n".join(
            [
                "mv "
                + os.path.join("/var/packages/ros-shadow/ubuntu/incoming", os_platform, x)
                + " "
                + os.path.join("/var/packages/ros-shadow/ubuntu/queue", os_platform, x)
                for x in base_files
            ]
        )
        # This script moves files into queue directory, removes all dependent debs, removes the existing deb, and then processes the incoming files
        # The remote command copies our script from stdin into a temp file and runs it.
        remote_cmd = "TMPFILE=`mktemp` || exit 1 && cat > ${TMPFILE} && chmod +x ${TMPFILE} && ${TMPFILE}; ret=${?}; rm ${TMPFILE}; exit ${ret}"
        debug("running remote command [%s]" % (remote_cmd))
        run_script = subprocess.Popen(
            ["ssh", REPO_LOGIN, remote_cmd], stdin=subprocess.PIPE, stdout=subprocess.PIPE, stderr=subprocess.STDOUT
        )
        debug("getting depends to prepare invalidate script")
        invalidate = [deb_name] + get_depends(deb_name, os_platform, arch)
        debug("invalidating pre-existing and downstream: %s" % (invalidate))
        # NOTE: relies on Python 2 list-comprehension variable leakage so that
        # deb_name_x is visible to the %(...)s locals() substitution.
        invalidate_cmds = [
            "reprepro -b /var/packages/ros-shadow/ubuntu -V -A %(arch)s removefilter %(os_platform)s 'Package (==%(deb_name_x)s)'"
            % locals()
            for deb_name_x in invalidate
        ]
        invalidate_str = "\n".join(invalidate_cmds)
        script_content = (
            """
#!/bin/bash
set -o errexit
(
flock 200
# Move from incoming to queue
%(move_str)s
# Remove all debs that depend on this package
%(invalidate_str)s
# Load it into the repo
reprepro -b /var/packages/ros-shadow/ubuntu -V processincoming %(os_platform)s
) 200>/var/lock/ros-shadow.lock
"""
            % locals()
        )
        # Actually run script and check result
        (o, e) = run_script.communicate(script_content)
        debug("waiting for invalidation script")
        res = run_script.wait()
        debug("invalidation script result: %s" % o)
        if res != 0:
            raise InternalBuildFailure("Could not run upload script:\n%s\n%s" % (o, e))
        # The cache is no longer valid, we clear it so that we won't skip debs that have been invalidated
        # NOTE(review): reconstructed indentation places this inside the
        # noupload guard (repo only changed when we uploaded) — confirm.
        rosdeb.repo._Packages_cache = {}
def do_deb_build(distro_name, stack_name, stack_version, os_platform, arch, staging_dir, noupload, interactive): print "Actually trying to build %s-%s..." % (stack_name, stack_version) project_name = stack_name.split('/')[-1].rstrip('.git') #pull down the git repo using git-buildpackage clone, this gets all the right tags subprocess.check_call(["/bin/bash", "-c", "cd %(staging_dir)s && gbp-clone %(stack_name)s" % locals()]) #update any submodules. subprocess.check_call(["/bin/bash", "-c", "cd %(staging_dir)s/%(project_name)s && git submodule update --init"% locals()]) lines = open(os.path.join(staging_dir,project_name,'debian/changelog'),'r').readlines() first_line = lines[0] w = first_line.split('(') left = w[0] right = w[-1].split(')') middle = right[0] right = right[-1] middle += '~' + os_platform line = left + '(' + middle + ')' + right lines[0] = line with open(os.path.join(staging_dir,project_name,'debian/changelog'),'w') as nchlog: nchlog.writelines(lines) subprocess.check_call(["/bin/bash", "-c", "cd %(staging_dir)s/%(project_name)s && git commit -a -m 'change to platform specific'"% locals()]) subprocess.check_call(["/bin/bash", "-c", "cd %(staging_dir)s/%(project_name)s && git-buildpackage -S -uc -us" % locals()]) distro_tgz = os.path.join('/var/cache/pbuilder', "%s-%s.tgz" % (os_platform, arch)) cache_dir = '/home/rosbuild/aptcache/%s-%s' % (os_platform, arch) deb_name = "ros-%s-%s" % (distro_name, debianize_name(stack_name)) deb_version = debianize_version(stack_version, '0', os_platform) ros_file = "%s-%s" % (stack_name, stack_version) deb_file = "%s_%s" % (deb_name, deb_version) conf_file = os.path.join(roslib.packages.get_pkg_dir('rosdeb'), 'config', 'pbuilder.conf') # Make sure the distro chroot exists if not os.path.exists(distro_tgz): raise InternalBuildFailure("%s does not exist." 
% (distro_tgz)) staging_dir_contents = os.listdir(staging_dir) dsc_files = [f for f in staging_dir_contents if ".dsc" in f] if len(dsc_files) != 1: raise InternalBuildFailure("Too many dsc files found %s" % dsc_files) dsc_file = os.path.join(staging_dir, dsc_files[0]) # Create hook and results directories hook_dir = os.path.join(staging_dir, 'hooks') results_dir = os.path.join(staging_dir, 'results') build_dir = os.path.join(staging_dir, 'pbuilder') if not os.path.exists(cache_dir): os.makedirs(cache_dir) if not os.path.exists(hook_dir): os.makedirs(hook_dir) if not os.path.exists(results_dir): os.makedirs(results_dir) if not os.path.exists(build_dir): os.makedirs(build_dir) # Hook script which makes sure we have updated our apt cache p = os.path.join(hook_dir, 'D50update') with open(p, 'w') as f: f.write("""#!/bin/sh set -o errexit apt-get update""" % locals()) os.chmod(p, stat.S_IRWXU) if arch == 'amd64': archcmd = [] else: archcmd = ['setarch', arch] # Actually build the deb. This results in the deb being located in results_dir print "starting pbuilder build of %s-%s" % (stack_name, stack_version) subprocess.check_call(archcmd + ['sudo', 'pbuilder', '--build', '--basetgz', distro_tgz, '--configfile', conf_file, '--hookdir', hook_dir, '--buildresult', results_dir, '--binary-arch', '--buildplace', build_dir, '--aptcache', cache_dir, dsc_file]) # Extract the version number we just built: files = os.listdir(results_dir) # Find debian file outputs deb_files_detected = [f for f in files if f.endswith('.deb')] deb_names = [d.split('_')[0] for d in deb_files_detected] if len(deb_files_detected) < 1: raise InternalBuildFailure("No deb-file generated") # Build a package db if we have to print "starting package db build of %s-%s" % (stack_name, stack_version) subprocess.check_call(['bash', '-c', 'cd %(staging_dir)s && dpkg-scanpackages . 
> %(results_dir)s/Packages' % locals()]) for d in deb_names: # Script to execute for deb verification # TODO: Add code to run all the unit-tests for the deb! verify_script = os.path.join(staging_dir, 'verify_script.sh') with open(verify_script, 'w') as f: f.write("""#!/bin/sh set -o errexit echo "deb file:%(staging_dir)s results/" > /etc/apt/sources.list.d/pbuild.list apt-get update apt-get install %(d)s -y --force-yes dpkg -l %(d)s """ % locals()) os.chmod(verify_script, stat.S_IRWXU) print "starting verify script for %s-%s" % (stack_name, stack_version) subprocess.check_call(archcmd + ['sudo', 'pbuilder', '--execute', '--basetgz', distro_tgz, '--configfile', conf_file, '--bindmounts', results_dir, '--buildplace', build_dir, '--aptcache', cache_dir, verify_script]) # Detect changes files change_files = [f for f in files if '.changes' in f] upload_files = [os.path.join(results_dir, x) for x in deb_files_detected] if not noupload: upload_debs(upload_files, 'ros-shadow-3rdparty', distro_name, os_platform, arch) else: print "No Upload option selected, I would have uploaded the files:", upload_files