def get_vagrant_sshinfo():
    """Get ssh connection info for a vagrant VM

    :returns: A dictionary containing 'hostname', 'port', 'user' and 'idfile'
    """
    if subprocess.call('vagrant ssh-config >sshconfig',
                       cwd='builder', shell=True) != 0:
        raise BuildException("Error getting ssh config")
    vagranthost = 'default'  # Host in ssh config file
    sshconfig = paramiko.SSHConfig()
    sshf = open('builder/sshconfig', 'r')
    sshconfig.parse(sshf)
    sshf.close()
    sshconfig = sshconfig.lookup(vagranthost)
    idfile = sshconfig['identityfile']
    if isinstance(idfile, list):
        idfile = idfile[0]
    elif idfile.startswith('"') and idfile.endswith('"'):
        idfile = idfile[1:-1]
    return {'hostname': sshconfig['hostname'],
            'port': int(sshconfig['port']),
            'user': sshconfig['user'],
            'idfile': idfile}

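# Illustrative sketch (defined but never called): the dict returned by
# get_vagrant_sshinfo() plugs straight into paramiko.SSHClient.connect(),
# which is how build_server() further down uses it.
def _example_connect_to_builder():
    sshinfo = get_vagrant_sshinfo()
    sshs = paramiko.SSHClient()
    sshs.set_missing_host_key_policy(paramiko.AutoAddPolicy())
    sshs.connect(sshinfo['hostname'], username=sshinfo['user'],
                 port=sshinfo['port'], timeout=300,
                 look_for_keys=False, key_filename=sshinfo['idfile'])
    sshs.close()
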
def main():
    parser = optparse.OptionParser()
    parser.add_option("--depends_on", action="store_true", default=False)
    (options, args) = parser.parse_args()

    if len(args) <= 2 or len(args) % 2 != 1:
        print("Usage: %s ros_distro repo1 version1 repo2 version2 ..." % sys.argv[0])
        print(" - with ros_distro the name of the ros distribution (e.g. 'fuerte' or 'groovy')")
        print(" - with repo the name of the repository")
        print(" - with version 'latest', 'devel', or the actual version number (e.g. 0.2.5).")
        raise BuildException("Wrong arguments for test_repositories script")

    ros_distro = args[0]
    repo_list = [args[i] for i in range(1, len(args), 2)]
    version_list = [args[i + 1] for i in range(1, len(args), 2)]
    workspace = os.environ['WORKSPACE']

    print("Running test_repositories test on distro %s and repositories %s" %
          (ros_distro,
           ', '.join(["%s (%s)" % (r, v) for r, v in zip(repo_list, version_list)])))
    test_repositories(ros_distro, repo_list, version_list, workspace,
                      test_depends_on=options.depends_on)

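# Example invocation matching the usage message above (the script name, repo
# names and versions are hypothetical); WORKSPACE must be set in the
# environment:
#
#   WORKSPACE=/tmp/ws python test_repositories.py groovy \
#       my_repo devel other_repo 0.2.5
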
def _get_repo_data(doc_file, repo_name):
    if repo_name not in doc_file.repositories:
        raise BuildException('Could not find a repository "%s" in doc file' % repo_name)
    repo = doc_file.repositories[repo_name]
    repo_data = {
        'local-name': repo.name,
        'uri': repo.url,
    }
    if repo.version is not None:
        repo_data['version'] = repo.version
    return {repo.type: repo_data}

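# For a git-hosted repository the returned entry is shaped like a single
# rosinstall item (the values below are illustrative):
#
#   {'git': {'local-name': 'my_repo',
#            'uri': 'https://github.com/example/my_repo.git',
#            'version': '0.2.5'}}
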
def load_configuration_fuerte(ros_distro, repo):
    try:
        repo_url = 'https://raw.github.com/ros/rosdistro/master/doc/%s/%s.rosinstall' % (ros_distro, repo)
        f = urllib2.urlopen(repo_url)
        if f.code != 200:
            raise BuildException("Could not find a valid rosinstall file for %s at %s" % (repo, repo_url))
        doc_conf = yaml.load(f.read())
    except (urllib2.URLError, urllib2.HTTPError):
        raise BuildException("Could not find a valid rosinstall file for %s at %s" % (repo, repo_url))

    depends_conf = []
    try:
        depends_repo_url = 'https://raw.github.com/ros/rosdistro/master/doc/%s/%s_depends.rosinstall' % (ros_distro, repo)
        f = urllib2.urlopen(depends_repo_url)
        if f.code == 200:
            print "Found a depends rosinstall file for %s" % repo
            depends_conf = yaml.load(f.read())
    except (urllib2.URLError, urllib2.HTTPError):
        print "Did not find a depends rosinstall file for %s" % repo

    return (doc_conf, depends_conf)

def load_configuration(ros_distro, repo_name):
    if ros_distro == 'fuerte':
        return load_configuration_fuerte(ros_distro, repo_name)

    from rosdistro import get_doc_file, get_index, get_index_url
    index = get_index(get_index_url())
    doc_file = get_doc_file(index, ros_distro)

    repo_data = _get_repo_data(doc_file, repo_name)
    doc_conf = [repo_data]

    repo = doc_file.repositories[repo_name]
    depends = getattr(repo, 'depends', [])
    depends_conf = []
    for dep_name in depends:
        try:
            repo_data = _get_repo_data(doc_file, dep_name)
        except BuildException:
            raise BuildException('Could not find a dependent repository "%s" of "%s" in doc file' % (dep_name, repo_name))
        depends_conf.append(repo_data)

    return (doc_conf, depends_conf)

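# Usage sketch (repo name illustrative): both return values are lists of
# rosinstall-style entries as built by _get_repo_data() above, ready to be
# dumped out with yaml.safe_dump().
#
#   doc_conf, depends_conf = load_configuration('groovy', 'my_repo')
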
def genkey(keystore, repo_keyalias, password, keydname):
    '''generate a new keystore with a new key in it for signing repos'''
    logging.info('Generating a new key in "' + keystore + '"...')
    common.write_password_file("keystorepass", password)
    common.write_password_file("keypass", password)
    p = FDroidPopen(['keytool', '-genkey',
                     '-keystore', keystore, '-alias', repo_keyalias,
                     '-keyalg', 'RSA', '-keysize', '4096',
                     '-sigalg', 'SHA256withRSA',
                     '-validity', '10000',
                     '-storepass:file', config['keystorepassfile'],
                     '-keypass:file', config['keypassfile'],
                     '-dname', keydname])
    # TODO keypass should be sent via stdin
    if p.returncode != 0:
        raise BuildException("Failed to generate key", p.output)
    # now show the lovely key that was just generated
    p = FDroidPopen(['keytool', '-list', '-v',
                     '-keystore', keystore, '-alias', repo_keyalias,
                     '-storepass:file', config['keystorepassfile']])
    logging.info(p.output.strip() + '\n\n')

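# Illustrative call (all values hypothetical); keydname is an X.500
# distinguished name, as expected by keytool's -dname option:
#
#   genkey('keystore.jks', 'repokey', 's3cret',
#          'CN=Example F-Droid Repo, OU=example.org')
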
def build_local(app, thisbuild, vcs, build_dir, output_dir, srclib_dir,
                extlib_dir, tmp_dir, force, onserver):
    """Do a build locally."""

    if thisbuild['buildjni'] and thisbuild['buildjni'] != ['no']:
        if not config['ndk_path']:
            logging.critical("$ANDROID_NDK is not set!")
            sys.exit(3)
        elif not os.path.isdir(config['ndk_path']):
            logging.critical("$ANDROID_NDK points to a non-existing directory!")
            sys.exit(3)

    # Prepare the source code...
    root_dir, srclibpaths = common.prepare_source(vcs, app, thisbuild,
                                                  build_dir, srclib_dir,
                                                  extlib_dir, onserver)

    # We need to clean via the build tool in case the binary dirs are
    # different from the default ones
    p = None
    if thisbuild['type'] == 'maven':
        logging.info("Cleaning Maven project...")
        cmd = [config['mvn3'], 'clean', '-Dandroid.sdk.path=' + config['sdk_path']]

        if '@' in thisbuild['maven']:
            maven_dir = os.path.join(root_dir, thisbuild['maven'].split('@', 1)[1])
            maven_dir = os.path.normpath(maven_dir)
        else:
            maven_dir = root_dir

        p = FDroidPopen(cmd, cwd=maven_dir)

    elif thisbuild['type'] == 'gradle':
        logging.info("Cleaning Gradle project...")
        cmd = [config['gradle'], 'clean']

        adapt_gradle(build_dir)
        for name, number, libpath in srclibpaths:
            adapt_gradle(libpath)

        p = FDroidPopen(cmd, cwd=root_dir)

    elif thisbuild['type'] == 'kivy':
        pass

    elif thisbuild['type'] == 'ant':
        logging.info("Cleaning Ant project...")
        p = FDroidPopen(['ant', 'clean'], cwd=root_dir)

    if p is not None and p.returncode != 0:
        raise BuildException("Error cleaning %s:%s" %
                             (app['id'], thisbuild['version']), p.output)

    for root, dirs, files in os.walk(build_dir):
        # Don't remove possibly necessary 'gradle' dirs if 'gradlew' is not there
        if 'gradlew' in files:
            logging.debug("Getting rid of Gradle wrapper stuff in %s" % root)
            os.remove(os.path.join(root, 'gradlew'))
            if 'gradlew.bat' in files:
                os.remove(os.path.join(root, 'gradlew.bat'))
            if 'gradle' in dirs:
                shutil.rmtree(os.path.join(root, 'gradle'))

    if not options.skipscan:
        # Scan before building...
        logging.info("Scanning source for common problems...")
        count = common.scan_source(build_dir, root_dir, thisbuild)
        if count > 0:
            if force:
                logging.warn('Scanner found %d problems:' % count)
            else:
                raise BuildException("Can't build due to %d errors while scanning" % count)

    if not options.notarball:
        # Build the source tarball right before we build the release...
        logging.info("Creating source tarball...")
        tarname = common.getsrcname(app, thisbuild)
        tarball = tarfile.open(os.path.join(tmp_dir, tarname), "w:gz")

        def tarexc(f):
            return any(f.endswith(s) for s in ['.svn', '.git', '.hg', '.bzr'])
        tarball.add(build_dir, tarname, exclude=tarexc)
        tarball.close()

    if onserver:
        manifest = os.path.join(root_dir, 'AndroidManifest.xml')
        if os.path.exists(manifest):
            homedir = os.path.expanduser('~')
            with open(os.path.join(homedir, 'buildserverid'), 'r') as f:
                buildserverid = f.read()
            with open(os.path.join(homedir, 'fdroidserverid'), 'r') as f:
                fdroidserverid = f.read()
            with open(manifest, 'r') as f:
                manifestcontent = f.read()
            manifestcontent = manifestcontent.replace('</manifest>',
                                                      '<fdroid buildserverid="' + buildserverid + '"' +
                                                      ' fdroidserverid="' + fdroidserverid + '"' +
                                                      '/></manifest>')
            with open(manifest, 'w') as f:
                f.write(manifestcontent)

    # Run a build command if one is required...
    if thisbuild['build']:
        logging.info("Running 'build' commands in %s" % root_dir)
        cmd = common.replace_config_vars(thisbuild['build'])

        # Substitute source library paths into commands...
        for name, number, libpath in srclibpaths:
            libpath = os.path.relpath(libpath, root_dir)
            cmd = cmd.replace('$$' + name + '$$', libpath)

        p = FDroidPopen(['bash', '-x', '-c', cmd], cwd=root_dir)
        if p.returncode != 0:
            raise BuildException("Error running build command for %s:%s" %
                                 (app['id'], thisbuild['version']), p.output)

    # Build native stuff if required...
    if thisbuild['buildjni'] and thisbuild['buildjni'] != ['no']:
        logging.info("Building the native code")
        jni_components = thisbuild['buildjni']

        if jni_components == ['yes']:
            jni_components = ['']
        cmd = [os.path.join(config['ndk_path'], "ndk-build"), "-j1"]
        for d in jni_components:
            if d:
                logging.info("Building native code in '%s'" % d)
            else:
                logging.info("Building native code in the main project")
            manifest = root_dir + '/' + d + '/AndroidManifest.xml'
            if os.path.exists(manifest):
                # Read and write the whole AM.xml to fix newlines and avoid
                # the ndk r8c or later 'wordlist' errors. The outcome of this
                # under gnu/linux is the same as when using tools like
                # dos2unix, but the native python way is faster and will
                # work in non-unix systems.
                manifest_text = open(manifest, 'U').read()
                open(manifest, 'w').write(manifest_text)
                # In case the AM.xml read was big, free the memory
                del manifest_text
            p = FDroidPopen(cmd, cwd=os.path.join(root_dir, d))
            if p.returncode != 0:
                raise BuildException("NDK build failed for %s:%s" %
                                     (app['id'], thisbuild['version']), p.output)

    p = None
    # Build the release...
    if thisbuild['type'] == 'maven':
        logging.info("Building Maven project...")

        if '@' in thisbuild['maven']:
            maven_dir = os.path.join(root_dir, thisbuild['maven'].split('@', 1)[1])
        else:
            maven_dir = root_dir

        mvncmd = [config['mvn3'], '-Dandroid.sdk.path=' + config['sdk_path'],
                  '-Dmaven.jar.sign.skip=true', '-Dmaven.test.skip=true',
                  '-Dandroid.sign.debug=false', '-Dandroid.release=true',
                  'package']
        if thisbuild['target']:
            target = thisbuild["target"].split('-')[1]
            FDroidPopen(['sed', '-i',
                         's@<platform>[0-9]*</platform>@<platform>' + target + '</platform>@g',
                         'pom.xml'],
                        cwd=root_dir)
            if '@' in thisbuild['maven']:
                FDroidPopen(['sed', '-i',
                             's@<platform>[0-9]*</platform>@<platform>' + target + '</platform>@g',
                             'pom.xml'],
                            cwd=maven_dir)

        p = FDroidPopen(mvncmd, cwd=maven_dir)

        bindir = os.path.join(root_dir, 'target')

    elif thisbuild['type'] == 'kivy':
        logging.info("Building Kivy project...")

        spec = os.path.join(root_dir, 'buildozer.spec')
        if not os.path.exists(spec):
            raise BuildException("Expected to find buildozer-compatible spec at {0}".format(spec))

        defaults = {'orientation': 'landscape', 'icon': '',
                    'permissions': '', 'android.api': "18"}
        bconfig = ConfigParser(defaults, allow_no_value=True)
        bconfig.read(spec)

        distdir = 'python-for-android/dist/fdroid'
        if os.path.exists(distdir):
            shutil.rmtree(distdir)

        modules = bconfig.get('app', 'requirements').split(',')

        cmd = 'ANDROIDSDK=' + config['sdk_path']
        cmd += ' ANDROIDNDK=' + config['ndk_path']
        cmd += ' ANDROIDNDKVER=r9'
        cmd += ' ANDROIDAPI=' + str(bconfig.get('app', 'android.api'))
        cmd += ' VIRTUALENV=virtualenv'
        cmd += ' ./distribute.sh'
        cmd += ' -m ' + "'" + ' '.join(modules) + "'"
        cmd += ' -d fdroid'
        p = FDroidPopen(cmd, cwd='python-for-android', shell=True)
        if p.returncode != 0:
            raise BuildException("Distribute build failed")

        cid = bconfig.get('app', 'package.domain') + '.' + bconfig.get('app', 'package.name')
        if cid != app['id']:
            raise BuildException("Package ID mismatch between metadata and spec")

        orientation = bconfig.get('app', 'orientation', 'landscape')
        if orientation == 'all':
            orientation = 'sensor'

        cmd = ['./build.py',
               '--dir', root_dir,
               '--name', bconfig.get('app', 'title'),
               '--package', app['id'],
               '--version', bconfig.get('app', 'version'),
               '--orientation', orientation
               ]

        perms = bconfig.get('app', 'permissions')
        for perm in perms.split(','):
            cmd.extend(['--permission', perm])

        if bconfig.get('app', 'fullscreen') == 0:
            cmd.append('--window')

        icon = bconfig.get('app', 'icon.filename')
        if icon:
            cmd.extend(['--icon', os.path.join(root_dir, icon)])

        cmd.append('release')
        p = FDroidPopen(cmd, cwd=distdir)

    elif thisbuild['type'] == 'gradle':
        logging.info("Building Gradle project...")
        flavours = thisbuild['gradle'].split(',')

        if len(flavours) == 1 and flavours[0] in ['main', 'yes', '']:
            flavours[0] = ''

        commands = [config['gradle']]
        if thisbuild['preassemble']:
            commands += thisbuild['preassemble'].split()

        flavours_cmd = ''.join(flavours)
        if flavours_cmd:
            flavours_cmd = flavours_cmd[0].upper() + flavours_cmd[1:]

        commands += ['assemble' + flavours_cmd + 'Release']

        # Avoid having to use lintOptions.abortOnError false
        if thisbuild['gradlepluginver'] >= LooseVersion('0.7'):
            with open(os.path.join(root_dir, 'build.gradle'), "a") as f:
                f.write("\nandroid { lintOptions { checkReleaseBuilds false } }\n")

        p = FDroidPopen(commands, cwd=root_dir)

    elif thisbuild['type'] == 'ant':
        logging.info("Building Ant project...")
        cmd = ['ant']
        if thisbuild['antcommand']:
            cmd += [thisbuild['antcommand']]
        else:
            cmd += ['release']
        p = FDroidPopen(cmd, cwd=root_dir)

        bindir = os.path.join(root_dir, 'bin')

    if p is not None and p.returncode != 0:
        raise BuildException("Build failed for %s:%s" %
                             (app['id'], thisbuild['version']), p.output)

    logging.info("Successfully built version " + thisbuild['version'] +
                 ' of ' + app['id'])

    if thisbuild['type'] == 'maven':
        stdout_apk = '\n'.join([
            line for line in p.output.splitlines()
            if any(a in line for a in ('.apk', '.ap_', '.jar'))])
        m = re.match(r".*^\[INFO\] .*apkbuilder.*/([^/]*)\.apk",
                     stdout_apk, re.S | re.M)
        if not m:
            m = re.match(r".*^\[INFO\] Creating additional unsigned apk file .*/([^/]+)\.apk[^l]",
                         stdout_apk, re.S | re.M)
        if not m:
            m = re.match(r'.*^\[INFO\] [^$]*aapt \[package,[^$]*' + bindir +
                         r'/([^/]+)\.ap[_k][,\]]',
                         stdout_apk, re.S | re.M)
        if not m:
            m = re.match(r".*^\[INFO\] Building jar: .*/" + bindir +
                         r"/(.+)\.jar",
                         stdout_apk, re.S | re.M)
        if not m:
            raise BuildException('Failed to find output')
        src = m.group(1)
        src = os.path.join(bindir, src) + '.apk'

    elif thisbuild['type'] == 'kivy':
        src = 'python-for-android/dist/default/bin/{0}-{1}-release.apk'.format(
            bconfig.get('app', 'title'), bconfig.get('app', 'version'))

    elif thisbuild['type'] == 'gradle':
        if thisbuild['gradlepluginver'] >= LooseVersion('0.11'):
            apks_dir = os.path.join(root_dir, 'build', 'outputs', 'apk')
        else:
            apks_dir = os.path.join(root_dir, 'build', 'apk')

        apks = glob.glob(os.path.join(apks_dir, '*-release-unsigned.apk'))
        if len(apks) > 1:
            raise BuildException('More than one resulting apks found in %s' % apks_dir,
                                 '\n'.join(apks))
        if len(apks) < 1:
            raise BuildException('Failed to find gradle output in %s' % apks_dir)
        src = apks[0]

    elif thisbuild['type'] == 'ant':
        stdout_apk = '\n'.join([
            line for line in p.output.splitlines() if '.apk' in line])
        src = re.match(r".*^.*Creating (.+) for release.*$.*", stdout_apk,
                       re.S | re.M).group(1)
        src = os.path.join(bindir, src)

    elif thisbuild['type'] == 'raw':
        src = os.path.join(root_dir, thisbuild['output'])
        src = os.path.normpath(src)

    # Make sure it's not debuggable...
    if common.isApkDebuggable(src, config):
        raise BuildException("APK is debuggable")

    # By way of a sanity check, make sure the version and version
    # code in our new apk match what we expect...
    logging.debug("Checking " + src)
    if not os.path.exists(src):
        raise BuildException("Unsigned apk is not at expected location of " + src)

    p = SilentPopen([config['aapt'], 'dump', 'badging', src])

    vercode = None
    version = None
    foundid = None
    nativecode = None
    for line in p.output.splitlines():
        if line.startswith("package:"):
            pat = re.compile(".*name='([a-zA-Z0-9._]*)'.*")
            m = pat.match(line)
            if m:
                foundid = m.group(1)
            pat = re.compile(".*versionCode='([0-9]*)'.*")
            m = pat.match(line)
            if m:
                vercode = m.group(1)
            pat = re.compile(".*versionName='([^']*)'.*")
            m = pat.match(line)
            if m:
                version = m.group(1)
        elif line.startswith("native-code:"):
            nativecode = line[12:]

    # Ignore empty strings or any kind of space/newline chars that we don't
    # care about
    if nativecode is not None:
        nativecode = nativecode.strip()
        nativecode = None if not nativecode else nativecode

    if thisbuild['buildjni'] and thisbuild['buildjni'] != ['no']:
        if nativecode is None:
            raise BuildException("Native code should have been built but none was packaged")

    if thisbuild['novcheck']:
        vercode = thisbuild['vercode']
        version = thisbuild['version']
    if not version or not vercode:
        raise BuildException("Could not find version information in build output")
    if not foundid:
        raise BuildException("Could not find package ID in output")
    if foundid != app['id']:
        raise BuildException("Wrong package ID - build " + foundid +
                             " but expected " + app['id'])

    # Some apps (e.g. Timeriffic) have had the bonkers idea of
    # including the entire changelog in the version number. Remove
    # it so we can compare. (TODO: might be better to remove it
    # before we compile, in fact)
    index = version.find(" //")
    if index != -1:
        version = version[:index]

    if (version != thisbuild['version'] or
            vercode != thisbuild['vercode']):
        raise BuildException(("Unexpected version/version code in output;"
                              " APK: '%s' / '%s', "
                              " Expected: '%s' / '%s'")
                             % (version, str(vercode), thisbuild['version'],
                                str(thisbuild['vercode'])))

    # Copy the unsigned apk to our destination directory for further
    # processing (by publish.py)...
    dest = os.path.join(output_dir, common.getapkname(app, thisbuild))
    shutil.copyfile(src, dest)

    # Move the source tarball into the output directory...
    if output_dir != tmp_dir and not options.notarball:
        shutil.move(os.path.join(tmp_dir, tarname),
                    os.path.join(output_dir, tarname))

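# The badging check in isolation (data illustrative): `aapt dump badging`
# emits a "package:" line, and the version sanity check above just regexes
# name/versionCode/versionName out of it:
#
#   line = "package: name='org.example.app' versionCode='42' versionName='1.2'"
#   re.compile(".*versionCode='([0-9]*)'.*").match(line).group(1)  # -> '42'
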
def build_server(app, thisbuild, vcs, build_dir, output_dir, force):
    """Do a build on the build server."""

    try:
        paramiko
    except NameError:
        raise BuildException("Paramiko is required to use the buildserver")
    if options.verbose:
        logging.getLogger("paramiko").setLevel(logging.DEBUG)
    else:
        logging.getLogger("paramiko").setLevel(logging.WARN)

    sshinfo = get_clean_vm()

    try:
        # Open SSH connection...
        logging.info("Connecting to virtual machine...")
        sshs = paramiko.SSHClient()
        sshs.set_missing_host_key_policy(paramiko.AutoAddPolicy())
        sshs.connect(sshinfo['hostname'], username=sshinfo['user'],
                     port=sshinfo['port'], timeout=300,
                     look_for_keys=False, key_filename=sshinfo['idfile'])

        homedir = '/home/' + sshinfo['user']

        # Get an SFTP connection...
        ftp = sshs.open_sftp()
        ftp.get_channel().settimeout(15)

        # Put all the necessary files in place...
        ftp.chdir(homedir)

        # Helper to copy the contents of a directory to the server...
        def send_dir(path):
            root = os.path.dirname(path)
            main = os.path.basename(path)
            ftp.mkdir(main)
            for r, d, f in os.walk(path):
                rr = os.path.relpath(r, root)
                ftp.chdir(rr)
                for dd in d:
                    ftp.mkdir(dd)
                for ff in f:
                    lfile = os.path.join(root, rr, ff)
                    if not os.path.islink(lfile):
                        ftp.put(lfile, ff)
                        ftp.chmod(ff, os.stat(lfile).st_mode)
                for i in range(len(rr.split('/'))):
                    ftp.chdir('..')
            ftp.chdir('..')

        logging.info("Preparing server for build...")
        serverpath = os.path.abspath(os.path.dirname(__file__))
        ftp.put(os.path.join(serverpath, 'build.py'), 'build.py')
        ftp.put(os.path.join(serverpath, 'common.py'), 'common.py')
        ftp.put(os.path.join(serverpath, 'metadata.py'), 'metadata.py')
        ftp.put(os.path.join(serverpath, '..', 'buildserver',
                             'config.buildserver.py'), 'config.py')
        ftp.chmod('config.py', 0o600)

        # Copy over the ID (head commit hash) of the fdroidserver in use...
        subprocess.call('git rev-parse HEAD >' +
                        os.path.join(os.getcwd(), 'tmp', 'fdroidserverid'),
                        shell=True, cwd=serverpath)
        ftp.put('tmp/fdroidserverid', 'fdroidserverid')

        # Copy the metadata - just the file for this app...
        ftp.mkdir('metadata')
        ftp.mkdir('srclibs')
        ftp.chdir('metadata')
        ftp.put(os.path.join('metadata', app['id'] + '.txt'),
                app['id'] + '.txt')
        # And patches if there are any...
        if os.path.exists(os.path.join('metadata', app['id'])):
            send_dir(os.path.join('metadata', app['id']))

        ftp.chdir(homedir)
        # Create the build directory...
        ftp.mkdir('build')
        ftp.chdir('build')
        ftp.mkdir('extlib')
        ftp.mkdir('srclib')
        # Copy any extlibs that are required...
        if thisbuild['extlibs']:
            ftp.chdir(homedir + '/build/extlib')
            for lib in thisbuild['extlibs']:
                lib = lib.strip()
                libsrc = os.path.join('build/extlib', lib)
                if not os.path.exists(libsrc):
                    raise BuildException("Missing extlib {0}".format(libsrc))
                lp = lib.split('/')
                for d in lp[:-1]:
                    if d not in ftp.listdir():
                        ftp.mkdir(d)
                    ftp.chdir(d)
                ftp.put(libsrc, lp[-1])
                for _ in lp[:-1]:
                    ftp.chdir('..')
        # Copy any srclibs that are required...
        srclibpaths = []
        if thisbuild['srclibs']:
            for lib in thisbuild['srclibs']:
                srclibpaths.append(
                    common.getsrclib(lib, 'build/srclib', srclibpaths,
                                     basepath=True, prepare=False))

        # If one was used for the main source, add that too.
        basesrclib = vcs.getsrclib()
        if basesrclib:
            srclibpaths.append(basesrclib)
        for name, number, lib in srclibpaths:
            logging.info("Sending srclib '%s'" % lib)
            ftp.chdir(homedir + '/build/srclib')
            if not os.path.exists(lib):
                raise BuildException("Missing srclib directory '" + lib + "'")
            fv = '.fdroidvcs-' + name
            ftp.put(os.path.join('build/srclib', fv), fv)
            send_dir(lib)
            # Copy the metadata file too...
            ftp.chdir(homedir + '/srclibs')
            ftp.put(os.path.join('srclibs', name + '.txt'),
                    name + '.txt')
        # Copy the main app source code
        # (no need if it's a srclib)
        if (not basesrclib) and os.path.exists(build_dir):
            ftp.chdir(homedir + '/build')
            fv = '.fdroidvcs-' + app['id']
            ftp.put(os.path.join('build', fv), fv)
            send_dir(build_dir)

        # Execute the build script...
        logging.info("Starting build...")
        chan = sshs.get_transport().open_session()
        chan.get_pty()
        cmdline = 'python build.py --on-server'
        if force:
            cmdline += ' --force --test'
        if options.verbose:
            cmdline += ' --verbose'
        cmdline += " %s:%s" % (app['id'], thisbuild['vercode'])
        chan.exec_command('bash -c ". ~/.bsenv && ' + cmdline + '"')
        output = ''
        while not chan.exit_status_ready():
            while chan.recv_ready():
                output += chan.recv(1024)
            time.sleep(0.1)
        logging.info("...getting exit status")
        returncode = chan.recv_exit_status()
        while True:
            get = chan.recv(1024)
            if len(get) == 0:
                break
            output += get
        if returncode != 0:
            raise BuildException("Build.py failed on server for {0}:{1}".format(
                app['id'], thisbuild['version']), output)

        # Retrieve the built files...
        logging.info("Retrieving build output...")
        if force:
            ftp.chdir(homedir + '/tmp')
        else:
            ftp.chdir(homedir + '/unsigned')
        apkfile = common.getapkname(app, thisbuild)
        tarball = common.getsrcname(app, thisbuild)
        try:
            ftp.get(apkfile, os.path.join(output_dir, apkfile))
            if not options.notarball:
                ftp.get(tarball, os.path.join(output_dir, tarball))
        except:
            raise BuildException("Build failed for {0}:{1} - missing output files".format(
                app['id'], thisbuild['version']), output)
        ftp.close()

    finally:
        # Suspend the build server.
        release_vm()

def get_clean_vm(reset=False):
    """Get a clean VM ready to do a buildserver build.

    This might involve creating and starting a new virtual machine from
    scratch, or it might be as simple (unless overridden by the reset
    parameter) as re-using a snapshot created previously.

    A BuildException will be raised if anything goes wrong.

    :reset: True to force creating from scratch.
    :returns: A dictionary containing 'hostname', 'port', 'user' and 'idfile'
    """
    # Reset existing builder machine to a clean state if possible.
    vm_ok = False
    if not reset:
        logging.info("Checking for valid existing build server")

        if got_valid_builder_vm():
            logging.info("...VM is present")
            p = FDroidPopen(['VBoxManage', 'snapshot', get_builder_vm_id(),
                             'list', '--details'], cwd='builder')
            if 'fdroidclean' in p.output:
                logging.info("...snapshot exists - resetting build server to "
                             "clean state")
                retcode, output = vagrant(['status'], cwd='builder')

                if 'running' in output:
                    logging.info("...suspending")
                    vagrant(['suspend'], cwd='builder')
                    logging.info("...waiting a sec...")
                    time.sleep(10)
                p = FDroidPopen(['VBoxManage', 'snapshot', get_builder_vm_id(),
                                 'restore', 'fdroidclean'], cwd='builder')

                if p.returncode == 0:
                    logging.info("...reset to snapshot - server is valid")
                    retcode, output = vagrant(['up'], cwd='builder')
                    if retcode != 0:
                        raise BuildException("Failed to start build server")
                    logging.info("...waiting a sec...")
                    time.sleep(10)
                    sshinfo = get_vagrant_sshinfo()
                    vm_ok = True
                else:
                    logging.info("...failed to reset to snapshot")
            else:
                logging.info("...snapshot doesn't exist - "
                             "VBoxManage snapshot list:\n" + p.output)

    # If we can't use the existing machine for any reason, make a
    # new one from scratch.
    if not vm_ok:
        if os.path.exists('builder'):
            logging.info("Removing broken/incomplete/unwanted build server")
            vagrant(['destroy', '-f'], cwd='builder')
            shutil.rmtree('builder')
        os.mkdir('builder')

        p = subprocess.Popen('vagrant --version', shell=True,
                             stdout=subprocess.PIPE)
        vver = p.communicate()[0]
        if vver.startswith('Vagrant version 1.2'):
            with open('builder/Vagrantfile', 'w') as vf:
                vf.write('Vagrant.configure("2") do |config|\n')
                vf.write('config.vm.box = "buildserver"\n')
                vf.write('end\n')
        else:
            with open('builder/Vagrantfile', 'w') as vf:
                vf.write('Vagrant::Config.run do |config|\n')
                vf.write('config.vm.box = "buildserver"\n')
                vf.write('end\n')

        logging.info("Starting new build server")
        retcode, _ = vagrant(['up'], cwd='builder')
        if retcode != 0:
            raise BuildException("Failed to start build server")

        # Open SSH connection to make sure it's working and ready...
        logging.info("Connecting to virtual machine...")
        sshinfo = get_vagrant_sshinfo()
        sshs = paramiko.SSHClient()
        sshs.set_missing_host_key_policy(paramiko.AutoAddPolicy())
        sshs.connect(sshinfo['hostname'], username=sshinfo['user'],
                     port=sshinfo['port'], timeout=300,
                     look_for_keys=False, key_filename=sshinfo['idfile'])
        sshs.close()

        logging.info("Saving clean state of new build server")
        retcode, _ = vagrant(['suspend'], cwd='builder')
        if retcode != 0:
            raise BuildException("Failed to suspend build server")
        logging.info("...waiting a sec...")
        time.sleep(10)
        p = FDroidPopen(['VBoxManage', 'snapshot', get_builder_vm_id(),
                         'take', 'fdroidclean'], cwd='builder')
        if p.returncode != 0:
            raise BuildException("Failed to take snapshot")
        logging.info("...waiting a sec...")
        time.sleep(10)
        logging.info("Restarting new build server")
        retcode, _ = vagrant(['up'], cwd='builder')
        if retcode != 0:
            raise BuildException("Failed to start build server")
        logging.info("...waiting a sec...")
        time.sleep(10)

        # Make sure it worked...
        p = FDroidPopen(['VBoxManage', 'snapshot', get_builder_vm_id(),
                         'list', '--details'], cwd='builder')
        if 'fdroidclean' not in p.output:
            raise BuildException("Failed to take snapshot.")

    return sshinfo

def document_repo(workspace, docspace, ros_distro, repo,
                  platform, arch, homepage, no_chroot, skip_garbage,
                  doc_conf, depends_conf, tags_db):
    doc_job = "doc-%s-%s" % (ros_distro, repo)

    #Get the list of repositories that should have documentation run on them
    #These are all of the repos that are not in the depends rosinstall file
    repos_to_doc = get_repositories_from_rosinstall(doc_conf)

    repo_path = os.path.realpath("%s" % (docspace))
    print("Repo path %s" % repo_path)

    #Walk through the installed repositories and find old-style packages, new-style packages, and stacks
    stacks, manifest_packages, catkin_packages, repo_map = build_repo_structure(repo_path, doc_conf, depends_conf)
    if ros_distro == 'indigo':
        if stacks or manifest_packages:
            print("Ignoring dry packages and stacks in '%s'" % ros_distro)
            stacks = {}
            manifest_packages = {}
        if not catkin_packages:
            raise BuildException('No catkin packages found')
    print("Running documentation generation on\npackages: %s" % (manifest_packages.keys() + catkin_packages.keys()))
    #print "Catkin packages: %s" % catkin_packages
    #print "Manifest packages: %s" % manifest_packages
    #print "Stacks: %s" % stacks

    #Get any non local apt dependencies
    ros_dep = rosdep.RosDepResolver(ros_distro, no_chroot=no_chroot)
    import rosdistro
    if ros_distro == 'electric':
        apt = rosdistro.AptDistro(platform, arch, shadow=False)
    else:
        apt = rosdistro.AptDistro(platform, arch, shadow=True)
    apt_deps = get_apt_deps(apt, ros_dep, ros_distro, catkin_packages, stacks, manifest_packages)
    print("Apt dependencies: %s" % apt_deps)

    #Get rosdistro release file if there are catkin packages to get status
    if catkin_packages and ros_distro not in ['electric', 'fuerte']:
        print("Fetch rosdistro files for: %s" % ros_distro)
        index = rosdistro.get_index(rosdistro.get_index_url())
        rosdistro_release_file = rosdistro.get_release_file(index, ros_distro)
        rosdistro_source_file = rosdistro.get_source_file(index, ros_distro)
    else:
        rosdistro_release_file = None
        rosdistro_source_file = None

    #Build a local dependency graph to be used for build order
    local_dep_graph = build_local_dependency_graph(catkin_packages, manifest_packages)

    doc_path = os.path.realpath("%s/doc/%s" % (docspace, ros_distro))
    if os.path.exists(doc_path):
        shutil.rmtree(doc_path)

    #Write stack manifest files for all stacks, we can just do this off the
    #stack.xml files
    write_stack_manifests(stacks, docspace, ros_distro, repo_map, tags_db, doc_job, homepage)

    #Need to make sure to re-order packages to be run in dependency order
    build_order = get_dependency_build_order(local_dep_graph)
    print("Build order that honors deps:\n%s" % build_order)

    #We'll need the full list of apt_deps to get tag files
    full_apt_deps = get_full_apt_deps(apt_deps, apt)

    if not no_chroot:
        print("Installing all dependencies for %s" % repo)

        # XXX this is a really ugly hack to make the hydro doc job for ros_comm pass
        # otherwise roslisp pulls in the rosgraph_msgs package as a Debian dependency
        # which then breaks catkin_basic since it includes the msgs CMake files multiple times
        # resulting in duplicate target names (https://github.com/ros/ros_comm/issues/471)
        if repo == 'ros_comm' and 'ros-hydro-roslisp' in apt_deps:
            apt_deps.remove('ros-hydro-roslisp')

        if apt_deps:
            call("apt-get install %s --yes" % (' '.join(apt_deps)))
        print("Done installing dependencies")

    #Set up the list of things that need to be sourced to run rosdoc_lite
    #TODO: Hack for electric
    if ros_distro == 'electric':
        #lucid doesn't have /usr/local on the path by default... weird
        sources = ['export PATH=/usr/local/sbin:/usr/local/bin:$PATH']
        sources.append('source /opt/ros/fuerte/setup.bash')
        sources.append('export ROS_PACKAGE_PATH=/opt/ros/electric/stacks:$ROS_PACKAGE_PATH')
    else:
        sources = ['source /opt/ros/%s/setup.bash' % ros_distro]

    #We assume that there will be no build errors to start
    build_errors = []

    #Everything that is after fuerte supports catkin workspaces, so everything
    #that has packages with package.xml files
    local_install_path = os.path.join(docspace, 'local_installs')
    if os.path.exists(local_install_path):
        shutil.rmtree(local_install_path)

    #Make sure to create some subfolders under the local install path
    def makedirs(path):
        if not os.path.exists(path):
            os.makedirs(path)

    makedirs(os.path.join(local_install_path, 'bin'))
    makedirs(os.path.join(local_install_path, 'lib/python2.7/dist-packages'))
    makedirs(os.path.join(local_install_path, 'share'))

    if catkin_packages \
       and not 'rosdoc_lite' in catkin_packages.keys() and not 'catkin' in catkin_packages.keys():
        source, errs = build_repo_messages(catkin_packages, docspace, ros_distro, local_install_path)
        build_errors.extend(errs)
        if source:
            sources.append(source)

    #For fuerte catkin, we need to check if we should build catkin stacks
    source, errs = build_repo_messages_catkin_stacks(stacks, ros_distro, local_install_path)
    build_errors.extend(errs)
    sources.append(source)

    #For all our manifest packages (dry or fuerte catkin) we want to build
    #messages. Note, for fuerte catkin, we have to build all the code and
    #install locally to get message generation
    source, errs = build_repo_messages_manifest(manifest_packages, build_order, ros_distro)
    build_errors.extend(errs)
    sources.append(source)

    #We want to pull all the tagfiles available once from the server
    tags_location = os.path.join(workspace, ros_distro)
    if os.path.exists(tags_location):
        shutil.rmtree(tags_location)
    command = ['bash', '-c',
               'rsync -e "ssh -o StrictHostKeyChecking=no" -qrz [email protected]:/home/rosbot/docs/%s/tags %s'
               % (ros_distro, tags_location)]
    call_with_list(command)

    repo_tags = document_packages(manifest_packages, catkin_packages, build_order,
                                  repos_to_doc, sources, tags_db, full_apt_deps,
                                  ros_dep, repo_map, repo_path, docspace, ros_distro,
                                  homepage, doc_job, tags_location, doc_path,
                                  rosdistro_release_file, rosdistro_source_file)

    #Copy the files to the appropriate place
    folders = sorted(set(stacks.keys() + manifest_packages.keys() + catkin_packages.keys()))
    if folders:
        dsts = ['%s/api/%s' % (doc_path, f) for f in folders]
        for dst in dsts:
            with open(os.path.join(dst, 'stamp'), 'w'):
                pass
        command = ['bash', '-c',
                   'rsync -e "ssh -o StrictHostKeyChecking=no" -qr --delete %s [email protected]:/home/rosbot/docs/%s/api'
                   % (' '.join(dsts), ros_distro)]
        call_with_list(command)
    folders = ['%s/changelogs' % doc_path, '%s/tags' % doc_path]
    folders = [f for f in folders if os.path.exists(f)]
    if folders:
        command = ['bash', '-c',
                   'rsync -e "ssh -o StrictHostKeyChecking=no" -qr %s [email protected]:/home/rosbot/docs/%s'
                   % (' '.join(folders), ros_distro)]
        call_with_list(command)

    if not skip_garbage:
        #Remove the autogenerated doc files since they take up a lot of space if left on the server
        shutil.rmtree(tags_location)
        shutil.rmtree(doc_path)

    #Write the new tags to the database if there are any to write
    for name, tags in repo_tags.iteritems():
        #Get the apt name of the current stack/repo
        if ros_dep.has_ros(name):
            deb_name = ros_dep.to_apt(name)[0]
        else:
            deb_name = "ros-%s-%s" % (ros_distro, name.replace('_', '-'))

        #We only want to write tags for packages that have a valid deb name
        #For others, the only way to get cross referencing is to document everything
        #together with a rosinstall file
        if apt.has_package(deb_name):
            tags_db.set_tags(deb_name, tags)

    #Make sure to write changes to tag files and deps
    #We don't want to write hashes on an unsuccessful build
    excludes = ['rosinstall_hashes'] if build_errors else []
    tags_db.commit_db(excludes)
    tags_db.delete_tag_index_repo()

    #Tell jenkins that we've succeeded
    print("Preparing xml test results")
    try:
        os.makedirs(os.path.join(workspace, 'test_results'))
        print("Created test results directory")
    except Exception:
        pass

    if build_errors:
        import yaml
        copy_test_results(workspace, docspace,
                          """Failed to generate messages by calling cmake for %s.
Look in the console for cmake failures, search for "CMake Error"

Also, are you sure that the rosinstall files are pulling from the right branch for %s?
Check the repos below; you can update information in the %s.rosinstall and
%s-depends.rosinstall files by submitting a pull request at
https://github.com/ros/rosdistro/%s

Documentation rosinstall:\n%s

Depends rosinstall:\n%s""" % (build_errors,
                              ros_distro,
                              repo,
                              repo,
                              ros_distro,
                              yaml.safe_dump(doc_conf, default_flow_style=False),
                              yaml.safe_dump(depends_conf, default_flow_style=False)),
                          "message_generation_failure")
    else:
        copy_test_results(workspace, docspace)

def main():

    #Read configuration...
    execfile('config.py', globals())

    # Parse command line...
    parser = OptionParser()
    parser.add_option("-v", "--verbose", action="store_true", default=False,
                      help="Spew out even more information than normal")
    parser.add_option("-p", "--package", default=None,
                      help="Publish only the specified package")
    (options, args) = parser.parse_args()

    log_dir = 'logs'
    if not os.path.isdir(log_dir):
        print "Creating log directory"
        os.makedirs(log_dir)

    tmp_dir = 'tmp'
    if not os.path.isdir(tmp_dir):
        print "Creating temporary directory"
        os.makedirs(tmp_dir)

    output_dir = 'repo'
    if not os.path.isdir(output_dir):
        print "Creating output directory"
        os.makedirs(output_dir)

    unsigned_dir = 'unsigned'
    if not os.path.isdir(unsigned_dir):
        print "No unsigned directory - nothing to do"
        sys.exit(0)

    for apkfile in sorted(glob.glob(os.path.join(unsigned_dir, '*.apk'))):

        apkfilename = os.path.basename(apkfile)
        i = apkfilename.rfind('_')
        if i == -1:
            raise BuildException("Invalid apk name")
        appid = apkfilename[:i]
        print "Processing " + appid

        if not options.package or options.package == appid:

            # Figure out the key alias name we'll use. Only the first 8
            # characters are significant, so we'll use the first 8 from
            # the MD5 of the app's ID and hope there are no collisions.
            # If a collision does occur later, we're going to have to
            # come up with a new algorithm, AND rename all existing keys
            # in the keystore!
            if appid in keyaliases:
                # For this particular app, the key alias is overridden...
                keyalias = keyaliases[appid]
                if keyalias.startswith('@'):
                    m = md5.new()
                    m.update(keyalias[1:])
                    keyalias = m.hexdigest()[:8]
            else:
                m = md5.new()
                m.update(appid)
                keyalias = m.hexdigest()[:8]
            print "Key alias: " + keyalias

            # See if we already have a key for this application, and
            # if not generate one...
            p = subprocess.Popen(['keytool', '-list',
                                  '-alias', keyalias, '-keystore', keystore,
                                  '-storepass', keystorepass],
                                 stdout=subprocess.PIPE)
            output = p.communicate()[0]
            if p.returncode != 0:
                print "Key does not exist - generating..."
                p = subprocess.Popen(['keytool', '-genkey',
                                      '-keystore', keystore, '-alias', keyalias,
                                      '-keyalg', 'RSA', '-keysize', '2048',
                                      '-validity', '10000',
                                      '-storepass', keystorepass,
                                      '-keypass', keypass,
                                      '-dname', keydname],
                                     stdout=subprocess.PIPE)
                output = p.communicate()[0]
                print output
                if p.returncode != 0:
                    raise BuildException("Failed to generate key")

            # Sign the application...
            p = subprocess.Popen(['jarsigner', '-keystore', keystore,
                                  '-storepass', keystorepass,
                                  '-keypass', keypass,
                                  '-sigalg', 'MD5withRSA', '-digestalg', 'SHA1',
                                  apkfile, keyalias],
                                 stdout=subprocess.PIPE)
            output = p.communicate()[0]
            print output
            if p.returncode != 0:
                raise BuildException("Failed to sign application")

            # Zipalign it...
            p = subprocess.Popen([os.path.join(sdk_path, 'tools', 'zipalign'),
                                  '-v', '4', apkfile,
                                  os.path.join(output_dir, apkfilename)],
                                 stdout=subprocess.PIPE)
            output = p.communicate()[0]
            print output
            if p.returncode != 0:
                raise BuildException("Failed to align application")
            os.remove(apkfile)

            # Move the source tarball into the output directory...
            tarfilename = apkfilename[:-4] + '_src.tar.gz'
            shutil.move(os.path.join(unsigned_dir, tarfilename),
                        os.path.join(output_dir, tarfilename))

            print 'Published ' + apkfilename

def main():

    global config, options

    # Parse command line...
    parser = OptionParser(usage="Usage: %prog [options] "
                          "[APPID[:VERCODE] [APPID[:VERCODE] ...]]")
    parser.add_option("-v", "--verbose", action="store_true", default=False,
                      help="Spew out even more information than normal")
    parser.add_option("-q", "--quiet", action="store_true", default=False,
                      help="Restrict output to warnings and errors")
    (options, args) = parser.parse_args()

    config = common.read_config(options)

    log_dir = 'logs'
    if not os.path.isdir(log_dir):
        logging.info("Creating log directory")
        os.makedirs(log_dir)

    tmp_dir = 'tmp'
    if not os.path.isdir(tmp_dir):
        logging.info("Creating temporary directory")
        os.makedirs(tmp_dir)

    output_dir = 'repo'
    if not os.path.isdir(output_dir):
        logging.info("Creating output directory")
        os.makedirs(output_dir)

    unsigned_dir = 'unsigned'
    if not os.path.isdir(unsigned_dir):
        logging.warning("No unsigned directory - nothing to do")
        sys.exit(1)

    for f in [config['keystorepassfile'],
              config['keystore'],
              config['keypassfile']]:
        if not os.path.exists(f):
            logging.error("Config error - missing '{0}'".format(f))
            sys.exit(1)

    # It was suggested at
    # https://dev.guardianproject.info/projects/bazaar/wiki/FDroid_Audit
    # that a package could be crafted, such that it would use the same signing
    # key as an existing app. While it may be theoretically possible for such a
    # colliding package ID to be generated, it seems virtually impossible that
    # the colliding ID would be something that would be a) a valid package ID,
    # and b) a sane-looking ID that would make its way into the repo.
    # Nonetheless, to be sure, before publishing we check that there are no
    # collisions, and refuse to do any publishing if that's the case...
    allapps = metadata.read_metadata()
    vercodes = common.read_pkg_args(args, True)
    allaliases = []
    for appid in allapps:
        m = md5.new()
        m.update(appid)
        keyalias = m.hexdigest()[:8]
        if keyalias in allaliases:
            logging.error("There is a keyalias collision - publishing halted")
            sys.exit(1)
        allaliases.append(keyalias)
    logging.info("{0} apps, {1} key aliases".format(len(allapps),
                                                    len(allaliases)))

    # Process any apks that are waiting to be signed...
    for apkfile in sorted(glob.glob(os.path.join(unsigned_dir, '*.apk'))):

        appid, vercode = common.apknameinfo(apkfile)
        apkfilename = os.path.basename(apkfile)
        if vercodes and appid not in vercodes:
            continue
        if appid in vercodes and vercodes[appid]:
            if vercode not in vercodes[appid]:
                continue
        logging.info("Processing " + apkfile)

        # There ought to be valid metadata for this app, otherwise why are we
        # trying to publish it?
        if appid not in allapps:
            logging.error("Unexpected {0} found in unsigned directory".format(apkfilename))
            sys.exit(1)
        app = allapps[appid]

        if app.get('Binaries', None):

            # It's an app where we build from source, and verify the apk
            # contents against a developer's binary, and then publish their
            # version if everything checks out.
            # The binary should already have been retrieved during the build
            # process.
            srcapk = apkfile + ".binary"

            # Compare our unsigned one with the downloaded one...
            compare_result = common.verify_apks(srcapk, apkfile, tmp_dir)
            if compare_result:
                logging.error("...verification failed - publish skipped : "
                              + compare_result)
                continue

            # Success! So move the downloaded file to the repo, and remove
            # our built version.
            shutil.move(srcapk, os.path.join(output_dir, apkfilename))
            os.remove(apkfile)

        else:

            # It's a 'normal' app, i.e. we sign and publish it...

            # Figure out the key alias name we'll use. Only the first 8
            # characters are significant, so we'll use the first 8 from
            # the MD5 of the app's ID and hope there are no collisions.
            # If a collision does occur later, we're going to have to
            # come up with a new algorithm, AND rename all existing keys
            # in the keystore!
            if appid in config['keyaliases']:
                # For this particular app, the key alias is overridden...
                keyalias = config['keyaliases'][appid]
                if keyalias.startswith('@'):
                    m = md5.new()
                    m.update(keyalias[1:])
                    keyalias = m.hexdigest()[:8]
            else:
                m = md5.new()
                m.update(appid)
                keyalias = m.hexdigest()[:8]
            logging.info("Key alias: " + keyalias)

            # See if we already have a key for this application, and
            # if not generate one...
            p = FDroidPopen(['keytool', '-list',
                             '-alias', keyalias, '-keystore', config['keystore'],
                             '-storepass:file', config['keystorepassfile']])
            if p.returncode != 0:
                logging.info("Key does not exist - generating...")
                p = FDroidPopen(['keytool', '-genkey',
                                 '-keystore', config['keystore'],
                                 '-alias', keyalias,
                                 '-keyalg', 'RSA', '-keysize', '2048',
                                 '-validity', '10000',
                                 '-storepass:file', config['keystorepassfile'],
                                 '-keypass:file', config['keypassfile'],
                                 '-dname', config['keydname']])
                # TODO keypass should be sent via stdin
                if p.returncode != 0:
                    raise BuildException("Failed to generate key")

            # Sign the application...
            p = FDroidPopen(['jarsigner', '-keystore', config['keystore'],
                             '-storepass:file', config['keystorepassfile'],
                             '-keypass:file', config['keypassfile'],
                             '-sigalg', 'MD5withRSA', '-digestalg', 'SHA1',
                             apkfile, keyalias])
            # TODO keypass should be sent via stdin
            if p.returncode != 0:
                raise BuildException("Failed to sign application")

            # Zipalign it...
            p = SdkToolsPopen(['zipalign', '-v', '4', apkfile,
                               os.path.join(output_dir, apkfilename)])
            if p.returncode != 0:
                raise BuildException("Failed to align application")
            os.remove(apkfile)

        # Move the source tarball into the output directory...
        tarfilename = apkfilename[:-4] + '_src.tar.gz'
        tarfile = os.path.join(unsigned_dir, tarfilename)
        if os.path.exists(tarfile):
            shutil.move(tarfile, os.path.join(output_dir, tarfilename))

        logging.info('Published ' + apkfilename)

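# The alias derivation in isolation (app ID illustrative): keytool only
# treats the first 8 characters of an alias as significant, hence the
# truncated MD5 digest used above.
#
#   m = md5.new()
#   m.update('org.example.app')
#   keyalias = m.hexdigest()[:8]
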
def build_server(app, thisbuild, vcs, build_dir, output_dir, sdk_path, force):
    """Do a build on the build server."""

    import ssh

    # Reset existing builder machine to a clean state if possible.
    vm_ok = False
    if not options.resetserver:
        print "Checking for valid existing build server"
        if (os.path.exists(os.path.join('builder', 'Vagrantfile'))
                and os.path.exists(os.path.join('builder', '.vagrant'))):
            print "...VM is present"
            p = subprocess.Popen(['VBoxManage', 'snapshot', get_builder_vm_id(),
                                  'list', '--details'],
                                 cwd='builder', stdout=subprocess.PIPE)
            output = p.communicate()[0]
            if output.find('fdroidclean') != -1:
                print "...snapshot exists - resetting build server to clean state"
                p = subprocess.Popen(['vagrant', 'status'],
                                     cwd='builder', stdout=subprocess.PIPE)
                output = p.communicate()[0]
                if output.find('running') != -1:
                    print "...suspending"
                    subprocess.call(['vagrant', 'suspend'], cwd='builder')
                if subprocess.call(['VBoxManage', 'snapshot', get_builder_vm_id(),
                                    'restore', 'fdroidclean'],
                                   cwd='builder') == 0:
                    print "...reset to snapshot - server is valid"
                    if subprocess.call(['vagrant', 'up'], cwd='builder') != 0:
                        raise BuildException("Failed to start build server")
                    vm_ok = True
                else:
                    print "...failed to reset to snapshot"
            else:
                print "...snapshot doesn't exist - vagrant snap said:\n" + output

    # If we can't use the existing machine for any reason, make a
    # new one from scratch.
    if not vm_ok:
        if os.path.exists('builder'):
            print "Removing broken/incomplete/unwanted build server"
            subprocess.call(['vagrant', 'destroy', '-f'], cwd='builder')
            shutil.rmtree('builder')
        os.mkdir('builder')
        with open('builder/Vagrantfile', 'w') as vf:
            vf.write('Vagrant::Config.run do |config|\n')
            vf.write('config.vm.box = "buildserver"\n')
            vf.write('config.vm.customize ["modifyvm", :id, "--memory", "768"]\n')
            vf.write('end\n')
        print "Starting new build server"
        if subprocess.call(['vagrant', 'up'], cwd='builder') != 0:
            raise BuildException("Failed to start build server")

        # Open SSH connection to make sure it's working and ready...
        print "Connecting to virtual machine..."
        if subprocess.call('vagrant ssh-config >sshconfig',
                           cwd='builder', shell=True) != 0:
            raise BuildException("Error getting ssh config")
        vagranthost = 'default'  # Host in ssh config file
        sshconfig = ssh.SSHConfig()
        sshf = open('builder/sshconfig', 'r')
        sshconfig.parse(sshf)
        sshf.close()
        sshconfig = sshconfig.lookup(vagranthost)
        sshs = ssh.SSHClient()
        sshs.set_missing_host_key_policy(ssh.AutoAddPolicy())
        idfile = sshconfig['identityfile']
        if idfile.startswith('"') and idfile.endswith('"'):
            idfile = idfile[1:-1]
        sshs.connect(sshconfig['hostname'], username=sshconfig['user'],
                     port=int(sshconfig['port']), timeout=300,
                     look_for_keys=False, key_filename=idfile)
        sshs.close()

        print "Saving clean state of new build server"
        subprocess.call(['vagrant', 'suspend'], cwd='builder')
        if subprocess.call(['VBoxManage', 'snapshot', get_builder_vm_id(),
                            'take', 'fdroidclean'],
                           cwd='builder') != 0:
            raise BuildException("Failed to take snapshot")
        print "Restarting new build server"
        if subprocess.call(['vagrant', 'up'], cwd='builder') != 0:
            raise BuildException("Failed to start build server")

        # Make sure it worked...
        p = subprocess.Popen(['VBoxManage', 'snapshot', get_builder_vm_id(),
                              'list', '--details'],
                             cwd='builder', stdout=subprocess.PIPE)
        output = p.communicate()[0]
        if output.find('fdroidclean') == -1:
            raise BuildException("Failed to take snapshot.")

    try:

        # Get SSH configuration settings for us to connect...
        print "Getting ssh configuration..."
        subprocess.call('vagrant ssh-config >sshconfig',
                        cwd='builder', shell=True)
        vagranthost = 'default'  # Host in ssh config file

        # Load and parse the SSH config...
        sshconfig = ssh.SSHConfig()
        sshf = open('builder/sshconfig', 'r')
        sshconfig.parse(sshf)
        sshf.close()
        sshconfig = sshconfig.lookup(vagranthost)

        # Open SSH connection...
        print "Connecting to virtual machine..."
        sshs = ssh.SSHClient()
        sshs.set_missing_host_key_policy(ssh.AutoAddPolicy())
        idfile = sshconfig['identityfile']
        if idfile.startswith('"') and idfile.endswith('"'):
            idfile = idfile[1:-1]
        sshs.connect(sshconfig['hostname'], username=sshconfig['user'],
                     port=int(sshconfig['port']), timeout=300,
                     look_for_keys=False, key_filename=idfile)

        # Get an SFTP connection...
        ftp = sshs.open_sftp()
        ftp.get_channel().settimeout(15)

        # Put all the necessary files in place...
        ftp.chdir('/home/vagrant')

        # Helper to copy the contents of a directory to the server...
        def send_dir(path):
            root = os.path.dirname(path)
            main = os.path.basename(path)
            ftp.mkdir(main)
            for r, d, f in os.walk(path):
                rr = os.path.relpath(r, root)
                ftp.chdir(rr)
                for dd in d:
                    ftp.mkdir(dd)
                for ff in f:
                    if not os.path.islink(os.path.join(root, rr, ff)):
                        ftp.put(os.path.join(root, rr, ff), ff)
                for i in range(len(rr.split('/'))):
                    ftp.chdir('..')
            ftp.chdir('..')

        print "Preparing server for build..."
        serverpath = os.path.abspath(os.path.dirname(__file__))
        ftp.put(os.path.join(serverpath, 'build.py'), 'build.py')
        ftp.put(os.path.join(serverpath, 'common.py'), 'common.py')
        ftp.put(os.path.join(serverpath, '..', 'config.buildserver.py'),
                'config.py')

        # Copy the metadata - just the file for this app...
        ftp.mkdir('metadata')
        ftp.mkdir('srclibs')
        ftp.chdir('metadata')
        ftp.put(os.path.join('metadata', app['id'] + '.txt'),
                app['id'] + '.txt')
        # And patches if there are any...
        if os.path.exists(os.path.join('metadata', app['id'])):
            send_dir(os.path.join('metadata', app['id']))

        ftp.chdir('/home/vagrant')
        # Create the build directory...
        ftp.mkdir('build')
        ftp.chdir('build')
        ftp.mkdir('extlib')
        ftp.mkdir('srclib')
        # Copy the main app source code
        if os.path.exists(build_dir):
            send_dir(build_dir)
        # Copy any extlibs that are required...
        if 'extlibs' in thisbuild:
            ftp.chdir('/home/vagrant/build/extlib')
            for lib in thisbuild['extlibs'].split(';'):
                lp = lib.split('/')
                for d in lp[:-1]:
                    if d not in ftp.listdir():
                        ftp.mkdir(d)
                    ftp.chdir(d)
                ftp.put(os.path.join('build/extlib', lib), lp[-1])
                for _ in lp[:-1]:
                    ftp.chdir('..')
        # Copy any srclibs that are required...
        srclibpaths = []
        if 'srclibs' in thisbuild:
            for lib in thisbuild['srclibs'].split(';'):
                name, _ = lib.split('@')
                if options.verbose:
                    print "Processing srclib '" + name + "'"
                srclibpaths.append((name,
                                    common.getsrclib(lib, 'build/srclib', sdk_path,
                                                     basepath=True, prepare=False)))

        # If one was used for the main source, add that too.
        basesrclib = vcs.getsrclib()
        if basesrclib:
            srclibpaths.append(basesrclib)
        for name, lib in srclibpaths:
            print "Sending srclib '" + lib + "'"
            ftp.chdir('/home/vagrant/build/srclib')
            if not os.path.exists(lib):
                raise BuildException("Missing srclib directory '" + lib + "'")
            send_dir(lib)
            # Copy the metadata file too...
            ftp.chdir('/home/vagrant/srclibs')
            ftp.put(os.path.join('srclibs', name + '.txt'),
                    name + '.txt')

        # Execute the build script...
        print "Starting build..."
        chan = sshs.get_transport().open_session()
        cmdline = 'python build.py --on-server'
        if force:
            cmdline += ' --force --test'
        cmdline += ' -p ' + app['id'] + ' --vercode ' + thisbuild['vercode']
        chan.exec_command(cmdline)
        output = ''
        error = ''
        while not chan.exit_status_ready():
            while chan.recv_ready():
                output += chan.recv(1024)
            while chan.recv_stderr_ready():
                error += chan.recv_stderr(1024)
        print "...getting exit status"
        returncode = chan.recv_exit_status()
        while chan.recv_ready():
            output += chan.recv(1024)
        while chan.recv_stderr_ready():
            error += chan.recv_stderr(1024)
        if returncode != 0:
            raise BuildException("Build.py failed on server for %s:%s" %
                                 (app['id'], thisbuild['version']),
                                 output.strip(), error.strip())

        # Retrieve the built files...
        print "Retrieving build output..."
        if force:
            ftp.chdir('/home/vagrant/tmp')
        else:
            ftp.chdir('/home/vagrant/unsigned')
        apkfile = app['id'] + '_' + thisbuild['vercode'] + '.apk'
        tarball = app['id'] + '_' + thisbuild['vercode'] + '_src' + '.tar.gz'
        try:
            ftp.get(apkfile, os.path.join(output_dir, apkfile))
            ftp.get(tarball, os.path.join(output_dir, tarball))
        except:
            raise BuildException("Build failed for %s:%s" %
                                 (app['id'], thisbuild['version']),
                                 output.strip(), error.strip())
        ftp.close()

    finally:

        # Suspend the build server.
        print "Suspending build server"
        subprocess.call(['vagrant', 'suspend'], cwd='builder')

def build_local(app, thisbuild, vcs, build_dir, output_dir, srclib_dir,
                extlib_dir, tmp_dir, install, force, verbose, onserver):
    """Do a build locally."""

    # Prepare the source code...
    root_dir, srclibpaths = common.prepare_source(vcs, app, thisbuild,
                                                  build_dir, srclib_dir,
                                                  extlib_dir, sdk_path,
                                                  ndk_path, javacc_path,
                                                  mvn3, verbose, onserver)

    # Scan before building...
    buildprobs = common.scan_source(build_dir, root_dir, thisbuild)
    if len(buildprobs) > 0:
        print 'Scanner found ' + str(len(buildprobs)) + ' problems:'
        for problem in buildprobs:
            print '...' + problem
        if not force:
            raise BuildException("Can't build due to " +
                                 str(len(buildprobs)) + " scanned problems")

    # Build the source tarball right before we build the release...
    tarname = app['id'] + '_' + thisbuild['vercode'] + '_src'
    tarball = tarfile.open(os.path.join(tmp_dir, tarname + '.tar.gz'), "w:gz")

    def tarexc(f):
        for vcs_dir in ['.svn', '.git', '.hg', '.bzr']:
            if f.endswith(vcs_dir):
                return True
        return False
    tarball.add(build_dir, tarname, exclude=tarexc)
    tarball.close()

    # Run a build command if one is required...
    if 'build' in thisbuild:
        prebuild = thisbuild['build']
        # Substitute source library paths into prebuild commands...
        for name, libpath in srclibpaths:
            libpath = os.path.relpath(libpath, root_dir)
            prebuild = prebuild.replace('$$' + name + '$$', libpath)
        prebuild = prebuild.replace('$$SDK$$', sdk_path)
        prebuild = prebuild.replace('$$NDK$$', ndk_path)
        prebuild = prebuild.replace('$$MVN3$$', mvn3)
        p = subprocess.Popen(prebuild, cwd=root_dir, shell=True,
                             stdout=subprocess.PIPE, stderr=subprocess.PIPE)
        out, err = p.communicate()
        if p.returncode != 0:
            raise BuildException("Error running build command", out, err)

    # Build native stuff if required...
    if thisbuild.get('buildjni') not in (None, 'no'):
        jni_components = thisbuild.get('buildjni')
        if jni_components == 'yes':
            jni_components = ['']
        else:
            jni_components = jni_components.split(';')
        ndkbuild = os.path.join(ndk_path, "ndk-build")
        for d in jni_components:
            if verbose:
                print "Running ndk-build in " + root_dir + '/' + d
            manifest = root_dir + '/' + d + '/AndroidManifest.xml'
            if os.path.exists(manifest):
                # Read and write the whole AM.xml to fix newlines and avoid
                # the ndk r8c or later 'wordlist' errors. The outcome of this
                # under gnu/linux is the same as when using tools like
                # dos2unix, but the native python way is faster and will
                # work in non-unix systems.
                manifest_text = open(manifest, 'U').read()
                open(manifest, 'w').write(manifest_text)
                # In case the AM.xml read was big, free the memory
                del manifest_text
            p = subprocess.Popen([ndkbuild], cwd=root_dir + '/' + d,
                                 stdout=subprocess.PIPE)
            output = p.communicate()[0]
            if p.returncode != 0:
                print output
                raise BuildException("NDK build failed for %s:%s" %
                                     (app['id'], thisbuild['version']))

    # Build the release...
    if 'maven' in thisbuild:
        mvncmd = [mvn3, 'clean', 'package', '-Dandroid.sdk.path=' + sdk_path]
        if install:
            mvncmd += ['-Dandroid.sign.debug=true']
        else:
            mvncmd += ['-Dandroid.sign.debug=false', '-Dandroid.release=true']
        if 'mvnflags' in thisbuild:
            mvncmd += thisbuild['mvnflags']
        p = subprocess.Popen(mvncmd, cwd=root_dir,
                             stdout=subprocess.PIPE, stderr=subprocess.PIPE)
    else:
        if install:
            antcommands = ['debug', 'install']
        elif 'antcommand' in thisbuild:
            antcommands = [thisbuild['antcommand']]
        else:
            antcommands = ['release']
        p = subprocess.Popen(['ant'] + antcommands, cwd=root_dir,
                             stdout=subprocess.PIPE, stderr=subprocess.PIPE)
    output, error = p.communicate()
    if p.returncode != 0:
        raise BuildException("Build failed for %s:%s" %
                             (app['id'], thisbuild['version']),
                             output.strip(), error.strip())
    if verbose:
        print output
    if install:
        if 'maven' in thisbuild:
            p = subprocess.Popen([mvn3, 'android:deploy',
                                  '-Dandroid.sdk.path=' + sdk_path],
                                 cwd=root_dir,
                                 stdout=subprocess.PIPE, stderr=subprocess.PIPE)
            output, error = p.communicate()
            if p.returncode != 0:
                raise BuildException("Warning: Could not deploy %s:%s" %
                                     (app['id'], thisbuild['version']),
                                     output.strip(), error.strip())
        return
    print "Build successful"

    # Find the apk name in the output...
    if 'bindir' in thisbuild:
        bindir = os.path.join(build_dir, thisbuild['bindir'])
    else:
        bindir = os.path.join(root_dir, 'bin')
    if thisbuild.get('initfun', 'no') == "yes":
        # Special case (again!) for funambol...
        src = ("funambol-android-sync-client-" +
               thisbuild['version'] + "-unsigned.apk")
        src = os.path.join(bindir, src)
    elif 'maven' in thisbuild:
        m = re.match(r".*^\[INFO\] .*apkbuilder.*/([^/]*)\.apk",
                     output, re.S | re.M)
        if not m:
            m = re.match(r".*^\[INFO\] Creating additional unsigned apk file .*/([^/]+)\.apk",
                         output, re.S | re.M)
        if not m:
            # This format is found in com.github.mobile, com.yubico.yubitotp
            # and com.botbrew.basil for example...
            m = re.match(r".*^\[INFO\] [^$]*aapt \[package,[^$]*" +
                         app['id'] + "/" + thisbuild['bindir'] +
                         r"/([^/]+)\.ap[_k][,\]]",
                         output, re.S | re.M)
        if not m:
            print output
            raise BuildException('Failed to find output')
        src = m.group(1)
        src = os.path.join(bindir, src) + '.apk'
    else:
        src = re.match(r".*^.*Creating (.+) for release.*$.*",
                       output, re.S | re.M).group(1)
        src = os.path.join(bindir, src)

    # Make sure it's not debuggable...
    if not install and common.isApkDebuggable(src, sdk_path):
        raise BuildException("APK is debuggable")

    # By way of a sanity check, make sure the version and version
    # code in our new apk match what we expect...
    print "Checking " + src
    if not os.path.exists(src):
        raise BuildException("Unsigned apk is not at expected location of " + src)
    p = subprocess.Popen([os.path.join(sdk_path, 'platform-tools', 'aapt'),
                          'dump', 'badging', src],
                         stdout=subprocess.PIPE)
    output = p.communicate()[0]
    if thisbuild.get('novcheck', 'no') == "yes":
        vercode = thisbuild['vercode']
        version = thisbuild['version']
    else:
        vercode = None
        version = None
        foundid = None
        for line in output.splitlines():
            if line.startswith("package:"):
                pat = re.compile(".*name='([a-zA-Z0-9._]*)'.*")
                foundid = re.match(pat, line).group(1)
                pat = re.compile(".*versionCode='([0-9]*)'.*")
                vercode = re.match(pat, line).group(1)
                pat = re.compile(".*versionName='([^']*)'.*")
                version = re.match(pat, line).group(1)
        if not version or not vercode:
            raise BuildException("Could not find version information in build output")
        if not foundid:
            raise BuildException("Could not find package ID in output")
        if foundid != app['id']:
            raise BuildException("Wrong package ID - build " + foundid +
                                 " but expected " + app['id'])

    # Some apps (e.g. Timeriffic) have had the bonkers idea of
    # including the entire changelog in the version number. Remove
    # it so we can compare. (TODO: might be better to remove it
    # before we compile, in fact)
    index = version.find(" //")
    if index != -1:
        version = version[:index]

    if (version != thisbuild['version'] or
            vercode != thisbuild['vercode']):
        raise BuildException(("Unexpected version/version code in output;"
                              " APK: '%s' / '%s', "
                              " Expected: '%s' / '%s'")
                             % (version, str(vercode),
                                thisbuild['version'], str(thisbuild['vercode'])))

    # Copy the unsigned apk to our destination directory for further
    # processing (by publish.py)...
    dest = os.path.join(output_dir,
                        app['id'] + '_' + thisbuild['vercode'] + '.apk')
    shutil.copyfile(src, dest)

    # Move the source tarball into the output directory...
    if output_dir != tmp_dir:
        tarfilename = tarname + '.tar.gz'
        shutil.move(os.path.join(tmp_dir, tarfilename),
                    os.path.join(output_dir, tarfilename))
def _test_repositories(ros_distro, repo_list, version_list, workspace,
                       test_depends_on, repo_sourcespace,
                       dependson_sourcespace, repo_buildspace,
                       dependson_buildspace, sudo=False, no_chroot=False):
    append_pymodules_if_needed()
    from catkin_pkg.package import InvalidPackage, parse_package_string
    from rosdistro import get_cached_release, get_index, get_index_url, get_source_file
    from rosdistro.dependency_walker import DependencyWalker
    from rosdistro.manifest_provider import get_release_tag

    index = get_index(get_index_url())
    print("Parsing rosdistro file for %s" % ros_distro)
    release = get_cached_release(index, ros_distro)
    print("Parsing devel file for %s" % ros_distro)
    source_file = get_source_file(index, ros_distro)

    # Create rosdep object
    print("Create rosdep object")
    rosdep_resolver = rosdep.RosDepResolver(ros_distro, sudo, no_chroot)

    if repo_list:
        # download the repo_list from source
        print("Creating rosinstall file for repo list")
        rosinstall = ""
        for repo_name, version in zip(repo_list, version_list):
            if version == 'devel':
                if repo_name not in source_file.repositories:
                    raise BuildException("Repository %s does not exist in Devel Distro" % repo_name)
                print("Using devel distro file to download repositories")
                rosinstall += _generate_rosinstall_for_repo(source_file.repositories[repo_name])
            else:
                if repo_name not in release.repositories:
                    raise BuildException("Repository %s does not exist in Ros Distro" % repo_name)
                repo = release.repositories[repo_name]
                if version not in ['latest', 'master']:
                    assert repo.version is not None, 'Repository "%s" does not have a version set' % repo_name
                    assert 'release' in repo.tags, 'Repository "%s" does not have a "release" tag set' % repo_name
                for pkg_name in repo.package_names:
                    release_tag = get_release_tag(repo, pkg_name)
                    if version in ['latest', 'master']:
                        release_tag = '/'.join(release_tag.split('/')[:-1])
                    print('Using tag "%s" of release distro file to download package "%s" from repo "%s"' % (version, pkg_name, repo_name))
                    rosinstall += _generate_rosinstall_for_pkg_version(release.repositories[repo_name], pkg_name, release_tag)
        print("rosinstall file for all repositories: \n %s" % rosinstall)
        with open(os.path.join(workspace, "repo.rosinstall"), 'w') as f:
            f.write(rosinstall)
        print("Install repo list from source")
        os.makedirs(repo_sourcespace)
        call("rosinstall %s %s/repo.rosinstall --catkin" % (repo_sourcespace, workspace))
        extract_notification_recipients(repo_sourcespace)

    # get the repositories' build dependencies
    print("Get build dependencies of repo list")
    repo_build_dependencies = get_dependencies(repo_sourcespace, build_depends=True, run_depends=True)
    # ensure that catkin gets installed for non-catkin packages,
    # so that catkin_make_isolated is available
    if 'catkin' not in repo_build_dependencies:
        repo_build_dependencies.append('catkin')
    print("Install build dependencies of repo list: %s" % ', '.join(repo_build_dependencies))
    apt_get_install(repo_build_dependencies, rosdep_resolver, sudo)

    # replace the CMakeLists.txt file for repositories that use catkin
    root_cmakelists = os.path.join(repo_sourcespace, 'CMakeLists.txt')
    if os.path.exists(root_cmakelists):
        print("Removing the CMakeLists.txt file generated by rosinstall")
        os.remove(root_cmakelists)
    print("Create a new CMakeLists.txt file using catkin")

    # get environment
    ros_env = get_ros_env('/opt/ros/%s/setup.bash' % ros_distro)

    # check if the source workspace contains only packages built with catkin
    non_catkin_pkgs = _get_non_catkin_packages(repo_sourcespace)

    # make build folder and change into it
    os.makedirs(repo_buildspace)
    os.chdir(repo_buildspace)

    # make test results dir
    test_results_dir = os.path.join(workspace, 'test_results')
    if os.path.exists(test_results_dir):
        shutil.rmtree(test_results_dir)
    os.makedirs(test_results_dir)

    if not non_catkin_pkgs:
        print("Build catkin workspace")
        call("catkin_init_workspace %s" % repo_sourcespace, ros_env)
        repos_test_results_dir = os.path.join(test_results_dir, 'repos')
        call("cmake %s -DCATKIN_TEST_RESULTS_DIR=%s" % (repo_sourcespace, repos_test_results_dir), ros_env)
        #ros_env_repo = get_ros_env(os.path.join(repo_buildspace, 'devel/setup.bash'))

        # build repositories and tests
        print("Build repo list")
        call("make", ros_env)
        call("make tests", ros_env)

        # get the repositories' test and run dependencies
        print("Get run dependencies of repo list")
        repo_test_dependencies = get_dependencies(repo_sourcespace, build_depends=False, run_depends=True)
        print("Install run dependencies of repo list: %s" % ', '.join(repo_test_dependencies))
        apt_get_install(repo_test_dependencies, rosdep_resolver, sudo)

        # get environment after installing test and run dependencies
        ros_env = get_ros_env('/opt/ros/%s/setup.bash' % ros_distro)

        # run tests
        print("Test repo list")
        call("make run_tests", ros_env)

        # anything after this should build on this env
        ros_env = get_ros_env(os.path.join(repo_buildspace, 'devel/setup.bash'))
    else:
        print("Build workspace with non-catkin packages in isolation")
        # work around catkin_make_isolated issue (at least with version 0.5.65 of catkin)
        os.makedirs(os.path.join(repo_buildspace, 'devel_isolated'))
        call('catkin_make_isolated -C %s --source %s --install' % (repo_buildspace, repo_sourcespace), ros_env)
        setup_file = os.path.join(repo_buildspace, 'install_isolated', 'setup.sh')
        # anything after this should build on this env
        ros_env = get_ros_env(setup_file)

    # don't do depends-on testing for repositories that are not in the release
    not_in_release = set(repo_list) - set(release.repositories.keys())
    if not_in_release:
        print("Removed [%s] repositories which are not in the release file for depends-on testing" % ', '.join(sorted(not_in_release)))
        repo_list = list(set(repo_list) - not_in_release)

    # see if we need to do more work or not
    if not test_depends_on:
        print("We're not testing the depends-on repositories")
        ensure_test_results(test_results_dir)
        return

    # get repo_list depends-on list
    print("Get list of wet repositories that build-depend on repo list: %s" % ', '.join(repo_list))
    walker = DependencyWalker(release)
    depends_on = set([])
    try:
        for repo_name in repo_list:
            print('repo_name', repo_name)
            repo = release.repositories[repo_name]
            for pkg_name in repo.package_names:
                print('pkg_name', pkg_name)
                depends_on |= walker.get_recursive_depends_on(pkg_name, ['buildtool', 'build', 'test'], ignore_pkgs=depends_on)
                print('depends_on', depends_on)
        # remove all packages which are already in the workspace
        from catkin_pkg.packages import find_packages
        pkgs = find_packages(repo_sourcespace)
        depends_on -= set([pkg.name for pkg in pkgs.values()])
    except RuntimeError as e:
        print("Exception %s: If you are not in the rosdistro and only in the devel builds there will be no depends-on" % e)
        depends_on = set([])
    print("Build depends_on list of pkg list: %s" % ', '.join(depends_on))
    if len(depends_on) == 0:
        print("No wet packages depend on our repo list. Test finished here")
        ensure_test_results(test_results_dir)
        return

    # install depends_on packages from source from release repositories
    rosinstall = ''
    non_catkin_pkgs = []
    for pkg_name in depends_on:
        repo = release.repositories[release.packages[pkg_name].repository_name]
        if repo.version is None:
            continue
        pkg_xml = release.get_package_xml(pkg_name)
        if pkg_xml is None:
            raise BuildException('Could not retrieve package.xml for package "%s" from rosdistro cache' % pkg_name)
        try:
            pkg = parse_package_string(pkg_xml)
        except InvalidPackage as e:
            raise BuildException('package.xml for package "%s" from rosdistro cache is invalid: %s' % (pkg_name, e))
        if _is_non_catkin_package(pkg):
            non_catkin_pkgs.append(pkg.name)
        rosinstall += _generate_rosinstall_for_pkg(repo, pkg_name)

    if non_catkin_pkgs:
        print('Non-catkin packages depend on our repo list (%s). Skipping depends_on packages here' % ', '.join(sorted(non_catkin_pkgs)))
        create_test_result(test_results_dir, failure='Non-catkin packages depend on the repos (%s). Skip building and testing depends_on packages.' % ', '.join(sorted(non_catkin_pkgs)))
        return

    print("Rosinstall for depends_on:\n %s" % rosinstall)
    with open(workspace + "/depends_on.rosinstall", 'w') as f:
        f.write(rosinstall)
    print("Created rosinstall file for depends_on")

    # install all repository and system dependencies of the depends_on list
    print("Install all depends_on from source: %s" % ', '.join(depends_on))
    os.makedirs(dependson_sourcespace)
    call("rosinstall --catkin %s %s/depends_on.rosinstall" % (dependson_sourcespace, workspace))

    # check if the depends_on workspace contains only packages built with catkin
    non_catkin_pkgs = _get_non_catkin_packages(dependson_sourcespace)
    if non_catkin_pkgs:
        print('Non-catkin packages depend on our repo list (%s). Skipping depends_on packages here' % ', '.join(sorted(non_catkin_pkgs)))
        create_test_result(test_results_dir, failure='Non-catkin packages depend on the repos (%s). Skip building and testing depends_on packages.' % ', '.join(sorted(non_catkin_pkgs)))
        return

    # get build and run dependencies of depends_on list
    dependson_build_dependencies = []
    for d in get_dependencies(dependson_sourcespace, build_depends=True, run_depends=False):
        print("  Checking dependency %s" % d)
        if d in dependson_build_dependencies:
            print("    Already in dependson_build_dependencies")
        if d in depends_on:
            print("    Is a direct dependency of the repo list, and is installed from source")
        if d in repo_list:
            print("    Is one of the repositories tested")
        if d not in dependson_build_dependencies and d not in depends_on and d not in repo_list:
            dependson_build_dependencies.append(d)
    print("Build dependencies of depends_on list are %s" % ', '.join(dependson_build_dependencies))

    dependson_test_dependencies = []
    for d in get_dependencies(dependson_sourcespace, build_depends=False, run_depends=True):
        if d not in dependson_test_dependencies and d not in depends_on and d not in repo_list:
            dependson_test_dependencies.append(d)
    print("Test dependencies of depends_on list are %s" % ', '.join(dependson_test_dependencies))

    # install build dependencies
    print("Install all build dependencies of the depends_on list")
    apt_get_install(dependson_build_dependencies, rosdep_resolver, sudo)

    # replace the CMakeLists.txt file again
    print("Removing the CMakeLists.txt file generated by rosinstall")
    os.remove(os.path.join(dependson_sourcespace, 'CMakeLists.txt'))
    os.makedirs(dependson_buildspace)
    os.chdir(dependson_buildspace)
    print("Create a new CMakeLists.txt file using catkin")
    call("catkin_init_workspace %s" % dependson_sourcespace, ros_env)
    depends_on_test_results_dir = os.path.join(test_results_dir, 'depends_on')
    call("cmake %s -DCATKIN_TEST_RESULTS_DIR=%s" % (dependson_sourcespace, depends_on_test_results_dir), ros_env)
    #ros_env_depends_on = get_ros_env(os.path.join(dependson_buildspace, 'devel/setup.bash'))

    # build repositories
    print("Build depends-on packages")
    call("make", ros_env)

    # install test dependencies
    print("Install all test dependencies of the depends_on list")
    apt_get_install(dependson_test_dependencies, rosdep_resolver, sudo)

    # test repositories
    print("Test depends-on packages")
    call("make run_tests", ros_env)
    ensure_test_results(test_results_dir)
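
# --- Illustrative sketch (not from the original script) ---
# Both dependency loops above implement the same order-preserving filter:
# keep each candidate once, unless it is already covered by depends_on or
# repo_list. A compact equivalent, assuming plain iterables of package
# names; `filter_new_dependencies` is a hypothetical helper.
def filter_new_dependencies(candidates, depends_on, repo_list):
    covered = set(depends_on) | set(repo_list)
    result = []
    for d in candidates:
        if d not in covered:
            covered.add(d)  # also de-duplicates repeated candidates
            result.append(d)
    return result

print(filter_new_dependencies(['a', 'b', 'a', 'c'], ['b'], ['c']))  # ['a']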
def _test_repositories_fuerte(ros_distro, repo_list, version_list, workspace,
                              test_depends_on, repo_sourcespace,
                              dependson_sourcespace, repo_buildspace,
                              dependson_buildspace, sudo=False, no_chroot=False):
    import rosdistro

    # parse the rosdistro file
    print("Parsing rosdistro file for %s" % ros_distro)
    distro = rosdistro.RosDistro(ros_distro)
    print("Parsing devel file for %s" % ros_distro)
    devel = rosdistro.DevelDistro(ros_distro)

    # Create rosdep object
    print("Create rosdep object")
    rosdep_resolver = rosdep.RosDepResolver(ros_distro, sudo, no_chroot)

    # download the repo_list from source
    print("Creating rosinstall file for repo list")
    rosinstall = ""
    for repo, version in zip(repo_list, version_list):
        if version == 'devel':
            if repo not in devel.repositories:
                raise BuildException("Repository %s does not exist in Devel Distro" % repo)
            print("Using devel distro file to download repositories")
            rosinstall += devel.repositories[repo].get_rosinstall()
        else:
            if repo not in distro.get_repositories():
                raise BuildException("Repository %s does not exist in Ros Distro" % repo)
            if version in ['latest', 'master']:
                print("Using latest release distro file to download repositories")
                rosinstall += distro.get_rosinstall(repo, version='master')
            else:
                print("Using version %s of release distro file to download repositories" % version)
                rosinstall += distro.get_rosinstall(repo, version)
    print("rosinstall file for all repositories: \n %s" % rosinstall)
    with open(os.path.join(workspace, "repo.rosinstall"), 'w') as f:
        f.write(rosinstall)
    print("Install repo list from source")
    os.makedirs(repo_sourcespace)
    call("rosinstall %s %s/repo.rosinstall --catkin" % (repo_sourcespace, workspace))

    # get the repositories' build dependencies
    print("Get build dependencies of repo list")
    repo_build_dependencies = get_dependencies(repo_sourcespace, build_depends=True, run_depends=False)
    print("Install build dependencies of repo list: %s" % ', '.join(repo_build_dependencies))
    apt_get_install(repo_build_dependencies, rosdep_resolver, sudo)

    # replace the CMakeLists.txt file for repositories that use catkin
    print("Removing the CMakeLists.txt file generated by rosinstall")
    os.remove(os.path.join(repo_sourcespace, 'CMakeLists.txt'))
    print("Create a new CMakeLists.txt file using catkin")
    ros_env = get_ros_env('/opt/ros/%s/setup.bash' % ros_distro)
    call("catkin_init_workspace %s" % repo_sourcespace, ros_env)
    test_results_dir = os.path.join(workspace, 'test_results')
    repos_test_results_dir = os.path.join(test_results_dir, 'repos')
    os.makedirs(repo_buildspace)
    os.chdir(repo_buildspace)
    call("cmake %s -DCATKIN_TEST_RESULTS_DIR=%s" % (repo_sourcespace, repos_test_results_dir), ros_env)
    #ros_env_repo = get_ros_env(os.path.join(repo_buildspace, 'devel/setup.bash'))

    # build repositories and tests
    print("Build repo list")
    call("make", ros_env)
    call("make tests", ros_env)

    # get the repositories' run dependencies
    print("Get test and run dependencies of repo list")
    repo_test_dependencies = get_dependencies(repo_sourcespace, build_depends=False, run_depends=True)
    print("Install test and run dependencies of repo list: %s" % ', '.join(repo_test_dependencies))
    apt_get_install(repo_test_dependencies, rosdep_resolver, sudo)

    # run tests
    print("Test repo list")
    call("make run_tests", ros_env)

    # see if we need to do more work or not
    if not test_depends_on:
        print("We're not testing the depends-on repositories")
        ensure_test_results(test_results_dir)
        return

    # get repo_list depends-on list
    print("Get list of wet repositories that build-depend on repo list %s" % ', '.join(repo_list))
    depends_on = []
    try:
        for d in (distro.get_depends_on(repo_list)['build'] +
                  distro.get_depends_on(repo_list)['buildtool']):
            if d not in depends_on and d not in repo_list:
                depends_on.append(d)
    except RuntimeError as e:
        print("Exception %s: If you are not in the rosdistro and only in the devel builds there will be no depends-on" % e)
        depends_on = []
    print("Build depends_on list of repo list: %s" % ', '.join(depends_on))
    if len(depends_on) == 0:
        print("No wet repositories depend on our repo list. Test finished here")
        ensure_test_results(test_results_dir)
        return

    # install depends_on repositories from source
    rosinstall = distro.get_rosinstall(depends_on)
    print("Rosinstall for depends_on:\n %s" % rosinstall)
    with open(workspace + "/depends_on.rosinstall", 'w') as f:
        f.write(rosinstall)
    print("Created rosinstall file for depends_on")

    # install all repository and system dependencies of the depends_on list
    print("Install all depends_on from source: %s" % ', '.join(depends_on))
    os.makedirs(dependson_sourcespace)
    call("rosinstall --catkin %s %s/depends_on.rosinstall" % (dependson_sourcespace, workspace))

    # get build and run dependencies of depends_on list
    dependson_build_dependencies = []
    for d in get_dependencies(dependson_sourcespace, build_depends=True, run_depends=False):
        print("  Checking dependency %s" % d)
        if d in dependson_build_dependencies:
            print("    Already in dependson_build_dependencies")
        if d in depends_on:
            print("    Is a direct dependency of the repo list, and is installed from source")
        if d in repo_list:
            print("    Is one of the repositories tested")
        if d not in dependson_build_dependencies and d not in depends_on and d not in repo_list:
            dependson_build_dependencies.append(d)
    print("Build dependencies of depends_on list are %s" % ', '.join(dependson_build_dependencies))

    dependson_test_dependencies = []
    for d in get_dependencies(dependson_sourcespace, build_depends=False, run_depends=True):
        if d not in dependson_test_dependencies and d not in depends_on and d not in repo_list:
            dependson_test_dependencies.append(d)
    print("Test dependencies of depends_on list are %s" % ', '.join(dependson_test_dependencies))

    # install build dependencies
    print("Install all build dependencies of the depends_on list")
    apt_get_install(dependson_build_dependencies, rosdep_resolver, sudo)

    # replace the CMakeLists.txt file again
    print("Removing the CMakeLists.txt file generated by rosinstall")
    os.remove(os.path.join(dependson_sourcespace, 'CMakeLists.txt'))
    os.makedirs(dependson_buildspace)
    os.chdir(dependson_buildspace)
    print("Create a new CMakeLists.txt file using catkin")
    call("catkin_init_workspace %s" % dependson_sourcespace, ros_env)
    depends_on_test_results_dir = os.path.join(test_results_dir, 'depends_on')
    call("cmake %s -DCATKIN_TEST_RESULTS_DIR=%s" % (dependson_sourcespace, depends_on_test_results_dir), ros_env)
    #ros_env_depends_on = get_ros_env(os.path.join(dependson_buildspace, 'devel/setup.bash'))

    # build repositories
    print("Build depends-on repositories")
    call("make", ros_env)

    # install test dependencies
    print("Install all test dependencies of the depends_on list")
    apt_get_install(dependson_test_dependencies, rosdep_resolver, sudo)

    # test repositories
    print("Test depends-on repositories")
    call("make run_tests", ros_env)
    ensure_test_results(test_results_dir)
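
# --- Illustrative sketch (not from the original script) ---
# One plausible way to dispatch between the two implementations above,
# mirroring how the fuerte code path is special-cased elsewhere in these
# scripts. The workspace sub-directory names below are assumptions for
# illustration, not fixed by the original code.
import os

def run_test_repositories(ros_distro, repo_list, version_list, workspace,
                          test_depends_on=False):
    impl = (_test_repositories_fuerte if ros_distro == 'fuerte'
            else _test_repositories)
    impl(ros_distro, repo_list, version_list, workspace, test_depends_on,
         repo_sourcespace=os.path.join(workspace, 'src_repo'),          # assumed layout
         dependson_sourcespace=os.path.join(workspace, 'src_depends_on'),
         repo_buildspace=os.path.join(workspace, 'build_repo'),
         dependson_buildspace=os.path.join(workspace, 'build_depends_on'))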