def merge_port_trees():
    # Overlay every configured extra ports tree onto ${PORTS_OVERLAY} using
    # hard links (cp -l) so the merge is cheap and stays in sync with source.
    for i in config['port_trees']:
        info(e('Merging ports tree ${i}'))
        uids = "%s/%s" % (i, "UIDs")
        gids = "%s/%s" % (i, "GIDs")
        for p in glob('${i}/*/*'):
            # category/port path relative to the tree root
            portpath = '/'.join(p.split('/')[-2:])
            if portpath.startswith('Mk'):
                # Mk/ infrastructure is merged file-by-file rather than
                # replacing the whole directory.
                if os.path.isdir(e('${PORTS_OVERLAY}/${portpath}')):
                    sh('cp -lf ${p}/* ${PORTS_OVERLAY}/${portpath}/')
                else:
                    sh('cp -l ${p} ${PORTS_OVERLAY}/${portpath}')
            else:
                # Regular port: replace the overlay copy wholesale.
                sh('rm -rf ${PORTS_OVERLAY}/${portpath}')
                sh('mkdir -p ${PORTS_OVERLAY}/${portpath}')
                sh('cp -lr ${p}/ ${PORTS_OVERLAY}/${portpath}')
        # Tree-level UID/GID registries override the overlay's copies.
        if os.path.exists(uids):
            sh('rm -f ${PORTS_OVERLAY}/UIDs')
            sh('cp -l ${uids} ${PORTS_OVERLAY}/UIDs')
        if os.path.exists(gids):
            sh('rm -rf ${PORTS_OVERLAY}/GIDs')
            sh('cp -l ${gids} ${PORTS_OVERLAY}/GIDs')
def main():
    """Push the freshly built update (plus optional changelog) to the update
    server, register it with freenas-release, and sync the CDN mirror.
    """
    changelog = e('${CHANGELOG}')
    ssh = e('${UPDATE_USER}@${UPDATE_HOST}')
    sshopts = '-o StrictHostKeyChecking=no -o UserKnownHostsFile=/dev/null'
    temp_dest = sh_str(
        "ssh ${ssh} ${sshopts} mktemp -d /tmp/update-${PRODUCT}-XXXXXXXXX")
    temp_changelog = sh_str(
        "ssh ${ssh} ${sshopts} mktemp /tmp/changelog-XXXXXXXXX")
    if not temp_dest or not temp_changelog:
        fail('Failed to create temporary directories on {0}', ssh)
    sh('scp ${sshopts} -r ${BE_ROOT}/release/LATEST/. ${ssh}:${temp_dest}')
    if changelog:
        cl_file = None
        if changelog == '-':
            # Interactive mode: read the changelog from stdin into a temp
            # file.  print() and an explicit bytes() encode keep this
            # Python 3 clean (NamedTemporaryFile defaults to binary mode),
            # matching the other release scripts in this tree, which used
            # a Python 2 print statement and a str write here.
            print('Enter changelog, ^D to end:')
            cl_file = tempfile.NamedTemporaryFile(delete=False)
            cl_file.write(bytes(sys.stdin.read(), 'UTF-8'))
            cl_file.close()
            changelog = cl_file.name
        sh('scp ${sshopts} ${changelog} ${ssh}:${temp_changelog}')
        if cl_file is not None:
            os.remove(cl_file.name)
    # Register the staged update with freenas-release on the server.
    sh("ssh ${sshopts} ${ssh}",
       "/usr/local/bin/freenas-release",
       "-P ${PRODUCT}",
       "-D ${UPDATE_DB}",
       "--archive ${UPDATE_DEST}",
       "-K ${FREENAS_KEYFILE}",
       "-C ${temp_changelog}" if changelog else "",
       "add ${temp_dest}")
    sh("ssh ${sshopts} ${ssh} rm -rf ${temp_dest}")
    sh("ssh ${sshopts} ${ssh} rm -rf ${temp_changelog}")
    # This last line syncs up with the cdn
    sh("ssh ${sshopts} ${ssh} /usr/local/sbin/rsync-mirror.sh")
def main():
    """Upload the newest release directory for this architecture to the
    download server and kick off the CDN sync.
    """
    user = sh_str('id -un')
    if user == 'root':
        # NOTE(review): '******' is credential-scrubber residue; originally a
        # real fallback account name - restore before use.
        user = '******'
    # Find the most recently created release directory that actually contains
    # a ${BUILD_ARCH_SHORT} subdirectory.
    ref_date = 0
    rel_dir = ''
    dirstring = e('${BE_ROOT}/release/${PRODUCT}')
    for x in glob.glob("{0}*".format(dirstring)):
        if e('${BUILD_ARCH_SHORT}') not in os.listdir(x):
            continue
        if os.lstat(x).st_ctime > ref_date:
            ref_date = os.lstat(x).st_ctime
            rel_dir = x
    if not rel_dir:
        error('Release not found')
    buildtimestamp = os.path.basename(rel_dir).split("-")[-1]
    # Renamed from the misspelled 'downlodtargetdir' for consistency with
    # the sibling upload scripts; local variable only, no interface change.
    downloadtargetdir = e('${DOWNLOAD_BASEDIR}/${MILESTONE}/${buildtimestamp}')
    sh('ssh ${user}@${DOWNLOAD_HOST} rm -rf ${downloadtargetdir}')
    sh('ssh ${user}@${DOWNLOAD_HOST} mkdir -p ${downloadtargetdir}')
    sh('scp -pr ${rel_dir}/* ${user}@${DOWNLOAD_HOST}:${downloadtargetdir}/')
    info('Synchronizing download server to CDN')
    sh('ssh ${user}@${DOWNLOAD_HOST} /usr/local/sbin/rsync-mirror.sh')
def check_build_sanity():
    """Abort early when the build location would break later stages."""
    # nullfs mount paths have a hard length budget; a deep checkout overflows it.
    build_root = e('${BUILD_ROOT}')
    if len(build_root) > 38:
        error("Current path too long ({0} characters) for nullfs mounts during build",
              len(os.getcwd()))
    # Leftover mounts from a crashed build would be clobbered by this one.
    be_root = e('${BE_ROOT}')
    if be_root in sh_str('mount'):
        error("You have dangling mounts inside {0}, did last build crash?", be_root)
def QR_decomposition(size, A, precision=0.01):
    """Factor the size x size matrix A into Q (orthogonal) and R (upper
    triangular) via Householder reflections; returns (Q, R).

    `precision` is accepted for interface compatibility but is not consulted
    by this direct (non-iterative) factorization.
    """
    R = [row.copy() for row in A]
    identity = utils.e(size)
    Q = utils.e(size)
    for col in range(size):
        # Householder vector that zeroes column `col` below the diagonal.
        v = [0 for _ in range(size)]
        norm = math.sqrt(sum(R[j][col] ** 2 for j in range(col, size)))
        v[col] = R[col][col] + sign(R[col][col]) * norm
        for row in range(col + 1, size):
            v[row] = R[row][col]
        v_row = [v]                                # 1 x n
        v_col = [[v[k]] for k in range(size)]      # n x 1
        outer = utils.mm_mult(v_col, v_row)        # v v^T
        scale = utils.mm_mult(v_row, v_col)[0][0]  # v^T v
        # H = I - 2 * v v^T / (v^T v); same divide-then-double order as
        # before so floating-point results match exactly.
        for j in range(size):
            for k in range(size):
                outer[j][k] /= scale
                outer[j][k] *= 2
        H = utils.mm_substr(identity, outer)
        Q = utils.mm_mult(Q, H)
        R = utils.mm_mult(H, R)
    return Q, R
def setup_vm():
    # Boot the test image in bhyve and attach a telnet-accessible console.
    # Both child processes are stored in globals so shutdown_vm can reap them.
    global vm_proc, termserv_proc
    info('Starting up VM')
    # Load the guest kernel from the freshly built root image.
    sh('bhyveload -m ${RAM_SIZE} -d ${OBJDIR}/test-root.ufs ${VM_NAME}')
    vm_proc = sh_spawn(
        'bhyve -m ${RAM_SIZE} -A -H -P',
        '-s 0:0,hostbridge',
        '-s 1:0,virtio-net,${tapdev}',
        '-s 2:0,ahci-hd,${OBJDIR}/test-root.ufs',
        '-s 3:0,ahci-hd,${OBJDIR}/test-swap.bin',
        '-s 31,lpc -l com1,${CONSOLE_MASTER}',
        '${VM_NAME}'
    )
    pid = vm_proc.pid
    logfile = objdir(e('logs/bhyve.${pid}.log'))
    info('Starting telnet server on port {0}', e('${TELNET_PORT}'))
    info('Console log file is {0}', logfile)
    # Expose the VM serial console over telnet and tee it into the log file.
    termserv_proc = sh_spawn(
        'python', '${BUILD_TOOLS}/terminal-server.py',
        '-l ${logfile}',
        '-c ${CONSOLE_SLAVE}',
        '-p ${TELNET_PORT}'
    )
    # Ensure the VM is torn down if the build is aborted.
    on_abort(shutdown_vm)
def setup_rootfs():
    # Build a debug kernel and install world+kernel into a scratch root,
    # then pack it into the UFS image used as the bhyve test disk.
    buildkernel(e('${KERNCONF}-DEBUG'), ['mach'], buildkernellog)
    installworld('${OBJDIR}/test-root', installworldlog, distributionlog, conf="run")
    installkernel(e('${KERNCONF}'), '${OBJDIR}/test-root', installkernellog,
                  modules=['mach'], conf="run")
    info('Installing overlay files')
    # Test-specific configuration layered on top of the stock root.
    sh('rsync -ah ${TESTS_ROOT}/trueos/overlay/ ${OBJDIR}/test-root')
    sh('makefs -M ${IMAGE_SIZE} ${OBJDIR}/test-root.ufs ${OBJDIR}/test-root')
def check_port(name, port):
    """Verify that command `name` exists somewhere in ${PATH}; abort with a
    hint to install `port` otherwise.
    """
    debug('Checking for "{0}" command', name)
    for directory in e('${PATH}').split(':'):
        if os.path.exists(e('${directory}/${name}')):
            # Found it - nothing more to do.
            return
    error('Command {0} not found. Please run "pkg install {1}" or install from ports',
          name, port)
def create_upgradefile():
    """
    Copy the manifest, and all other files, into a temp directory, then
    create a tarball from that.  We need to rename ${PRODUCT}-MANIFEST to
    simply MANIFEST, and all the Packages files go into the base directory.
    We'll name the resulting file ${PRODUCT}-${VERSION}.tar
    """
    info("Creating update tar-file")
    temp_dir = tempfile.mkdtemp()
    source_dir = e("${UPGRADE_STAGEDIR}")
    for entry in os.listdir(source_dir):
        if entry == e("${PRODUCT}-MANIFEST"):
            # Product-prefixed manifest becomes plain MANIFEST in the tarball.
            shutil.copyfile(os.path.join(source_dir, entry),
                            os.path.join(temp_dir, "MANIFEST"))
        elif entry == "Packages":
            # Flatten the Packages directory into the tarball root.
            for pkgfile in os.listdir(os.path.join(source_dir, entry)):
                shutil.copyfile(os.path.join(source_dir, entry, pkgfile),
                                os.path.join(temp_dir, pkgfile))
        else:
            # NOTE(review): copyfile would fail on a directory entry here;
            # presumably the stage dir holds only regular files - confirm.
            shutil.copyfile(os.path.join(source_dir, entry),
                            os.path.join(temp_dir, entry))
    sh("chmod 755 {0}".format(temp_dir))
    sh("tar -C {0} -cf {1} .".format(
        temp_dir,
        e("${BE_ROOT}/release/${PRODUCT}-${VERSION}-manual-update-unsigned.tar"
          )))
    info(
        "tar-file path: ${{BE_ROOT}}/release/${{PRODUCT}}-${{VERSION}}-manual-update-unsigned.tar"
    )
    shutil.rmtree(temp_dir)
def do_run():
    # Boot the full image in bhyve, wait for the middleware to come up, then
    # drive the integration test suite against it over the network.
    info('Starting up VM for testing')
    vm_proc = sh_spawn(
        'bhyve -m ${MEMSIZE} -c ${CORES} -A -H -P',
        '-s 3:0,ahci-hd,${destdir}/boot.img',
        '-s 4:0,ahci-hd,${destdir}/hd1.img',
        '-s 5:0,ahci-hd,${destdir}/hd2.img',
        '-s 6:0,virtio-net,${tapdev}',
        '-s 7:0,fbuf,tcp=5900,w=1024,h=768',
        '-s 31,lpc',
        '-l bootrom,/usr/local/share/uefi-firmware/BHYVE_UEFI.fd',
        '${VM_NAME}')
    # 'ready' is signalled elsewhere (DHCP handshake); the fixed sleep gives
    # the middleware extra time to settle - TODO: poll instead of sleeping.
    ready.wait()
    time.sleep(60)
    info('VM middleware is ready')
    proc = subprocess.Popen([
        e('${venvdir}/bin/python'),
        e('${BUILD_ROOT}/tests/freenas/main.py'),
        '-a', e('${FREENAS_IP}'),
        '-u', 'root',
        '-p', 'abcd1234'
    ])
    proc.wait()
    vm_proc.terminate()
    vm_proc.wait()
def stage_upgrade():
    """Assemble the upgrade staging directory from the built packages plus
    optional validation scripts and restart/reboot markers, then point
    ${BE_ROOT}/release/LATEST at it.
    """
    sh('rm -rf ${UPGRADE_STAGEDIR}')
    sh('mkdir -p ${UPGRADE_STAGEDIR}')
    sh('cp -R ${OBJDIR}/packages/Packages ${UPGRADE_STAGEDIR}/')
    # If an update validation script is given, copy that
    if os.path.exists(e('${PROFILE_ROOT}/ValidateUpdate')):
        sh('cp ${PROFILE_ROOT}/ValidateUpdate ${UPGRADE_STAGEDIR}/ValidateUpdate')
    if os.path.exists(e('${PROFILE_ROOT}/ValidateInstall')):
        # BUG FIX: previously copied ValidateUpdate here even though the
        # existence check (and the destination name) is ValidateInstall.
        sh('cp ${PROFILE_ROOT}/ValidateInstall ${UPGRADE_STAGEDIR}/ValidateInstall')
    # Allow the environment to over-ride it -- /dev/null or empty string means
    # don't have one
    if env('VALIDATE_UPDATE') is not None:
        if env('VALIDATE_UPDATE') not in ("/dev/null", ""):
            sh('cp ${VALIDATE_UPDATE} ${UPGRADE_STAGEDIR}/ValidateUpdate')
        else:
            sh('rm -f ${UPGRADE_STAGEDIR}/ValidateUpdate')
    if env('VALIDATE_INSTALL') is not None:
        if env('VALIDATE_INSTALL') not in ("/dev/null", ""):
            sh('cp ${VALIDATE_INSTALL} ${UPGRADE_STAGEDIR}/ValidateInstall')
        else:
            sh('rm -f ${UPGRADE_STAGEDIR}/ValidateInstall')
    # If RESTART is given, save that
    if env('RESTART'):
        sh('echo ${RESTART} > ${UPGRADE_STAGEDIR}/RESTART')
    # And if REBOOT is given, put that in FORCEREBOOT
    if env('REBOOT'):
        sh('echo ${REBOOT} > ${UPGRADE_STAGEDIR}/FORCEREBOOT')
    sh('rm -f ${BE_ROOT}/release/LATEST')
    sh('ln -sf ${UPGRADE_STAGEDIR} ${BE_ROOT}/release/LATEST')
def main():
    # Check out every configured repository (honouring the comma-separated
    # CHECKOUT_ONLY / CHECKOUT_EXCLUDE filters), then record a manifest and
    # mark the sandbox as pulled.
    if not e('${SKIP_CHECKOUT}'):
        cwd = os.getcwd()
        checkout_only = e('${CHECKOUT_ONLY}')
        checkout_exclude = e('${CHECKOUT_EXCLUDE}')
        if checkout_only:
            checkout_only = checkout_only.split(',')
        if checkout_exclude:
            checkout_exclude = checkout_exclude.split(',')
        for i in dsl['repos']:
            # Inclusion filter applies first, then the exclusion filter.
            if checkout_only and i['name'] not in checkout_only:
                continue
            if checkout_exclude and i['name'] in checkout_exclude:
                continue
            info('Checkout: {0} -> {1} from {2}', i['name'], i['path'], i['url'])
            debug('Repository URL: {0}', i['url'])
            debug('Local branch: {0}', i['branch'])
            checkout_repo(cwd, i)
    # NOTE(review): manifest generation and the .pulled marker are placed at
    # function level here (they run even when SKIP_CHECKOUT is set) -
    # reconstructed nesting; confirm against the original indentation.
    generate_manifest()
    setfile('${BE_ROOT}/.pulled', e('${PRODUCT}'))
    return 0
def stage_upgrade():
    """Build the upgrade staging directory (packages, validation scripts,
    restart/reboot markers) and repoint ${BE_ROOT}/release/LATEST at it.
    """
    sh('rm -rf ${UPGRADE_STAGEDIR}')
    sh('mkdir -p ${UPGRADE_STAGEDIR}')
    sh('cp -R ${OBJDIR}/packages/Packages ${UPGRADE_STAGEDIR}/')
    # If an update validation script is given, copy that
    if os.path.exists(e('${PROFILE_ROOT}/ValidateUpdate')):
        sh('cp ${PROFILE_ROOT}/ValidateUpdate ${UPGRADE_STAGEDIR}/ValidateUpdate')
    if os.path.exists(e('${PROFILE_ROOT}/ValidateInstall')):
        # BUG FIX: the source here was ValidateUpdate although the guard and
        # the destination both refer to ValidateInstall.
        sh('cp ${PROFILE_ROOT}/ValidateInstall ${UPGRADE_STAGEDIR}/ValidateInstall')
    # Allow the environment to over-ride it -- /dev/null or empty string means
    # don't have one
    if env('VALIDATE_UPDATE') is not None:
        if env('VALIDATE_UPDATE') not in ("/dev/null", ""):
            sh('cp ${VALIDATE_UPDATE} ${UPGRADE_STAGEDIR}/ValidateUpdate')
        else:
            sh('rm -f ${UPGRADE_STAGEDIR}/ValidateUpdate')
    if env('VALIDATE_INSTALL') is not None:
        if env('VALIDATE_INSTALL') not in ("/dev/null", ""):
            sh('cp ${VALIDATE_INSTALL} ${UPGRADE_STAGEDIR}/ValidateInstall')
        else:
            sh('rm -f ${UPGRADE_STAGEDIR}/ValidateInstall')
    # If RESTART is given, save that
    if env('RESTART'):
        sh('echo ${RESTART} > ${UPGRADE_STAGEDIR}/RESTART')
    # And if REBOOT is given, put that in FORCEREBOOT
    if env('REBOOT'):
        sh('echo ${REBOOT} > ${UPGRADE_STAGEDIR}/FORCEREBOOT')
    sh('rm -f ${BE_ROOT}/release/LATEST')
    sh('ln -sf ${UPGRADE_STAGEDIR} ${BE_ROOT}/release/LATEST')
def main():
    """Publish the newest release build for this architecture to the
    download host and trigger the CDN mirror sync.
    """
    user = sh_str('id -un')
    if user == 'root':
        user = '******'
    # Pick the most recently created release directory that was built for
    # this architecture.
    newest_ctime = 0
    rel_dir = ''
    pattern = "{0}*".format(e('${BE_ROOT}/release/${PRODUCT}'))
    arch = e('${BUILD_ARCH_SHORT}')
    for candidate in glob.glob(pattern):
        if arch not in os.listdir(candidate):
            continue
        ctime = os.lstat(candidate).st_ctime
        if ctime > newest_ctime:
            newest_ctime, rel_dir = ctime, candidate
    if not rel_dir:
        error('Release not found')
    # master/stable builds are filed by milestone + build timestamp; every
    # other build type uses the fixed target directory.
    if e('${BUILD_TYPE}').lower() in ["master", "stable"]:
        buildtimestamp = os.path.basename(rel_dir).split("-")[-1]
        downloadtargetdir = e('${DOWNLOAD_BASEDIR}/${MILESTONE}/${buildtimestamp}')
    else:
        downloadtargetdir = e('${DOWNLOAD_TARGETDIR}')
    sh('ssh ${user}@${DOWNLOAD_HOST} rm -rf ${downloadtargetdir}')
    sh('ssh ${user}@${DOWNLOAD_HOST} mkdir -p ${downloadtargetdir}')
    sh('scp -pr ${rel_dir}/* ${user}@${DOWNLOAD_HOST}:${downloadtargetdir}/')
    info('Synchronizing download server to CDN')
    sh('ssh ${user}@${DOWNLOAD_HOST} /usr/local/sbin/rsync-mirror.sh')
def dhcp_request(mac, hostname):
    # DHCP callback for the test VM: hand out the fixed FREENAS_IP/NETMASK
    # lease and signal 'ready' so the test driver can proceed.
    info('DHCP request from {0} ({1})'.format(hostname, mac))
    lease = Lease()
    lease.client_mac = mac
    lease.client_ip = ipaddress.ip_address(e('${FREENAS_IP}'))
    lease.client_mask = ipaddress.ip_address(e('${NETMASK}'))
    # The first DHCP request implies the guest's network stack is up.
    ready.set()
    return lease
def install():
    # Install the GUI's node dependencies and run the gulp deploy task to
    # build the production bundle into ${GUI_DESTDIR}.
    node_modules = e('${GUI_STAGEDIR}/node_modules')
    os.chdir(e('${GUI_STAGEDIR}'))
    sh('npm install')
    # mode='a' keeps appending to the shared build log.
    sh('${node_modules}/.bin/gulp deploy --output=${GUI_DESTDIR}',
       log=logfile, mode='a')
def create_plist():
    # Write the packaging plist: every regular file deployed under
    # ${GUI_DESTDIR}, mapped to its /usr/local/www/gui install path,
    # followed by the hand-maintained custom-plist entries.
    with open(e('${GUI_DESTDIR}/gui-plist'), 'w') as f:
        for i in walk('${GUI_DESTDIR}'):
            if not os.path.isdir(e('${GUI_DESTDIR}/${i}')):
                f.write(e('/usr/local/www/gui/${i}\n'))
        # NOTE(review): custom-plist is appended once, after the walk loop -
        # reconstructed nesting; confirm against the original source.
        with open(e('${GUI_STAGEDIR}/custom-plist')) as c:
            f.write(c.read())
def stage_release():
    """Move every built release artifact (plus its .sha256 companion) for
    this architecture into the release staging directory.
    """
    sh('mkdir -p ${RELEASE_STAGEDIR}/${BUILD_ARCH_SHORT}')
    releases = [e('${OBJDIR}/${NAME}.${ext}') for ext in dsl.formats]
    for path in releases:
        if not os.path.exists(path):
            # Not every format is built for every profile.
            continue
        info(e('Moving ${path} artifact to release directory'))
        sh('mv ${path} ${RELEASE_STAGEDIR}/${BUILD_ARCH_SHORT}/')
        sh('mv ${path}.sha256 ${RELEASE_STAGEDIR}/${BUILD_ARCH_SHORT}/')
    info('ISO path: ${{RELEASE_STAGEDIR}}/${{BUILD_ARCH_SHORT}}')
def check_sandbox():
    """Abort unless every configured repository has been checked out."""
    if not os.path.exists(e('${BE_ROOT}/.pulled')):
        error('Sandbox is not fully checked out')
    be_root = e('${BE_ROOT}')
    for repo in config['repos']:
        # A missing .git directory means the clone never completed.
        if not os.path.isdir(os.path.join(be_root, repo['path'], '.git')):
            error('Sandbox is not fully checked out, {0} is missing', repo['name'])
    info('Sandbox is fully checked out')
def main():
    # Split debug information out of the freshly installed world into
    # ${DEBUG_WORLD}, preserving the relative directory layout, then strip
    # the originals.
    sh('rm -rf ${DEBUG_WORLD}')
    sh('mkdir -p ${DEBUG_WORLD}')
    info('Saving debug information in ${{DEBUG_WORLD}}')
    for root, dirs, files in os.walk(e('${WORLD_DESTDIR}/')):
        for name in files:
            filename = os.path.join(root, name)
            relpath = os.path.relpath(filename, e('${WORLD_DESTDIR}'))
            destpath = os.path.join(e('${DEBUG_WORLD}'), relpath)
            ext = os.path.splitext(name)[1]
            # Kernel modules and boot/grub files keep their debug info.
            if ext == '.ko':
                continue
            if relpath.startswith(('boot', 'usr/local/lib/grub')):
                continue
            if ext == '.c':
                # Cython-generated sources are moved aside wholesale, along
                # with their annotation .html siblings in the same directory.
                make_dir(destpath)
                shutil.move(filename, destpath)
                for f in os.listdir(root):
                    if f.endswith('.html'):
                        html_path = os.path.join(root, f)
                        if 'Generated by Cython' in open(html_path).read():
                            relpath = os.path.relpath(html_path, e('${WORLD_DESTDIR}'))
                            destpath = os.path.join(e('${DEBUG_WORLD}'), relpath)
                            shutil.move(html_path, destpath)
                continue
            if ext == '.html':
                continue
            if not is_elf(filename):
                continue
            make_dir(destpath)
            # We need to remove any flags on protected files and restore
            # them after stripping
            flags = os.stat(filename).st_flags
            os.chflags(filename, 0)
            if not relpath.startswith('rescue'):
                # NOTE(review): '$(unknown)' below looks like a scrubbed
                # template argument (presumably ${filename}) - restore
                # before running this script.
                sh('objcopy --only-keep-debug $(unknown) ${destpath}.debug')
                sh('objcopy --strip-unneeded $(unknown)', nofail=True)
                sh('objcopy --add-gnu-debuglink="${destpath}.debug" $(unknown)',
                   log='/dev/null', nofail=True)
            else:
                # rescue binaries just get stripped outright.
                sh('strip $(unknown)')
            os.chflags(filename, flags)
def find_tests(self):
    """Scan every non-excluded sandbox directory for 'tests' directories
    containing a MANIFEST.json and record them in self.test_suites.
    """
    info('Looking for test manifests in ${{BE_ROOT}}')
    be_root = e('${BE_ROOT}')
    for entry in os.listdir(be_root):
        if entry in self.excluded:
            continue
        for root, _, files in os.walk(os.path.join(be_root, entry)):
            # A suite is any directory literally named 'tests' with a manifest.
            if os.path.basename(root) == 'tests' and 'MANIFEST.json' in files:
                info('Found test manifest at {0}', root)
                self.test_suites.append(root)
def get_image_files_desc():
    # Yield a description dict (name, type, sha256, download URL) for each
    # release image format that was actually produced.
    for ext in dsl.formats:
        path = e('${RELEASE_STAGEDIR}/${BUILD_ARCH_SHORT}/${NAME}.${ext}')
        filename = os.path.basename(path)
        if os.path.exists(path):
            yield {
                'filename': filename,
                'type': ext,
                'hash': sh_str("sha256 -q ${path}"),
                # NOTE(review): '$(unknown)' is scrubbed template text,
                # presumably ${filename} - restore before use.
                'url': e("${url}/$(unknown)")
            }
def create_poudriere_config():
    # Render poudriere.conf from its template (pointing it at the ports repo
    # checkout) and register the merged ports tree 'p' with poudriere.
    sh('mkdir -p ${DISTFILES_CACHE}')
    setfile('${POUDRIERE_ROOT}/etc/poudriere.conf',
            template('${BUILD_CONFIG}/templates/poudriere.conf', {
                'ports_repo': config['repos'].where(name='ports')['path'],
                'ports_branch': config['repos'].where(name='ports')['branch'],
            }))
    # Ports tree metadata: mount point is the overlay, method is git.
    tree = e('${POUDRIERE_ROOT}/etc/poudriere.d/ports/p')
    sh('mkdir -p', tree)
    setfile(pathjoin(tree, 'mnt'), e('${PORTS_OVERLAY}'))
    setfile(pathjoin(tree, 'method'), 'git')
def copy_packages():
    # Stage the package repository onto the ISO and hoist any validation
    # scripts out of Packages/ into the product directory where the
    # installer expects them.
    sh('mkdir -p ${ISO_DESTDIR}/${PRODUCT}')
    sh('cp -R ${OBJDIR}/packages/Packages ${ISO_DESTDIR}/${PRODUCT}')
    # Move any validation scripts back
    for v in "ValidateInstall", "ValidateUpdate":
        if os.path.exists(e('${ISO_DESTDIR}/${PRODUCT}/Packages/${v}')):
            sh(
                e('mv ${ISO_DESTDIR}/${PRODUCT}/Packages/${v} ${ISO_DESTDIR}/${PRODUCT}/${v}'
                  ))
    sh('cp ${OBJDIR}/packages/${PRODUCT}-MANIFEST ${ISO_DESTDIR}/')
def main():
    """Replace ./release with a fresh copy of the profile's release tree."""
    src = e('${PROFILE_ROOT}/release')
    dst = e('${BUILD_ROOT}/release')
    # Drop stale content first; copytree requires the target not to exist.
    if os.path.exists(dst):
        shutil.rmtree(dst)
    shutil.copytree(src, dst)
def main():
    # Variant of the debug-splitting pass: move debug info for every ELF in
    # the world into ${DEBUG_WORLD} and strip the originals.
    sh('rm -rf ${DEBUG_WORLD}')
    sh('mkdir -p ${DEBUG_WORLD}')
    info('Saving debug information in ${{DEBUG_WORLD}}')
    for root, dirs, files in os.walk(e('${WORLD_DESTDIR}/')):
        for name in files:
            filename = os.path.join(root, name)
            relpath = os.path.relpath(filename, e('${WORLD_DESTDIR}'))
            destpath = os.path.join(e('${DEBUG_WORLD}'), relpath)
            ext = os.path.splitext(name)[1]
            # Kernel modules and boot/grub files keep their debug info.
            if ext == '.ko':
                continue
            if relpath.startswith(('boot', 'usr/local/lib/grub')):
                continue
            if ext == '.c':
                # Cython-generated sources and their annotation .html
                # siblings are moved aside wholesale.
                make_dir(destpath)
                shutil.move(filename, destpath)
                for f in os.listdir(root):
                    if f.endswith('.html'):
                        html_path = os.path.join(root, f)
                        if 'Generated by Cython' in open(html_path).read():
                            relpath = os.path.relpath(html_path, e('${WORLD_DESTDIR}'))
                            destpath = os.path.join(e('${DEBUG_WORLD}'), relpath)
                            shutil.move(html_path, destpath)
                continue
            if ext == '.html':
                continue
            if not is_elf(filename):
                continue
            make_dir(destpath)
            # We need to remove any flags on protected files and restore
            # them after stripping
            flags = os.stat(filename).st_flags
            os.chflags(filename, 0)
            if not relpath.startswith('rescue'):
                # NOTE(review): '$(unknown)' is scrubbed template text,
                # presumably ${filename} - restore before running.
                sh('objcopy --only-keep-debug $(unknown) ${destpath}.debug')
                sh('objcopy --strip-unneeded $(unknown)')
                sh('objcopy --add-gnu-debuglink="${destpath}.debug" $(unknown)',
                   log='/dev/null', nofail=True)
            else:
                sh('strip $(unknown)')
            os.chflags(filename, flags)
def fetch_binary_packages():
    """Download each configured binary package into the ports package cache,
    skipping any that are already present.
    """
    if e('${SKIP_PACKAGES_FETCH}'):
        return
    for i in config.binary_packages:
        name = os.path.basename(i)
        if os.path.exists(e('${WORLD_DESTDIR}/usr/ports/packages/${name}')):
            # Already cached from a previous build.
            continue
        info('Fetching package {0}', name)
        sh('fetch ${i} -o ${WORLD_DESTDIR}/usr/ports/packages/')
def main():
    # List every build profile found under ${BUILD_PROFILES}, marking the
    # one currently selected via ${PROFILE}.
    info("Available profiles:")
    for i in glob("${BUILD_PROFILES}/*"):
        dsl = load_file(e("${i}/config.pyd"), os.environ)
        if dsl is None:
            # Not a profile directory (no parsable config.pyd).
            continue
        profile = dsl["profile"]
        selected = e("${PROFILE}") == profile["name"]
        info('* {0}{1}', profile["name"], " [selected]" if selected else "")
        info('\tDescription: {0}', profile["description"])
        info('\tOwner: {0}', profile["owner"])
        info('\tStatus: {0}', profile["status"])
def obtain_jail_name():
    # Claim the first free poudriere jail name of the form j<letter>[-<user>]
    # and record it in ${OBJDIR}/jailname for later stages.
    global jailname
    for i in string.ascii_lowercase:
        user = e('${SUDO_USER}')
        if user:
            # Per-user suffix keeps concurrent users from colliding.
            i = e('${i}-${user}')
        # jls exits non-zero when no such jail exists, i.e. the name is free.
        # NOTE(review): the '-p' suffix in the probed name looks odd -
        # confirm it matches how the jails are actually created.
        if sh('jls -q -n -j j${i}-p', log="/dev/null", nofail=True) != 0:
            jailname = e('j${i}')
            setfile(e('${OBJDIR}/jailname'), jailname)
            return
    error('No jail names available')
def setup_rootfs():
    # Build a debug kernel, install world+kernel into the scratch test root,
    # apply the test overlay, and pack everything into a UFS disk image.
    buildkernel(e('${KERNCONF}-DEBUG'), ['mach'], buildkernellog)
    installworld('${OBJDIR}/test-root', installworldlog, distributionlog, conf="run")
    installkernel(e('${KERNCONF}'), '${OBJDIR}/test-root', installkernellog,
                  modules=['mach'], conf="run")
    info('Installing overlay files')
    # Test-specific configuration layered on top of the stock root.
    sh('rsync -ah ${TESTS_ROOT}/os/overlay/ ${OBJDIR}/test-root')
    sh('makefs -M ${IMAGE_SIZE} ${OBJDIR}/test-root.ufs ${OBJDIR}/test-root')
def read_repo_manifest():
    """Copy the repo manifest into the target world while deriving the
    package version: an MD5 over the joined per-line version column.

    Sets the module-level `pkgversion` and `sequence` globals.
    """
    global pkgversion
    global sequence
    versions = []
    # Context managers guarantee both handles are closed even on error;
    # the original leaked both file objects.
    with open(e("${BE_ROOT}/repo-manifest")) as f:
        with open(e("${BE_ROOT}/objs/world/etc/repo-manifest"), "w") as o:
            for i in f:
                # Column 1 of each line is the repo's version/commit id.
                versions.append(i.split()[1])
                o.write(i)
    pkgversion = hashlib.md5('-'.join(versions).encode('ascii')).hexdigest()
    sequence = pkgversion
def check_sandbox():
    """Verify the sandbox checkout is complete, honouring the CHECKOUT_ONLY
    repository filter.
    """
    if not os.path.exists(e('${BE_ROOT}/.pulled')):
        error('Sandbox is not fully checked out')
    only = e('${CHECKOUT_ONLY}')
    names = only.split(',') if only else None
    be_root = e('${BE_ROOT}')
    for repo in config['repos']:
        if names and repo['name'] not in names:
            continue
        # A missing .git directory means the clone never completed.
        if not os.path.isdir(os.path.join(be_root, repo['path'], '.git')):
            error('Sandbox is not fully checked out, {0} is missing', repo['name'])
    info('Sandbox is fully checked out')
def main():
    # Push a built update to the update server; production pushes are gated
    # on the signing-key password.
    prod = e("${PRODUCTION}")
    if prod and prod.lower() == "yes":
        # NOTE(review): this line was mangled by a credential scrubber
        # ('******'); originally it read the key password via getpass and
        # echoed it back for confirmation - restore before running.
        KEY_PASSWORD = getpass.getpass("Enter Password: "******"Hey Release Engineer this is the password you entered: {0}".format(KEY_PASSWORD))
    else:
        KEY_PASSWORD = ""
    changelog = e('${CHANGELOG}')
    ssh = e('${UPDATE_USER}@${UPDATE_HOST}')
    sshopts='-o StrictHostKeyChecking=no -o UserKnownHostsFile=/dev/null'
    temp_dest = sh_str("ssh ${ssh} ${sshopts} mktemp -d /tmp/update-${PRODUCT}-XXXXXXXXX")
    temp_changelog = sh_str("ssh ${ssh} ${sshopts} mktemp /tmp/changelog-XXXXXXXXX")
    if not temp_dest or not temp_changelog:
        error('Failed to create temporary directories on {0}', ssh)
    sh('scp ${sshopts} -r ${BE_ROOT}/release/LATEST/. ${ssh}:${temp_dest}')
    if changelog:
        cl_file = None
        if changelog == '-':
            # Interactive changelog entry from stdin.
            print('Enter changelog, ^D to end:')
            cl_file = tempfile.NamedTemporaryFile(delete=False)
            cl_file.write(bytes(sys.stdin.read(),'UTF-8'))
            cl_file.close()
            changelog = cl_file.name
        sh('scp ${sshopts} ${changelog} ${ssh}:${temp_changelog}')
        if cl_file is not None:
            os.remove(cl_file.name)
    # Register the staged update; the key password is piped to the tool.
    sh(
        "echo ${KEY_PASSWORD} |",
        "ssh ${sshopts} ${ssh}",
        "/usr/local/bin/freenas-release",
        "-P ${PRODUCT}",
        "-D ${UPDATE_DB}",
        "--archive ${UPDATE_DEST}",
        "-K ${FREENAS_KEYFILE}",
        "-C ${temp_changelog}" if changelog else "",
        "add ${temp_dest}"
    )
    sh("ssh ${sshopts} ${ssh} rm -rf ${temp_dest}")
    sh("ssh ${sshopts} ${ssh} rm -rf ${temp_changelog}")
    # This last line syncs up with the cdn
    # It is only done in the case of public facing update
    if e("${INTERNAL_UPDATE}").lower() == "no":
        sh("ssh ${sshopts} ${ssh} /usr/local/sbin/rsync-mirror.sh")
def main():
    # Duplicate of the production update-push script (whitespace-normalized
    # variant); see the sibling definition for details.
    prod = e("${PRODUCTION}")
    if prod and prod.lower() == "yes":
        # NOTE(review): mangled by a credential scrubber ('******');
        # originally read the key password via getpass and echoed it back -
        # restore before running.
        KEY_PASSWORD = getpass.getpass("Enter Password: "******"Hey Release Engineer this is the password you entered: {0}".format(KEY_PASSWORD))
    else:
        KEY_PASSWORD = ""
    changelog = e('${CHANGELOG}')
    ssh = e('${UPDATE_USER}@${UPDATE_HOST}')
    sshopts = '-o StrictHostKeyChecking=no -o UserKnownHostsFile=/dev/null'
    temp_dest = sh_str("ssh ${ssh} ${sshopts} mktemp -d /tmp/update-${PRODUCT}-XXXXXXXXX")
    temp_changelog = sh_str("ssh ${ssh} ${sshopts} mktemp /tmp/changelog-XXXXXXXXX")
    if not temp_dest or not temp_changelog:
        error('Failed to create temporary directories on {0}', ssh)
    sh('scp ${sshopts} -r ${BE_ROOT}/release/LATEST/. ${ssh}:${temp_dest}')
    if changelog:
        cl_file = None
        if changelog == '-':
            # Interactive changelog entry from stdin.
            print('Enter changelog, ^D to end:')
            cl_file = tempfile.NamedTemporaryFile(delete=False)
            cl_file.write(bytes(sys.stdin.read(), 'UTF-8'))
            cl_file.close()
            changelog = cl_file.name
        sh('scp ${sshopts} ${changelog} ${ssh}:${temp_changelog}')
        if cl_file is not None:
            os.remove(cl_file.name)
    # Register the staged update; the key password is piped to the tool.
    sh(
        "echo ${KEY_PASSWORD} |",
        "ssh ${sshopts} ${ssh}",
        "/usr/local/bin/freenas-release",
        "-P ${PRODUCT}",
        "-D ${UPDATE_DB}",
        "--archive ${UPDATE_DEST}",
        "-K ${FREENAS_KEYFILE}",
        "-C ${temp_changelog}" if changelog else "",
        "add ${temp_dest}"
    )
    sh("ssh ${sshopts} ${ssh} rm -rf ${temp_dest}")
    sh("ssh ${sshopts} ${ssh} rm -rf ${temp_changelog}")
    # This last line syncs up with the cdn
    # It is only done in the case of public facing update
    if e("${INTERNAL_UPDATE}").lower() == "no":
        sh("ssh ${sshopts} ${ssh} /usr/local/sbin/rsync-mirror.sh")
def install_files():
    # Drop installer configuration into the install image: avatar.conf from
    # its template, an optional unattended-install answer file, and the
    # CD-ROM rc.conf.
    info('Copying installer files')
    setfile('${INSTUFS_DESTDIR}/etc/avatar.conf',
            template('${BUILD_CONFIG}/templates/avatar.conf'))
    if e("${UNATTENDED_CONFIG}"):
        sh('cp ${UNATTENDED_CONFIG} ${INSTUFS_DESTDIR}/etc/install.conf')
    sh('cp ${BUILD_CONFIG}/templates/cdrom/rc.conf ${INSTUFS_DESTDIR}/etc/')
def main():
    # Check out all configured repositories (optionally restricted to the
    # comma-separated CHECKOUT_ONLY list), then write the manifest and the
    # .pulled marker.
    if not e('${SKIP_CHECKOUT}'):
        cwd = os.getcwd()
        for i in dsl['repos']:
            if e('${CHECKOUT_ONLY}'):
                if i['name'] not in e('${CHECKOUT_ONLY}').split(','):
                    continue
            info('Checkout: {0} -> {1}', i['name'], i['path'])
            debug('Repository URL: {0}', i['url'])
            debug('Local branch: {0}', i['branch'])
            checkout_repo(cwd, i)
    # NOTE(review): placed at function level (runs even with SKIP_CHECKOUT) -
    # reconstructed nesting; confirm against the original indentation.
    generate_manifest()
    setfile('${BE_ROOT}/.pulled', e('${PRODUCT}'))
    return 0
def prepare_env():
    # Populate the jail with the configured copies and nullfs mounts, then
    # pin uname/OSVERSION inside the jail's login.conf so ports built there
    # target the jail's FreeBSD release rather than the host's.
    for cmd in jailconf.get('copy', []):
        # Destination paths in the config are absolute; strip the leading '/'.
        dest = os.path.join(e('${JAIL_DESTDIR}'), cmd['dest'][1:])
        sh('rm -rf ${dest}')
        sh('cp -a', cmd['source'], dest)
    for cmd in jailconf.get('link', []):
        flags = '-o {0}'.format(cmd['flags']) if 'flags' in cmd else ''
        dest = os.path.join(e('${JAIL_DESTDIR}'), cmd['dest'][1:])
        sh('mkdir -p', os.path.dirname(dest))
        sh('mount -t nullfs', flags, cmd['source'], dest)
    # __FreeBSD_version of the world installed in the jail.
    osversion = sh_str("awk '/\#define __FreeBSD_version/ { print $3 }' ${JAIL_DESTDIR}/usr/include/sys/param.h")
    login_env = e(',UNAME_r=${FREEBSD_RELEASE_VERSION% *},UNAME_v=FreeBSD ${FREEBSD_RELEASE_VERSION},OSVERSION=${osversion}')
    # Drop any existing UNAME_r override, then append our environment to
    # every 'setenv' capability line in login.conf.
    sh('sed -i "" -e "s/,UNAME_r.*:/:/ ; s/:\(setenv.*\):/:\\1${login_env}:/" ${JAIL_DESTDIR}/etc/login.conf')
    sh('cap_mkdb ${JAIL_DESTDIR}/etc/login.conf');
def cleandirs():
    # Remove previous ISO/installer build products.  chflags first: the
    # installed world contains flag-protected files that rm cannot delete.
    info('Cleaning previous build products')
    if os.path.isdir(e('${INSTUFS_DESTDIR}')):
        sh('chflags -R 0 ${INSTUFS_DESTDIR}')
        sh('rm -rf ${INSTUFS_DESTDIR}')
    sh('rm -rf ${ISO_DESTDIR}')
    sh('mkdir -p ${ISO_DESTDIR} ${INSTUFS_DESTDIR}')
def install_ports():
    # Install all configured packages into the world via pkg inside a chroot.
    # devfs must be mounted for pkg's scripts; it is unmounted afterwards.
    pkgs = ' '.join(get_port_names(config.ports))
    sh('mount -t devfs devfs ${WORLD_DESTDIR}/dev')
    err = chroot('${WORLD_DESTDIR}',
                 'env ASSUME_ALWAYS_YES=yes pkg install -r local -f ${pkgs}',
                 log=logfile, nofail=True)
    sh('umount -f ${WORLD_DESTDIR}/dev')
    # /data is created by the base packages, so its absence doubles as a
    # sanity check that installation actually happened.
    if not os.path.isdir(e('${WORLD_DESTDIR}/data')) or err != 0:
        error('Packages installation failed, see log file {0}', logfile)
def merge_port_trees():
    """Overlay every configured extra ports tree onto ${PORTS_OVERLAY},
    replacing each port directory with a hard-linked copy of the source.
    """
    for i in config['port_trees']:
        info(e('Merging ports tree ${i}'))
        for p in glob('${i}/*/*'):
            # The last two path components give the category/port pair.
            portpath = '/'.join(p.rsplit('/', 2)[1:])
            sh('rm -rf ${PORTS_OVERLAY}/${portpath}')
            sh('mkdir -p ${PORTS_OVERLAY}/${portpath}')
            sh('cp -lr ${p}/ ${PORTS_OVERLAY}/${portpath}')
def prepare_jail():
    # Describe this build's jail to poudriere via its per-jail metadata
    # files (method/mnt/version/arch), then warm the jail's ld cache.
    basepath = e('${POUDRIERE_ROOT}/etc/poudriere.d/jails/${jailname}')
    sh('mkdir -p ${basepath}')
    setfile(e('${basepath}/method'), 'git')
    setfile(e('${basepath}/mnt'), e('${JAIL_DESTDIR}'))
    setfile(e('${basepath}/version'), e('${FREEBSD_RELEASE_VERSION}'))
    setfile(e('${basepath}/arch'), e('${BUILD_ARCH}'))
    sh("jail -U root -c name=${jailname} path=${JAIL_DESTDIR} command=/sbin/ldconfig -m /lib /usr/lib /usr/lib/compat")
def main():
    # Upload the newest release for this architecture to the download host,
    # maintain the 'latest' convenience symlinks, and sync the CDN.
    user = sh_str('id -un')
    if user == 'root':
        # NOTE(review): '******' is credential-scrubber residue; originally a
        # real fallback account name - restore before use.
        user = '******'
    ref_date = 0
    rel_dir = ''
    dirstring = e('${BE_ROOT}/release/${PRODUCT}')
    # Newest (by ctime) release directory containing our architecture.
    for x in glob.glob("{0}*".format(dirstring)):
        if os.path.isdir(e('${x}/${BUILD_ARCH_SHORT}')) and os.lstat(x).st_ctime > ref_date:
            ref_date = os.lstat(x).st_ctime
            rel_dir = x
    if not rel_dir:
        error('Release not found')
    download_suffix = ""
    # master/stable releases are filed per build timestamp.
    if e('${BUILD_TYPE}').lower() in ["master", "stable"]:
        buildtimestamp = os.path.basename(rel_dir).split("-")[-1]
        download_suffix = e('${BUILD_TYPE}/${buildtimestamp}')
    else:
        download_suffix = e('${BUILD_TYPE}')
    downloadtargetdir = e('${DOWNLOAD_PREFIX}/${download_suffix}')
    sh('ssh ${user}@${DOWNLOAD_HOST} rm -rf ${downloadtargetdir}')
    sh('ssh ${user}@${DOWNLOAD_HOST} mkdir -p ${downloadtargetdir}')
    sh('scp -pr ${rel_dir}/* ${user}@${DOWNLOAD_HOST}:${downloadtargetdir}/')
    # For all non-nightly builds create latest symlinks
    if e('${BUILD_TYPE}').lower() != "master":
        info('Creating top level downloads symlink')
        sh('ssh ${user}@${DOWNLOAD_HOST} ln -shf ${VERSION_NUMBER}/${download_suffix} ${DOWNLOAD_BASEDIR}/latest')
        info('Creating MILESTONE level downloads symlink')
        sh('ssh ${user}@${DOWNLOAD_HOST} ln -shf ${download_suffix} ${DOWNLOAD_PREFIX}/latest')
    info('Synchronizing download server to CDN')
    sh('ssh ${user}@${DOWNLOAD_HOST} /usr/local/sbin/rsync-mirror.sh')
def find_ref_clone(repo_name):
    """Return the path of an existing clone of `repo_name` found under one
    of the colon-separated ${GIT_REF_PATH} directories, or None.
    """
    search_path = e('${GIT_REF_PATH}')
    if not search_path:
        return None
    candidates = (os.path.join(d, repo_name) for d in search_path.split(':'))
    # First candidate that is a real git repository wins.
    return next((c for c in candidates if is_git_repo(c)), None)
def stage_release():
    """Move each built release image (and its .sha256) for this architecture
    into the release staging directory.
    """
    sh('mkdir -p ${RELEASE_STAGEDIR}/${BUILD_ARCH_SHORT}')
    for ext in dsl.formats:
        path = e('${OBJDIR}/${NAME}.${ext}')
        if not os.path.exists(path):
            # Not every format is produced for every profile.
            continue
        info('Moving installation disk ISO to release directory')
        sh('mv ${path} ${RELEASE_STAGEDIR}/${BUILD_ARCH_SHORT}/')
        sh('mv ${path}.sha256 ${RELEASE_STAGEDIR}/${BUILD_ARCH_SHORT}/')
    info('ISO path: ${{RELEASE_STAGEDIR}}/${{BUILD_ARCH_SHORT}}')
def load_file(filename, env):
    # Parse a build DSL file: read it, rewrite it through AstTransformer
    # (the DSL's syntax sugar), then exec it with a GlobalsWrapper bound to
    # `env`.  Returns the resulting globals dictionary.
    from utils import e
    filename = e(filename)
    g = GlobalsWrapper(env, filename)
    with open(filename, 'r') as f:
        tree = ast.parse(f.read(), filename)
    # Transformed nodes need source locations before they can be compiled.
    t2 = ast.fix_missing_locations(AstTransformer().visit(tree))
    exec(compile(t2, filename, 'exec'), g)
    return g.dict
def main():
    # Save debug info for every ELF object in the installed world into
    # ${DEBUG_WORLD} (mirroring the layout), then strip the originals.
    sh('rm -rf ${DEBUG_WORLD}')
    sh('mkdir -p ${DEBUG_WORLD}')
    info('Saving debug information in ${{DEBUG_WORLD}}')
    for root, dirs, files in os.walk(e('${WORLD_DESTDIR}/')):
        for name in files:
            filename = os.path.join(root, name)
            relpath = os.path.relpath(filename, e('${WORLD_DESTDIR}'))
            destpath = os.path.join(e('${DEBUG_WORLD}'), relpath)
            # Kernel modules, boot/grub files, and non-ELF files are skipped.
            if os.path.splitext(name)[1] == '.ko':
                continue
            if relpath.startswith(('boot', 'usr/local/lib/grub')):
                continue
            if not is_elf(filename):
                continue
            try:
                os.makedirs(os.path.dirname(destpath))
            except OSError as err:
                # Parent may already exist; anything else is a real failure.
                if err.errno != errno.EEXIST:
                    raise
            # We need to remove any flags on protected files and restore
            # them after stripping
            flags = os.stat(filename).st_flags
            os.chflags(filename, 0)
            if not relpath.startswith('rescue'):
                # NOTE(review): '$(unknown)' is scrubbed template text,
                # presumably ${filename} - restore before running.
                sh('objcopy --only-keep-debug $(unknown) ${destpath}.debug')
                sh('objcopy --strip-unneeded $(unknown)')
                sh('objcopy --add-gnu-debuglink="${destpath}.debug" $(unknown)',
                   log='/dev/null', nofail=True)
            else:
                sh('strip $(unknown)')
            os.chflags(filename, flags)
def populate_ufsroot():
    # Trim the installer root: delete everything in the purge_dirs list
    # except files_to_preserve, then point the removed names at /rescue via
    # symlinks so a minimal toolset remains available.
    info('Populating UFS root')
    for i in purge_dirs:
        for name in glob('${INSTUFS_DESTDIR}${i}/*'):
            name = os.path.basename(name)
            if e('${i}/${name}') in files_to_preserve:
                continue
            # Clear file flags (e.g. schg) so rm can delete the file.
            sh('chflags -f 0 ${INSTUFS_DESTDIR}${i}/${name}')
            sh('rm -f ${INSTUFS_DESTDIR}${i}/${name}')
    for k, v in symlinks.items():
        p = os.path.join('/rescue', k)
        sh('ln -s ${p} ${INSTUFS_DESTDIR}${v}')
def create_manifest(pkgs):
    # Run the create_manifest tool over the built packages, stamping the
    # train, sequence and timestamp, then point ${PRODUCT}-MANIFEST at the
    # fresh manifest.
    info('Creating package manifests')
    date = int(time.time())
    # Default train when ${TRAIN} is unset/empty.
    train = e('${TRAIN}') or 'FreeNAS'
    sh(
        "env PYTHONPATH=${tooldir}/usr/local/lib",
        "${tooldir}/usr/local/bin/create_manifest",
        "-P ${pkgdir}/Packages",
        "-S ${sequence}",
        "-o ${pkgdir}/${PRODUCT}-${sequence}",
        "-R ${PRODUCT}-${VERSION}",
        "-T ${train}",
        "-t ${date}",
        *pkgs
    )
    sh('ln -sf ${PRODUCT}-${sequence} ${pkgdir}/${PRODUCT}-MANIFEST')
def create_json():
    """Write the release CHECKSUMS.json describing the build (product,
    version, type, timestamp) and its per-architecture image files.
    """
    version = e('${VERSION}').split('-')[0]
    json_file = {
        'name': e('${PRODUCT}'),
        'version': e('${version}'),
        'type': e('${BUILD_TYPE}'),
        'date': e('${BUILD_TIMESTAMP}'),
        'aux_files': list(get_aux_files_desc()),
        'arch': {
            e('${BUILD_ARCH}'): list(get_image_files_desc())
        }
    }
    # Context manager ensures the handle is closed/flushed (the original
    # leaked it).  NOTE(review): mode 'a' appends - a pre-existing
    # CHECKSUMS.json would end up holding concatenated JSON documents;
    # confirm the stage dir is always fresh when this runs.
    with open(e("${RELEASE_STAGEDIR}/CHECKSUMS.json"), 'a') as f:
        json.dump(json_file, f, indent=4)