def main():
    """Push the latest release to the update server and sync the CDN.

    Copies ${BE_ROOT}/release/LATEST to a temp dir on the update host,
    optionally uploads a changelog (read from stdin when ${CHANGELOG}
    is '-'), registers the build with freenas-release, removes the temp
    files, and triggers the CDN mirror sync.
    """
    changelog = e('${CHANGELOG}')
    ssh = e('${UPDATE_USER}@${UPDATE_HOST}')
    sshopts = '-o StrictHostKeyChecking=no -o UserKnownHostsFile=/dev/null'
    temp_dest = sh_str(
        "ssh ${ssh} ${sshopts} mktemp -d /tmp/update-${PRODUCT}-XXXXXXXXX")
    temp_changelog = sh_str(
        "ssh ${ssh} ${sshopts} mktemp /tmp/changelog-XXXXXXXXX")
    if not temp_dest or not temp_changelog:
        fail('Failed to create temporary directories on {0}', ssh)

    sh('scp ${sshopts} -r ${BE_ROOT}/release/LATEST/. ${ssh}:${temp_dest}')
    if changelog:
        cl_file = None
        if changelog == '-':
            # Interactive mode: collect the changelog text from stdin.
            # Fixed: Python-2 `print` statement -> print(), and the
            # tempfile (binary mode) now gets bytes, matching the other
            # versions of this script.
            print('Enter changelog, ^D to end:')
            cl_file = tempfile.NamedTemporaryFile(delete=False)
            cl_file.write(bytes(sys.stdin.read(), 'UTF-8'))
            cl_file.close()
            changelog = cl_file.name

        sh('scp ${sshopts} ${changelog} ${ssh}:${temp_changelog}')
        if cl_file is not None:
            os.remove(cl_file.name)

    # Register the uploaded build with the remote release tool.
    sh("ssh ${sshopts} ${ssh}",
       "/usr/local/bin/freenas-release",
       "-P ${PRODUCT}",
       "-D ${UPDATE_DB}",
       "--archive ${UPDATE_DEST}",
       "-K ${FREENAS_KEYFILE}",
       "-C ${temp_changelog}" if changelog else "",
       "add ${temp_dest}")
    sh("ssh ${sshopts} ${ssh} rm -rf ${temp_dest}")
    sh("ssh ${sshopts} ${ssh} rm -rf ${temp_changelog}")
    # This last line syncs up with the cdn
    sh("ssh ${sshopts} ${ssh} /usr/local/sbin/rsync-mirror.sh")
def main():
    """Publish the latest build to the update server.

    In production (${PRODUCTION} == "yes") the signing-key password comes
    from ${IX_KEY_PASSWORD} or an interactive prompt.  The release is
    copied to a temp dir on ${UPDATE_HOST}, registered with freenas-release
    (optionally with a changelog and delta count), the temp files are
    removed, and — for public updates only — the CDN mirror is synced.
    """
    prod = e("${PRODUCTION}")
    if prod and prod.lower() == "yes":
        # NOTE(review): this literal was redacted/garbled in the source;
        # reconstructed as a plain getpass prompt — confirm.
        KEY_PASSWORD = e("${IX_KEY_PASSWORD}") or getpass.getpass("Enter Password: ")
    else:
        KEY_PASSWORD = ""
    changelog = e('${CHANGELOG}')
    info('Using ChangeLog: {0}', changelog)
    ssh = e('${UPDATE_USER}@${UPDATE_HOST}')
    # NOTE(review): '{KEY_PASSWORD}' has no '$', so ssh receives the literal
    # pattern '{KEY_PASSWORD}' as the SendEnv variable name — verify this
    # matches the server's AcceptEnv configuration.
    sshopts = '-o SendEnv={KEY_PASSWORD} -o StrictHostKeyChecking=no -o UserKnownHostsFile=/dev/null'
    temp_dest = sh_str("ssh ${ssh} ${sshopts} mktemp -d /tmp/update-${PRODUCT}-XXXXXXXXX")
    temp_changelog = sh_str("ssh ${ssh} ${sshopts} mktemp /tmp/changelog-XXXXXXXXX")
    delta_count = e('${DELTAS}')
    if not temp_dest or not temp_changelog:
        error('Failed to create temporary directories on {0}', ssh)

    sh('scp ${sshopts} -r ${BE_ROOT}/release/LATEST/. ${ssh}:${temp_dest}')
    if changelog:
        cl_file = None
        if changelog == '-':
            # Interactive changelog entered on stdin.
            print('Enter changelog, ^D to end:')
            cl_file = tempfile.NamedTemporaryFile(delete=False)
            cl_file.write(bytes(sys.stdin.read(), 'UTF-8'))
            cl_file.close()
            changelog = cl_file.name

        sh('scp ${sshopts} ${changelog} ${ssh}:${temp_changelog}')
        if cl_file is not None:
            os.remove(cl_file.name)

    # The password is piped to freenas-release on the remote host.
    sh(
        "echo ${KEY_PASSWORD} |",
        "ssh ${sshopts} ${ssh}",
        "/usr/local/bin/freenas-release",
        "-P ${PRODUCT}",
        "-D ${UPDATE_DB}",
        "--archive ${UPDATE_DEST}",
        "-K ${FREENAS_KEYFILE}",
        "-C ${temp_changelog}" if changelog else "",
        "--deltas ${delta_count}" if delta_count else "",
        "add ${temp_dest}"
    )
    sh("ssh ${sshopts} ${ssh} rm -rf ${temp_dest}")
    sh("ssh ${sshopts} ${ssh} rm -rf ${temp_changelog}")
    # This last line syncs up with the cdn
    # It is only done in the case of public facing update
    if e("${INTERNAL_UPDATE}").lower() == "no":
        sh("ssh ${sshopts} ${ssh} /usr/local/sbin/rsync-mirror.sh")
def main():
    """Publish the latest build to the update server.

    In production (${PRODUCTION} == "yes") the signing-key password is
    prompted for interactively; otherwise it is empty.  The release is
    copied to a temp dir on ${UPDATE_HOST}, registered via freenas-release,
    temp files are removed, and for public updates the CDN is synced.
    """
    prod = e("${PRODUCTION}")
    if prod and prod.lower() == "yes":
        # NOTE(review): this section was redacted/garbled in the source;
        # reconstructed as prompt + confirmation echo — confirm.
        KEY_PASSWORD = getpass.getpass("Enter Password: ")
        # SECURITY(review): echoing the password defeats getpass's hidden
        # input and may end up in build logs — consider removing.
        info("Hey Release Engineer this is the password you entered: {0}".format(KEY_PASSWORD))
    else:
        KEY_PASSWORD = ""
    changelog = e('${CHANGELOG}')
    ssh = e('${UPDATE_USER}@${UPDATE_HOST}')
    sshopts = '-o StrictHostKeyChecking=no -o UserKnownHostsFile=/dev/null'
    temp_dest = sh_str("ssh ${ssh} ${sshopts} mktemp -d /tmp/update-${PRODUCT}-XXXXXXXXX")
    temp_changelog = sh_str("ssh ${ssh} ${sshopts} mktemp /tmp/changelog-XXXXXXXXX")
    if not temp_dest or not temp_changelog:
        error('Failed to create temporary directories on {0}', ssh)

    sh('scp ${sshopts} -r ${BE_ROOT}/release/LATEST/. ${ssh}:${temp_dest}')
    if changelog:
        cl_file = None
        if changelog == '-':
            # Interactive changelog entered on stdin.
            print('Enter changelog, ^D to end:')
            cl_file = tempfile.NamedTemporaryFile(delete=False)
            cl_file.write(bytes(sys.stdin.read(), 'UTF-8'))
            cl_file.close()
            changelog = cl_file.name

        sh('scp ${sshopts} ${changelog} ${ssh}:${temp_changelog}')
        if cl_file is not None:
            os.remove(cl_file.name)

    # The password is piped to freenas-release on the remote host.
    sh(
        "echo ${KEY_PASSWORD} |",
        "ssh ${sshopts} ${ssh}",
        "/usr/local/bin/freenas-release",
        "-P ${PRODUCT}",
        "-D ${UPDATE_DB}",
        "--archive ${UPDATE_DEST}",
        "-K ${FREENAS_KEYFILE}",
        "-C ${temp_changelog}" if changelog else "",
        "add ${temp_dest}"
    )
    sh("ssh ${sshopts} ${ssh} rm -rf ${temp_dest}")
    sh("ssh ${sshopts} ${ssh} rm -rf ${temp_changelog}")
    # This last line syncs up with the cdn
    # It is only done in the case of public facing update
    if e("${INTERNAL_UPDATE}").lower() == "no":
        sh("ssh ${sshopts} ${ssh} /usr/local/sbin/rsync-mirror.sh")
def get_aux_files_desc():
    """Yield a description dict (filename + sha256) for each aux file."""
    for aux in dsl.aux_files:
        # 'name' is also referenced by the ${name} shell template below.
        name = aux.name
        checksum = sh_str("sha256 -q ${RELEASE_STAGEDIR}/${name}")
        yield {'filename': name, 'hash': checksum}
def main():
    """Upload the newest release to the download server and sync the CDN."""
    user = sh_str('id -un')
    if user == 'root':
        # NOTE(review): this literal appears redacted in the source.
        user = '******'

    # Find the newest release directory for this product that actually
    # contains a build for ${BUILD_ARCH_SHORT}.  (Stale commented-out
    # upload code removed; duplicate os.lstat call hoisted.)
    ref_date = 0
    rel_dir = ''
    dirstring = e('${BE_ROOT}/release/${PRODUCT}')
    for x in glob.glob("{0}*".format(dirstring)):
        if e('${BUILD_ARCH_SHORT}') not in os.listdir(x):
            continue
        created = os.lstat(x).st_ctime
        if created > ref_date:
            ref_date = created
            rel_dir = x

    if not rel_dir:
        error('Release not found')

    if e('${BUILD_TYPE}').lower() in ["master", "stable"]:
        # master/stable trains are published under a timestamped directory.
        buildtimestamp = os.path.basename(rel_dir).split("-")[-1]
        downloadtargetdir = e('${DOWNLOAD_BASEDIR}/${MILESTONE}/${buildtimestamp}')
    else:
        downloadtargetdir = e('${DOWNLOAD_TARGETDIR}')

    sh('ssh ${user}@${DOWNLOAD_HOST} rm -rf ${downloadtargetdir}')
    sh('ssh ${user}@${DOWNLOAD_HOST} mkdir -p ${downloadtargetdir}')
    sh('scp -pr ${rel_dir}/* ${user}@${DOWNLOAD_HOST}:${downloadtargetdir}/')
    info('Synchronizing download server to CDN')
    sh('ssh ${user}@${DOWNLOAD_HOST} /usr/local/sbin/rsync-mirror.sh')
def check_build_sanity():
    """Abort early on conditions known to break the build.

    - ${BUILD_ROOT} must be short enough for nullfs mount paths.
    - No mounts may linger under ${BE_ROOT} from a crashed previous build.
    """
    build_root = e('${BUILD_ROOT}')
    if len(build_root) > 38:
        # Report the length that was actually tested: the original printed
        # len(os.getcwd()), which can differ from ${BUILD_ROOT}.
        error("Current path too long ({0} characters) for nullfs mounts during build", len(build_root))
    if e('${BE_ROOT}') in sh_str('mount'):
        error("You have dangling mounts inside {0}, did last build crash?", e('${BE_ROOT}'))
def main():
    """Publish the newest release to the download server and maintain
    the 'latest' download symlinks, then sync the CDN."""
    user = sh_str('id -un')
    if user == 'root':
        user = '******'

    # Locate the most recently created release dir containing our arch.
    # (Loop variable 'x' is referenced by the ${x} template below.)
    ref_date = 0
    rel_dir = ''
    dirstring = e('${BE_ROOT}/release/${PRODUCT}')
    for x in glob.glob("{0}*".format(dirstring)):
        if not os.path.isdir(e('${x}/${BUILD_ARCH_SHORT}')):
            continue
        if os.lstat(x).st_ctime <= ref_date:
            continue
        ref_date = os.lstat(x).st_ctime
        rel_dir = x

    if not rel_dir:
        error('Release not found')

    # master/stable builds land in a timestamped subdirectory.
    download_suffix = ""
    if e('${BUILD_TYPE}').lower() in ["master", "stable"]:
        buildtimestamp = os.path.basename(rel_dir).split("-")[-1]
        download_suffix = e('${BUILD_TYPE}/${buildtimestamp}')
    else:
        download_suffix = e('${BUILD_TYPE}')
    downloadtargetdir = e('${DOWNLOAD_PREFIX}/${download_suffix}')

    sh('ssh ${user}@${DOWNLOAD_HOST} rm -rf ${downloadtargetdir}')
    sh('ssh ${user}@${DOWNLOAD_HOST} mkdir -p ${downloadtargetdir}')
    sh('scp -pr ${rel_dir}/* ${user}@${DOWNLOAD_HOST}:${downloadtargetdir}/')

    # For all non-nightly builds create latest symlinks
    if e('${BUILD_TYPE}').lower() != "master":
        info('Creating top level downloads symlink')
        sh('ssh ${user}@${DOWNLOAD_HOST} ln -shf ${VERSION_NUMBER}/${download_suffix} ${DOWNLOAD_BASEDIR}/latest')
        info('Creating MILESTONE level downloads symlink')
        sh('ssh ${user}@${DOWNLOAD_HOST} ln -shf ${download_suffix} ${DOWNLOAD_PREFIX}/latest')

    info('Synchronizing download server to CDN')
    sh('ssh ${user}@${DOWNLOAD_HOST} /usr/local/sbin/rsync-mirror.sh')
def main():
    """Upload the newest release to a timestamped milestone directory on
    the download server and sync the CDN."""
    user = sh_str('id -un')
    if user == 'root':
        # NOTE(review): this literal appears redacted in the source.
        user = '******'

    # Newest release directory for this product containing our arch.
    # (Stale commented-out upload code removed; lstat hoisted.)
    ref_date = 0
    rel_dir = ''
    dirstring = e('${BE_ROOT}/release/${PRODUCT}')
    for x in glob.glob("{0}*".format(dirstring)):
        if e('${BUILD_ARCH_SHORT}') not in os.listdir(x):
            continue
        created = os.lstat(x).st_ctime
        if created > ref_date:
            ref_date = created
            rel_dir = x

    if not rel_dir:
        error('Release not found')

    buildtimestamp = os.path.basename(rel_dir).split("-")[-1]
    # Fixed local name (was misspelled 'downlodtargetdir'); the ${}
    # template references below are renamed to match.
    downloadtargetdir = e('${DOWNLOAD_BASEDIR}/${MILESTONE}/${buildtimestamp}')
    sh('ssh ${user}@${DOWNLOAD_HOST} rm -rf ${downloadtargetdir}')
    sh('ssh ${user}@${DOWNLOAD_HOST} mkdir -p ${downloadtargetdir}')
    sh('scp -pr ${rel_dir}/* ${user}@${DOWNLOAD_HOST}:${downloadtargetdir}/')
    info('Synchronizing download server to CDN')
    sh('ssh ${user}@${DOWNLOAD_HOST} /usr/local/sbin/rsync-mirror.sh')
def setup_network():
    """Create a tap interface for VM networking and bring it up."""
    global tapdev
    info('Configuring VM networking')
    # Remember the created device name; other helpers use the global.
    tapdev = sh_str('ifconfig tap create')
    info('Using tap device {0}', tapdev)
    sh('ifconfig ${tapdev} inet ${HOST_IP} ${NETMASK} up')
def calculate_make_jobs():
    """Set the global ``makejobs`` for poudriere.

    Defaults to CPU count + 1, overridable via $POUDRIERE_JOBS.
    """
    global makejobs
    jobs = sh_str('sysctl -n kern.smp.cpus')
    if not jobs:
        # Fall back to a safe default when sysctl gives no answer.  The
        # original assigned makejobs = 2 here (dead code) and then
        # crashed on int('') below.
        jobs = 2
    makejobs = os.environ.get("POUDRIERE_JOBS", int(jobs) + 1)
    debug('Using {0} make jobs', makejobs)
def calculate_make_jobs():
    """Set the global ``makejobs`` to CPU count + 1 (2 if unknown)."""
    global makejobs
    jobs = sh_str('sysctl -n kern.smp.cpus')
    if jobs:
        makejobs = int(jobs) + 1
    else:
        # The original set makejobs = 2 but then unconditionally
        # overwrote it with int(jobs) + 1, crashing when the sysctl
        # output was empty; the fallback now actually takes effect.
        makejobs = 2
    debug('Using {0} make jobs', makejobs)
def calculate_make_jobs():
    """Set global ``makejobs``: hw.ncpu + 1, overridable by $BUILDWORLD_JOBS."""
    global makejobs
    # CPU count + 1 gives some overlap with I/O overhead.
    cores = sh_str('sysctl -n hw.ncpu') or 2
    makejobs = os.environ.get("BUILDWORLD_JOBS", int(cores) + 1)
    debug('Using {0} make jobs', makejobs)
def get_image_files_desc():
    """Yield a description dict for each built image format that exists.

    Each dict carries the file name, format extension, sha256 hash and
    download URL.
    """
    for ext in dsl.formats:
        path = e('${RELEASE_STAGEDIR}/${BUILD_ARCH_SHORT}/${NAME}.${ext}')
        filename = os.path.basename(path)
        if os.path.exists(path):
            yield {
                'filename': filename,
                'type': ext,
                'hash': sh_str("sha256 -q ${path}"),
                # NOTE(review): the URL template was garbled in the source
                # ('$(unknown)'); reconstructed as ${filename} — confirm.
                'url': e("${url}/${filename}")
            }
def main(destdir):
    """Remove ELF binaries under *destdir* that are not for the AMD64 arch."""
    for root, dirs, files in os.walk(destdir):
        for name in files:
            filename = os.path.join(root, name)
            if not is_elf(filename):
                continue
            # NOTE(review): the command argument was garbled in the source
            # ('$(unknown)'); reconstructed as ${filename} — confirm.
            ret = sh_str('file ${filename}')
            # file(1) mentions 'x86-64' or '80386' for x86 binaries; kill
            # everything else.
            if 'x86-64' not in ret and '80386' not in ret:
                os.unlink(filename)
def get_aux_files_desc():
    """Yield {filename, hash} for every aux file whose source exists."""
    for aux in dsl.aux_files:
        name = aux.name
        # The hash is taken from the copy in the release stagedir; the
        # aux.source check works because create_aux_files would not have
        # copied a missing source file over in the first place.
        if not os.path.exists(aux.source):
            continue
        yield {
            'filename': name,
            'hash': sh_str("sha256 -q ${RELEASE_STAGEDIR}/${name}"),
        }
def checkout_repo(repo):
    """Clone or update *repo* and record its HEAD revision in ``manifest``.

    *repo* provides 'path', 'branch', 'url' and optionally 'commit'; the
    checkout target may be overridden by ${CHECKOUT_TAG}.
    """
    os.chdir(e('${BE_ROOT}'))
    if os.path.isdir(os.path.join(repo['path'], '.git')):
        # Existing working copy: switch branch if needed, then update.
        os.chdir(repo['path'])
        if sh_str('git rev-parse --abbrev-ref HEAD') != repo['branch']:
            sh('git remote set-url origin', repo['url'])
            sh('git fetch origin')
            sh('git checkout', repo['branch'])
        sh('git pull --rebase')
    else:
        # Fresh clone, shallow when ${CHECKOUT_SHALLOW} is set.
        if e('${CHECKOUT_SHALLOW}'):
            sh('git clone', '-b', repo['branch'], '--depth', '1', repo['url'], repo['path'])
        else:
            sh('git clone', '-b', repo['branch'], repo['url'], repo['path'])
        os.chdir(repo['path'])

    # A tag or pinned commit overrides the branch tip.
    if e('${CHECKOUT_TAG}'):
        sh('git checkout ${CHECKOUT_TAG}')
    elif 'commit' in repo:
        sh('git checkout', repo['commit'])

    manifest[repo['url']] = sh_str("git rev-parse --short HEAD")
def main():
    """Publish the newest release, maintain 'latest' symlinks, sync the CDN."""
    user = sh_str('id -un')
    if user == 'root':
        user = '******'

    # Pick the most recently created release directory that has a build
    # for ${BUILD_ARCH_SHORT}.  ('x' is referenced by the ${x} template.)
    ref_date = 0
    rel_dir = ''
    dirstring = e('${BE_ROOT}/release/${PRODUCT}')
    for x in glob.glob("{0}*".format(dirstring)):
        if not os.path.isdir(e('${x}/${BUILD_ARCH_SHORT}')):
            continue
        created = os.lstat(x).st_ctime
        if created > ref_date:
            ref_date = created
            rel_dir = x

    if not rel_dir:
        error('Release not found')

    # Nightly-style trains go into a timestamped subdirectory.
    download_suffix = ""
    if e('${BUILD_TYPE}').lower() in ["master", "stable", "freebsd-stable", "freebsd-head"]:
        buildtimestamp = os.path.basename(rel_dir).split("-")[-1]
        download_suffix = e('${BUILD_TYPE}/${buildtimestamp}')
    else:
        download_suffix = e('${BUILD_TYPE}')

    sshopts = '-o StrictHostKeyChecking=no -o UserKnownHostsFile=/dev/null'
    downloadtargetdir = e('${DOWNLOAD_PREFIX}/${download_suffix}')
    sh('ssh ${sshopts} ${user}@${DOWNLOAD_HOST} rm -rf ${downloadtargetdir}')
    sh('ssh ${sshopts} ${user}@${DOWNLOAD_HOST} mkdir -p ${downloadtargetdir}')
    sh('scp ${sshopts} -pr ${rel_dir}/* ${user}@${DOWNLOAD_HOST}:${downloadtargetdir}/')

    # For all non-nightly builds create latest symlinks
    if e('${BUILD_TYPE}').lower() in ["master", "freebsd-stable", "freebsd-head"]:
        info('Creating MILESTONE level downloads symlink')
        sh('ssh ${sshopts} ${user}@${DOWNLOAD_HOST} ln -shf ${buildtimestamp} ${DOWNLOAD_PREFIX}/${BUILD_TYPE}/latest')
    else:
        info('Creating top level downloads symlink')
        sh('ssh ${sshopts} ${user}@${DOWNLOAD_HOST} ln -shf ${VERSION_NUMBER}/${download_suffix} ${DOWNLOAD_BASEDIR}/latest')

    info('Synchronizing download server to CDN')
    sh('ssh ${sshopts} ${user}@${DOWNLOAD_HOST} /usr/local/sbin/rsync-mirror.sh')
def prepare_env():
    """Prepare the jail environment.

    Applies the 'copy' and 'link' directives from jailconf, then patches
    login.conf inside the jail so builds see the target FreeBSD version.
    """
    # Copy directives: replace the destination wholesale.
    for cmd in jailconf.get('copy', []):
        # cmd['dest'][1:] strips the leading '/' so the join stays inside
        # ${JAIL_DESTDIR}.
        dest = os.path.join(e('${JAIL_DESTDIR}'), cmd['dest'][1:])
        sh('rm -rf ${dest}')
        sh('cp -a', cmd['source'], dest)

    # Link directives: nullfs-mount the source onto the destination.
    for cmd in jailconf.get('link', []):
        # Optional mount flags (e.g. read-only) from the directive.
        flags = '-o {0}'.format(cmd['flags']) if 'flags' in cmd else ''
        dest = os.path.join(e('${JAIL_DESTDIR}'), cmd['dest'][1:])
        sh('mkdir -p', os.path.dirname(dest))
        sh('mount -t nullfs', flags, cmd['source'], dest)

    # Extract __FreeBSD_version from the jail's param.h.
    osversion = sh_str("awk '/\#define __FreeBSD_version/ { print $3 }' ${JAIL_DESTDIR}/usr/include/sys/param.h")
    # Environment overrides appended to login.conf's setenv capability so
    # uname/OSVERSION inside the jail report ${FREEBSD_RELEASE_VERSION}.
    login_env = e(',UNAME_r=${FREEBSD_RELEASE_VERSION% *},UNAME_v=FreeBSD ${FREEBSD_RELEASE_VERSION},OSVERSION=${osversion}')
    # Strip any previous UNAME_r override, then splice login_env into the
    # setenv capability; rebuild the login.conf database afterwards.
    sh('sed -i "" -e "s/,UNAME_r.*:/:/ ; s/:\(setenv.*\):/:\\1${login_env}:/" ${JAIL_DESTDIR}/etc/login.conf')
    sh('cap_mkdb ${JAIL_DESTDIR}/etc/login.conf');
# OR SERVICES; LOSS OF USE, DATA, OR PROFITS; OR BUSINESS INTERRUPTION) # HOWEVER CAUSED AND ON ANY THEORY OF LIABILITY, WHETHER IN CONTRACT, # STRICT LIABILITY, OR TORT (INCLUDING NEGLIGENCE OR OTHERWISE) ARISING # IN ANY WAY OUT OF THE USE OF THIS SOFTWARE, EVEN IF ADVISED OF THE # POSSIBILITY OF SUCH DAMAGE. # ##################################################################### import os from dsl import load_profile_config from utils import sh, sh_str, info, debug, e, setfile, appendfile dsl = load_profile_config() manifest = {sh_str("git config --get remote.origin.url"): sh_str("git rev-parse --short HEAD")} def checkout_repo(repo): os.chdir(e('${BE_ROOT}')) if os.path.isdir(os.path.join(repo['path'], '.git')): os.chdir(repo['path']) branch = sh_str('git rev-parse --abbrev-ref HEAD') if branch != repo['branch']: sh('git remote set-url origin', repo['url']) sh('git fetch origin') sh('git checkout', repo['branch']) sh('git pull --rebase') else: if e('${CHECKOUT_SHALLOW}'):
def checkout_repo(cwd, repo):
    """Check out the given repository.

    Arguments:
        cwd -- start in this directory.
        repo -- gives 'name', 'path', 'branch', and 'url' (and
            optionally 'commit')

    We check out repo['branch'] unless ${CHECKOUT_TAG} is set (then that
    value is checked out) or a 'commit' key is present (then
    repo['commit']).  With ${CHECKOUT_SHALLOW} set, new clones use
    depth 1.  With ${GIT_REF_PATH} set (colon-separated, normal Unix
    convention), a reference clone found there speeds up fresh clones.
    """
    root = e('${BE_ROOT}')
    name = repo['name']
    path = repo['path']
    url = repo['url']
    branch = repo['branch']

    # Resolve a possible reference clone before any chdir, since
    # ${GIT_REF_PATH} entries may be relative paths.
    os.chdir(cwd)
    reference = find_ref_clone(name)
    if reference:
        reference = os.path.abspath(reference)

    os.chdir(root)
    if is_git_repo(path):
        # Existing working copy: switch to the requested branch first.
        os.chdir(path)
        if sh_str('git rev-parse --abbrev-ref HEAD') != branch:
            # (re)setting origin is a bit rude if someone had
            # carefully set their own variant, but oh well.
            sh('git remote set-url origin', url)
            sh('git fetch origin')
            sh('git checkout', branch)
        # git pull --rebase exhibits bad behavior in git 2.8.x and early
        # 2.9.x, leaving dead lock files.  This is an attempted
        # work-around - it should behave the same, perhaps minus
        # internal git bugs.
        sh('git fetch && git rebase')
    else:
        if e('${CHECKOUT_SHALLOW}'):
            sh('git clone -b', branch, '--depth 1', url, path)
        elif reference:
            # Should we have an option to add --dissociate?
            sh('git clone --reference', reference, '-b', branch, url, path)
        else:
            sh('git clone -b', branch, url, path)
        os.chdir(path)

    # A tag or pinned commit overrides the branch tip.
    if e('${CHECKOUT_TAG}'):
        sh('git checkout ${CHECKOUT_TAG}')
    elif 'commit' in repo:
        sh('git checkout', repo['commit'])

    manifest[url] = get_git_rev()
def get_git_rev():
    """Return the abbreviated revision of HEAD.

    Assumes the current working directory is inside the repository.
    """
    rev = sh_str("git rev-parse --short HEAD")
    return rev
# ##################################################################### import os import sys from dsl import load_profile_config from utils import sh, sh_str, info, debug, e, setfile, appendfile def get_git_rev(): """Return git revision. Assumes $cwd is within the repository.""" return sh_str("git rev-parse --short HEAD") dsl = load_profile_config() manifest = {sh_str("git config --get remote.origin.url"): get_git_rev()} def is_git_repo(path, allow_bare=False): """Determine whether given path names a git repository.""" # This is how git itself does it if os.path.exists(os.path.join(path, '.git', 'HEAD')): return True if allow_bare and os.path.exists(os.path.join(path, 'HEAD')): return True return False def find_ref_clone(repo_name): """See if there's an existing clone to use as a reference.""" git_ref_path = e('${GIT_REF_PATH}')
# POSSIBILITY OF SUCH DAMAGE. # ##################################################################### import os import sys from dsl import load_profile_config from utils import sh, sh_str, info, debug, e, setfile, appendfile def get_git_rev(): """Return git revision. Assumes $cwd is within the repository.""" return sh_str("git rev-parse --short HEAD") dsl = load_profile_config() manifest = {sh_str("git config --get remote.origin.url"): get_git_rev()} def is_git_repo(path): """Determine whether given path names a git repository.""" # This is how git itself does it return os.path.exists(os.path.join(path, '.git', 'HEAD')) def find_ref_clone(repo_name): """See if there's an existing clone to use as a reference.""" git_ref_path = e('${GIT_REF_PATH}') if git_ref_path: for path in git_ref_path.split(':'): candidate = os.path.join(path, repo_name) if is_git_repo(candidate): return candidate
def checkout_repo(cwd, repo):
    """Check out the given repository.

    Arguments:
        cwd -- start in this directory.
        repo -- gives 'name', 'path', 'branch', and 'url' (and
            optionally 'commit')

    We check out the given branch, unless ${CHECKOUT_TAG} is set (then
    we check out that value), or unless a 'commit' key is set (then we
    check out repo['commit']).

    If ${CHECKOUT_SHALLOW} is set, new clones are made with depth 1.

    If ${GIT_REF_PATH} is set, we can check for reference clones that
    may be available in that path (colon separated path as for normal
    Unix conventions).
    """
    buildenv_root = e('${BE_ROOT}')
    repo_name = repo['name']
    repo_path = repo['path']
    repo_url = repo['url']
    branch = repo['branch']
    # Search for a reference clone before changing directories
    # in case it's a relative path.
    os.chdir(cwd)
    refclone = find_ref_clone(repo_name)
    if refclone:
        # Anchor the path: we chdir below, so a relative path would go stale.
        refclone = os.path.abspath(refclone)
    os.chdir(buildenv_root)
    if is_git_repo(repo_path):
        # Existing working copy: switch to the requested branch if needed.
        os.chdir(repo_path)
        current_branch = sh_str('git rev-parse --abbrev-ref HEAD')
        if current_branch != branch:
            # (re)setting origin is a bit rude if someone had
            # carefully set their own variant, but oh well.
            sh('git remote set-url origin', repo_url)
            sh('git fetch origin')
            sh('git checkout', branch)
        sh('git pull --rebase')
    else:
        if e('${CHECKOUT_SHALLOW}'):
            sh('git clone -b', branch, '--depth 1', repo_url, repo_path)
        else:
            # Should we have an option to add --dissociate?
            if refclone:
                sh('git clone --reference', refclone, '-b', branch, repo_url, repo_path)
            else:
                sh('git clone -b', branch, repo_url, repo_path)
        os.chdir(repo_path)
    # A tag or pinned commit overrides the branch tip.
    if e('${CHECKOUT_TAG}'):
        sh('git checkout ${CHECKOUT_TAG}')
    elif 'commit' in repo:
        sh('git checkout', repo['commit'])
    manifest[repo_url] = get_git_rev()
def get_latest_commit(repo, branch):
    """Return the `git ls-remote` fields for *branch* of *repo*.

    Returns None when the first field does not look like a commit hash,
    and an empty list when ls-remote produced no output.

    NOTE(review): callers receive the whole whitespace-split list
    (hash, ref name), not just the hash — confirm this is intended.
    """
    output = sh_str('git ls-remote', repo, f'refs/heads/{branch}')
    # Guard: sh_str may yield empty/None output; the original crashed on
    # .split() in that case.
    commit = output.split() if output else []
    # Reject output whose first field is not a hex object name
    # (e.g. an error message from the remote).
    if commit and not re.search(r'^[a-f0-9]+$', commit[0]):
        return None
    return commit
def alloc_network():
    """Allocate a tap device for the VM and record it in the global."""
    global tapdev
    # Stash the new device name for the other networking helpers.
    tapdev = sh_str('ifconfig tap create')
    info('Using tap device {0}', tapdev)