def get_node_info():
    """Broadcast a "node_info" message describing this builder node.

    Pulls OS/Arch/Platform from the builder's node spec and the BioC
    version from the BBS_BIOC_VERSION environment variable.
    """
    msg = {}
    msg["status"] = "node_info"
    msg["r_version"] = get_r_version()
    msg["os"] = BBScorevars.getNodeSpec(builder_id, "OS")
    msg["arch"] = BBScorevars.getNodeSpec(builder_id, "Arch")
    msg["platform"] = BBScorevars.getNodeSpec(builder_id, "Platform")
    msg["body"] = "node_info"
    msg["bioc_version"] = os.getenv("BBS_BIOC_VERSION")
    send_message(msg)
def update_packages_file():
    """Regenerate the PACKAGES index of the staging repository and, for
    course/scratch repositories, sync the repository to the public website.

    Side effects: sets the module-global 'repos', runs remote commands over
    ssh, sends progress messages, and terminates the process on failure.
    """
    global repos
    r_version = bioc_r_map[os.getenv("BBS_BIOC_VERSION")]
    # Pick the CRAN-style contrib sub-path for this platform.
    if (platform.system() == "Darwin"):
        pkg_type = BBScorevars.getNodeSpec(builder_id, "pkgType")
        if pkg_type == "mac.binary.leopard":
            os_seg = "bin/macosx/contrib/%s" % r_version
        else:
            os_seg = "bin/macosx/mavericks/contrib/%s" % r_version
    elif (platform.system() == "Linux"):
        os_seg = "src/contrib"
    else:
        os_seg = "bin/windows/contrib/%s" % r_version
    # NOTE(review): 'repos'/'url'/'script_loc' are only assigned for the
    # 'course' and 'scratch' repositories; any other value would raise
    # NameError below -- confirm callers never pass anything else.
    if (manifest['repository'] == 'course'):
        repos = "/loc/www/bioconductor-test.fhcrc.org/course-packages/%s" % os_seg
        url = repos.replace("/loc/www/bioconductor-test.fhcrc.org/",
                            "http://bioconductor.org/")
        script_loc = "/loc/www/bioconductor-test.fhcrc.org/course-packages"
    elif (manifest['repository'] == 'scratch'):
        repos = '/loc/www/bioconductor-test.fhcrc.org/scratch-repos/%s/%s' % (
            manifest['bioc_version'], os_seg)
        url = repos.replace("/loc/www/bioconductor-test.fhcrc.org/scratch-repos/",
                            "http://bioconductor.org/scratch-repos/")
        script_loc = "/loc/www/bioconductor-test.fhcrc.org/scratch-repos/%s" % \
            manifest['bioc_version']
    pkg_type = BBScorevars.getNodeSpec(builder_id, "pkgType")
    if pkg_type == "mac.binary.leopard":
        pkg_type = "mac.binary"
    command = \
        "%s [email protected] 'R -f %s/update-repo.R --args %s %s'" \
        % (packagebuilder_ssh_cmd, script_loc, repos, pkg_type)
    print("update packages command: ")
    print(command)
    retcode = subprocess.call(command, shell=True)
    # Fix: this was a Python-2-only print statement; use the print()
    # function like every other message in this function.
    print("retcode for update packages: %d" % retcode)
    send_message({"status": "post_processing", "retcode": retcode,
                  "body": "Updated packages list"})
    if retcode != 0:
        send_message({"status": "post_processing_complete", "retcode": retcode,
                      "body": "Updating packages failed.",
                      "build_product": build_product, "url": url})
        sys.exit("Updating packages failed")
    if (manifest['repository'] == 'course' or manifest['repository'] == 'scratch'):
        command = "%s [email protected] \"source ~/.bash_profile && cd /home/biocadmin/bioc-test-web/bioconductor.org && rake deploy_production\"" % \
            packagebuilder_ssh_cmd
        print("sync command = ")
        print(command)
        retcode = subprocess.call(command, shell=True)
        send_message({"status": "post_processing", "retcode": retcode,
                      "body": "Synced repository to website",
                      "build_product": build_product, "url": url})
        if retcode != 0:
            send_message({"status": "post_processing_complete",
                          "retcode": retcode,
                          "body": "Syncing repository failed",
                          "build_product": build_product, "url": url})
            sys.exit("Sync to website failed")
    send_message({"status": "post_processing_complete", "retcode": retcode,
                  "body": "Post-processing complete.",
                  "build_product": build_product, "url": url})
def copy_outgoing_pkgs(fresh_pkgs_subdir, source_node):
    """Copy freshly built packages (and, for the source node, their
    reference manuals) into the current OUTGOING sub-folder.

    'fresh_pkgs_subdir' must be a "node/subdir" relative path; the process
    exits otherwise. Packages rejected by pkgMustBeRejected() are skipped.

    Fix applied: Python-2 print statements converted to print() calls
    (single-argument form, so output is identical on Python 2 and 3).
    """
    tmp = fresh_pkgs_subdir.split("/")
    if len(tmp) != 2:
        sys.exit("ERROR: Invalid relative path to fresh pkgs %s (must be of the form node/subdir)" % fresh_pkgs_subdir)
    node_id = tmp[0]
    node_hostname = node_id.split("-")[0]
    fileext = BBScorevars.getNodeSpec(node_hostname, 'pkgFileExt')
    fresh_pkgs_subdir = os.path.join(BBScorevars.nodes_rdir.path,
                                     fresh_pkgs_subdir)
    ## Workflow packages do not have manuals/ because we do not run
    ## `R CMD check`.
    manuals_dir = "../manuals"
    if BBScorevars.is_workflow:
        pass
    elif (source_node):
        print("BBS> [stage6] mkdir %s" % manuals_dir)
        os.mkdir(manuals_dir)
    print("BBS> [stage6] BEGIN copying outgoing packages from %s." % fresh_pkgs_subdir)
    pkgType = BBScorevars.getNodeSpec(node_hostname, 'pkgType')
    meat_index_file = os.path.join(BBScorevars.Central_rdir.path,
                                   BBScorevars.meat_index_file)
    dcf = open(meat_index_file, 'r')
    pkgs = bbs.parse.readPkgsFromDCF(dcf, node_id, pkgType)
    dcf.close()
    for pkg in pkgs:
        if pkgMustBeRejected(node_hostname, node_id, pkg):
            continue
        # Re-open the index for each package to read its Version field.
        dcf = open(meat_index_file, 'r')
        version = bbs.parse.getPkgFieldFromDCF(dcf, pkg, 'Version',
                                               BBScorevars.meat_index_file)
        dcf.close()
        ## Copy pkg from 'fresh_pkgs_subdir'.
        pkg_file = "%s_%s.%s" % (pkg, version, fileext)
        pkg_file = os.path.join(fresh_pkgs_subdir, pkg_file)
        print("BBS> [stage6] - copying %s to OUTGOING folder ..." % pkg_file)
        if os.path.exists(pkg_file):
            shutil.copy(pkg_file, ".")
        else:
            print("BBS> [stage6] SKIPPED (file %s doesn't exist)" % pkg_file)
        ## Get reference manual from pkg.Rcheck directory.
        if BBScorevars.is_workflow:
            pass
        elif source_node:
            pdf_file = "%s/meat/%s.Rcheck/%s-manual.pdf" % \
                (BBScorevars.getenv('BBS_WORK_TOPDIR'), pkg, pkg)
            print("BBS> [stage6] - copying %s manual to OUTGOING/manuals folder..." % pkg)
            if os.path.exists(pdf_file):
                shutil.copy(pdf_file, "%s/%s.pdf" % (manuals_dir, pkg))
            else:
                print("BBS> [stage6] SKIPPED (file %s doesn't exist)" % pdf_file)
    print("BBS> [stage6] END copying outgoing packages from %s." % fresh_pkgs_subdir)
    return
def extractTargetPkgListFromMeatIndex():
    """Return the list of packages targeted at this node.

    Reads the meat index from the central rdir and filters it by this
    node's pkgType.
    """
    node = BBSvars.node_hostname
    pkg_type = BBScorevars.getNodeSpec(node, 'pkgType')
    # WOpen() streams the meat index file from the central rdir.
    index_stream = BBScorevars.Central_rdir.WOpen(BBScorevars.meat_index_file)
    target_pkgs = bbs.parse.readPkgsFromDCF(index_stream, node, pkg_type)
    index_stream.close()
    return target_pkgs
def STAGE5_loop(srcpkg_paths, nb_cpu):
    """Queue a binary-build ('buildbin') job per source package and process
    the queue with 'nb_cpu' workers, then print a summary.

    Packages without a STAGE5 command are skipped. Returns None.

    Fix applied: Python-2 print statements converted to print() calls
    (single-argument form, identical output on Python 2 and 3).
    """
    total = len(srcpkg_paths)
    job_queue = []
    for srcpkg_path in srcpkg_paths:
        cmd = BBSbase.getSTAGE5cmd(srcpkg_path)
        if cmd == None:
            continue  # no STAGE5 command for this pkg -> skip it
        pkg = bbs.parse.getPkgFromPath(srcpkg_path)
        version = bbs.parse.getVersionFromPath(srcpkg_path)
        fileext = BBScorevars.getNodeSpec(BBSvars.node_hostname, 'pkgFileExt')
        binpkg_file = "%s_%s.%s" % (pkg, version, fileext)
        pkgdumps_prefix = pkg + '.buildbin'
        pkgdumps = BBSbase.PkgDumps(binpkg_file, pkgdumps_prefix)
        job = BBSbase.BuildPkg_Job(pkg, version, cmd, pkgdumps,
                                   BBSvars.buildbin_rdir)
        job_queue.append(job)
    nb_jobs = len(job_queue)
    print("BBS> BEGIN STAGE5 loop.")
    t0 = time.time()
    nb_products = bbs.jobs.processJobQueue(job_queue, None, nb_cpu,
                                           BBScorevars.r_cmd_timeout, True)
    dt = time.time() - t0
    print("BBS> END STAGE5 loop.")
    print("BBS> -------------------------------------------------------------")
    print("BBS> STAGE5 SUMMARY:")
    print("BBS> o Working dir: %s" % os.getcwd())
    print("BBS> o %d srcpkg file(s) in working dir" % total)
    print("BBS> o %d srcpkg file(s) queued and processed" % nb_jobs)
    print("BBS> o %d binpkg file(s) produced" % nb_products)
    print("BBS> o Total time: %.2f seconds" % dt)
    print("BBS> -------------------------------------------------------------")
    return
def get_node_info():
    """Log this builder node's characteristics and broadcast a "node_info"
    message (OS, arch, platform, R version and BioC version)."""
    logging.info("Node Info:")
    r_version = get_r_version()
    bioc_version = ENVIR['bbs_Bioc_version']
    operating_system = BBScorevars.getNodeSpec(BUILDER_ID, "OS")
    architecture = BBScorevars.getNodeSpec(BUILDER_ID, "Arch")
    plat = BBScorevars.getNodeSpec(BUILDER_ID, "Platform")
    send_message({
        "status": "node_info",
        "r_version": r_version,
        "os": operating_system,
        "arch": architecture,
        "platform": plat,
        "body": "node_info",
        "bioc_version": bioc_version})
    logging.info("\n os: %s\n r_version: %s\n bioc_version: %s"
                 % (operating_system, r_version, bioc_version))
def stage6_make_OUTGOING():
    """Rebuild the OUTGOING folder under the central rdir.

    Iterates over the BBS_OUTGOING_MAP elements ("subdir:node/subdir" or
    "subdir:node1/sub1+node2/sub2"); the one-part form copies packages from
    a single node, the two-part form merges Windows bi-arch packages.
    Exits the process on a malformed map element.

    Fix applied: Python-2 print statements converted to print() calls.
    """
    ## Create working directory
    OUTGOING_dir = os.path.join(BBScorevars.Central_rdir.path, "OUTGOING")
    print("BBS> [stage6] remake_dir %s" % OUTGOING_dir)
    bbs.fileutils.remake_dir(OUTGOING_dir)
    ## Loop over each element of the OUTGOING map
    OUTGOING_map = BBScorevars.getenv('BBS_OUTGOING_MAP')
    map_elts = OUTGOING_map.split(" ")
    for map_elt in map_elts:
        tmp = map_elt.split(":")
        if len(tmp) != 2:
            sys.exit("ERROR: Invalid OUTGOING map element %s" % map_elt)
        # The "source" sub-folder also receives the reference manuals.
        source_node = False
        if tmp[0] == "source":
            source_node = True
        OUTGOING_subdir = os.path.join(OUTGOING_dir, tmp[0])
        print("BBS> [stage6] mkdir %s" % OUTGOING_subdir)
        os.mkdir(OUTGOING_subdir)
        print("BBS> [stage6] cd %s/" % OUTGOING_subdir)
        os.chdir(OUTGOING_subdir)
        tmp2 = tmp[1].split("+")
        if len(tmp2) == 1:
            copy_outgoing_pkgs(tmp[1], source_node)
        elif len(tmp2) == 2:
            make_outgoing_biarch_pkgs(tmp2[0], tmp2[1])
        else:
            sys.exit("ERROR: Invalid OUTGOING map element %s" % map_elt)
    return
def get_pkgs(dcf, node_hostname=None, node_id=None):
    """Read package names from a DCF stream, sorted case-insensitively.

    When 'node_hostname' is given, only packages supported by that node's
    pkgType are returned (filtered against 'node_id').
    """
    if node_hostname:
        pkgType = BBScorevars.getNodeSpec(node_hostname, 'pkgType')
        pkgs = bbs.parse.readPkgsFromDCF(dcf, node_id, pkgType)
    else:
        pkgs = bbs.parse.readPkgsFromDCF(dcf)
    # Fix: the original cmp/string.lower comparator sort is Python-2-only
    # ('cmp', 'string.lower' and comparator sorts are all gone in 3); a
    # key-based case-insensitive sort is equivalent and works everywhere.
    pkgs.sort(key=lambda p: p.lower())
    return pkgs
def set_NODES(fancynames_in_one_string):
    """Populate the global NODES list from a space-separated string of
    node fancynames; exit the process if no node results."""
    for fancyname in fancynames_in_one_string.split(' '):
        if fancyname == "":
            continue
        node_id = fancyname.split(":")[0]
        hostname = node_id.split("-")[0]
        # NOTE(review): the second replace() argument looks like it was
        # originally '&nbsp;' (non-breaking space) for HTML output -- the
        # character here is reproduced as found; verify against upstream.
        os_html = BBScorevars.getNodeSpec(hostname, 'OS').replace(' ', ' ')
        arch = BBScorevars.getNodeSpec(hostname, 'Arch')
        plat = BBScorevars.getNodeSpec(hostname, 'Platform')
        buildbin = fancyname_has_a_bin_suffix(fancyname)
        pkgs = get_pkgs_from_meat_index(hostname, node_id)
        NODES.append(Node(hostname, node_id, os_html, arch, plat,
                          buildbin, pkgs))
    if len(NODES) == 0:
        sys.exit("nothing to report (no nodes) => EXIT.")
    return
def _BiocGreaterThanOrEqualTo(x, y):
    """Return True if the Bioconductor version (from BBS_BIOC_VERSION) is
    >= x.y.

    If BBS_BIOC_VERSION is not defined, we assume it's the latest version
    and return True.
    """
    bioc_version = BBScorevars.getenv('BBS_BIOC_VERSION', False)
    # Fix: identity comparison for None ('is None'), not '== None'.
    if bioc_version is None:
        return True
    parts = bioc_version.split('.')
    x0 = int(parts[0])
    y0 = int(parts[1])
    # Lexicographic (major, minor) comparison.
    return x0 > x or (x0 == x and y0 >= y)
def _getMaintainerFromDir(pkg_dir):
    """Return the Maintainer field of the package in 'pkg_dir'.

    Runs utils/getMaintainer.R on the package's DESCRIPTION file with
    Rscript; raises DcfFieldNotFoundError when the script reports 'NA'.
    """
    desc_file = getDescFile(pkg_dir)
    r_home = BBScorevars.getenv('BBS_R_HOME')
    Rscript_cmd = os.path.join(r_home, "bin", "Rscript")
    script_path = os.path.join(BBScorevars.BBS_home, "utils",
                               "getMaintainer.R")
    cmd = [Rscript_cmd, '--vanilla', script_path, desc_file]
    # Fix: the devnull handle was opened but never closed (fd leak); the
    # context manager releases it even if check_output raises.
    with open(os.devnull, 'w') as FNULL:
        maintainer = subprocess.check_output(cmd, stderr=FNULL)
    # NOTE(review): under Python 3 check_output returns bytes, so the
    # comparison with the str 'NA' would never match -- confirm the
    # interpreter version this runs on before porting.
    if maintainer == 'NA':
        raise DcfFieldNotFoundError(desc_file, 'Maintainer')
    return maintainer
def getSTAGE2cmd(pkg, version):
    """Return the shell command that installs 'pkg' during STAGE2.

    On Windows, multiarch-capable packages are installed from the source
    tarball with --merge-multiarch; otherwise a single-arch install is
    produced.
    """
    if sys.platform != "win32":
        return '%s CMD INSTALL %s' % (BBSvars.r_cmd, pkg)
    win_archs = _supportedWinArchs(pkg)
    if _mustRunSTAGE2InMultiarchMode() and len(win_archs) == 2:
        # Download the source tarball, install both archs in one pass,
        # then remove the tarball.
        srcpkg_file = pkg + '_' + version + '.tar.gz'
        srcpkg_url = BBScorevars.Central_rdir.url + '/src/contrib/' + \
                     srcpkg_file
        steps = [
            '%s -O %s' % (BBScorevars.getenv('BBS_CURL_CMD'), srcpkg_url),
            '%s CMD INSTALL --merge-multiarch %s' % (BBSvars.r_cmd,
                                                     srcpkg_file),
            'rm %s' % srcpkg_file,
        ]
        return ' && '.join(steps)
    # Single supported arch: force a no-multiarch install for it.
    return '%s --arch %s CMD INSTALL --no-multiarch %s' % \
           (BBSvars.r_cmd, win_archs[0], pkg)
def prepare_STAGE5_job_queue(srcpkg_paths):
    """Build and return a JobQueue of 'buildbin' jobs, one per source
    package path that has a STAGE5 command."""
    # Trailing comma: Python-2 print statement without a newline, so the
    # closing "OK" below lands on the same console line.
    print "BBS> Preparing STAGE5 job queue ... ",
    stage = 'buildbin'
    jobs = []
    for srcpkg_path in srcpkg_paths:
        cmd = BBSbase.getSTAGE5cmd(srcpkg_path)
        if cmd == None:
            # No STAGE5 command for this package -> it is not queued.
            continue
        pkg = bbs.parse.getPkgFromPath(srcpkg_path)
        version = bbs.parse.getVersionFromPath(srcpkg_path)
        fileext = BBScorevars.getNodeSpec(BBSvars.node_hostname, 'pkgFileExt')
        # Expected name of the binary package this job will produce.
        binpkg_file = "%s_%s.%s" % (pkg, version, fileext)
        pkgdumps_prefix = pkg + '.' + stage
        pkgdumps = BBSbase.PkgDumps(binpkg_file, pkgdumps_prefix)
        job = BBSbase.BuildPkg_Job(pkg, version, cmd, pkgdumps,
                                   BBSvars.buildbin_rdir)
        jobs.append(job)
    print "OK"
    job_queue = bbs.jobs.JobQueue(stage, jobs, None)
    # _total counts ALL input paths, including the skipped ones, so the
    # summary can report skipped packages. (Reaches into a private field.)
    job_queue._total = len(srcpkg_paths)
    return job_queue
def getSTAGE2cmd(pkg, version):
    """Return the shell command that installs 'pkg' during STAGE2.

    Honors the package's INSTALLprepend / INSTALLprepend.win BBS options
    (prefixed to the final command). On Windows in multiarch mode the
    package is rebuilt from its source tarball into a temporary libdir
    first, then installed from the resulting zip (see comment below).
    """
    prepend = bbs.parse.getBBSoptionFromDir(pkg, 'INSTALLprepend')
    if sys.platform != "win32":
        cmd = '%s %s' % (_get_RINSTALL_cmd0(), pkg)
    else:
        # A Windows-specific prepend overrides the generic one.
        prepend_win = bbs.parse.getBBSoptionFromDir(pkg, 'INSTALLprepend.win')
        if prepend_win != None:
            prepend = prepend_win
        win_archs = _supportedWinArchs(pkg)
        if _mustRunSTAGE2InMultiarchMode() and len(win_archs) >= 2:
            ## Here is what Dan's commit message says about why BBS uses this
            ## very long and complicated compound command to install packages
            ## in multiarch mode on Windows during STAGE2 (see
            ## 'git show 87822fb346e04b4301d0c2efd7ec1a2a8762e93a'):
            ##   Install STAGE2 target pkgs to zip+libdir first, then install
            ##   zip. This mitigates the problem where the INSTALL times out
            ##   after installing only one architecture. Now, if the install
            ##   times out, it has only failed to install a package to a
            ##   temporary libdir, and the previous installation is still
            ##   intact (or pkg is not installed at all), so dependent packages
            ##   will not complain about the timed-out package being only
            ##   available for one architecture.
            ##   We still need to figure out why mzR in particular times out
            ##   during INSTALL. When run manually (admittedly not during
            ##   peak load times) the install takes ~ 5-6 minutes, even when
            ##   done via the build system.
            curl_cmd = BBScorevars.getenv('BBS_CURL_CMD')
            srcpkg_file = pkg + '_' + version + '.tar.gz'
            srcpkg_url = BBScorevars.Central_rdir.url + '/src/contrib/' + \
                         srcpkg_file
            zip_file = srcpkg_file.replace(".tar.gz", ".zip")
            # download tarball && build binary zip && install zip && clean up
            cmd = '%s -O %s' % (curl_cmd, srcpkg_url) + ' && ' + \
                  _get_BuildBinPkg_cmd(srcpkg_file, win_archs) + ' && ' + \
                  '%s %s' % (_get_RINSTALL_cmd0(), zip_file) + ' && ' + \
                  'rm %s %s' % (srcpkg_file, zip_file)
        else:
            cmd = '%s %s' % (_get_RINSTALL_cmd0(win_archs), pkg)
    if prepend != None:
        cmd = '%s %s' % (prepend, cmd)
    return cmd
def getSTAGE2cmd(pkg, version):
    """Return the shell command that installs 'pkg' during STAGE2.

    On Windows in multiarch mode the source tarball is rebuilt into a
    dedicated <pkg>.buildbin-libdir with --build --merge-multiarch, the
    resulting zip is installed, and the intermediates are removed.
    """
    if sys.platform != "win32":
        return '%s CMD INSTALL %s' % (BBSvars.r_cmd, pkg)
    win_archs = _supportedWinArchs(pkg)
    if _mustRunSTAGE2InMultiarchMode() and len(win_archs) == 2:
        srcpkg_file = pkg + '_' + version + '.tar.gz'
        srcpkg_url = BBScorevars.Central_rdir.url + '/src/contrib/' + \
                     srcpkg_file
        zipfile = srcpkg_file.replace(".tar.gz", ".zip")
        steps = [
            'rm -rf %s.buildbin-libdir' % pkg,
            'mkdir %s.buildbin-libdir ' % pkg,
            '%s -O %s' % (BBScorevars.getenv('BBS_CURL_CMD'), srcpkg_url),
            '%s CMD INSTALL --build --library=%s.buildbin-libdir --merge-multiarch %s' % (BBSvars.r_cmd, pkg, srcpkg_file),
            '%s CMD INSTALL %s ' % (BBSvars.r_cmd, zipfile),
            'rm %s %s' % (srcpkg_file, zipfile),
        ]
        return ' && '.join(steps)
    # Single supported arch: force a no-multiarch install for it.
    return '%s --arch %s CMD INSTALL --no-multiarch %s' % \
           (BBSvars.r_cmd, win_archs[0], pkg)
### import sys import bbs.rdir import bbs.jobs import BBScorevars ############################################################################## ### BBS GLOBAL VARIABLES ############################################################################## ### Only needed by BBS-prerun.py and BBS-run.py MEAT0_rdir = bbs.rdir.RemoteDir('BBS_MEAT0_RDIR', None, BBScorevars.getenv('BBS_MEAT0_RDIR'), BBScorevars.getenv('BBS_MEAT0_RHOST', False), BBScorevars.getenv('BBS_MEAT0_RUSER', False), BBScorevars.ssh_cmd, BBScorevars.rsync_cmd, BBScorevars.rsync_rsh_cmd) meat_path = BBScorevars.getenv('BBS_MEAT_PATH') work_topdir = BBScorevars.getenv('BBS_WORK_TOPDIR') r_cmd = BBScorevars.getenv('BBS_R_CMD') ### Only needed by BBS-prerun.py MEAT0_type = int(BBScorevars.getenv('BBS_MEAT0_TYPE'))
def build_package(source_build):
    """Run 'R CMD build' (source) or a binary build for the current job.

    'source_build' True builds the source tarball; False builds the binary
    (or skips it when the node's pkgType is "source"). Progress and results
    are reported via send_message()/logging. Returns the build's return
    code (0 on success; timeout is mapped to -9; -4/-6 are reset to 0 so
    the rest of the build/check can proceed).

    Relies on module globals: manifest, ENVIR, working_dir, longBuild,
    message_sequence, warnings.
    """
    global pkg_type_views
    global longBuild
    pkg_type = BBScorevars.getNodeSpec(BUILDER_ID, "pkgType")
    buildmsg = None
    if (source_build):
        buildmsg = "building"
    else:
        buildmsg = "buildingbin"
    # Source-only nodes never produce a binary build.
    if ((not source_build) and (pkg_type == "source")):
        send_message({"status": "skip_buildbin", "body": "skipped"})
        logging.info("Skip buildbin")
        return(0)
    if (not source_build):
        # Remap the node pkgType to the platform's binary package type.
        if platform.system() == "Darwin":
            pkg_type = "mac.binary"
        elif platform.system() == "Linux":
            pkg_type = "source"
        elif platform.system() == "Windows":
            pkg_type = "win.binary"
        else:
            pkg_type = "source"
        send_message({"status": "starting_buildbin", "body": ""})
        logging.info("Start buildbin")
    global message_sequence
    global warnings
    message_sequence = 1
    flags = "--keep-empty-dirs --no-resave-data"
    if (source_build):
        package_name = manifest['job_id'].split("_")[0]
        # On Windows, build with the one supported arch only.
        if ((platform.system() == "Windows") and
                (isUnsupported("Windows", "win32"))):
            r_cmd = "%s --arch x64 CMD build %s %s" % \
                (ENVIR['bbs_R_cmd'], flags, package_name)
        elif ((platform.system() == "Windows") and
                (isUnsupported("Windows", "win64"))):
            r_cmd = "%s --arch i386 CMD build %s %s" % \
                (ENVIR['bbs_R_cmd'], flags, package_name)
        else:
            r_cmd = "%s CMD build %s %s" % \
                (ENVIR['bbs_R_cmd'], flags, package_name)
    else:
        if pkg_type == "mac.binary":
            # Fresh library dir for the universal-binary build script.
            libdir = "libdir"
            if os.path.exists(libdir):
                _call("rm -rf %s" % libdir, False)
            if (not (os.path.exists(libdir))):
                os.mkdir(libdir)
            r_cmd = os.environ['SPB_HOME'] + "/build-universal.sh %s %s" % (
                get_source_tarball_name(), libdir)
        # NOTE(review): r_cmd is only assigned here for mac.binary; other
        # binary paths rely on the win.binary branch below or would raise
        # NameError -- confirm.
    status = None
    if (source_build):
        status = "r_cmd"
        outfile = "R.out"
    else:
        status = "r_buildbin_cmd"
        outfile = "Rbuildbin.out"
    logging.debug("Before build, working dir is %s." % working_dir)
    # NOTE(review): this message body was garbled in extraction; the
    # trailing whitespace/newline of the literal should be confirmed
    # against the original source.
    send_message({
        "body": "Starting Build package. ",
        "status": "preprocessing",
        "retcode": 0
    })
    start_time = datetime.datetime.now()
    if ((not source_build) and pkg_type == "win.binary"):
        retcode = win_multiarch_buildbin(buildmsg)
    else:
        send_message({"status": status, "body": r_cmd})
        retcode = do_build(r_cmd, buildmsg, source_build)
    stop_time = datetime.datetime.now()
    time_dif = stop_time - start_time
    min_time, sec_time = divmod(time_dif.seconds, 60)
    # Reformat the seconds part of the H:M:S string to 2 decimals.
    sec_time = str(format(float(str(time_dif).split(":")[2]), '.2f'))
    elapsed_time = str(min_time) + " minutes " + sec_time + " seconds"
    send_message({
        "body": "Build Package status: " + str(retcode) + ". ",
        "status": "post_processing",
        "retcode": retcode
    })
    # check for warnings (and treat any "error:" line as a failure)
    out_fh = open(outfile)
    warnings = False
    for line in out_fh:
        if line.lower().startswith("warning:"):
            warnings = True
        if line.lower().startswith("error:"):
            retcode = 1
    out_fh.close()
    # to catch windows timeout
    timeout_limit = int(ENVIR['timeout_limit'])
    if longBuild:
        timeout_limit = int(4800)
    if (timeout_limit <= time_dif.seconds):
        logging.info("Build time indicates TIMEOUT")
        retcode = -9
    tarname = get_source_tarball_name()
    if ((retcode == 0) and (source_build)):
        rawsize = os.path.getsize(tarname)
        sizeFile = rawsize/(1024*1024.0)  # size (MB) for build report
        kib = rawsize / float(1024)
        filesize = "%.2f" % kib
        logging.info("Size: " + str(sizeFile))
        send_message({
            "body": "Determining package size complete: " +
                    format(sizeFile, '.4f') + "MB. ",
            "status": "post_processing",
            "retcode": retcode,
            "filesize": filesize
        })
    complete_status = None
    if (source_build):
        complete_status = "build_complete"
    else:
        complete_status = "buildbin_complete"
    # build output printed entirely here
    # changed from interactively during build
    background = Tailer(outfile, buildmsg)
    background.start()
    out_fh = open(outfile, "r")
    out_fh.flush()
    out_fh.close()
    background.stop()
    send_message({
        "status": complete_status,
        "retcode": retcode,
        "warnings": warnings,
        "body": "Build completed with status %d" % retcode,
        "elapsed_time": elapsed_time})
    logging.info(complete_status + "\n Build completed with status: " +
                 str(retcode) + " Elapsed time: " + elapsed_time)
    # gave specific retcode to trigger warning but
    # still want to proceed with rest of build/check after reporting
    if (retcode == -4 or retcode == -6):
        retcode = 0
    return (retcode)
# BUILD BIN status if BBSreportutils.is_doing_buildbin(node): stage = 'buildbin' if skipped_is_OK: status = "skipped" else: status = get_status_from_summary_file(pkg, node.id, stage) out.write('%s#%s#%s: %s\n' % (pkg, node.id, stage, status)) out.close() print "BBS> [make_STATUS_DB] END" return ############################################################################## ### MAIN SECTION ############################################################################## print "BBS> [stage7a] STARTING stage7a at %s..." % time.asctime() central_rdir_path = BBScorevars.Central_rdir.path if central_rdir_path != None: print "BBS> [stage7a] cd %s/" % central_rdir_path os.chdir(central_rdir_path) report_nodes = BBScorevars.getenv('BBS_REPORT_NODES') BBSreportutils.set_NODES(report_nodes) allpkgs = BBSreportutils.get_pkgs_from_meat_index() make_STATUS_DB(allpkgs) print "BBS> [stage7a] DONE at %s." % time.asctime()
def make_outgoing_biarch_pkgs(fresh_pkgs_subdir1, fresh_pkgs_subdir2):
    """Produce Windows bi-arch packages in the current OUTGOING sub-folder.

    'fresh_pkgs_subdir1' must come from the i386 (win.binary) node and
    'fresh_pkgs_subdir2' from the x64 (win64.binary) node; both are
    "node/subdir" relative paths. Packages supported by only one node are
    copied as-is; packages supported by both are merged with
    utils/merge-win-bin-pkgs.sh. Exits the process on invalid input.

    Fix applied: Python-2 print statements (with lost backslash
    continuations in the collapsed source) converted to print() calls.
    """
    tmp1 = fresh_pkgs_subdir1.split("/")
    if len(tmp1) != 2:
        sys.exit("ERROR: Invalid relative path to fresh pkgs %s (must be of the form node/subdir)" % fresh_pkgs_subdir1)
    node1_id = tmp1[0]
    node1_hostname = node1_id.split("-")[0]
    tmp2 = fresh_pkgs_subdir2.split("/")
    if len(tmp2) != 2:
        sys.exit("ERROR: Invalid relative path to fresh pkgs %s (must be of the form node/subdir)" % fresh_pkgs_subdir2)
    node2_id = tmp2[0]
    node2_hostname = node2_id.split("-")[0]
    ## Check that node1 and node2 are registered as Windows i386 and x64
    ## builders, respectively
    pkgType1 = BBScorevars.getNodeSpec(node1_hostname, 'pkgType')
    if pkgType1 != "win.binary":
        sys.exit("ERROR: %s pkgType is not \"win.binary\"" % node1_hostname)
    pkgType2 = BBScorevars.getNodeSpec(node2_hostname, 'pkgType')
    if pkgType2 != "win64.binary":
        sys.exit("ERROR: %s pkgType is not \"win64.binary\"" % node2_hostname)
    fileext = BBScorevars.getNodeSpec(node1_hostname, 'pkgFileExt')
    fileext2 = BBScorevars.getNodeSpec(node2_hostname, 'pkgFileExt')
    if fileext2 != fileext:
        sys.exit("ERROR: %s pkgFileExt and %s pkgFileExt differ" % (node1_hostname, node2_hostname))
    fresh_pkgs_subdir1 = os.path.join(BBScorevars.nodes_rdir.path,
                                      fresh_pkgs_subdir1)
    fresh_pkgs_subdir2 = os.path.join(BBScorevars.nodes_rdir.path,
                                      fresh_pkgs_subdir2)
    print("BBS> [stage6] BEGIN making outgoing bi-arch packages from %s and %s."
          % (fresh_pkgs_subdir1, fresh_pkgs_subdir2))
    ## Get lists of supported pkgs for node1 and node2
    meat_index_file = os.path.join(BBScorevars.Central_rdir.path,
                                   BBScorevars.meat_index_file)
    dcf = open(meat_index_file, 'r')
    pkgs1 = bbs.parse.readPkgsFromDCF(dcf, node1_id, pkgType1)
    dcf.close()
    dcf = open(meat_index_file, 'r')
    pkgs2 = bbs.parse.readPkgsFromDCF(dcf, node2_id, pkgType2)
    dcf.close()
    ## Loop on list of supported pkgs
    pkgs0 = set(pkgs1 + pkgs2)
    nb_products = 0
    t1 = time.time()
    for pkg in pkgs0:
        dcf = open(meat_index_file, 'r')
        version = bbs.parse.getPkgFieldFromDCF(dcf, pkg, 'Version',
                                               BBScorevars.meat_index_file)
        dcf.close()
        binpkg_file = "%s_%s.%s" % (pkg, version, fileext)
        if pkg not in pkgs1:
            if pkgMustBeRejected(node2_hostname, node2_id, pkg):
                continue
            ## Copy pkg from 'fresh_pkgs_subdir2'
            binpkg_file2 = os.path.join(fresh_pkgs_subdir2, binpkg_file)
            shutil.copy(binpkg_file2, ".")
            nb_products += 1
            continue
        if pkg not in pkgs2:
            if pkgMustBeRejected(node1_hostname, node1_id, pkg):
                continue
            ## Copy pkg from 'fresh_pkgs_subdir1'
            binpkg_file1 = os.path.join(fresh_pkgs_subdir1, binpkg_file)
            shutil.copy(binpkg_file1, ".")
            nb_products += 1
            continue
        if pkgMustBeRejected(node1_hostname, node1_id, pkg) or \
           pkgMustBeRejected(node2_hostname, node2_id, pkg):
            continue
        ## Merge
        syscmd = '%s/utils/merge-win-bin-pkgs.sh %s %s %s %s cleanup' % \
            (BBScorevars.BBS_home, pkg, version, fresh_pkgs_subdir1,
             fresh_pkgs_subdir2)
        bbs.jobs.doOrDie(syscmd)
        nb_products += 1
    dt = time.time() - t1
    print("BBS> [stage6] END making outgoing bi-arch packages from %s and %s."
          % (fresh_pkgs_subdir1, fresh_pkgs_subdir2))
    print("BBS> -------------------------------------------------------------")
    print("BBS> [stage6] MERGE(%s, %s) SUMMARY:" % (node1_id, node2_id))
    print("BBS> o Working dir: %s" % os.getcwd())
    print("BBS> o %d pkg(s) supported on Windows" % len(pkgs0))
    print("BBS> o %d binpkg file(s) produced" % nb_products)
    print("BBS> o Total time: %.2f seconds" % dt)
    print("BBS> -------------------------------------------------------------")
    return
def is_build_required(manifest):
    """Decide whether the package described by 'manifest' needs a build.

    Reads the package version from its DESCRIPTION (svn, github or issue
    tracker source) and compares it with the version already published in
    the target repository. Returns True when they differ, when the package
    has never been pushed, or when the manifest carries force=True.

    NOTE(review): the credential-logging region of this function was
    redacted by a secret scanner in the source seen here; it has been
    reconstructed, and the credentials are now logged as placeholders.
    """
    global svn_url_global
    svn_url_global = manifest['svn_url']
    package_name = manifest['job_id'].split("_")[0]
    logging.info("Starting is_build_required() '%s'." % package_name)
    if (get_package_source() == "svn"):
        description_url = manifest['svn_url'].rstrip("/") + "/DESCRIPTION"
        # Security fix: never write the svn credentials to the debug log.
        logging.debug("is_build_required() package source is svn"
                      + "\n description_url = " + description_url
                      + "\n svn_user = <redacted>"
                      + "\n svn_pass = <redacted>")
        try:
            description = subprocess.Popen([
                "curl", "-k", "-s", "--user",
                "%s:%s" % (ENVIR['svn_user'], ENVIR['svn_pass']),
                description_url
            ], stdout=subprocess.PIPE).communicate()[0]
            # TODO - handle it if description does not exist
        except Exception:
            # Fix: narrowed from a bare 'except:'; still logged and
            # re-raised, so propagation is unchanged.
            logging.error("is_build_required() curl exception: %s.",
                          sys.exc_info()[0])
            raise
        logging.debug("is_build_required()"
                      + "\n description = %s" % description
                      + "\n length = %d" % len(description))
        dcf_file = dcf.DcfRecordParser(description.rstrip().split("\n"))
        send_dcf_info(dcf_file)
        svn_version = dcf_file.getValue("Version")
    elif get_package_source() == "github":
        github_url = re.sub(r'\.git$', '', manifest['svn_url'])
        if not github_url.endswith("/"):
            github_url += "/"
        # We only build the master branch. There had better be one.
        # (technically we build whatever the default branch is, but
        # this step looks at master because to find out what the
        # default branch is at this point we would need octokit here).
        github_url += "master/DESCRIPTION"
        github_url = github_url.replace("https://github.com",
                                        "https://raw.githubusercontent.com")
        f = urllib2.urlopen(github_url)
        dcf_text = f.read()
        dcf_file = dcf.DcfRecordParser(dcf_text.rstrip().split("\n"))
        send_dcf_info(dcf_file)
        svn_version = dcf_file.getValue("Version")
    elif get_package_source() == "tracker":
        tmp = manifest["svn_url"].split("/")
        pkgname = tmp[len(tmp)-1].replace(".tar.gz", "")
        if (pkgname.find("_") == -1):
            # package name doesn't have version in it
            return(True)
        # TODO - download tarball and examine DESCRIPTION file
        svn_version = pkgname.split("_")[1]
    if ("force" in manifest.keys()):
        if (manifest['force'] is True):
            return(True)
    r_version = BIOC_R_MAP[ENVIR['bbs_Bioc_version']]
    pkg_type = BBScorevars.getNodeSpec(BUILDER_ID, "pkgType")
    cran_repo_map = {
        'source': "src/contrib",
        'win.binary': "bin/windows/contrib/" + r_version,
        'win64.binary': "bin/windows64/contrib/" + r_version,
        'mac.binary': "bin/macosx/contrib/" + r_version,
        'mac.binary.mavericks': "bin/macosx/mavericks/contrib/" + r_version
    }
    # todo - put repos url in config file (or get it from user)
    base_repo_url = HOSTS['bioc']
    if (manifest['repository'] == 'course'):
        base_repo_url += '/course-packages'
    elif (manifest['repository'] == 'scratch'):
        base_repo_url += '/scratch_repos/' + manifest['bioc_version']
    repository_url = "%s/%s/PACKAGES" % (base_repo_url,
                                         cran_repo_map[pkg_type])
    # What if there is no file at this url?
    packages = subprocess.Popen(["curl", "-k", "-s", repository_url],
                                stdout=subprocess.PIPE).communicate()[0]
    # Scan the PACKAGES index for our package's Version line.
    inpackage = False
    repository_version = False
    for line in packages.split("\n"):
        if line == "Package: %s" % package_name:
            inpackage = True
        if (line.startswith("Version: ") and inpackage):
            repository_version = line.split(": ")[1]
            break
    if not repository_version:
        return True  # package hasn't been pushed to repo before
    logging.debug("is_build_required()"
                  + "\n [%s] svn version is %s, repository version is %s"
                  % (package_name, svn_version, repository_version))
    return svn_version != repository_version
## Report title. if BBScorevars.subbuilds == "bioc-longtests": REPORT_TITLE = "Long tests" elif BBScorevars.subbuilds == "workflows": REPORT_TITLE = "Workflows build" else: if len(NODES) != 1: REPORT_TITLE = "Multiple platform build/check" else: REPORT_TITLE = "Build/check" REPORT_TITLE += " report for " if BBScorevars.subbuilds == "cran": REPORT_TITLE += "CRAN" else: bioc_version = BBScorevars.getenv('BBS_BIOC_VERSION', False) REPORT_TITLE += "BioC %s" % bioc_version if BBScorevars.subbuilds == "data-experiment": REPORT_TITLE += " experimental data" ## Stages to display on report (as columns in HTML table). def stages_to_display(): if BBScorevars.subbuilds == "bioc-longtests": return ['checksrc'] # we run 'buildsrc' but don't display it if BBScorevars.subbuilds == "workflows": return ['install', 'buildsrc'] return ['install', 'buildsrc', 'checksrc', 'buildbin'] def stage_label(stage): stage2label = { 'install': "INSTALL",
import bbs.rdir
import bbs.jobs
import BBScorevars

##############################################################################
### BBS GLOBAL VARIABLES
##############################################################################

### Only needed by BBS-prerun.py and BBS-run.py

# Remote location of the original "meat" (the package sources) plus the
# ssh/rsync commands used to reach it. RHOST/RUSER use getenv(..., False)
# -- presumably optional/unset is tolerated; verify in BBScorevars.getenv.
MEAT0_rdir = bbs.rdir.RemoteDir('BBS_MEAT0_RDIR',
                                None,
                                BBScorevars.getenv('BBS_MEAT0_RDIR'),
                                BBScorevars.getenv('BBS_MEAT0_RHOST', False),
                                BBScorevars.getenv('BBS_MEAT0_RUSER', False),
                                BBScorevars.ssh_cmd,
                                BBScorevars.rsync_cmd,
                                BBScorevars.rsync_rsh_cmd)

# Local working copy of the meat, the node's work dir, and the R command
# and home directory used for builds.
meat_path = BBScorevars.getenv('BBS_MEAT_PATH')
work_topdir = BBScorevars.getenv('BBS_WORK_TOPDIR')
r_cmd = BBScorevars.getenv('BBS_R_CMD')
r_home = BBScorevars.getenv('BBS_R_HOME')

### Only needed by BBS-prerun.py
def propagate_package():
    """Push the freshly built package file to the staging repository.

    Finds the build product in working_dir, prunes older versions of the
    same package from the target repos, and copies the new file over
    (cygwin ssh/scp on Windows, the module's ssh()/scp() helpers
    elsewhere). Sets the module globals 'build_product', 'repos' and
    'url'; exits the process on failure.
    """
    global build_product
    global repos
    global url
    pkg_type = BBScorevars.getNodeSpec(BUILDER_ID, "pkgType")
    ext = BBScorevars.pkgType2FileExt[pkg_type]
    files = os.listdir(working_dir)
    # First file in working_dir with the expected extension.
    # NOTE(review): filter(...) is not subscriptable under Python 3 --
    # confirm this module still runs on Python 2.
    build_product = filter(lambda x: x.endswith(ext), files)[0]
    r_version = BIOC_R_MAP[ENVIR['bbs_Bioc_version']]
    # CRAN-style contrib sub-path for this platform.
    if (platform.system() == "Darwin"):
        os_seg = "bin/macosx/mavericks/contrib/%s" % r_version
    elif (platform.system() == "Linux"):
        os_seg = "src/contrib"
    else:
        os_seg = "bin/windows/contrib/%s" % r_version
    # NOTE(review): 'repos'/'url' are only assigned for 'course' and
    # 'scratch' repositories; other values would raise NameError below.
    if (manifest['repository'] == 'course'):
        repos = "/loc/www/bioconductor-test.fhcrc.org/course-packages/%s" % os_seg
        url = repos.replace("/loc/www/bioconductor-test.fhcrc.org/",
                            HOSTS['bioc'])
    elif (manifest['repository'] == 'scratch'):
        repos = '/loc/www/bioconductor-test.fhcrc.org/scratch-repos/%s/%s' % (
            manifest['bioc_version'], os_seg)
        url = repos.replace(
            "/loc/www/bioconductor-test.fhcrc.org/scratch-repos/",
            HOSTS['bioc'] + '/scratch-repos')
    url += "/" + build_product
    rawsize = os.path.getsize(build_product)
    kib = rawsize / float(1024)
    filesize = "%.2f" % kib  # size in KiB, reported with the message below
    package_name = manifest['job_id'].split("_")[0]
    files_to_delete = "%s/%s_*.%s" % (repos, package_name, ext)
    if (platform.system() == "Windows"):
        # Remove older builds of this package from the staging host.
        command = "c:/cygwin/bin/ssh.exe -qi %s -o StrictHostKeyChecking=no biocadmin@%s 'rm -f %s/%s_*.zip'"
        command = command % (ENVIR['spb_RSA_key'], ENVIR['spb_staging_url'],
                             repos, package_name)
        retcode = subprocess.call(command)
    else:
        logging.info("propagate_package() files_to_delete: %s" % files_to_delete)
        retcode = ssh("rm -f %s" % files_to_delete)
    logging.info(
        "Finished propagate_package().\n Result of deleting files: %d." % retcode)
    # NOTE(review): trailing whitespace of this message body was garbled in
    # extraction; confirm against the original source.
    send_message({
        "body": "Pruning older packages from repository. ",
        "status": "post_processing",
        "retcode": retcode
    })
    if retcode != 0:
        logging.error("propagate_package() Failed to prune repos.")
        sys.exit("repos prune failed")
    if (platform.system() == "Windows"):
        logging.info("propagate_package() Windows chmod")
        chmod_retcode = subprocess.call(
            "chmod a+r %s" % os.path.join(working_dir, package_name))
        logging.info("propagate_package() Windows chmod_retcode = %d" % chmod_retcode)
        send_message({
            "status": "chmod_retcode",
            "body": "chmod_retcode=%d" % chmod_retcode,
            "retcode": chmod_retcode
        })
        # Copy the build product to the staging repository.
        command = "c:/cygwin/bin/scp.exe -qi %s -o StrictHostKeyChecking=no %s biocadmin@%s:%s/"
        command = command % (ENVIR['spb_RSA_key'], build_product,
                             ENVIR['spb_staging_url'], repos)
        logging.info("propagate_package() Windows scp command = %s." % command)
        retcode = subprocess.call(command)
        # Make the uploaded zip world-readable on the staging host.
        command = "c:/cygwin/bin/ssh.exe -qi %s -o StrictHostKeyChecking=no biocadmin@%s 'chmod a+r %s/%s_*.zip'"
        command = command % (ENVIR['spb_RSA_key'], ENVIR['spb_staging_url'],
                             repos, package_name)
        remote_chmod_retcode = subprocess.call(command)
        logging.info("propagate_package() Windows remote_chmod_retcode = %s" % remote_chmod_retcode)
    else:
        logging.info("propagate_package() %s chmod not run" % platform.system())
        retcode = scp(build_product, repos)
    logging.info("propagate_package() Result of copying file: %d" % retcode)
    send_message({
        "body": "Copied build file to repository. ",
        "status": "post_processing",
        "retcode": retcode,
        "build_product": build_product,
        "filesize": filesize
    })
    if retcode != 0:
        logging.error("propagate_package() Failed to copy file to repository.")
        sys.exit("failed to copy file to repository")
if BBSreportutils.is_doing_buildbin(node): stage = 'buildbin' if skipped_is_OK: status = "skipped" else: status = get_status_from_summary_file(pkg, node.id, stage) out.write('%s#%s#%s: %s\n' % (pkg, node.id, stage, status)) out.close() print "BBS> [make_STATUS_DB] END" return ############################################################################## ### MAIN SECTION ############################################################################## print "BBS> [stage7a] STARTING stage7a at %s..." % time.asctime() central_rdir_path = BBScorevars.Central_rdir.path if central_rdir_path != None: print "BBS> [stage7a] cd %s/" % central_rdir_path os.chdir(central_rdir_path) report_nodes = BBScorevars.getenv('BBS_REPORT_NODES') BBSreportutils.set_NODES(report_nodes) allpkgs = BBSreportutils.get_pkgs_from_meat_index() make_STATUS_DB(allpkgs) print "BBS> [stage7a] DONE at %s." % time.asctime()
def propagate_package():
    """Copy the freshly built package into the target repository.

    Steps: locate the build product in working_dir, derive the
    repository path/URL for this platform and bioc version, delete any
    older versions of the package from the repository, then scp the new
    file over.  Progress is reported through send_message(); a failed
    prune or copy aborts the process via sys.exit().

    Side effects: sets module globals build_product, repos and url.
    """
    global build_product
    global repos
    global url
    pkg_type = BBScorevars.getNodeSpec(builder_id, "pkgType")
    ext = BBScorevars.pkgType2FileExt[pkg_type]
    files = os.listdir(working_dir)
    # Python 2 filter() returns a list; take the first file with the
    # expected extension as the build product.
    build_product = filter(lambda x: x.endswith(ext), files)[0]
    # now install the package
    #r_cmd = "%s CMD INSTALL %s" % (os.getenv("BBS_R_CMD"), build_product)
    #send_message({"body": "Installing package", "status": "post_processing", "retcode": 0})
    #retcode = subprocess.call(r_cmd, shell=True)
    #send_message({"body": "Installed package", "status": "post_processing", "retcode": retcode})
    #if retcode != 0:
    #    sys.exit("package install failed")
    r_version = bioc_r_map[os.getenv("BBS_BIOC_VERSION")]
    # CRAN-style repository segment for the current platform.
    if (platform.system() == "Darwin"):
        os_seg = "bin/macosx/mavericks/contrib/%s" % r_version
    elif (platform.system() == "Linux"):
        os_seg = "src/contrib"
    else:
        os_seg = "bin/windows/contrib/%s" % r_version
    if (manifest['repository'] == 'course'):
        repos = "/loc/www/bioconductor-test.fhcrc.org/course-packages/%s" % os_seg
        url = repos.replace("/loc/www/bioconductor-test.fhcrc.org/","http://bioconductor.org/")
    elif (manifest['repository'] == 'scratch'):
        repos = '/loc/www/bioconductor-test.fhcrc.org/scratch-repos/%s/%s' % (manifest['bioc_version'], os_seg)
        url = repos.replace("/loc/www/bioconductor-test.fhcrc.org/scratch-repos/","http://bioconductor.org/scratch-repos/")
    url += "/" + build_product
    # NOTE(review): getsize() is given a path relative to working_dir;
    # presumably the cwd is working_dir at this point - confirm.
    rawsize = os.path.getsize(build_product)
    kib = rawsize / float(1024)
    filesize = "%.2f" % kib
    files_to_delete = "%s/%s_*.%s" % (repos, package_name, ext)
    # Prune every older build of this package from the repository.
    # NOTE(review): the user@host below appears redacted in this copy
    # ("[email protected]") - restore before running.
    if (platform.system() == "Windows"):
        retcode = subprocess.call("c:/cygwin/bin/ssh.exe -qi %s/.packagebuilder.private_key.rsa -o StrictHostKeyChecking=no [email protected] 'rm -f %s/%s_*.zip'" % (os.environ["PACKAGEBUILDER_HOME"], repos, package_name))
    else:
        retcode = ssh("rm -f %s" % files_to_delete)
    print("result of deleting files: %d" % retcode)
    send_message({"body": "Pruning older packages from repository",
                  "status": "post_processing",
                  "retcode": retcode})
    if retcode != 0:
        sys.exit("repos prune failed")
    if (platform.system() == "Windows"):
        # On Windows, make the product world-readable locally, scp it
        # with cygwin's scp.exe, then chmod it on the remote side.
        print("platform.system() == 'Windows', running chmod commands...")
        chmod_retcode = subprocess.call("chmod a+r %s" % os.path.join(working_dir, package_name))
        print("chmod_retcode = %d" % chmod_retcode)
        send_message({"status": "chmod_retcode",
                      "body": "chmod_retcode=%d" % chmod_retcode,
                      "retcode": chmod_retcode})
        command = "c:/cygwin/bin/scp.exe -qi %s/.packagebuilder.private_key.rsa -o StrictHostKeyChecking=no %s [email protected]:%s/" % (os.environ["PACKAGEBUILDER_HOME"], build_product, repos)
        print("command = %s" % command)
        retcode = subprocess.call(command)
        remote_chmod_retcode = subprocess.call("c:/cygwin/bin/ssh.exe -qi %s/.packagebuilder.private_key.rsa -o StrictHostKeyChecking=no [email protected] 'chmod a+r %s/%s_*.zip'" % (os.environ["PACKAGEBUILDER_HOME"], repos, package_name))
        print("remote_chmod_retcode = %s" % remote_chmod_retcode)
    else:
        print("chmod code not run, because platform.system() == %s" % platform.system())
    # Final copy of the build product into the repository (all
    # platforms go through scp() here).
    retcode = scp(build_product, repos)
    print("result of copying file: %d" % retcode)
    send_message({"body": "Copied build file to repository", "status": "post_processing",
                  "retcode": retcode, "build_product": build_product,
                  "filesize": filesize})
    if retcode != 0:
        sys.exit("copying file to repository failed")
def update_packages_file():
    """Regenerate the repository's PACKAGES index and sync the website.

    Runs update-repo.R on the staging host over ssh, then (for 'course'
    and 'scratch' repositories) triggers a 'rake deploy_production' to
    publish.  Every step reports through send_message(); a non-zero
    retcode aborts via sys.exit().
    """
    global repos
    global build_product
    r_version = BIOC_R_MAP[ENVIR['bbs_Bioc_version']]
    # CRAN-style repository segment for the current platform; legacy
    # "mac.binary.leopard" nodes use the older macosx contrib path.
    if (platform.system() == "Darwin"):
        pkg_type = BBScorevars.getNodeSpec(BUILDER_ID, "pkgType")
        if pkg_type == "mac.binary.leopard":
            os_seg = "bin/macosx/contrib/%s" % r_version
        else:
            os_seg = "bin/macosx/mavericks/contrib/%s" % r_version
    elif (platform.system() == "Linux"):
        os_seg = "src/contrib"
    else:
        os_seg = "bin/windows/contrib/%s" % r_version
    if (manifest['repository'] == 'course'):
        repos = "/loc/www/bioconductor-test.fhcrc.org/course-packages/%s" % os_seg
        url = repos.replace("/loc/www/bioconductor-test.fhcrc.org/", HOSTS['bioc'] + '/')
        script_loc = "/loc/www/bioconductor-test.fhcrc.org/course-packages"
    elif (manifest['repository'] == 'scratch'):
        repos = '/loc/www/bioconductor-test.fhcrc.org/scratch-repos/%s/%s' % (
            manifest['bioc_version'], os_seg)
        url = repos.replace(
            "/loc/www/bioconductor-test.fhcrc.org/scratch-repos/",
            HOSTS['bioc'] + "/scratch-repos/")
        script_loc = "/loc/www/bioconductor-test.fhcrc.org/scratch-repos/%s" % manifest['bioc_version']
    # update-repo.R only understands "mac.binary" for Mac packages.
    pkg_type = BBScorevars.getNodeSpec(BUILDER_ID, "pkgType")
    if pkg_type == "mac.binary.leopard":
        pkg_type = "mac.binary"
    command = "%s biocadmin@%s 'R -f %s/update-repo.R --args %s %s'"
    command = command % (packagebuilder_ssh_cmd, ENVIR['spb_staging_url'],
                         script_loc, repos, pkg_type)
    logging.info("update_packages_file() command: %s" % command)
    retcode = subprocess.call(command, shell=True)
    logging.info("update_packages_file() retcode: %d" % retcode)
    send_message({
        "status": "post_processing",
        "retcode": retcode,
        "body": "Updated packages list. " })
    if retcode != 0:
        send_message({
            "status": "post_processing_complete",
            "retcode": retcode,
            "body": "Updating packages failed.",
            "build_product": build_product,
            "url": url })
        sys.exit("updating packages failed")
    if (manifest['repository'] == 'course' or manifest['repository'] == 'scratch'):
        # Publish the updated repository to the production website.
        command = "%s biocadmin@%s \"source ~/.bash_profile && cd /home/biocadmin/bioc-test-web/bioconductor.org && rake deploy_production\""
        command = command % (packagebuilder_ssh_cmd, ENVIR['spb_staging_url'])
        logging.info("update_packages_file() sync command = %s" % command)
        retcode = subprocess.call(command, shell=True)
        send_message({
            "status": "post_processing",
            "retcode": retcode,
            "body": "Synced repository to website. ",
            "build_product": build_product,
            "url": url })
        if retcode != 0:
            send_message({
                "status": "post_processing_complete",
                "retcode": retcode,
                "body": "Syncing repository failed",
                "build_product": build_product,
                "url": url})
            sys.exit("sync to website failed")
    send_message({
        "status": "post_processing_complete",
        "retcode": retcode,
        "body": "Post-processing complete.",
        "build_product": build_product,
        "url": url})
def is_doing_buildbin(node_hostname): return BBScorevars.getNodeSpec(node_hostname, 'pkgType') != "source"
def build_package(source_build):
    """Build the package (source tarball or platform binary) and return
    the build's exit code.

    source_build -- True for 'R CMD build' (source); False for a
                    platform-specific binary build.

    Side effects: updates module globals pkg_type, message_sequence and
    warnings; reads the build log (R.out / Rbuildbin.out) back to flag
    WARNING:/ERROR: lines.
    """
    global pkg_type
    pkg_type = BBScorevars.getNodeSpec(builder_id, "pkgType")
    buildmsg = None
    if (source_build):
        buildmsg = "building"
    else:
        buildmsg = "buildingbin"
    # Source-only nodes never produce binaries; skip buildbin outright.
    if ((not source_build) and (pkg_type == "source")):
        send_message({"status": "skip_buildbin", "body": "skipped"})
        return(0)
    if (not source_build):
        send_message({"status": "starting_buildbin", "body": ""})
    global message_sequence
    global warnings
    message_sequence = 1
    flags = "--keep-empty-dirs --no-resave-data"
    #flags += " --no-vignettes" ## be sure to comment this line!!!!!!! (used for testing, to speed up builds)
    win_multiarch = True # todo make this a checkbox
    if (source_build):
        r_cmd = "%s CMD build %s %s" % (os.getenv("BBS_R_CMD"), flags, package_name)
    else:
        if pkg_type == "mac.binary" or pkg_type == "mac.binary.mavericks":
            # Mac binary: recreate a scratch library dir, then delegate
            # to the external build-universal.sh helper.
            libdir = "libdir"
            if os.path.exists(libdir):
                _call("rm -rf %s" % libdir, False)
            if (not (os.path.exists(libdir))):
                os.mkdir(libdir)
            #r_cmd = "R CMD INSTALL --build %s --library=%s" % (\
            #    get_source_tarball_name(), os.getenv("SPB_R_LIBS"))
            r_cmd = "../../build-universal.sh %s %s" % (\
                get_source_tarball_name(), libdir)
        elif pkg_type == "win.binary":
            if (win_multiarch):
                # Multiarch Windows builds are handled below by
                # win_multiarch_buildbin(); nothing to prepare here.
                pass
                #pkg = package_name.split("_")[0]
                #libdir = "%s.buildbin-libdir" % pkg
                #if (os.path.exists(libdir)):
                #    retcode = _call("rm -rf %s" % libdir, False)
                #if (not os.path.exists(libdir)):
                #    os.mkdir(libdir)
                # TODO - use win_multiarch_buildbin here
                #r_cmd = ("%s CMD INSTALL --build "
                #    "--merge-multiarch --library=%s %s") % (\
                #    os.getenv("BBS_R_CMD"), libdir, get_source_tarball_name())
            else:
                # NOTE(review): libdir is not defined on this path;
                # unreachable while win_multiarch is hard-coded True,
                # but would raise NameError if ever enabled.
                r_cmd = "%s CMD INSTALL --build --library=%s %s" % \
                    (os.getenv("BBS_R_CMD"), libdir, package_name)
    status = None
    if (source_build):
        status = "r_cmd"
        outfile = "R.out"
    else:
        status = "r_buildbin_cmd"
        outfile = "Rbuildbin.out"
    print("before build, working dir is %s" % working_dir)
    if ((not source_build) and win_multiarch and pkg_type == "win.binary"):
        retcode = win_multiarch_buildbin(buildmsg)
    else:
        send_message({"status": status, "body": r_cmd})
        retcode = do_build(r_cmd, buildmsg, source_build)
    # check for warnings
    out_fh = open(outfile)
    warnings = False
    for line in out_fh:
        if line.lower().startswith("warning:"):
            warnings = True
        if line.lower().startswith("error:"):
            # An ERROR: line in the log forces a failure code even if
            # the build command itself exited 0.
            retcode = 1
    out_fh.close()
    complete_status = None
    if (source_build):
        complete_status = "build_complete"
    else:
        complete_status = "buildbin_complete"
    # todo - fix elapsed time throughout
    send_message({"status": complete_status, "result_code": retcode,
                  "warnings": warnings,
                  "body": "Build completed with status %d" % retcode,
                  "elapsed_time": -1})
    return (retcode)
def propagate_package():
    """Copy the freshly built package into the target repository.

    Steps: locate the build product in working_dir, derive repository
    path/URL for this platform and bioc version, prune older versions
    of the package from the repository, then scp the new file over.
    Progress is reported through send_message() and logging; a failed
    prune or copy aborts via sys.exit().

    Side effects: sets module globals build_product, repos and url.
    """
    global build_product
    global repos
    global url
    pkg_type = BBScorevars.getNodeSpec(BUILDER_ID, "pkgType")
    ext = BBScorevars.pkgType2FileExt[pkg_type]
    files = os.listdir(working_dir)
    # Python 2 filter() returns a list; take the first file with the
    # expected extension as the build product.
    build_product = filter(lambda x: x.endswith(ext), files)[0]
    r_version = BIOC_R_MAP[ENVIR['bbs_Bioc_version']]
    # CRAN-style repository segment for the current platform.
    if (platform.system() == "Darwin"):
        os_seg = "bin/macosx/mavericks/contrib/%s" % r_version
    elif (platform.system() == "Linux"):
        os_seg = "src/contrib"
    else:
        os_seg = "bin/windows/contrib/%s" % r_version
    if (manifest['repository'] == 'course'):
        repos = "/loc/www/bioconductor-test.fhcrc.org/course-packages/%s" % os_seg
        url = repos.replace("/loc/www/bioconductor-test.fhcrc.org/", HOSTS['bioc'])
    elif (manifest['repository'] == 'scratch'):
        repos = '/loc/www/bioconductor-test.fhcrc.org/scratch-repos/%s/%s' % (
            manifest['bioc_version'], os_seg)
        url = repos.replace(
            "/loc/www/bioconductor-test.fhcrc.org/scratch-repos/",
            HOSTS['bioc'] + '/scratch-repos')
    url += "/" + build_product
    # NOTE(review): getsize() is given a path relative to working_dir;
    # presumably the cwd is working_dir at this point - confirm.
    rawsize = os.path.getsize(build_product)
    kib = rawsize / float(1024)
    filesize = "%.2f" % kib
    package_name = manifest['job_id'].split("_")[0]
    files_to_delete = "%s/%s_*.%s" % (repos, package_name, ext)
    # Prune every older build of this package from the repository.
    if (platform.system() == "Windows"):
        command = "c:/cygwin/bin/ssh.exe -qi %s -o StrictHostKeyChecking=no biocadmin@%s 'rm -f %s/%s_*.zip'"
        command = command % (ENVIR['spb_RSA_key'], ENVIR['spb_staging_url'],
                             repos, package_name)
        retcode = subprocess.call(command)
    else:
        logging.info("propagate_package() files_to_delete: %s" % files_to_delete)
        retcode = ssh("rm -f %s" % files_to_delete)
    logging.info("Finished propagate_package().\n Result of deleting files: %d." % retcode)
    send_message({
        "body": "Pruning older packages from repository. ",
        "status": "post_processing",
        "retcode": retcode })
    if retcode != 0:
        logging.error("propagate_package() Failed to prune repos.")
        sys.exit("repos prune failed")
    if (platform.system() == "Windows"):
        # On Windows, make the product world-readable locally, scp it
        # with cygwin's scp.exe, then chmod it on the remote side.
        logging.info("propagate_package() Windows chmod")
        chmod_retcode = subprocess.call(
            "chmod a+r %s" % os.path.join(working_dir, package_name))
        logging.info("propagate_package() Windows chmod_retcode = %d" % chmod_retcode)
        send_message({
            "status": "chmod_retcode",
            "body": "chmod_retcode=%d" % chmod_retcode,
            "retcode": chmod_retcode })
        command = "c:/cygwin/bin/scp.exe -qi %s -o StrictHostKeyChecking=no %s biocadmin@%s:%s/"
        command = command % (ENVIR['spb_RSA_key'], build_product,
                             ENVIR['spb_staging_url'], repos)
        logging.info("propagate_package() Windows scp command = %s." % command)
        retcode = subprocess.call(command)
        command = "c:/cygwin/bin/ssh.exe -qi %s -o StrictHostKeyChecking=no biocadmin@%s 'chmod a+r %s/%s_*.zip'"
        command = command % (ENVIR['spb_RSA_key'], ENVIR['spb_staging_url'],
                             repos, package_name)
        remote_chmod_retcode = subprocess.call(command)
        logging.info("propagate_package() Windows remote_chmod_retcode = %s" % remote_chmod_retcode)
    else:
        logging.info("propagate_package() %s chmod not run" % platform.system())
    # Final copy of the build product into the repository (all
    # platforms go through scp() here).
    retcode = scp(build_product, repos)
    logging.info("propagate_package() Result of copying file: %d" % retcode)
    send_message({
        "body": "Copied build file to repository. ",
        "status": "post_processing",
        "retcode": retcode,
        "build_product": build_product,
        "filesize": filesize })
    if retcode != 0:
        logging.error("propagate_package() Failed to copy file to repository.")
        sys.exit("failed to copy file to repository")
def build_package(source_build):
    """Build the package (source tarball or platform binary), report
    progress via send_message(), and return the build's exit code.

    source_build -- True for 'R CMD build' (source); False for a
                    platform-specific binary build.

    Side effects: updates module globals pkg_type, message_sequence and
    warnings; reads the build log (R.out / Rbuildbin.out) back to flag
    WARNING:/ERROR: lines.
    """
    global pkg_type
    pkg_type = BBScorevars.getNodeSpec(BUILDER_ID, "pkgType")
    buildmsg = None
    if (source_build):
        buildmsg = "building"
    else:
        buildmsg = "buildingbin"
    # Source-only nodes never produce binaries; skip buildbin outright.
    if ((not source_build) and (pkg_type == "source")):
        send_message({"status": "skip_buildbin", "body": "skipped"})
        logging.info("Skip buildbin")
        return(0)
    if (not source_build):
        # Override the configured pkgType with the type implied by the
        # platform we are actually running on.
        if platform.system() == "Darwin":
            pkg_type = "mac.binary.mavericks"
        elif platform.system() == "Linux":
            pkg_type = "source"
        elif platform.system() == "Windows":
            pkg_type = "win.binary"
        else:
            pkg_type = "source"
        send_message({"status": "starting_buildbin", "body": ""})
        logging.info("Start buildbin")
    global message_sequence
    global warnings
    message_sequence = 1
    flags = "--keep-empty-dirs --no-resave-data"
    if (source_build):
        package_name = manifest['job_id'].split("_")[0]
        r_cmd = "%s CMD build %s %s" % \
            (ENVIR['bbs_R_cmd'], flags, package_name)
    else:
        if pkg_type == "mac.binary" or pkg_type == "mac.binary.mavericks":
            # Mac binary: recreate a scratch library dir, then delegate
            # to the external build-universal.sh helper.
            libdir = "libdir"
            if os.path.exists(libdir):
                _call("rm -rf %s" % libdir, False)
            if (not (os.path.exists(libdir))):
                os.mkdir(libdir)
            r_cmd = os.environ['SPB_HOME'] + "/build-universal.sh %s %s" % (
                get_source_tarball_name(), libdir)
        # NOTE(review): no r_cmd is set for other binary types;
        # win.binary takes the win_multiarch_buildbin() path below and
        # "source" cannot reach here (early return above).
    status = None
    if (source_build):
        status = "r_cmd"
        outfile = "R.out"
    else:
        status = "r_buildbin_cmd"
        outfile = "Rbuildbin.out"
    logging.debug("Before build, working dir is %s." % working_dir)
    start_time = datetime.datetime.now()
    if ((not source_build) and pkg_type == "win.binary"):
        retcode = win_multiarch_buildbin(buildmsg)
    else:
        send_message({"status": status, "body": r_cmd})
        retcode = do_build(r_cmd, buildmsg, source_build)
    stop_time = datetime.datetime.now()
    time_dif = stop_time - start_time
    # Fix: compute the elapsed time arithmetically instead of parsing
    # str(timedelta).  The old code left a dead divmod() assignment
    # (sec_time was immediately overwritten) and derived minutes from
    # timedelta.seconds, which silently drops whole days.
    total_seconds = time_dif.total_seconds()
    min_time = int(total_seconds // 60)
    sec_time = "%.2f" % (total_seconds % 60)
    elapsed_time = str(min_time) + " minutes " + sec_time + " seconds"
    # Scan the captured build log: a WARNING: line flags warnings, an
    # ERROR: line forces a failure code even if R itself exited 0.
    out_fh = open(outfile)
    warnings = False
    for line in out_fh:
        if line.lower().startswith("warning:"):
            warnings = True
        if line.lower().startswith("error:"):
            retcode = 1
    out_fh.close()
    complete_status = None
    if (source_build):
        complete_status = "build_complete"
    else:
        complete_status = "buildbin_complete"
    send_message({
        "status": complete_status,
        "retcode": retcode,
        "warnings": warnings,
        "body": "Build completed with status %d" % retcode,
        "elapsed_time": elapsed_time})
    logging.info(complete_status + "\n Build completed with status: "
                 + str(retcode) + " Elapsed time: " + elapsed_time)
    return (retcode)
return def send_CRAN_notifications(allpkgs): print "BBS> [send_CRAN_notifications] BEGIN..." send_notifications(allpkgs) print "BBS> [send_CRAN_notifications] END." return ############################################################################## ### MAIN SECTION ############################################################################## print "BBS> [stage9] STARTING stage9 at %s..." % time.asctime() BBSreportutils.data_source = BBScorevars.getenv('BBS_PUBLISHED_REPORT_URL') notify_nodes = BBScorevars.getenv('BBS_NOTIFY_NODES') argc = len(sys.argv) if argc > 1: arg1 = sys.argv[1] else: arg1 = "" if arg1 != "": bbs.email.mode = "do-it" if arg1 != "do-it": bbs.email.redirect_to_addr = arg1 BBSreportutils.set_NODES(notify_nodes) if len(BBSreportutils.NODES) != 1:
def update_packages_file():
    """Regenerate the repository's PACKAGES index and sync the website.

    Runs update-repo.R on the staging host over ssh, then (for 'course'
    and 'scratch' repositories) triggers a 'rake deploy_production' to
    publish.  Every step reports through send_message(); a non-zero
    retcode aborts via sys.exit().
    """
    global repos
    global build_product
    r_version = BIOC_R_MAP[ENVIR['bbs_Bioc_version']]
    # CRAN-style repository segment for the current platform; legacy
    # "mac.binary.leopard" nodes use the older macosx contrib path.
    if (platform.system() == "Darwin"):
        pkg_type = BBScorevars.getNodeSpec(BUILDER_ID, "pkgType")
        if pkg_type == "mac.binary.leopard":
            os_seg = "bin/macosx/contrib/%s" % r_version
        else:
            os_seg = "bin/macosx/mavericks/contrib/%s" % r_version
    elif (platform.system() == "Linux"):
        os_seg = "src/contrib"
    else:
        os_seg = "bin/windows/contrib/%s" % r_version
    if (manifest['repository'] == 'course'):
        repos = "/loc/www/bioconductor-test.fhcrc.org/course-packages/%s" % os_seg
        url = repos.replace("/loc/www/bioconductor-test.fhcrc.org/", HOSTS['bioc'] + '/')
        script_loc = "/loc/www/bioconductor-test.fhcrc.org/course-packages"
    elif (manifest['repository'] == 'scratch'):
        repos = '/loc/www/bioconductor-test.fhcrc.org/scratch-repos/%s/%s' % (
            manifest['bioc_version'], os_seg)
        url = repos.replace(
            "/loc/www/bioconductor-test.fhcrc.org/scratch-repos/",
            HOSTS['bioc'] + "/scratch-repos/")
        script_loc = "/loc/www/bioconductor-test.fhcrc.org/scratch-repos/%s" % manifest[
            'bioc_version']
    # update-repo.R only understands "mac.binary" for Mac packages.
    pkg_type = BBScorevars.getNodeSpec(BUILDER_ID, "pkgType")
    if pkg_type == "mac.binary.leopard":
        pkg_type = "mac.binary"
    command = "%s biocadmin@%s 'R -f %s/update-repo.R --args %s %s'"
    command = command % (packagebuilder_ssh_cmd, ENVIR['spb_staging_url'],
                         script_loc, repos, pkg_type)
    logging.info("update_packages_file() command: %s" % command)
    retcode = subprocess.call(command, shell=True)
    logging.info("update_packages_file() retcode: %d" % retcode)
    send_message({
        "status": "post_processing",
        "retcode": retcode,
        "body": "Updated packages list. " })
    if retcode != 0:
        send_message({
            "status": "post_processing_complete",
            "retcode": retcode,
            "body": "Updating packages failed.",
            "build_product": build_product,
            "url": url })
        sys.exit("updating packages failed")
    if (manifest['repository'] == 'course' or manifest['repository'] == 'scratch'):
        # Publish the updated repository to the production website.
        command = "%s biocadmin@%s \"source ~/.bash_profile && cd /home/biocadmin/bioc-test-web/bioconductor.org && rake deploy_production\""
        command = command % (packagebuilder_ssh_cmd, ENVIR['spb_staging_url'])
        logging.info("update_packages_file() sync command = %s" % command)
        retcode = subprocess.call(command, shell=True)
        send_message({
            "status": "post_processing",
            "retcode": retcode,
            "body": "Synced repository to website. ",
            "build_product": build_product,
            "url": url })
        if retcode != 0:
            send_message({
                "status": "post_processing_complete",
                "retcode": retcode,
                "body": "Syncing repository failed",
                "build_product": build_product,
                "url": url })
            sys.exit("sync to website failed")
    send_message({
        "status": "post_processing_complete",
        "retcode": retcode,
        "body": "Post-processing complete.",
        "build_product": build_product,
        "url": url })
def is_build_required(manifest):
    """Return True if the package version in source control differs
    from the version already published in the target repository.

    Reads the package's DESCRIPTION from svn, github or the issue
    tracker (depending on get_package_source()), then compares its
    Version field against the Version listed in the repository's
    PACKAGES index fetched with curl.  A 'force': True entry in the
    manifest always returns True.

    Side effect: sets module global svn_url_global.
    """
    global svn_url_global
    svn_url_global = manifest['svn_url']
    package_name = manifest['job_id'].split("_")[0]
    logging.info("Starting is_build_required() '%s'." % package_name)
    if (get_package_source() == "svn"):
        description_url = manifest['svn_url'].rstrip("/") + "/DESCRIPTION"
        # NOTE(review): the span below is syntactically broken in this
        # copy - credentials appear to have been redacted ("******"),
        # and the 'try:' / 'description = subprocess.Popen([' opening
        # of the curl call is missing.  Restore from history before
        # running; kept byte-identical here.
        logging.debug("is_build_required() package source is svn" + "\n description_url = " + description_url + "\n svn_user ="******"\n svn_pass = "******"curl", "-k", "-s", "--user", "%s:%s" % (ENVIR['svn_user'], ENVIR['svn_pass']), description_url ], stdout=subprocess.PIPE).communicate()[0]
        # TODO - handle it if description does not exist
        except:
            logging.error("is_build_required() curl exception: %s.",
                          sys.exc_info()[0])
            raise
        logging.debug("is_build_required()" +
                      "\n description = %s" % description +
                      "\n length = %d" % len(description))
        dcf_file = dcf.DcfRecordParser(description.rstrip().split("\n"))
        send_dcf_info(dcf_file)
        svn_version = dcf_file.getValue("Version")
    elif get_package_source() == "github":
        # Fetch the raw DESCRIPTION straight from githubusercontent.
        github_url = re.sub(r'\.git$', '', manifest['svn_url'])
        if not github_url.endswith("/"):
            github_url += "/"
        # We only build the master branch. There had better be one.
        # (technically we build whatever the default branch is, but
        # this step looks at master because to find out what the
        # default branch is at this point we would need octokit here).
        github_url += "master/DESCRIPTION"
        github_url = github_url.replace("https://github.com",
                                        "https://raw.githubusercontent.com")
        f = urllib2.urlopen(github_url)
        dcf_text = f.read()
        dcf_file = dcf.DcfRecordParser(dcf_text.rstrip().split("\n"))
        send_dcf_info(dcf_file)
        svn_version = dcf_file.getValue("Version")
    elif get_package_source() == "tracker":
        # Tracker submissions are tarballs named pkgname_version.tar.gz;
        # the version comes from the filename itself.
        tmp = manifest["svn_url"].split("/")
        pkgname = tmp[len(tmp) - 1].replace(".tar.gz", "")
        if (pkgname.find("_") == -1 ):
            # package name doesn't have version in it
            return (True )
        # TODO - download tarball and examine DESCRIPTION file
        svn_version = pkgname.split("_")[1]
    if ("force" in manifest.keys()):
        if (manifest['force'] is True):
            return (True)
    r_version = BIOC_R_MAP[ENVIR['bbs_Bioc_version']]
    pkg_type = BBScorevars.getNodeSpec(BUILDER_ID, "pkgType")
    # Map this node's pkgType to its CRAN-style contrib path.
    cran_repo_map = {
        'source': "src/contrib",
        'win.binary': "bin/windows/contrib/" + r_version,
        'win64.binary': "bin/windows64/contrib/" + r_version,
        'mac.binary': "bin/macosx/contrib/" + r_version,
        'mac.binary.mavericks': "bin/macosx/mavericks/contrib/" + r_version }
    # todo - put repos url in config file (or get it from user)
    base_repo_url = HOSTS['bioc']
    if (manifest['repository'] == 'course'):
        base_repo_url += '/course-packages'
    elif (manifest['repository'] == 'scratch'):
        base_repo_url += '/scratch_repos/' + manifest['bioc_version']
    repository_url = "%s/%s/PACKAGES" % (base_repo_url, cran_repo_map[pkg_type])
    # What if there is no file at this url?
    packages = subprocess.Popen(["curl", "-k", "-s", repository_url],
                                stdout=subprocess.PIPE).communicate()[0]
    # Scan the PACKAGES index for this package's Version field.
    inpackage = False
    repository_version = False
    for line in packages.split("\n"):
        if line == "Package: %s" % package_name:
            inpackage = True
        if (line.startswith("Version: ") and inpackage):
            repository_version = line.split(": ")[1]
            break
    if not repository_version:
        return True # package hasn't been pushed to repo before
    logging.debug("is_build_required()" +
                  "\n [%s] svn version is %s, repository version is %s" % (
                      package_name, svn_version, repository_version))
    return svn_version != repository_version
def build_package(source_build):
    """Build the package (source tarball or platform binary), report
    progress via send_message(), and return the build's exit code.

    source_build -- True for 'R CMD build' (source); False for a
                    platform-specific binary build.

    Side effects: updates module globals pkg_type, message_sequence and
    warnings; reads the build log (R.out / Rbuildbin.out) back to flag
    WARNING:/ERROR: lines.
    """
    global pkg_type
    pkg_type = BBScorevars.getNodeSpec(BUILDER_ID, "pkgType")
    buildmsg = None
    if (source_build):
        buildmsg = "building"
    else:
        buildmsg = "buildingbin"
    # Source-only nodes never produce binaries; skip buildbin outright.
    if ((not source_build) and (pkg_type == "source")):
        send_message({"status": "skip_buildbin", "body": "skipped"})
        logging.info("Skip buildbin")
        return (0)
    if (not source_build):
        # Override the configured pkgType with the type implied by the
        # platform we are actually running on.
        if platform.system() == "Darwin":
            pkg_type = "mac.binary.mavericks"
        elif platform.system() == "Linux":
            pkg_type = "source"
        elif platform.system() == "Windows":
            pkg_type = "win.binary"
        else:
            pkg_type = "source"
        send_message({"status": "starting_buildbin", "body": ""})
        logging.info("Start buildbin")
    global message_sequence
    global warnings
    message_sequence = 1
    flags = "--keep-empty-dirs --no-resave-data"
    if (source_build):
        package_name = manifest['job_id'].split("_")[0]
        r_cmd = "%s CMD build %s %s" % \
            (ENVIR['bbs_R_cmd'], flags, package_name)
    else:
        if pkg_type == "mac.binary" or pkg_type == "mac.binary.mavericks":
            # Mac binary: recreate a scratch library dir, then delegate
            # to the external build-universal.sh helper.
            libdir = "libdir"
            if os.path.exists(libdir):
                _call("rm -rf %s" % libdir, False)
            if (not (os.path.exists(libdir))):
                os.mkdir(libdir)
            r_cmd = os.environ['SPB_HOME'] + "/build-universal.sh %s %s" % (
                get_source_tarball_name(), libdir)
    status = None
    if (source_build):
        status = "r_cmd"
        outfile = "R.out"
    else:
        status = "r_buildbin_cmd"
        outfile = "Rbuildbin.out"
    logging.debug("Before build, working dir is %s." % working_dir)
    start_time = datetime.datetime.now()
    if ((not source_build) and pkg_type == "win.binary"):
        retcode = win_multiarch_buildbin(buildmsg)
    else:
        send_message({"status": status, "body": r_cmd})
        retcode = do_build(r_cmd, buildmsg, source_build)
    stop_time = datetime.datetime.now()
    time_dif = stop_time - start_time
    # NOTE(review): the divmod() result for sec_time is immediately
    # overwritten below, and minutes are derived from timedelta.seconds
    # (days are dropped); the string-parse of str(timedelta) yields the
    # fractional seconds within the minute.
    min_time, sec_time = divmod(time_dif.seconds, 60)
    sec_time = str(format(float(str(time_dif).split(":")[2]), '.2f'))
    elapsed_time = str(min_time) + " minutes " + sec_time + " seconds"
    # check for warnings
    out_fh = open(outfile)
    warnings = False
    for line in out_fh:
        if line.lower().startswith("warning:"):
            warnings = True
        if line.lower().startswith("error:"):
            # An ERROR: line in the log forces a failure code even if
            # the build command itself exited 0.
            retcode = 1
    out_fh.close()
    complete_status = None
    if (source_build):
        complete_status = "build_complete"
    else:
        complete_status = "buildbin_complete"
    send_message({
        "status": complete_status,
        "retcode": retcode,
        "warnings": warnings,
        "body": "Build completed with status %d" % retcode,
        "elapsed_time": elapsed_time })
    logging.info(complete_status + "\n Build completed with status: "
                 + str(retcode) + " Elapsed time: " + elapsed_time)
    return (retcode)
import bbs.rdir import bbs.jobs import BBScorevars ############################################################################## ### BBS GLOBAL VARIABLES ############################################################################## ### Only needed by BBS-prerun.py and BBS-run.py MEAT0_rdir = bbs.rdir.RemoteDir('BBS_MEAT0_RDIR', None, BBScorevars.getenv('BBS_MEAT0_RDIR'), BBScorevars.getenv('BBS_MEAT0_RHOST', False), BBScorevars.getenv('BBS_MEAT0_RUSER', False), BBScorevars.ssh_cmd, BBScorevars.rsync_cmd, BBScorevars.rsync_rsh_cmd) meat_path = BBScorevars.getenv('BBS_MEAT_PATH') work_topdir = BBScorevars.getenv('BBS_WORK_TOPDIR') r_cmd = BBScorevars.getenv('BBS_R_CMD') ### Only needed by BBS-prerun.py
import sys import os import bbs.rdir import bbs.jobs import BBScorevars ############################################################################## ### BBS GLOBAL VARIABLES ############################################################################## ### Only needed by BBS-prerun.py and BBS-run.py MEAT0_rdir = bbs.rdir.RemoteDir('BBS_MEAT0_RDIR', None, BBScorevars.getenv('BBS_MEAT0_RDIR'), BBScorevars.getenv('BBS_MEAT0_RHOST', False), BBScorevars.getenv('BBS_MEAT0_RUSER', False), BBScorevars.ssh_cmd, BBScorevars.rsync_cmd, BBScorevars.rsync_rsh_cmd) meat_path = BBScorevars.getenv('BBS_MEAT_PATH') work_topdir = BBScorevars.getenv('BBS_WORK_TOPDIR') r_cmd = BBScorevars.getenv('BBS_R_CMD') r_home = BBScorevars.getenv('BBS_R_HOME') ### Only needed by BBS-prerun.py