def has_already_been_built(ver):
    """Return True if the buildbot results for build *ver* are already in S3.

    A build is considered done when its analyze.html exists under
    sumatrapdf/buildbot/<ver>/.
    """
    s3_dir = "sumatrapdf/buildbot/"
    expected_name = s3_dir + ver + "/analyze.html"
    # any() replaces the manual loop-and-return-flag idiom
    return any(k.name == expected_name for k in s3List(s3_dir))
def deleteOldPreReleaseBuilds():
    """Delete pre-release build files from S3, keeping the 3 newest versions.

    Groups every key under sumatrapdf/prerel/ by the numeric build version
    embedded in its name, then deletes all files belonging to every version
    except the three highest.
    """
    s3_dir = "sumatrapdf/prerel/"
    files_by_ver = {}
    for k in s3List(s3_dir):
        # key names look like: sumatrapdf/prerel/SumatraPDF-prerelease-4819.pdb.zip
        # original pattern used r'...-(\d+)*' — the trailing '*' was a bug that
        # allowed an empty match; \d+ alone captures the whole version number
        m = re.search(r'sumatrapdf/prerel/SumatraPDF-prerelease-(\d+)', k.name)
        if m is None:
            # skip keys that don't follow the pre-release naming scheme
            # (the original would raise IndexError on ver[0] here)
            continue
        ver = int(m.group(1))
        files_by_ver.setdefault(ver, []).append(k.name)
    # sorted() works on both py2 and py3; dict.keys().sort() is py2-only
    versions = sorted(files_by_ver.keys())
    for ver in versions[:-3]:
        for name in files_by_ver[ver]:
            s3Delete(name)
def build_index_html():
    """Build the buildbot results index.html page and upload it to S3.

    Lists per-build result files under sumatrapdf/buildbot/, renders one
    table row per build (analyze warnings, release build status, binary
    sizes with deltas vs. the previous build) and uploads the page as
    sumatrapdf/buildbot/index.html.
    """
    s3_dir = "sumatrapdf/buildbot/"
    html = "<html><head>%s</head><body>\n" % g_index_html_css
    html += "<p>SumatraPDF buildbot results:</p>\n"
    names = [n.name for n in s3List(s3_dir)]
    # filter out top-level files like index.html and sizes.js
    names = [n[len(s3_dir):] for n in names if len(n.split("/")) == 4]
    # newest build first
    names.sort(reverse=True, key=lambda name: int(name.split("/")[0]))
    html += '<table id="table-5"><tr><th>build</th><th>/analyze</th><th>release</th>'
    # BUGFIX: second style attribute had a doubled closing quote (80%"")
    html += '<th style="font-size:80%">SumatraPDF.exe size</th><th style="font-size:80%">Installer.exe size</th></tr>\n'
    files_by_ver = group_by_ver(names)
    for (ver, files) in files_by_ver:
        if "stats.txt" not in files:
            print("stats.txt missing in %s (%s)" % (ver, str(files)))
            assert("stats.txt" in files)
        try:
            stats = stats_for_ver(ver)
        except:
            # dump context for debugging, then propagate the original error
            print("names: %s" % str(names))
            print("ver: %s" % str(ver))
            print("files: %s" % str(files))
            raise
        total_warnings = stats.analyze_sumatra_warnings_count + stats.analyze_mupdf_warnings_count + stats.analyze_ext_warnings_count
        if int(ver) >= g_first_analyze_build and total_warnings > 0 and not stats.rel_failed:
            assert("analyze.html" in files)
        s3_ver_url = "http://kjkpub.s3.amazonaws.com/" + s3_dir + ver + "/"
        html += " <tr>\n"
        # build number
        url = "https://code.google.com/p/sumatrapdf/source/detail?r=" + ver
        html += td(a(url, ver), 4) + "\n"
        # TODO: this must group several revisions on one line to actually be shorter
        if not build_files_changed(ver):
            html += '<td colspan=4>unchanged</td>'
            # BUGFIX: close the row before skipping — the original continued
            # without emitting </tr>, producing malformed HTML
            html += " </tr>\n"
            continue
        # /analyze warnings count
        if int(ver) >= g_first_analyze_build and total_warnings > 0:
            url = s3_ver_url + "analyze.html"
            s = "%d %d %d warnings" % (stats.analyze_sumatra_warnings_count, stats.analyze_mupdf_warnings_count, stats.analyze_ext_warnings_count)
            html += td(a(url, s), 4)
        else:
            html += td("", 4)
        # release build status
        if stats.rel_failed:
            url = s3_ver_url + "release_build_log.txt"
            s = '<font color="red"><b>fail</b></font> (' + a(url, "log") + ')'
        else:
            # BUGFIX: original literal was '<font color="green"<b>...' — the
            # font tag was never closed with '>'
            s = '<font color="green"><b>ok!</b></font>'
        html += td(s, 4) + "\n"
        # SumatraPDF.exe, Installer.exe size (with delta vs. previous build)
        if stats.rel_failed:
            html += td("", 4) + "\n" + td("", 4) + "\n"
        else:
            prev_stats = stats_for_previous_build(ver)
            if prev_stats is None:
                # first known build: no delta to show
                html += td(str(stats.rel_sumatrapdf_exe_size), 4) + "\n"
                html += td(str(stats.rel_installer_exe_size), 4) + "\n"
            else:
                s = size_diff_html(stats.rel_sumatrapdf_exe_size - prev_stats.rel_sumatrapdf_exe_size)
                s = str(stats.rel_sumatrapdf_exe_size) + s
                html += td(s, 4) + "\n"
                s = size_diff_html(stats.rel_installer_exe_size - prev_stats.rel_installer_exe_size)
                s = str(stats.rel_installer_exe_size) + s
                html += td(s, 4) + "\n"
        html += " </tr>\n"
    html += "</table>"
    html += "</body></html>\n"
    s3UploadDataPublicWithContentType(html, "sumatrapdf/buildbot/index.html")