def PYPI():
    """upload the source distribution to PyPI

    This task uploads `zip` and `tar.gz` source distributions to PyPI.
    The distributions must be already built with `cogs pkg-src`.
    """
    if not (glob.glob("./build/pkg/src/HTSQL-*.tar.gz") and
            glob.glob("./build/pkg/src/HTSQL-*.zip")):
        raise fail("cannot find source packages; run `cogs pkg-src` first")
    if os.path.exists("./build/tmp"):
        rmtree("./build/tmp")
    mktree("./build/tmp")
    archives = []
    for tgzname in glob.glob("./build/pkg/src/*.tar.gz"):
        dirname = tgzname[:-7]
        zipname = dirname + ".zip"
        dirname = os.path.basename(dirname)
        project, version = dirname.rsplit('-', 1)
        dirname = "./build/tmp/" + dirname
        run("tar -xzf %s -C ./build/tmp" % tgzname)
        mktree(dirname + "/dist")
        run("tar -xzf %s -C %s" % (tgzname, dirname))
        cp(tgzname, dirname + "/dist")
        cp(zipname, dirname + "/dist")
        setup_py("sdist --formats=zip,gztar --dry-run"
                 " register upload --sign --identity=" + KEYSIG,
                 cd=dirname)
        archives.append((project, version))
    rmtree("./build/tmp")
    log()
    log("Source distribution archives are uploaded to:")
    for project, version in archives:
        log("  `http://pypi.python.org/pypi/{}/{}/`", project, version)
    log()

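# The `setup_py()` helper used above is defined elsewhere in this tool set.
# The following is only a minimal sketch of how such a wrapper could look,
# assuming it simply shells out to `python setup.py <command>` from a given
# working directory; the name `setup_py_sketch` and its behavior are
# assumptions, not the actual implementation.
def setup_py_sketch(command, cd=None):
    import subprocess
    # run `python setup.py <command>`, optionally from the directory `cd`
    subprocess.check_call("python setup.py " + command, shell=True, cwd=cd)
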
def build(self):
    super(DebianTemplateVM, self).build()
    log("building VM: `{}`...", self.name)
    start_time = datetime.datetime.now()
    src_iso_path = getattr(env, self.iso_env)
    if not (src_iso_path and os.path.isfile(src_iso_path)):
        src_iso_path = self.download(self.iso_urls)
    unpack_path = TMP_DIR + "/" + self.name
    if os.path.exists(unpack_path):
        rmtree(unpack_path)
    self.unpack_iso(src_iso_path, unpack_path)
    cp(DATA_ROOT + "/vm/%s-isolinux.cfg" % self.name,
       unpack_path + "/isolinux/isolinux.cfg")
    cp(DATA_ROOT + "/vm/%s-preseed.cfg" % self.name,
       unpack_path + "/preseed.cfg")
    cp(DATA_ROOT + "/vm/%s-install.sh" % self.name,
       unpack_path + "/install.sh")
    cp(CTL_DIR + "/identity.pub", unpack_path + "/identity.pub")
    sh("md5sum"
       " `find ! -name \"md5sum.txt\""
       " ! -path \"./isolinux/*\" -follow -type f` > md5sum.txt",
       cd=unpack_path)
    iso_path = TMP_DIR + "/%s.iso" % self.name
    if os.path.exists(iso_path):
        rm(iso_path)
    sh("mkisofs -o %s"
       " -q -r -J -no-emul-boot -boot-load-size 4 -boot-info-table"
       " -b isolinux/isolinux.bin -c isolinux/boot.cat %s"
       % (iso_path, unpack_path))
    rmtree(unpack_path)
    try:
        self.kvm_img()
        self.kvm("-cdrom %s -boot d" % iso_path)
        rm(iso_path)
        self.compress()
    except:
        if os.path.exists(self.img_path):
            rm(self.img_path)
        raise
    stop_time = datetime.datetime.now()
    log("VM is built successfully: `{}` ({})",
        self.name, stop_time - start_time)

def COVERAGE():
    """measure code coverage by regression tests (coverage.py)"""
    if os.path.exists("./build/coverage"):
        rmtree("./build/coverage")
    mktree("./build/coverage")
    environ = {}
    environ['COVERAGE_FILE'] = "./build/coverage/coverage.dat"
    variables = make_variables()
    coverage_py("run --branch"
                " --source=htsql,htsql_sqlite,htsql_pgsql,htsql_oracle,"
                "htsql_mssql,htsql_django"
                + " `which \"%s\"` test/regress.yaml -E test/regress.py -q "
                  % env.pbbt_path
                + variables,
                environ=environ)
    coverage_py("html --directory=build/coverage", environ)
    log()
    log("To see the coverage report, open:")
    log("  `./build/coverage/index.html`")
    log()

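# `coverage_py()` is another helper defined elsewhere in this tooling.  A
# plausible minimal sketch (an assumption, not the actual implementation)
# would invoke the `coverage` command-line tool with the extra environment
# variables merged into the current environment.
def coverage_py_sketch(command, environ=None):
    import os, subprocess
    env_vars = dict(os.environ)
    if environ:
        env_vars.update(environ)
    # run `coverage <command>` with the merged environment
    subprocess.check_call("coverage " + command, shell=True, env=env_vars)
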
def build(self):
    super(CentOSTemplateVM, self).build()
    log("building VM: `{}`...", self.name)
    start_time = datetime.datetime.now()
    src_iso_path = env.centos_iso
    if not (src_iso_path and os.path.isfile(src_iso_path)):
        src_iso_path = self.download(CENTOS_ISO_URLS)
    unpack_path = TMP_DIR + "/" + self.name
    if os.path.exists(unpack_path):
        rmtree(unpack_path)
    self.unpack_iso(src_iso_path, unpack_path)
    cp(DATA_ROOT + "/vm/%s-isolinux.cfg" % self.name,
       unpack_path + "/isolinux/isolinux.cfg")
    cp(DATA_ROOT + "/vm/%s-ks.cfg" % self.name,
       unpack_path + "/ks.cfg")
    cp(DATA_ROOT + "/vm/%s-install.sh" % self.name,
       unpack_path + "/install.sh")
    cp(CTL_DIR + "/identity.pub", unpack_path + "/identity.pub")
    iso_path = TMP_DIR + "/%s.iso" % self.name
    if os.path.exists(iso_path):
        rm(iso_path)
    sh("mkisofs -o %s"
       " -q -r -J -T -no-emul-boot -boot-load-size 4 -boot-info-table"
       " -b isolinux/isolinux.bin -c isolinux/boot.cat %s"
       % (iso_path, unpack_path))
    rmtree(unpack_path)
    try:
        self.kvm_img()
        self.kvm("-cdrom %s -boot d" % iso_path)
        rm(iso_path)
        self.compress()
    except:
        if os.path.exists(self.img_path):
            rm(self.img_path)
        raise
    stop_time = datetime.datetime.now()
    log("VM is built successfully: `{}` ({})",
        self.name, stop_time - start_time)

def PKG_DEB():
    """create Debian packages

    This task creates Debian packages from source packages.
    """
    if deb_vm.missing():
        raise fail("VM is not built: {}", deb_vm.name)
    if deb_vm.running():
        deb_vm.stop()
    if os.path.exists("./build/pkg/deb"):
        rmtree("./build/pkg/deb")
    if os.path.exists("./build/tmp"):
        rmtree("./build/tmp")
    version = get_version()
    debian_version = ".".join(version.split(".")[:3])
    moves = load_moves(DATA_ROOT + "/pkg/debian/moves.yaml")
    deb_vm.start()
    pubkey = pipe("gpg --armour --export %s" % KEYSIG)
    seckey = pipe("gpg --armour --export-secret-key %s" % KEYSIG)
    deb_vm.write("/root/sign.key", pubkey + seckey)
    deb_vm.run("gpg --import /root/sign.key")
    try:
        for move in moves:
            package = "%s-%s" % (move.code.upper(), version)
            debian_package = "%s_%s" % (move.code, debian_version)
            archive = "./build/pkg/src/%s.tar.gz" % package
            if not os.path.exists(archive):
                raise fail("cannot find a source package;"
                           " run `cogs pkg-src` first")
            changelog = open(DATA_ROOT + "/pkg/debian/changelog").read()
            if ('htsql (%s-1)' % debian_version) not in changelog:
                raise fatal("run `job pkg-deb-changelog`"
                            " to update the changelog file")
            changelog = changelog.replace('htsql (', '%s (' % move.code)
            mktree("./build/tmp")
            cp(archive, "./build/tmp/%s.orig.tar.gz" % debian_package)
            sh("tar -xzf %s -C ./build/tmp" % archive)
            move(DATA_ROOT + "/pkg/debian", "./build/tmp/%s" % package)
            open("./build/tmp/%s/debian/changelog" % package, 'w') \
                    .write(changelog)
            deb_vm.put("./build/tmp", "./build")
            deb_vm.run("cd ./build/%s && dpkg-buildpackage -k%s"
                       % (package, KEYSIG))
            if not os.path.exists("./build/pkg/deb"):
                mktree("./build/pkg/deb")
            deb_vm.get("./build/*.deb", "./build/pkg/deb")
            deb_vm.run("rm -rf build")
            rmtree("./build/tmp")
    finally:
        deb_vm.stop()
    log()
    log("The generated Debian packages are placed in:")
    for filename in glob.glob("./build/pkg/deb/*"):
        log("  `{}`", filename)
    log()

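# Each `move` loaded from moves.yaml is used both as a record (it carries
# `code` and a `variables` mapping) and as a callable that copies a directory
# of packaging templates with the variables filled in.  The sketch below is
# only an assumption about that interface and the substitution syntax; the
# real loader and `Move` class live elsewhere in this tool set.
class MoveSketch(object):

    def __init__(self, code, variables):
        self.code = code
        self.variables = variables

    def __call__(self, src, dst):
        # copy `src` into `dst`, substituting ${name}-style placeholders
        # in file contents (a simplification of the assumed behavior)
        import os
        for dirpath, dirnames, filenames in os.walk(src):
            rel = os.path.relpath(dirpath, src)
            target_dir = os.path.join(dst, rel)
            if not os.path.isdir(target_dir):
                os.makedirs(target_dir)
            for filename in filenames:
                text = open(os.path.join(dirpath, filename)).read()
                for name, value in self.variables.items():
                    text = text.replace("${%s}" % name, str(value))
                open(os.path.join(target_dir, filename), 'w').write(text)
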
def CLEAN():
    """delete generated files

    This task deletes generated files.
    """
    if os.path.exists("./build"):
        rmtree("./build")
    if os.path.exists("./dist"):
        rmtree("./dist")
    for dirpath, dirnames, filenames in os.walk("."):
        for filename in filenames:
            if filename.endswith(".pyc") or filename.endswith(".pyo"):
                filename = os.path.join(dirpath, filename)
                rm(filename)
        for dirname in dirnames:
            if dirname == "vendor":
                dirname = os.path.join(dirpath, dirname)
                rmtree(dirname)
    for filename in glob.glob("./HTSQL-*"):
        if os.path.isdir(filename):
            rmtree(filename)

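# The file-system helpers used throughout these tasks (`rm`, `rmtree`,
# `mktree`, `cp`) come from the surrounding tooling.  This is a rough sketch
# of the assumed semantics, built on `os`/`shutil`; the `_sketch` names and
# any logging behavior of the real helpers are assumptions.
import os, shutil

def rm_sketch(path):
    # remove a single file
    os.unlink(path)

def rmtree_sketch(path):
    # remove a directory tree
    shutil.rmtree(path)

def mktree_sketch(path):
    # create a directory (and any missing parents)
    if not os.path.isdir(path):
        os.makedirs(path)

def cp_sketch(src, dst):
    # copy a file to a file or into a directory
    shutil.copy(src, dst)
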
def PKG_RPM():
    """create RedHat/CentOS packages

    This task creates RedHat/CentOS packages from source packages.
    """
    if rpm_vm.missing():
        raise fail("VM is not built: {}", rpm_vm.name)
    if rpm_vm.running():
        rpm_vm.stop()
    if os.path.exists("./build/pkg/rpm"):
        rmtree("./build/pkg/rpm")
    if os.path.exists("./build/tmp"):
        rmtree("./build/tmp")
    version = get_version()
    redhat_version = ".".join(version.split(".")[:3])
    moves = load_moves(DATA_ROOT + "/pkg/redhat/moves.yaml")
    rpm_vm.start()
    pubkey = pipe("gpg --armour --export %s" % KEYSIG)
    seckey = pipe("gpg --armour --export-secret-key %s" % KEYSIG)
    rpm_vm.write("/root/sign.key", pubkey + seckey)
    rpm_vm.run("gpg --import /root/sign.key")
    rpm_vm.put(DATA_ROOT + "/pkg/redhat/.rpmmacros", ".")
    try:
        for move in moves:
            name = move.variables['name']
            move.variables['version'] = redhat_version
            move.variables['package'] = "%s-%s" % (name, version)
            package = "%s-%s" % (name, version)
            archive = "./build/pkg/src/%s.tar.gz" % package
            if not os.path.exists(archive):
                raise fail("cannot find a source package;"
                           " run `cogs pkg-src` first")
            mktree("./build/tmp")
            move(DATA_ROOT + "/pkg/redhat", "./build/tmp")
            cp(archive, "./build/tmp/SOURCES")
            rpm_vm.put("./build/tmp", "./rpmbuild")
            rpm_vm.run("rpmbuild -bb rpmbuild/SPECS/%s.spec" % name)
            if not os.path.exists("./build/pkg/rpm"):
                mktree("./build/pkg/rpm")
            #rpm_vm.run("rpmsign --addsign ./rpmbuild/RPMS/noarch/*.rpm")
            rpm_vm.get("./rpmbuild/RPMS/noarch/*.rpm", "./build/pkg/rpm")
            rpm_vm.run("rm -rf rpmbuild")
            rmtree("./build/tmp")
    finally:
        rpm_vm.stop()
    log()
    log("The generated RedHat/CentOS packages are placed in:")
    for filename in glob.glob("./build/pkg/rpm/*"):
        log("  `{}`", filename)
    log()

def PKG_SRC():
    """create a source package

    This task creates Python source distributions.
    """
    if src_vm.missing():
        raise fail("VM is not built: {}", src_vm.name)
    if src_vm.running():
        src_vm.stop()
    if os.path.exists("./build/pkg/src"):
        rmtree("./build/pkg/src")
    if os.path.exists("./build/tmp"):
        rmtree("./build/tmp")
    version = get_version()
    all_routines = get_routines()
    all_addons = get_addons()
    moves = load_moves(DATA_ROOT + "/pkg/source/moves.yaml")
    src_vm.start()
    src_vm.run("pip install wheel")
    try:
        for move in moves:
            with_doc = move.variables['with-doc']
            packages = move.variables['packages'].strip().splitlines()
            routines = "".join(
                    routine + "\n"
                    for routine in all_routines
                    if routine.split('=', 1)[1].strip().split('.')[0]
                        in packages)
            addons = "".join(
                    addon + "\n"
                    for addon in all_addons
                    if addon.split('=', 1)[1].strip().split('.')[0]
                        in packages)
            move.variables['version'] = version
            move.variables['htsql-routines'] = routines
            move.variables['htsql-addons'] = addons
            mktree("./build/tmp")
            sh("hg archive ./build/tmp/htsql")
            if with_doc:
                setup_py("-q download_vendor", cd="./build/tmp/htsql")
            for dirname in glob.glob("./build/tmp/htsql/src/*"):
                if os.path.basename(dirname) not in packages:
                    rmtree(dirname)
            packages = setuptools.find_packages("./build/tmp/htsql/src")
            move.variables['packages'] = "".join(package + "\n"
                                                 for package in packages)
            if not with_doc:
                rmtree("./build/tmp/htsql/doc")
            move(DATA_ROOT + "/pkg/source", "./build/tmp/htsql")
            src_vm.put("./build/tmp/htsql", ".")
            if with_doc:
                src_vm.run("cd htsql &&"
                           " PYTHONPATH=src sphinx-build -d doc doc doc/html")
                for filename in glob.glob("./build/tmp/htsql/doc/man/*.?.rst"):
                    basename = os.path.basename(filename)
                    target = basename[:-4]
                    src_vm.run("rst2man htsql/doc/man/%s htsql/doc/man/%s"
                               % (basename, target))
            src_vm.run("cd htsql && python setup.py sdist --formats=zip,gztar")
            src_vm.run("cd htsql && python setup.py bdist_wheel")
            if not os.path.exists("./build/pkg/src"):
                mktree("./build/pkg/src")
            src_vm.get("./htsql/dist/*", "./build/pkg/src")
            src_vm.run("rm -rf htsql")
            rmtree("./build/tmp")
    finally:
        src_vm.stop()
    log()
    log("The generated source packages are placed in:")
    for filename in glob.glob("./build/pkg/src/*"):
        log("  `{}`", filename)
    log()

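# The `run()`, `put()` and `get()` calls above drive a KVM guest over SSH.
# A minimal sketch of what such methods might look like; the class name,
# host, port, and key path below are placeholders, and the real VM class is
# defined elsewhere in this tool set.
import subprocess

class VMSketch(object):

    def __init__(self, host="localhost", port=10022, identity="./identity"):
        self.ssh_target = "root@%s" % host
        self.port = port
        self.identity = identity

    def run(self, command):
        # execute a shell command inside the guest
        subprocess.check_call(["ssh", "-p", str(self.port),
                               "-i", self.identity,
                               self.ssh_target, command])

    def put(self, src, dst):
        # copy a local file or tree into the guest
        subprocess.check_call(["scp", "-r", "-P", str(self.port),
                               "-i", self.identity,
                               src, "%s:%s" % (self.ssh_target, dst)])

    def get(self, src, dst):
        # copy a file or tree from the guest to the local machine
        subprocess.check_call(["scp", "-r", "-P", str(self.port),
                               "-i", self.identity,
                               "%s:%s" % (self.ssh_target, src), dst])
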
def build(self):
    super(WindowsTemplateVM, self).build()
    log("building VM: `{}`...", self.name)
    start_time = datetime.datetime.now()
    src_iso_path = env.windows_iso
    if not (src_iso_path and os.path.isfile(src_iso_path)):
        src_iso_path = None
        output = pipe("locate %s || true"
                      % " ".join(WINDOWS_ISO_FILES))
        for line in output.splitlines():
            if os.path.exists(line):
                src_iso_path = line
                break
        if src_iso_path is None:
            log("unable to find an ISO image for Windows XP or Windows 2003")
            src_iso_path = prompt("enter path to an ISO image:")
            if not (src_iso_path and os.path.isfile(src_iso_path)):
                raise fail("invalid path: %s" % src_iso_path)
    key_regexp = re.compile(r'^\w{5}-\w{5}-\w{5}-\w{5}-\w{5}$')
    key = env.windows_key
    if not (key and key_regexp.match(key)):
        key = None
        key_path = os.path.splitext(src_iso_path)[0] + ".key"
        if os.path.isfile(key_path):
            key = open(key_path).readline().strip()
            if not key_regexp.match(key):
                key = None
        if key is None:
            log("unable to find a Windows product key")
            key = prompt("enter product key:")
            if not key_regexp.match(key):
                raise fail("invalid product key: {}", key)
    wget_path = self.download(WGET_EXE_URLS)
    unpack_path = TMP_DIR + "/" + self.name
    boot_path = unpack_path + "/eltorito.img"
    if os.path.exists(unpack_path):
        rmtree(unpack_path)
    self.unpack_iso(src_iso_path, unpack_path)
    self.unpack_iso_boot(src_iso_path, boot_path)
    sif_template_path = DATA_ROOT + "/vm/%s-winnt.sif" % self.name
    sif_path = unpack_path + "/I386/WINNT.SIF"
    debug("translating: {} => {}", sif_template_path, sif_path)
    sif_template = open(sif_template_path).read()
    sif = sif_template.replace("#####-#####-#####-#####-#####", key)
    assert sif != sif_template
    open(sif_path, 'w').write(sif)
    install_path = unpack_path + "/$OEM$/$1/INSTALL"
    mktree(install_path)
    cp(wget_path, install_path)
    cp(CTL_DIR + "/identity.pub", install_path)
    cp(DATA_ROOT + "/vm/%s-install.cmd" % self.name,
       install_path + "/INSTALL.CMD")
    iso_path = TMP_DIR + "/%s.iso" % self.name
    if os.path.exists(iso_path):
        rm(iso_path)
    sh("mkisofs -o %s -q -iso-level 2 -J -l -D -N"
       " -joliet-long -relaxed-filenames -no-emul-boot"
       " -boot-load-size 4 -b eltorito.img %s"
       % (iso_path, unpack_path))
    rmtree(unpack_path)
    try:
        self.kvm_img()
        self.kvm("-cdrom %s -boot d" % iso_path)
        rm(iso_path)
        self.compress()
    except:
        if os.path.exists(self.img_path):
            rm(self.img_path)
        raise
    stop_time = datetime.datetime.now()
    log("VM is built successfully: `{}` ({})",
        self.name, stop_time - start_time)

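# `pipe()` (capture a command's output) and `prompt()` (ask the operator for
# a value) are small helpers from the surrounding tooling.  A sketch of the
# assumed behavior, written for Python 2 to match the rest of this code; the
# `_sketch` names are placeholders, not the real helpers.
def pipe_sketch(command):
    import subprocess
    # run a shell command and return its standard output
    return subprocess.check_output(command, shell=True)

def prompt_sketch(message):
    # read a value from the operator on the console
    return raw_input(message + " ").strip()
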