def delete(self):
    # Delete the VM image.
    #
    # Refuses to act unless the image exists and the VM is shut down.
    if self.missing():
        raise fail("VM is not built: {}", self.name)
    if self.running():
        raise fail("VM is running: {}", self.name)
    log("deleting VM: {}", self.name)
    # Remove the disk image; also clean up a stale control socket if any.
    rm(self.img_path)
    if os.path.exists(self.ctl_path):
        rm(self.ctl_path)
def stop(self):
    # Stop a VM.
    #
    # Requires the VM image to exist and the VM to be currently active.
    if self.missing():
        raise fail("VM is not built: {}", self.name)
    if not self.running():
        raise fail("VM is not running: {}", self.name)
    log("stopping VM: {}", self.name)
    # Tear down local port forwards before shutting the VM down.
    for port in self.ports():
        self.unforward(port)
    # Ask the QEMU monitor to quit, then wait for the process to exit.
    self.ctl("quit")
    self.wait()
def CREATEDB(engine):
    """deploy regression databases

    Run `cogs createdb <engine>` to create regression databases
    for the specified backend.

    To set database connection parameters, copy file `cogs.conf.sample`
    to `cogs.conf` and update respective settings.

    Supported backends:
      sqlite : SQLite backend
      pgsql  : PostgreSQL backend
      mysql  : MySQL backend
      oracle : Oracle backend
      mssql  : MS SQL Server backend

    Regression databases:
      demo    : student enrollment database
      edge    : edge cases collection
      etl     : CRUD/ETL database
      sandbox : empty database
    """
    # Build the connection URI for the `demo` database (reported below).
    db = make_db(engine, 'demo')
    # Drop any leftovers, then create the databases via the regression suite.
    regress("-q -S /all/%s/dropdb -S /all/%s/createdb" % (engine, engine))
    log()
    log("The demo regression database has been deployed at:")
    log(" `{}`", db)
    log()
def PYPI():
    """upload the source distribution to PyPI

    This task uploads `zip` and `tar.gz` source distributions
    to PyPI.  The distributions must be already built with
    `cogs pkg-src`.
    """
    # Both archive formats must be present before we try to upload.
    if not (glob.glob("./build/pkg/src/HTSQL-*.tar.gz") and
            glob.glob("./build/pkg/src/HTSQL-*.zip")):
        raise fail("cannot find source packages; run `cogs pkg-src` first")
    if os.path.exists("./build/tmp"):
        rmtree("./build/tmp")
    mktree("./build/tmp")
    archives = []
    for tgzname in glob.glob("./build/pkg/src/*.tar.gz"):
        # Derive the matching zip name and `<project>-<version>` directory.
        dirname = tgzname[:-7]
        zipname = dirname + ".zip"
        dirname = os.path.basename(dirname)
        project, version = dirname.rsplit('-', 1)
        dirname = "./build/tmp/" + dirname
        run("tar -xzf %s -C ./build/tmp" % tgzname)
        mktree(dirname + "/dist")
        # NOTE(review): the archive is extracted a second time inside its
        # own unpacked tree -- looks intentional (nested source copy for
        # the upload), but worth confirming.
        run("tar -xzf %s -C %s" % (tgzname, dirname))
        cp(tgzname, dirname + "/dist")
        cp(zipname, dirname + "/dist")
        # Register and upload signed distributions without rebuilding them.
        setup_py("sdist --formats=zip,gztar --dry-run"
                 " register upload --sign --identity=" + KEYSIG,
                 cd=dirname)
        archives.append((project, version))
    rmtree("./build/tmp")
    log()
    log("Source distribution archives are uploaded to:")
    for project, version in archives:
        log(" `http://pypi.python.org/pypi/{}/{}/`", project, version)
    log()
def start(self):
    # Start a VM.
    #
    # Requires the VM image to exist and the VM to be shut down.
    if self.missing():
        raise fail("VM is not built: {}", self.name)
    if self.running():
        raise fail("VM is already running: {}", self.name)
    log("starting VM: {}", self.name)
    # Reclaim any forwarded-port marker files left over by a previous
    # run of this VM (each file holds the owning VM's name).
    for filename in glob.glob(CTL_DIR + "/port.*"):
        name = open(filename).read().strip()
        if name == self.name:
            rm(filename)
    if self.state:
        # Resume from the saved snapshot state.
        self.kvm("-daemonize -loadvm %s" % self.state)
    else:
        # No saved state: boot with a throwaway (non-persistent) disk.
        self.kvm("-daemonize -snapshot")
def PKG_DEB():
    """create Debian packages

    This task creates Debian packages from source packages.
    """
    if deb_vm.missing():
        raise fail("VM is not built: {}", deb_vm.name)
    if deb_vm.running():
        deb_vm.stop()
    # Start from clean output and scratch directories.
    if os.path.exists("./build/pkg/deb"):
        rmtree("./build/pkg/deb")
    if os.path.exists("./build/tmp"):
        rmtree("./build/tmp")
    version = get_version()
    # Debian versions use only the first three version components.
    debian_version = ".".join(version.split(".")[:3])
    moves = load_moves(DATA_ROOT + "/pkg/debian/moves.yaml")
    deb_vm.start()
    # Import the signing keypair into the build VM.
    pubkey = pipe("gpg --armour --export %s" % KEYSIG)
    seckey = pipe("gpg --armour --export-secret-key %s" % KEYSIG)
    deb_vm.write("/root/sign.key", pubkey + seckey)
    deb_vm.run("gpg --import /root/sign.key")
    try:
        for move in moves:
            package = "%s-%s" % (move.code.upper(), version)
            debian_package = "%s_%s" % (move.code, debian_version)
            archive = "./build/pkg/src/%s.tar.gz" % package
            if not os.path.exists(archive):
                raise fail("cannot find a source package;"
                           " run `cogs pkg-src` first")
            changelog = open(DATA_ROOT + "/pkg/debian/changelog").read()
            # NOTE(review): `fatal` here vs `fail` elsewhere, and the
            # message says `job pkg-deb-changelog` while other messages
            # use the `cogs` command name -- confirm both are intended.
            if ('htsql (%s-1)' % debian_version) not in changelog:
                raise fatal("run `job pkg-deb-changelog`"
                            " to update the changelog file")
            # Retarget the changelog entries at this move's package name.
            changelog = changelog.replace('htsql (', '%s (' % move.code)
            mktree("./build/tmp")
            cp(archive, "./build/tmp/%s.orig.tar.gz" % debian_package)
            sh("tar -xzf %s -C ./build/tmp" % archive)
            # Apply the `move` template over the unpacked source tree.
            move(DATA_ROOT + "/pkg/debian", "./build/tmp/%s" % package)
            open("./build/tmp/%s/debian/changelog" % package, 'w') \
                    .write(changelog)
            # Build and sign the package inside the VM, then fetch it back.
            deb_vm.put("./build/tmp", "./build")
            deb_vm.run("cd ./build/%s && dpkg-buildpackage -k%s"
                       % (package, KEYSIG))
            if not os.path.exists("./build/pkg/deb"):
                mktree("./build/pkg/deb")
            deb_vm.get("./build/*.deb", "./build/pkg/deb")
            deb_vm.run("rm -rf build")
            rmtree("./build/tmp")
    finally:
        deb_vm.stop()
    log()
    log("The generated Debian packages are placed in:")
    for filename in glob.glob("./build/pkg/deb/*"):
        log(" `{}`", filename)
    log()
def DEVELOP():
    """install HTSQL in a development mode

    This task installs HTSQL in a development mode so that
    any changes to the source tree affect subsequent calls
    of `htsql-ctl`.
    """
    # `setup.py develop` links the source tree into site-packages.
    setup_py("develop")
    log()
    log("HTSQL is installed in a development mode. Any changes")
    log("to the source tree will affect subsequent calls of `htsql-ctl`.")
    log()
def DOC():
    """build HTSQL documentation

    This task compiles HTSQL documentation and places it to:
      `./build/doc/`
    """
    # Render the Sphinx sources in `doc/` as HTML into `build/doc`.
    sphinx("-b html doc build/doc")
    log()
    log("To see the generated documentation, open:")
    log(" `./build/doc/index.html`")
    log()
def build(self):
    # Build a Debian template VM image from an installer ISO.
    super(DebianTemplateVM, self).build()
    log("building VM: `{}`...", self.name)
    start_time = datetime.datetime.now()
    # Use a locally configured ISO if valid; otherwise download one.
    src_iso_path = getattr(env, self.iso_env)
    if not (src_iso_path and os.path.isfile(src_iso_path)):
        src_iso_path = self.download(self.iso_urls)
    unpack_path = TMP_DIR + "/" + self.name
    if os.path.exists(unpack_path):
        rmtree(unpack_path)
    self.unpack_iso(src_iso_path, unpack_path)
    # Inject the unattended-install configuration into the ISO tree.
    cp(DATA_ROOT + "/vm/%s-isolinux.cfg" % self.name,
       unpack_path + "/isolinux/isolinux.cfg")
    cp(DATA_ROOT + "/vm/%s-preseed.cfg" % self.name,
       unpack_path + "/preseed.cfg")
    cp(DATA_ROOT + "/vm/%s-install.sh" % self.name,
       unpack_path + "/install.sh")
    cp(CTL_DIR + "/identity.pub", unpack_path + "/identity.pub")
    # Regenerate md5sum.txt so the modified ISO passes integrity checks.
    sh("md5sum"
       " `find ! -name \"md5sum.txt\""
       " ! -path \"./isolinux/*\" -follow -type f` > md5sum.txt",
       cd=unpack_path)
    iso_path = TMP_DIR + "/%s.iso" % self.name
    if os.path.exists(iso_path):
        rm(iso_path)
    # Repack the customized tree into a bootable ISO.
    sh("mkisofs -o %s"
       " -q -r -J -no-emul-boot -boot-load-size 4 -boot-info-table"
       " -b isolinux/isolinux.bin -c isolinux/boot.cat %s"
       % (iso_path, unpack_path))
    rmtree(unpack_path)
    try:
        # Create a fresh disk image and run the unattended installer.
        self.kvm_img()
        self.kvm("-cdrom %s -boot d" % iso_path)
        rm(iso_path)
        self.compress()
    except:
        # On any failure, drop the half-built image before re-raising.
        if os.path.exists(self.img_path):
            rm(self.img_path)
        raise
    stop_time = datetime.datetime.now()
    log("VM is built successfully: `{}` ({})",
        self.name, stop_time - start_time)
def INSTALL():
    """install HTSQL

    This task installs HTSQL to the local filesystem.

    This task may require root privileges.
    """
    # Build into `build/lib` and install into the active Python environment.
    setup_py("build --build-base=build/lib install")
    log()
    log("HTSQL is installed. For information on usage, run:")
    log(" `htsql-ctl help`")
    log()
def Factorial(n):
    """calculate n!

    This task calculates the value of the factorial of the given
    positive number `n`.  Factorial of n, also known as n!, is
    defined by the formula:

      n! = 1*2*...*(n-1)*n
    """
    # Validate and coerce the command-line argument.
    try:
        n = int(n)
    except ValueError:
        raise fail("n must be an integer")
    if n < 1:
        raise fail("n must be positive")
    # Multiply the factors 2..n into an accumulator.
    result = 1
    factor = 2
    while factor <= n:
        result *= factor
        factor += 1
    log("{}! = `{}`", n, result)
def trial(command, description):
    # Run a command on the VM; return 0 if it succeeds, 1 otherwise.
    #
    # `command` is executed in `src/htsql` on the `linux-vm` host over SSH;
    # stderr is folded into stdout and shown only when the command fails.
    log("{}...", description)
    command = "ssh -F %s linux-vm \"cd src/htsql && %s\"" \
              % (CTL_DIR+"/ssh_config", command)
    debug("trying: {}", command)
    proc = subprocess.Popen(command, shell=True,
                            stdout=subprocess.PIPE,
                            stderr=subprocess.STDOUT)
    # stderr is redirected to stdout, so the second value is always None.
    out, _ = proc.communicate()
    if proc.returncode == 0:
        log("{}: PASSED", description)
        return 0
    out = out.strip()
    log("{}: :warning:`FAILED!`", description)
    log("*" * 72)
    sys.stdout.write(out + "\n")
    log("*" * 72)
    return 1
def VM_LIST():
    """list all virtual machines

    This task lists all registered virtual machines and their states.

    A VM could be in one of three states:
    - `missing`: the VM image is not built;
    - `stopped`: the VM image exists; the VM is not active.
    - `running`: the VM image exists and the VM is active.

    For running VMs, the task lists all ports forwarded
    from the local host to the VM.
    """
    log("Available virtual machines:")
    for vm in VM.list():
        # Classify the VM and, when it is active, describe its forwards.
        if vm.missing():
            status = 'missing'
        elif not vm.running():
            status = 'stopped'
        else:
            status = 'running'
            forwards = vm.ports()
            if forwards:
                status += " (%s)" % ", ".join(
                        "%s -> %s" % (port+10000, port)
                        for port in forwards)
        log(" {:<24} : {}", vm.name, status)
    log()
def PKG_RPM():
    """create RedHat/CentOS packages

    This task creates RedHat/CentOS packages from source packages.
    """
    if rpm_vm.missing():
        raise fail("VM is not built: {}", rpm_vm.name)
    if rpm_vm.running():
        rpm_vm.stop()
    # Start from clean output and scratch directories.
    if os.path.exists("./build/pkg/rpm"):
        rmtree("./build/pkg/rpm")
    if os.path.exists("./build/tmp"):
        rmtree("./build/tmp")
    version = get_version()
    # RPM versions use only the first three version components.
    redhat_version = ".".join(version.split(".")[:3])
    moves = load_moves(DATA_ROOT + "/pkg/redhat/moves.yaml")
    rpm_vm.start()
    # Import the signing keypair and rpm macros into the build VM.
    pubkey = pipe("gpg --armour --export %s" % KEYSIG)
    seckey = pipe("gpg --armour --export-secret-key %s" % KEYSIG)
    rpm_vm.write("/root/sign.key", pubkey + seckey)
    rpm_vm.run("gpg --import /root/sign.key")
    rpm_vm.put(DATA_ROOT + "/pkg/redhat/.rpmmacros", ".")
    try:
        for move in moves:
            name = move.variables['name']
            move.variables['version'] = redhat_version
            move.variables['package'] = "%s-%s" % (name, version)
            package = "%s-%s" % (name, version)
            archive = "./build/pkg/src/%s.tar.gz" % package
            if not os.path.exists(archive):
                raise fail("cannot find a source package;"
                           " run `cogs pkg-src` first")
            mktree("./build/tmp")
            # Apply the `move` template to lay out the rpmbuild tree.
            move(DATA_ROOT + "/pkg/redhat", "./build/tmp")
            cp(archive, "./build/tmp/SOURCES")
            rpm_vm.put("./build/tmp", "./rpmbuild")
            rpm_vm.run("rpmbuild -bb rpmbuild/SPECS/%s.spec" % name)
            if not os.path.exists("./build/pkg/rpm"):
                mktree("./build/pkg/rpm")
            # Signing is currently disabled.
            #rpm_vm.run("rpmsign --addsign ./rpmbuild/RPMS/noarch/*.rpm")
            rpm_vm.get("./rpmbuild/RPMS/noarch/*.rpm", "./build/pkg/rpm")
            rpm_vm.run("rm -rf rpmbuild")
            rmtree("./build/tmp")
    finally:
        rpm_vm.stop()
    log()
    log("The generated RedHat/CentOS packages are placed in:")
    for filename in glob.glob("./build/pkg/rpm/*"):
        log(" `{}`", filename)
    log()
def build(self):
    # Build a Linux bench VM as a copy-on-write overlay of its parent.
    super(LinuxBenchVM, self).build()
    parent_vm = VM.find(self.parent)
    if parent_vm.missing():
        parent_vm.build()
    if parent_vm.running():
        raise fail("unable to copy VM while it is running: {}",
                   parent_vm.name)
    log("building VM: `{}`...", self.name)
    start_time = datetime.datetime.now()
    try:
        # Create a qcow2 image backed by the parent's image.
        sh("qemu-img create -b %s.qcow2 -f qcow2 %s.qcow2"
           % (parent_vm.name, self.name), cd=IMG_DIR)
        self.kvm("-daemonize")
        # Fixed delay to let the guest boot far enough to accept SSH;
        # presumably tuned empirically -- there is no readiness probe.
        time.sleep(60.0)
        self.put(DATA_ROOT + "/vm/%s-update.sh" % self.name,
                 "/root/update.sh")
        self.run("/root/update.sh")
        self.run("rm /root/update.sh")
        self.run("shutdown")
        self.wait()
        #self.compress(parent_vm.name)
        # Boot once more and save a live snapshot so later starts can
        # resume instantly with `-loadvm`.
        self.kvm("-daemonize")
        time.sleep(60.0)
        self.ctl("savevm %s" % self.state)
        self.ctl("quit")
        self.wait()
    except:
        # Shut down the VM if needed and drop the partial image.
        if self.running():
            self.ctl("quit")
            self.wait()
        if os.path.exists(self.img_path):
            rm(self.img_path)
        raise
    stop_time = datetime.datetime.now()
    log("VM is built successfully: `{}` ({})",
        self.name, stop_time - start_time)
def build(self):
    # Build a Windows bench VM as a full copy of its parent image.
    super(WindowsBenchVM, self).build()
    parent_vm = VM.find(self.parent)
    if parent_vm.missing():
        parent_vm.build()
    if parent_vm.running():
        raise fail("unable to copy VM while it is running: {}",
                   parent_vm.name)
    log("building VM: `{}`...", self.name)
    start_time = datetime.datetime.now()
    try:
        cp(parent_vm.img_path, self.img_path)
        self.kvm("-daemonize")
        # Fixed delay to let Windows boot far enough to accept commands;
        # presumably tuned empirically -- there is no readiness probe.
        time.sleep(120.0)
        self.put(DATA_ROOT + "/vm/%s-update.cmd" % self.name,
                 "/cygdrive/c/INSTALL/UPDATE.CMD")
        # Schedule the update script via RunOnce, then reboot to run it.
        # NOTE(review): non-raw string with backslashes (\S, \M, ...);
        # these are not recognized escapes so they pass through literally,
        # but a raw string would be safer -- confirm before changing.
        self.run(
            "reg add 'HKLM\Software\Microsoft\Windows\CurrentVersion\RunOnce'"
            " /v %s /t REG_SZ /d 'C:\INSTALL\UPDATE.CMD' /f" % self.name)
        self.run("shutdown /r /t 0 /f")
        self.wait()
        #self.compress(parent_vm.name)
        # Boot once more and save a live snapshot so later starts can
        # resume instantly with `-loadvm`.
        self.kvm("-daemonize")
        time.sleep(120.0)
        self.ctl("savevm %s" % self.state)
        self.ctl("quit")
        self.wait()
    except:
        # Shut down the VM if needed and drop the partial image.
        if self.running():
            self.ctl("quit")
            self.wait()
        if os.path.exists(self.img_path):
            rm(self.img_path)
        raise
    stop_time = datetime.datetime.now()
    log("VM is built successfully: `{}` ({})",
        self.name, stop_time - start_time)
def build(self):
    # Build a CentOS template VM image from an installer ISO.
    super(CentOSTemplateVM, self).build()
    log("building VM: `{}`...", self.name)
    start_time = datetime.datetime.now()
    # Use a locally configured ISO if valid; otherwise download one.
    src_iso_path = env.centos_iso
    if not (src_iso_path and os.path.isfile(src_iso_path)):
        src_iso_path = self.download(CENTOS_ISO_URLS)
    unpack_path = TMP_DIR + "/" + self.name
    if os.path.exists(unpack_path):
        rmtree(unpack_path)
    self.unpack_iso(src_iso_path, unpack_path)
    # Inject the kickstart-based unattended-install configuration.
    cp(DATA_ROOT + "/vm/%s-isolinux.cfg" % self.name,
       unpack_path + "/isolinux/isolinux.cfg")
    cp(DATA_ROOT + "/vm/%s-ks.cfg" % self.name, unpack_path + "/ks.cfg")
    cp(DATA_ROOT + "/vm/%s-install.sh" % self.name,
       unpack_path + "/install.sh")
    cp(CTL_DIR + "/identity.pub", unpack_path + "/identity.pub")
    iso_path = TMP_DIR + "/%s.iso" % self.name
    if os.path.exists(iso_path):
        rm(iso_path)
    # Repack the customized tree into a bootable ISO.
    sh("mkisofs -o %s"
       " -q -r -J -T -no-emul-boot -boot-load-size 4 -boot-info-table"
       " -b isolinux/isolinux.bin -c isolinux/boot.cat %s"
       % (iso_path, unpack_path))
    rmtree(unpack_path)
    try:
        # Create a fresh disk image and run the unattended installer.
        self.kvm_img()
        self.kvm("-cdrom %s -boot d" % iso_path)
        rm(iso_path)
        self.compress()
    except:
        # On any failure, drop the half-built image before re-raising.
        if os.path.exists(self.img_path):
            rm(self.img_path)
        raise
    stop_time = datetime.datetime.now()
    # BUG FIX: the name and duration were passed as a single tuple, so the
    # two-placeholder format string received only one argument.  Pass them
    # separately, matching the sibling build() implementations.
    log("VM is built successfully: `{}` ({})",
        self.name, stop_time - start_time)
def BUILD():
    """build HTSQL library

    This task copies HTSQL source files to the build directory:
      `./build/lib`
    """
    # Stage the library under `build/lib` without installing it.
    setup_py("build --build-base=build/lib")
    log()
    log("HTSQL is successfully built.")
    log()
def COVERAGE():
    """measure code coverage by regression tests (coverage.py)"""
    if os.path.exists("./build/coverage"):
        rmtree("./build/coverage")
    mktree("./build/coverage")
    # Direct coverage.py to keep its data file under the build tree.
    environ = {}
    environ['COVERAGE_FILE'] = "./build/coverage/coverage.dat"
    variables = make_variables()
    # Run the regression suite under coverage with branch tracking,
    # restricted to the HTSQL packages.
    coverage_py("run --branch"
                " --source=htsql,htsql_sqlite,htsql_pgsql,htsql_oracle,"
                "htsql_mssql,htsql_django"
                + " `which \"%s\"` test/regress.yaml -E test/regress.py -q "
                % env.pbbt_path + variables,
                environ=environ)
    # NOTE(review): `environ` is passed positionally here but by keyword
    # above -- verify coverage_py's second positional parameter really is
    # `environ` and not e.g. `cd`.
    coverage_py("html --directory=build/coverage", environ)
    log()
    log("To see the coverage report, open:")
    log(" `./build/coverage/index.html`")
    log()
def DROPDB(engine):
    """delete regression databases

    Run `cogs dropdb <engine>` to delete users and databases
    deployed by regression tests.

    To get a list of supported engines, run:
      `cogs help createdb`
    """
    # Presumably validates the engine name and connection settings
    # (the returned URI itself is unused here) -- TODO confirm.
    db = make_db(engine, 'demo')
    regress("-q -S /all/%s/dropdb" % engine)
    log()
    # FIX: corrected message typo/grammar ("databases has beeen deleted").
    log("Regression databases have been deleted.")
    log()
def PKG_DEB_CHANGELOG(message=None):
    """update the Debian changelog

    This task updates the Debian changelog to the current HTSQL version.
    """
    if message is None:
        message = "new upstream release"
    version = get_version()
    # Debian version: first three components plus the `-1` revision suffix.
    debian_version = ".".join(version.split(".")[:3])
    debian_version += "-1"
    changelog = open(DATA_ROOT + "/pkg/debian/changelog").read()
    # Refuse to add a duplicate entry for the same version.
    if ('htsql (%s)' % debian_version) in changelog:
        raise fail("changelog is already up-to-date")
    # Let `dch` (devscripts) prepend the new changelog entry.
    sh("dch --check-dirname-level 0 -D unstable -v %s %s"
       % (debian_version, message),
       cd=DATA_ROOT + "/pkg/debian")
    log("The Debian changelog is updated to version:")
    log(" `{}`", debian_version)
    log()
def CHECK_ALL():
    """run regression tests for all supported backends

    This task runs HTSQL regression tests on all combinations of
    client and server platforms.
    """
    vms = [py26_vm, py27_vm, pgsql84_vm, pgsql90_vm, pgsql91_vm,
           mysql51_vm, oracle10g_vm, mssql2005_vm, mssql2008_vm]
    # Warn about unbuilt VMs (their combinations are skipped below).
    for vm in vms:
        if vm.missing():
            warn("VM is not built: {}", vm.name)
    # Make sure every VM starts from a stopped state.
    for vm in vms:
        if vm.running():
            vm.stop()
    errors = 0
    try:
        for client_vm in [py26_vm, py27_vm]:
            if client_vm.missing():
                continue
            client_vm.start()
            # Install the test harness and push the working copy to the VM.
            client_vm.run("~/bin/pip -q install"
                          " hg+http://bitbucket.org/prometheus/pbbt")
            sh("hg clone --ssh='ssh -F %s' . ssh://linux-vm/src/htsql"
               % (CTL_DIR + "/ssh_config"))
            errors += trial("hg update && python setup.py install",
                            "installing HTSQL under %s" % client_vm.name)
            # SQLite needs no server VM.
            errors += trial("pbbt test/regress.yaml -E test/regress.py"
                            " -q -S /all/sqlite",
                            "testing sqlite backend")
            for server_vm, suite in [(pgsql84_vm, 'pgsql'),
                                     (pgsql90_vm, 'pgsql'),
                                     (pgsql91_vm, 'pgsql'),
                                     (mysql51_vm, 'mysql'),
                                     (oracle10g_vm, 'oracle'),
                                     (mssql2005_vm, 'mssql'),
                                     (mssql2008_vm, 'mssql')]:
                if server_vm.missing():
                    continue
                server_vm.start()
                # Connection parameters are passed to pbbt as -D defines;
                # 10.0.2.2 is the host as seen from the guest, and the
                # server port is forwarded at 10000+port on the host.
                username_key = "%s_USERNAME" % suite.upper()
                password_key = "%s_PASSWORD" % suite.upper()
                host_key = "%s_HOST" % suite.upper()
                port_key = "%s_PORT" % suite.upper()
                username_value = {'pgsql': "postgres", 'mysql': "root",
                                  'oracle': "system", 'mssql': "sa"}[suite]
                password_value = "admin"
                host_value = "10.0.2.2"
                port_value = 10000 + server_vm.port
                command = "pbbt test/regress.yaml -E test/regress.py" \
                          " -q -S /all/%s" \
                          " -D %s=%s -D %s=%s -D %s=%s -D %s=%s" \
                          % (suite, username_key, username_value,
                             password_key, password_value,
                             host_key, host_value,
                             port_key, port_value)
                message = "testing %s backend against %s" \
                          % (suite, server_vm.name)
                errors += trial(command, message)
                server_vm.stop()
            errors += trial("pbbt test/regress.yaml -E test/regress.py"
                            " -q -S /all/routine",
                            "testing htsql-ctl routines")
            client_vm.stop()
    except:
        # Best-effort cleanup: stop everything, then propagate the error.
        for vm in vms:
            if vm.running():
                vm.stop()
        raise
    log()
    if errors:
        if errors == 1:
            warn("1 failed test")
        else:
            warn("{} failed tests", errors)
    else:
        log("`All tests passed`")
def PKG_SRC():
    """create a source package

    This task creates Python source distribution.
    """
    if src_vm.missing():
        raise fail("VM is not built: {}", src_vm.name)
    if src_vm.running():
        src_vm.stop()
    # Start from clean output and scratch directories.
    if os.path.exists("./build/pkg/src"):
        rmtree("./build/pkg/src")
    if os.path.exists("./build/tmp"):
        rmtree("./build/tmp")
    version = get_version()
    all_routines = get_routines()
    all_addons = get_addons()
    moves = load_moves(DATA_ROOT + "/pkg/source/moves.yaml")
    src_vm.start()
    src_vm.run("pip install wheel")
    try:
        for move in moves:
            with_doc = move.variables['with-doc']
            packages = move.variables['packages'].strip().splitlines()
            # Keep only the entry points whose implementing package is
            # part of this distribution (entries look like `name=pkg.mod`).
            routines = "".join(
                    routine + "\n"
                    for routine in all_routines
                    if routine.split('=', 1)[1]
                              .strip().split('.')[0] in packages)
            addons = "".join(
                    addon + "\n"
                    for addon in all_addons
                    if addon.split('=', 1)[1]
                            .strip().split('.')[0] in packages)
            move.variables['version'] = version
            move.variables['htsql-routines'] = routines
            move.variables['htsql-addons'] = addons
            mktree("./build/tmp")
            # Export a pristine copy of the working tree.
            sh("hg archive ./build/tmp/htsql")
            if with_doc:
                setup_py("-q download_vendor", cd="./build/tmp/htsql")
            # Prune source packages not included in this distribution.
            for dirname in glob.glob("./build/tmp/htsql/src/*"):
                if os.path.basename(dirname) not in packages:
                    rmtree(dirname)
            packages = setuptools.find_packages("./build/tmp/htsql/src")
            move.variables['packages'] = "".join(package + "\n"
                                                 for package in packages)
            if not with_doc:
                rmtree("./build/tmp/htsql/doc")
            # Apply the `move` template over the exported tree.
            move(DATA_ROOT + "/pkg/source", "./build/tmp/htsql")
            src_vm.put("./build/tmp/htsql", ".")
            if with_doc:
                # Build HTML docs and man pages inside the VM.
                src_vm.run("cd htsql &&"
                           " PYTHONPATH=src sphinx-build -d doc doc doc/html")
                for filename in glob.glob(
                        "./build/tmp/htsql/doc/man/*.?.rst"):
                    basename = os.path.basename(filename)
                    target = basename[:-4]
                    src_vm.run("rst2man htsql/doc/man/%s htsql/doc/man/%s"
                               % (basename, target))
            src_vm.run("cd htsql &&"
                       " python setup.py sdist --formats=zip,gztar")
            src_vm.run("cd htsql && python setup.py bdist_wheel")
            if not os.path.exists("./build/pkg/src"):
                mktree("./build/pkg/src")
            src_vm.get("./htsql/dist/*", "./build/pkg/src")
            src_vm.run("rm -rf htsql")
            rmtree("./build/tmp")
    finally:
        src_vm.stop()
    log()
    log("The generated source packages are placed in:")
    for filename in glob.glob("./build/pkg/src/*"):
        log(" `{}`", filename)
    log()
def __call__(self):
    # Compute the self.n-th Fibonacci number (F_0 = 0, F_1 = 1) by
    # advancing the pair (F_k, F_{k-1}) one step per iteration.
    current, previous = 0, 1
    for _ in range(self.n):
        current, previous = current + previous, current
    log("F_{} = `{}`", self.n, current)
def build(self):
    # Build a Windows template VM image from an installer ISO.
    super(WindowsTemplateVM, self).build()
    log("building VM: `{}`...", self.name)
    start_time = datetime.datetime.now()
    # Locate an installer ISO: configured path, then `locate`, then prompt.
    src_iso_path = env.windows_iso
    if not (src_iso_path and os.path.isfile(src_iso_path)):
        src_iso_path = None
        output = pipe("locate %s || true" % " ".join(WINDOWS_ISO_FILES))
        for line in output.splitlines():
            if os.path.exists(line):
                src_iso_path = line
                break
        if src_iso_path is None:
            log("unable to find an ISO image for Windows XP"
                " or Windows 2003")
            src_iso_path = prompt("enter path to an ISO image:")
            if not (src_iso_path and os.path.isfile(src_iso_path)):
                raise fail("invalid path: %s" % src_iso_path)
    # Locate a product key: configured value, then a `.key` file next to
    # the ISO, then prompt.  Keys are five dash-separated 5-char groups.
    key_regexp = re.compile(r'^\w{5}-\w{5}-\w{5}-\w{5}-\w{5}$')
    key = env.windows_key
    if not (key and key_regexp.match(key)):
        key = None
        key_path = os.path.splitext(src_iso_path)[0] + ".key"
        if os.path.isfile(key_path):
            key = open(key_path).readline().strip()
            if not key_regexp.match(key):
                key = None
        if key is None:
            log("unable to find a Windows product key")
            key = prompt("enter product key:")
            if not key_regexp.match(key):
                raise fail("invalid product key: {}", key)
    wget_path = self.download(WGET_EXE_URLS)
    unpack_path = TMP_DIR + "/" + self.name
    boot_path = unpack_path + "/eltorito.img"
    if os.path.exists(unpack_path):
        rmtree(unpack_path)
    # Unpack the ISO tree and extract its El Torito boot image.
    self.unpack_iso(src_iso_path, unpack_path)
    self.unpack_iso_boot(src_iso_path, boot_path)
    # Generate WINNT.SIF (unattended-install answers) with the real key.
    sif_template_path = DATA_ROOT + "/vm/%s-winnt.sif" % self.name
    sif_path = unpack_path + "/I386/WINNT.SIF"
    debug("translating: {} => {}", sif_template_path, sif_path)
    sif_template = open(sif_template_path).read()
    sif = sif_template.replace("#####-#####-#####-#####-#####", key)
    # The placeholder must exist in the template; otherwise the key was
    # never injected.
    assert sif != sif_template
    open(sif_path, 'w').write(sif)
    # Stage the post-install payload under $OEM$/$1 (copied to C:\ by
    # the installer).
    install_path = unpack_path + "/$OEM$/$1/INSTALL"
    mktree(install_path)
    cp(wget_path, install_path)
    cp(CTL_DIR + "/identity.pub", install_path)
    cp(DATA_ROOT + "/vm/%s-install.cmd" % self.name,
       install_path + "/INSTALL.CMD")
    iso_path = TMP_DIR + "/%s.iso" % self.name
    if os.path.exists(iso_path):
        rm(iso_path)
    # Repack the customized tree into a bootable ISO.
    sh("mkisofs -o %s -q -iso-level 2 -J -l -D -N"
       " -joliet-long -relaxed-filenames -no-emul-boot"
       " -boot-load-size 4 -b eltorito.img %s" % (iso_path, unpack_path))
    rmtree(unpack_path)
    try:
        # Create a fresh disk image and run the unattended installer.
        self.kvm_img()
        self.kvm("-cdrom %s -boot d" % iso_path)
        rm(iso_path)
        self.compress()
    except:
        # On any failure, drop the half-built image before re-raising.
        if os.path.exists(self.img_path):
            rm(self.img_path)
        raise
    stop_time = datetime.datetime.now()
    log("VM is built successfully: `{}` ({})",
        self.name, stop_time - start_time)