def main(suite):
    """Populate *suite* with a demo deployment topology and fake benchmark
    results, uploading partial results after each test group.

    NOTE(review): relies on ptHost/ptVM/ptComponent/ptTest, random, sqrt and
    __version__ being imported at file level (not visible in this chunk).
    """
    # Handy links shown next to the suite in the UI.
    suite.addLink('Grafana', 'http://grafana.localdomain/')
    suite.addLink('Login page', 'http://192.168.100.3/login')

    # Physical hosts: name -> node object (s1..s3 will carry VMs).
    hosts = {}
    for name, addr, fqdn, ncpu, mem in (
            ("s1", "192.168.0.1", "server1.localdomain", 32, 128),
            ("s2", "192.168.0.2", "server2.localdomain", 32, 128),
            ("s3", "192.168.0.3", "server3.localdomain", 32, 128),
            ("s4", "192.168.0.4", "server4.localdomain", 16, 64)):
        hosts[name] = suite.addNode(
            ptHost(name, ip=addr, hostname=fqdn, version="CentOS 7.4",
                   cpus=ncpu, ram_gb=mem))

    # Guest VMs: (name, parent host, ip, cpus, ram_gb).
    vms = {}
    for name, parent, addr, ncpu, mem in (
            ("vm1", "s1", "192.168.100.1", 4, 32),
            ("vm2", "s1", "192.168.100.2", 4, 32),
            ("vm3", "s2", "192.168.100.3", 8, 64),
            ("vm4", "s3", "192.168.100.4", 8, 64)):
        vms[name] = hosts[parent].addNode(
            ptVM(name, ip=addr, version="CentOS 7.4", virt_type="KVM VM",
                 cpus=ncpu, ram_gb=mem))

    # The machine the benchmark client itself runs on.
    suite.addNode(ptHost("client", params="Python3", scan_info=True))

    # Software components deployed inside the VMs.
    for vm_name, comp, ver in (("vm1", "database", "1.0.12"),
                               ("vm2", "backend", "1.0.12"),
                               ("vm3", "UI#1", "1.0.13"),
                               ("vm4", "UI#2", "1.0.13")):
        vms[vm_name].addNode(ptComponent(comp, version=ver))

    group = "Latency tests"
    suite.addTest(
        ptTest("Simple user login test",
               less_better=True,
               description="Login under a user, 1 parallel client, time includes navigation to home page",
               group=group,
               metrics="sec",
               # last score is jittered to make consecutive runs differ
               scores=[0.6, 0.72, 0.65 + random.randint(0, 10) / 10.0],
               deviations=[0.05, 0.12, 0.03],
               loops=100,
               links={"repo": "https://github.com/perfguru87/perftracker-client"},
               attribs={"version": str(__version__)}))
    suite.addTest(
        ptTest("Simple admin login test",
               less_better=True,
               description="Login under admin, 1 parallel client",
               group=group,
               metrics="sec",
               scores=[0.8, 0.9, 1.2 + random.randint(0, 10) / 10.0],
               deviations=[0.03, 0.09, 0.08],
               loops=100,
               links={"repo": "https://github.com/perfguru87/perftracker-client"},
               attribs={"version": str(__version__)}))

    # Login latency vs number of parallel users (random loop bound keeps the
    # demo data varied between runs).
    for n in range(1, 5 + random.randint(0, 2)):
        suite.addTest(
            ptTest("Login time",
                   group=group,
                   metrics="sec",
                   less_better=True,
                   category="%d parallel users" % (2 ** n),
                   scores=[0.3 + sqrt(n) + random.randint(0, 20) / 40.0]))

    for n in range(1, 20 + random.randint(0, 10)):
        suite.addTest(
            ptTest("Pages response time, 1 parallel client",
                   group=group,
                   metrics="sec",
                   less_better=True,
                   category="page #%3d" % n,
                   scores=[0.3 + random.randint(0, 20) / 40.0],
                   errors=['4xx error'] if random.randint(0, 30) == 0 else [],
                   warnings=['HTTP 500'] if random.randint(0, 20) == 0 else [],
                   status="FAILED" if random.randint(0, 25) == 0 else "SUCCESS"))

    # Page latency as a function of database size; two pages share the same
    # shape, only the base latency differs.
    for title, base in (("Home page response time", 0.3),
                        ("Dashboard page response time", 0.8)):
        for n in range(1, 100 + random.randint(0, 100)):
            suite.addTest(
                ptTest(title,
                       group=group,
                       metrics="sec",
                       less_better=True,
                       category="DB size %d GB" % n,
                       scores=[base + (sqrt(n) + random.randint(0, 20)) / 40],
                       errors=['4xx error'] if random.randint(0, 30) == 0 else [],
                       warnings=['HTTP 500'] if random.randint(0, 20) == 0 else [],
                       status="FAILED" if random.randint(0, 25) == 0 else "SUCCESS"))
    suite.upload()

    group = "Throughput tests"
    for n in range(1, 5 + random.randint(0, 2)):
        suite.addTest(
            ptTest("Home page throughput",
                   group=group,
                   metrics="pages/sec",
                   category="%d parallel clients" % (2 ** n),
                   scores=[10 + sqrt(n) + random.randint(0, 20) / 5]))
    suite.upload()

    group = "Download throughput"
    for n in range(1, 5 + random.randint(0, 2)):
        suite.addTest(
            ptTest("Download throughput",
                   group=group,
                   metrics="Bytes/sec",
                   category="%d parallel clients" % (2 ** n),
                   scores=[10000000000 + random.randint(10 * n, 20 * n) * 5000000]))
    suite.upload()

    group = "Upgrade"
    for title, times in (("Upgrade with small database", [124, 125, 123]),
                         ("Upgrade with large database", [344, 329, 351])):
        suite.addTest(ptTest(title, group=group, metrics="seconds", scores=times))
    suite.upload()
def init(self):
    """One-time setup: validate the configured URLs, register the local
    client node with the suite, upload the suite, and print the header.

    NOTE(review): depends on self.suite, self._validate_urls() and
    self.print_ab_header() being provided by the enclosing class, which is
    not visible in this chunk.
    """
    self._validate_urls()
    # The benchmark client runs on this machine; scan_info=True presumably
    # asks the library to auto-collect host details -- confirm with ptHost docs.
    self.suite.addNode(ptHost("client", ip='127.0.0.1', scan_info=True))
    self.suite.upload()
    self.print_ab_header()
def main(suite):
    """Populate *suite* with a fake account-service benchmark: one ESX host
    carrying an account-server VM and a test-client VM, then provisioning,
    read-only and modifying tests at increasing database sizes.

    Fixes vs. the previous revision:
      * the second VM was assigned to ``vm1`` again, shadowing the first
        (account-server) node handle -- it is now ``vm2``;
      * the Delete tests' descriptions were copy-pasted from the PUT/update
        tests -- they now describe DELETE;
      * the 8-thread GET-list test's description said "in 1 thread".

    NOTE(review): ptHost/ptVM/ptTest and sqrt come from file-level imports
    not visible in this chunk.
    """
    s1 = suite.addNode(
        ptHost("s1", ip="192.168.0.1", hostname="s1.mydomain",
               version="ESX 7.0", cpus=32, ram_gb=128))
    vm1 = s1.addNode(
        ptVM("account-server-vm", ip="192.168.100.1", version="CentOS 7.4",
             cpus=16, ram_gb=64))
    vm2 = s1.addNode(
        ptVM("test-client-vm", ip="192.168.100.2", version="CentOS 7.4",
             cpus=16, ram_gb=64))

    chunk = 50
    for accounts_in_db in range(0, 1050, chunk):
        # NOTE(review): 1000 * accounts_in_db labels the category with up to
        # 1,000,000 accounts while the loop variable only reaches 1000 --
        # confirm the intended label scale.
        volume = "%d accounts in DB" % (1000 * accounts_in_db)

        group = "Provisioning tests"
        suite.addTest(
            ptTest("Create accounts in 1 thread",
                   group=group,
                   category=volume,
                   metrics="accounts/sec",
                   duration_sec=900,
                   description="POST /accounts in 1 thread (create %d accounts)" % (chunk * 0.1),
                   scores=[60 / sqrt(sqrt(sqrt(accounts_in_db + 100)))]))  # fake score
        suite.addTest(
            ptTest("Create accounts in 16 threads",
                   group=group,
                   category=volume,
                   metrics="accounts/sec",
                   duration_sec=3200,
                   description="POST /accounts in 16 threads (create %d accounts)" % (chunk * 0.9),
                   scores=[600 / (1.1 * sqrt(accounts_in_db + 100))]))  # fake score

        group = "Read-only tests"
        suite.addTest(
            ptTest("GET information for some account in 1 thread",
                   group=group,
                   category=volume,
                   metrics="accounts/sec",
                   duration_sec=30,
                   description="GET /account/1 in 1 thread (30 sec)",
                   scores=[120 - sqrt(accounts_in_db + 10)]))  # fake score
        suite.addTest(
            ptTest("GET information for some account in 8 threads",
                   group=group,
                   category=volume,
                   metrics="accounts/sec",
                   duration_sec=30,
                   description="GET /account/1 in 8 threads (30 sec)",
                   scores=[430 - 2 * sqrt(accounts_in_db + 10)]))  # fake score
        suite.addTest(
            ptTest("GET information for some account in 64 threads",
                   group=group,
                   category=volume,
                   metrics="accounts/sec",
                   duration_sec=30,
                   description="GET /account/1 in 64 threads (30 sec)",
                   scores=[850 - 1.5 * sqrt(accounts_in_db + 10)]))  # fake score
        suite.addTest(
            ptTest("GET list of 10 first accounts in 1 thread",
                   group=group,
                   category=volume,
                   metrics="lists/sec",
                   duration_sec=30,
                   description="GET /accounts/?limit=10 in 1 thread (30 sec)",
                   scores=[95 - 0.3 * sqrt(accounts_in_db + 10)]))  # fake score
        suite.addTest(
            ptTest("GET list of 10 first accounts in 8 thread",
                   group=group,
                   category=volume,
                   metrics="lists/sec",
                   duration_sec=30,
                   # was "in 1 thread" -- copy-paste fix
                   description="GET /accounts/?limit=10 in 8 threads (30 sec)",
                   scores=[105 - 0.4 * sqrt(accounts_in_db + 10)]))  # fake score
        suite.addTest(
            ptTest("GET list of 10 first accounts in 64 thread",
                   group=group,
                   category=volume,
                   metrics="lists/sec",
                   duration_sec=30,
                   description="GET /accounts/?limit=10 in 64 thread (30 sec)",
                   scores=[115 - 0.3 * sqrt(accounts_in_db + 10)]))  # fake score

        group = "Modificating tests"
        suite.addTest(
            ptTest("Update accounts in 1 thread",
                   group=group,
                   category=volume,
                   metrics="accounts/sec",
                   duration_sec=89,
                   description="PUT /accounts in 1 thread (update 100 accounts)",
                   scores=[110 - 0.2 * sqrt(accounts_in_db + 10)]))  # fake score
        suite.addTest(
            ptTest("Update accounts in 16 threads",
                   group=group,
                   category=volume,
                   metrics="accounts/sec",
                   duration_sec=970,
                   description="PUT /accounts in 16 threads (update 1600 accounts)",
                   scores=[130 - 0.3 * sqrt(accounts_in_db + 10)]))  # fake score
        suite.addTest(
            ptTest("Delete accounts in 1 thread",
                   group=group,
                   category=volume,
                   metrics="accounts/sec",
                   duration_sec=520,
                   # was "PUT ... update" -- copy-paste fix
                   description="DELETE /accounts in 1 thread (delete 100 accounts)",
                   scores=[20 - 0.2 * sqrt(accounts_in_db + 10)]))  # fake score
        suite.addTest(
            ptTest("Delete accounts in 16 threads",
                   group=group,
                   category=volume,
                   metrics="accounts/sec",
                   duration_sec=540,
                   # was "PUT ... update" -- copy-paste fix
                   description="DELETE /accounts in 16 threads (delete 1600 accounts)",
                   scores=[320 - 0.1 * sqrt(accounts_in_db + 10)]))  # fake score

    suite.upload()