def testRackspaceComputationalCosts(self):
    """Rackspace compute cost is positive for non-zero usage, zero for
    zero usage, and non-decreasing as usage grows."""
    cpu_usage = generate_random_usage(random.randint(1, 100000), 'spikes')
    mem_usage = generate_random_usage(random.randint(1, 100000), 'spikes')
    cost = rackspace.get_optimal_rackspace(cpu_usage, mem_usage, False)
    self.assertTrue(cost > 0)
    # Zero usage must incur zero cost.
    cost = rackspace.get_optimal_rackspace([0], [0])
    self.assertTrue(cost == 0)
    # Monotonicity: scaling both CPU and memory usage up by 20%
    # must not make the optimal deployment cheaper.
    cpu = random.randint(1, 1000)
    mem = random.randint(1, 1000)
    cost_base = rackspace.get_optimal_rackspace([cpu], [mem])
    cost_scaled = rackspace.get_optimal_rackspace([cpu * 1.2], [mem * 1.2])
    self.assertTrue(cost_base <= cost_scaled)
def testAWSStorageCosts(self):
    """AWS storage costs (EBS and S3) are positive for real usage and
    both zero when nothing is stored."""
    current_storage = generate_random_usage(random.randint(10, 100), 'spikes')
    ebs_cost, s3_cost = aws.get_storage_costs(current_storage)
    self.assertTrue(s3_cost > 0)
    self.assertTrue(ebs_cost > 0)
    # No storage used -> no cost on either service.
    ebs_cost, s3_cost = aws.get_storage_costs([0])
    self.assertTrue(s3_cost == 0)
    self.assertTrue(ebs_cost == 0)
def testAWSComputationalCosts(self):
    """EC2 cost is positive for non-zero usage, zero (with an empty
    deployment) for zero usage, non-decreasing as usage grows, and
    offers no reserved instances for trivially small usage."""
    cpu_usage = generate_random_usage(random.randint(1, 100000), 'spikes')
    mem_usage = generate_random_usage(random.randint(1, 100000), 'spikes')
    _, cost = aws.get_optimal_ec2(cpu_usage, mem_usage, False)
    self.assertTrue(cost > 0)
    # Zero usage -> zero cost and an empty deployment plan.
    deployment, cost = aws.get_optimal_ec2([0], [0])
    self.assertTrue(cost == 0)
    self.assertTrue(deployment == {})
    # Monotonicity: 20% more CPU and memory must not be cheaper.
    cpu = random.randint(1, 1000)
    mem = random.randint(1, 1000)
    _, cost_base = aws.get_optimal_ec2([cpu], [mem])
    _, cost_scaled = aws.get_optimal_ec2([cpu * 1.2], [mem * 1.2])
    self.assertTrue(cost_base <= cost_scaled)
    # Integer solve: tiny usage should not trigger any reserved
    # instance purchases.
    pack, _ = aws.get_optimal_ec2([1], [1], True)
    self.assertTrue(pack == {})
''' Demonstrates usage of cloud-calculator library. ''' # We start by getting usage statistics. This can be # either simulated or acquired from one of the monitoring solutions # (e.g. Ganglia, Zabbix or collectd). # We generate usage statistics for a 12-month period # Generation result is a list of accumulated monthly usage. # The last parameter - 'spikes', 'flat' or 'semi-flat' - is a randomization parameter, # which describes variation of the usage from sitio.common.utils import generate_random_usage # network (GB/month) and storage (avg GB/month) storage_used = generate_random_usage(40, 'spikes') network_used_in = generate_random_usage(2, 'spikes') network_used_out = generate_random_usage(20, 'spikes') # consumed VM time (h/month) and used memory (GB/h*month), normalized to AWS CPU units cpu_usage = generate_random_usage(5000, 'spikes') mem_usage = generate_random_usage(12000, 'spikes') # Perform basic analysis from sitio.analyser import aws, rackspace # Calculate storage cost on two clouds: AWS and Rackspace # Pricelists in csv format are located in sitio/analyser/pricelist folder. ebs_storage_cost, s3_storage_cost = aws.get_storage_costs(storage_used) rack_storage_costs = rackspace.get_storage_costs(storage_used) print "Storage costs on AWS: $%s, $%s" %(ebs_storage_cost, s3_storage_cost) print "Storage costs on Rackspace: $%s" % rack_storage_costs
def testAWSStorageCosts(self):
    """Rackspace storage cost is positive for real usage and zero when
    nothing is stored.

    NOTE(review): the name says AWS but the body exercises Rackspace —
    presumably a copy-paste misnomer; consider renaming to
    testRackspaceStorageCosts (kept as-is to avoid changing the
    test-discovery interface here).
    """
    current_storage = generate_random_usage(random.randint(10, 100), 'spikes')
    cost = rackspace.get_storage_costs(current_storage)
    self.assertTrue(cost > 0)
    # Zero storage -> zero cost.
    cost = rackspace.get_storage_costs([0])
    self.assertTrue(cost == 0)