def runTest(self):
    """run() should load the cached GCP billing data into GcpLineItem rows.

    'nocacheupdate' keeps the script from hitting the network; the cache
    directory is this test module's own directory.
    """
    from budget.scripts.gcp_billing_import import run

    config = {'cache.dir': os.path.dirname(__file__)}
    opts = {'nocacheupdate': True}
    run(config, opts)

    rows = DBSession.query(GcpLineItem).all()
    self.assertEqual(rows[0].cost_amount, 1.234567)
def runTest(self):
    """expire() stamps an end_date on nodes missing from the current-uid list.

    The uid passed in is still present, so its row keeps end_date None;
    the other row is expired with an end_date within the last 2 seconds.
    """
    from budget.scripts.openshift_v3_stats import expire

    present_uid = '12345678-1234-5678-1234-567812345678'
    absent_uid = '23456789-2345-6789-2345-678923456789'

    expire(DBSession, Openshift3Node, [present_uid])

    rows = DBSession.query(Openshift3Node.uid, Openshift3Node.end_date).all()
    self.assertEqual(rows[0], (present_uid, None))
    self.assertEqual(rows[1][0], absent_uid)
    age = (datetime.now() - rows[1][1]).total_seconds()
    self.assertLessEqual(age, 2)
def runTest(self):
    """insert_data() should load a cached GCP billing JSON file into
    GcpLineItem rows.
    """
    import os

    from budget.scripts.gcp_billing_import import insert_data

    # os.path.join instead of string concatenation with a hard-coded "/"
    # separator — portable and consistent with stdlib path handling.
    cache_dir = os.path.join(os.path.dirname(__file__), 'gcp')
    filename = 'gcp-billing-2001-01-01.json'
    insert_data(filename, cache_dir)

    result = DBSession.query(GcpLineItem).all()
    self.assertEqual(result[0].cost_amount, 1.234567)
def runTest(self):
    """Expiring against a current-uid list leaves listed nodes untouched
    (end_date None) and stamps unlisted nodes with a fresh end_date.
    """
    from budget.scripts.openshift_v3_stats import expire

    expire(DBSession, Openshift3Node,
           ['12345678-1234-5678-1234-567812345678'])

    records = DBSession.query(Openshift3Node.uid,
                              Openshift3Node.end_date).all()

    first_uid, first_end = records[0]
    self.assertEqual(first_uid, '12345678-1234-5678-1234-567812345678')
    self.assertIsNone(first_end)

    self.assertEqual(records[1][0], '23456789-2345-6789-2345-678923456789')
    elapsed = (datetime.now() - records[1][1]).total_seconds()
    self.assertLessEqual(elapsed, 2)
def lookup_price(options):
    '''
    Digs through a massive nest of json data to extract the on demand
    pricing for AWS instances.

    See also:
    https://docs.aws.amazon.com/awsaccountbilling/latest/aboutv2/price-changes.html

    Params (attributes of ``options``):
        instance_type: any valid AWS instance size. (e.g. 'm4.xlarge')
        region: an AWS region endpoint name. (e.g. 'us-east-1')
        tenancy: 'Shared' or 'Dedicated'
        operating_system: product OS name (e.g. 'Linux')
        pricing: 'OnDemand' or 'Reserved'
        lease_contract_length: '1yr' or '3yr'
        purchase_option: 'No Upfront' or 'Partial Upfront' or 'Full Upfront'

    Returns:
        list of dict: key - 'Hrs' or 'Quantity'
                      value - Decimal
        (fixed: the original docstring claimed a single dict was returned)
    '''
    region_name = region_lookup(options.region)

    # Join AwsPrice to AwsProduct on sku, narrowing by product attributes.
    products = DBSession.query(
        AwsPrice.price_dimensions,
        AwsPrice.term_attributes
    ).filter(
        AwsProduct.instance_type == options.instance_type,
        AwsProduct.location == region_name,
        AwsProduct.tenancy == options.tenancy,
        AwsProduct.operating_system == options.operating_system,
        AwsPrice.sku == AwsProduct.sku
    ).all()

    costs = []
    for prd in products:
        price_dimensions = json.loads(prd[0])
        term_attributes = json.loads(prd[1])

        if options.pricing == 'OnDemand':
            # re.escape: instance types contain '.' (e.g. 'm4.xlarge'),
            # which is a regex metacharacter and previously matched ANY
            # character, risking false matches.
            rgx = re.compile(r'On Demand %s %s' % (
                re.escape(options.operating_system),
                re.escape(options.instance_type)))
            costs.append(_find_cost(rgx, price_dimensions))

        elif options.pricing == 'Reserved':
            # On-Demand rows have no term_attributes; skip them.
            if term_attributes == {}:
                continue

            for _, val in price_dimensions.items():
                term_attributes.update(
                    {val['description']: val['pricePerUnit']})
            costs.append(term_attributes)

    return costs
def lookup_price(options):
    '''
    Digs through a massive nest of json data to extract the on demand
    pricing for AWS instances.

    See also:
    https://docs.aws.amazon.com/awsaccountbilling/latest/aboutv2/price-changes.html

    Params (attributes of ``options``):
        instance_type: any valid AWS instance size. (e.g. 'm4.xlarge')
        region: an AWS region endpoint name. (e.g. 'us-east-1')
        tenancy: 'Shared' or 'Dedicated'
        operating_system: product OS name (e.g. 'Linux')
        pricing: 'OnDemand' or 'Reserved'
        lease_contract_length: '1yr' or '3yr'
        purchase_option: 'No Upfront' or 'Partial Upfront' or 'Full Upfront'

    Returns:
        list of dict: key - 'Hrs' or 'Quantity'
                      value - Decimal
        (fixed: the original docstring claimed a single dict was returned)
    '''
    region_name = region_lookup(options.region)

    # Join AwsPrice to AwsProduct on sku, narrowing by product attributes.
    products = DBSession.query(
        AwsPrice.price_dimensions,
        AwsPrice.term_attributes
    ).filter(
        AwsProduct.instance_type == options.instance_type,
        AwsProduct.location == region_name,
        AwsProduct.tenancy == options.tenancy,
        AwsProduct.operating_system == options.operating_system,
        AwsPrice.sku == AwsProduct.sku
    ).all()

    costs = []
    for prd in products:
        price_dimensions = json.loads(prd[0])
        term_attributes = json.loads(prd[1])

        if options.pricing == 'OnDemand':
            # re.escape: instance types contain '.' (e.g. 'm4.xlarge'),
            # which is a regex metacharacter and previously matched ANY
            # character, risking false matches.
            rgx = re.compile(r'On Demand %s %s' % (
                re.escape(options.operating_system),
                re.escape(options.instance_type)))
            costs.append(_find_cost(rgx, price_dimensions))

        elif options.pricing == 'Reserved':
            # On-Demand rows have no term_attributes; skip them.
            if term_attributes == {}:
                continue

            for _, val in price_dimensions.items():
                term_attributes.update(
                    {val['description']: val['pricePerUnit']})
            costs.append(term_attributes)

    return costs
def runTest(self):
    """update() should insert the node parsed from 'oc get nodes'-style
    YAML and return the list of node uids it saw.
    """
    from budget.scripts.openshift_v3_stats import update

    # yaml.safe_load instead of yaml.load: plain yaml.load without an
    # explicit Loader is deprecated in PyYAML and can construct arbitrary
    # Python objects from tagged input.
    yml = yaml.safe_load('''
apiVersion: v1
items:
- apiVersion: v1
  kind: Node
  metadata:
    creationTimestamp: 2001-01-01T12:00:00Z
    labels:
      color: red
      type: compute
    name: test
    uid: 12345678-1234-5678-1234-567812345678
  spec:
    externalID: i-123456789abcdef
    providerID: test:///test/i-123456789abcdef
  status:
    addresses:
    - address: 10.0.0.5
      type: InternalIP
    - address: 10.0.0.6
      type: ExternalIP
    allocatable:
      cpu: "2"
      memory: 2048Ki
      pods: "20"
    capacity:
      cpu: "2"
      memory: 2048Ki
      pods: "20"
    conditions:
    - lastHeartbeatTime: 2001-01-01T12:01:00Z
      lastTransitionTime: 2001-01-01T12:01:00Z
      message: kubelet is posting ready status
      reason: KubeletReady
      status: "True"
      type: Ready''')

    yaml_info = {'collection_date': datetime.now(), 'cluster_id': 'test'}
    lst = update(DBSession, Openshift3Node, yml, yaml_info)
    self.assertEqual(lst, ['12345678-1234-5678-1234-567812345678'])

    result = DBSession.query(Openshift3Node.uid).all()
    self.assertEqual(result, [(u'12345678-1234-5678-1234-567812345678',)])
def runTest(self):
    """update() inserts exactly one node from the sample node-list YAML
    and reports its uid back to the caller.
    """
    from budget.scripts.openshift_v3_stats import update

    node_uid = '12345678-1234-5678-1234-567812345678'

    # safe_load fixes the deprecated/unsafe bare yaml.load call (no
    # Loader argument); the sample document needs no custom tags.
    yml = yaml.safe_load('''
apiVersion: v1
items:
- apiVersion: v1
  kind: Node
  metadata:
    creationTimestamp: 2001-01-01T12:00:00Z
    labels:
      color: red
      type: compute
    name: test
    uid: 12345678-1234-5678-1234-567812345678
  spec:
    externalID: i-123456789abcdef
    providerID: test:///test/i-123456789abcdef
  status:
    addresses:
    - address: 10.0.0.5
      type: InternalIP
    - address: 10.0.0.6
      type: ExternalIP
    allocatable:
      cpu: "2"
      memory: 2048Ki
      pods: "20"
    capacity:
      cpu: "2"
      memory: 2048Ki
      pods: "20"
    conditions:
    - lastHeartbeatTime: 2001-01-01T12:01:00Z
      lastTransitionTime: 2001-01-01T12:01:00Z
      message: kubelet is posting ready status
      reason: KubeletReady
      status: "True"
      type: Ready''')

    yaml_info = {'collection_date': datetime.now(), 'cluster_id': 'test'}
    seen = update(DBSession, Openshift3Node, yml, yaml_info)
    self.assertEqual(seen, [node_uid])

    stored = DBSession.query(Openshift3Node.uid).all()
    self.assertEqual(stored, [(node_uid, )])