def test_hours_per_consumer(self):
    ReportData.drop_collection()
    count = 0
    for key, value in products_dict.items():
        count += 1
        entry = TestData.create_entry(key, mem_high=True)
        entry.save(safe=True)
    lookup = len(ReportData.objects.all())
    self.assertEqual(lookup, count)
    end = datetime.now()
    delta = timedelta(days=1)
    start = datetime.now() - delta
    list_of_rhics = RHIC.objects.all()
    results = hours_per_consumer(start, end, list_of_rhics)
    # RHEL and JBoss currently share a RHIC in products_dict, so expect
    # one result fewer than the number of products.
    self.assertEqual(len(results), len(products_dict) - 1,
                     "correct number of results returned")
    results_product_list = []
    for r in results:
        self.assertEqual(r[0]['nau'], '1', "number of checkins is accurate")
        results_product_list.append(r[0]['product_name'])
    intersect = set(results_product_list).intersection(products_dict.keys())
    self.assertEqual(len(intersect), len(products_dict) - 1,
                     "number of products returned in results is accurate")
def test_basic_mdu(self):
    """ Basic test of mdu """
    ReportData.drop_collection()
    rhel_entry = TestData.create_entry(RHEL, mem_high=True)
    rhel_entry.save()
    lookup = ReportData.objects.all()
    self.assertEqual(len(lookup), 1)
    delta = timedelta(days=1)
    start = datetime.now() - delta
    end = datetime.now() + delta
    filter_args = {
        "memtotal__gt": rhel_entry.memtotal - 1,
        "product": rhel_entry.product,
        "contract_id": rhel_entry.contract_id,
        "support": rhel_entry.support,
        "memtotal__lt": rhel_entry.memtotal + 1,
        "consumer_uuid": rhel_entry.consumer_uuid,
        "sla": rhel_entry.sla
    }
    args = {
        "start": start.strftime(constants.epoch),
        "end": end.strftime(constants.epoch),
        "filter_args_dict": filter_args,
        "description": {"Product": RHEL}
    }
    test_dict = MaxUsage.get_MDU_MCU(**args)
    result = test_dict['mdu'][1]
    self.assertEqual(result[1], 1, "correct mdu found")
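# Note on the shape exercised above (inferred from this test, not from a
# documented API): MaxUsage.get_MDU_MCU appears to return a dict whose
# 'mdu' and 'mcu' values are indexable, with test_dict['mdu'][1][1]
# holding the usage count that the assertions check.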
def test_generic_config_RHEL_JBOSS_same_rhic(self):
    ReportData.drop_collection()
    # create 1 RHEL, 2 JBoss
    entry_high = TestData.create_entry(RHEL, mem_high=True)
    entry_high.save(safe=True)
    entry_high = TestData.create_entry(JBoss, socket=5)
    entry_high.save(safe=True)
    entry_low = TestData.create_entry(JBoss, socket=4)
    entry_low.save(safe=True)
    delta = timedelta(days=1)
    start = datetime.now() - delta
    end = datetime.now() + delta
    environment = "us-east-1"
    lookup = ReportData.objects.all()
    self.assertEqual(len(lookup), 3)
    # test for RHEL match
    rhic = RHIC.objects.filter(uuid=products_dict[RHEL][1])[0]
    p = Product.objects.filter(name=RHEL, sla=rhic.sla,
                               support_level=rhic.support_level)[0]
    results_dicts = Product_Def.get_count(p, rhic, start, end, rhic.contract,
                                          environment, report_biz_rules)
    self.assertEqual(len(results_dicts), 1)
    # test for JBoss match
    rhic = RHIC.objects.filter(uuid=products_dict[JBoss][1])[0]
    p = Product.objects.filter(name=JBoss, sla=rhic.sla,
                               support_level=rhic.support_level)[0]
    results_dicts = Product_Def.get_count(p, rhic, start, end, rhic.contract,
                                          environment, report_biz_rules)
    self.assertEqual(len(results_dicts), 2)
def test_RHEL_Host_negative(self):
    ReportData.drop_collection()
    entry_high = TestData.create_entry(UNLIMITED, socket=3)
    entry_high.save(safe=True)
    entry_low = TestData.create_entry(UNLIMITED, socket=3, mem_high=True)
    entry_low.save(safe=True)
    self.check_product_result(1, 1)
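# check_product_result is a helper defined elsewhere in this suite; from its
# use in this group of negative tests it is assumed (not confirmed by this
# excerpt) to assert the number of matching products and result rows found
# for the entries saved above.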
def test_OpenShift_Gear_negative(self):
    ReportData.drop_collection()
    entry_high = TestData.create_entry(GEAR, cpu=2, mem_high=True)
    entry_high.save(safe=True)
    entry_low = TestData.create_entry(GEAR, cpu=3, mem_high=True)
    entry_low.save(safe=True)
    self.check_product_result(1, 1)
def test_JBoss_vcpu_negative(self):
    ReportData.drop_collection()
    entry_high = TestData.create_entry(JBoss, socket=5)
    entry_high.save(safe=True)
    entry_low = TestData.create_entry(JBoss, socket=5, mem_high=True)
    entry_low.save(safe=True)
    self.check_product_result(1, 1)
def test_RHEL_memory_negative(self):
    ReportData.drop_collection()
    entry_high = TestData.create_entry(RHEL, mem_high=True)
    entry_high.save(safe=True)
    entry_low = TestData.create_entry(RHEL, mem_high=True, socket=12)
    entry_low.save(safe=True)
    self.check_product_result(1, 1)
def test_advanced_mcu_mdu(self):
    """
    Three ReportData entries, each with a unique instance_identifier:
    two in the same $hour, one in $hour + 1.
    Expected: mcu = 2, mdu = 3.
    """
    delta_day = timedelta(days=1)
    delta_hour = timedelta(hours=1)
    start = datetime.now() - delta_day
    end = datetime.now() + delta_day
    hour_plus_1 = datetime.now() + delta_hour
    ReportData.drop_collection()
    rhel_entry = TestData.create_entry(RHEL, instance_identifier="00:10")
    rhel_entry.save()
    rhel_entry = TestData.create_entry(RHEL, instance_identifier="00:11")
    rhel_entry.save()
    rhel_entry = TestData.create_entry(RHEL, date=hour_plus_1,
                                       instance_identifier="00:12")
    rhel_entry.save()
    lookup = ReportData.objects.all()
    self.assertEqual(len(lookup), 3)
    filter_args = {
        "memtotal__gt": rhel_entry.memtotal - 1,
        "product": rhel_entry.product,
        "contract_id": rhel_entry.contract_id,
        "support": rhel_entry.support,
        "memtotal__lt": rhel_entry.memtotal + 1,
        "consumer_uuid": rhel_entry.consumer_uuid,
        "sla": rhel_entry.sla
    }
    args = {
        "start": start.strftime(constants.epoch),
        "end": end.strftime(constants.epoch),
        "filter_args_dict": filter_args,
        "description": {"Product": RHEL}
    }
    test_dict = MaxUsage.get_MDU_MCU(**args)
    mdu = test_dict['mdu'][1]
    mcu = test_dict['mcu'][1]
    self.assertEqual(mdu[1], 3, "correct mdu found")
    self.assertEqual(mcu[1], 2, "correct mcu found")
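# Illustrative sketch (not the product code) of the mcu/mdu distinction this
# test relies on: mdu counts distinct instances over the whole window, while
# mcu is the largest number of distinct instances seen in any single hour
# bucket. The helper name below is hypothetical.
#
#     def _sketch_mcu_mdu(checkins):
#         """checkins: iterable of (hour, instance_identifier) pairs."""
#         per_hour = {}
#         for hour, ident in checkins:
#             per_hour.setdefault(hour, set()).add(ident)
#         mdu = len(set(ident for _, ident in checkins))
#         mcu = max(len(idents) for idents in per_hour.values())
#         return mcu, mdu
#
#     # Two instances in hour "10", one in hour "11" -> mcu 2, mdu 3:
#     # _sketch_mcu_mdu([("10", "00:10"), ("10", "00:11"), ("11", "00:12")])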
def main():
    SpliceServer.drop_collection()
    ProductUsage.drop_collection()
    ReportData.drop_collection()
    ss1 = TestData.create_splice_server("test01", "east")
    uuid_rhel_jboss = products_dict[RHEL][1]
    prod_rhel = products_dict[RHEL][0]
    prod_jboss = products_dict[JBoss][0]
    uuid_ha = products_dict[HA][1]
    prod_ha = products_dict[HA][0]
    uuid_edu = products_dict[EDU][1]
    prod_edu = products_dict[EDU][0]
    now = datetime.now()
    delta_day = timedelta(days=4)
    delta_hour = timedelta(days=3)
    for i in range(1, 10):
        this_time = now - (delta_hour * i)
        create_set_of_usage(prod_rhel, uuid_rhel_jboss,
                            get_times(this_time), ss1, 7)
    for i in range(1, 10):
        this_time = now - (delta_hour * i)
        create_set_of_usage(prod_jboss, uuid_rhel_jboss,
                            get_times(this_time), ss1, 4)
    for i in range(1, 4):
        this_time = now - (delta_hour * i)
        create_set_of_usage(prod_ha, uuid_ha, get_times(this_time), ss1, 7)
    for i in range(5, 10):
        this_time = now - (delta_hour * i)
        create_set_of_usage(prod_edu, uuid_edu, get_times(this_time), ss1, 5)
    # run the import
    results = import_data(force_import=True)
    # verify the imported items landed in the db
    lookup = ReportData.objects.all()
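# Entry-point guard for the data-generation flow above; a minimal sketch,
# assuming this main() is meant to be run as a standalone script.
if __name__ == "__main__":
    main()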
def test_generic_config_RHEL(self):
    ReportData.drop_collection()
    entry_high = TestData.create_entry(RHEL, mem_high=True)
    entry_high.save(safe=True)
    delta = timedelta(days=1)
    start = datetime.now() - delta
    end = datetime.now() + delta
    contract_num = "3116649"
    environment = "us-east-1"
    lookup = ReportData.objects.all()
    self.assertEqual(len(lookup), 1)
    # test perfect match
    p = Product.objects.filter(name=RHEL)[0]
    rhic = RHIC.objects.filter(uuid=products_dict[RHEL][1])[0]
    results_dicts = Product_Def.get_count(p, rhic, start, end, contract_num,
                                          environment, report_biz_rules)
    self.assertEqual(len(results_dicts), 1)
def test_find_each_product(self):
    ReportData.drop_collection()
    count = 0
    for key, value in products_dict.items():
        count += 1
        entry = TestData.create_entry(key, mem_high=True)
        entry.save(safe=True)
    lookup = len(ReportData.objects.all())
    self.assertEqual(lookup, count)
    end = datetime.now()
    delta = timedelta(days=1)
    start = datetime.now() - delta
    for key, value in products_dict.items():
        rhic = RHIC.objects.filter(uuid=value[1])[0]
        p = Product.objects.filter(name=key, sla=rhic.sla,
                                   support_level=rhic.support_level)[0]
        results_dicts = Product_Def.get_count(p, rhic, start, end,
                                              rhic.contract, "us-east-1",
                                              report_biz_rules)
        self.assertEqual(len(results_dicts), 1)
def import_data(product_usage=None, checkin_interval=1,
                from_splice_server="NA", force_import=False):
    """
    @param product_usage: usage records to import; defaults to all
        ProductUsage objects
    @type product_usage: mongoengine cursor
    @param checkin_interval: the interval between client checkins;
        range 1-23, 24 = daily
    @type checkin_interval: int
    @return: (quarantined records, list of dicts w/ keys start, end
        measuring how long the import took)
    @rtype: tuple
    """
    # config fail/pass on missing rhic
    if not product_usage:
        product_usage = ProductUsage.objects.all()
    start_stop_time = []
    quarantined = []
    start = datetime.utcnow()
    # provide a way to throttle how often import can be run
    time = {}
    time['start'] = start.strftime(constants.full_format)
    time_now = datetime.utcnow()
    threshold = 0
    if config.CONFIG.has_option('import', 'quiet_period'):
        threshold = int(config.CONFIG.get('import', 'quiet_period'))
    last_import_threshold = time_now - timedelta(minutes=threshold)
    last_import = ImportHistory.objects.filter(
        date__gt=last_import_threshold).count()
    if last_import > 0 and not force_import:
        time['end'] = -1  # -1 signals that the import was skipped
        start_stop_time.append(time)
        _LOG.info("import skipped")
        return [], start_stop_time
    else:
        record = ImportHistory(date=start, splice_server=from_splice_server)
        record.save()
    # committing every 100 records instead of every 1 record saves about
    # 5 seconds.
    cached_rhics = {}
    cached_contracts = {}
    for pu in product_usage:
        uuid = pu.consumer
        # BEGIN SANITIZE THE PU DATA
        if not pu.allowed_product_info:
            _LOG.critical('product usage object does not have any '
                          'allowed products (engineering ids)')
            continue
        try:
            splice_server = SpliceServer.objects.get(uuid=pu.splice_server)
            _LOG.info('splice server = ' + splice_server.hostname)
        except Exception:
            _LOG.critical('splice server named: ' + str(pu.splice_server) +
                          ' not found')
            continue
        if uuid in cached_rhics:
            rhic = cached_rhics[uuid]
        else:
            try:
                _LOG.info('using RHIC: ' + uuid)
                rhic = RHIC.objects.filter(uuid=uuid)[0]
                cached_rhics[uuid] = rhic
            except IndexError:
                _LOG.critical('rhic not found @ import: ' + uuid)
                _LOG.critical('product usage object will NOT be imported')
                continue
        # END SANITIZE THE PU DATA
        account = Account.objects(
            account_id=rhic.account_id).only('contracts').first()
        # cache contract lookups per contract id
        contract = None
        if rhic.contract in cached_contracts:
            contract = cached_contracts[rhic.contract]
        else:
            for c in account.contracts:
                if c.contract_id == rhic.contract:
                    cached_contracts[rhic.contract] = c
                    contract = c
                    break
        # Set of used engineering ids for this checkin
        product_set = set(pu.allowed_product_info)
        # Iterate over each product in the contract, see if it matches sla
        # and support level, and consumed engineering ids. If so, save an
        # instance of ReportData.
        for product in contract.products:
            # Match on sla and support level
            if not (product.sla == rhic.sla and
                    product.support_level == rhic.support_level):
                continue
            # Set of engineering ids for this product.
            product_eng_id_set = set(product.engineering_ids)
            # If the set of engineering ids for the product is a subset of
            # the used engineering ids for this checkin, create an instance
            # of ReportData, check for dupes, and save the instance.
            if product_eng_id_set.issubset(product_set):
                # This line isn't technically necessary, but it improves
                # performance by making the set we need to search smaller
                # each time.
                product_set.difference_update(product_eng_id_set)
                for interval in range(checkin_interval):
                    td = timedelta(hours=interval)
                    this_time = pu.date + td
                    rd = ReportData(
                        instance_identifier=str(pu.instance_identifier),
                        consumer=rhic.name,
                        consumer_uuid=uuid,
                        product=product.engineering_ids,
                        product_name=product.name,
                        date=this_time,
                        hour=this_time.strftime(constants.hr_fmt),
                        day=pu.date.strftime(constants.day_fmt),
                        sla=product.sla,
                        support=product.support_level,
                        contract_id=rhic.contract,
                        contract_use=str(product.quantity),
                        memtotal=int(pu.facts['memory_dot_memtotal']),
                        cpu_sockets=int(pu.facts['lscpu_dot_cpu_socket(s)']),
                        cpu=int(pu.facts['lscpu_dot_cpu(s)']),
                        environment=str(splice_server.environment),
                        splice_server=str(splice_server.hostname),
                        duplicate=interval,
                        record_identifier=(rhic.name +
                                           str(pu.instance_identifier) +
                                           pu.date.strftime(constants.day_fmt) +
                                           product.name)
                    )
                    try:
                        rd.save(safe=True)
                        _LOG.info('recording: ' +
                                  str(product.engineering_ids))
                    except NotUniqueError:
                        _LOG.info("Ignoring NotUniqueError for: %s" % (rd))
                    except OperationError as oe:
                        _LOG.info("could not import: " + str(pu) +
                                  " Exception: " + str(oe))
                        quarantined.append(rd)
    end = datetime.utcnow()
    time['end'] = end.strftime(constants.full_format)
    start_stop_time.append(time)
    _LOG.info('import complete')
    return quarantined, start_stop_time
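# A minimal invocation sketch for import_data, assuming a mongoengine
# connection is already established and ProductUsage documents exist;
# force_import=True bypasses the quiet-period throttle checked above.
#
#     quarantined, timings = import_data(force_import=True)
#     for t in timings:
#         _LOG.info("import window: %s -> %s" % (t['start'], t['end']))
#
# Note that t['end'] is -1 when the run is skipped inside the quiet period.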
def drop_collections(self):
    ReportData.drop_collection()
def drop_report_data(self):
    ReportData.drop_collection()
def drop_collections(self):
    ReportData.drop_collection()
    QuarantinedReportData.drop_collection()