Example #1
    def setUp(self):
        # Connect to the Mongo results database and start from an empty
        # ReportData collection.
        db_name = settings.MONGO_DATABASE_NAME_RESULTS
        self.db = connect(db_name)
        ReportData.drop_collection()
        # Seed the reference products plus a single RHEL report entry.
        rhel_product = TestData.create_products()
        rhel_entry = TestData.create_entry(RHEL, memhigh=True)
        rhel_entry.save()
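These snippets reference several names that are defined elsewhere in the project; a minimal sketch of the assumed context (only the stdlib, MongoEngine and Django imports are standard APIs, the project-model paths are guesses):

# Assumed context for the setUp above; project-model import paths are illustrative.
import unittest
from datetime import datetime, timedelta

from django.conf import settings   # assumed source of MONGO_DATABASE_NAME_RESULTS
from mongoengine import connect    # opens the MongoDB connection used by the models

# ReportData, TestData, RHEL, products_dict, Product, RHIC and Product_Def are
# part of the project under test; their import paths are not shown here.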
Example #2
    def test_find_each_product(self):
        # Insert one entry per product and check that each save adds exactly
        # one document to the collection.
        ReportData.drop_collection()
        count = 0
        for key, value in products_dict.items():
            count += 1
            entry = TestData.create_entry(key, memhigh=True)
            entry.save(safe=True)
            lookup = len(ReportData.objects.all())
            self.assertEqual(lookup, count)

        # Search window: the last 24 hours.
        end = datetime.now()
        delta = timedelta(days=1)
        start = datetime.now() - delta

        # Each product should match exactly one report row in that window.
        for key, value in products_dict.items():
            print(key)
            p = Product.objects.filter(name=key)[0]
            print(p.name)

            rhic = RHIC.objects.filter(uuid=value[1])[0]
            print(rhic.uuid)
            print(rhic.contract)
            results_dicts = Product_Def.get_product_match(
                p, rhic, start, end, rhic.contract, "us-east-1")
            self.assertEqual(len(results_dicts), 1)
Example #3
    def create_entry(product, memhigh=True, date=None):
        # Build a single ReportData row for `product`; memhigh toggles between
        # a large and a small total-memory value.
        if not date:
            date = datetime.now()
        this_hour = date.strftime(hr_fmt)

        row = ReportData(instance_identifier="12:31:3D:08:49:00",
                         date=date,
                         hour=this_hour,
                         memtotal=16048360,
                         cpu_sockets=4,
                         environment="us-east-1",
                         splice_server="splice-server-1.spliceproject.org")

        # Fill in the product/consumer fields from the matching products_dict
        # entry and pull contract details from the corresponding RHIC.
        for key, value in products_dict.items():
            if product == key:
                rhic = RHIC.objects.filter(uuid=value[1])[0]
                row['product_name'] = key
                row['product'] = value[0]
                row['consumer_uuid'] = value[1]
                row['consumer'] = value[2]
                row['contract_id'] = rhic.contract
                row['sla'] = rhic.sla
                row['support'] = rhic.support_level
                row['contract_use'] = "20"

        row['memtotal'] = 16048360 if memhigh else 640
        return row
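create_entry and test_find_each_product index each products_dict value by position; a sketch of the shape that implies (the key, engineering id, uuid and consumer name below are invented placeholders):

# Shape implied by the lookups above; every literal here is a made-up example.
products_dict = {
    # product name: [engineering ids, RHIC uuid, consumer name]
    "RHEL Server": [["69"], "8d401b5e-2fa5-4cb6-be64-5f57386fda86",
                    "rhel-server-consumer"],
}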
Example #4
def checkin_data():
    # config decides whether a checkin that references a missing RHIC fails
    # the import or is skipped
    config.init()
    cfg = config.get_import_info()

    results = []

    # debug: record wall-clock start/end times for the import
    time_fmt = "%a %b %d %H:%M:%S %Y"
    start = datetime.utcnow()
    time = {}
    time['start'] = start.strftime(time_fmt)

    hr_fmt = "%m%d%Y:%H"

    # committing every 100 records instead of every 1 record saves about 5
    # seconds.
    commit_count = 100
    cached_rhics = {}
    cached_contracts = {}
    rds = []

    for pu in ProductUsage.objects.all():
        uuid = pu.consumer

        if uuid in cached_rhics:
            rhic = cached_rhics[uuid]
        else:
            try:
                _LOG.info('using RHIC: ' + uuid)
                rhic = RHIC.objects.filter(uuid=uuid)[0]
                cached_rhics[uuid] = rhic
            except IndexError:
                _LOG.critical('rhic not found @ import: ' + uuid)
                if cfg['continue_on_error'] == 0:
                    raise Exception('rhic not found: ' + uuid)
                else:
                    continue

        account = Account.objects(
            account_id=rhic.account_id).only('contracts').first()

        contract = None
        if rhic.contract in cached_contracts:
            contract = cached_contracts[rhic.contract]
        else:
            # Find the contract on the account that matches this RHIC and
            # cache it by contract id for later checkins.
            for candidate in account.contracts:
                if candidate.contract_id == rhic.contract:
                    cached_contracts[rhic.contract] = candidate
                    contract = candidate
                    break

        # Set of used engineering ids for this checkin
        product_set = set(pu.allowed_product_info)

        # Iterate over each product in the contract, see if it matches sla and
        # support level, and consumed engineering ids.  If so, save an instance
        # of ReportData
        for product in contract.products:
            # Match on sla and support level
            if not (product.sla == rhic.sla
                    and product.support_level == rhic.support_level):
                continue

            # Set of engineering ids for this product.
            product_eng_id_set = set(product.engineering_ids)

            # If the set of engineering ids for the product is a subset of the
            # used engineering ids for this checkin, create an instance of
            # ReportData, check for dupes, and save the instance.
            if product_eng_id_set.issubset(product_set):
                # This line isn't technically necessary, but it improves
                # performance by making the set we need to search smaller each
                # time.
                product_set.difference_update(product_eng_id_set)
                splice_server = SpliceServer.objects.get(
                    id=pu.splice_server.id)

                rd = ReportData(
                    instance_identifier=str(pu.instance_identifier),
                    consumer=rhic.name,
                    consumer_uuid=uuid,
                    product=product.engineering_ids,
                    product_name=product.name,
                    date=pu.date,
                    hour=pu.date.strftime(hr_fmt),
                    sla=product.sla,
                    support=product.support_level,
                    contract_id=rhic.contract,
                    contract_use=str(product.quantity),
                    memtotal=int(pu.facts['memory_dot_memtotal']),
                    cpu_sockets=int(pu.facts['lscpu_dot_cpu_socket(s)']),
                    environment=str(splice_server.environment),
                    splice_server=str(splice_server.hostname))

                # If there's a dupe, log it instead of saving a new record.
                dupe = ReportData.objects.filter(
                    consumer_uuid=rhic.uuid,
                    instance_identifier=str(pu.instance_identifier),
                    hour=pu.date.strftime(hr_fmt),
                    product=product.engineering_ids)
                if dupe:
                    _LOG.info("found dupe:" + str(pu))
                else:
                    _LOG.info('recording: ' + str(product.engineering_ids))
                    # rd.save()
                    rds.append(rd)

        if rds and len(rds) % commit_count == 0:
            ReportData.objects.insert(rds)
            rds = []

    if rds:
        ReportData.objects.insert(rds)

    end = datetime.utcnow()
    time['end'] = end.strftime(time_fmt)
    results.append(time)
    _LOG.info('import complete')

    return json.dumps(time)
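checkin_data returns the start/end timing dictionary serialized as JSON, so a caller can log or surface it directly; a minimal usage sketch (the calling code shown here is hypothetical):

# Hypothetical caller: run the import and log the timing it reports.
timing_json = checkin_data()
_LOG.info('import timing: ' + timing_json)  # e.g. '{"start": "...", "end": "..."}'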