def test_facts_has_changed_cache_exists_false(self, mock_load_hw, mock_load_cf, mock_read_cache):
    """Facts must report a change when no cache file exists on disk."""
    # Feed the collector canned facts instead of probing real hardware.
    canned_facts = json.loads(facts_buf)
    mock_load_hw.return_value = canned_facts
    self.assertTrue(self.f.has_changed())
def test_facts_has_changed_cache_is_none(self, mock_load_hw, mock_load_cf, mock_read_cache):
    """A None cache counts as changed, and collected facts are retained."""
    canned_facts = json.loads(facts_buf)
    mock_load_hw.return_value = canned_facts
    result = self.f.has_changed()
    # The collected facts should now be held on the Facts object itself.
    self.assert_equal_dict(canned_facts, self.f.facts)
    self.assertTrue(result)
def test_package_json_missing_attributes(self):
    """Round-tripping a Package with all-None fields keeps every field None."""
    empty_pkg = Package(name=None, version=None, release=None, arch=None, vendor=None)
    # Serialize to JSON and parse it back.
    round_tripped = json.loads(json.dumps(empty_pkg.to_dict()))
    for field in ('name', 'version', 'release', 'arch', 'vendor'):
        self.assertEqual(None, round_tripped[field])
def setUp(self):
    """Install a mocked product status cache and build the calculator under test."""
    SubManFixture.setUp(self)
    # Canned installed-product status parsed from the fixture JSON.
    self.status = json.loads(INST_PROD_STATUS)['installedProducts']
    self.prod_status_cache = NonCallableMock()
    self.prod_status_cache.load_status = Mock(return_value=self.status)
    # Register the mock so injected PROD_STATUS_CACHE lookups resolve to it.
    inj.provide(inj.PROD_STATUS_CACHE, self.prod_status_cache)
    self.calculator = ValidProductDateRangeCalculator(None)
def test_package_json_handles_non_unicode(self):
    """Undecodable byte fields must round-trip as the U+FFFD replacement char."""
    pkg = Package(name=b'\xf6', version=b'\xf6', release=b'\xf6', arch=b'\xf6', vendor=b'\xf6')
    # Serialize to JSON and parse it back.
    round_tripped = json.loads(json.dumps(pkg.to_dict()))
    for field in ('name', 'version', 'release', 'arch', 'vendor'):
        self.assertEqual(u'\ufffd', round_tripped[field])
def check_for_update(self):
    """Query the server for updated data, sending the cached lastUpdate
    timestamp as an If-Modified-Since hint when a valid cache exists.

    A missing or corrupted cache file simply means "no hint": the query is
    made unconditionally instead of crashing on bad cached JSON (this
    matches the hardened variant of this method used elsewhere).
    """
    last_update = None
    if self.exists():
        try:
            data = json.loads(self.read())
            last_update = parse_date(data["lastUpdate"])
        except (ValueError, KeyError) as err:
            # Corrupted or incomplete cache: fall back to an unconditional query.
            log.debug("Cache file {file} is corrupted: {err}".format(
                file=self.CACHE_FILE, err=err))
    return self._query_for_update(if_modified_since=last_update)
def test_write_cache(self):
    """write_cache() spawns a writer thread; wait for it, then verify the file."""
    mock_server_status = {'fake server status': random.uniform(1, 2 ** 32)}
    status_cache = EntitlementStatusCache()
    status_cache.server_status = mock_server_status
    cache_dir = tempfile.mkdtemp()
    cache_file = os.path.join(cache_dir, 'status_cache.json')
    status_cache.CACHE_FILE = cache_file
    status_cache.write_cache()

    def thread_active(name):
        # True while any live thread carries the given name.
        return any(t.getName() == name for t in threading.enumerate())

    try:
        # If the file exists, and the thread that writes it does not, we know
        # writing has completed. Bound the wait so a broken writer fails the
        # test instead of hanging the whole suite.
        deadline = time.time() + 30
        while not (os.path.exists(cache_file)
                   and not thread_active("WriteCacheEntitlementStatusCache")):
            if time.time() > deadline:
                self.fail("Timed out waiting for the cache writer thread")
            time.sleep(0.01)
        new_status = json.loads(open(cache_file).read())
        # assertEquals is a deprecated alias; use assertEqual.
        self.assertEqual(new_status, mock_server_status)
    finally:
        shutil.rmtree(cache_dir)
def __init__(self, from_file=None):
    """
    Load the RPM package profile from a given file, or from rpm itself.

    NOTE: from_file is a file descriptor, not a file name.
    """
    if from_file:
        # Rehydrate a profile that was previously serialized to JSON.
        log.debug("Loading RPM profile from file.")
        pkg_dicts = json.loads(from_file.read())
        self.packages = [
            Package(name=entry['name'],
                    version=entry['version'],
                    release=entry['release'],
                    arch=entry['arch'],
                    epoch=entry['epoch'],
                    vendor=entry['vendor'])
            for entry in pkg_dicts]
    else:
        # Query the local rpm database for the installed package set.
        log.debug("Loading current RPM profile.")
        ts = rpm.TransactionSet()
        ts.setVSFlags(-1)
        self.packages = self._accumulate_profile(ts.dbMatch())
def test_facts_has_changed_cache_exists_false(self, mock_collect, mock_read_cache):
    """Facts must report a change when the cache file does not exist."""
    canned_facts = json.loads(facts_buf)
    mock_collect.return_value = canned_facts
    self.assertTrue(self.f.has_changed())
def test_write_cache(self):
    """write_cache() runs in a background thread; poll for its output file."""
    mock_server_status = {'fake server status': random.uniform(1, 2 ** 32)}
    status_cache = EntitlementStatusCache()
    status_cache.server_status = mock_server_status
    cache_dir = tempfile.mkdtemp()
    cache_file = os.path.join(cache_dir, 'status_cache.json')
    status_cache.CACHE_FILE = cache_file
    status_cache.write_cache()
    # try to load the file 5 times, if we still can't read it, fail.
    # We don't know when the write_cache thread ends or when it starts;
    # we would need to track the cache threads but we do not.
    # Initialize new_status so total failure produces a clean assertion
    # error instead of a NameError on the final assertEqual.
    new_status = None
    tries = 0
    while tries <= 5:
        try:
            new_status_buf = open(cache_file).read()
            new_status = json.loads(new_status_buf)
            break
        except Exception as e:
            log.exception(e)
            tries += 1
            time.sleep(.1)
            continue
    shutil.rmtree(cache_dir)
    self.assertEqual(new_status, mock_server_status)
def test_facts_has_changed_cache_is_none(self, mock_collect, mock_read_cache):
    """A None cache counts as changed, and collected facts are retained."""
    canned_facts = json.loads(facts_buf)
    mock_collect.return_value = canned_facts
    result = self.f.has_changed()
    self.assert_equal_dict(canned_facts, self.f.facts)
    self.assertTrue(result)
def test_package_json_as_unicode_type(self):
    """Unicode text fields must survive the JSON round trip unchanged."""
    # note that the data type at time of writing is bytes, so this is just defensive coding
    pkg = Package(name=u'Björk', version=u'Björk', release=u'Björk', arch=u'Björk', vendor=u'Björk')
    round_tripped = json.loads(json.dumps(pkg.to_dict()))
    for field in ('name', 'version', 'release', 'arch', 'vendor'):
        self.assertEqual(u'Björk', round_tripped[field])
def _print_products(self, zip_archive):
    """Print a "Subscription:" section for every entitlement JSON in the
    manifest zip: pool metadata, product attributes, the on-disk file
    locations, provided products and (unless --no-content) content sets.

    Raises certificate.CertificateException when an entitlement certificate
    in the archive cannot be parsed.
    """
    entitlements = zip_archive._get_entitlements()
    if len(entitlements) == 0:
        self._print_section(_("Subscriptions:"), [["None"]], 1, True)
        return
    for ent_file in entitlements:
        part = zip_archive._read_file(ent_file)
        data = json.loads(part)
        to_print = []
        to_print.append((_("Name"), get_value(data, "pool.productName")))
        to_print.append((_("Quantity"), get_value(data, "quantity")))
        to_print.append((_("Created"), get_value(data, "created")))
        to_print.append((_("Start Date"), get_value(data, "startDate")))
        to_print.append((_("End Date"), get_value(data, "endDate")))
        to_print.append((_("Service Level"), self._get_product_attribute("support_level", data)))
        to_print.append((_("Service Type"), self._get_product_attribute("support_type", data)))
        to_print.append((_("Architectures"), self._get_product_attribute("arch", data)))
        to_print.append((_("SKU"), get_value(data, "pool.productId")))
        to_print.append((_("Contract"), get_value(data, "pool.contractNumber")))
        to_print.append((_("Order"), get_value(data, "pool.orderNumber")))
        to_print.append((_("Account"), get_value(data, "pool.accountNumber")))
        virt_limit = self._get_product_attribute("virt_limit", data)
        to_print.append((_("Virt Limit"), virt_limit))
        # Any virt limit at all implies virt-who is required.
        require_virt_who = False
        if virt_limit:
            require_virt_who = True
        to_print.append((_("Requires Virt-who"), require_virt_who))
        entitlement_file = os.path.join("export", "entitlements", "%s.json" % data["id"])
        to_print.append((_("Entitlement File"), entitlement_file))
        #Get the certificate to get the version
        serial = data["certificates"][0]["serial"]["id"]
        cert_file = os.path.join("export", "entitlement_certificates", "%s.pem" % serial)
        to_print.append((_("Certificate File"), cert_file))
        try:
            cert = certificate.create_from_pem(zip_archive._read_file(cert_file))
        except certificate.CertificateException, ce:
            raise certificate.CertificateException(
                _("Unable to read certificate file '%s': %s") % (cert_file, ce))
        to_print.append((_("Certificate Version"), cert.version))
        self._print_section(_("Subscription:"), to_print, 1, False)
        # Get the provided Products
        to_print = [(int(pp["productId"]), pp["productName"])
                    for pp in data["pool"]["providedProducts"]]
        self._print_section(_("Provided Products:"), sorted(to_print), 2, False)
        # Get the Content Sets
        if not self.options.no_content:
            to_print = [[item.url] for item in cert.content]
            self._print_section(_("Content Sets:"), sorted(to_print), 2, True)
        else:
            # bz#1369577: print a blank line to separate subscriptions when --no-content in use
            print ""
def test_facts_has_changed_with_change(self, mock_collect):
    """Altering a cached fact value must be detected as a change."""
    altered = json.loads(facts_buf)
    # change socket fact count from what is in the cache
    altered["cpu.cpu_socket(s)"] = "16"
    mock_collect.return_value = altered
    result = self.f.has_changed()
    self.assertEqual(self.f.facts["cpu.cpu_socket(s)"], "16")
    self.assertTrue(result)
def test_facts_has_changed_with_change(self, mock_load_hw, mock_load_cf):
    """Altering a cached fact value must be detected as a change."""
    test_facts = json.loads(facts_buf)
    # change socket fact count from what is in the cache
    test_facts['cpu.cpu_socket(s)'] = '16'
    mock_load_hw.return_value = test_facts
    changed = self.f.has_changed()
    # assertEquals is a deprecated alias; use assertEqual.
    self.assertEqual(self.f.facts['cpu.cpu_socket(s)'], '16')
    self.assertTrue(changed)
def _parse_facts_json(self, json_buffer, file_path):
    """Parse a custom-facts JSON buffer.

    :param json_buffer: raw JSON text read from the facts file
    :param file_path: path, used only in the warning message
    :return: the parsed object, or None when the buffer is not valid JSON
    """
    custom_facts = None
    try:
        custom_facts = json.loads(json_buffer)
    except ValueError:
        # log.warn is a deprecated alias of log.warning.
        log.warning("Unable to load custom facts file: %s" % file_path)
    return custom_facts
def test_facts_has_changed_with_change(self, mock_collect):
    """A modified socket-count fact is reported as a change."""
    altered = json.loads(facts_buf)
    # change socket fact count from what is in the cache
    altered['cpu.cpu_socket(s)'] = '16'
    mock_collect.return_value = altered
    result = self.f.has_changed()
    self.assertEqual(self.f.facts['cpu.cpu_socket(s)'], '16')
    self.assertTrue(result)
def _print_consumer(self, zip_archive):
    """Print the Consumer section from the manifest's consumer.json."""
    consumer = json.loads(zip_archive._read_file(os.path.join("export", "consumer.json")))
    rows = [
        (_("Name"), get_value(consumer, "name")),
        (_("UUID"), get_value(consumer, "uuid")),
        (_("Type"), get_value(consumer, "type.label")),
    ]
    self._print_section(_("Consumer:"), rows)
def test_facts_has_changed_with_change(self, mock_load_hw, mock_load_cf):
    """Altering a cached fact value must be detected as a change."""
    test_facts = json.loads(facts_buf)
    # change socket fact count from what is in the cache
    test_facts["cpu.cpu_socket(s)"] = "16"
    mock_load_hw.return_value = test_facts
    changed = self.f.has_changed()
    # assertEquals is a deprecated alias; use assertEqual.
    self.assertEqual(self.f.facts["cpu.cpu_socket(s)"], "16")
    self.assertTrue(changed)
def from_json(cls, json_blob):
    """Build a custom-facts holder from a JSON string.

    Invalid JSON yields empty facts rather than raising. Defaulting data
    to {} also fixes an UnboundLocalError on the ValueError path (the
    BZ#1435771 fix already applied to the sibling implementation).
    """
    custom_facts = cls
    # Default to no facts collected
    # See BZ#1435771
    data = {}
    try:
        data = ourjson.loads(json_blob)
    except ValueError:
        log.warn("Unable to load custom facts file.")
    custom_facts.data = data
    return custom_facts
def _print_general(self, zip_archive):
    """Print the General section from the manifest's meta.json."""
    # Print out general data
    meta = json.loads(zip_archive._read_file(os.path.join("export", "meta.json")))
    rows = [
        (_("Server"), get_value(meta, "webAppPrefix")),
        (_("Server Version"), get_value(meta, "version")),
        (_("Date Created"), get_value(meta, "created")),
        (_("Creator"), get_value(meta, "principalName")),
    ]
    self._print_section(_("General:"), rows)
def test_package_json_missing_vendor(self):
    """A None vendor stays None through the JSON round trip."""
    pkg = Package(name="package1", version="1.0.0", release=1, arch="x86_64", vendor=None)
    round_tripped = json.loads(json.dumps(pkg.to_dict()))
    self.assertEqual(None, round_tripped['vendor'])
def _load_data(self, open_file):
    """Parse cached content-access-mode JSON from an open file object.

    Returns the parsed data ({} for a falsy payload) and caches it on
    self; returns None on read or parse failure.
    """
    try:
        parsed = json.loads(open_file.read()) or {}
        self.content_access_mode = parsed
        return parsed
    except IOError as err:
        log.error("Unable to read cache: %s" % self.CACHE_FILE)
        log.exception(err)
    except ValueError:
        # Ignore json file parse error
        pass
def _load_data(self, open_file):
    """Parse cached supported-resources JSON from an open file object.

    Returns the parsed data ({} for a falsy payload) and caches it on
    self; returns None on read or parse failure.
    """
    try:
        parsed = json.loads(open_file.read()) or {}
        self.supported_resources = parsed
        return parsed
    except IOError as err:
        log.error("Unable to read cache: %s" % self.CACHE_FILE)
        log.exception(err)
    except ValueError:
        # Ignore json file parse error
        pass
def _load_data(self, open_file):
    """Parse cached available-entitlements JSON from an open file object.

    Returns the parsed data ({} for a falsy payload) and caches it on
    self; returns None on read or parse failure.
    """
    try:
        parsed = json.loads(open_file.read()) or {}
        self.available_entitlements = parsed
        return parsed
    except IOError as err:
        log.error("Unable to read cache: %s" % self.CACHE_FILE)
        log.exception(err)
    except ValueError:
        # Ignore json file parse error
        pass
def test_package_json_handles_non_unicode(self):
    """An undecodable vendor byte becomes U+FFFD after the round trip."""
    pkg = Package(name="package1", version="1.0.0", release=1, arch="x86_64", vendor=b'\xf6')
    round_tripped = json.loads(json.dumps(pkg.to_dict()))
    self.assertEqual(u'\ufffd', round_tripped['vendor'])
def _load_data(self, open_file):
    """Parse cached overrides JSON from an open file object.

    Returns the parsed data ({} for a falsy payload) and caches it on
    self; returns None on read or parse failure.
    """
    try:
        parsed = json.loads(open_file.read()) or {}
        self.overrides = parsed
        return parsed
    except IOError as err:
        log.error("Unable to read cache: %s" % self.CACHE_FILE)
        log.exception(err)
    except ValueError:
        # ignore json file parse errors, we are going to generate
        # a new as if it didn't exist
        pass
def test_package_json_vendor_as_unicode_type(self):
    """A unicode vendor value survives the JSON round trip unchanged."""
    # note that the data type at time of writing is bytes, so this is just defensive coding
    pkg = Package(name="package1", version="1.0.0", release=1, arch="x86_64", vendor=u'Björk')
    round_tripped = json.loads(json.dumps(pkg.to_dict()))
    self.assertEqual(u'Björk', round_tripped['vendor'])
def check_for_update(self):
    """Query the server for updates, using the cached lastUpdate timestamp
    as an If-Modified-Since hint when the cache is present and readable."""
    if not self.exists():
        return self._query_for_update(if_modified_since=None)
    try:
        cached = json.loads(self.read())
        last_update = parse_date(cached["lastUpdate"])
    except (ValueError, KeyError) as err:
        # A corrupt cache simply means we cannot supply a hint.
        log.debug("Cache file {file} is corrupted: {err}".format(
            file=self.CACHE_FILE, err=err))
        last_update = None
    return self._query_for_update(if_modified_since=last_update)
def _decompress_payload(self, payload):
    """
    Certificate payloads arrive in zlib compressed strings of JSON.
    This method de-compresses and parses the JSON and returns the
    resulting dict.
    """
    try:
        decompressed = zlib.decompress(payload)
        return json.loads(decompressed)
    except Exception as e:
        # "except Exception, e" is Python-2-only syntax (a SyntaxError on
        # Python 3); "as" works on Python 2.6+ and 3.
        log.exception(e)
        raise CertificateException("Error decompressing/parsing "
                                   "certificate payload.")
def from_json(cls, json_blob):
    """Build a custom-facts holder from a JSON string.

    Invalid JSON results in an empty facts dict instead of an exception.
    """
    custom_facts = cls
    try:
        parsed = ourjson.loads(json_blob)
    except ValueError:
        log.warn("Unable to load custom facts file.")
        # Default to no facts collected
        # See BZ#1435771
        parsed = {}
    custom_facts.data = parsed
    return custom_facts
def _decompress_payload(self, payload):
    """
    Certificate payloads arrive in zlib compressed strings of JSON.
    This method de-compresses and parses the JSON and returns the
    resulting dict.
    """
    try:
        raw_json = zlib.decompress(payload).decode('utf-8')
        return json.loads(raw_json)
    except Exception as e:
        log.exception(e)
        raise CertificateException("Error decompressing/parsing "
                                   "certificate payload.")
def read_avail_pkgs_cache(self):
    """Read and parse the available-packages cache file.

    Returns the parsed data, or None when the file is unreadable or holds
    invalid JSON (a fresh cache will be generated in that case).
    """
    try:
        with open(self.CACHE_FILE) as cache_fp:
            return json.loads(cache_fp.read())
    except IOError as err:
        log.error("Unable to read cache: %s" % self.CACHE_FILE)
        log.exception(err)
    except ValueError:
        # ignore json file parse errors, we are going to generate
        # a new as if it didn't exist
        pass
    return None
def __read_cache_file(file_name):
    """Read and parse a JSON cache file, returning None on any failure.

    IOErrors are logged; JSON parse errors are silently ignored since the
    cache will simply be regenerated.
    """
    try:
        with open(file_name) as cache_fp:
            return json.loads(cache_fp.read())
    except IOError as err:
        log.error("Unable to read cache: %s" % file_name)
        log.exception(err)
    except ValueError:
        # ignore json file parse errors, we are going to generate
        # a new as if it didn't exist
        pass
    return None
def _print_consumer(self, zip_archive):
    """Print the Consumer section, including the content access mode."""
    # Print out the consumer data
    consumer = json.loads(zip_archive._read_file(os.path.join("export", "consumer.json")))
    # contentAccessMode is entitlement if null, blank or non-present
    access_mode = 'entitlement'
    if consumer.get("contentAccessMode") == 'org_environment':
        access_mode = 'org_environment'
    rows = [
        (_("Name"), get_value(consumer, "name")),
        (_("UUID"), get_value(consumer, "uuid")),
        (_("Content Access Mode"), access_mode),
        (_("Type"), get_value(consumer, "type.label")),
    ]
    self._print_section(_("Consumer:"), rows)
def _print_consumer(self, zip_archive):
    """Print the Consumer section, including access mode and server URLs."""
    # Print out the consumer data
    consumer = json.loads(zip_archive._read_file(os.path.join("export", "consumer.json")))
    # contentAccessMode is entitlement if null, blank or non-present
    access_mode = 'entitlement'
    if consumer.get("contentAccessMode") == 'org_environment':
        access_mode = 'org_environment'
    rows = [
        (_("Name"), get_value(consumer, "name")),
        (_("UUID"), get_value(consumer, "uuid")),
        (_("Content Access Mode"), access_mode),
        (_("Type"), get_value(consumer, "type.label")),
        (_("API URL"), get_value(consumer, "urlApi")),
        (_("Web URL"), get_value(consumer, "urlWeb")),
    ]
    self._print_section(_("Consumer:"), rows)
def validateResponse(self, response, request_type=None, handler=None):
    """Inspect an HTTP response dict for error statuses.

    Attempts to JSON-parse the body of non-200/204 responses so callers
    can surface server-provided error details; parse failures are logged.
    """
    # FIXME: what are we supposed to do with a 204?
    if str(response['status']) not in ["200", "204"]:
        parsed = {}
        if not response.get('content'):
            parsed = {}
        else:
            # try vaguely to see if it had a json parseable body
            try:
                parsed = json.loads(response['content'], object_hook=self._decode_dict)
            except ValueError as e:
                # "except X, e" is Python-2-only syntax; "as" parses on
                # both Python 2.6+ and Python 3.
                log.error("Response: %s" % response['status'])
                log.error("JSON parsing error: %s" % e)
            except Exception as e:
                log.error("Response: %s" % response['status'])
                log.exception(e)
def test_json_uft8_encoding(self): # A unicode string containing JSON test_json = u""" { "firstName": "John", "message": "こんにちは世界", "address": { "street": "21 2nd Street" }, "phoneNumbers": [ [ { "type": "home", "number": "212 555-1234" }, { "type": "fax", "number": "646 555-4567" } ] ] } """ data = json.loads(test_json) self.assertTrue(isinstance(data["message"], type(u""))) # Access a value deep in the structure to make sure we recursed down. self.assertTrue(isinstance(data["phoneNumbers"][0][0]["type"], type(u"")))
def test_json_uft8_encoding(self):
    """_decode_dict must yield str values for all decoded JSON strings."""
    # A unicode string containing JSON
    test_json = u"""
        {
            "firstName": "John",
            "message": "こんにちは世界",
            "address": { "street": "21 2nd Street" },
            "phoneNumbers": [
                [
                    { "type": "home", "number": "212 555-1234" },
                    { "type": "fax", "number": "646 555-4567" }
                ]
            ]
        }
        """
    restlib = Restlib("somehost", "123", "somehandler")
    parsed = json.loads(test_json, object_hook=restlib._decode_dict)
    self.assertTrue(isinstance(parsed["message"], str))
    # Access a value deep in the structure to make sure we recursed down.
    self.assertTrue(isinstance(parsed["phoneNumbers"][0][0]["type"], str))
def test_write_facts(self, mock_load_hw, mock_load_cf):
    """Facts written to a fresh cache file can be read back intact."""
    mock_load_hw.return_value = {
        "net.interface.lo.ipv4_address": "127.0.0.1",
        "cpu.cpu_socket(s)": "128",
        "newstuff": "newstuff_is_true",
    }
    fact_cache_dir = tempfile.mkdtemp()
    fact_cache = fact_cache_dir + "/facts.json"
    # write to a new file
    self.f.fact_cache_dir = fact_cache_dir
    self.f.CACHE_FILE = fact_cache
    # mocking load_hw_facts and load_custom_facts neuters get_facts
    try:
        self.f.write_cache()
        new_facts = json.loads(open(fact_cache).read())
        # assertEquals is a deprecated alias; use assertEqual.
        self.assertEqual(new_facts["newstuff"], "newstuff_is_true")
    finally:
        # Clean up the temp dir instead of leaking it.
        shutil.rmtree(fact_cache_dir)
def test_write_cache(self):
    """write_cache() spawns a writer thread; wait for it, then verify the file."""
    mock_server_status = {'fake server status': random.uniform(1, 2 ** 32)}
    status_cache = EntitlementStatusCache()
    status_cache.server_status = mock_server_status
    cache_dir = tempfile.mkdtemp()
    cache_file = os.path.join(cache_dir, 'status_cache.json')
    status_cache.CACHE_FILE = cache_file
    status_cache.write_cache()

    def thread_active(name):
        # True while any live thread carries the given name.
        return any(t.getName() == name for t in threading.enumerate())

    try:
        # If the file exists, and the thread that writes it does not, we know
        # writing has completed. Bound the wait so a broken writer fails the
        # test instead of hanging the whole suite.
        deadline = time.time() + 30
        while not (os.path.exists(cache_file)
                   and not thread_active("WriteCacheEntitlementStatusCache")):
            if time.time() > deadline:
                self.fail("Timed out waiting for the cache writer thread")
            time.sleep(0.01)
        new_status = json.loads(open(cache_file).read())
        # assertEquals is a deprecated alias; use assertEqual.
        self.assertEqual(new_status, mock_server_status)
    finally:
        shutil.rmtree(cache_dir)
def _load_data(self, open_file):
    """Parse and return the JSON document held in the given open file."""
    return json.loads(open_file.read())
def _request(self, request_type, method, info=None):
    """Issue one HTTPS request to the server and return the parsed JSON body.

    :param request_type: HTTP verb, e.g. "GET" or "POST"
    :param method: API path appended to self.apihandler
    :param info: optional object serialized to JSON as the request body
    :return: parsed JSON response, or None for an empty body (e.g. 204)
    :raises ExpiredIdentityCertException: when an SSL failure coincides
        with an expired identity certificate
    """
    handler = self.apihandler + method
    # See M2Crypto/SSL/Context.py in m2crypto source and
    # https://www.openssl.org/docs/ssl/SSL_CTX_new.html
    # This ends up invoking SSLv23_method, which is the catch all
    # "be compatible" protocol, even though it explicitly is not
    # using sslv2. This will by default potentially include sslv3
    # if not used with post-poodle openssl. If however, the server
    # intends to not offer sslv3, it's workable.
    #
    # So this supports tls1.2, 1.1, 1.0, and/or sslv3 if supported.
    context = SSL.Context("sslv23")
    # Disable SSLv2 and SSLv3 support to avoid poodles.
    context.set_options(m2.SSL_OP_NO_SSLv2 | m2.SSL_OP_NO_SSLv3)
    if self.insecure:
        # allow clients to work insecure mode if required..
        context.post_connection_check = NoOpChecker()
    else:
        # Proper peer verification is essential to prevent MITM attacks.
        context.set_verify(SSL.verify_peer | SSL.verify_fail_if_no_peer_cert,
                           self.ssl_verify_depth)
        if self.ca_dir is not None:
            self._load_ca_certificates(context)
    if self.cert_file and os.path.exists(self.cert_file):
        context.load_cert(self.cert_file, keyfile=self.key_file)
    if self.proxy_hostname and self.proxy_port:
        log.debug("Using proxy: %s:%s" % (self.proxy_hostname, self.proxy_port))
        conn = RhsmProxyHTTPSConnection(self.proxy_hostname, self.proxy_port,
                                        username=self.proxy_user,
                                        password=self.proxy_password,
                                        ssl_context=context)
        # this connection class wants the full url
        handler = "https://%s:%s%s" % (self.host, self.ssl_port, handler)
    else:
        conn = httpslib.HTTPSConnection(self.host, self.ssl_port, ssl_context=context)
    if info is not None:
        body = json.dumps(info, default=json.encode)
    else:
        body = None
    log.debug("Making request: %s %s" % (request_type, handler))
    headers = self.headers
    if body is None:
        # NOTE(review): Python 2 only -- dict.items() lists concatenated
        # with "+"; this line would raise TypeError on Python 3.
        headers = dict(self.headers.items() + {"Content-Length": "0"}.items())
    # NOTE: alters global timeout_altered (and socket timeout)
    set_default_socket_timeout_if_python_2_3()
    try:
        conn.request(request_type, handler, body=body, headers=headers)
    except SSLError:
        # An expired identity certificate is a common cause of SSL failures;
        # surface it as a distinct exception when detected.
        if self.cert_file:
            id_cert = certificate.create_from_file(self.cert_file)
            if not id_cert.is_valid():
                raise ExpiredIdentityCertException()
        raise
    response = conn.getresponse()
    result = {
        "content": response.read(),
        "status": response.status,
    }
    response_log = 'Response: status=' + str(result['status'])
    if response.getheader('x-candlepin-request-uuid'):
        response_log = "%s, requestUuid=%s" % (response_log,
                                               response.getheader('x-candlepin-request-uuid'))
    log.debug(response_log)
    # Look for server drift, and log a warning
    if drift_check(response.getheader('date')):
        log.warn("Clock skew detected, please check your system time")
    # FIXME: we should probably do this in a wrapper method
    # so we can use the request method for normal http
    self.validateResponse(result, request_type, handler)
    # handle empty, but succesful responses, ala 204
    if not len(result['content']):
        return None
    return json.loads(result['content'], object_hook=self._decode_dict)
def _request(self, request_type, method, info=None):
    """Issue one HTTPS (TLSv1) request to the server and return parsed JSON.

    :param request_type: HTTP verb, e.g. "GET" or "POST"
    :param method: API path appended to self.apihandler
    :param info: optional object serialized to JSON as the request body
    :return: parsed JSON response, or None for an empty body (e.g. 204)
    :raises ExpiredIdentityCertException: when an SSL failure coincides
        with an expired identity certificate
    """
    handler = self.apihandler + method
    context = SSL.Context("tlsv1")
    if self.insecure:
        # allow clients to work insecure mode if required..
        context.post_connection_check = NoOpChecker()
    else:
        context.set_verify(SSL.verify_fail_if_no_peer_cert, self.ssl_verify_depth)
        if self.ca_dir is not None:
            self._load_ca_certificates(context)
    if self.cert_file and os.path.exists(self.cert_file):
        context.load_cert(self.cert_file, keyfile=self.key_file)
    if self.proxy_hostname and self.proxy_port:
        log.debug("Using proxy: %s:%s" % (self.proxy_hostname, self.proxy_port))
        conn = RhsmProxyHTTPSConnection(self.proxy_hostname, self.proxy_port,
                                        username=self.proxy_user,
                                        password=self.proxy_password,
                                        ssl_context=context)
        # this connection class wants the full url
        handler = "https://%s:%s%s" % (self.host, self.ssl_port, handler)
    else:
        conn = httpslib.HTTPSConnection(self.host, self.ssl_port, ssl_context=context)
    if info is not None:
        body = json.dumps(info)
    else:
        body = None
    log.debug("Making request: %s %s" % (request_type, handler))
    headers = self.headers
    if body is None:
        # NOTE(review): Python 2 only -- dict.items() lists concatenated
        # with "+"; this line would raise TypeError on Python 3.
        headers = dict(self.headers.items() + {"Content-Length": "0"}.items())
    try:
        conn.request(request_type, handler, body=body, headers=headers)
    except SSLError:
        # Distinguish an expired identity certificate from other SSL errors.
        if self.cert_file:
            id_cert = certificate.create_from_file(self.cert_file)
            if not id_cert.is_valid():
                raise ExpiredIdentityCertException()
        raise
    response = conn.getresponse()
    result = {
        "content": response.read(),
        "status": response.status,
    }
    response_log = 'Response: status=' + str(result['status'])
    if response.getheader('x-candlepin-request-uuid'):
        response_log = "%s, requestUuid=%s" % (response_log,
                                               response.getheader('x-candlepin-request-uuid'))
    log.debug(response_log)
    # Look for server drift, and log a warning
    if drift_check(response.getheader('date')):
        log.warn("Clock skew detected, please check your system time")
    # FIXME: we should probably do this in a wrapper method
    # so we can use the request method for normal http
    self.validateResponse(result, request_type, handler)
    # handle empty, but succesful responses, ala 204
    if not len(result['content']):
        return None
    return json.loads(result['content'], object_hook=self._decode_dict)
# Canned Candlepin compliance-status payload, parsed once at import time and
# used as a test fixture. It covers compliant products, partially compliant
# products, partial stacks, and the reasons list for an overall
# "invalid" / non-compliant status.
SAMPLE_COMPLIANCE_JSON = json.loads(""" { "date" : "2013-04-26T13:43:12.436+0000", "compliantUntil" : "2013-04-26T13:43:12.436+0000", "nonCompliantProducts" : [ "801" ], "compliantProducts" : { "100000000000002" : [ { "created" : "2013-04-26T13:41:56.688+0000", "updated" : "2013-04-26T13:41:56.688+0000", "id" : "ff8080813e468fd8013e46942f501173", "consumer" : null, "pool" : { "created" : "2013-04-26T13:38:29.296+0000", "updated" : "2013-04-26T13:41:56.688+0000", "id" : "ff8080813e468fd8013e469105300613", "owner" : { "id" : "ff8080813e468fd8013e468ff4c70002", "key" : "admin", "displayName" : "Admin Owner", "href" : "/owners/admin" }, "activeSubscription" : true, "subscriptionId" : "ff8080813e468fd8013e4690809e018f", "subscriptionSubKey" : "master", "sourceEntitlement" : null, "quantity" : 10, "startDate" : "2013-04-26T00:00:00.000+0000", "endDate" : "2014-04-26T00:00:00.000+0000", "productId" : "awesomeos-x86_64", "providedProducts" : [ { "id" : "ff8080813e468fd8013e46910531061c", "productId" : "100000000000002", "productName" : "Awesome OS for x86_64 Bits" } ], "attributes" : [ ], "productAttributes" : [ { "id" : null, "name" : "arch", "value" : "x86_64", "productId" : null }, { "id" : null, "name" : "multi-entitlement", "value" : "yes", "productId" : null }, { "id" : null, "name" : "type", "value" : "MKT", "productId" : null }, { "id" : null, "name" : "stacking_id", "value" : "1", "productId" : null }, { "id" : null, "name" : "sockets", "value" : "1", "productId" : null }, { "id" : null, "name" : "version", "value" : "3.11", "productId" : null }, { "id" : null, "name" : "variant", "value" : "ALL", "productId" : null }, { "id" : null, "name" : "warning_period", "value" : "30", "productId" : null } ], "restrictedToUsername" : null, "contractNumber" : "79", "accountNumber" : "12331131231", "orderNumber" : "order-8675309", "consumed" : 3, "exported" : 0, "productName" : "Awesome OS for x86_64", "href" : "/pools/ff8080813e468fd8013e469105300613" }, "startDate" : 
"2013-04-26T00:00:00.000+0000", "endDate" : "2014-04-26T00:00:00.000+0000", "certificates" : [ ], "quantity" : 3, "href" : "/entitlements/ff8080813e468fd8013e46942f501173" }, { "created" : "2013-04-26T13:41:28.554+0000", "updated" : "2013-04-26T13:41:28.554+0000", "id" : "ff8080813e468fd8013e4693c16a1170", "consumer" : null, "pool" : { "created" : "2013-04-26T13:38:29.320+0000", "updated" : "2013-04-26T13:41:28.554+0000", "id" : "ff8080813e468fd8013e46910548061d", "owner" : { "id" : "ff8080813e468fd8013e468ff4c70002", "key" : "admin", "displayName" : "Admin Owner", "href" : "/owners/admin" }, "activeSubscription" : true, "subscriptionId" : "ff8080813e468fd8013e4690801e018e", "subscriptionSubKey" : "master", "sourceEntitlement" : null, "quantity" : 5, "startDate" : "2013-04-26T00:00:00.000+0000", "endDate" : "2014-04-26T00:00:00.000+0000", "productId" : "awesomeos-x86_64", "providedProducts" : [ { "id" : "ff8080813e468fd8013e469105490627", "productId" : "100000000000002", "productName" : "Awesome OS for x86_64 Bits" } ], "attributes" : [ ], "productAttributes" : [ { "id" : null, "name" : "arch", "value" : "x86_64", "productId" : null }, { "id" : null, "name" : "multi-entitlement", "value" : "yes", "productId" : null }, { "id" : null, "name" : "type", "value" : "MKT", "productId" : null }, { "id" : null, "name" : "stacking_id", "value" : "1", "productId" : null }, { "id" : null, "name" : "sockets", "value" : "1", "productId" : null }, { "id" : null, "name" : "version", "value" : "3.11", "productId" : null }, { "id" : null, "name" : "variant", "value" : "ALL", "productId" : null }, { "id" : null, "name" : "warning_period", "value" : "30", "productId" : null } ], "restrictedToUsername" : null, "contractNumber" : "78", "accountNumber" : "12331131231", "orderNumber" : "order-8675309", "consumed" : 5, "exported" : 0, "productName" : "Awesome OS for x86_64", "href" : "/pools/ff8080813e468fd8013e46910548061d" }, "startDate" : "2013-04-26T00:00:00.000+0000", "endDate" : 
"2014-04-26T00:00:00.000+0000", "certificates" : [ ], "quantity" : 5, "href" : "/entitlements/ff8080813e468fd8013e4693c16a1170" } ] }, "partiallyCompliantProducts" : { "100000000000003" : [ { "created" : "2013-04-26T13:42:26.706+0000", "updated" : "2013-04-26T13:42:26.706+0000", "id" : "ff8080813e468fd8013e4694a4921179", "consumer" : null, "pool" : { "created" : "2013-04-26T13:38:28.981+0000", "updated" : "2013-04-26T13:42:26.707+0000", "id" : "ff8080813e468fd8013e469103f505b6", "owner" : { "id" : "ff8080813e468fd8013e468ff4c70002", "key" : "admin", "displayName" : "Admin Owner", "href" : "/owners/admin" }, "activeSubscription" : true, "subscriptionId" : "ff8080813e468fd8013e4690966601d7", "subscriptionSubKey" : "master", "sourceEntitlement" : null, "quantity" : 10, "startDate" : "2013-04-26T00:00:00.000+0000", "endDate" : "2014-04-26T00:00:00.000+0000", "productId" : "awesomeos-ppc64", "providedProducts" : [ { "id" : "ff8080813e468fd8013e469103f505be", "productId" : "100000000000003", "productName" : "Awesome OS for ppc64 Bits" } ], "attributes" : [ ], "productAttributes" : [ { "id" : null, "name" : "sockets", "value" : "16", "productId" : null }, { "id" : null, "name" : "arch", "value" : "ppc64", "productId" : null }, { "id" : null, "name" : "type", "value" : "MKT", "productId" : null }, { "id" : null, "name" : "version", "value" : "3.11", "productId" : null }, { "id" : null, "name" : "variant", "value" : "ALL", "productId" : null }, { "id" : null, "name" : "warning_period", "value" : "30", "productId" : null } ], "restrictedToUsername" : null, "contractNumber" : "97", "accountNumber" : "12331131231", "orderNumber" : "order-8675309", "consumed" : 1, "exported" : 0, "productName" : "Awesome OS for ppc64", "href" : "/pools/ff8080813e468fd8013e469103f505b6" }, "startDate" : "2013-04-26T00:00:00.000+0000", "endDate" : "2014-04-26T00:00:00.000+0000", "certificates" : [ ], "quantity" : 1, "href" : "/entitlements/ff8080813e468fd8013e4694a4921179" } ], "900" : [ { 
"created" : "2013-04-26T13:42:16.220+0000", "updated" : "2013-04-26T13:42:16.220+0000", "id" : "ff8080813e468fd8013e46947b9c1176", "consumer" : null, "pool" : { "created" : "2013-04-26T13:38:27.320+0000", "updated" : "2013-04-26T13:42:16.220+0000", "id" : "ff8080813e468fd8013e4690fd7803a4", "owner" : { "id" : "ff8080813e468fd8013e468ff4c70002", "key" : "admin", "displayName" : "Admin Owner", "href" : "/owners/admin" }, "activeSubscription" : true, "subscriptionId" : "ff8080813e468fd8013e4690f041031b", "subscriptionSubKey" : "master", "sourceEntitlement" : null, "quantity" : 5, "startDate" : "2013-04-26T00:00:00.000+0000", "endDate" : "2014-04-26T00:00:00.000+0000", "productId" : "sock-core-ram-multiattr", "providedProducts" : [ { "id" : "ff8080813e468fd8013e4690fd7903b0", "productId" : "900", "productName" : "Multi-Attribute Limited Product" } ], "attributes" : [ ], "productAttributes" : [ { "id" : null, "name" : "cores", "value" : "16", "productId" : null }, { "id" : null, "name" : "multi-entitlement", "value" : "yes", "productId" : null }, { "id" : null, "name" : "ram", "value" : "8", "productId" : null }, { "id" : null, "name" : "support_type", "value" : "Level 3", "productId" : null }, { "id" : null, "name" : "type", "value" : "MKT", "productId" : null }, { "id" : null, "name" : "arch", "value" : "ALL", "productId" : null }, { "id" : null, "name" : "stacking_id", "value" : "multiattr-stack-test", "productId" : null }, { "id" : null, "name" : "version", "value" : "1.0", "productId" : null }, { "id" : null, "name" : "support_level", "value" : "Super", "productId" : null }, { "id" : null, "name" : "sockets", "value" : "4", "productId" : null }, { "id" : null, "name" : "variant", "value" : "ALL", "productId" : null } ], "restrictedToUsername" : null, "contractNumber" : "204", "accountNumber" : "12331131231", "orderNumber" : "order-8675309", "consumed" : 1, "exported" : 0, "productName" : "Multi-Attribute Stackable (16 cores, 4 sockets, 8GB RAM)", "href" : 
"/pools/ff8080813e468fd8013e4690fd7803a4" }, "startDate" : "2013-04-26T00:00:00.000+0000", "endDate" : "2014-04-26T00:00:00.000+0000", "certificates" : [ ], "quantity" : 1, "href" : "/entitlements/ff8080813e468fd8013e46947b9c1176" } ] }, "partialStacks" : { "multiattr-stack-test" : [ { "created" : "2013-04-26T13:42:16.220+0000", "updated" : "2013-04-26T13:42:16.220+0000", "id" : "ff8080813e468fd8013e46947b9c1176", "consumer" : null, "pool" : { "created" : "2013-04-26T13:38:27.320+0000", "updated" : "2013-04-26T13:42:16.220+0000", "id" : "ff8080813e468fd8013e4690fd7803a4", "owner" : { "id" : "ff8080813e468fd8013e468ff4c70002", "key" : "admin", "displayName" : "Admin Owner", "href" : "/owners/admin" }, "activeSubscription" : true, "subscriptionId" : "ff8080813e468fd8013e4690f041031b", "subscriptionSubKey" : "master", "sourceEntitlement" : null, "quantity" : 5, "startDate" : "2013-04-26T00:00:00.000+0000", "endDate" : "2014-04-26T00:00:00.000+0000", "productId" : "sock-core-ram-multiattr", "providedProducts" : [ { "id" : "ff8080813e468fd8013e4690fd7903b0", "productId" : "900", "productName" : "Multi-Attribute Limited Product" } ], "attributes" : [ ], "productAttributes" : [ { "id" : null, "name" : "cores", "value" : "16", "productId" : null }, { "id" : null, "name" : "multi-entitlement", "value" : "yes", "productId" : null }, { "id" : null, "name" : "ram", "value" : "8", "productId" : null }, { "id" : null, "name" : "support_type", "value" : "Level 3", "productId" : null }, { "id" : null, "name" : "type", "value" : "MKT", "productId" : null }, { "id" : null, "name" : "arch", "value" : "ALL", "productId" : null }, { "id" : null, "name" : "stacking_id", "value" : "multiattr-stack-test", "productId" : null }, { "id" : null, "name" : "version", "value" : "1.0", "productId" : null }, { "id" : null, "name" : "support_level", "value" : "Super", "productId" : null }, { "id" : null, "name" : "sockets", "value" : "4", "productId" : null }, { "id" : null, "name" : "variant", 
"value" : "ALL", "productId" : null } ], "restrictedToUsername" : null, "contractNumber" : "204", "accountNumber" : "12331131231", "orderNumber" : "order-8675309", "consumed" : 1, "exported" : 0, "productName" : "Multi-Attribute Stackable (16 cores, 4 sockets, 8GB RAM)", "href" : "/pools/ff8080813e468fd8013e4690fd7803a4" }, "startDate" : "2013-04-26T00:00:00.000+0000", "endDate" : "2014-04-26T00:00:00.000+0000", "certificates" : [ ], "quantity" : 1, "href" : "/entitlements/ff8080813e468fd8013e46947b9c1176" } ] }, "reasons" : [ { "key" : "NOTCOVERED", "message" : "The system does not have subscriptions that cover RAM Limiting Product.", "attributes" : { "product_id" : "801", "name" : "RAM Limiting Product" } }, { "key" : "CORES", "message" : "Multi-Attribute Stackable (16 cores, 4 sockets, 8GB RAM) only covers 16 of 32 cores.", "attributes" : { "has" : "32", "covered" : "16", "stack_id" : "multiattr-stack-test", "name" : "Multi-Attribute Stackable (16 cores, 4 sockets, 8GB RAM)" } }, { "key" : "SOCKETS", "message" : "Multi-Attribute Stackable (16 cores, 4 sockets, 8GB RAM) only covers 4 of 8 sockets.", "attributes" : { "has" : "8", "covered" : "4", "stack_id" : "multiattr-stack-test", "name" : "Multi-Attribute Stackable (16 cores, 4 sockets, 8GB RAM)" } }, { "key" : "RAM", "message" : "Multi-Attribute Stackable (16 cores, 4 sockets, 8GB RAM) only covers 8GB of 31GB of RAM.", "attributes" : { "has" : "31", "covered" : "8", "stack_id" : "multiattr-stack-test", "name" : "Multi-Attribute Stackable (16 cores, 4 sockets, 8GB RAM)" } }, { "key" : "ARCH", "message" : "Awesome OS for ppc64 covers architecture ppc64 but the system is x86_64.", "attributes" : { "has" : "x86_64", "covered" : "ppc64", "entitlement_id" : "ff8080813e468fd8013e4694a4921179", "name" : "Awesome OS for ppc64" } } ], "status" : "invalid", "compliant" : false } """)
def test_facts_has_changed_no_change(self, mock_load_hw, mock_load_cf):
    """When collected hardware facts equal the cached facts, has_changed() is False."""
    expected_facts = json.loads(facts_buf)
    # Make the hardware collector hand back exactly what the cache holds.
    mock_load_hw.return_value = expected_facts
    result = self.f.has_changed()
    self.assert_equal_dict(expected_facts, self.f.facts)
    self.assertFalse(result)