def _mock_pkg_profile(packages, repo_file, enabled_modules):
    """
    Build the combined profile dict (rpm / enabled_repos / modulemd) from
    a list of package objects, a repo file and a list of enabled modules.
    """
    serialized = json.dumps([pkg.to_dict() for pkg in packages])
    fake_file = Mock()
    fake_file.read = Mock(return_value=serialized)
    module_profile = ModulesProfile()
    module_profile.collect = Mock(return_value=enabled_modules)
    return {
        "rpm": RPMProfile(from_file=fake_file),
        "enabled_repos": EnabledReposProfile(repo_file=repo_file),
        "modulemd": module_profile,
    }
def test_custom_set_encoding(self):
    # Sets are not JSON-serializable by default; the custom encoder
    # should turn them into JSON lists.
    values = set(['a', 'b', 'c', 'c'])
    result = json.dumps(values, default=json.encode)
    # Build the expected JSON text by hand: Python reprs lists with
    # single quotes while JSON uses double quotes, so a direct string
    # comparison against repr() would not work.
    expected = "[%s]" % ", ".join('"%s"' % v for v in values)
    self.assertEqual(expected, result)
def test_package_json_handles_non_unicode(self):
    # 0xf6 is not valid UTF-8, so every field should decode to the
    # unicode replacement character after the JSON round trip.
    bad_bytes = b'\xf6'
    package = Package(name=bad_bytes, version=bad_bytes, release=bad_bytes,
                      arch=bad_bytes, vendor=bad_bytes)
    round_tripped = json.loads(json.dumps(package.to_dict()))
    for attr in ('name', 'version', 'release', 'arch', 'vendor'):
        self.assertEqual(u'\ufffd', round_tripped[attr])
def test_package_json_missing_attributes(self):
    # Fields that are None must survive the JSON round trip as null/None.
    package = Package(name=None, version=None, release=None,
                      arch=None, vendor=None)
    round_tripped = json.loads(json.dumps(package.to_dict()))
    for attr in ('name', 'version', 'release', 'arch', 'vendor'):
        self.assertEqual(None, round_tripped[attr])
def test_load_data(self): cached = {'prod1': 'Product 1', 'prod2': 'Product 2'} mock_file = Mock() mock_file.read = Mock(return_value=json.dumps(cached)) data = self.mgr._load_data(mock_file) self.assertEquals(data, cached)
def test_load_data(self): cached = {"prod1": "Product 1", "prod2": "Product 2"} mock_file = Mock() mock_file.read = Mock(return_value=json.dumps(cached)) data = self.mgr._load_data(mock_file) self.assertEquals(data, cached)
def _write_flat_file(self, content_path, filename, content):
    """Serialize ``content`` as pretty-printed JSON into content_path/filename."""
    serialized = json.dumps(content, indent=4, sort_keys=True,
                            default=json.encode)
    target = os.path.join(content_path, filename)
    with open(target, "w+") as fo:
        fo.write(serialized)
def test_package_json_as_unicode_type(self):
    # The data type at time of writing is bytes, so accepting unicode
    # here is defensive coding: it must round-trip unchanged.
    text = u'Björk'
    package = Package(name=text, version=text, release=text,
                      arch=text, vendor=text)
    round_tripped = json.loads(json.dumps(package.to_dict()))
    for attr in ('name', 'version', 'release', 'arch', 'vendor'):
        self.assertEqual(text, round_tripped[attr])
def mock_pkg_profile(packages):
    """Wrap a list of package objects in an RPMProfile backed by a fake file."""
    payload = json.dumps([p.to_dict() for p in packages])
    fake_file = mock.Mock()
    fake_file.read = mock.Mock(return_value=payload)
    return RPMProfile(from_file=fake_file)
def test_package_json_missing_vendor(self):
    # A missing vendor must survive the JSON round trip as None/null.
    package = Package(name="package1", version="1.0.0", release=1,
                      arch="x86_64", vendor=None)
    round_tripped = json.loads(json.dumps(package.to_dict()))
    self.assertEqual(None, round_tripped['vendor'])
def test_package_json_handles_non_unicode(self):
    # 0xf6 is not valid UTF-8; the vendor field should decode to the
    # unicode replacement character after the round trip.
    package = Package(name="package1", version="1.0.0", release=1,
                      arch="x86_64", vendor=b'\xf6')
    round_tripped = json.loads(json.dumps(package.to_dict()))
    self.assertEqual(u'\ufffd', round_tripped['vendor'])
def test_load_data(self): cached = { 'prod1': 'Product 1', 'prod2': 'Product 2' } mock_file = Mock() mock_file.read = Mock(return_value=json.dumps(cached)) data = self.mgr._load_data(mock_file) self.assertEquals(data, cached)
def test_package_json_vendor_as_unicode_type(self):
    # The data type at time of writing is bytes, so accepting unicode
    # here is defensive coding: it must round-trip unchanged.
    package = Package(name="package1", version="1.0.0", release=1,
                      arch="x86_64", vendor=u'Björk')
    round_tripped = json.loads(json.dumps(package.to_dict()))
    self.assertEqual(u'Björk', round_tripped['vendor'])
def test_load_data(self): cached = { 'pools': { 'pool1': 'Pool 1', 'pool2': 'Pool 2' }, 'tags': ['p1', 'p2'] } mock_file = Mock() mock_file.read = Mock(return_value=json.dumps(cached)) data = self.pool_status_cache._load_data(mock_file) self.assertEqual(data, cached)
def _mock_pkg_profile_file(self): """ Turn a list of package objects into an RPMProfile object. """ packages = self._get_packages() dict_list = [] for pkg in packages: dict_list.append(pkg.to_dict()) mock_file = mock.Mock() mock_file.read = mock.Mock(return_value=json.dumps(dict_list)) return mock_file
def _mock_pkg_profile(packages):
    """
    Turn a list of package objects into an RPMProfile object.
    """
    payload = json.dumps([p.to_dict() for p in packages])
    fake_file = Mock()
    fake_file.read = Mock(return_value=payload)
    return RPMProfile(from_file=fake_file)
def _update_cache(self, data):
    """Serialize ``data`` as JSON and overwrite the on-disk cache file."""
    log.debug("Updating content access cache")
    serialized = json.dumps(data)
    with open(self.CACHE_FILE, "w") as cache:
        cache.write(serialized)
def test_writes_to_cache_after_read(self):
    # After fetching fresh data, the cache must persist it to disk:
    # the cache file is opened for write and the JSON payload written.
    self.cache.check_for_update()
    self.MOCK_OPEN_EMPTY.assert_any_call(ContentAccessCache.CACHE_FILE, 'w')
    written = json.dumps(self.MOCK_CONTENT)
    self.MOCK_OPEN_EMPTY().write.assert_any_call(written)
class TestContentAccessCache(SubManFixture):
    """
    Tests for ContentAccessCache: fetching the content listing from the
    server, persisting it to the on-disk cache file, updating entitlement
    certs from it, and removing/gracefully failing around the cache.
    """

    # Canned server response carrying a content listing for serial "42".
    MOCK_CONTENT = {
        "lastUpdate": "2016-12-01T21:56:35+0000",
        "contentListing": {"42": ["cert-part1", "cert-part2"]}
    }

    # Server response with no content listing at all (contentListing null).
    MOCK_CONTENT_EMPTY_CONTENT_LISTING = {
        "lastUpdate": "2016-12-01T21:56:35+0000",
        "contentListing": None
    }

    # A cert body containing an embedded entitlement data block.
    MOCK_CERT = """
before
-----BEGIN ENTITLEMENT DATA-----
entitlement data goes here
-----END ENTITLEMENT DATA-----
after
"""

    # mock_open() handles used to patch the module's open():
    # MOCK_OPEN_EMPTY simulates an empty/missing cache file,
    # MOCK_OPEN_CACHE is pre-loaded with MOCK_CONTENT as JSON.
    MOCK_OPEN_EMPTY = mock_open()
    MOCK_OPEN_CACHE = mock_open(read_data=json.dumps(MOCK_CONTENT))

    def setUp(self):
        # Wire the cache to a fully mocked UEP connection whose
        # getAccessibleContent() returns MOCK_CONTENT by default.
        super(TestContentAccessCache, self).setUp()
        self.cache = ContentAccessCache()
        self.cache.cp_provider = Mock()
        self.mock_uep = Mock()
        self.mock_uep.getAccessibleContent = Mock(return_value=self.MOCK_CONTENT)
        self.cache.cp_provider.get_consumer_auth_cp = Mock(return_value=self.mock_uep)
        self.cache.identity = Mock()
        self.cert = Mock()

    @patch('subscription_manager.cache.open', MOCK_OPEN_EMPTY)
    def test_empty_cache(self):
        # With no cache file on disk, exists() must report False.
        self.assertFalse(self.cache.exists())

    @patch('subscription_manager.cache.open', MOCK_OPEN_EMPTY)
    def test_writes_to_cache_after_read(self):
        # A fresh fetch must be written back to the cache file as JSON.
        self.cache.check_for_update()
        self.MOCK_OPEN_EMPTY.assert_any_call(ContentAccessCache.CACHE_FILE, 'w')
        self.MOCK_OPEN_EMPTY().write.assert_any_call(json.dumps(self.MOCK_CONTENT))

    @patch('subscription_manager.cache.open', MOCK_OPEN_EMPTY)
    def test_cert_updated_after_read(self):
        # The cert whose serial appears in the content listing gets
        # rewritten with the concatenated cert parts.
        self.cert.serial = 42
        update_data = self.cache.check_for_update()
        self.cache.update_cert(self.cert, update_data)
        self.MOCK_OPEN_EMPTY.assert_any_call(self.cert.path, 'w')
        self.MOCK_OPEN_EMPTY().write.assert_any_call(''.join(self.MOCK_CONTENT['contentListing']['42']))

    @patch('subscription_manager.cache.open', MOCK_OPEN_CACHE)
    def test_check_for_update_provides_date(self):
        # An existing cache file should make the server query conditional
        # on the cached lastUpdate timestamp (if_modified_since).
        mock_exists = Mock(return_value=True)
        with patch('os.path.exists', mock_exists):
            self.cache.check_for_update()
            date = isodate.parse_date("2016-12-01T21:56:35+0000")
            self.mock_uep.getAccessibleContent.assert_called_once_with(self.cache.identity.uuid, if_modified_since=date)

    @patch('os.path.exists', Mock(return_value=True))
    def test_cache_remove_deletes_file(self):
        # remove() should delete exactly the cache file, nothing else.
        mock_remove = Mock()
        with patch('os.remove', mock_remove):
            self.cache.remove()
            mock_remove.assert_called_once_with(ContentAccessCache.CACHE_FILE)

    @patch('subscription_manager.cache.open', MOCK_OPEN_EMPTY)
    def test_cache_handles_empty_content_listing(self):
        # A null contentListing from the server must not raise.
        self.mock_uep.getAccessibleContent = Mock(return_value=self.MOCK_CONTENT_EMPTY_CONTENT_LISTING)
        self.cache.check_for_update()
        # getting this far means we did not raise an exception :-)

    @patch('subscription_manager.cache.open', MOCK_OPEN_EMPTY)
    def test_cache_fails_server_issues_gracefully(self):
        # Server-side errors (e.g. a 404) are swallowed gracefully.
        self.mock_uep.getAccessibleContent = Mock(side_effect=RestlibException(404))
        self.cache.check_for_update()
def _request(self, request_type, method, info=None):
    """
    Issue an HTTPS request (optionally through a proxy) against the
    candlepin API and return the decoded JSON response, or None for
    empty-but-successful responses (e.g. 204).

    :param request_type: HTTP verb, e.g. "GET" or "POST"
    :param method: API path appended to self.apihandler
    :param info: optional object serialized to JSON as the request body
    :raises ExpiredIdentityCertException: on SSL failure with an expired
        identity cert; other SSL errors are re-raised as-is.
    """
    handler = self.apihandler + method

    # See M2Crypto/SSL/Context.py in m2crypto source and
    # https://www.openssl.org/docs/ssl/SSL_CTX_new.html
    # This ends up invoking SSLv23_method, which is the catch all
    # "be compatible" protocol, even though it explicitly is not
    # using sslv2. This will by default potentially include sslv3
    # if not used with post-poodle openssl. If however, the server
    # intends to not offer sslv3, it's workable.
    #
    # So this supports tls1.2, 1.1, 1.0, and/or sslv3 if supported.
    context = SSL.Context("sslv23")

    # Disable SSLv2 and SSLv3 support to avoid poodles.
    context.set_options(m2.SSL_OP_NO_SSLv2 | m2.SSL_OP_NO_SSLv3)

    if self.insecure:  # allow clients to work insecure mode if required..
        context.post_connection_check = NoOpChecker()
    else:
        # Proper peer verification is essential to prevent MITM attacks.
        context.set_verify(
            SSL.verify_peer | SSL.verify_fail_if_no_peer_cert,
            self.ssl_verify_depth)
        if self.ca_dir is not None:
            self._load_ca_certificates(context)
    if self.cert_file and os.path.exists(self.cert_file):
        context.load_cert(self.cert_file, keyfile=self.key_file)

    if self.proxy_hostname and self.proxy_port:
        log.debug("Using proxy: %s:%s" % (self.proxy_hostname, self.proxy_port))
        conn = RhsmProxyHTTPSConnection(self.proxy_hostname, self.proxy_port,
                                        username=self.proxy_user,
                                        password=self.proxy_password,
                                        ssl_context=context)
        # this connection class wants the full url
        handler = "https://%s:%s%s" % (self.host, self.ssl_port, handler)
    else:
        conn = httpslib.HTTPSConnection(self.host, self.ssl_port, ssl_context=context)

    if info is not None:
        body = json.dumps(info, default=json.encode)
    else:
        body = None

    log.debug("Making request: %s %s" % (request_type, handler))

    headers = self.headers
    if body is None:
        # BUGFIX: dict(a.items() + b.items()) only works on Python 2,
        # where items() returns lists; on Python 3 dict views cannot be
        # concatenated. Copy-and-update behaves identically on both and
        # still leaves self.headers unmodified.
        headers = dict(self.headers)
        headers["Content-Length"] = "0"

    # NOTE: alters global timeout_altered (and socket timeout)
    set_default_socket_timeout_if_python_2_3()

    try:
        conn.request(request_type, handler, body=body, headers=headers)
    except SSLError:
        # Distinguish "identity cert expired" from other SSL failures.
        if self.cert_file:
            id_cert = certificate.create_from_file(self.cert_file)
            if not id_cert.is_valid():
                raise ExpiredIdentityCertException()
        raise
    response = conn.getresponse()
    result = {
        "content": response.read(),
        "status": response.status,
    }
    response_log = 'Response: status=' + str(result['status'])
    if response.getheader('x-candlepin-request-uuid'):
        response_log = "%s, requestUuid=%s" % (
            response_log, response.getheader('x-candlepin-request-uuid'))
    log.debug(response_log)

    # Look for server drift, and log a warning
    if drift_check(response.getheader('date')):
        log.warn("Clock skew detected, please check your system time")

    # FIXME: we should probably do this in a wrapper method
    # so we can use the request method for normal http
    self.validateResponse(result, request_type, handler)

    # handle empty, but succesful responses, ala 204
    if not len(result['content']):
        return None

    return json.loads(result['content'], object_hook=self._decode_dict)
def _request(self, request_type, method, info=None):
    """
    Issue an HTTPS request (optionally through a proxy) against the
    candlepin API and return the decoded JSON response, or None for
    empty-but-successful responses (e.g. 204). Also records response
    headers in the result passed to validateResponse().

    :param request_type: HTTP verb, e.g. "GET" or "POST"
    :param method: API path appended to self.apihandler
    :param info: optional object serialized to JSON as the request body
    :raises ExpiredIdentityCertException: on SSL failure with an expired
        identity cert; other SSL errors are re-raised as-is.
    """
    handler = self.apihandler + method

    # See M2Crypto/SSL/Context.py in m2crypto source and
    # https://www.openssl.org/docs/ssl/SSL_CTX_new.html
    # This ends up invoking SSLv23_method, which is the catch all
    # "be compatible" protocol, even though it explicitly is not
    # using sslv2. This will by default potentially include sslv3
    # if not used with post-poodle openssl. If however, the server
    # intends to not offer sslv3, it's workable.
    #
    # So this supports tls1.2, 1.1, 1.0, and/or sslv3 if supported.
    context = SSL.Context("sslv23")

    # Disable SSLv2 and SSLv3 support to avoid poodles.
    context.set_options(m2.SSL_OP_NO_SSLv2 | m2.SSL_OP_NO_SSLv3)

    if self.insecure:  # allow clients to work insecure mode if required..
        context.post_connection_check = NoOpChecker()
    else:
        # Proper peer verification is essential to prevent MITM attacks.
        context.set_verify(
            SSL.verify_peer | SSL.verify_fail_if_no_peer_cert,
            self.ssl_verify_depth)
        if self.ca_dir is not None:
            self._load_ca_certificates(context)
    if self.cert_file and os.path.exists(self.cert_file):
        context.load_cert(self.cert_file, keyfile=self.key_file)

    if self.proxy_hostname and self.proxy_port:
        log.debug("Using proxy: %s:%s" % (self.proxy_hostname, self.proxy_port))
        conn = RhsmProxyHTTPSConnection(self.proxy_hostname, self.proxy_port,
                                        username=self.proxy_user,
                                        password=self.proxy_password,
                                        ssl_context=context)
        # this connection class wants the full url
        handler = "https://%s:%s%s" % (self.host, self.ssl_port, handler)
    else:
        conn = httpslib.HTTPSConnection(self.host, self.ssl_port, ssl_context=context)

    if info is not None:
        body = json.dumps(info, default=json.encode)
    else:
        body = None

    log.debug("Making request: %s %s" % (request_type, handler))

    if self.user_agent:
        self.headers['User-Agent'] = self.user_agent

    headers = self.headers
    if body is None:
        # BUGFIX: dict(a.items() + b.items()) only works on Python 2,
        # where items() returns lists; on Python 3 dict views cannot be
        # concatenated. Copy-and-update behaves identically on both and
        # still leaves self.headers unmodified.
        headers = dict(self.headers)
        headers["Content-Length"] = "0"

    # NOTE: alters global timeout_altered (and socket timeout)
    set_default_socket_timeout_if_python_2_3()

    try:
        conn.request(request_type, handler, body=body, headers=headers)
    except SSLError:
        # Distinguish "identity cert expired" from other SSL failures.
        if self.cert_file:
            id_cert = certificate.create_from_file(self.cert_file)
            if not id_cert.is_valid():
                raise ExpiredIdentityCertException()
        raise
    response = conn.getresponse()
    result = {
        "content": response.read(),
        "status": response.status,
        "headers": dict(response.getheaders())
    }
    response_log = 'Response: status=' + str(result['status'])
    if response.getheader('x-candlepin-request-uuid'):
        response_log = "%s, requestUuid=%s" % (
            response_log, response.getheader('x-candlepin-request-uuid'))
    log.debug(response_log)

    # Look for server drift, and log a warning
    if drift_check(response.getheader('date')):
        log.warn("Clock skew detected, please check your system time")

    # FIXME: we should probably do this in a wrapper method
    # so we can use the request method for normal http
    self.validateResponse(result, request_type, handler)

    # handle empty, but succesful responses, ala 204
    if not len(result['content']):
        return None

    return json.loads(result['content'], object_hook=self._decode_dict)
def _request(self, request_type, method, info=None):
    """
    Issue an HTTPS (TLSv1) request, optionally through a proxy, against
    the candlepin API and return the decoded JSON response, or None for
    empty-but-successful responses (e.g. 204).

    :param request_type: HTTP verb, e.g. "GET" or "POST"
    :param method: API path appended to self.apihandler
    :param info: optional object serialized to JSON as the request body
    :raises ExpiredIdentityCertException: on SSL failure with an expired
        identity cert; other SSL errors are re-raised as-is.
    """
    handler = self.apihandler + method
    context = SSL.Context("tlsv1")
    if self.insecure:  # allow clients to work insecure mode if required..
        context.post_connection_check = NoOpChecker()
    else:
        # BUGFIX: verify_fail_if_no_peer_cert on its own does not turn
        # on peer verification in M2Crypto; SSL.verify_peer must be set
        # as well or the server certificate is never actually checked
        # (MITM risk). The sibling sslv23 variant of this method already
        # passes both flags.
        context.set_verify(
            SSL.verify_peer | SSL.verify_fail_if_no_peer_cert,
            self.ssl_verify_depth)
        if self.ca_dir is not None:
            self._load_ca_certificates(context)
    if self.cert_file and os.path.exists(self.cert_file):
        context.load_cert(self.cert_file, keyfile=self.key_file)

    if self.proxy_hostname and self.proxy_port:
        log.debug("Using proxy: %s:%s" % (self.proxy_hostname, self.proxy_port))
        conn = RhsmProxyHTTPSConnection(self.proxy_hostname, self.proxy_port,
                                        username=self.proxy_user,
                                        password=self.proxy_password,
                                        ssl_context=context)
        # this connection class wants the full url
        handler = "https://%s:%s%s" % (self.host, self.ssl_port, handler)
    else:
        conn = httpslib.HTTPSConnection(self.host, self.ssl_port, ssl_context=context)

    if info is not None:
        body = json.dumps(info)
    else:
        body = None

    log.debug("Making request: %s %s" % (request_type, handler))

    headers = self.headers
    if body is None:
        # BUGFIX: dict(a.items() + b.items()) only works on Python 2,
        # where items() returns lists; on Python 3 dict views cannot be
        # concatenated. Copy-and-update behaves identically on both and
        # still leaves self.headers unmodified.
        headers = dict(self.headers)
        headers["Content-Length"] = "0"

    try:
        conn.request(request_type, handler, body=body, headers=headers)
    except SSLError:
        # Distinguish "identity cert expired" from other SSL failures.
        if self.cert_file:
            id_cert = certificate.create_from_file(self.cert_file)
            if not id_cert.is_valid():
                raise ExpiredIdentityCertException()
        raise
    response = conn.getresponse()
    result = {
        "content": response.read(),
        "status": response.status,
    }
    response_log = 'Response: status=' + str(result['status'])
    if response.getheader('x-candlepin-request-uuid'):
        response_log = "%s, requestUuid=%s" % (
            response_log, response.getheader('x-candlepin-request-uuid'))
    log.debug(response_log)

    # Look for server drift, and log a warning
    if drift_check(response.getheader('date')):
        log.warn("Clock skew detected, please check your system time")

    # FIXME: we should probably do this in a wrapper method
    # so we can use the request method for normal http
    self.validateResponse(result, request_type, handler)

    # handle empty, but succesful responses, ala 204
    if not len(result['content']):
        return None

    return json.loads(result['content'], object_hook=self._decode_dict)
def _write_flat_file(self, content_path, filename, content): path = os.path.join(content_path, filename) with open(path, "w+") as fo: fo.write(json.dumps(content, indent=4, sort_keys=True))
def test_test_encoding_datetime(self):
    # datetime objects are serialized via the custom encoder; the result
    # is the str() form wrapped in JSON quotes.
    stamp = datetime(2020, 1, 1)
    encoded = json.dumps(stamp, default=json.encode)
    self.assertEqual('"2020-01-01 00:00:00"', encoded)