def test_update_payload_v1(self):
    """Call :meth:`nailgun.entity_mixins.EntityUpdateMixin.update_payload`.

    Assert that the method behaves correctly given various values for
    the ``field`` argument.
    """
    class TestEntity(EntityWithUpdate):
        """Just like its parent class, but with fields."""

        def __init__(self, server_config=None, **kwargs):
            self._fields = {'one': IntegerField(), 'two': IntegerField()}
            super(TestEntity, self).__init__(server_config, **kwargs)

    cfg = config.ServerConfig('url')
    args_list = (
        {},
        {'one': gen_integer()},
        {'two': gen_integer()},
        {'one': gen_integer(), 'two': gen_integer()},
    )
    # `update_payload` should return all values or no values at all.
    for args in args_list:
        entity = TestEntity(cfg, **args)
        self.assertEqual(entity.update_payload(), args)
        self.assertEqual(entity.update_payload(list(args.keys())), args)
        self.assertEqual(entity.update_payload([]), {})
    # `update_payload` should return only the requested values.
    entity = TestEntity(cfg, **args_list[-1])
    self.assertEqual(
        entity.update_payload(['one']), {'one': args_list[-1]['one']})
    self.assertEqual(
        entity.update_payload(['two']), {'two': args_list[-1]['two']})
    # Asking `update_payload` for unavailable values must raise.
    entity = TestEntity(cfg)
    for field_names in (['one'], ['two'], ['one', 'two']):
        with self.assertRaises(KeyError):
            entity.update_payload(field_names)
def test_simple_shared_counter_timeout(self):
    """The shared function should lose its result after the timeout."""
    first_value = gen_integer(min_value=1, max_value=10000)
    result = simple_shared_counter_increment_timeout(first_value)
    self.assertIsInstance(result, dict)
    self.assertIn('index', result)
    first_result_value = result['index']
    self.assertEqual(first_result_value, first_value + 1)
    second_value = gen_integer(min_value=1, max_value=10000)
    # make sure the two inputs differ
    self.assertNotEqual(first_value, second_value)
    result = simple_shared_counter_increment_timeout(second_value)
    self.assertIsInstance(result, dict)
    self.assertIn('index', result)
    second_result_value = result['index']
    # The shared function always returns the result of its first call,
    # so the value must not have changed yet.
    self.assertNotEqual(second_result_value, second_value + 1)
    self.assertEqual(second_result_value, first_result_value)
    # sleep long enough to reach the timeout
    time.sleep(SIMPLE_TIMEOUT_VALUE + 1)
    timeout_value = gen_integer(min_value=1, max_value=10000)
    # make sure the inputs differ
    self.assertNotEqual(first_value, timeout_value)
    timeout_result = simple_shared_counter_increment_timeout(
        timeout_value)
    self.assertIsInstance(timeout_result, dict)
    self.assertIn('index', timeout_result)
    timeout_result_value = timeout_result['index']
    # After the timeout the shared result is recomputed from the new input.
    self.assertEqual(timeout_result_value, timeout_value + 1)
    self.assertNotEqual(timeout_result_value, first_result_value)
def test_read_v3(self):
    """Make ``read_json`` return IDs."""
    # Generate some bogus attributes and have `read` consume them.
    entity_1 = self.test_entity(self.cfg)
    attrs = {
        'id': gen_integer(min_value=1),
        'many_ids': [gen_integer(min_value=1)],
        'none': None,
        'one_id': gen_integer(min_value=1),
    }
    with mock.patch.object(entity_1, 'read_json') as read_json:
        read_json.return_value = attrs
        entity_2 = entity_1.read(ignore={'ignore_me'})
    # Check the call and the entity built from `attrs`.
    self.assertEqual(
        entity_2._server_config,  # pylint:disable=protected-access
        self.cfg,
    )
    self.assertEqual(read_json.call_count, 1)
    self.assertEqual(
        set(entity_1.get_fields().keys()),
        set(entity_2.get_fields().keys()),
    )
    self.assertEqual(entity_2.id, attrs['id'])
    self.assertEqual(entity_2.many[0].id, attrs['many_ids'][0])
    self.assertEqual(entity_2.one.id, attrs['one_id'])
def setUp(self):
    """Generate a random path, a desired value and a mock response."""
    path_length = gen_integer(min_value=1, max_value=100)
    self.path = gen_string('utf8', path_length)
    self.desired = gen_integer()
    self.response = MockResponse()
def test_search_normalize_v1(self):
    """Call ``search_normalize``.

    Pretend the server returns values for all fields, and an extra value.
    """
    with mock.patch.object(entity_mixins, '_get_entity_ids') as get_ids:
        with mock.patch.object(entity_mixins, '_get_entity_id') as get_id:
            attrs_list = EntityWithSearch2(self.cfg).search_normalize([{
                'extra': 'foo',  # simulate extra value returned by server
                'id': 'bar',
                'many_ids': [gen_integer()],
                'one_id': gen_integer(),
            }])
    self.assertEqual(get_ids.call_count, 1)
    self.assertEqual(get_id.call_count, 1)
    self.assertEqual(len(attrs_list), 1)
    # The extra value is dropped; FK fields are resolved by the helpers.
    self.assertEqual(attrs_list[0], {
        'id': 'bar',
        'many': get_ids.return_value,
        'one': get_id.return_value,
    })
def valid_addr_pools():
    """Returns a list of valid address pools"""
    # A pool whose bounds are the same random value (equivalent to
    # multiplying a one-element list by two).
    bound = gen_integer(min_value=1, max_value=255)
    return [
        [gen_integer(min_value=1, max_value=255),
         gen_integer(min_value=1, max_value=255)],
        [bound, bound],
        [1, 255],
    ]
def test_simple_shared_counter(self):
    """The counter should never change when calling second time"""
    index_value = gen_integer(min_value=1, max_value=10000)
    increment_by = gen_integer(min_value=1, max_value=100)
    expected_value = index_value + increment_by
    result = simple_shared_counter_increment(
        index=index_value, increment_by=increment_by
    )
    self.assertIsInstance(result, dict)
    self.assertIn('index', result)
    self.assertEqual(result['index'], expected_value)
    other_index = gen_integer(min_value=1, max_value=10000)
    self.assertNotEqual(index_value, other_index)
    other_increment = gen_integer(min_value=1, max_value=100)
    self.assertNotEqual(increment_by, other_increment)
    # Call the counter function a second time: the cached first result
    # must come back regardless of the new arguments.
    result = simple_shared_counter_increment(
        index=other_index, increment_by=other_increment
    )
    self.assertIsInstance(result, dict)
    self.assertIn('index', result)
    self.assertEqual(result['index'], expected_value)
def invalid_addr_pools():
    """Returns a list of invalid address pools"""
    return [
        # missing the 'to' bound
        {u'from': gen_integer(min_value=1, max_value=255)},
        # missing the 'from' bound
        {u'to': gen_integer(min_value=1, max_value=255)},
        # 'from' is greater than 'to'
        {u'from': gen_integer(min_value=128, max_value=255),
         u'to': gen_integer(min_value=1, max_value=127)},
        # both bounds outside the valid 1..255 range
        {u'from': 256, u'to': 257},
    ]
def test_gen_integer_9_2(self):
    """
    @Test: Create a random integer using alpha strings as args

    @Feature: Numbers Generator

    @Assert: An integer number is not created due to value error
    """
    # Non-numeric bounds must be rejected.
    with self.assertRaises(ValueError):
        gen_integer(min_value='a', max_value='b')
def invalid_addr_pools():
    """Returns a tuple of invalid address pools"""
    return (
        # missing the 'to' bound
        {u'from': gen_integer(min_value=1, max_value=255)},
        # missing the 'from' bound
        {u'to': gen_integer(min_value=1, max_value=255)},
        # 'from' is greater than 'to'
        {u'from': gen_integer(min_value=128, max_value=255),
         u'to': gen_integer(min_value=1, max_value=127)},
        # both bounds outside the valid 1..255 range
        {u'from': 256, u'to': 257},
    )
def test_gen_integer_8_1(self):
    """
    @Test: Create a random integer using whitespace as args

    @Feature: Numbers Generator

    @Assert: An integer number is not created due to value error
    """
    # A whitespace bound must be rejected.
    with self.assertRaises(ValueError):
        gen_integer(max_value=' ')
def test_missing_value_error(self):
    """Raise a :class:`nailgun.entity_mixins.MissingValueError`."""
    entity = self.test_entity(config.ServerConfig('example.com'))
    # Each attrs dict is missing exactly one FK field the entity needs.
    attrs_iter = (
        {
            'id': gen_integer(min_value=1),
            'none': None,
            'one_id': gen_integer(min_value=1),
        },
        {
            'id': gen_integer(min_value=1),
            'many_ids': [gen_integer(min_value=1)],
            'none': None,
        },
    )
    for attrs in attrs_iter:
        with self.subTest(attrs):
            with mock.patch.object(entity, 'read_json') as read_json:
                read_json.return_value = attrs
                with self.assertRaises(entity_mixins.MissingValueError):
                    entity.read(ignore={'ignore_me'})
def test_gen_integer_5(self):
    """
    @Test: Create a random integer with disallowed minimum limit

    @Feature: Numbers Generator

    @Assert: An integer number is not created due to value error
    """
    # One below the allowed platform minimum of -sys.maxsize - 1.
    below_min = -sys.maxsize - 2
    with self.assertRaises(ValueError):
        gen_integer(min_value=below_min)
def test_gen_integer_6(self):
    """
    @Test: Create a random integer with disallowed maximum limit

    @Feature: Numbers Generator

    @Assert: An integer number is not created due to value error
    """
    # One above the allowed platform maximum of sys.maxsize.
    above_max = sys.maxsize + 1
    with self.assertRaises(ValueError):
        gen_integer(max_value=above_max)
def test_basic_without_using_shared_data(self):
    """Ensure that when USE_SHARED_DATA is false the function is called
    and does not use the cached data.
    """
    enable_shared_function(False)
    start = gen_integer(min_value=1, max_value=10000)
    step = gen_integer(min_value=1, max_value=10000)
    first_result = basic_shared_counter(index=start, increment_by=step)
    self.assertEqual(first_result, start + step)
    # With sharing disabled, a second call computes a fresh value
    # instead of returning the cached first result.
    second_result = basic_shared_counter(
        index=first_result, increment_by=step)
    self.assertEqual(second_result, first_result + step)
def test_update(self):
    """Test :meth:`nailgun.entity_mixins.EntityUpdateMixin.update`."""
    class EntityWithUpdateRead(
            EntityWithUpdate, entity_mixins.EntityReadMixin):
        """An entity that can be updated and read."""

    readable = EntityWithUpdateRead(
        config.ServerConfig('example.com'), id=gen_integer())
    with mock.patch.object(readable, 'update_json') as update_json:
        update_json.return_value = gen_integer()
        with mock.patch.object(readable, 'read') as read:
            readable.update()
    # `update` must call `update_json` once and feed its result to `read`.
    self.assertEqual(update_json.call_count, 1)
    self.assertEqual(read.call_count, 1)
    self.assertEqual(read.call_args[1]['attrs'], update_json.return_value)
def test_positive_create_default_value_with_list(self):
    """Create variable with matching list validator

    :id: 6bc2caa0-1300-4751-8239-34b96517465b

    :steps:

        1. Create variable with default value that matches the list
           validator of step 2
        2. Validate this value with list validator type and rule

    :expectedresults: Variable is created for matching value with list

    :CaseImportance: Critical
    """
    # Generate a list of candidate values.
    values_list = [
        gen_string('alpha'),
        gen_string('alphanumeric'),
        gen_integer(min_value=100),
        choice(['true', 'false']),
    ]
    # The validator rule is the comma-joined string form of that list.
    values_list_str = ", ".join(str(x) for x in values_list)
    value = choice(values_list)
    smart_variable = entities.SmartVariable(
        puppetclass=self.puppet_class,
        default_value=value,
        validator_type='list',
        validator_rule=values_list_str,
    ).create()
    self.assertEqual(smart_variable.default_value, str(value))
    self.assertEqual(smart_variable.validator_type, 'list')
    self.assertEqual(smart_variable.validator_rule, values_list_str)
def test_repr_v3(self):
    """Test method ``nailgun.entity_mixins.Entity.__repr__``.

    Assert that ``__repr__`` works correctly when one entity has a
    foreign key relationship to a second entity.
    """
    entity_id = gen_integer()
    target = (
        'tests.test_entity_mixins.SampleEntityTwo('
        'one_to_many=[tests.test_entity_mixins.SampleEntity(id={0})]'
        ')'
        .format(entity_id)
    )
    entity = SampleEntityTwo(
        self.cfg,
        one_to_many=[SampleEntity(self.cfg, id=entity_id)],
    )
    self.assertEqual(repr(entity), target)
    # Create a default config if one does not exist, so the eval'd repr
    # below can construct entities without an explicit server config.
    try:
        config.ServerConfig.get()
    except (KeyError, config.ConfigFileError):
        self.cfg.save()
    import nailgun  # noqa pylint:disable=unused-variable
    import tests  # noqa pylint:disable=unused-variable
    # pylint:disable=eval-used
    self.assertEqual(repr(eval(repr(entity))), target)
def test_positive_add_repo_from_product_with_repos(self):
    """@test: Create gpg key via file import and associate with custom repo

    GPGKey should contain valid name and valid key and should be
    associated to one repository from custom product. Make sure custom
    product should have more than one repository.

    @feature: GPG Keys

    @assert: gpg key is associated with the repository
    """
    product = make_product({'organization-id': self.org['id']})
    repos = [
        make_repository({'product-id': product['id']})
        for _ in range(gen_integer(2, 5))
    ]
    gpg_key = make_gpg_key({'organization-id': self.org['id']})
    # Associate the key with the first repository only.
    Repository.update({
        'gpg-key': gpg_key['name'],
        'id': repos[0]['id'],
        'organization-id': self.org['id'],
    })
    product = Product.info({
        'id': product['id'],
        'organization-id': self.org['id'],
    })
    # The product itself must not pick up the key.
    self.assertNotEqual(product['gpg'].get('gpg-key-id'), gpg_key['id'])
    # First repo should have a valid gpg key assigned
    repo = Repository.info({'id': repos.pop(0)['id']})
    self.assertEqual(repo['gpg-key']['id'], gpg_key['id'])
    # The rest of repos should not
    for repo in repos:
        repo = Repository.info({'id': repo['id']})
        self.assertNotEqual(repo['gpg-key'].get('id'), gpg_key['id'])
def test_negative_create_matcher_empty_value(self):
    """Create matcher with empty value with type other than string

    :id: ad24999f-1bed-4abb-a01f-3cb485d67968

    :steps: Create a matcher for variable with empty value and type any
        other than string

    :expectedresults: Matcher is not created for empty value

    :CaseImportance: Critical
    """
    smart_variable = entities.SmartVariable(
        puppetclass=self.puppet_class,
        default_value=gen_integer(),
        variable_type='integer',
        override_value_order='is_virtual',
    ).create()
    # Creating a matcher with an empty value for an integer variable
    # must be rejected by the server.
    with self.assertRaises(HTTPError) as context:
        entities.OverrideValue(
            smart_variable=smart_variable,
            match='is_virtual=true',
            value='',
        ).create()
    self.assertEqual(len(smart_variable.read().override_values), 0)
    self.assertRegexpMatches(
        context.exception.response.text,
        "Validation failed: Value is invalid integer"
    )
def test_positive_update_key(self):
    """@test: Create gpg key with valid name and valid gpg key via file
    import then update its gpg key file

    @feature: GPG Keys

    @assert: gpg key is updated
    """
    gpg_key = make_gpg_key({'organization-id': self.org['id']})
    content = gen_alphanumeric(gen_integer(20, 50))
    self.assertNotEqual(gpg_key['content'], content)
    # Build a local key file with the new content and push it to the
    # server before updating the key record.
    local_key = create_gpg_key_file(content)
    self.assertIsNotNone(local_key, 'GPG Key file must be created')
    key = '/tmp/%s' % gen_alphanumeric()
    ssh.upload_file(local_file=local_key, remote_file=key)
    GPGKey.update({
        'key': key,
        'name': gpg_key['name'],
        'organization-id': self.org['id'],
    })
    gpg_key = GPGKey.info({
        'name': gpg_key['name'],
        'organization-id': self.org['id'],
    })
    self.assertEqual(gpg_key['content'], content)
def test_positive_create_with_hosts_limit(self):
    """Create Discovery Rule providing any number from range 1..100 for
    hosts limit field

    :id: 64b90586-c1a9-4be4-8c44-4fa19ca998f8

    :expectedresults: Rule should be successfully created and has expected
        hosts limit field value

    :CaseImportance: Critical
    """
    name = gen_string('alpha')
    limit = str(gen_integer(1, 100))
    with Session(self) as session:
        make_discoveryrule(
            session,
            name=name,
            hostgroup=self.host_group.name,
            host_limit=limit,
            locations=[self.session_loc.name],
        )
        self.assertIsNotNone(self.discoveryrules.search(name))
        self.assertEqual(
            self.discoveryrules.get_attribute_value(name, 'host_limit'),
            limit,
        )
def test_positive_create_with_priority(self):
    """Create Discovery Rule providing any number from range 1..100 for
    priority field

    :id: de847288-257a-4f0e-9cb6-9a0dd0877d23

    :expectedresults: Rule should be successfully created and has expected
        priority field value

    :CaseImportance: Critical
    """
    name = gen_string('alpha')
    priority = str(gen_integer(1, 100))
    with Session(self) as session:
        make_discoveryrule(
            session,
            name=name,
            hostgroup=self.host_group.name,
            priority=priority,
            locations=[self.session_loc.name],
        )
        self.assertIsNotNone(self.discoveryrules.search(name))
        self.assertEqual(
            self.discoveryrules.get_attribute_value(name, 'priority'),
            priority,
        )
def test_delete_manifest(self, http_status_code):
    """Call :meth:`robottelo.entities.Organization.delete_manifest`.

    Assert that ``Organization.delete_manifest`` returns a dictionary
    when an HTTP 202 or some other success status code is returned.
    """
    # `client.post` will return this.
    post_return = mock.Mock()
    post_return.status_code = http_status_code
    post_return.raise_for_status.return_value = None
    post_return.json.return_value = {'id': gen_integer()}  # mock task ID
    # Start by patching `client.post` and `ForemanTask.poll`...
    # NOTE: Python 3 allows for better nested context managers.
    with mock.patch.object(client, 'post') as client_post:
        client_post.return_value = post_return
        with mock.patch.object(entities.ForemanTask, 'poll') as ft_poll:
            ft_poll.return_value = {}
            # ... then see if `delete_manifest` acts correctly.
            for synchronous in (True, False):
                reply = entities.Organization(
                    self.server_config, id=self.entity_id
                ).delete_manifest(synchronous)
                self.assertIsInstance(reply, dict)
def test_positive_add_product_with_repos(self):
    """@test: Create gpg key with valid name and valid gpg key via file
    import then associate it with custom product that has more than one
    repository

    @feature: GPG Keys

    @assert: gpg key is associated with product as well as with the
    repositories
    """
    product = make_product({'organization-id': self.org['id']})
    repos = [
        make_repository({'product-id': product['id']})
        for _ in range(gen_integer(2, 5))
    ]
    gpg_key = make_gpg_key({'organization-id': self.org['id']})
    Product.update({
        'gpg-key': gpg_key['name'],
        'id': product['id'],
        'organization-id': self.org['id'],
    })
    product = Product.info({
        'id': product['id'],
        'organization-id': self.org['id'],
    })
    # The key must be associated with the product...
    self.assertEqual(product['gpg']['gpg-key-id'], gpg_key['id'])
    # ...and inherited by every repository of that product.
    for repo in repos:
        repo = Repository.info({'id': repo['id']})
        self.assertEqual(repo['gpg-key']['id'], gpg_key['id'])
def test_negative_update_priority(self):
    """Update discovery rule priority using invalid values

    @id: d44ad49c-5d95-442f-a1b3-cd82dd8ffabf

    @Assert: Rule priority is not updated
    """
    name = gen_string('alpha')
    priority = str(gen_integer(1, 100))
    with Session(self.browser) as session:
        make_discoveryrule(
            session,
            name=name,
            hostgroup=self.host_group.name,
            priority=priority,
        )
        self.assertIsNotNone(self.discoveryrules.search(name))
        # Each invalid value must produce an error and leave the
        # original priority untouched.
        for new_priority in '-1', gen_string('alpha'):
            with self.subTest(new_priority):
                self.discoveryrules.update(
                    name=name, priority=new_priority)
                self.assertIsNotNone(
                    self.discoveryrules.wait_until_element(
                        common_locators['haserror'])
                )
        self.assertEqual(
            self.discoveryrules.get_attribute_value(name, 'priority'),
            priority,
        )
def test_negative_update_limit(self):
    """Update discovery rule host limit using invalid values

    @id: 7e8b7218-3c8a-4b03-b0df-484e0d793ceb

    @Assert: Rule host limit is not updated
    """
    name = gen_string('alpha')
    limit = str(gen_integer(1, 100))
    with Session(self.browser) as session:
        make_discoveryrule(
            session,
            name=name,
            hostgroup=self.host_group.name,
            host_limit=limit,
        )
        self.assertIsNotNone(self.discoveryrules.search(name))
        # Each invalid value must produce an error and leave the
        # original host limit untouched.
        for new_limit in '-1', gen_string('alpha'):
            with self.subTest(new_limit):
                self.discoveryrules.update(
                    name=name, host_limit=new_limit)
                self.assertIsNotNone(
                    self.discoveryrules.wait_until_element(
                        common_locators['haserror'])
                )
        self.assertEqual(
            self.discoveryrules.get_attribute_value(name, 'host_limit'),
            limit,
        )
def generate_strings_list(length=None, exclude_types=None, bug_id=None,
                          min_length=3, max_length=30):
    """Generates a list of different input strings.

    :param int length: Specifies the length of the strings to be
        generated. If ``length`` is ``None`` then the list is returned
        with strings of random length.
    :param exclude_types: Specify a list of data types to be removed from
        the generated list. example: exclude_types=['html', 'cjk']
    :param int bug_id: Specify any bug id that is associated with the
        datapoints specified in ``exclude_types``. This will be used only
        when ``exclude_types`` is populated.
    :param int min_length: Minimum length to be used in integer generator
    :param int max_length: Maximum length to be used in integer generator
    :returns: A list of various string types.

    """
    if length is None:
        length = gen_integer(min_length, max_length)
    strings = {
        str_type: gen_string(str_type, length)
        for str_type in STRING_TYPES
    }
    # Handle no bug_id: if some entity doesn't support a str_type, remove
    # that str_type from the dictionary only if the bug is still open.
    if exclude_types and (bug_id is None or bz_bug_is_open(bug_id)):
        for item in exclude_types:
            strings.pop(item, None)
    return list(strings.values())
def new_chargeback_rate(appliance, include_variable_rates=True):
    """Create a new Chargeback compute rate and return it."""
    def rand_float_str():
        # Random float in [0, 20) rounded to 2 decimals, as a string.
        return str(round(random() * fauxfactory.gen_integer(1, 20), 2))

    def gen_var_rate():
        # Variable rates are optional; 0 disables them.
        return rand_float_str() if include_variable_rates else 0

    description = 'custom_rate_' + fauxfactory.gen_alphanumeric()
    data = {
        'Used CPU Cores': {
            'per_time': 'Hourly',
            'fixed_rate': fauxfactory.gen_integer(1, 4),
            'variable_rate': gen_var_rate(),
        },
        'Fixed Compute Cost 1': {
            'per_time': 'Hourly',
            'fixed_rate': rand_float_str(),
        },
        'Fixed Compute Cost 2': {
            'per_time': 'Hourly',
            'fixed_rate': rand_float_str(),
        },
        'Used Memory': {
            'per_time': 'Hourly',
            'fixed_rate': rand_float_str(),
            'variable_rate': gen_var_rate(),
        },
        'Used Network I/O': {
            'per_time': 'Hourly',
            'fixed_rate': rand_float_str(),
            'variable_rate': gen_var_rate(),
        },
    }
    ccb = rates.ComputeRate(description, fields=data, appliance=appliance)
    ccb.create()
    return ccb
def test_negative_update_content_source(self):
    """Attempt to update hostgroup's content source with invalid value

    :id: 4ffe6d18-3899-4bf1-acb2-d55ea09b7a26

    :BZ: 1260697, 1313056

    :expectedresults: Host group was not updated. Content source remains
        the same as it was before update

    :CaseImportance: Medium
    """
    content_source = Proxy.list({
        'search': 'url = https://{0}:9090'.format(settings.server.hostname)
    })[0]
    hostgroup = make_hostgroup({
        'content-source-id': content_source['id'],
        'organization-ids': self.org['id'],
    })
    # Updating with a bogus proxy id must fail...
    with self.assertRaises(CLIBaseError):
        HostGroup.update({
            'id': hostgroup['id'],
            'content-source-id': gen_integer(10000, 99999),
        })
    # ...and must leave the content source untouched.
    hostgroup = HostGroup.info({'id': hostgroup['id']})
    self.assertEqual(
        hostgroup['content-source']['name'], content_source['name'])
def test_positive_check_ignore_facts_os_setting(session, vm, module_org,
                                                request):
    """Verify that 'Ignore facts for operating system' setting works
    properly

    :steps:

        1. Create a new host entry using content host self registration
           procedure
        2. Check that there is a new setting added "Ignore facts for
           operating system", and set it to true.
        3. Upload the facts that were read from initial host, but with a
           change in all the operating system fields to a different OS or
           version.
        4. Verify that the host OS isn't updated.
        5. Set the setting in step 2 to false.
        6. Upload same modified facts from step 3.
        7. Verify that the host OS is updated.
        8. Verify that new OS is created

    :id: 71bed439-105c-4e87-baae-738379d055fb

    :customerscenario: true

    :expectedresults: Host facts impact its own values properly according
        to the setting values

    :BZ: 1155704

    :CaseLevel: System
    """
    major = str(gen_integer(15, 99))
    minor = str(gen_integer(1, 9))
    expected_os = "RedHat {}.{}".format(major, minor)
    set_ignore_facts_for_os(False)
    host = entities.Host().search(query={
        'search': 'name={0} and organization_id={1}'.format(
            vm.hostname, module_org.id)
    })[0].read()
    with session:
        # Get the host's current operating system value.
        os = session.contenthost.read(vm.hostname)['details']['os']
        # Enable the 'ignore facts for operating system' setting.
        set_ignore_facts_for_os(True)
        # Roll the setting back to its default value on cleanup.
        request.addfinalizer(set_ignore_facts_for_os)
        # Read all facts for the corresponding host.
        facts = host.get_facts(
            data={u'per_page': 10000})['results'][vm.hostname]
        # Modify the OS facts to other values and upload them back to
        # the server.
        facts['operatingsystem'] = 'RedHat'
        facts['osfamily'] = 'RedHat'
        facts['operatingsystemmajrelease'] = major
        facts['operatingsystemrelease'] = "{}.{}".format(major, minor)
        host.upload_facts(data={
            u'name': vm.hostname,
            u'facts': facts,
        })
        session.contenthost.search('')
        updated_os = session.contenthost.read(vm.hostname)['details']['os']
        # The host OS must not have changed while the setting was true.
        assert os == updated_os
        # Set the setting to false and re-run the process.
        set_ignore_facts_for_os(False)
        host.upload_facts(data={
            u'name': vm.hostname,
            u'facts': facts,
        })
        session.contenthost.search('')
        updated_os = session.contenthost.read(vm.hostname)['details']['os']
        # Now the host OS must have changed to the new value...
        assert os != updated_os
        assert updated_os == expected_os
        # ...and the new OS must have been created.
        assert session.operatingsystem.search(
            expected_os)[0]['Title'] == expected_os
def _good_max_hosts():
    """Return a list of valid ``max_hosts`` values."""
    limit_ranges = ((1, 20), (10000, 20000))
    return [gen_integer(low, high) for low, high in limit_ranges]
def _bad_max_hosts():
    """Return a list of invalid ``max_hosts`` values."""
    return [
        gen_integer(-100, -1),  # negative
        0,  # zero is not a valid limit
        gen_string('alpha'),  # non-numeric
    ]
def valid_http_credentials(url_encoded=False):
    """Returns a list of valid credentials for HTTP authentication

    The credentials dictionary contains the following keys:
    login - a username
    pass - a password
    quote - a Bool flag stating whether the credentials include special
        chars
    http_valid - a Bool flag stating whether the HTTP authentication will
        pass successfully on the server

    :param url_encoded: flag for quoting special characters
    :return: A list of dictionaries with user and password credentials
    """
    credentials = [
        {
            u'login': '******',
            u'pass': '******',
            u'quote': False,
            u'http_valid': True,
        },
        {
            u'login': '******',
            u'pass': '******',
            u'quote': True,
            u'http_valid': True,
        },
        {
            u'login': '******',
            u'pass': '******',
            u'quote': False,
            u'http_valid': True,
        },
        {
            u'login': '******',
            u'pass': '******',
            u'quote': True,
            u'http_valid': True,
        },
        {
            u'login': '******',
            u'pass': '******',
            u'quote': False,
            u'http_valid': True,
        },
        {
            u'login': '******',
            u'pass': '******',
            u'quote': True,
            u'http_valid': True,
        },
        {
            u'login': '******',
            u'pass': '******',
            u'quote': True,
            u'http_valid': True,
        },
        {
            u'login': '******',
            u'pass': '******',
            u'quote': False,
            u'http_valid': True,
        },
        {
            u'login': '******',
            u'pass': '******',
            u'quote': False,
            u'http_valid': True,
        },
        {
            u'login': '******',
            u'pass': '',
            u'quote': False,
            u'http_valid': False,
        },
        {
            u'login': '',
            u'pass': '******',
            u'quote': False,
            u'http_valid': False,
        },
        {
            u'login': '',
            u'pass': '',
            u'quote': False,
            u'http_valid': False,
        },
        {
            u'login': gen_string('alpha', gen_integer(1, 512)),
            u'pass': gen_string('alpha'),
            u'quote': False,
            u'http_valid': False,
        },
        {
            u'login': gen_string('alphanumeric', gen_integer(1, 512)),
            u'pass': gen_string('alphanumeric'),
            u'quote': False,
            u'http_valid': False,
        },
        {
            u'login': gen_string('utf8', gen_integer(1, 50)),
            u'pass': gen_string('utf8'),
            u'quote': True,
            u'http_valid': False,
        },
    ]
    if not url_encoded:
        return credentials
    # URL-encode the login and password of every credential pair.
    return [
        {
            u'login': quote_plus(cred['login'].encode('utf-8'), ''),
            u'pass': quote_plus(cred['pass'].encode('utf-8'), ''),
            u'http_valid': cred['http_valid'],
        }
        for cred in credentials
    ]
def setUp(self):  # noqa pylint:disable=C0103
    """Back up objects and generate common values."""
    # Back up the module-level callables so tearDown can restore them.
    self.rm_backup = robozilla_decorators._get_redmine_bug_status_id
    self.stat_backup = robozilla_decorators._redmine_closed_issue_statuses
    # Stub the closed-statuses lookup with a fixed answer.
    robozilla_decorators._redmine_closed_issue_statuses = lambda: [1, 2]
    self.bug_id = gen_integer()
from fauxfactory import gen_alphanumeric, gen_integer import pytest from cfme.containers.provider import ContainersProvider from cfme.exceptions import FlashMessageException from cfme.web_ui import flash from utils import testgen from utils.version import current_version pytestmark = [pytest.mark.uncollectif(lambda: current_version() < "5.8.0.3")] pytest_generate_tests = testgen.generate([ContainersProvider], scope='module') alphanumeric_name = gen_alphanumeric(10) long_alphanumeric_name = gen_alphanumeric(100) integer_name = str(gen_integer(0, 100000000)) provider_names = alphanumeric_name, integer_name, long_alphanumeric_name DEFAULT_SEC_PROTOCOLS = ( pytest.mark.polarion('CMP-10598')('SSL trusting custom CA'), pytest.mark.polarion('CMP-10597')('SSL without validation'), pytest.mark.polarion('CMP-10599')('SSL')) checked_item = namedtuple('TestItem', ['default_sec_protocol', 'hawkular_sec_protocol']) TEST_ITEMS = ( pytest.mark.polarion('CMP-10593')(checked_item('SSL trusting custom CA', 'SSL trusting custom CA')), pytest.mark.polarion('CMP-10594')(checked_item('SSL trusting custom CA', 'SSL without validation')), pytest.mark.polarion('CMP-10589')(checked_item('SSL trusting custom CA', 'SSL')),
def simple_shared_counter_increment_process(index=1):
    """A simple shared function: each call increments ``index`` by a
    newly generated value.
    """
    step = gen_integer(min_value=1, max_value=100)
    return {'index': index + step}
def test_positive_end_to_end(session, module_org, module_loc):
    """Perform end to end testing for discovery rule component.

    :id: dd35e566-dc3a-43d3-939c-a33ae528740f

    :expectedresults: All expected CRUD actions finished successfully

    :CaseImportance: Critical
    """
    # Values used to create the rule.
    rule_name = gen_string('alpha')
    search = 'cpu_count = {0}'.format(gen_integer(1, 5))
    hg_name = gen_string('alpha')
    hostname = gen_string('alpha')
    hosts_limit = str(gen_integer(0, 100))
    priority = str(gen_integer(1, 100))
    # Values used to update the rule.
    new_rule_name = gen_string('alpha')
    new_search = 'cpu_count = {0}'.format(gen_integer(6, 10))
    new_hg_name = gen_string('alpha')
    new_hostname = gen_string('alpha')
    new_hosts_limit = str(gen_integer(101, 200))
    new_priority = str(gen_integer(101, 200))
    entities.HostGroup(
        name=hg_name, organization=[module_org],
        location=[module_loc]).create()
    entities.HostGroup(
        name=new_hg_name, organization=[module_org],
        location=[module_loc]).create()
    new_org = entities.Organization().create()
    new_loc = entities.Location().create()
    with session:
        # Create the rule and verify every field was persisted.
        session.discoveryrule.create({
            'primary.name': rule_name,
            'primary.search': search,
            'primary.host_group': hg_name,
            'primary.hostname': hostname,
            'primary.hosts_limit': hosts_limit,
            'primary.priority': priority,
            'primary.enabled': False,
            'organizations.resources.assigned': [module_org.name],
            'locations.resources.assigned': [module_loc.name],
        })
        values = session.discoveryrule.read(rule_name)
        assert values['primary']['name'] == rule_name
        assert values['primary']['search'] == search
        assert values['primary']['host_group'] == hg_name
        assert values['primary']['hostname'] == hostname
        assert values['primary']['hosts_limit'] == hosts_limit
        assert values['primary']['priority'] == priority
        assert values['primary']['enabled'] is False
        assert values['organizations']['resources']['assigned'] == [
            module_org.name
        ]
        assert values['locations']['resources']['assigned'] == [
            module_loc.name
        ]
        # Update every field and verify the new values.
        session.discoveryrule.update(rule_name, {
            'primary.name': new_rule_name,
            'primary.search': new_search,
            'primary.host_group': new_hg_name,
            'primary.hostname': new_hostname,
            'primary.hosts_limit': new_hosts_limit,
            'primary.priority': new_priority,
            'primary.enabled': True,
            'organizations.resources.assigned': [new_org.name],
            'locations.resources.assigned': [new_loc.name],
        })
        rules = session.discoveryrule.read_all()
        assert rule_name not in [rule['Name'] for rule in rules]
        values = session.discoveryrule.read(new_rule_name)
        assert values['primary']['name'] == new_rule_name
        assert values['primary']['search'] == new_search
        assert values['primary']['host_group'] == new_hg_name
        assert values['primary']['hostname'] == new_hostname
        assert values['primary']['hosts_limit'] == new_hosts_limit
        assert values['primary']['priority'] == new_priority
        assert values['primary']['enabled'] is True
        # Org/location assignments accumulate rather than replace.
        assert {new_org.name, module_org.name} == set(
            values['organizations']['resources']['assigned'])
        assert {new_loc.name, module_loc.name} == set(
            values['locations']['resources']['assigned'])
        # Delete the rule and verify it is gone.
        session.discoveryrule.delete(new_rule_name)
        rules = session.discoveryrule.read_all()
        assert new_rule_name not in [rule['Name'] for rule in rules]
u"ppc", u's390x', ] # https://en.wikipedia.org/wiki/Red_Hat_Enterprise_Linux#Version_history DISTRO_IDS = [ { u'id': u'Maipo', u'version': u'7.0', # There is no 'i386' for RHEL 7 u'architecture': gen_choice(ARCHITECTURES[1:]), u'kernel': u'3.10.0-123.el7' }, { u'id': u'Santiago', u'version': u'6.{0}'.format(gen_integer(1, 5)), u'architecture': gen_choice(ARCHITECTURES), u'kernel': u'2.6.32-431.el6' }, { u'id': u'Tikanga', u'version': u'5.{0}'.format(gen_integer(1, 10)), u'architecture': gen_choice(ARCHITECTURES), u'kernel': u'2.6.18-371.el5' }, { u'id': u'Nahant', u'version': u'4.{0}'.format(gen_integer(1, 9)), # Assuming only 'i386' and 'x86_64' u'architecture': gen_choice(ARCHITECTURES[:2]), u'kernel': u'2.6.9-100.el4'
def setUp(self):
    """Set ``self.entity = EntityWithRead(…)``."""
    self.cfg = config.ServerConfig('example.com')
    entity_id = gen_integer(min_value=1)
    self.entity = EntityWithRead(self.cfg, id=entity_id)
def test_positive_list_host_based_on_rule_search_query(
        session, module_org, module_loc, module_discovery_env):
    """List all the discovered hosts resolved by given rule's search query
    e.g. all discovered hosts with cpu_count = 2, and list rule's
    associated hosts.

    :id: f7473fa2-7349-42d3-9cdb-f74b55d2f440

    :Steps:

        1. discovered host with cpu_count = 2
        2. Define a rule 'rule1' with search query cpu_count = 2
        3. Click on 'Discovered Hosts' from rule1
        4. Auto Provision the discovered host
        5. Click on 'Associated Hosts' from rule1

    :expectedresults:

        1. After step 3, the rule's Discovered host should be listed.
        2. The rule's Associated Host should be listed.
    """
    ip_address = gen_ipaddr()
    cpu_count = gen_integer(2, 10)
    rule_search = 'cpu_count = {0}'.format(cpu_count)
    # Create a host so this org is guaranteed to have more than one host.
    host = entities.Host(
        organization=module_org, location=module_loc).create()
    host_group = entities.HostGroup(
        organization=[module_org],
        location=[module_loc],
        medium=host.medium,
        root_pass=gen_string('alpha'),
        operatingsystem=host.operatingsystem,
        ptable=host.ptable,
        domain=host.domain,
        architecture=host.architecture,
    ).create()
    discovery_rule = entities.DiscoveryRule(
        hostgroup=host_group,
        search_=rule_search,
        organization=[module_org],
        location=[module_loc],
    ).create()
    discovered_host = create_discovered_host(
        ip_address=ip_address,
        options={'physicalprocessorcount': cpu_count},
    )
    # Create another discovered host with a different cpu count.
    create_discovered_host(
        options={'physicalprocessorcount': cpu_count + 1})
    provisioned_host_name = '{0}.{1}'.format(
        discovered_host['name'], host.domain.read().name)
    with session:
        session.organization.select(org_name=module_org.name)
        session.location.select(loc_name=module_loc.name)
        values = session.discoveryrule.read_all()
        assert discovery_rule.name in [rule['Name'] for rule in values]
        # Only the matching discovered host must be listed for the rule.
        values = session.discoveryrule.read_discovered_hosts(
            discovery_rule.name)
        assert values['searchbox'] == rule_search
        assert len(values['table']) == 1
        assert values['table'][0]['IP Address'] == ip_address
        assert values['table'][0]['CPUs'] == str(cpu_count)
        # Auto provision the discovered host.
        session.discoveredhosts.apply_action(
            'Auto Provision', [discovered_host['name']])
        assert not session.discoveredhosts.search(
            'name = "{0}"'.format(discovered_host['name']))
        # The provisioned host must now appear as an associated host.
        values = session.discoveryrule.read_associated_hosts(
            discovery_rule.name)
        assert (values['searchbox'] == 'discovery_rule = "{0}"'.format(
            discovery_rule.name))
        assert len(values['table']) == 1
        assert values['table'][0]['Name'] == provisioned_host_name
        values = session.host.get_details(provisioned_host_name)
        assert (values['properties']['properties_table']['IP Address'] ==
                ip_address)
def setUp(self):
    """Set ``self.entity = EntityWithCreate(…)``."""
    server_cfg = config.ServerConfig('example.com')
    self.entity = EntityWithCreate(server_cfg, id=gen_integer(min_value=1))