def test_positive_update_interval(self):
    """Create a sync plan and update its interval.

    :id: cf2eddf8-b4db-430e-a9b0-83c626b45068

    :expectedresults: A sync plan is created and its interval can be
        updated with the specified interval.

    :CaseImportance: Critical
    """
    for interval in valid_sync_interval():
        sync_plan = entities.SyncPlan(
            description=gen_string('alpha'),
            organization=self.org,
            interval=interval,
        )
        if interval == SYNC_INTERVAL['custom']:
            sync_plan.cron_expression = gen_choice(valid_cron_expressions())
        sync_plan = sync_plan.create()
        valid_intervals = valid_sync_interval()
        self.assertIn(interval, valid_intervals)
        valid_intervals.remove(interval)
        new_interval = gen_choice(valid_intervals)
        sync_plan.interval = new_interval
        if new_interval == SYNC_INTERVAL['custom']:
            sync_plan.cron_expression = gen_choice(valid_cron_expressions())
            sync_plan = sync_plan.update(['interval', 'cron_expression'])
        else:
            sync_plan = sync_plan.update(['interval'])
        self.assertEqual(sync_plan.interval, new_interval)

def test_positive_CRUD(default_os):
    """Create a new Architecture with several attributes, update the name
    and delete the Architecture itself.

    :id: 80bca2c0-a6a1-4676-a036-bd918812d600

    :expectedresults: Architecture should be created, modified and deleted
        successfully with given attributes.

    :CaseImportance: Critical
    """
    # Create
    name = gen_choice(list(valid_data_list().values()))
    arch = entities.Architecture(name=name, operatingsystem=[default_os]).create()
    assert {default_os.id} == {os.id for os in arch.operatingsystem}
    assert name == arch.name

    # Update
    name = gen_choice(list(valid_data_list().values()))
    arch = entities.Architecture(id=arch.id, name=name).update(['name'])
    assert name == arch.name

    # Delete
    arch.delete()
    with pytest.raises(HTTPError):
        arch.read()

def generate_system_facts(name=None):
    """Generate random system facts for registration.

    :param str name: A valid FQDN for a system. If one is not provided,
        then a random value will be generated.
    :return: A dictionary with random system facts
    :rtype: dict
    """
    if name is None:
        name = f'{gen_alpha().lower()}.example.net'

    # Make a copy of the system facts 'template'
    new_facts = copy.deepcopy(SYSTEM_FACTS)
    # Select a random RHEL version...
    distro = gen_choice(DISTRO_IDS)

    # ...and update our facts
    new_facts['distribution.id'] = distro['id']
    new_facts['distribution.version'] = distro['version']
    # note the 'relase_date' spelling, kept as-is to match the template key
    new_facts['dmi.bios.relase_date'] = _bios_date().strftime('%m/%d/%Y')
    new_facts['dmi.memory.maximum_capacity'] = gen_choice(MEMORY_CAPACITY)
    new_facts['dmi.memory.size'] = gen_choice(MEMORY_SIZE)
    new_facts['dmi.system.uuid'] = gen_uuid()
    new_facts['dmi.system.version'] = 'RHEL'
    new_facts['lscpu.architecture'] = distro['architecture']
    new_facts['net.interface.eth1.hwaddr'] = gen_mac(multicast=False)
    new_facts['net.interface.eth1.ipaddr'] = gen_ipaddr()
    new_facts['network.hostname'] = name
    new_facts['network.ipaddr'] = new_facts['net.interface.eth1.ipaddr']
    new_facts['uname.machine'] = distro['architecture']
    new_facts['uname.nodename'] = name
    new_facts['uname.release'] = distro['kernel']
    new_facts['virt.uuid'] = new_facts['dmi.system.uuid']

    return new_facts

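# A minimal usage sketch for the helper above; the FQDN is illustrative.
# The assertions only restate relationships visible in the function body:
facts = generate_system_facts('client01.example.net')
assert facts['network.hostname'] == 'client01.example.net'
assert facts['uname.nodename'] == facts['network.hostname']
assert facts['virt.uuid'] == facts['dmi.system.uuid']
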
def test_positive_update_interval(module_org, interval):
    """Create a sync plan and update its interval.

    :id: cf2eddf8-b4db-430e-a9b0-83c626b45068

    :parametrized: yes

    :expectedresults: A sync plan is created and its interval can be
        updated with the specified interval.

    :CaseImportance: Critical
    """
    sync_plan = entities.SyncPlan(
        description=gen_string('alpha'),
        organization=module_org,
        interval=interval,
    )
    if interval == SYNC_INTERVAL['custom']:
        sync_plan.cron_expression = gen_choice(valid_cron_expressions())
    sync_plan = sync_plan.create()
    # get another random interval and work around issue #7231
    new_interval = gen_choice(valid_sync_interval())
    while new_interval == interval:
        new_interval = gen_choice(valid_sync_interval())
    sync_plan.interval = new_interval
    if new_interval == SYNC_INTERVAL['custom']:
        sync_plan.cron_expression = gen_choice(valid_cron_expressions())
        sync_plan = sync_plan.update(['interval', 'cron_expression'])
    else:
        sync_plan = sync_plan.update(['interval'])
    sync_plan = sync_plan.read()
    assert sync_plan.interval == new_interval

def test_positive_update_interval(self):
    """Create a sync plan and update its interval.

    :id: cf2eddf8-b4db-430e-a9b0-83c626b45068

    :expectedresults: A sync plan is created and its interval can be
        updated with the specified interval.

    :CaseImportance: Critical
    """
    for interval in valid_sync_interval():
        sync_plan = entities.SyncPlan(
            description=gen_string('alpha'), organization=self.org, interval=interval
        )
        if interval == SYNC_INTERVAL['custom']:
            sync_plan.cron_expression = gen_choice(valid_cron_expressions())
        sync_plan = sync_plan.create()
        valid_intervals = valid_sync_interval()
        valid_intervals.remove(interval)
        new_interval = gen_choice(valid_intervals)
        sync_plan.interval = new_interval
        if new_interval == SYNC_INTERVAL['custom']:
            sync_plan.cron_expression = gen_choice(valid_cron_expressions())
            sync_plan = sync_plan.update(['interval', 'cron_expression'])
        else:
            sync_plan = sync_plan.update(['interval'])
        self.assertEqual(sync_plan.interval, new_interval)

def test_positive_update_interval(module_org, interval):
    """Create a sync plan and update its interval.

    :id: cf2eddf8-b4db-430e-a9b0-83c626b45068

    :parametrized: yes

    :expectedresults: A sync plan is created and its interval can be
        updated with the specified interval.

    :CaseImportance: Critical
    """
    sync_plan = entities.SyncPlan(
        description=gen_string('alpha'),
        organization=module_org,
        interval=interval,
    )
    if interval == SYNC_INTERVAL['custom']:
        sync_plan.cron_expression = gen_choice(valid_cron_expressions())
    sync_plan = sync_plan.create()
    # ensure "new interval" not equal to "interval"
    new_interval = 'hourly' if interval != 'hourly' else 'daily'
    sync_plan.interval = new_interval
    if new_interval == SYNC_INTERVAL['custom']:
        sync_plan.cron_expression = gen_choice(valid_cron_expressions())
        sync_plan = sync_plan.update(['interval', 'cron_expression'])
    else:
        sync_plan = sync_plan.update(['interval'])
    sync_plan = sync_plan.read()
    assert sync_plan.interval == new_interval

def test_positive_end_to_end_crud(module_org, module_location, module_hostgroup):
    """Create a new discovery rule with several attributes, update them
    and delete the rule itself.

    :id: 25366930-b7f4-4db8-a9c3-a470fe4f3583

    :expectedresults: Rule should be created, modified and deleted
        successfully with given attributes.

    :CaseImportance: Critical
    """
    # Create discovery rule
    searches = [
        'CPU_Count = 1',
        'disk_count < 5',
        'memory > 500',
        'model = KVM',
        'Organization = Default_Organization',
    ]
    name = gen_choice(list(valid_data_list().values()))
    search = gen_choice(searches)
    # ERB template, expanded server-side when naming discovered hosts
    hostname = 'myhost-<%= rand(99999) %>'
    discovery_rule = entities.DiscoveryRule(
        name=name,
        search_=search,
        hostname=hostname,
        organization=[module_org],
        location=[module_location],
        hostgroup=module_hostgroup,
    ).create()
    assert name == discovery_rule.name
    assert hostname == discovery_rule.hostname
    assert search == discovery_rule.search_
    assert module_org.name == discovery_rule.organization[0].read().name
    assert module_location.name == discovery_rule.location[0].read().name
    assert discovery_rule.enabled is True

    # Update discovery rule
    name = gen_choice(list(valid_data_list().values()))
    search = 'Location = Default_Location'
    max_count = gen_integer(1, 100)
    enabled = False
    discovery_rule.name = name
    discovery_rule.search_ = search
    discovery_rule.max_count = max_count
    discovery_rule.enabled = enabled
    discovery_rule = discovery_rule.update(['name', 'search_', 'max_count', 'enabled'])
    assert name == discovery_rule.name
    assert search == discovery_rule.search_
    assert max_count == discovery_rule.max_count
    assert enabled == discovery_rule.enabled

    # Delete discovery rule
    discovery_rule.delete()
    with pytest.raises(HTTPError):
        discovery_rule.read()

def test_gen_choice_13(self):
    """
    @Test: Cannot use None for Choice generator
    @Feature: Choice Generator
    @Assert: ValueError is raised
    """
    choices = None
    with self.assertRaises(ValueError):
        gen_choice(choices)

def test_gen_choice_10(self):
    """
    @Test: Select a random value from single dictionary
    @Feature: Choice Generator
    @Assert: No choice from single dictionary
    """
    choices = {'Name': 'Bob', 'Age': 39}
    with self.assertRaises(ValueError):
        gen_choice(choices)

def test_gen_choice_8(self):
    """
    @Test: Select a random value from empty tuple
    @Feature: Choice Generator
    @Assert: No choice from empty tuple
    """
    choices = ()
    with self.assertRaises(ValueError):
        gen_choice(choices)

def test_gen_choice_9(self):
    """
    @Test: Select a random value from empty dictionary
    @Feature: Choice Generator
    @Assert: No choice from empty dictionary
    """
    choices = {}
    with self.assertRaises(ValueError):
        gen_choice(choices)

def test_gen_choice_11(self):
    """
    @Test: Select a random value from dictionary list
    @Feature: Choice Generator
    @Assert: Selects a value from a list of dictionaries
    """
    choices = [
        {'Name': 'Bob', 'Age': 39},
        {'Name': 'Alice', 'Age': 23},
        {'Name': 'Pete', 'Age': 79},
    ]
    for turn in range(10):
        result = gen_choice(choices)
        self.assertIn(
            result,
            choices,
            "An invalid value was selected from available choices.")

def test_create_cvf_with_different_names(self, name):
    """Test: Create new content view filter and assign it to existing
    content view by id. Use different value types as a name and random
    filter content type as a parameter for this filter

    @Feature: Content View Filter

    @Assert: Content view filter created successfully and has correct
    and expected parameters
    """
    filter_content_type = gen_choice(['rpm', 'package_group', 'erratum'])
    result = ContentView.filter_create({
        'content-view-id': self.content_view['id'],
        'type': filter_content_type,
        'name': name,
    })
    self.assertEqual(result.return_code, 0)
    self.assertEqual(len(result.stderr), 0)
    result = ContentView.filter_info({
        u'content-view-id': self.content_view['id'],
        u'name': name,
    })
    self.assertEqual(result.return_code, 0)
    self.assertEqual(result.stdout['name'], name)
    self.assertEqual(result.stdout['type'], filter_content_type)

def test_post_sync_plan_migration(pre_upgrade_data):
    """Post-upgrade scenario that tests that existing sync plans keep
    working as expected after a Satellite upgrade that migrates from
    Pulp to Katello.

    :id: 61f65f5d-351c-4aa4-83dc-71afae5dc1e0

    :steps:
        1. Verify the sync plan exists and works as before

    :expectedresults: Post upgrade, sync plans exist and work as before.
    """
    org = entities.Organization(id=pre_upgrade_data.get('org_id'))
    product = entities.Product(id=pre_upgrade_data.get("product_id")).read()
    sync_plan = entities.SyncPlan(
        id=pre_upgrade_data.get("sync_plan_id"), organization=org
    ).read()
    assert product.sync_plan.id == sync_plan.id
    assert sync_plan.name == pre_upgrade_data.get("sync_plan_name")
    assert sync_plan.interval == pre_upgrade_data.get("interval")
    assert sync_plan.sync_date == pre_upgrade_data.get("sync_date")
    # checking sync plan update on upgraded satellite
    sync_plan.interval = SYNC_INTERVAL['custom']
    sync_plan.cron_expression = gen_choice(valid_cron_expressions())
    assert (
        sync_plan.update(['interval', 'cron_expression']).interval
        == SYNC_INTERVAL['custom']
    )
    # checking sync plan delete on upgraded satellite
    sync_plan.delete()
    product = product.read()
    assert product.sync_plan is None
    with raises(HTTPError):
        sync_plan.read()

def create_missing(self):
    """Automagically populate all required instance attributes.

    Iterate through the set of all required class
    :class:`nailgun.entity_fields.Field` defined on ``type(self)`` and
    create a corresponding instance attribute if none exists. Subclasses
    should override this method if there is some relationship between
    two required fields.

    :return: Nothing. This method relies on side-effects.
    """
    for field_name, field in self.get_fields().items():
        if field.required and not hasattr(self, field_name):
            # Most `gen_value` methods return a value such as an integer,
            # string or dictionary, but OneTo{One,Many}Field.gen_value
            # returns the referenced class.
            if hasattr(field, 'default'):
                value = field.default
            elif hasattr(field, 'choices'):
                value = gen_choice(field.choices)
            elif isinstance(field, OneToOneField):
                value = field.gen_value()(self._server_config).create(True)
            elif isinstance(field, OneToManyField):
                value = [field.gen_value()(self._server_config).create(True)]
            else:
                value = field.gen_value()
            setattr(self, field_name, value)

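# The branch order in create_missing() matters: an explicit `default` wins
# over `choices`, which wins over generated values. A minimal sketch of
# that dispatch with a toy field type (ToyField and pick_value are
# illustrative names, not nailgun's API; the OneTo{One,Many} branches are
# omitted):
from fauxfactory import gen_choice

class ToyField:
    def __init__(self, required=True, default=None, choices=None):
        self.required = required
        if default is not None:
            self.default = default   # attributes are only set when given,
        if choices is not None:      # so the hasattr() checks behave as
            self.choices = choices   # in create_missing() above

def pick_value(field):
    if hasattr(field, 'default'):
        return field.default
    if hasattr(field, 'choices'):
        return gen_choice(field.choices)
    raise NotImplementedError('entity-reference branches omitted')

assert pick_value(ToyField(default='weekly')) == 'weekly'
assert pick_value(ToyField(choices=['hourly', 'daily'])) in ['hourly', 'daily']
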
def test_positive_create_with_name_by_cv_id(self):
    """Create new content view filter and assign it to existing content
    view by id. Use different value types as a name and random filter
    content type as a parameter for this filter

    :id: 2cfdf72e-179d-4bba-8aab-288594cac836

    :expectedresults: Content view filter created successfully and has
        correct and expected parameters

    :CaseImportance: Critical
    """
    for name in valid_data_list():
        with self.subTest(name):
            filter_content_type = gen_choice([
                'rpm',
                'package_group',
                'erratum',
            ])
            ContentView.filter.create({
                'content-view-id': self.content_view['id'],
                'name': name,
                'organization-id': self.org['id'],
                'type': filter_content_type,
            })
            cvf = ContentView.filter.info({
                u'content-view-id': self.content_view['id'],
                u'name': name,
            })
            self.assertEqual(cvf['name'], name)
            self.assertEqual(cvf['type'], filter_content_type)

def test_positive_create_with_name_by_cv_id(self):
    """Test: Create new content view filter and assign it to existing
    content view by id. Use different value types as a name and random
    filter content type as a parameter for this filter

    @Feature: Content View Filter

    @Assert: Content view filter created successfully and has correct
    and expected parameters
    """
    for name in valid_data_list():
        with self.subTest(name):
            filter_content_type = gen_choice([
                'rpm',
                'package_group',
                'erratum',
            ])
            ContentView.filter_create({
                'content-view-id': self.content_view['id'],
                'name': name,
                'type': filter_content_type,
            })
            cvf = ContentView.filter_info({
                u'content-view-id': self.content_view['id'],
                u'name': name,
            })
            self.assertEqual(cvf['name'], name)
            self.assertEqual(cvf['type'], filter_content_type)

def create_missing(self, auth=None):
    """Automagically populate all required instance attributes.

    Iterate through the set of all required class ``Field`` defined on
    ``type(self)`` and create a corresponding instance attribute if none
    exists. Subclasses should override this method if there is some
    relationship between two required fields.

    :param tuple auth: Same as :meth:`create_raw`.
    :return: Nothing. This method relies on side-effects.
    """
    for field_name, field in self.get_fields().items():
        if field.required and field_name not in vars(self):
            # Most `gen_value` methods return a value such as an integer,
            # string or dictionary, but OneTo{One,Many}Field.gen_value
            # returns an instance of the referenced class.
            if hasattr(field, 'default'):
                value = field.default
            elif hasattr(field, 'choices'):
                value = gen_choice(field.choices)
            elif isinstance(field, OneToOneField):
                value = field.gen_value().create_json(auth=auth)['id']
            elif isinstance(field, OneToManyField):
                value = [field.gen_value().create_json(auth=auth)['id']]
            else:
                value = field.gen_value()
            setattr(self, field_name, value)

def test_post_sync_plan_migration(self):
    """Post-upgrade scenario that tests that existing sync plans keep
    working as expected after a Satellite upgrade that migrates from
    Pulp to Katello.

    :id: badaeec2-d42f-41d5-bd85-4b23d6d5a724

    :steps:
        1. Verify the sync plan exists and works as before

    :expectedresults: Post upgrade, sync plans exist and work as before.
    """
    entity_data = get_entity_data(self.__class__.__name__)
    org = entities.Organization(id=entity_data.get('org_id'))
    product = entities.Product(id=entity_data.get("product_id")).read()
    sync_plan = entities.SyncPlan(
        id=entity_data.get("sync_plan_id"), organization=org
    ).read()
    self.assertEqual(product.sync_plan.id, sync_plan.id)
    self.assertEqual(sync_plan.name, entity_data.get("sync_plan_name"))
    self.assertEqual(sync_plan.interval, entity_data.get("interval"))
    self.assertEqual(sync_plan.sync_date, entity_data.get("sync_date"))
    # checking sync plan update on upgraded satellite
    sync_plan.interval = SYNC_INTERVAL['custom']
    sync_plan.cron_expression = gen_choice(valid_cron_expressions())
    self.assertEqual(
        sync_plan.update(['interval', 'cron_expression']).interval,
        SYNC_INTERVAL['custom'],
    )
    # checking sync plan delete on upgraded satellite
    sync_plan.delete()
    product = product.read()
    self.assertIsNone(product.sync_plan)
    with self.assertRaises(HTTPError):
        sync_plan.read()

def test_positive_update_interval(self):
    """Create a sync plan and update its interval.

    :id: cf2eddf8-b4db-430e-a9b0-83c626b45068

    :expectedresults: A sync plan is created and its interval can be
        updated with the specified interval.

    :CaseImportance: Critical
    """
    for interval in valid_sync_interval():
        with self.subTest(interval):
            sync_plan = entities.SyncPlan(organization=self.org)
            result = sync_plan.get_fields()['interval']
            # start from any interval other than the one under test
            sync_plan.interval = sample(set(result.choices) - set([interval]), 1)[0]
            sync_plan = sync_plan.create()
            sync_plan.interval = interval
            if interval == 'custom cron':
                sync_plan.cron_expression = gen_choice(valid_cron_expressions())
                sync_plan = sync_plan.update(['interval', 'cron_expression'])
            else:
                sync_plan = sync_plan.update(['interval'])
            self.assertEqual(sync_plan.interval, interval)

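# The set-difference draw above is a compact way to pick a value that is
# guaranteed to differ from the current one. A standalone illustration
# (the interval names are just examples); note that newer Pythons require
# a sequence for random.sample, hence the sorted() call:
from random import sample

choices = {'hourly', 'daily', 'weekly', 'custom cron'}
current = 'daily'
other = sample(sorted(choices - {current}), 1)[0]
assert other in choices and other != current
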
def _make_discoveryrule(self, options=None):
    """Makes a new discovery rule and asserts its success"""
    if options is None:
        options = {}
    searches = [
        'cpu_count = 1',
        'disk_count < 5',
        'memory > 500',
        'model = KVM',
        'Organization = Default_Organization',
        'last_report = Today',
        'subnet = 192.168.100.0',
        'facts.architecture != x86_64',
    ]
    if not any(options.get(key) for key in ['organizations', 'organization-ids']):
        options[u'organization-ids'] = self.org['id']
    if not any(options.get(key) for key in ['locations', 'locations-ids']):
        options[u'location-ids'] = self.loc['id']
    if not any(options.get(key) for key in ['hostgroup', 'hostgroup-ids']):
        options[u'hostgroup-id'] = self.hostgroup['id']
    if options.get('search') is None:
        options[u'search'] = gen_choice(searches)
    return make_discoveryrule(options)

def test_positive_delete_synced_product_custom_cron(module_org):
    """Create a sync plan with custom cron with one synced product and
    delete it.

    :id: f13936f5-7522-43b8-a986-26795637cde9

    :expectedresults: A sync plan is created with one synced product and
        sync plan can be deleted.

    :CaseLevel: Integration
    """
    sync_plan = entities.SyncPlan(
        organization=module_org,
        interval='custom cron',
        cron_expression=gen_choice(valid_cron_expressions()),
    ).create()
    product = entities.Product(organization=module_org).create()
    entities.Repository(product=product).create()
    sync_plan.add_products(data={'product_ids': [product.id]})
    product.sync()
    product = product.read()
    assert product.sync_plan.id == sync_plan.id
    sync_plan.delete()
    product = product.read()
    assert product.sync_plan is None
    with pytest.raises(HTTPError):
        sync_plan.read()

def test_positive_add_remove_products_custom_cron(self):
    """Create a sync plan with two products having a custom cron interval
    and then remove both products from it.

    :id: 5ce34eaa-3574-49ba-ab02-aa25515394aa

    :expectedresults: A sync plan can be created and both products can be
        removed from it.

    :CaseLevel: Integration
    """
    cron_expression = gen_choice(valid_cron_expressions())
    syncplan = entities.SyncPlan(
        organization=self.org,
        interval='custom cron',
        cron_expression=cron_expression,
    ).create()
    products = [entities.Product(organization=self.org).create() for _ in range(2)]
    syncplan.add_products(data={
        'product_ids': [product.id for product in products],
    })
    self.assertEqual(len(syncplan.read().product), 2)
    syncplan.remove_products(data={
        'product_ids': [product.id for product in products],
    })
    self.assertEqual(len(syncplan.read().product), 0)

def test_positive_create_with_name_by_cv_id(self):
    """Create new content view filter and assign it to existing content
    view by id. Use different value types as a name and random filter
    content type as a parameter for this filter

    @Feature: Content View Filter

    @Assert: Content view filter created successfully and has correct
    and expected parameters
    """
    for name in valid_data_list():
        with self.subTest(name):
            filter_content_type = gen_choice([
                'rpm',
                'package_group',
                'erratum',
            ])
            ContentView.filter_create({
                'content-view-id': self.content_view['id'],
                'name': name,
                'type': filter_content_type,
            })
            cvf = ContentView.filter_info({
                u'content-view-id': self.content_view['id'],
                u'name': name,
            })
            self.assertEqual(cvf['name'], name)
            self.assertEqual(cvf['type'], filter_content_type)

def test_positive_delete_synced_product_custom_cron(self):
    """Create a sync plan with custom cron with one synced product and
    delete it.

    :id: f13936f5-7522-43b8-a986-26795637cde9

    :expectedresults: A sync plan is created with one synced product and
        sync plan can be deleted.

    :CaseLevel: Integration
    """
    sync_plan = entities.SyncPlan(
        organization=self.org,
        interval='custom cron',
        cron_expression=gen_choice(valid_cron_expressions()),
    ).create()
    product = entities.Product(organization=self.org).create()
    entities.Repository(product=product).create()
    sync_plan.add_products(data={'product_ids': [product.id]})
    product.sync()
    product = product.read()
    self.assertEqual(product.sync_plan.id, sync_plan.id)
    sync_plan.delete()
    product = product.read()
    self.assertIsNone(product.sync_plan)
    with self.assertRaises(HTTPError):
        sync_plan.read()

def test_positive_upload_facts():
    """Upload fake facts to create a discovered host

    :id: c1f40204-bbb0-46d0-9b60-e42f00ad1649

    :BZ: 1349364, 1392919, 1731112

    :Steps:

        1. POST /api/v2/discovered_hosts/facts
        2. Read the created discovered host

    :expectedresults: Host should be created successfully

    :CaseImportance: High

    :CaseLevel: Integration
    """
    name = gen_choice(list(valid_data_list().values()))
    result = _create_discovered_host(name)
    discovered_host = entities.DiscoveredHost(id=result['id']).read_json()
    host_name = 'mac{}'.format(discovered_host['mac'].replace(':', ''))
    assert discovered_host['name'] == host_name

def create_missing(self, auth=None):
    """Automagically populate all required instance attributes.

    Iterate through the set of all required :class:`Field` defined on
    ``type(self)`` and create a corresponding instance attribute if none
    exists. Subclasses should override this method if there is some
    relationship between two required fields.

    :param tuple auth: Same as :meth:`create_raw`.
    :return: Nothing. This method relies on side-effects.
    """
    for field_name, field in self.get_fields().items():
        if field.required and field_name not in vars(self):
            # Most `get_value` methods return a value such as an integer,
            # string or dictionary, but OneTo{One,Many}Field.get_value
            # returns an instance of the referenced class.
            if hasattr(field, 'default'):
                value = field.default
            elif hasattr(field, 'choices'):
                value = gen_choice(field.choices)
            elif isinstance(field, OneToOneField):
                value = field.get_value().create_json(auth=auth)['id']
            elif isinstance(field, OneToManyField):
                value = [field.get_value().create_json(auth=auth)['id']]
            else:
                value = field.get_value()
            setattr(self, field_name, value)

def _create_discoveryrule(org, loc, hostgroup, options=None):
    """Makes a new discovery rule and asserts its success"""
    options = options or {}
    searches = [
        'cpu_count = 1',
        'disk_count < 5',
        'memory > 500',
        'model = KVM',
        'Organization = Default_Organization',
        'last_report = Today',
        'subnet = 192.168.100.0',
        'facts.architecture != x86_64',
    ]
    if not any(options.get(key) for key in ['organizations', 'organization-ids']):
        options['organization-ids'] = org.id
    if not any(options.get(key) for key in ['locations', 'locations-ids']):
        options['location-ids'] = loc.id
    if not any(options.get(key) for key in ['hostgroup', 'hostgroup-ids']):
        options['hostgroup-id'] = hostgroup.id
    if options.get('search') is None:
        options['search'] = gen_choice(searches)
    # Create a simple object from the dictionary that the CLI factory
    # provides. This allows for consistent attributized access of all
    # fixture entities in the tests.
    return AttrDict(make_discoveryrule(options))

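# A hypothetical usage of the fixture helper above; `module_org`,
# `module_location` and `module_hostgroup` stand in for nailgun-style
# fixture objects exposing an `id` attribute:
rule = _create_discoveryrule(
    module_org, module_location, module_hostgroup,
    options={'search': 'memory > 1000'},
)
assert rule.search == 'memory > 1000'  # AttrDict allows attribute access
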
def test_negative_end_to_end(self, module_host):
    """Attempt to create and update an interface with different invalid
    names (>255 chars, unsupported string types); at the end, attempt to
    remove the primary interface

    :id: 6fae26d8-8f62-41ba-a1cc-0185137ef70f

    :expectedresults: An interface is neither created nor updated, and
        the primary interface is not deleted

    :CaseImportance: Critical
    """
    name = gen_choice(invalid_interfaces_list())
    with pytest.raises(HTTPError) as error:
        entities.Interface(host=module_host, name=name).create()
    assert str(422) in str(error)
    interface = entities.Interface(host=module_host).create()
    interface.name = name
    with pytest.raises(HTTPError) as error:
        interface.update(['name'])
    assert interface.read().name != name
    assert str(422) in str(error)
    primary_interface = next(
        interface for interface in module_host.interface if interface.read().primary
    )
    with pytest.raises(HTTPError):
        primary_interface.delete()
    try:
        primary_interface.read()
    except HTTPError:
        pytest.fail("HTTPError 404 raised unexpectedly!")

def test_positive_add_remove_products_custom_cron(module_org):
    """Create a sync plan with two products having custom cron interval
    and then remove both products from it.

    :id: 5ce34eaa-3574-49ba-ab02-aa25515394aa

    :expectedresults: A sync plan can be created and both products can be
        removed from it.

    :CaseLevel: Integration
    """
    cron_expression = gen_choice(valid_cron_expressions())
    syncplan = entities.SyncPlan(
        organization=module_org,
        interval='custom cron',
        cron_expression=cron_expression,
    ).create()
    products = [entities.Product(organization=module_org).create() for _ in range(2)]
    syncplan.add_products(data={'product_ids': [product.id for product in products]})
    assert len(syncplan.read().product) == 2
    syncplan.remove_products(data={'product_ids': [product.id for product in products]})
    assert len(syncplan.read().product) == 0

def test_positive_create_and_update_with_name():
    """Create and update a host with different names and minimal input
    parameters

    :id: a7c0e8ec-3816-4092-88b1-0324cb271752

    :expectedresults: A host is created and updated with expected name

    :CaseImportance: Critical
    """
    name = gen_choice(valid_hosts_list())
    host = entities.Host(name=name).create()
    assert host.name == f'{name}.{host.domain.read().name}'
    new_name = gen_choice(valid_hosts_list())
    host.name = new_name
    host = host.update(['name'])
    assert host.name == f'{new_name}.{host.domain.read().name}'

def test_positive_create_and_update_with_comment():
    """Create and update a host with a comment

    :id: 9b78663f-139c-4d0b-9115-180624b0d41b

    :expectedresults: A host is created and updated with expected comment

    :CaseImportance: Critical
    """
    comment = gen_choice(list(valid_data_list().values()))
    host = entities.Host(comment=comment).create()
    assert host.comment == comment
    new_comment = gen_choice(list(valid_data_list().values()))
    host.comment = new_comment
    host = host.update(['comment'])
    assert host.comment == new_comment

def test_positive_update_key_for_product_with_repos(module_org):
    """Create a GPG key with a valid name and valid key content via file
    import, associate it with a custom product that has more than one
    repository, then update the key

    :id: 8aa3dc75-6257-48ae-b3f9-c617e323b47a

    :expectedresults: gpg key is associated with product before/after
        update as well as with the repositories

    :CaseLevel: Integration
    """
    # Create a product and a gpg key
    product = make_product({'organization-id': module_org.id})
    gpg_key = make_content_credential({'organization-id': module_org.id})
    # Create repositories and assign them to the product
    repos = [
        make_repository({'product-id': product['id']})
        for _ in range(gen_integer(2, 5))
    ]
    # Associate gpg key with a product
    Product.update({
        'gpg-key': gpg_key['name'],
        'id': product['id'],
        'organization-id': module_org.id,
    })
    # Verify gpg key was associated
    product = Product.info({'id': product['id'], 'organization-id': module_org.id})
    assert product['gpg']['gpg-key'] == gpg_key['name']
    for repo in repos:
        repo = Repository.info({'id': repo['id']})
        assert repo['gpg-key'].get('name') == gpg_key['name']
    # Update the gpg key
    new_name = gen_choice(list(valid_data_list().values()))
    ContentCredential.update({
        'name': gpg_key['name'],
        'new-name': new_name,
        'organization-id': module_org.id,
    })
    # Verify changes are reflected in the gpg key
    gpg_key = ContentCredential.info({'id': gpg_key['id'], 'organization-id': module_org.id})
    assert gpg_key['name'] == new_name
    # Verify changes are reflected in the product
    product = Product.info({'id': product['id'], 'organization-id': module_org.id})
    assert product['gpg']['gpg-key'] == new_name
    # Verify changes are reflected in the repositories
    for repo in repos:
        repo = Repository.info({'id': repo['id']})
        assert repo['gpg-key'].get('name') == new_name

def test_positive_update_key_for_product_with_repos(self):
    """Create gpg key with valid name and valid gpg key via file import,
    associate it with a custom product that has more than one repository,
    then update the key

    :id: a95eb51b-4b6b-4c04-bb4d-cbe600431850

    :expectedresults: gpg key is associated with product before/after
        update as well as with the repositories

    :CaseLevel: Integration
    """
    # Create a product and a gpg key
    product = make_product({'organization-id': self.org['id']})
    gpg_key = make_gpg_key({'organization-id': self.org['id']})
    # Create repositories and assign them to the product
    repos = [
        make_repository({'product-id': product['id']})
        for _ in range(gen_integer(2, 5))
    ]
    # Associate gpg key with a product
    Product.update({
        'gpg-key': gpg_key['name'],
        'id': product['id'],
        'organization-id': self.org['id'],
    })
    # Verify gpg key was associated
    product = Product.info({
        'id': product['id'],
        'organization-id': self.org['id'],
    })
    self.assertEqual(product['gpg']['gpg-key'], gpg_key['name'])
    for repo in repos:
        repo = Repository.info({'id': repo['id']})
        self.assertEqual(repo['gpg-key'].get('name'), gpg_key['name'])
    # Update the gpg key
    new_name = gen_choice(valid_data_list())
    GPGKey.update({
        'name': gpg_key['name'],
        'new-name': new_name,
        'organization-id': self.org['id'],
    })
    # Verify changes are reflected in the gpg key
    gpg_key = GPGKey.info({
        'id': gpg_key['id'],
        'organization-id': self.org['id'],
    })
    self.assertEqual(gpg_key['name'], new_name)
    # Verify changes are reflected in the product
    product = Product.info({
        'id': product['id'],
        'organization-id': self.org['id'],
    })
    self.assertEqual(product['gpg']['gpg-key'], new_name)
    # Verify changes are reflected in the repositories
    for repo in repos:
        repo = Repository.info({'id': repo['id']})
        self.assertEqual(repo['gpg-key'].get('name'), new_name)

def test_positive_update_key_for_product_with_repos(self):
    """Create gpg key with valid name and valid gpg key via file import,
    associate it with a custom product that has more than one repository,
    then update the key

    @id: a95eb51b-4b6b-4c04-bb4d-cbe600431850

    @assert: gpg key is associated with product before/after update as
    well as with the repositories

    @CaseLevel: Integration
    """
    # Create a product and a gpg key
    product = make_product({'organization-id': self.org['id']})
    gpg_key = make_gpg_key({'organization-id': self.org['id']})
    # Create repositories and assign them to the product
    repos = [
        make_repository({'product-id': product['id']})
        for _ in range(gen_integer(2, 5))
    ]
    # Associate gpg key with a product
    Product.update({
        'gpg-key': gpg_key['name'],
        'id': product['id'],
        'organization-id': self.org['id'],
    })
    # Verify gpg key was associated
    product = Product.info({
        'id': product['id'],
        'organization-id': self.org['id'],
    })
    self.assertEqual(product['gpg']['gpg-key'], gpg_key['name'])
    for repo in repos:
        repo = Repository.info({'id': repo['id']})
        self.assertEqual(repo['gpg-key'].get('name'), gpg_key['name'])
    # Update the gpg key
    new_name = gen_choice(valid_data_list())
    GPGKey.update({
        'name': gpg_key['name'],
        'new-name': new_name,
        'organization-id': self.org['id'],
    })
    # Verify changes are reflected in the gpg key
    gpg_key = GPGKey.info({
        'id': gpg_key['id'],
        'organization-id': self.org['id'],
    })
    self.assertEqual(gpg_key['name'], new_name)
    # Verify changes are reflected in the product
    product = Product.info({
        'id': product['id'],
        'organization-id': self.org['id'],
    })
    self.assertEqual(product['gpg']['gpg-key'], new_name)
    # Verify changes are reflected in the repositories
    for repo in repos:
        repo = Repository.info({'id': repo['id']})
        self.assertEqual(repo['gpg-key'].get('name'), new_name)

def test_positive_update_key_for_repo_from_product_with_repos(module_org):
    """Create a GPG key with a valid name and valid key content via file
    import, associate it with a repository from a custom product that has
    more than one repository, then update the key

    :id: c548ed4f-7f2d-456f-a644-7597644f6457

    :expectedresults: gpg key is associated with a single repository
        before/after update and not associated with product or other
        repositories

    :CaseLevel: Integration
    """
    # Create a product and a gpg key
    product = make_product({'organization-id': module_org.id})
    gpg_key = make_content_credential({'organization-id': module_org.id})
    # Create repositories and assign them to the product
    repos = [
        make_repository({'product-id': product['id']})
        for _ in range(gen_integer(2, 5))
    ]
    # Associate gpg key with a single repository
    Repository.update({
        'gpg-key': gpg_key['name'],
        'id': repos[0]['id'],
        'organization-id': module_org.id,
    })
    # Verify gpg key was associated
    repos[0] = Repository.info({'id': repos[0]['id']})
    assert repos[0]['gpg-key']['name'] == gpg_key['name']
    # Update the gpg key
    new_name = gen_choice(list(valid_data_list().values()))
    ContentCredential.update({
        'name': gpg_key['name'],
        'new-name': new_name,
        'organization-id': module_org.id,
    })
    # Verify changes are reflected in the gpg key
    gpg_key = ContentCredential.info({
        'id': gpg_key['id'],
        'organization-id': module_org.id,
    })
    assert gpg_key['name'] == new_name
    # Verify changes are reflected in the associated repository
    repos[0] = Repository.info({'id': repos[0]['id']})
    assert repos[0]['gpg-key'].get('name') == new_name
    # Verify changes are not reflected in the product
    product = Product.info({
        'id': product['id'],
        'organization-id': module_org.id,
    })
    assert product['gpg']['gpg-key'] != new_name
    # Verify changes are not reflected in the rest of the repositories
    for repo in repos[1:]:
        repo = Repository.info({'id': repo['id']})
        assert repo['gpg-key'].get('name') != new_name

def test_positive_update_key_for_repo_from_product_with_repos(self):
    """Create gpg key with valid name and valid gpg key via file import,
    associate it with a repository from a custom product that has more
    than one repository, then update the key

    :id: 773a9141-9f04-40ba-b3df-4b6d80db25a6

    :expectedresults: gpg key is associated with a single repository
        before/after update and not associated with product or other
        repositories

    :CaseLevel: Integration
    """
    # Create a product and a gpg key
    product = make_product({'organization-id': self.org['id']})
    gpg_key = make_gpg_key({'organization-id': self.org['id']})
    # Create repositories and assign them to the product
    repos = [
        make_repository({'product-id': product['id']})
        for _ in range(gen_integer(2, 5))
    ]
    # Associate gpg key with a single repository
    Repository.update({
        'gpg-key': gpg_key['name'],
        'id': repos[0]['id'],
        'organization-id': self.org['id'],
    })
    # Verify gpg key was associated
    repos[0] = Repository.info({'id': repos[0]['id']})
    self.assertEqual(repos[0]['gpg-key']['name'], gpg_key['name'])
    # Update the gpg key
    new_name = gen_choice(valid_data_list())
    GPGKey.update({
        'name': gpg_key['name'],
        'new-name': new_name,
        'organization-id': self.org['id'],
    })
    # Verify changes are reflected in the gpg key
    gpg_key = GPGKey.info({
        'id': gpg_key['id'],
        'organization-id': self.org['id'],
    })
    self.assertEqual(gpg_key['name'], new_name)
    # Verify changes are reflected in the associated repository
    repos[0] = Repository.info({'id': repos[0]['id']})
    self.assertEqual(repos[0]['gpg-key'].get('name'), new_name)
    # Verify changes are not reflected in the product
    product = Product.info({
        'id': product['id'],
        'organization-id': self.org['id'],
    })
    self.assertNotEqual(product['gpg']['gpg-key'], new_name)
    # Verify changes are not reflected in the rest of the repositories
    for repo in repos[1:]:
        repo = Repository.info({'id': repo['id']})
        self.assertNotEqual(repo['gpg-key'].get('name'), new_name)

def test_gen_choice_11(item):
    """Select a random value from dictionary list."""
    choices = [
        {'Name': 'Bob', 'Age': 39},
        {'Name': 'Alice', 'Age': 23},
        {'Name': 'Pete', 'Age': 79},
    ]
    result = gen_choice(choices)
    assert result in choices

def test_key_associate_13(self):
    """@test: Create gpg key with valid name and valid gpg key via file
    import, associate it with a repository from a custom product that has
    more than one repository, then update the key

    @feature: GPG Keys

    @assert: gpg key is associated with a single repository before/after
    update and not associated with product or other repositories
    """
    # Create a product and a gpg key
    product = make_product({'organization-id': self.org['id']})
    gpg_key = make_gpg_key({'organization-id': self.org['id']})
    # Create repositories and assign them to the product
    repos = [
        make_repository({'product-id': product['id']})
        for _ in range(gen_integer(2, 5))
    ]
    # Associate gpg key with a single repository
    Repository.update({
        'gpg-key': gpg_key['name'],
        'id': repos[0]['id'],
        'organization-id': self.org['id'],
    })
    # Verify gpg key was associated
    repos[0] = Repository.info({'id': repos[0]['id']})
    self.assertEqual(repos[0]['gpg-key']['name'], gpg_key['name'])
    # Update the gpg key
    new_name = gen_choice(valid_data_list())
    GPGKey.update({
        'name': gpg_key['name'],
        'new-name': new_name,
        'organization-id': self.org['id'],
    })
    # Verify changes are reflected in the gpg key
    gpg_key = GPGKey.info({
        'id': gpg_key['id'],
        'organization-id': self.org['id'],
    })
    self.assertEqual(gpg_key['name'], new_name)
    # Verify changes are reflected in the associated repository
    repos[0] = Repository.info({'id': repos[0]['id']})
    self.assertEqual(repos[0]['gpg-key'].get('name'), new_name)
    # Verify changes are not reflected in the product
    product = Product.info({
        'id': product['id'],
        'organization-id': self.org['id'],
    })
    self.assertNotEqual(product['gpg']['gpg-key'], new_name)
    # Verify changes are not reflected in the rest of the repositories
    for repo in repos[1:]:
        repo = Repository.info({'id': repo['id']})
        self.assertNotEqual(repo['gpg-key'].get('name'), new_name)

def test_positive_update_key_for_product_with_repo(self):
    """Create gpg key with valid name and valid gpg key via file import,
    associate it with a custom product that has one repository, then
    update the key

    @id: 3fb550a7-507e-4988-beb6-35bdfc2e99a8

    @assert: gpg key is associated with product before/after update as
    well as with the repository

    @CaseLevel: Integration
    """
    # Create a product and a gpg key
    product = make_product({'organization-id': self.org['id']})
    gpg_key = make_gpg_key({'organization-id': self.org['id']})
    # Create a repository and assign it to the product
    repo = make_repository({'product-id': product['id']})
    # Associate gpg key with a product
    Product.update({
        'gpg-key': gpg_key['name'],
        'id': product['id'],
        'organization-id': self.org['id'],
    })
    # Verify gpg key was associated
    product = Product.info({
        'id': product['id'],
        'organization-id': self.org['id'],
    })
    repo = Repository.info({'id': repo['id']})
    self.assertEqual(product['gpg']['gpg-key'], gpg_key['name'])
    self.assertEqual(repo['gpg-key'].get('name'), gpg_key['name'])
    # Update the gpg key
    new_name = gen_choice(valid_data_list())
    GPGKey.update({
        'name': gpg_key['name'],
        'new-name': new_name,
        'organization-id': self.org['id'],
    })
    # Verify changes are reflected in the gpg key
    gpg_key = GPGKey.info({
        'id': gpg_key['id'],
        'organization-id': self.org['id'],
    })
    self.assertEqual(gpg_key['name'], new_name)
    # Verify changes are reflected in the product
    product = Product.info({
        'id': product['id'],
        'organization-id': self.org['id'],
    })
    self.assertEqual(product['gpg']['gpg-key'], new_name)
    # Verify the repository still points at the same key (checked by id,
    # since only the name changed)
    repo = Repository.info({'id': repo['id']})
    self.assertEqual(repo['gpg-key'].get('id'), gpg_key['id'])

def generate_system_facts(name=None):
    """Generate random system facts for registration.

    :param str name: A valid FQDN for a system. If one is not provided,
        then a random value will be generated.
    :return: A dictionary with random system facts
    :rtype: dict
    """
    if name is None:
        name = u'{0}.example.net'.format(gen_alpha().lower())

    # Make a copy of the system facts 'template'
    new_facts = copy.deepcopy(SYSTEM_FACTS)
    # Select a random RHEL version...
    distro = gen_choice(DISTRO_IDS)

    # ...and update our facts
    new_facts['distribution.id'] = distro['id']
    new_facts['distribution.version'] = distro['version']
    new_facts['dmi.bios.relase_date'] = _bios_date().strftime('%m/%d/%Y')
    new_facts['dmi.memory.maximum_capacity'] = gen_choice(MEMORY_CAPACITY)
    new_facts['dmi.memory.size'] = gen_choice(MEMORY_SIZE)
    new_facts['dmi.system.uuid'] = gen_uuid()
    new_facts['dmi.system.version'] = u'RHEL'
    new_facts['lscpu.architecture'] = distro['architecture']
    new_facts['net.interface.eth1.hwaddr'] = gen_mac()
    new_facts['net.interface.eth1.ipaddr'] = gen_ipaddr()
    new_facts['network.hostname'] = name
    new_facts['network.ipaddr'] = new_facts['net.interface.eth1.ipaddr']
    new_facts['uname.machine'] = distro['architecture']
    new_facts['uname.nodename'] = name
    new_facts['uname.release'] = distro['kernel']
    new_facts['virt.uuid'] = new_facts['dmi.system.uuid']

    return new_facts

def test_gen_choice_6(self):
    """
    @Test: Select a random value from longer tuple
    @Feature: Choice Generator
    @Assert: Selects a random choice from longer tuple
    """
    choices = (1, 2, 3, 9, 10, 11, 100, 101, 102)
    for turn in range(10):
        result = gen_choice(choices)
        self.assertIn(
            result,
            choices,
            "An invalid value was selected from available choices.")

def test_gen_choice_1(self):
    """
    @Test: Select a random value from integer values
    @Feature: Choice Generator
    @Assert: Selects a random choice from options
    """
    choices = range(5)
    for turn in range(10):
        result = gen_choice(choices)
        self.assertIn(
            result,
            choices,
            "An invalid value was selected from available choices.")

def test_gen_choice_12(self):
    """
    @Test: Select a random value from words list
    @Feature: Choice Generator
    @Assert: Selects a random choice from list
    """
    choices = ['green', 'yellow', 'blue', 'white']
    for turn in range(10):
        result = gen_choice(choices)
        self.assertIn(
            result,
            choices,
            "An invalid value was selected from available choices.")

def test_gen_choice_5(self):
    """
    @Test: Select a random value from short tuple
    @Feature: Choice Generator
    @Assert: Selects a random choice from short tuple
    """
    choices = (1,)
    for turn in range(10):
        result = gen_choice(choices)
        self.assertEqual(
            result,
            choices[0],
            "An invalid value was selected from available choices.")

def test_gen_choice_2(self):
    """
    @Test: Select a random value from alphanumeric values
    @Feature: Choice Generator
    @Assert: Selects a random choice from alphanumeric options
    """
    choices = string.ascii_letters + string.digits
    for turn in range(10):
        result = gen_choice(choices)
        self.assertIn(
            result,
            choices,
            "An invalid value was selected from available choices.")

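# Taken together, the gen_choice tests above pin down a simple contract:
# any non-empty ordered sequence is accepted, while None, empty containers
# and dictionaries raise ValueError. A standalone sketch of that contract
# (assuming fauxfactory's gen_choice):
from fauxfactory import gen_choice

assert gen_choice((1,)) == 1        # a single-item tuple has one choice
assert gen_choice('abc') in 'abc'   # strings are sequences too
for bad in (None, (), [], {}, {'Name': 'Bob'}):
    try:
        gen_choice(bad)
    except ValueError:
        pass  # expected: empty or unordered inputs are rejected
    else:
        raise AssertionError(f'{bad!r} should have raised ValueError')
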
def test_positive_update_key_for_empty_product(self):
    """Create gpg key with valid name and valid gpg key via file import,
    associate it with an empty (no repos) custom product, then update the
    key

    :id: c0c84c45-21fc-4940-9d52-00babb807ec7

    :expectedresults: gpg key is associated with product before/after
        update

    :CaseLevel: Integration
    """
    # Create a product and a gpg key
    product = make_product({'organization-id': self.org['id']})
    gpg_key = make_gpg_key({'organization-id': self.org['id']})
    # Associate gpg key with a product
    Product.update({
        'gpg-key': gpg_key['name'],
        'id': product['id'],
        'organization-id': self.org['id'],
    })
    # Verify gpg key was associated
    product = Product.info({
        'id': product['id'],
        'organization-id': self.org['id'],
    })
    self.assertEqual(product['gpg']['gpg-key'], gpg_key['name'])
    # Update the gpg key
    new_name = gen_choice(valid_data_list())
    GPGKey.update({
        'name': gpg_key['name'],
        'new-name': new_name,
        'organization-id': self.org['id'],
    })
    # Verify changes are reflected in the gpg key
    gpg_key = GPGKey.info({
        'id': gpg_key['id'],
        'organization-id': self.org['id'],
    })
    self.assertEqual(gpg_key['name'], new_name)
    # Verify changes are reflected in the product
    product = Product.info({
        'id': product['id'],
        'organization-id': self.org['id'],
    })
    self.assertEqual(product['gpg']['gpg-key'], new_name)

def _make_docker_repo(product_id, name=None, upstream_name=None):
    """Creates a Docker-based repository.

    :param product_id: ID of the ``Product``.
    :param str name: Name for the repository. If ``None`` then a random
        value will be generated.
    :param str upstream_name: A valid name for an existing Docker image.
        If ``None`` then defaults to ``busybox``.
    :return: A ``Repository`` object.
    """
    return make_repository({
        'content-type': REPO_CONTENT_TYPE,
        'docker-upstream-name': upstream_name or REPO_UPSTREAM_NAME,
        'name': name or gen_string(gen_choice(STRING_TYPES), 15),
        'product-id': product_id,
        'url': DOCKER_REGISTRY_HUB,
    })

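# Hypothetical usage of the helper above; `product` is assumed to be a
# dict returned by a CLI factory such as make_product:
repo = _make_docker_repo(product['id'])  # random name, default upstream
named = _make_docker_repo(product['id'], name='busybox-mirror')
assert named['name'] == 'busybox-mirror'
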
def test_positive_product_create_with_create_sync_plan(session, module_org):
    """Perform Sync Plan Create from Product Create Page

    :id: 4a87b533-12b6-4d4e-8a99-4bb95efc4321

    :expectedresults: Ensure the sync plan gets created and assigned to
        the Product.

    :CaseLevel: Integration

    :CaseImportance: Medium
    """
    product_name = gen_string('alpha')
    product_description = gen_string('alpha')
    gpg_key = entities.GPGKey(
        content=read_data_file(VALID_GPG_KEY_FILE), organization=module_org
    ).create()
    plan_name = gen_string('alpha')
    description = gen_string('alpha')
    cron_expression = gen_choice(valid_cron_expressions())
    with session:
        startdate = session.browser.get_client_datetime() + timedelta(minutes=10)
        sync_plan_values = {
            'name': plan_name,
            'interval': SYNC_INTERVAL['custom'],
            'description': description,
            'cron_expression': cron_expression,
            'date_time.start_date': startdate.strftime("%Y-%m-%d"),
            'date_time.hours': startdate.strftime('%H'),
            'date_time.minutes': startdate.strftime('%M'),
        }
        session.product.create(
            {
                'name': product_name,
                'gpg_key': gpg_key.name,
                'description': product_description,
            },
            sync_plan_values=sync_plan_values,
        )
        assert session.product.search(product_name)[0]['Name'] == product_name
        product_values = session.product.read(product_name)
        assert product_values['details']['name'] == product_name
        assert product_values['details']['sync_plan'] == plan_name
        # Delete product
        session.product.delete(product_name)
        assert not session.product.search(product_name)

def test_positive_update_key_for_repo_from_product_with_repo(self):
    """Create gpg key with valid name and valid gpg key via file import,
    associate it with the repository from a custom product that has one
    repository, then update the key

    @id: 549e2e1e-fd10-4487-a3a5-fdee9b8cfc48

    @assert: gpg key is associated with the repository before/after
    update, but not with the product

    @CaseLevel: Integration
    """
    # Create a product and a gpg key
    product = make_product({'organization-id': self.org['id']})
    gpg_key = make_gpg_key({'organization-id': self.org['id']})
    # Create a repository, assigning the product and the gpg key
    repo = make_repository({
        'gpg-key-id': gpg_key['id'],
        'product-id': product['id'],
    })
    # Verify gpg key was associated
    self.assertEqual(repo['gpg-key'].get('name'), gpg_key['name'])
    # Update the gpg key
    new_name = gen_choice(valid_data_list())
    GPGKey.update({
        'name': gpg_key['name'],
        'new-name': new_name,
        'organization-id': self.org['id'],
    })
    # Verify changes are reflected in the gpg key
    gpg_key = GPGKey.info({
        'id': gpg_key['id'],
        'organization-id': self.org['id'],
    })
    self.assertEqual(gpg_key['name'], new_name)
    # Verify changes are reflected in the repository
    repo = Repository.info({'id': repo['id']})
    self.assertEqual(repo['gpg-key'].get('name'), new_name)
    # Verify the gpg key wasn't added to the product
    product = Product.info({
        'id': product['id'],
        'organization-id': self.org['id'],
    })
    self.assertNotEqual(product['gpg']['gpg-key'], new_name)