def test_import_orgs_recovery(self, test_data):
    """@test: Try to Import organizations with the same name to invoke
    usage of a recovery strategy (rename, map, none)

    @feature: Import Organizations Recover

    @assert: 2nd Import will result in No Action Taken, 3rd one will
    rename the new organizations, and the 4th one will map them

    """
    # set up the CSV data for the import
    csv_files = dict(self.default_dataset[1])
    csv_files['users'] = update_csv_values(
        csv_files['users'],
        'organization_id',
        test_data,
        self.default_dataset[0]
    )
    # the very first import has to succeed
    self.assertEqual(
        Import.organization({'csv-file': csv_files['users']}).return_code,
        0,
    )
    # drop the transition data so the next run cannot map anything
    ssh.command('rm -rf "${HOME}"/.transition_data')

    # 'none' strategy: the org list may not change at all
    initial_orgs = Org.list().stdout
    Import.organization(
        {'csv-file': csv_files['users'], 'recover': 'none'}
    )
    self.assertEqual(initial_orgs, Org.list().stdout)

    # default (rename) strategy: every renamed org must exist in sat6
    renamed = Import.organization_with_tr_data(
        {'csv-file': csv_files['users']}
    )
    self.assertEqual(len(renamed[1]), len(test_data))
    for tr_record in renamed[1]:
        self.assertEqual(
            Org.info({'id': tr_record['sat6']}).return_code, 0
        )
    Import.organization({'csv-file': csv_files['users'], 'delete': True})

    # 'map' strategy: records are re-attached to existing orgs
    mapped = Import.organization_with_tr_data({
        'csv-file': csv_files['users'],
        'recover': 'map',
    })
    for tr_record in mapped[1]:
        self.assertEqual(
            Org.info({'id': tr_record['sat6']}).return_code, 0
        )
    Import.organization({'csv-file': csv_files['users'], 'delete': True})
def test_reimport_repo_negative(self, test_data):
    """@test: Import and enable all Repositories from the default data set
    (predefined source), then try to Import Repositories from the same CSV
    again.

    @feature: Repetitive Import Enable Repositories

    @assert: 3 Repositories imported and enabled, second run should
    trigger no action.

    """
    # randomize org/repo identifiers inside the CSV files
    csv_files = dict(self.default_dataset[1])
    for csv_key, id_column in zip(
        ['users', 'repositories'],
        [u'organization_id', u'org_id'],
    ):
        csv_files[csv_key] = update_csv_values(
            csv_files[csv_key],
            id_column,
            test_data[csv_key],
            self.default_dataset[0]
        )
    # orgs are a prerequisite of the repositories
    import_org = Import.organization_with_tr_data(
        {'csv-file': csv_files['users']}
    )
    import_repo = Import.repository_with_tr_data({
        'csv-file': csv_files['repositories'],
        'synchronize': True,
        'wait': True,
    })
    for result in (import_org, import_repo):
        self.assertEqual(result[0].return_code, 0)

    # resolve the imported orgs to their sat6 ids
    imp_orgs = get_sat6_id(
        csv_to_dataset([csv_files['users']]), import_org[1]
    )
    repos_before = [
        Repository.list({'organization-id': imp_org['sat6']}).stdout
        for imp_org in imp_orgs
    ]
    # a second run over the same CSV must succeed and change nothing
    reimport = Import.repository({
        'csv-file': csv_files['repositories'],
        'synchronize': True,
        'wait': True,
    })
    self.assertEqual(reimport.return_code, 0)
    repos_after = [
        Repository.list({'organization-id': imp_org['sat6']}).stdout
        for imp_org in imp_orgs
    ]
    self.assertEqual(repos_before, repos_after)
def test_import_cv_default(self, test_data):
    """@test: Import and enable all Content Views from the default data
    set (predefined source)

    @feature: Import Enable Content View

    @assert: 3 Content Views imported and enabled

    """
    # randomize org/content-view identifiers inside the CSV files
    tmp_dir = self.default_dataset[0]
    export_dir = os.path.join(tmp_dir, 'exports/CHANNELS')
    csv_files = dict(self.default_dataset[1])
    csv_files['content-views'] = os.path.join(export_dir, 'export.csv')
    for csv_key, id_column in zip(
        ['users', 'content-views'],
        [u'organization_id', u'org_id'],
    ):
        csv_files[csv_key] = update_csv_values(
            csv_files[csv_key],
            id_column,
            test_data[csv_key],
            tmp_dir
        )
    # orgs and repos are prerequisites of the content views
    # NOTE(review): the repositories CSV is imported un-randomized here,
    # unlike in the repository tests -- confirm this is intentional
    import_org = Import.organization_with_tr_data(
        {'csv-file': csv_files['users']}
    )
    import_repo = Import.repository_with_tr_data({
        'csv-file': csv_files['repositories'],
        'synchronize': True,
        'wait': True,
    })
    import_cv = Import.content_view_with_tr_data({
        'csv-file': csv_files['content-views'],
        'dir': export_dir,
    })
    for result in (import_org, import_repo, import_cv):
        self.assertEqual(result[0].return_code, 0)

    # every imported org must now expose at least one content view
    imp_orgs = get_sat6_id(
        csv_to_dataset([csv_files['users']]), import_org[1]
    )
    for imp_org in imp_orgs:
        listed = ContentView.list({'organization-id': imp_org['sat6']})
        self.assertNotEqual(listed.stdout, [])
def test_reimport_host_collections_default_negative(self, test_data):
    """@test: Try to re-import all System Groups from the default data set
    (predefined source) as the Host Collections.

    @feature: Repetitive Import Host-Collections

    @assert: 3 Host Collections created, no action taken on 2nd Import

    """
    # randomize the org and host-collection identifiers in the CSVs
    csv_files = dict(self.default_dataset[1])
    for csv_key, id_column, data_key in zip(
        ['users', 'system-groups'],
        [u'organization_id', u'org_id'],
        [u'orgs', u'hcs']
    ):
        csv_files[csv_key] = update_csv_values(
            csv_files[csv_key],
            id_column,
            test_data[data_key],
            self.default_dataset[0]
        )
    # orgs are a prerequisite of the host collections
    import_org = Import.organization_with_tr_data(
        {'csv-file': csv_files['users']}
    )
    import_hc = Import.host_collection(
        {'csv-file': csv_files['system-groups']}
    )
    self.assertEqual(import_org[0].return_code, 0)
    self.assertEqual(import_hc.return_code, 0)

    hcollections_before = [
        HostCollection.list({'organization-id': tr['sat6']}).stdout
        for tr in import_org[1]
    ]
    self.assertNotEqual(hcollections_before, [])
    # re-running the same import must succeed and change nothing
    self.assertEqual(
        Import.host_collection(
            {'csv-file': csv_files['system-groups']}
        ).return_code,
        0,
    )
    hcollections_after = [
        HostCollection.list({'organization-id': tr['sat6']}).stdout
        for tr in import_org[1]
    ]
    self.assertEqual(hcollections_before, hcollections_after)
def test_import_repo_default(self, test_data):
    """@test: Import and enable all Repositories from the default data set
    (predefined source)

    @feature: Import Enable Repositories

    @assert: 3 Repositories imported and enabled

    """
    # randomize org/repo identifiers inside the CSV files
    csv_files = dict(self.default_dataset[1])
    for csv_key, id_column in zip(
        ['users', 'repositories'],
        [u'organization_id', u'org_id'],
    ):
        csv_files[csv_key] = update_csv_values(
            csv_files[csv_key],
            id_column,
            test_data[csv_key],
            self.default_dataset[0]
        )
    # orgs are a prerequisite of the repositories
    import_org = Import.organization_with_tr_data(
        {'csv-file': csv_files['users']}
    )
    import_repo = Import.repository_with_tr_data({
        'csv-file': csv_files['repositories'],
        'synchronize': True,
        'wait': True,
    })
    for result in (import_org, import_repo):
        self.assertEqual(result[0].return_code, 0)

    # every imported org must now expose at least one repository
    imp_orgs = get_sat6_id(
        csv_to_dataset([csv_files['users']]), import_org[1]
    )
    for imp_org in imp_orgs:
        listed = Repository.list({'organization-id': imp_org['sat6']})
        self.assertNotEqual(listed.stdout, [])
def test_import_host_collections_default(self, test_data):
    """@test: Import all System Groups from the default data set
    (predefined source) as the Host Collections.

    @feature: Import Host-Collections

    @assert: 3 Host Collections created

    """
    # randomize the org and host-collection identifiers in the CSVs
    csv_files = dict(self.default_dataset[1])
    for csv_key, id_column, data_key in zip(
        ['users', 'system-groups'],
        [u'organization_id', u'org_id'],
        [u'orgs', u'hcs']
    ):
        csv_files[csv_key] = update_csv_values(
            csv_files[csv_key],
            id_column,
            test_data[data_key],
            self.default_dataset[0]
        )
    # orgs are a prerequisite of the host collections
    import_org = Import.organization_with_tr_data(
        {'csv-file': csv_files['users']}
    )
    import_hc = Import.host_collection_with_tr_data(
        {'csv-file': csv_files['system-groups']}
    )
    for result in (import_org, import_hc):
        self.assertEqual(result[0].return_code, 0)

    # every imported org must now expose at least one host collection
    imp_orgs = get_sat6_id(
        csv_to_dataset([csv_files['users']]), import_org[1]
    )
    for imp_org in imp_orgs:
        listed = HostCollection.list({'organization-id': imp_org['sat6']})
        self.assertNotEqual(listed.stdout, [])
def test_import_repo_recovery(self, test_data):
    """@test: Try to Import Repos with the same name to invoke usage of
    a recovery strategy (rename, map, none)

    @feature: Import Repository Recover

    @assert: 2nd Import will rename the new repos, 3rd import will map
    them and the 4th one will result in No Action Taken

    """
    # randomize the values for orgs and repos
    # (fix: the dataset dict was needlessly built twice in a row)
    files = dict(self.default_dataset[1])
    for file_ in zip(
        ['users', 'repositories'],
        [u'organization_id', u'org_id'],
    ):
        files[file_[0]] = update_csv_values(
            files[file_[0]],
            file_[1],
            test_data[file_[0]],
            self.default_dataset[0]
        )
    # import the org prerequisites, then the repositories themselves
    import_org = Import.organization_with_tr_data(
        {'csv-file': files['users']}
    )
    for result in (
        import_org,
        Import.repository_with_tr_data(
            {'csv-file': files['repositories']}
        ),
    ):
        self.assertEqual(result[0].return_code, 0)

    # wipe the transition data so the next import cannot map records
    ssh.command('rm -rf "${HOME}"/.transition_data/repositories*')
    ssh.command('rm -rf "${HOME}"/.transition_data/products*')

    # use the default (rename) strategy: every renamed repo must exist
    import_repo_rename = Import.repository_with_tr_data(
        {'csv-file': files['repositories'], 'verbose': True}
    )
    self.assertEqual(import_repo_rename[0].return_code, 0)
    # tr-data element [1] holds the repository records (element [0]
    # presumably the products) -- confirm against
    # Import.repository_with_tr_data
    for record in import_repo_rename[1][1]:
        self.assertEqual(
            Repository.info({'id': record['sat6']}).return_code, 0
        )
    Import.repository(
        {'csv-file': files['repositories'], 'delete': True}
    )
    # use the 'none' strategy: the repo lists must stay untouched
    repos_before = [
        Repository.list({'organization-id': tr['sat6']}).stdout
        for tr in import_org[1]
    ]
    Import.repository(
        {'csv-file': files['repositories'], 'recover': 'none'}
    )
    self.assertEqual(
        repos_before,
        [
            Repository.list({'organization-id': tr['sat6']}).stdout
            for tr in import_org[1]
        ],
    )
    # use the 'map' strategy: records re-attach to existing repos
    import_repo_map = Import.repository_with_tr_data({
        'csv-file': files['repositories'],
        'recover': 'map',
        'verbose': True,
    })
    self.assertEqual(import_repo_map[0].return_code, 0)
    for record in import_repo_map[1][1]:
        self.assertEqual(
            Repository.info({'id': record['sat6']}).return_code, 0
        )
def test_import_host_collections_recovery(self, test_data):
    """@test: Try to Import Collections with the same name to invoke
    usage of a recovery strategy (rename, map, none)

    @feature: Import HostCollection Recover

    @assert: 2nd Import will rename the new collections, 3rd import will
    result in No Action Taken and the 4th one will map them

    """
    # randomize the org and host-collection identifiers in the CSVs
    csv_files = dict(self.default_dataset[1])
    for csv_key, id_column, data_key in zip(
        ['users', 'system-groups'],
        [u'organization_id', u'org_id'],
        [u'orgs', u'hcs']
    ):
        csv_files[csv_key] = update_csv_values(
            csv_files[csv_key],
            id_column,
            test_data[data_key],
            self.default_dataset[0]
        )
    # orgs first, then the initial host-collection import
    import_org = Import.organization_with_tr_data(
        {'csv-file': csv_files['users']}
    )
    for result in (
        import_org,
        Import.host_collection_with_tr_data(
            {'csv-file': csv_files['system-groups']}
        ),
    ):
        self.assertEqual(result[0].return_code, 0)

    # wipe the transition data so the next import cannot map records
    ssh.command('rm -rf "${HOME}"/.transition_data/host_collections*')

    # default (rename) strategy: every renamed collection must exist
    renamed = Import.host_collection_with_tr_data(
        {'csv-file': csv_files['system-groups'], 'verbose': True}
    )
    self.assertEqual(renamed[0].return_code, 0)
    for tr_record in renamed[1]:
        self.assertEqual(
            HostCollection.info({'id': tr_record['sat6']}).return_code, 0
        )
    Import.host_collection(
        {'csv-file': csv_files['system-groups'], 'delete': True}
    )
    # 'none' strategy: the collection lists must stay untouched
    hc_before = [
        HostCollection.list({'organization-id': tr['sat6']}).stdout
        for tr in import_org[1]
    ]
    Import.host_collection(
        {'csv-file': csv_files['system-groups'], 'recover': 'none'}
    )
    hc_after = [
        HostCollection.list({'organization-id': tr['sat6']}).stdout
        for tr in import_org[1]
    ]
    self.assertEqual(hc_before, hc_after)

    # 'map' strategy: records re-attach to existing collections
    mapped = Import.host_collection_with_tr_data({
        'csv-file': csv_files['system-groups'],
        'recover': 'map',
        'verbose': True,
    })
    self.assertEqual(mapped[0].return_code, 0)
    for tr_record in mapped[1]:
        self.assertEqual(
            HostCollection.info({'id': tr_record['sat6']}).return_code, 0
        )
def test_import_cv_recovery(self, test_data):
    """@test: Try to Import Content Views with the same name to invoke
    usage of a recovery strategy (rename, map, none)

    @feature: Import Content View Recover

    @assert: 2nd Import will rename the new Content Views, 3rd import
    will map them and the 4th one will result in No Action Taken

    """
    # prepare the data
    tmp_dir = self.default_dataset[0]
    files = dict(self.default_dataset[1])
    files['content-views'] = os.path.join(
        tmp_dir, 'exports/CHANNELS/export.csv',
    )
    # randomize the values for orgs and content views
    for file_ in zip(
        ['users', 'content-views'],
        [u'organization_id', u'org_id'],
    ):
        files[file_[0]] = update_csv_values(
            files[file_[0]],
            file_[1],
            test_data[file_[0]],
            self.default_dataset[0]
        )
    # import the prerequisites and the content views themselves
    import_org = Import.organization_with_tr_data(
        {'csv-file': files['users']}
    )
    for result in (
        import_org,
        Import.repository_with_tr_data(
            {'csv-file': files['repositories']}
        ),
        Import.content_view_with_tr_data({
            'csv-file': files['content-views'],
            'dir': os.path.join(tmp_dir, 'exports/CHANNELS'),
        }),
    ):
        self.assertEqual(result[0].return_code, 0)

    # wipe the transition data so the next import cannot map records
    ssh.command('rm -rf "${HOME}"/.transition_data/repositories*')
    ssh.command('rm -rf "${HOME}"/.transition_data/products*')
    ssh.command('rm -rf "${HOME}"/.transition_data/content_views*')

    # use the default (rename) strategy
    import_cv_rename = Import.content_view_with_tr_data({
        'csv-file': files['content-views'],
        'verbose': True,
        'dir': os.path.join(tmp_dir, 'exports/CHANNELS'),
    })
    self.assertEqual(import_cv_rename[0].return_code, 0)
    for record in import_cv_rename[1]:
        self.assertEqual(
            ContentView.info({'id': record['sat6']}).return_code, 0
        )
    Import.content_view(
        {'csv-file': files['content-views'], 'delete': True}
    )
    # use the 'none' strategy: the CV lists must stay untouched
    cvs_before = [
        ContentView.list({'organization-id': tr['sat6']}).stdout
        for tr in import_org[1]
    ]
    # fix: this step previously re-imported files['repositories'], which
    # never exercised the 'none' recovery path for content views; the
    # same content-views CSV must be re-imported here
    Import.content_view({
        'csv-file': files['content-views'],
        'dir': os.path.join(tmp_dir, 'exports/CHANNELS'),
        'recover': 'none',
    })
    cvs_after = [
        ContentView.list({'organization-id': tr['sat6']}).stdout
        for tr in import_org[1]
    ]
    self.assertEqual(cvs_before, cvs_after)

    # use the 'map' strategy
    import_cvs_map = Import.content_view_with_tr_data({
        'csv-file': files['content-views'],
        'dir': os.path.join(tmp_dir, 'exports/CHANNELS'),
        'recover': 'map',
        'verbose': True,
    })
    self.assertEqual(import_cvs_map[0].return_code, 0)
    for record in import_cvs_map[1]:
        self.assertEqual(
            ContentView.info({'id': record['sat6']}).return_code, 0
        )
def test_reimport_cv_negative(self, test_data):
    """@test: Import and enable all Content Views from the default data
    set (predefined source), then try to Import them from the same CSV
    again.

    @feature: Repetitive Import Content Views

    @assert: 3 Content Views imported and enabled, 2nd run should trigger
    no action.

    """
    # randomize the values for orgs and content views
    # (fix: docstring typo "Impor" corrected)
    tmp_dir = self.default_dataset[0]
    files = dict(self.default_dataset[1])
    files['content-views'] = os.path.join(
        tmp_dir, 'exports/CHANNELS/export.csv'
    )
    for file_ in zip(
        ['users', 'content-views'],
        [u'organization_id', u'org_id'],
    ):
        files[file_[0]] = update_csv_values(
            files[file_[0]],
            file_[1],
            test_data[file_[0]],
            self.default_dataset[0]
        )
    # import the prerequisites and the content views themselves
    import_org = Import.organization_with_tr_data(
        {'csv-file': files['users']}
    )
    import_repo = Import.repository_with_tr_data({
        'csv-file': files['repositories'],
        'synchronize': True,
        'wait': True,
    })
    import_cv = Import.content_view_with_tr_data({
        'csv-file': files['content-views'],
        'dir': os.path.join(tmp_dir, 'exports/CHANNELS'),
    })
    for result in (import_org, import_repo, import_cv):
        self.assertEqual(result[0].return_code, 0)

    # get the sat6 mapping of the imported organizations
    imp_orgs = get_sat6_id(csv_to_dataset([files['users']]), import_org[1])
    cvs_before = [
        ContentView.list({'organization-id': imp_org['sat6']}).stdout
        for imp_org in imp_orgs
    ]
    # re-importing the same CSV must succeed and change nothing in sat6
    self.assertEqual(
        Import.content_view({
            'csv-file': files['content-views'],
            'dir': os.path.join(tmp_dir, 'exports/CHANNELS'),
        }).return_code,
        0,
    )
    self.assertEqual(
        cvs_before,
        [
            ContentView.list({'organization-id': imp_org['sat6']}).stdout
            for imp_org in imp_orgs
        ]
    )