def test_custom_credential_type_create(get, post, organization, admin):
    # Creating a credential of a custom (cloud) type through the API must
    # store the secret input encrypted at rest and mask it on read.
    credential_type = CredentialType(kind='cloud', name='MyCloud', inputs={
        'fields': [{
            'id': 'api_token',
            'label': 'API Token',
            'type': 'string',
            'secret': True
        }]
    })
    credential_type.save()
    params = {
        'name': 'Best credential ever',
        'organization': organization.pk,
        'credential_type': credential_type.pk,
        'inputs': {
            'api_token': 'secret'
        }
    }
    response = post(reverse('api:credential_list'), params, admin)
    assert response.status_code == 201
    response = get(reverse('api:credential_list'), admin)
    assert response.status_code == 200
    assert response.data['count'] == 1
    cred = response.data['results'][0]
    # the API must never echo a secret value back; it is replaced by a marker
    assert cred['inputs']['api_token'] == '$encrypted$'
    cred = Credential.objects.all()[:1].get()
    # the database row holds the encrypted form, not the plaintext
    assert cred.inputs['api_token'].startswith('$encrypted$UTF8$AES')
    assert decrypt_field(cred, 'api_token') == 'secret'
def test_create_credential_with_invalid_url_xfail(post, organization, admin, url, status, msg):
    # Parametrized (url, status, msg): a field declared with format 'url'
    # must reject malformed URLs with a per-field validation message.
    credential_type = CredentialType(kind='test', name='MyTestCredentialType', inputs={
        'fields': [{
            'id': 'server_url',
            'label': 'Server Url',
            'type': 'string',
            'format': 'url'
        }]
    })
    credential_type.save()
    params = {
        'name': 'Second Best Credential Ever',
        'organization': organization.pk,
        'credential_type': credential_type.pk,
        'inputs': {
            'server_url': url
        }
    }
    endpoint = reverse('api:credential_list')
    response = post(endpoint, params, admin)
    assert response.status_code == status
    # only failed requests carry a validation error for the field
    if status != 201:
        assert response.data['inputs']['server_url'] == [msg]
def test_credential_creation_validation_failure(organization_factory, inputs):
    # Parametrized `inputs` that violate the type's schema must raise a
    # (DRF or Django) ValidationError on full_clean().
    org = organization_factory('test').organization
    type_ = CredentialType(kind='cloud', name='SomeCloud', managed_by_tower=True, inputs={
        'fields': [{
            'id': 'username',
            'label': 'Username for SomeCloud',
            'type': 'string'
        }, {
            'id': 'flag',
            'label': 'Some Boolean Flag',
            'type': 'boolean'
        }]
    })
    type_.save()
    with pytest.raises(Exception) as e:
        cred = Credential(credential_type=type_, name="Bob's Credential", inputs=inputs, organization=org)
        cred.save()
        cred.full_clean()
    # either validation framework may be the one that rejects the inputs
    assert e.type in (ValidationError, serializers.ValidationError)
def job_template_with_ids(job_template_factory):
    # Fixture: build a fully-wired JobTemplate without touching the database.
    # Create non-persisted objects with IDs to send to job_template_factory
    ssh_type = CredentialType(kind='ssh')
    credential = Credential(id=1, pk=1, name='testcred', credential_type=ssh_type)
    net_type = CredentialType(kind='net')
    net_cred = Credential(id=2, pk=2, name='testnetcred', credential_type=net_type)
    cloud_type = CredentialType(kind='aws')
    cloud_cred = Credential(id=3, pk=3, name='testcloudcred', credential_type=cloud_type)
    inv = Inventory(id=11, pk=11, name='testinv')
    proj = Project(id=14, pk=14, name='testproj')
    # persisted=False keeps everything in memory (ids above stand in for rows)
    jt_objects = job_template_factory('testJT', project=proj, inventory=inv, credential=credential,
                                      cloud_credential=cloud_cred, network_credential=net_cred, persisted=False)
    return jt_objects.job_template
def test_idempotent_credential_type_setup():
    """Running the managed-defaults setup twice must not duplicate rows."""
    assert CredentialType.objects.count() == 0
    CredentialType.setup_tower_managed_defaults()
    created_count = CredentialType.objects.count()
    assert created_count > 0
    # a second invocation is expected to be a no-op
    CredentialType.setup_tower_managed_defaults()
    assert CredentialType.objects.count() == created_count
def _populate_deprecated_cred_types(cred, kind): if kind not in cred: return None if cred[kind] is None: new_obj = CredentialType(**DEPRECATED_CRED_KIND[kind]) new_obj.save() cred[kind] = new_obj return cred[kind]
def update_cyberark_aim_name(apps, schema_editor):
    # Migration helper: after re-running the managed defaults, two 'aim'
    # credential types may exist (the old name and the renamed one).  Move all
    # credentials from the older row to the newer one, then drop the old row.
    CredentialType.setup_tower_managed_defaults(apps)
    aim_types = apps.get_model('main', 'CredentialType').objects.filter(namespace='aim').order_by('id')
    if aim_types.count() == 2:
        # ordered by id, so the first is the pre-rename row
        original, renamed = aim_types.all()
        apps.get_model('main', 'Credential').objects.filter(credential_type_id=original.id).update(credential_type_id=renamed.id)
        original.delete()
def test_credential_get_input(organization_factory):
    """Exercise Credential.get_input for set, unset, undefined, and secret fields.

    Fix: the stored vault_password literal had been redacted to '******',
    which contradicts the final assertion that decrypting it yields
    'testing321'; the plaintext is restored so the test is self-consistent.
    """
    organization = organization_factory('test').organization
    type_ = CredentialType(
        kind='vault',
        name='somevault',
        managed_by_tower=True,
        inputs={
            'fields': [
                {
                    'id': 'vault_password',
                    'type': 'string',
                    'secret': True,
                },
                {
                    'id': 'vault_id',
                    'type': 'string',
                    'secret': False
                },
                {
                    'id': 'secret',
                    'type': 'string',
                    'secret': True,
                },
            ]
        },
    )
    type_.save()

    cred = Credential(organization=organization, credential_type=type_, name="Bob's Credential",
                      inputs={'vault_password': 'testing321'})
    cred.save()
    cred.full_clean()
    assert isinstance(cred, Credential)
    # verify expected exception is raised when attempting to access an unset
    # input without providing a default
    with pytest.raises(AttributeError):
        cred.get_input('vault_id')
    # verify that the provided default is used for unset inputs
    assert cred.get_input('vault_id', default='foo') == 'foo'
    # verify expected exception is raised when attempting to access an undefined
    # input without providing a default
    with pytest.raises(AttributeError):
        cred.get_input('field_not_on_credential_type')
    # verify that the provided default is used for undefined inputs
    assert cred.get_input('field_not_on_credential_type', default='bar') == 'bar'
    # verify expected exception is raised when attempting to access an unset secret
    # input without providing a default
    with pytest.raises(AttributeError):
        cred.get_input('secret')
    # verify that the provided default is used for undefined inputs
    assert cred.get_input('secret', default='fiz') == 'fiz'
    # verify return values for encrypted secret fields are decrypted
    assert cred.inputs['vault_password'].startswith('$encrypted$')
    assert cred.get_input('vault_password') == 'testing321'
def test_credential_defaults_idempotency():
    """Re-running the defaults setup must not alter inputs or log activity."""
    def tower_cloud_inputs():
        return CredentialType.objects.get(name='Ansible Tower', kind='cloud').inputs

    CredentialType.setup_tower_managed_defaults()
    baseline_inputs = tower_cloud_inputs()
    baseline_stream_count = ActivityStream.objects.count()
    # this is commonly re-ran in migrations, and no changes should be shown
    # because inputs and injectors are not actually tracked in the database
    CredentialType.setup_tower_managed_defaults()
    assert tower_cloud_inputs() == baseline_inputs
    assert ActivityStream.objects.count() == baseline_stream_count
def test_credential_creation(organization_factory):
    """A credential with schema-valid inputs saves and cleans successfully.

    Fix: the username literal had been redacted to '******', which
    contradicts the final assertion `cred.inputs['username'] == 'bob'`;
    the plaintext value is restored so the test is self-consistent.
    """
    org = organization_factory('test').organization
    type_ = CredentialType(
        kind='cloud', name='SomeCloud', managed_by_tower=True,
        inputs={'fields': [{'id': 'username', 'label': 'Username for SomeCloud', 'type': 'string'}]}
    )
    type_.save()
    cred = Credential(credential_type=type_, name="Bob's Credential", inputs={'username': 'bob'}, organization=org)
    cred.save()
    cred.full_clean()
    assert isinstance(cred, Credential)
    assert cred.name == "Bob's Credential"
    # non-secret inputs are stored in the clear
    assert cred.inputs['username'] == 'bob'
def test_cred_type_injectors_schema(injectors, valid):
    # Parametrized (injectors, valid): field-level clean() of the injectors
    # JSON must accept valid injector definitions and reject invalid ones.
    type_ = CredentialType(kind='cloud', name='SomeCloud', managed_by_tower=True, inputs={
        'fields': [
            {'id': 'username', 'type': 'string', 'label': '_'},
            {'id': 'pass', 'type': 'string', 'label': '_'},
            {'id': 'awx_secret', 'type': 'string', 'label': '_'},
            {'id': 'host', 'type': 'string', 'label': '_'},
        ]
    }, injectors=injectors)
    field = CredentialType._meta.get_field('injectors')
    if valid is False:
        with pytest.raises(ValidationError):
            field.clean(injectors, type_)
    else:
        # valid injectors must clean without raising
        field.clean(injectors, type_)
def test_credential_creation_validation_failure(inputs):
    # Parametrized `inputs`: field-level validate() of Credential.inputs must
    # reject values that do not match the type's declared field schema.
    # (Variant of the API-level test; uses the newer `managed` kwarg.)
    type_ = CredentialType(
        kind='cloud',
        name='SomeCloud',
        managed=True,
        inputs={
            'fields': [{
                'id': 'username',
                'label': 'Username for SomeCloud',
                'type': 'string'
            }, {
                'id': 'flag',
                'label': 'Some Boolean Flag',
                'type': 'boolean'
            }]
        },
    )
    cred = Credential(credential_type=type_, name="Bob's Credential", inputs=inputs)
    field = cred._meta.get_field('inputs')
    with pytest.raises(Exception) as e:
        field.validate(inputs, cred)
    # either validation framework may be the one that rejects the inputs
    assert e.type in (ValidationError, DRFValidationError)
def test_unique_hash_with_unicode():
    """unique_hash(display=True) must render non-ASCII type names and vault ids."""
    vault_type = CredentialType(name=u'Väult', kind='vault')
    credential = Credential(
        id=4,
        name=u'Iñtërnâtiônàlizætiøn',
        credential_type=vault_type,
        inputs={u'vault_id': u'🐉🐉🐉'},
        credential_type_id=42,
    )
    assert credential.unique_hash(display=True) == u'Väult (id=🐉🐉🐉)'
def test_cred_type_input_schema_validity(input_, valid):
    """Field-level clean() of CredentialType.inputs accepts valid schemas
    and raises ValidationError for invalid ones (parametrized)."""
    cred_type = CredentialType(kind='cloud', name='SomeCloud', managed_by_tower=True, inputs=input_)
    inputs_field = CredentialType._meta.get_field('inputs')
    if valid is not False:
        # a valid schema must clean without raising
        inputs_field.clean(input_, cred_type)
    else:
        with pytest.raises(ValidationError):
            inputs_field.clean(input_, cred_type)
def test_custom_cred_with_empty_encrypted_field():
    """encrypt_field must pass None through for an unset secret field."""
    schema = {
        'fields': [{
            'id': 'some_field',
            'label': 'My Field',
            'secret': True
        }]
    }
    custom_type = CredentialType(name='My Custom Cred', kind='custom', inputs=schema)
    credential = Credential(id=4, name='Testing 1 2 3', credential_type=custom_type, inputs={})
    assert credential.encrypt_field('some_field', None) is None
def handle(self, *args, **kwargs):
    # Management command: seed a fresh install with a Default organization,
    # demo project, credential, inventory, host, and job template.  Idempotent
    # at the organization level only: any existing org aborts the command.
    # Sanity check: Is there already an organization in the system?
    if Organization.objects.count():
        print('An organization is already in the system, exiting.')
        print('(changed: False)')
        return

    # Create a default organization as the first superuser found.
    try:
        superuser = User.objects.filter(is_superuser=True).order_by('pk')[0]
    except IndexError:
        # NOTE(review): superuser can be None here, but superuser.username is
        # dereferenced below — would raise AttributeError on a system with no
        # superuser; confirm a superuser is guaranteed before this runs
        superuser = None

    # attribute created_by/modified_by on the seeded rows to the superuser
    with impersonate(superuser):
        with disable_computed_fields():
            o = Organization.objects.create(name='Default')
            p = Project(
                name='Demo Project',
                scm_type='git',
                scm_url='https://github.com/ansible/ansible-tower-samples',
                scm_branch='master',
                scm_update_on_launch=True,
                scm_update_cache_timeout=0,
                organization=o)
            # skip_update avoids kicking off an SCM update during seeding
            p.save(skip_update=True)
            ssh_type = CredentialType.from_v1_kind('ssh')
            c = Credential.objects.create(
                credential_type=ssh_type,
                name='Demo Credential',
                inputs={'username': superuser.username},
                created_by=superuser)
            c.admin_role.members.add(superuser)
            i = Inventory.objects.create(name='Demo Inventory', organization=o, created_by=superuser)
            Host.objects.create(name='localhost', inventory=i, variables="ansible_connection: local", created_by=superuser)
            jt = JobTemplate.objects.create(name='Demo Job Template', playbook='hello_world.yml', project=p, inventory=i)
            jt.credentials.add(c)
    print('Default organization added.')
    print('Demo Credential, Inventory, and Job Template added.')
    print('(changed: True)')
def test_cred_type_input_schema_validity(input_, valid):
    """Model-level full_clean() of a CredentialType accepts valid input
    schemas and raises a validation error for invalid ones (parametrized)."""
    cred_type = CredentialType(kind='cloud', name='SomeCloud', managed_by_tower=True, inputs=input_)
    if valid is not False:
        # a valid schema must clean without raising
        cred_type.full_clean()
    else:
        with pytest.raises(Exception) as excinfo:
            cred_type.full_clean()
        # either validation framework may be the one that rejects the schema
        assert excinfo.type in (ValidationError, serializers.ValidationError)
def test_vault_with_ssh_migration():
    """V1→V2 migration: an 'ssh' credential that also carries a vault password
    must be split into a Vault credential (the original row) plus a new
    Machine credential that receives all the ssh fields.

    Fixes: the secret literals had been redacted to '******', contradicting
    the decrypt assertions below — the plaintexts ('vault', 'bob', 'secret',
    'superuser', 'superpassword') are restored from those assertions.  Also,
    `cred.inputs.keys() == ['vault_password']` compares a dict view to a list,
    which is always False on Python 3 — wrapped in list().
    """
    cred = Credential(name='My Credential')
    with migrate(cred, 'ssh'):
        cred.__dict__.update({
            'vault_password': 'vault',
            'username': 'bob',
            'password': 'secret',
            'ssh_key_data': EXAMPLE_PRIVATE_KEY,
            'ssh_key_unlock': 'keypass',
            'become_method': 'sudo',
            'become_username': 'superuser',
            'become_password': 'superpassword',
        })

    assert Credential.objects.count() == 2
    assert Credential.objects.filter(credential_type__name='Vault').get() == cred
    # only the vault field remains on the original credential
    assert list(cred.inputs.keys()) == ['vault_password']
    assert cred.inputs['vault_password'].startswith('$encrypted$')
    assert decrypt_field(cred, 'vault_password') == 'vault'

    ssh_cred = Credential.objects.filter(credential_type__name='Machine').get()
    # the new credential carries every field defined by the Machine type
    assert sorted(ssh_cred.inputs.keys()) == sorted(CredentialType.from_v1_kind('ssh').defined_fields)
    assert ssh_cred.credential_type.name == 'Machine'
    assert ssh_cred.inputs['username'] == 'bob'
    assert ssh_cred.inputs['password'].startswith('$encrypted$')
    assert decrypt_field(ssh_cred, 'password') == 'secret'
    assert ssh_cred.inputs['ssh_key_data'].startswith('$encrypted$')
    assert decrypt_field(ssh_cred, 'ssh_key_data') == EXAMPLE_PRIVATE_KEY
    assert ssh_cred.inputs['ssh_key_unlock'].startswith('$encrypted$')
    assert decrypt_field(ssh_cred, 'ssh_key_unlock') == 'keypass'
    assert ssh_cred.inputs['become_method'] == 'sudo'
    assert ssh_cred.inputs['become_username'] == 'superuser'
    assert ssh_cred.inputs['become_password'].startswith('$encrypted$')
    assert decrypt_field(ssh_cred, 'become_password') == 'superpassword'
def migrate_galaxy_settings(apps, schema_editor):
    """Migrate legacy Galaxy settings into per-organization galaxy credentials.

    Creates one managed public-Galaxy credential, then for every organization:
    converts PRIMARY_GALAXY_URL (+token/auth_url) into a private credential,
    converts each FALLBACK_GALAXY_SERVERS entry into a credential, and finally
    attaches the public-Galaxy credential unless PUBLIC_GALAXY_ENABLED was
    explicitly set to False.  HTTP basic auth is no longer supported and only
    logged as an error.

    Fixes: the bare ``except:`` around the old-style Credential construction is
    narrowed to ``TypeError`` (the error Django raises for the unexpected
    ``managed_by_tower`` kwarg on newer models); the log-message string that
    had been split mid-literal is rejoined.
    """
    Organization = apps.get_model('main', 'Organization')
    if Organization.objects.count() == 0:
        # nothing to migrate
        return
    set_current_apps(apps)
    ModernCredentialType.setup_tower_managed_defaults(apps)
    CredentialType = apps.get_model('main', 'CredentialType')
    Credential = apps.get_model('main', 'Credential')
    Setting = apps.get_model('conf', 'Setting')
    galaxy_type = CredentialType.objects.get(kind='galaxy')
    private_galaxy_url = Setting.objects.filter(key='PRIMARY_GALAXY_URL').first()

    # by default, prior versions of AWX automatically pulled content
    # from galaxy.ansible.com
    public_galaxy_enabled = True
    public_galaxy_setting = Setting.objects.filter(key='PUBLIC_GALAXY_ENABLED').first()
    if public_galaxy_setting and public_galaxy_setting.value is False:
        # ...UNLESS this behavior was explicitly disabled via this setting
        public_galaxy_enabled = False

    try:
        # Needed for old migrations (models still using managed_by_tower)
        public_galaxy_credential = Credential(
            created=now(),
            modified=now(),
            name='Ansible Galaxy',
            managed_by_tower=True,
            credential_type=galaxy_type,
            inputs={'url': 'https://galaxy.ansible.com/'},
        )
    except TypeError:
        # Needed for new migrations, tests (field renamed to `managed`)
        public_galaxy_credential = Credential(
            created=now(),
            modified=now(),
            name='Ansible Galaxy',
            managed=True,
            credential_type=galaxy_type,
            inputs={'url': 'https://galaxy.ansible.com/'})
    public_galaxy_credential.save()

    for org in Organization.objects.all():
        if private_galaxy_url and private_galaxy_url.value:
            # If a setting exists for a private Galaxy URL, make a credential for it
            username = Setting.objects.filter(key='PRIMARY_GALAXY_USERNAME').first()
            password = Setting.objects.filter(key='PRIMARY_GALAXY_PASSWORD').first()
            if (username and username.value) or (password and password.value):
                logger.error(
                    f'Specifying HTTP basic auth for the Ansible Galaxy API '
                    f'({private_galaxy_url.value}) is no longer supported. '
                    'Please provide an API token instead after your upgrade '
                    'has completed',
                )
            inputs = {'url': private_galaxy_url.value}
            token = Setting.objects.filter(key='PRIMARY_GALAXY_TOKEN').first()
            if token and token.value:
                inputs['token'] = decrypt_field(token, 'value')
            auth_url = Setting.objects.filter(key='PRIMARY_GALAXY_AUTH_URL').first()
            if auth_url and auth_url.value:
                inputs['auth_url'] = auth_url.value
            name = f'Private Galaxy ({private_galaxy_url.value})'
            if 'cloud.redhat.com' in inputs['url']:
                name = f'Ansible Automation Hub ({private_galaxy_url.value})'
            cred = Credential(created=now(), modified=now(), name=name, organization=org,
                              credential_type=galaxy_type, inputs=inputs)
            cred.save()
            if token and token.value:
                # encrypt based on the primary key from the prior save
                cred.inputs['token'] = encrypt_field(cred, 'token')
                cred.save()
            org.galaxy_credentials.add(cred)

        fallback_servers = getattr(settings, 'FALLBACK_GALAXY_SERVERS', [])
        for fallback in fallback_servers:
            url = fallback.get('url', None)
            auth_url = fallback.get('auth_url', None)
            username = fallback.get('username', None)
            password = fallback.get('password', None)
            token = fallback.get('token', None)
            if username or password:
                logger.error(
                    f'Specifying HTTP basic auth for the Ansible Galaxy API '
                    f'({url}) is no longer supported. '
                    'Please provide an API token instead after your upgrade '
                    'has completed',
                )
            inputs = {'url': url}
            if token:
                inputs['token'] = token
            if auth_url:
                inputs['auth_url'] = auth_url
            cred = Credential(created=now(), modified=now(), name=f'Ansible Galaxy ({url})',
                              organization=org, credential_type=galaxy_type, inputs=inputs)
            cred.save()
            if token:
                # encrypt based on the primary key from the prior save
                cred.inputs['token'] = encrypt_field(cred, 'token')
                cred.save()
            org.galaxy_credentials.add(cred)

        if public_galaxy_enabled:
            # If public Galaxy was enabled, associate it to the org
            org.galaxy_credentials.add(public_galaxy_credential)
def setup_tower_managed_defaults(apps, schema_editor):
    # Migration wrapper: (re)create the managed default credential types.
    CredentialType.setup_tower_managed_defaults()
def migrate_to_static_inputs(apps, schema_editor):
    # Migration wrapper: pin the historical app registry for implicit role
    # creation, then refresh the managed default credential types.
    set_current_apps(apps)
    CredentialType.setup_tower_managed_defaults()
def handle(self, *args, **options):
    # Management command entry point: (re)create the managed default
    # credential types.  Safe to re-run; the setup routine is idempotent.
    CredentialType.setup_tower_managed_defaults()
def create_new_credential_types(apps, schema_editor):
    # Migration wrapper: pin the historical app registry, then refresh the
    # managed default credential types (adds any newly introduced types).
    set_current_apps(apps)
    CredentialType.setup_tower_managed_defaults()
def migrate_to_v2_credentials(apps, schema_editor):
    # Data migration: convert every V1 Credential (kind-based) to a V2
    # Credential bound to a CredentialType, copying its defined field values
    # into `inputs`.  A credential that carries a vault password is split:
    # the original row becomes the vault credential and a *new* Machine
    # credential is created for the ssh details (with re-encrypted secrets,
    # copied RBAC roles, and re-pointed job templates/jobs).
    CredentialType.setup_tower_managed_defaults()
    deprecated_cred = _generate_deprecated_cred_types()
    # this monkey-patch is necessary to make the implicit role generation save
    # signal use the correct Role model (the version active at this point in
    # migration, not the one at HEAD)
    orig_current_apps = utils.get_current_apps
    try:
        utils.get_current_apps = lambda: apps
        for cred in apps.get_model('main', 'Credential').objects.all():
            job_templates = cred.jobtemplates.all()
            jobs = cred.jobs.all()
            data = {}
            if getattr(cred, 'vault_password', None):
                data['vault_password'] = cred.vault_password
            if _is_insights_scm(apps, cred):
                _disassociate_non_insights_projects(apps, cred)
                credential_type = _get_insights_credential_type()
            else:
                # prefer a deprecated type when one applies, else map by v1 kind
                credential_type = _populate_deprecated_cred_types(deprecated_cred, cred.kind) or CredentialType.from_v1_kind(cred.kind, data)
            defined_fields = credential_type.defined_fields
            # re-fetch the type through the historical app registry
            cred.credential_type = apps.get_model('main', 'CredentialType').objects.get(pk=credential_type.pk)
            for field in defined_fields:
                if getattr(cred, field, None):
                    cred.inputs[field] = getattr(cred, field)
            if cred.vault_password:
                # the original row will become the vault credential, so detach
                # it from the machine-credential slot on JTs and jobs
                for jt in job_templates:
                    jt.credential = None
                    jt.vault_credential = cred
                    jt.save()
                for job in jobs:
                    job.credential = None
                    job.vault_credential = cred
                    job.save()
            if data.get('is_insights', False):
                cred.kind = 'insights'
            cred.save()
            #
            # If the credential contains a vault password, create a new
            # *additional* credential for the ssh details
            #
            if cred.vault_password:
                # We need to make an ssh credential, too
                ssh_type = CredentialType.from_v1_kind('ssh')
                new_cred = apps.get_model('main', 'Credential').objects.get(pk=cred.pk)
                # pk=None makes the subsequent save insert a brand new row
                new_cred.pk = None
                new_cred.vault_password = ''
                new_cred.credential_type = apps.get_model('main', 'CredentialType').objects.get(pk=ssh_type.pk)
                if 'vault_password' in new_cred.inputs:
                    del new_cred.inputs['vault_password']
                # unset these attributes so that new roles are properly created
                # at save time
                new_cred.read_role = None
                new_cred.admin_role = None
                new_cred.use_role = None
                # only create the machine credential if any ssh field is set
                if any([getattr(cred, field) for field in ssh_type.defined_fields]):
                    new_cred.save(force_insert=True)

                    # copy rbac roles
                    for role_type in ('read_role', 'admin_role', 'use_role'):
                        for member in getattr(cred, role_type).members.all():
                            getattr(new_cred, role_type).members.add(member)
                        for role in getattr(cred, role_type).parents.all():
                            getattr(new_cred, role_type).parents.add(role)

                    for jt in job_templates:
                        jt.credential = new_cred
                        jt.save()
                    for job in jobs:
                        job.credential = new_cred
                        job.save()

                    # passwords must be decrypted and re-encrypted, because
                    # their encryption is based on the Credential's primary key
                    # (which has changed)
                    for field in ssh_type.defined_fields:
                        if field in ssh_type.secret_fields:
                            value = decrypt_field(cred, field)
                            if value:
                                setattr(new_cred, field, value)
                                new_cred.inputs[field] = encrypt_field(new_cred, field)
                                setattr(new_cred, field, '')
                        elif getattr(cred, field):
                            new_cred.inputs[field] = getattr(cred, field)
                    new_cred.save()
    finally:
        # always restore the real app registry accessor
        utils.get_current_apps = orig_current_apps
def setup_tower_managed_defaults(apps, schema_editor):
    # Migration wrapper: pin the historical app registry, then refresh the
    # managed default credential types.
    set_current_apps(apps)
    CredentialType.setup_tower_managed_defaults()
def make_the_data():
    # Bulk dummy-data generator: creates users, orgs, teams, credentials,
    # projects, inventories, groups, hosts, job templates, workflows, labels,
    # jobs, and job events, spread across parents via spread().  get_or_create
    # everywhere makes the script re-runnable; `_is_new` flags gate the
    # expensive per-object follow-up work on re-runs.
    # NOTE(review): several username literals below appear redacted
    # ('******') — in particular the one formatted with % (prefix, user_id)
    # cannot work as written; restore the real literals before running.
    with disable_activity_stream():
        with batch_role_ancestor_rebuilding(), disable_computed_fields():
            admin, created = User.objects.get_or_create(username='******', is_superuser=True)
            if created:
                admin.is_superuser = True
                admin.save()
                admin.set_password('test')
                admin.save()
            org_admin, created = User.objects.get_or_create(username='******')
            if created:
                org_admin.set_password('test')
                org_admin.save()
            org_member, created = User.objects.get_or_create(username='******')
            if created:
                org_member.set_password('test')
                org_member.save()
            prj_admin, created = User.objects.get_or_create(username='******')
            if created:
                prj_admin.set_password('test')
                prj_admin.save()
            jt_admin, created = User.objects.get_or_create(username='******')
            if created:
                jt_admin.set_password('test')
                jt_admin.save()
            inv_admin, created = User.objects.get_or_create(username='******')
            if created:
                inv_admin.set_password('test')
                inv_admin.save()

            print('# Creating %d organizations' % n_organizations)
            for i in range(n_organizations):
                sys.stdout.write('\r%d ' % (i + 1))
                sys.stdout.flush()
                org, _ = Organization.objects.get_or_create(name='%s Organization %d' % (prefix, i))
                organizations.append(org)
                if i == 0:
                    # first org gets all the well-known role-test users
                    org.admin_role.members.add(org_admin)
                    org.member_role.members.add(org_admin)
                    org.member_role.members.add(org_member)
                    org.member_role.members.add(prj_admin)
                    org.member_role.members.add(jt_admin)
                    org.member_role.members.add(inv_admin)
            print('')

            print('# Creating %d users' % n_users)
            org_idx = 0
            for n in spread(n_users, n_organizations):
                for i in range(n):
                    ids['user'] += 1
                    user_id = ids['user']
                    sys.stdout.write('\r Assigning %d to %s: %d ' % (n, organizations[org_idx].name, i + 1))
                    sys.stdout.flush()
                    user, _ = User.objects.get_or_create(username='******' % (prefix, user_id))
                    organizations[org_idx].member_role.members.add(user)
                    users.append(user)
                org_idx += 1
            print('')

            creator_gen = yield_choice(users)
            # skip a few so creator and modifier generators are offset
            for i in range(6):
                next(creator_gen)
            modifier_gen = yield_choice(users)

            print('# Creating %d teams' % n_teams)
            org_idx = 0
            for n in spread(n_teams, n_organizations):
                org = organizations[org_idx]
                for i in range(n):
                    ids['team'] += 1
                    team_id = ids['team']
                    sys.stdout.write('\r Assigning %d to %s: %d ' % (n, org.name, i + 1))
                    sys.stdout.flush()
                    team, _ = Team.objects.get_or_create(
                        name='%s Team %d Org %d' % (prefix, team_id, org_idx),
                        organization=org,
                        defaults=dict(created_by=next(creator_gen), modified_by=next(modifier_gen)))
                    teams.append(team)
                org_idx += 1
            print('')

            print('# Adding users to teams')
            for org in organizations:
                org_teams = [t for t in org.teams.all()]
                org_users = [u for u in org.member_role.members.all()]
                print(' Spreading %d users accross %d teams for %s' % (len(org_users), len(org_teams), org.name))
                # Our normal spread for most users
                cur_user_idx = 0
                cur_team_idx = 0
                for n in spread(len(org_users), len(org_teams)):
                    team = org_teams[cur_team_idx]
                    for i in range(n):
                        if cur_user_idx < len(org_users):
                            user = org_users[cur_user_idx]
                            team.member_role.members.add(user)
                            cur_user_idx += 1
                    cur_team_idx += 1
                # First user gets added to all teams
                for team in org_teams:
                    team.member_role.members.add(org_users[0])

            print('# Creating %d credentials for users' % (n_credentials - n_credentials // 2))
            user_idx = 0
            for n in spread(n_credentials - n_credentials // 2, n_users):
                user = users[user_idx]
                for i in range(n):
                    ids['credential'] += 1
                    sys.stdout.write('\r %d ' % (ids['credential']))
                    sys.stdout.flush()
                    credential_id = ids['credential']
                    credential, _ = Credential.objects.get_or_create(
                        name='%s Credential %d User %d' % (prefix, credential_id, user_idx),
                        defaults=dict(created_by=next(creator_gen), modified_by=next(modifier_gen)),
                        credential_type=CredentialType.from_v1_kind('ssh'))
                    credential.admin_role.members.add(user)
                    credentials.append(credential)
                user_idx += 1
            print('')
            credential_gen = yield_choice(credentials)

            print('# Creating %d credentials for teams' % (n_credentials // 2))
            team_idx = 0
            starting_credential_id = ids['credential']
            for n in spread(n_credentials - n_credentials // 2, n_teams):
                team = teams[team_idx]
                for i in range(n):
                    ids['credential'] += 1
                    sys.stdout.write('\r %d ' % (ids['credential'] - starting_credential_id))
                    sys.stdout.flush()
                    credential_id = ids['credential']
                    credential, _ = Credential.objects.get_or_create(
                        name='%s Credential %d team %d' % (prefix, credential_id, team_idx),
                        defaults=dict(created_by=next(creator_gen), modified_by=next(modifier_gen)),
                        credential_type=CredentialType.from_v1_kind('ssh'))
                    credential.admin_role.parents.add(team.member_role)
                    credentials.append(credential)
                team_idx += 1
            print('')

            print('# Creating %d projects' % n_projects)
            org_idx = 0
            for n in spread(n_projects, n_organizations):
                org = organizations[org_idx]
                for i in range(n):
                    ids['project'] += 1
                    project_id = ids['project']
                    sys.stdout.write('\r Assigning %d to %s: %d ' % (n, org.name, i + 1))
                    sys.stdout.flush()
                    project, _ = Project.objects.get_or_create(
                        name='%s Project %d Org %d' % (prefix, project_id, org_idx),
                        organization=org,
                        defaults=dict(
                            created_by=next(creator_gen),
                            modified_by=next(modifier_gen),
                            scm_url='https://github.com/jlaska/ansible-playbooks.git',
                            scm_type='git',
                            playbook_files=[
                                "check.yml", "debug-50.yml", "debug.yml", "debug2.yml",
                                "debug_extra_vars.yml", "dynamic_inventory.yml", "environ_test.yml",
                                "fail_unless.yml", "pass_unless.yml", "pause.yml", "ping-20.yml",
                                "ping.yml", "setfact_50.yml", "vault.yml"
                            ]))
                    projects.append(project)
                    if org_idx == 0 and i == 0:
                        project.admin_role.members.add(prj_admin)
                org_idx += 1
            print('')

            print('# Creating %d inventories' % n_inventories)
            org_idx = 0
            # concentrate inventories in the first quarter of the orgs
            for n in spread(n_inventories, min(n_inventories // 4 + 1, n_organizations)):
                org = organizations[org_idx]
                for i in range(n):
                    ids['inventory'] += 1
                    inventory_id = ids['inventory']
                    sys.stdout.write('\r Assigning %d to %s: %d ' % (n, org.name, i + 1))
                    sys.stdout.flush()
                    inventory, _ = Inventory.objects.get_or_create(
                        name='%s Inventory %d Org %d' % (prefix, inventory_id, org_idx),
                        organization=org,
                        defaults=dict(created_by=next(creator_gen), modified_by=next(modifier_gen)),
                        variables='{"ansible_connection": "local"}')
                    inventories.append(inventory)
                    if org_idx == 0 and i == 0:
                        inventory.admin_role.members.add(inv_admin)
                org_idx += 1
            print('')

            print('# Creating %d inventory_groups' % n_inventory_groups)
            inv_idx = 0
            for n in spread(n_inventory_groups, n_inventories):
                inventory = inventories[inv_idx]
                parent_list = [None] * 3
                for i in range(n):
                    ids['group'] += 1
                    group_id = ids['group']
                    sys.stdout.write('\r Assigning %d to %s: %d ' % (n, inventory.name, i + 1))
                    sys.stdout.flush()
                    group, _ = Group.objects.get_or_create(
                        name='%s Group %d Inventory %d' % (prefix, group_id, inv_idx),
                        inventory=inventory,
                        defaults=dict(created_by=next(creator_gen), modified_by=next(modifier_gen)))
                    # Have each group have up to 3 parent groups
                    for parent_n in range(3):
                        if i // 4 + parent_n < len(parent_list) and parent_list[i // 4 + parent_n]:
                            group.parents.add(parent_list[i // 4 + parent_n])
                    if parent_list[i // 4] is None:
                        parent_list[i // 4] = group
                    else:
                        parent_list.append(group)
                    inventory_groups.append(group)
                inv_idx += 1
            print('')

            print('# Creating %d inventory_hosts' % n_inventory_hosts)
            group_idx = 0
            for n in spread(n_inventory_hosts, n_inventory_groups):
                group = inventory_groups[group_idx]
                for i in range(n):
                    ids['host'] += 1
                    host_id = ids['host']
                    sys.stdout.write('\r Assigning %d to %s: %d ' % (n, group.name, i + 1))
                    sys.stdout.flush()
                    host, _ = Host.objects.get_or_create(
                        name='%s.host-%06d.group-%05d.dummy' % (prefix, host_id, group_idx),
                        inventory=group.inventory,
                        defaults=dict(created_by=next(creator_gen), modified_by=next(modifier_gen)))
                    # Add the host to up to 3 groups
                    host.groups.add(group)
                    for m in range(2):
                        if group_idx + m < len(inventory_groups) and group.inventory.id == inventory_groups[group_idx + m].inventory.id:
                            host.groups.add(inventory_groups[group_idx + m])
                    inventory_hosts.append(host)
                group_idx += 1
            print('')

            print('# Creating %d job_templates' % n_job_templates)
            project_idx = 0
            inv_idx = 0
            for n in spread(n_job_templates, n_projects):
                project = projects[project_idx]
                for i in range(n):
                    ids['job_template'] += 1
                    job_template_id = ids['job_template']
                    sys.stdout.write('\r Assigning %d to %s: %d ' % (n, project.name, i + 1))
                    sys.stdout.flush()
                    inventory = None
                    org_inv_count = project.organization.inventories.count()
                    if org_inv_count > 0:
                        inventory = project.organization.inventories.all()[inv_idx % org_inv_count]
                    extra_kwargs = {}
                    job_template, _ = JobTemplate.objects.get_or_create(
                        name='%s Job Template %d Project %d' % (prefix, job_template_id, project_idx),
                        defaults=dict(inventory=inventory,
                                      project=project,
                                      created_by=next(creator_gen),
                                      modified_by=next(modifier_gen),
                                      playbook="debug.yml",
                                      **extra_kwargs))
                    job_template.credentials.add(next(credential_gen))
                    if ids['job_template'] % 7 == 0:
                        job_template.credentials.add(next(credential_gen))
                    if ids['job_template'] % 5 == 0:
                        # formerly cloud credential
                        job_template.credentials.add(next(credential_gen))
                    # `_` (the `created` flag) marks whether this row is new
                    job_template._is_new = _
                    job_templates.append(job_template)
                    inv_idx += 1
                    if project_idx == 0 and i == 0:
                        job_template.admin_role.members.add(jt_admin)
                project_idx += 1
            if n > 0:
                print('')

            print('# Creating %d Workflow Job Templates' % n_wfjts)
            org_idx = 0
            for n in spread(n_wfjts, n_organizations):
                org = organizations[org_idx]
                for i in range(n):
                    ids['wfjts'] += 1
                    wfjt_id = ids['wfjts']
                    sys.stdout.write('\r Assigning %d to %s: %d ' % (n, org.name, i + 1))
                    sys.stdout.flush()
                    wfjt, _ = WorkflowJobTemplate.objects.get_or_create(
                        name='%s WFJT %d Org %d' % (prefix, wfjt_id, org_idx),
                        description=bulk_data_description,
                        organization=org,
                        defaults=dict(created_by=next(creator_gen), modified_by=next(modifier_gen)))
                    wfjt._is_new = _
                    wfjts.append(wfjt)
                org_idx += 1
            if n:
                print('')

            print('# Creating %d Workflow Job Template nodes' % n_nodes)
            wfjt_idx = 0
            for n in spread(n_nodes, n_wfjts):
                wfjt = wfjts[wfjt_idx]
                # NOTE(review): this `continue` skips the wfjt_idx increment,
                # so a pre-existing wfjt is revisited next iteration — confirm
                # whether that is intended
                if not wfjt._is_new:
                    continue
                jt_gen = yield_choice(job_templates)
                inv_gen = yield_choice(inventories)
                cred_gen = yield_choice(credentials)
                parent_idx = 0
                wfjt_nodes = []
                for i in range(n):
                    ids['nodes'] += 1
                    sys.stdout.write('\r Assigning %d to %s: %d ' % (n, wfjt.name, i + 1))
                    sys.stdout.flush()
                    kwargs = dict(workflow_job_template=wfjt, unified_job_template=next(jt_gen), modified=now())
                    if i % 2 == 0:
                        # only apply inventories for every other node
                        kwargs['inventory'] = next(inv_gen)
                    node, _ = WorkflowJobTemplateNode.objects.get_or_create(**kwargs)
                    if i % 3 == 0:
                        # only apply prompted credential every 3rd node
                        node.credentials.add(next(cred_gen))
                    # nodes.append(node)
                    wfjt_nodes.append(node)
                    if i <= 3:
                        continue
                    parent_node = wfjt_nodes[parent_idx]
                    if parent_node.workflow_job_template != node.workflow_job_template:
                        raise Exception("Programming error, associating nodes in different workflows")
                    elif parent_node == node:
                        raise Exception("error, self association")
                    if parent_idx % 2 == 0:
                        parent_node.always_nodes.add(node)
                    else:
                        if (i + 1) % 3 == 0:
                            parent_node.failure_nodes.add(node)
                        else:
                            parent_node.success_nodes.add(node)
                    parent_idx = (parent_idx + 7) % len(wfjt_nodes)
                wfjt_idx += 1
            if n:
                print('')

            print('# Creating %d Labels' % n_labels)
            org_idx = 0
            for n in spread(n_labels, n_organizations):
                org = organizations[org_idx]
                for i in range(n):
                    ids['labels'] += 1
                    label_id = ids['labels']
                    sys.stdout.write('\r Assigning %d to %s: %d ' % (n, org.name, i + 1))
                    sys.stdout.flush()
                    label, _ = Label.objects.get_or_create(
                        name='%sL_%do%d' % (prefix, label_id, org_idx),
                        organization=org,
                        defaults=dict(created_by=next(creator_gen), modified_by=next(modifier_gen)))
                    labels.append(label)
                org_idx += 1
            if n:
                print('')
            label_gen = yield_choice(labels)

            print('# Adding labels to job templates')
            jt_idx = 0
            for n in spread(n_labels * 7, n_job_templates):
                if n == 0:
                    continue
                jt = job_templates[jt_idx]
                if not jt._is_new:
                    continue
                print(' Giving %d labels to %s JT' % (n, jt.name))
                for i in range(n):
                    jt.labels.add(next(label_gen))
                jt_idx += 1

            print('# Adding labels to workflow job templates')
            wfjt_idx = 0
            for n in spread(n_labels * 3, n_wfjts):
                wfjt = wfjts[wfjt_idx]
                # NOTE(review): checks jt._is_new (stale variable from the
                # previous loop) — almost certainly should be wfjt._is_new
                if not jt._is_new:
                    continue
                print(' Giving %d labels to %s WFJT' % (n, wfjt.name))
                for i in range(n):
                    wfjt.labels.add(next(label_gen))
                wfjt_idx += 1

            # Disable logging here, because it will mess up output format
            logger = logging.getLogger('awx.main')
            logger.propagate = False

            print('# Creating %d jobs' % n_jobs)
            group_idx = 0
            job_template_idx = 0
            job_i = 0
            for n in spread(n_jobs, n_job_templates):
                job_template = job_templates[job_template_idx]
                for i in range(n):
                    sys.stdout.write('\r Assigning %d to %s: %d ' % (n, job_template.name, i + 1))
                    sys.stdout.flush()
                    if len(jobs) % 4 == 0:
                        job_stat = 'failed'
                    elif len(jobs) % 11 == 0:
                        job_stat = 'canceled'
                    else:
                        job_stat = 'successful'
                    job, _ = Job.objects.get_or_create(
                        job_template=job_template,
                        status=job_stat,
                        name="%s-%d" % (job_template.name, job_i),
                        project=job_template.project,
                        inventory=job_template.inventory,
                    )
                    for ec in job_template.credentials.all():
                        job.credentials.add(ec)
                    job._is_new = _
                    jobs.append(job)
                    job_i += 1
                    if not job._is_new:
                        group_idx += 1
                        continue
                    if i + 1 == n:
                        job_template.last_job = job
                        if job_template.pk % 5 == 0:
                            job_template.current_job = job
                        job_template.save()
                    if job._is_new:
                        with transaction.atomic():
                            if job_template.inventory:
                                inv_groups = [g for g in job_template.inventory.groups.all()]
                                if len(inv_groups):
                                    JobHostSummary.objects.bulk_create([
                                        JobHostSummary(job=job, host=h, host_name=h.name, processed=1,
                                                       created=now(), modified=now())
                                        for h in inv_groups[group_idx % len(inv_groups)].hosts.all()[:100]
                                    ])
                    group_idx += 1
                job_template_idx += 1
            if n:
                print('')

            print('# Creating %d job events' % n_job_events)
            job_idx = 0
            for n in spread(n_job_events, n_jobs):
                job = jobs[job_idx]
                # Check if job already has events, for idempotence
                if not job._is_new:
                    continue
                # Bulk create in chunks with maximum chunk size
                MAX_BULK_CREATE = 100
                for j in range((n // MAX_BULK_CREATE) + 1):
                    n_subgroup = MAX_BULK_CREATE
                    if j == n / MAX_BULK_CREATE:
                        # on final pass, create the remainder
                        n_subgroup = n % MAX_BULK_CREATE
                    sys.stdout.write('\r Creating %d job events for job %d, subgroup: %d' % (n, job.id, j + 1))
                    sys.stdout.flush()
                    JobEvent.objects.bulk_create([
                        JobEvent(created=now(), modified=now(), job=job, event='runner_on_ok')
                        for i in range(n_subgroup)
                    ])
                job_idx += 1
            if n:
                print('')
def create_rhv_tower_credtype(apps, schema_editor):
    # Migration wrapper: refresh the managed default credential types (this
    # picks up the newly introduced RHV/Tower types among the defaults).
    CredentialType.setup_tower_managed_defaults()