def gce(cred, env, private_data_dir):
    """Inject GCE/GCP credentials: legacy env vars plus a service-account JSON key file."""
    project_id = cred.get_input('project', default='')
    client_email = cred.get_input('username', default='')

    service_account = {
        'type': 'service_account',
        'private_key': cred.get_input('ssh_key_data', default=''),
        'client_email': client_email,
        'project_id': project_id,
    }
    # Inventory updates rely solely on the credentials file; the legacy
    # GCE_* variables are only injected for non-inventory runs.
    if 'INVENTORY_UPDATE_ID' not in env:
        env['GCE_EMAIL'] = client_email
        env['GCE_PROJECT'] = project_id
    service_account['token_uri'] = 'https://oauth2.googleapis.com/token'

    fd, key_path = tempfile.mkstemp(dir=os.path.join(private_data_dir, 'env'))
    with os.fdopen(fd, 'w') as key_file:
        json.dump(service_account, key_file, indent=2)
    os.chmod(key_path, stat.S_IRUSR | stat.S_IWUSR)

    container_path = to_container_path(key_path, private_data_dir)
    # All three variables point at the same key file; different consumers
    # (old scripts, collections, google auth libraries) read different names.
    for var_name in ('GCE_CREDENTIALS_FILE_PATH',
                     'GCP_SERVICE_ACCOUNT_FILE',
                     'GOOGLE_APPLICATION_CREDENTIALS'):
        env[var_name] = container_path

    # Handle env variables for new module types.
    # This includes gcp_compute inventory plugin and
    # all new gcp_* modules.
    env['GCP_AUTH_KIND'] = 'serviceaccount'
    env['GCP_PROJECT'] = project_id
    env['GCP_ENV_TYPE'] = 'tower'
    return key_path
def openstack(cred, env, private_data_dir):
    """Write an OpenStack client-config YAML file and point OS_CLIENT_CONFIG_FILE at it."""
    fd, config_path = tempfile.mkstemp(dir=os.path.join(private_data_dir, 'env'))
    with os.fdopen(fd, 'w') as config_file:
        yaml.safe_dump(
            _openstack_data(cred),
            config_file,
            default_flow_style=False,
            allow_unicode=True,
        )
    # Restrict the config file to the owner; it contains secrets.
    os.chmod(config_path, stat.S_IRUSR | stat.S_IWUSR)
    env['OS_CLIENT_CONFIG_FILE'] = to_container_path(config_path, private_data_dir)
def kubernetes_bearer_token(cred, env, private_data_dir):
    """Inject K8S_AUTH_* env vars; write the CA cert to a key file when SSL verification is enabled."""
    env['K8S_AUTH_HOST'] = cred.get_input('host', default='')
    env['K8S_AUTH_API_KEY'] = cred.get_input('bearer_token', default='')

    # Verification only makes sense when a CA cert was supplied.
    if not (cred.get_input('verify_ssl') and 'ssl_ca_cert' in cred.inputs):
        env['K8S_AUTH_VERIFY_SSL'] = 'False'
        return

    env['K8S_AUTH_VERIFY_SSL'] = 'True'
    fd, ca_path = tempfile.mkstemp(dir=os.path.join(private_data_dir, 'env'))
    with os.fdopen(fd, 'w') as ca_file:
        # Lock down permissions before the secret material is written.
        os.chmod(ca_path, stat.S_IRUSR | stat.S_IWUSR)
        ca_file.write(cred.get_input('ssl_ca_cert'))
    env['K8S_AUTH_SSL_CA_CERT'] = to_container_path(ca_path, private_data_dir)
def read_content(private_data_dir, raw_env, inventory_update):
    """Read the environmental data laid down by the task system
    template out private and secret data so they will be readable and predictable
    return a dictionary `content` with file contents, keyed off environment variable
    that references the file
    """
    # build dict env as a mapping of environment variables to file names
    # Filter out environment variables which come from runtime environment
    env = {}
    exclude_keys = set(('PATH', 'INVENTORY_SOURCE_ID', 'INVENTORY_UPDATE_ID'))
    for key in dir(settings):
        if key.startswith('ANSIBLE_'):
            exclude_keys.add(key)
    for k, v in raw_env.items():
        if k in STANDARD_INVENTORY_UPDATE_ENV or k in exclude_keys:
            continue
        if k not in os.environ or v != os.environ[k]:
            env[k] = v

    # inverse mapping: file path -> list of env vars that reference it
    inverse_env = {}
    for key, value in env.items():
        inverse_env.setdefault(value, []).append(key)

    cache_file_regex = re.compile(
        r'/tmp/awx_{0}_[a-zA-Z0-9_]+/{1}_cache[a-zA-Z0-9_]+'.format(
            inventory_update.id, inventory_update.source))
    # NOTE(review): no re.DOTALL here, so `.` will not cross newlines;
    # presumably the key material is serialized on one line — confirm.
    private_key_regex = re.compile(
        r'-----BEGIN ENCRYPTED PRIVATE KEY-----.*-----END ENCRYPTED PRIVATE KEY-----'
    )

    # read directory content
    # build a mapping of the file paths to aliases which will be constant across runs
    dir_contents = {}
    referenced_paths = set()
    file_aliases = {}
    filename_list = os.listdir(private_data_dir)
    for subdir in ('env', 'inventory'):
        if subdir in filename_list:
            filename_list.remove(subdir)
            for filename in os.listdir(os.path.join(private_data_dir, subdir)):
                filename_list.append(os.path.join(subdir, filename))
    # Stable ordering: sort by the first env var referencing the file, if any
    filename_list = sorted(filename_list, key=lambda fn: inverse_env.get(
        os.path.join(private_data_dir, fn), [fn])[0])

    for filename in filename_list:
        if filename in ('args', 'project'):
            continue  # Ansible runner
        abs_file_path = os.path.join(private_data_dir, filename)
        file_aliases[abs_file_path] = filename
        runner_path = to_container_path(abs_file_path, private_data_dir)
        if runner_path in inverse_env:
            referenced_paths.add(abs_file_path)
            # pick the first unused 'file_reference[_N]' alias
            alias = 'file_reference'
            for i in range(10):
                if alias not in file_aliases.values():
                    break
                alias = 'file_reference_{}'.format(i)
            else:
                raise RuntimeError(
                    'Test not able to cope with >10 references by env vars. '
                    'Something probably went very wrong.')
            file_aliases[abs_file_path] = alias
            for env_key in inverse_env[runner_path]:
                env[env_key] = '{{{{ {} }}}}'.format(alias)
        try:
            with open(abs_file_path, 'r') as f:
                dir_contents[abs_file_path] = f.read()
            # Declare a reference to inventory plugin file if it exists
            if abs_file_path.endswith(
                    '.yml') and 'plugin: ' in dir_contents[abs_file_path]:
                referenced_paths.add(abs_file_path)  # used as inventory file
            elif cache_file_regex.match(abs_file_path):
                file_aliases[abs_file_path] = 'cache_file'
        except IsADirectoryError:
            dir_contents[abs_file_path] = '<directory>'
            if cache_file_regex.match(abs_file_path):
                file_aliases[abs_file_path] = 'cache_dir'

    # Substitute in aliases for cross-file references
    for abs_file_path, file_content in dir_contents.copy().items():
        if cache_file_regex.match(file_content):
            # FIX: original garbled chained comparison
            # (`'cache_file' not in file_aliases in file_aliases.values()`)
            # never checked the alias values; test both aliases properly.
            if ('cache_dir' not in file_aliases.values()
                    and 'cache_file' not in file_aliases.values()):
                raise AssertionError(
                    'A cache file was referenced but never created, files:\n{}'
                    .format(json.dumps(dir_contents, indent=4)))
        # if another files path appears in this file, replace it with its alias
        for target_path in dir_contents.keys():
            other_alias = file_aliases[target_path]
            if target_path in file_content:
                referenced_paths.add(target_path)
                # FIX: accumulate replacements in file_content so that a file
                # referencing several other files keeps every substitution,
                # not just the last one.
                file_content = file_content.replace(
                    target_path, '{{ ' + other_alias + ' }}')
                dir_contents[abs_file_path] = file_content

    # The env/settings file should be ignored, nothing needs to reference it as its picked up directly from runner
    ignore_files = [os.path.join(private_data_dir, 'env', 'settings')]

    # build dict content which is the directory contents keyed off the file aliases
    content = {}
    for abs_file_path, file_content in dir_contents.items():
        # assert that all files laid down are used
        if abs_file_path not in referenced_paths and abs_file_path not in ignore_files:
            raise AssertionError(
                "File {} is not referenced. References and files:\n{}\n{}".
                format(abs_file_path, json.dumps(env, indent=4),
                       json.dumps(dir_contents, indent=4)))
        file_content = private_key_regex.sub('{{private_key}}', file_content)
        content[file_aliases[abs_file_path]] = file_content

    return (env, content)
def inject_credential(self, credential, env, safe_env, args, private_data_dir):
    """
    Inject credential data into the environment variables and arguments
    passed to `ansible-playbook`

    :param credential:       a :class:`awx.main.models.Credential` instance
    :param env:              a dictionary of environment variables used in
                             the `ansible-playbook` call.  This method adds
                             additional environment variables based on
                             custom `env` injectors defined on this
                             CredentialType.
    :param safe_env:         a dictionary of environment variables stored
                             in the database for the job run
                             (`UnifiedJob.job_env`); secret values should
                             be stripped
    :param args:             a list of arguments passed to
                             `ansible-playbook` in the style of
                             `subprocess.call(args)`.  This method appends
                             additional arguments based on custom
                             `extra_vars` injectors defined on this
                             CredentialType.
    :param private_data_dir: a temporary directory to store files generated
                             by `file` injectors (like config files or key
                             files)
    """
    if not self.injectors:
        # No custom injectors: managed types fall back to the builtin
        # injector function matching their namespace (e.g. gce, openstack).
        if self.managed and credential.credential_type.namespace in dir(builtin_injectors):
            injected_env = {}
            getattr(builtin_injectors, credential.credential_type.namespace)(credential, injected_env, private_data_dir)
            env.update(injected_env)
            safe_env.update(build_safe_env(injected_env))
        return

    class TowerNamespace:
        pass

    tower_namespace = TowerNamespace()

    # maintain a normal namespace for building the ansible-playbook arguments (env and args)
    namespace = {'tower': tower_namespace}

    # maintain a sanitized namespace for building the DB-stored arguments (safe_env)
    safe_namespace = {'tower': tower_namespace}

    # build a normal namespace with secret values decrypted (for
    # ansible-playbook) and a safe namespace with secret values hidden (for
    # DB storage)
    injectable_fields = list(credential.inputs.keys()) + credential.dynamic_input_fields
    for field_name in list(set(injectable_fields)):
        value = credential.get_input(field_name)

        if type(value) is bool:
            # boolean values can't be secret/encrypted/external
            safe_namespace[field_name] = namespace[field_name] = value
            continue

        if field_name in self.secret_fields:
            # mask secret values in the DB-stored namespace
            safe_namespace[field_name] = '**********'
        elif len(value):
            safe_namespace[field_name] = value
        if len(value):
            namespace[field_name] = value

    for field in self.inputs.get('fields', []):
        # default missing boolean fields to False
        if field['type'] == 'boolean' and field['id'] not in credential.inputs.keys():
            namespace[field['id']] = safe_namespace[field['id']] = False
        # make sure private keys end with a \n
        if field.get('format') == 'ssh_private_key':
            if field['id'] in namespace and not namespace[field['id']].endswith('\n'):
                namespace[field['id']] += '\n'

    file_tmpls = self.injectors.get('file', {})
    # If any file templates are provided, render the files and update the
    # special `tower` template namespace so the filename can be
    # referenced in other injectors
    sandbox_env = sandbox.ImmutableSandboxedEnvironment()
    for file_label, file_tmpl in file_tmpls.items():
        data = sandbox_env.from_string(file_tmpl).render(**namespace)
        _, path = tempfile.mkstemp(dir=os.path.join(private_data_dir, 'env'))
        with open(path, 'w') as f:
            f.write(data)
        os.chmod(path, stat.S_IRUSR | stat.S_IWUSR)
        container_path = to_container_path(path, private_data_dir)

        # determine if filename indicates single file or many
        if file_label.find('.') == -1:
            # single file: exposed as `tower.filename`
            tower_namespace.filename = container_path
        else:
            # multiple files: exposed as `tower.filename.<label>`
            if not hasattr(tower_namespace, 'filename'):
                tower_namespace.filename = TowerNamespace()
            file_label = file_label.split('.')[1]
            setattr(tower_namespace.filename, file_label, container_path)

    injector_field = self._meta.get_field('injectors')
    for env_var, tmpl in self.injectors.get('env', {}).items():
        try:
            injector_field.validate_env_var_allowed(env_var)
        except ValidationError as e:
            # skip disallowed env vars rather than failing the whole run
            logger.error('Ignoring prohibited env var {}, reason: {}'.format(env_var, e))
            continue
        env[env_var] = sandbox_env.from_string(tmpl).render(**namespace)
        safe_env[env_var] = sandbox_env.from_string(tmpl).render(**safe_namespace)

    if 'INVENTORY_UPDATE_ID' not in env:
        # awx-manage inventory_update does not support extra_vars via -e
        extra_vars = {}
        for var_name, tmpl in self.injectors.get('extra_vars', {}).items():
            extra_vars[var_name] = sandbox_env.from_string(tmpl).render(**namespace)

        def build_extra_vars_file(vars, private_dir):
            # write rendered extra vars to a read-only file under env/
            handle, path = tempfile.mkstemp(dir=os.path.join(private_dir, 'env'))
            f = os.fdopen(handle, 'w')
            f.write(safe_dump(vars))
            f.close()
            os.chmod(path, stat.S_IRUSR)
            return path

        if extra_vars:
            path = build_extra_vars_file(extra_vars, private_data_dir)
            container_path = to_container_path(path, private_data_dir)
            args.extend(['-e', '@%s' % container_path])
def test_invalid_host_path(host_path):
    """A host path outside the private data dir must be rejected."""
    raised = False
    try:
        to_container_path(host_path, private_data_dir)
    except RuntimeError:
        raised = True
    assert raised, 'expected RuntimeError for invalid host path'
def test_switch_paths(container_path, host_path):
    """Host and container paths translate to each other in both directions."""
    to_container = to_container_path(host_path, private_data_dir)
    to_host = to_host_path(container_path, private_data_dir)
    assert to_container == container_path
    assert to_host == host_path