def proxy_check_upgrade_required(self): """Staging is required to collect firmware information from the web services proxy.""" # Verify controller consistency and get firmware versions try: # Retrieve current bundle version if self.is_firmware_bundled(): rc, response = self.request("storage-systems/%s/graph/xpath-filter?query=/controller/codeVersions[codeModule='bundleDisplay']" % self.ssid) current_firmware_version = six.b(response[0]["versionString"]) else: rc, response = self.request("storage-systems/%s/graph/xpath-filter?query=/sa/saData/fwVersion" % self.ssid) current_firmware_version = six.b(response[0]) # Determine whether upgrade is required if current_firmware_version != self.firmware_version(): current = current_firmware_version.split(b".")[:2] upgrade = self.firmware_version().split(b".")[:2] if current[0] < upgrade[0] or (current[0] == upgrade[0] and current[1] <= upgrade[1]): self.upgrade_required = True else: self.module.fail_json(msg="Downgrades are not permitted. Firmware [%s]. Array [%s]." % (self.firmware, self.ssid)) except Exception as error: self.module.fail_json(msg="Failed to retrieve controller firmware information. Array [%s]. Error [%s]" % (self.ssid, to_native(error))) # Determine current NVSRAM version and whether change is required try: rc, response = self.request("storage-systems/%s/graph/xpath-filter?query=/sa/saData/nvsramVersion" % self.ssid) if six.b(response[0]) != self.nvsram_version(): self.upgrade_required = True except Exception as error: self.module.fail_json(msg="Failed to retrieve storage system's NVSRAM version. Array [%s]. Error [%s]" % (self.ssid, to_native(error)))
def cmd(command):
    """Run *command* via subprocess and return (returncode, stdout, stderr) as bytes.

    Output is drained incrementally with select() so neither pipe can fill up
    and deadlock the child. LANG/LC_* are forced to 'C' so output is parseable.
    """
    # This is code from https://github.com/ansible/ansible/blob/devel/lib/ansible/modules/service.py
    # Copyright: (c) 2012, Michael DeHaan <*****@*****.**>
    # GNU General Public License v3.0+ (see COPYING or https://www.gnu.org/licenses/gpl-3.0.txt)
    lang_env = dict(LANG='C', LC_ALL='C', LC_MESSAGES='C')
    p = subprocess.Popen(command, shell=False, stdout=subprocess.PIPE, stderr=subprocess.PIPE, env=lang_env)
    stdout = b("")
    stderr = b("")
    fds = [p.stdout, p.stderr]
    # Wait for all output, or until the main process is dead and its output is done.
    while fds:
        # 1-second timeout so we can re-check whether the child has exited.
        rfd, wfd, efd = select.select(fds, [], fds, 1)
        if not (rfd + wfd + efd) and p.poll() is not None:
            break
        if p.stdout in rfd:
            dat = os.read(p.stdout.fileno(), 4096)
            if not dat:
                # EOF on stdout: stop selecting on it.
                fds.remove(p.stdout)
            stdout += dat
        if p.stderr in rfd:
            dat = os.read(p.stderr.fileno(), 4096)
            if not dat:
                # EOF on stderr: stop selecting on it.
                fds.remove(p.stderr)
            stderr += dat
    p.wait()
    return (p.returncode, stdout, stderr)
def proxy_wait_for_upgrade(self, request_id):
    """Wait for SANtricity Web Services Proxy to report upgrade complete.

    Bundled firmware: poll the batch job endpoint until it reports complete
    (NOTE(review): this branch has no timeout — it polls forever unless the
    job completes, fails, or a request error occurs; confirm intended).
    Non-bundled: poll reported firmware/NVSRAM versions until they match the
    staged versions, timing out after REBOOT_TIMEOUT_SEC.
    """
    if self.is_firmware_bundled():
        while True:
            try:
                sleep(5)
                rc, response = self.request("batch/cfw-upgrade/%s" % request_id)
                if response["status"] == "complete":
                    self.upgrade_in_progress = False
                    break
                elif response["status"] in ["failed", "cancelled"]:
                    self.module.fail_json(msg="Firmware upgrade failed to complete. Array [%s]." % self.ssid)
            except Exception as error:
                self.module.fail_json(msg="Failed to retrieve firmware upgrade status. Array [%s]. Error [%s]." % (self.ssid, to_native(error)))
    else:
        # Poll every 5 seconds; the loop's else-clause fires only on timeout.
        for count in range(0, int(self.REBOOT_TIMEOUT_SEC / 5)):
            try:
                sleep(5)
                rc_firmware, firmware = self.request("storage-systems/%s/graph/xpath-filter?query=/sa/saData/fwVersion" % self.ssid)
                rc_nvsram, nvsram = self.request("storage-systems/%s/graph/xpath-filter?query=/sa/saData/nvsramVersion" % self.ssid)
                if six.b(firmware[0]) == self.firmware_version() and six.b(nvsram[0]) == self.nvsram_version():
                    self.upgrade_in_progress = False
                    break
            except Exception as error:
                # Transient request failures are expected while controllers reboot.
                pass
        else:
            self.module.fail_json(msg="Timed out waiting for firmware upgrade to complete. Array [%s]." % self.ssid)
def embedded_wait_for_upgrade(self):
    """Wait for SANtricity Web Services Embedded to be available after reboot.

    Polls once per second (up to REBOOT_TIMEOUT_SEC) until the reported bundle
    and NVSRAM versions match the staged versions; fails the module on timeout.
    """
    for count in range(0, self.REBOOT_TIMEOUT_SEC):
        try:
            rc, response = self.request("storage-systems/%s/graph/xpath-filter?query=/sa/saData" % self.ssid)
            # Pick the 'bundleDisplay' entry out of the reported code versions.
            bundle_display = [m["versionString"] for m in response[0]["extendedSAData"]["codeVersions"] if m["codeModule"] == "bundleDisplay"][0]
            if rc == 200 and six.b(bundle_display) == self.firmware_version() and six.b(response[0]["nvsramVersion"]) == self.nvsram_version():
                self.upgrade_in_progress = False
                break
        except Exception as error:
            # Expected while the controller is still rebooting.
            pass
        sleep(1)
    else:
        # for/else: only reached when the loop exhausted without break.
        self.module.fail_json(msg="Timeout waiting for Santricity Web Services Embedded. Array [%s]" % self.ssid)
def absent(module, dest, conf, backup):
    """Remove the keys described by *conf* from the JSON file at *dest*.

    The current file is parsed as JSON, diffed against *conf* via deepdiff(),
    and rewritten (pretty-printed, sorted keys) when anything was removed.
    Exits the module with changed/msg/backup/diff; never returns normally.
    """
    b_dest = to_bytes(dest, errors='surrogate_or_strict')
    if not os.path.exists(b_dest):
        module.exit_json(changed=False, msg="file not present")

    diff = {'before': '',
            'after': '',
            'before_header': '%s (content)' % dest,
            'after_header': '%s (content)' % dest}

    # Read once; the original read and joined the content twice.
    with open(b_dest, 'rb') as f:
        b_lines = f.readlines()
    lines = to_native(b('').join(b_lines))

    b_conf = to_bytes(conf, errors='surrogate_or_strict')
    jsonconfig = json.loads(lines)
    # SECURITY NOTE: eval() executes arbitrary Python supplied via the 'conf'
    # parameter. Kept for backward compatibility, but this should be replaced
    # with json.loads() or ast.literal_eval() — do not feed untrusted input here.
    config = eval(b_conf)

    if not isinstance(config, dict):
        module.fail_json(msg="Invalid value in json parameter: {0}".format(config))

    if module._diff:
        diff['before'] = to_native(b('').join(b_lines))

    b_lines_new = b_lines
    msg = ''
    changed = False

    diffconfig = deepdiff(jsonconfig, config)
    if diffconfig is None:
        diffconfig = {}

    if jsonconfig != diffconfig:
        # Keep b_lines_new a *list* of byte strings: the original assigned a
        # bare bytes object, which made b('').join(b_lines_new) below iterate
        # individual bytes (ints on Python 3) and raise TypeError.
        b_lines_new = [to_bytes(json.dumps(diffconfig, sort_keys=True, indent=4, separators=(',', ': ')))]
        msg = 'config removed'
        changed = True

    if module._diff:
        diff['after'] = to_native(b('').join(b_lines_new))

    backupdest = ""
    if changed and not module.check_mode:
        if backup:
            backupdest = module.backup_local(dest)
        write_changes(module, b_lines_new, dest)

    attr_diff = {}
    msg, changed = check_file_attrs(module, changed, msg, attr_diff)

    attr_diff['before_header'] = '%s (file attributes)' % dest
    attr_diff['after_header'] = '%s (file attributes)' % dest

    difflist = [diff, attr_diff]
    module.exit_json(changed=changed, msg=msg, backup=backupdest, diff=difflist)
def create_multipart_formdata(files, fields=None, send_8kb=False):
    """Create the data for a multipart/form request.

    :param list(list) files: list of lists each containing (name, filename, path).
    :param list(list) fields: list of lists each containing (key, value).
    :param bool send_8kb: only sends the first 8kb of the files (default: False).
    :return: (headers dict, encoded body) ready for an HTTP POST.
    """
    # 27 random digits make the boundary unlikely to collide with payload content.
    boundary = "---------------------------" + "".join([str(random.randint(0, 9)) for x in range(27)])
    data_parts = list()
    data = None
    if six.PY2:  # Generate payload for Python 2
        # Python 2: assemble everything as native str (which is bytes).
        newline = "\r\n"
        if fields is not None:
            for key, value in fields:
                data_parts.extend(["--%s" % boundary,
                                   'Content-Disposition: form-data; name="%s"' % key,
                                   "",
                                   value])
        for name, filename, path in files:
            with open(path, "rb") as fh:
                value = fh.read(8192) if send_8kb else fh.read()
                data_parts.extend(["--%s" % boundary,
                                   'Content-Disposition: form-data; name="%s"; filename="%s"' % (name, filename),
                                   "Content-Type: %s" % (mimetypes.guess_type(path)[0] or "application/octet-stream"),
                                   "",
                                   value])
        data_parts.extend(["--%s--" % boundary, ""])
        data = newline.join(data_parts)
    else:
        # Python 3: every part must be bytes before joining (file content 'value'
        # is already bytes from the 'rb' read).
        newline = six.b("\r\n")
        if fields is not None:
            for key, value in fields:
                data_parts.extend([six.b("--%s" % boundary),
                                   six.b('Content-Disposition: form-data; name="%s"' % key),
                                   six.b(""),
                                   six.b(value)])
        for name, filename, path in files:
            with open(path, "rb") as fh:
                value = fh.read(8192) if send_8kb else fh.read()
                data_parts.extend([six.b("--%s" % boundary),
                                   six.b('Content-Disposition: form-data; name="%s"; filename="%s"' % (name, filename)),
                                   six.b("Content-Type: %s" % (mimetypes.guess_type(path)[0] or "application/octet-stream")),
                                   six.b(""),
                                   value])
        data_parts.extend([six.b("--%s--" % boundary), b""])
        data = newline.join(data_parts)

    headers = {
        "Content-Type": "multipart/form-data; boundary=%s" % boundary,
        "Content-Length": str(len(data))}

    return headers, data
def absent(module, dest, regexp, line, backup):
    """Remove every line of *dest* matching *regexp* (or exactly equal to *line*
    when no regexp is given), honoring check mode, diff mode and backups.

    Exits the module with changed/found/msg/backup/diff; never returns normally.
    """
    b_dest = to_bytes(dest, errors='surrogate_or_strict')
    if not os.path.exists(b_dest):
        module.exit_json(changed=False, msg="file not present")

    msg = ''
    diff = {'before': '',
            'after': '',
            'before_header': '%s (content)' % dest,
            'after_header': '%s (content)' % dest}

    f = open(b_dest, 'rb')
    b_lines = f.readlines()
    f.close()

    if module._diff:
        diff['before'] = to_native(b('').join(b_lines))

    if regexp is not None:
        bre_c = re.compile(to_bytes(regexp, errors='surrogate_or_strict'))
    found = []

    b_line = to_bytes(line, errors='surrogate_or_strict')

    def matcher(b_cur_line):
        # Record each matching line in 'found'; return True to KEEP the line.
        if regexp is not None:
            match_found = bre_c.search(b_cur_line)
        else:
            # Exact-line match ignores the trailing line terminator.
            match_found = b_line == b_cur_line.rstrip(b('\r\n'))
        if match_found:
            found.append(b_cur_line)
        return not match_found

    b_lines = [l for l in b_lines if matcher(l)]
    changed = len(found) > 0

    if module._diff:
        diff['after'] = to_native(b('').join(b_lines))

    backupdest = ""
    if changed and not module.check_mode:
        if backup:
            backupdest = module.backup_local(dest)
        write_changes(module, b_lines, dest)

    if changed:
        msg = "%s line(s) removed" % len(found)

    attr_diff = {}
    msg, changed = check_file_attrs(module, changed, msg, attr_diff)

    attr_diff['before_header'] = '%s (file attributes)' % dest
    attr_diff['after_header'] = '%s (file attributes)' % dest

    difflist = [diff, attr_diff]
    module.exit_json(changed=changed, found=len(found), msg=msg, backup=backupdest, diff=difflist)
def wait_for_web_services(self): """Wait for web services to report firmware and nvsram upgrade.""" # Wait for system to reflect changes for count in range(int(self.REBOOT_TIMEOUT_SEC / 5)): try: if self.is_firmware_bundled(): firmware_rc, firmware_version = self.request( "storage-systems/%s/graph/xpath-filter?query=/controller/" "codeVersions[codeModule='bundleDisplay']" % self.ssid, log_request=False) current_firmware_version = six.b( firmware_version[0]["versionString"]) else: firmware_rc, firmware_version = self.request( "storage-systems/%s/graph/xpath-filter?query=/sa/saData/fwVersion" % self.ssid, log_request=False) current_firmware_version = six.b(firmware_version[0]) nvsram_rc, nvsram_version = self.request( "storage-systems/%s/graph/xpath-filter?query=/sa/saData/nvsramVersion" % self.ssid, log_request=False) current_nvsram_version = six.b(nvsram_version[0]) if current_firmware_version == self.firmware_version() and ( not self.nvsram or current_nvsram_version == self.nvsram_version()): break except Exception as error: pass sleep(5) else: self.module.fail_json( msg="Timeout waiting for Santricity Web Services. Array [%s]" % self.ssid) # Wait for system to be optimal for count in range(int(self.REBOOT_TIMEOUT_SEC / 5)): try: rc, response = self.request("storage-systems/%s" % self.ssid, log_request=False) if response["status"] == "optimal": self.upgrade_in_progress = False break except Exception as error: pass sleep(5) else: self.module.fail_json( msg= "Timeout waiting for storage system to return to optimal status. Array [%s]" % self.ssid)
def assemble_from_fragments(src_path, delimiter=None, compiled_regexp=None, ignore_hidden=False, tmpdir=None):
    ''' assemble a file from a directory of fragments

    :param src_path: directory whose files are concatenated in sorted order.
    :param delimiter: optional separator written between fragments (escape
        sequences such as ``\\n`` are un-escaped first).
    :param compiled_regexp: when given, only fragment names it matches are used.
    :param ignore_hidden: skip dot-files when True.
    :param tmpdir: directory in which the assembled temp file is created.
    :return: path of the assembled temporary file.
    '''
    tmpfd, temp_path = tempfile.mkstemp(dir=tmpdir)
    # 'with' guarantees the handle is closed even if a read/write fails
    # (the original leaked the fd on any exception in the loop).
    with os.fdopen(tmpfd, 'wb') as tmp:
        delimit_me = False
        add_newline = False

        for f in sorted(os.listdir(src_path)):
            if compiled_regexp and not compiled_regexp.search(f):
                continue
            fragment = os.path.join(src_path, f)
            if not os.path.isfile(fragment) or (ignore_hidden and os.path.basename(fragment).startswith('.')):
                continue
            with open(fragment, 'rb') as fragment_fh:
                fragment_content = fragment_fh.read()

            # always put a newline between fragments if the previous fragment didn't end with a newline.
            if add_newline:
                tmp.write(b'\n')

            # delimiters should only appear between fragments
            if delimit_me:
                if delimiter:
                    # un-escape anything like newlines
                    delimiter = codecs.escape_decode(delimiter)[0]
                    tmp.write(delimiter)
                    # always make sure there's a newline after the delimiter,
                    # so lines don't run together. endswith() behaves the same
                    # on Python 2 and 3, so the six.indexbytes shim is not needed.
                    if not delimiter.endswith(b'\n'):
                        tmp.write(b'\n')

            tmp.write(fragment_content)
            delimit_me = True
            add_newline = not fragment_content.endswith(b'\n')

    return temp_path
def write_ssh_wrapper():
    """Write a temporary, executable ssh wrapper script for git and return its path."""
    module_dir = get_module_path()
    try:
        # make sure we have full permission to the module_dir, which
        # may not be the case if we're sudo'ing to a non-root user
        if not os.access(module_dir, os.W_OK | os.R_OK | os.X_OK):
            raise OSError
        script_fd, script_path = tempfile.mkstemp(prefix=module_dir + '/')
    except (IOError, OSError):
        # fall back to the system default temp directory
        script_fd, script_path = tempfile.mkstemp()
    template = b("""#!/bin/sh
if [ -z "$GIT_SSH_OPTS" ]; then
    BASEOPTS=""
else
    BASEOPTS=$GIT_SSH_OPTS
fi

# Let ssh fail rather than prompt
BASEOPTS="$BASEOPTS -o BatchMode=yes"

if [ -z "$GIT_KEY" ]; then
    ssh $BASEOPTS "$@"
else
    ssh -i "$GIT_KEY" -o IdentitiesOnly=yes $BASEOPTS "$@"
fi
""")
    script = os.fdopen(script_fd, 'w+b')
    script.write(template)
    script.close()
    # mark the wrapper executable for the owner
    mode = os.stat(script_path).st_mode
    os.chmod(script_path, mode | stat.S_IEXEC)
    return script_path
def main():
    """Dispatch a beakerlib-style assertion (rlRun / rlAssertGrep / rlAssertNotGrep)
    and exit with the command result in Ansible module form."""
    module = AnsibleModule(
        argument_spec=dict(_raw_params=dict(),
                           _uses_shell=dict(type='bool', default=False),
                           command=dict(default=None),
                           function=dict(required=True, type='str'),
                           chdir=dict(type='path'),
                           returncode=dict(type='int', default=0),
                           string=dict(type='str', default=None),
                           file=dict(type='path', default=None)))

    chdir = module.params['chdir']
    beakerlibfunc = module.params['function']
    myfile = module.params['file']
    mystring = module.params['string']

    if chdir:
        chdir = os.path.abspath(chdir)
        os.chdir(chdir)

    if beakerlibfunc == 'rlRun':
        rc, out, err, startd, endd, delta = rlRun(module, module.params['command'], module.params['_uses_shell'], module.params['returncode'], mystring, myfile)
    elif beakerlibfunc == 'rlAssertGrep':
        rc, startd, endd, delta = rlAssertGrep(myfile, mystring)
        # bytes, not str: rstrip() below uses a bytes argument, and mixing the
        # two raised TypeError on Python 3 in the original code.
        out = b''
        err = b''
    elif beakerlibfunc == 'rlAssertNotGrep':
        rc, startd, endd, delta = rlAssertGrep(myfile, mystring)
        out = b''
        err = b''
        # NotGrep succeeds exactly when Grep fails.
        rc = int(not rc)
    else:
        # Previously an unrecognized function fell through and crashed with a
        # NameError on the undefined result variables; fail explicitly instead.
        module.fail_json(msg="Unsupported function: %s" % beakerlibfunc)

    module.exit_json(function=beakerlibfunc,
                     stdout=out.rstrip(b"\r\n"),
                     stderr=err.rstrip(b"\r\n"),
                     rc=rc,
                     start=str(startd),
                     end=str(endd),
                     delta=str(delta),
                     changed=True)
def matcher(b_cur_line):
    """Return True when the line should be kept; record removed lines in 'found'."""
    if regexp is None:
        # exact-line comparison, ignoring the trailing line terminator
        hit = b_line == b_cur_line.rstrip(b('\r\n'))
    else:
        hit = bre_c.search(b_cur_line)
    if hit:
        found.append(b_cur_line)
    return not hit
def assemble_from_fragments(src_path, delimiter=None, compiled_regexp=None, ignore_hidden=False):
    ''' assemble a file from a directory of fragments

    Concatenates the files of *src_path* in sorted order into a new temp file,
    optionally filtered by *compiled_regexp*, optionally separated by
    *delimiter*, and returns the temp file's path.
    '''
    tmpfd, temp_path = tempfile.mkstemp()
    # 'with' closes the handle even if a read/write raises mid-loop.
    with os.fdopen(tmpfd, 'wb') as tmp:
        delimit_me = False
        add_newline = False

        for f in sorted(os.listdir(src_path)):
            if compiled_regexp and not compiled_regexp.search(f):
                continue
            fragment = os.path.join(src_path, f)
            if not os.path.isfile(fragment) or (ignore_hidden and os.path.basename(fragment).startswith('.')):
                continue
            with open(fragment, 'rb') as fragment_fh:
                fragment_content = fragment_fh.read()

            # always put a newline between fragments if the previous fragment didn't end with a newline.
            if add_newline:
                tmp.write(b'\n')

            # delimiters should only appear between fragments
            if delimit_me:
                if delimiter:
                    # un-escape anything like newlines
                    delimiter = codecs.escape_decode(delimiter)[0]
                    tmp.write(delimiter)
                    # always make sure there's a newline after the delimiter,
                    # so lines don't run together.
                    # BUGFIX: the original `delimiter[-1] != b('\n')` compared an
                    # int with bytes on Python 3 and therefore ALWAYS appended an
                    # extra newline, even when the delimiter already ended with one.
                    if not delimiter.endswith(b'\n'):
                        tmp.write(b'\n')

            tmp.write(fragment_content)
            delimit_me = True
            add_newline = not fragment_content.endswith(b'\n')

    return temp_path
def get_ca_certs(self):
    """Collect system CA certificates into one temp file.

    Tries to find valid CA certs in the standard locations for the current
    platform, concatenates every .crt/.pem found into a temp file and returns
    (temp file path, list of directories that were checked).
    """
    paths_checked = []

    system = platform.system()
    # build a list of paths to check for .crt/.pem files
    # based on the platform type
    paths_checked.append('/etc/ssl/certs')
    if system == 'Linux':
        paths_checked.append('/etc/pki/ca-trust/extracted/pem')
        paths_checked.append('/etc/pki/tls/certs')
        paths_checked.append('/usr/share/ca-certificates/cacert.org')
    elif system == 'FreeBSD':
        paths_checked.append('/usr/local/share/certs')
    elif system == 'OpenBSD':
        paths_checked.append('/etc/ssl')
    elif system == 'NetBSD':
        # BUGFIX: this previously appended to an unused 'ca_certs' list, so the
        # NetBSD certificate directory was never actually scanned.
        paths_checked.append('/etc/openssl/certs')
    elif system == 'SunOS':
        paths_checked.append('/opt/local/etc/openssl/certs')

    # fall back to a user-deployed cert in a standard
    # location if the OS platform one is not available
    paths_checked.append('/etc/ansible')

    tmp_fd, tmp_path = tempfile.mkstemp()

    # Write the dummy ca cert if we are running on Mac OS X
    if system == 'Darwin':
        os.write(tmp_fd, DUMMY_CA_CERT)
        # Default Homebrew path for OpenSSL certs
        paths_checked.append('/usr/local/etc/openssl')

    # for all of the paths, find any .crt or .pem files
    # and compile them into single temp file for use
    # in the ssl check to speed up the test
    for path in paths_checked:
        if os.path.exists(path) and os.path.isdir(path):
            for f in os.listdir(path):
                full_path = os.path.join(path, f)
                if os.path.isfile(full_path) and os.path.splitext(f)[1] in ('.crt', '.pem'):
                    try:
                        with open(full_path, 'rb') as cert_file:
                            os.write(tmp_fd, cert_file.read())
                            os.write(tmp_fd, b'\n')
                    except (OSError, IOError):
                        # unreadable cert files are skipped, best-effort
                        pass

    return (tmp_path, paths_checked)
def get_head_branch(git_path, module, dest, remote, bare=False):
    '''
    Determine what branch HEAD is associated with.  This is partly
    taken from lib/ansible/utils/__init__.py.  It finds the correct
    path to .git/HEAD and reads from that file the branch that HEAD is
    associated with.  In the case of a detached HEAD, this will look
    up the branch in .git/refs/remotes/<remote>/HEAD.
    '''
    if bare:
        repo_path = dest
    else:
        repo_path = os.path.join(dest, '.git')
    # Check if the .git is a file. If it is a file, it means that we are in a submodule structure.
    if os.path.isfile(repo_path):
        try:
            # A submodule's .git file contains a line like "gitdir: <path>";
            # parse it as bytes since the file was opened in binary mode.
            git_conf = open(repo_path, 'rb')
            for line in git_conf:
                config_val = line.split(b(':'), 1)
                if config_val[0].strip() == b('gitdir'):
                    gitdir = to_native(config_val[1].strip(), errors='surrogate_or_strict')
                    break
            else:
                # No repo path found
                return ''

            # There is a possibility the .git file to have an absolute path.
            if os.path.isabs(gitdir):
                repo_path = gitdir
            else:
                # Relative gitdir is resolved against the parent working tree.
                repo_path = os.path.join(repo_path.split('.git')[0], gitdir)
        except (IOError, AttributeError):
            # No repo path found
            return ''
    # Read .git/HEAD for the name of the branch.
    # If we're in a detached HEAD state, look up the branch associated with
    # the remote HEAD in .git/refs/remotes/<remote>/HEAD
    headfile = os.path.join(repo_path, "HEAD")
    if is_not_a_branch(git_path, module, dest):
        headfile = os.path.join(repo_path, 'refs', 'remotes', remote, 'HEAD')
    branch = head_splitter(headfile, remote, module=module, fail_on_error=True)
    return branch
def get_ca_certs(self):
    """Collect system CA certificates into a single temp file.

    Returns (temp file path, list of directories checked); every readable
    .crt/.pem under the platform's standard cert directories is concatenated
    into the temp file.
    """
    paths_checked = []

    system = platform.system()
    # build a list of paths to check for .crt/.pem files
    # based on the platform type
    paths_checked.append('/etc/ssl/certs')
    if system == 'Linux':
        paths_checked.append('/etc/pki/ca-trust/extracted/pem')
        paths_checked.append('/etc/pki/tls/certs')
        paths_checked.append('/usr/share/ca-certificates/cacert.org')
    elif system == 'FreeBSD':
        paths_checked.append('/usr/local/share/certs')
    elif system == 'OpenBSD':
        paths_checked.append('/etc/ssl')
    elif system == 'NetBSD':
        # BUGFIX: previously appended to an unused 'ca_certs' list, so the
        # NetBSD certificate directory was never scanned.
        paths_checked.append('/etc/openssl/certs')
    elif system == 'SunOS':
        paths_checked.append('/opt/local/etc/openssl/certs')

    # fall back to a user-deployed cert in a standard
    # location if the OS platform one is not available
    paths_checked.append('/etc/ansible')

    tmp_fd, tmp_path = tempfile.mkstemp()

    # Write the dummy ca cert if we are running on Mac OS X
    if system == 'Darwin':
        os.write(tmp_fd, DUMMY_CA_CERT)
        # Default Homebrew path for OpenSSL certs
        paths_checked.append('/usr/local/etc/openssl')

    # for all of the paths, find any .crt or .pem files
    # and compile them into single temp file for use
    # in the ssl check to speed up the test
    for path in paths_checked:
        if os.path.exists(path) and os.path.isdir(path):
            for f in os.listdir(path):
                full_path = os.path.join(path, f)
                if os.path.isfile(full_path) and os.path.splitext(f)[1] in ('.crt', '.pem'):
                    try:
                        with open(full_path, 'rb') as cert_file:
                            os.write(tmp_fd, cert_file.read())
                            os.write(tmp_fd, b'\n')
                    except (OSError, IOError):
                        # skip unreadable files; this is best-effort collection
                        pass

    return (tmp_path, paths_checked)
def test_metal_helm_chart_compiles(self):
    """Run 'helm lint' on the chart shipped with the metal control-plane role."""
    self.load_ansible_role("inventory.yaml", "cp.yaml", "deploy control plane", "metal-roles/control-plane/roles/metal")
    chart = os.path.join(self.role._role_path, "files", "metal-control-plane")
    lint_output = subprocess.check_output(["helm", "lint", chart])
    self.assertIn(b("0 chart(s) failed"), lint_output)
def assemble_from_fragments(src_path, delimiter=None, compiled_regexp=None, ignore_hidden=False):
    ''' assemble a file from a directory of fragments

    Files under *src_path* are concatenated in sorted order into a fresh temp
    file (optionally filtered by *compiled_regexp* and separated by
    *delimiter*); the temp file path is returned.
    '''
    tmpfd, temp_path = tempfile.mkstemp()
    # context manager closes the handle even when a read/write raises
    with os.fdopen(tmpfd, 'wb') as tmp:
        delimit_me = False
        add_newline = False

        for f in sorted(os.listdir(src_path)):
            if compiled_regexp and not compiled_regexp.search(f):
                continue
            fragment = os.path.join(src_path, f)
            if not os.path.isfile(fragment) or (ignore_hidden and os.path.basename(fragment).startswith('.')):
                continue
            with open(fragment, 'rb') as fragment_fh:
                fragment_content = fragment_fh.read()

            # always put a newline between fragments if the previous fragment didn't end with a newline.
            if add_newline:
                tmp.write(b'\n')

            # delimiters should only appear between fragments
            if delimit_me:
                if delimiter:
                    # un-escape anything like newlines
                    delimiter = codecs.escape_decode(delimiter)[0]
                    tmp.write(delimiter)
                    # always make sure there's a newline after the delimiter,
                    # so lines don't run together.
                    # BUGFIX: `delimiter[-1] != b('\n')` compared an int with bytes
                    # on Python 3 (bytes indexing yields an int), so an extra
                    # newline was ALWAYS written; endswith() is correct on 2 and 3.
                    if not delimiter.endswith(b'\n'):
                        tmp.write(b'\n')

            tmp.write(fragment_content)
            delimit_me = True
            add_newline = not fragment_content.endswith(b'\n')

    return temp_path
def main():
    """Insert/update/remove a marked block of text in a file (blockinfile-style).

    The managed block is delimited by BEGIN/END marker lines derived from the
    'marker' template; state=present inserts or replaces it, state=absent
    removes it. All content is handled as bytes.
    """
    module = AnsibleModule(argument_spec=dict(
        path=dict(required=True, aliases=['dest', 'destfile', 'name'], type='path'),
        state=dict(default='present', choices=['absent', 'present']),
        marker=dict(default='# {mark} ANSIBLE MANAGED BLOCK', type='str'),
        block=dict(default='', type='str', aliases=['content']),
        insertafter=dict(default=None),
        insertbefore=dict(default=None),
        create=dict(default=False, type='bool'),
        backup=dict(default=False, type='bool'),
        validate=dict(default=None, type='str'),
    ),
        mutually_exclusive=[['insertbefore', 'insertafter']],
        add_file_common_args=True,
        supports_check_mode=True)

    params = module.params
    path = params['path']

    if os.path.isdir(path):
        module.fail_json(rc=256, msg='Path %s is a directory !' % path)

    path_exists = os.path.exists(path)
    if not path_exists:
        if not module.boolean(params['create']):
            module.fail_json(rc=257, msg='Path %s does not exist !' % path)
        original = None
        lines = []
    else:
        f = open(path, 'rb')
        original = f.read()
        f.close()
        lines = original.splitlines()

    diff = {'before': '',
            'after': '',
            'before_header': '%s (content)' % path,
            'after_header': '%s (content)' % path}

    if module._diff and original:
        diff['before'] = original

    insertbefore = params['insertbefore']
    insertafter = params['insertafter']
    block = to_bytes(params['block'])
    marker = to_bytes(params['marker'])
    present = params['state'] == 'present'

    if not present and not path_exists:
        module.exit_json(changed=False, msg="File %s not present" % path)

    # Default anchor: append the block at end of file.
    if insertbefore is None and insertafter is None:
        insertafter = 'EOF'

    if insertafter not in (None, 'EOF'):
        insertre = re.compile(to_bytes(insertafter, errors='surrogate_or_strict'))
    elif insertbefore not in (None, 'BOF'):
        insertre = re.compile(to_bytes(insertbefore, errors='surrogate_or_strict'))
    else:
        insertre = None

    # Concrete BEGIN/END marker lines from the user's marker template.
    marker0 = re.sub(b(r'{mark}'), b('BEGIN'), marker)
    marker1 = re.sub(b(r'{mark}'), b('END'), marker)
    if present and block:
        # Escape sequences like '\n' need to be handled in Ansible 1.x
        # NOTE(review): re.sub('', block, '') looks garbled — the signature is
        # re.sub(pattern, repl, string), so this substitutes 'block' into an
        # empty string; confirm the intended transformation against upstream.
        if module.ansible_version.startswith('1.'):
            block = re.sub('', block, '')
        blocklines = [marker0] + block.splitlines() + [marker1]
    else:
        blocklines = []

    # Locate an existing managed block (n0=BEGIN index, n1=END index).
    n0 = n1 = None
    for i, line in enumerate(lines):
        if line == marker0:
            n0 = i
        if line == marker1:
            n1 = i

    if None in (n0, n1):
        # No complete existing block: compute the insertion point.
        n0 = None
        if insertre is not None:
            # Last line matching the anchor regex wins.
            for i, line in enumerate(lines):
                if insertre.search(line):
                    n0 = i
            if n0 is None:
                n0 = len(lines)
            elif insertafter is not None:
                n0 += 1
        elif insertbefore is not None:
            n0 = 0  # insertbefore=BOF
        else:
            n0 = len(lines)  # insertafter=EOF
    elif n0 < n1:
        # Replace the existing block in place.
        lines[n0:n1 + 1] = []
    else:
        # Markers out of order: remove the span and insert at the END marker.
        lines[n1:n0 + 1] = []
        n0 = n1

    lines[n0:n0] = blocklines

    if lines:
        result = b('\n').join(lines)
        # Preserve (or create) a trailing newline.
        if original is None or original.endswith(b('\n')):
            result += b('\n')
    else:
        result = ''

    if module._diff:
        diff['after'] = result

    if original == result:
        msg = ''
        changed = False
    elif original is None:
        msg = 'File created'
        changed = True
    elif not blocklines:
        msg = 'Block removed'
        changed = True
    else:
        msg = 'Block inserted'
        changed = True

    if changed and not module.check_mode:
        if module.boolean(params['backup']) and path_exists:
            module.backup_local(path)
        # We should always follow symlinks so that we change the real file
        real_path = os.path.realpath(params['path'])
        write_changes(module, result, real_path)

    if module.check_mode and not path_exists:
        module.exit_json(changed=changed, msg=msg, diff=diff)

    attr_diff = {}
    msg, changed = check_file_attrs(module, changed, msg, attr_diff)

    attr_diff['before_header'] = '%s (file attributes)' % path
    attr_diff['after_header'] = '%s (file attributes)' % path

    difflist = [diff, attr_diff]
    module.exit_json(changed=changed, msg=msg, diff=difflist)
def main():
    """Run an arbitrary command (Ansible 'command' module style) and exit with
    its rc/stdout/stderr, honoring creates/removes idempotence guards."""
    # the command module is the one ansible module that does not take key=value args
    # hence don't copy this one if you are looking to build others!
    module = AnsibleModule(
        argument_spec=dict(
            _raw_params=dict(),
            _uses_shell=dict(type='bool', default=False),
            chdir=dict(type='path'),
            executable=dict(),
            creates=dict(type='path'),
            removes=dict(type='path'),
            warn=dict(type='bool', default=True),
        )
    )

    shell = module.params['_uses_shell']
    chdir = module.params['chdir']
    executable = module.params['executable']
    args = module.params['_raw_params']
    creates = module.params['creates']
    removes = module.params['removes']
    warn = module.params['warn']

    if args.strip() == '':
        module.fail_json(rc=256, msg="no command given")

    if chdir:
        chdir = os.path.abspath(chdir)
        os.chdir(chdir)

    if creates:
        # do not run the command if the line contains creates=filename
        # and the filename already exists. This allows idempotence
        # of command executions.
        if glob.glob(creates):
            module.exit_json(
                cmd=args,
                stdout="skipped, since %s exists" % creates,
                changed=False,
                rc=0
            )

    if removes:
        # do not run the command if the line contains removes=filename
        # and the filename does not exist. This allows idempotence
        # of command executions.
        if not glob.glob(removes):
            module.exit_json(
                cmd=args,
                stdout="skipped, since %s does not exist" % removes,
                changed=False,
                rc=0
            )

    warnings = list()
    if warn:
        warnings = check_command(args)

    # Without a shell, the raw string must be tokenized into argv form.
    if not shell:
        args = shlex.split(args)
    startd = datetime.datetime.now()

    # encoding=None keeps out/err as bytes.
    rc, out, err = module.run_command(args, executable=executable, use_unsafe_shell=shell, encoding=None)

    endd = datetime.datetime.now()
    delta = endd - startd

    if out is None:
        out = b('')
    if err is None:
        err = b('')

    module.exit_json(
        cmd=args,
        stdout=out.rstrip(b("\r\n")),
        stderr=err.rstrip(b("\r\n")),
        rc=rc,
        start=str(startd),
        end=str(endd),
        delta=str(delta),
        changed=True,
        warnings=warnings
    )
def main():
    """Insert/update/remove a marked text block in a file (older blockinfile
    variant using 'dest' and startswith-based marker matching)."""
    module = AnsibleModule(argument_spec=dict(
        dest=dict(required=True, aliases=['name', 'destfile'], type='path'),
        state=dict(default='present', choices=['absent', 'present']),
        marker=dict(default='# {mark} ANSIBLE MANAGED BLOCK', type='str'),
        block=dict(default='', type='str', aliases=['content']),
        insertafter=dict(default=None),
        insertbefore=dict(default=None),
        create=dict(default=False, type='bool'),
        backup=dict(default=False, type='bool'),
        validate=dict(default=None, type='str'),
    ),
        mutually_exclusive=[['insertbefore', 'insertafter']],
        add_file_common_args=True,
        supports_check_mode=True)

    params = module.params
    dest = params['dest']
    # NOTE(review): 'follow' is not declared in argument_spec above, so
    # params.get('follow') is always None here — confirm whether symlink
    # following was meant to be a real option.
    if module.boolean(params.get('follow', None)):
        dest = os.path.realpath(dest)

    if os.path.isdir(dest):
        module.fail_json(rc=256, msg='Destination %s is a directory !' % dest)

    path_exists = os.path.exists(dest)
    if not path_exists:
        if not module.boolean(params['create']):
            module.fail_json(rc=257, msg='Destination %s does not exist !' % dest)
        original = None
        lines = []
    else:
        f = open(dest, 'rb')
        original = f.read()
        f.close()
        lines = original.splitlines()

    insertbefore = params['insertbefore']
    insertafter = params['insertafter']
    block = to_bytes(params['block'])
    marker = to_bytes(params['marker'])
    present = params['state'] == 'present'

    if not present and not path_exists:
        module.exit_json(changed=False, msg="File not present")

    # Default anchor: append at end of file.
    if insertbefore is None and insertafter is None:
        insertafter = 'EOF'

    # NOTE(review): patterns are compiled from text while the searched lines
    # are bytes; on Python 3 insertre.search(line) would raise TypeError —
    # verify against the target Python version.
    if insertafter not in (None, 'EOF'):
        insertre = re.compile(insertafter)
    elif insertbefore not in (None, 'BOF'):
        insertre = re.compile(insertbefore)
    else:
        insertre = None

    # Concrete BEGIN/END marker lines from the marker template.
    marker0 = re.sub(b(r'{mark}'), b('BEGIN'), marker)
    marker1 = re.sub(b(r'{mark}'), b('END'), marker)
    if present and block:
        # Escape sequences like '\n' need to be handled in Ansible 1.x
        # NOTE(review): re.sub('', block, '') looks garbled — argument order is
        # (pattern, repl, string); confirm the intended transformation upstream.
        if module.ansible_version.startswith('1.'):
            block = re.sub('', block, '')
        blocklines = [marker0] + block.splitlines() + [marker1]
    else:
        blocklines = []

    # Locate an existing managed block; startswith (not equality) matching here.
    n0 = n1 = None
    for i, line in enumerate(lines):
        if line.startswith(marker0):
            n0 = i
        if line.startswith(marker1):
            n1 = i

    if None in (n0, n1):
        # No complete existing block: compute the insertion point.
        n0 = None
        if insertre is not None:
            # Last anchor match wins.
            for i, line in enumerate(lines):
                if insertre.search(line):
                    n0 = i
            if n0 is None:
                n0 = len(lines)
            elif insertafter is not None:
                n0 += 1
        elif insertbefore is not None:
            n0 = 0  # insertbefore=BOF
        else:
            n0 = len(lines)  # insertafter=EOF
    elif n0 < n1:
        # Replace the existing block in place.
        lines[n0:n1 + 1] = []
    else:
        # Markers out of order: drop the span, insert at the END marker index.
        lines[n1:n0 + 1] = []
        n0 = n1

    lines[n0:n0] = blocklines

    if lines:
        result = b('\n').join(lines)
        # Preserve (or create) a trailing newline.
        if original is None or original.endswith(b('\n')):
            result += b('\n')
    else:
        result = ''

    if original == result:
        msg = ''
        changed = False
    elif original is None:
        msg = 'File created'
        changed = True
    elif not blocklines:
        msg = 'Block removed'
        changed = True
    else:
        msg = 'Block inserted'
        changed = True

    if changed and not module.check_mode:
        if module.boolean(params['backup']) and path_exists:
            module.backup_local(dest)
        write_changes(module, result, dest)

    if module.check_mode and not path_exists:
        module.exit_json(changed=changed, msg=msg)

    msg, changed = check_file_attrs(module, changed, msg)
    module.exit_json(changed=changed, msg=msg)
def main():
    """Entry point for the blockinfile module (newer variant): manage a marked
    block of lines in a file, with diff support, configurable marker words,
    and backup reporting."""
    module = AnsibleModule(argument_spec=dict(
        path=dict(type='path', required=True, aliases=['dest', 'destfile', 'name']),
        state=dict(type='str', default='present', choices=['absent', 'present']),
        marker=dict(type='str', default='# {mark} ANSIBLE MANAGED BLOCK'),
        block=dict(type='str', default='', aliases=['content']),
        insertafter=dict(type='str'),
        insertbefore=dict(type='str'),
        create=dict(type='bool', default=False),
        backup=dict(type='bool', default=False),
        validate=dict(type='str'),
        marker_begin=dict(type='str', default='BEGIN'),
        marker_end=dict(type='str', default='END'),
    ),
        mutually_exclusive=[['insertbefore', 'insertafter']],
        add_file_common_args=True,
        supports_check_mode=True)

    params = module.params
    path = params['path']

    if os.path.isdir(path):
        module.fail_json(rc=256, msg='Path %s is a directory !' % path)

    path_exists = os.path.exists(path)
    if not path_exists:
        if not module.boolean(params['create']):
            module.fail_json(rc=257, msg='Path %s does not exist !' % path)
        # Create any missing parent directories (unless just checking).
        destpath = os.path.dirname(path)
        if not os.path.exists(destpath) and not module.check_mode:
            try:
                os.makedirs(destpath)
            except Exception as e:
                module.fail_json(
                    msg='Error creating %s Error code: %s Error description: %s' % (destpath, e[0], e[1]))
        original = None
        lines = []
    else:
        with open(path, 'rb') as f:
            original = f.read()
        # keepends=True: line terminators are preserved in each element.
        lines = original.splitlines(True)

    diff = {'before': '',
            'after': '',
            'before_header': '%s (content)' % path,
            'after_header': '%s (content)' % path}

    if module._diff and original:
        diff['before'] = original

    insertbefore = params['insertbefore']
    insertafter = params['insertafter']
    block = to_bytes(params['block'])
    marker = to_bytes(params['marker'])
    present = params['state'] == 'present'

    if not present and not path_exists:
        module.exit_json(changed=False, msg="File %s not present" % path)

    # Default placement is at end of file.
    if insertbefore is None and insertafter is None:
        insertafter = 'EOF'

    if insertafter not in (None, 'EOF'):
        insertre = re.compile(
            to_bytes(insertafter, errors='surrogate_or_strict'))
    elif insertbefore not in (None, 'BOF'):
        insertre = re.compile(
            to_bytes(insertbefore, errors='surrogate_or_strict'))
    else:
        insertre = None

    # Marker lines carry their own terminator so they compare equal to
    # keepends-split file lines.
    marker0 = re.sub(b(r'{mark}'), b(params['marker_begin']), marker) + b(os.linesep)
    marker1 = re.sub(b(r'{mark}'), b(params['marker_end']), marker) + b(os.linesep)
    if present and block:
        if not block.endswith(b(os.linesep)):
            block += b(os.linesep)
        blocklines = [marker0] + block.splitlines(True) + [marker1]
    else:
        blocklines = []

    # Locate any existing begin/end marker lines (last occurrence wins).
    n0 = n1 = None
    for i, line in enumerate(lines):
        if line == marker0:
            n0 = i
        if line == marker1:
            n1 = i

    if None in (n0, n1):
        # No complete existing block: choose the insertion point.
        n0 = None
        if insertre is not None:
            for i, line in enumerate(lines):
                if insertre.search(line):
                    n0 = i
            if n0 is None:
                n0 = len(lines)
            elif insertafter is not None:
                n0 += 1
        elif insertbefore is not None:
            n0 = 0  # insertbefore=BOF
        else:
            n0 = len(lines)  # insertafter=EOF
    elif n0 < n1:
        # Existing block found: remove it, then re-insert at its position.
        lines[n0:n1 + 1] = []
    else:
        # Markers reversed; remove the span and insert at the earlier one.
        lines[n1:n0 + 1] = []
        n0 = n1

    # Ensure there is a line separator before the block of lines to be inserted
    if n0 > 0:
        if not lines[n0 - 1].endswith(b(os.linesep)):
            lines[n0 - 1] += b(os.linesep)

    lines[n0:n0] = blocklines

    if lines:
        result = b''.join(lines)
    else:
        result = b''

    if module._diff:
        diff['after'] = result

    if original == result:
        msg = ''
        changed = False
    elif original is None:
        msg = 'File created'
        changed = True
    elif not blocklines:
        msg = 'Block removed'
        changed = True
    else:
        msg = 'Block inserted'
        changed = True

    backup_file = None
    if changed and not module.check_mode:
        if module.boolean(params['backup']) and path_exists:
            backup_file = module.backup_local(path)
        # We should always follow symlinks so that we change the real file
        real_path = os.path.realpath(params['path'])
        write_changes(module, result, real_path)

    if module.check_mode and not path_exists:
        module.exit_json(changed=changed, msg=msg, diff=diff)

    attr_diff = {}
    msg, changed = check_file_attrs(module, changed, msg, attr_diff)

    attr_diff['before_header'] = '%s (file attributes)' % path
    attr_diff['after_header'] = '%s (file attributes)' % path

    difflist = [diff, attr_diff]

    if backup_file is None:
        module.exit_json(changed=changed, msg=msg, diff=difflist)
    else:
        module.exit_json(changed=changed, msg=msg, diff=difflist, backup_file=backup_file)
def main():
    """Entry point for a `pki` CLI wrapper module.

    Builds a `pki` command line from the module parameters (connection, port,
    and one of three authentication styles), runs it, and reports stdout,
    stderr, and the return code. Fails the module on a non-zero return code.
    """
    # the command module is the one ansible module that does not take key=value args
    # hence don't copy this one if you are looking to build others!
    module = AnsibleModule(argument_spec=dict(
        raw_params=dict(default='pki'),
        port=dict(default=''),
        cli=dict(default='--help'),
        extra_args=dict(default=''),
        certnick=dict(default="'PKI CA Administrator for Example.Org'"),
        username=dict(default='caadmin'),
        userpassword=dict(default='Secret123'),
        userpwdfile=dict(default='Secret123'),
        dbpassword=dict(default='Secret123'),
        nssdb=dict(default='/opt/pkitest/certdb'),
        protocol=dict(default='http'),
        hostname=dict(default='localhost'),
        authType=dict(default='clientAuth',
                      choices=['connection', 'basicAuth', 'clientAuth'])))

    if module.params['port']:
        port = module.params['port']
    else:
        # Derive the port from whichever subsystem name appears in the CLI
        # string, looking it up as e.g. constants.CA_HTTP_PORT. The last
        # matching subsystem wins, mirroring the original scan order.
        # (Fix: the previous implementation used dict.iteritems(), which no
        # longer exists on Python 3.)
        # NOTE: if no subsystem matches, `port` stays unbound and the command
        # build below raises NameError — unchanged from the prior behavior.
        for subsystem in ("ca", "kra", "ocsp", "tks", "tps"):
            if subsystem in module.params['cli']:
                port = getattr(constants, '_'.join(
                    [subsystem.upper(), module.params['protocol'].upper(), "PORT"]))

    # Connection arguments shared by every authentication style.
    conn_args = [
        module.params['raw_params'],
        '-d', module.params['nssdb'],
        '-P', module.params['protocol'],
        '-p', '%s' % (port),
        '-h', module.params['hostname'],
        '-c', module.params['dbpassword']
    ]

    cli_args = [module.params['cli'], module.params['extra_args']]

    if module.params['authType'] == 'clientAuth':
        auth_args = ['-n', module.params['certnick']]
        args = ' '.join(conn_args + auth_args + cli_args)

    if module.params['authType'] == 'basicAuth':
        auth_args = [
            '-u', module.params['username'],
            '-w', module.params['userpassword']
        ]
        args = ' '.join(conn_args + auth_args + cli_args)

    if module.params['authType'] == 'connection':
        # Connection-only: no authentication or CLI arguments are appended.
        args = ' '.join(conn_args)

    rc, out, err = module.run_command(args)

    result = dict(
        cmd=args,
        stdout=out.rstrip(b("\r\n")),
        stderr=err.rstrip(b("\r\n")),
        rc=rc,
        changed=True,
    )

    if rc != 0:
        module.fail_json(msg='non-zero return code', **result)

    module.exit_json(**result)
def basic_auth_header(username, password):
    """Build the value of an ``Authorization`` header for HTTP Basic auth.

    The result is a byte string of the form ``b"Basic <base64(user:pass)>"``.
    """
    credentials = to_bytes("%s:%s" % (username, password),
                           errors='surrogate_or_strict')
    encoded = base64.b64encode(credentials)
    return b("Basic %s") % encoded
def present(module, dest, regexp, line, insertafter, insertbefore, create,
            backup, backrefs):
    """Ensure `line` is present in `dest` (lineinfile 'present' state).

    Replaces the line matched by `regexp` (with optional backref expansion),
    or inserts `line` relative to an insertafter/insertbefore anchor, BOF, or
    EOF. Exits the module via exit_json/fail_json; does not return normally.
    """
    diff = {'before': '',
            'after': '',
            'before_header': '%s (content)' % dest,
            'after_header': '%s (content)' % dest}

    b_dest = to_bytes(dest, errors='surrogate_or_strict')
    if not os.path.exists(b_dest):
        if not create:
            module.fail_json(rc=257, msg='Destination %s does not exist !' % dest)
        b_destpath = os.path.dirname(b_dest)
        if not os.path.exists(b_destpath) and not module.check_mode:
            os.makedirs(b_destpath)
        b_lines = []
    else:
        # All file content is handled as bytes to avoid encoding surprises.
        f = open(b_dest, 'rb')
        b_lines = f.readlines()
        f.close()

    if module._diff:
        diff['before'] = to_native(b('').join(b_lines))

    if regexp is not None:
        bre_m = re.compile(to_bytes(regexp, errors='surrogate_or_strict'))

    if insertafter not in (None, 'BOF', 'EOF'):
        bre_ins = re.compile(
            to_bytes(insertafter, errors='surrogate_or_strict'))
    elif insertbefore not in (None, 'BOF'):
        bre_ins = re.compile(
            to_bytes(insertbefore, errors='surrogate_or_strict'))
    else:
        bre_ins = None

    # index[0] is the line num where regexp has been found
    # index[1] is the line num where insertafter/inserbefore has been found
    index = [-1, -1]
    m = None
    b_line = to_bytes(line, errors='surrogate_or_strict')
    # Single pass: record the last regexp/exact-line match and the last
    # insertafter/insertbefore anchor match.
    for lineno, b_cur_line in enumerate(b_lines):
        if regexp is not None:
            match_found = bre_m.search(b_cur_line)
        else:
            match_found = b_line == b_cur_line.rstrip(b('\r\n'))
        if match_found:
            index[0] = lineno
            m = match_found
        elif bre_ins is not None and bre_ins.search(b_cur_line):
            if insertafter:
                # + 1 for the next line
                index[1] = lineno + 1
            if insertbefore:
                # index[1] for the previous line
                index[1] = lineno

    msg = ''
    changed = False
    # Regexp matched a line in the file
    b_linesep = to_bytes(os.linesep, errors='surrogate_or_strict')
    if index[0] != -1:
        if backrefs:
            b_new_line = m.expand(b_line)
        else:
            # Don't do backref expansion if not asked.
            b_new_line = b_line

        if not b_new_line.endswith(b_linesep):
            b_new_line += b_linesep

        # If a regexp is specified and a match is found anywhere in the file, do
        # not insert the line before or after.
        if regexp is None and m:
            # Insert lines
            if insertafter and insertafter != 'EOF':
                # Ensure there is a line separator after the found string
                # at the end of the file.
                if b_lines and not b_lines[-1][-1:] in (b('\n'), b('\r')):
                    b_lines[-1] = b_lines[-1] + b_linesep

                # If the line to insert after is at the end of the file
                # use the appropriate index value.
                if len(b_lines) == index[1]:
                    if b_lines[index[1] - 1].rstrip(b('\r\n')) != b_line:
                        b_lines.append(b_line + b_linesep)
                        msg = 'line added'
                        changed = True
                elif b_lines[index[1]].rstrip(b('\r\n')) != b_line:
                    b_lines.insert(index[1], b_line + b_linesep)
                    msg = 'line added'
                    changed = True

            elif insertbefore:
                # If the line to insert before is at the beginning of the file
                # use the appropriate index value.
                if index[1] == 0:
                    if b_lines[index[1]].rstrip(b('\r\n')) != b_line:
                        b_lines.insert(index[1], b_line + b_linesep)
                        msg = 'line replaced'
                        changed = True

                elif b_lines[index[1] - 1].rstrip(b('\r\n')) != b_line:
                    b_lines.insert(index[1], b_line + b_linesep)
                    msg = 'line replaced'
                    changed = True

        elif b_lines[index[0]] != b_new_line:
            b_lines[index[0]] = b_new_line
            msg = 'line replaced'
            changed = True

    elif backrefs:
        # Do absolutely nothing, since it's not safe generating the line
        # without the regexp matching to populate the backrefs.
        pass
    # Add it to the beginning of the file
    elif insertbefore == 'BOF' or insertafter == 'BOF':
        b_lines.insert(0, b_line + b_linesep)
        msg = 'line added'
        changed = True
    # Add it to the end of the file if requested or
    # if insertafter/insertbefore didn't match anything
    # (so default behaviour is to add at the end)
    elif insertafter == 'EOF' or index[1] == -1:

        # If the file is not empty then ensure there's a newline before the added line
        if len(b_lines) > 0 and not b_lines[-1][-1:] in (b('\n'), b('\r')):
            b_lines.append(b_linesep)

        b_lines.append(b_line + b_linesep)
        msg = 'line added'
        changed = True
    # insert matched, but not the regexp
    else:
        b_lines.insert(index[1], b_line + b_linesep)
        msg = 'line added'
        changed = True

    if module._diff:
        diff['after'] = to_native(b('').join(b_lines))

    backupdest = ""
    if changed and not module.check_mode:
        if backup and os.path.exists(b_dest):
            backupdest = module.backup_local(dest)
        write_changes(module, b_lines, dest)

    if module.check_mode and not os.path.exists(b_dest):
        module.exit_json(changed=changed, msg=msg, backup=backupdest, diff=diff)

    attr_diff = {}
    msg, changed = check_file_attrs(module, changed, msg, attr_diff)

    attr_diff['before_header'] = '%s (file attributes)' % dest
    attr_diff['after_header'] = '%s (file attributes)' % dest

    difflist = [diff, attr_diff]
    module.exit_json(changed=changed, msg=msg, backup=backupdest, diff=difflist)
def main():
    """Entry point for the blockinfile module (mid-era variant): manage a
    marked block of lines in a file, with diff output and symlink-following
    writes."""
    module = AnsibleModule(
        argument_spec=dict(
            path=dict(type='path', required=True, aliases=['dest', 'destfile', 'name']),
            state=dict(type='str', default='present', choices=['absent', 'present']),
            marker=dict(type='str', default='# {mark} ANSIBLE MANAGED BLOCK'),
            block=dict(type='str', default='', aliases=['content']),
            insertafter=dict(type='str'),
            insertbefore=dict(type='str'),
            create=dict(type='bool', default=False),
            backup=dict(type='bool', default=False),
            validate=dict(type='str'),
            marker_begin=dict(type='str', default='BEGIN'),
            marker_end=dict(type='str', default='END'),
        ),
        mutually_exclusive=[['insertbefore', 'insertafter']],
        add_file_common_args=True,
        supports_check_mode=True
    )

    params = module.params
    path = params['path']

    if os.path.isdir(path):
        module.fail_json(rc=256, msg='Path %s is a directory !' % path)

    path_exists = os.path.exists(path)
    if not path_exists:
        if not module.boolean(params['create']):
            module.fail_json(rc=257, msg='Path %s does not exist !' % path)
        # Create missing parent directories (unless just checking).
        destpath = os.path.dirname(path)
        if not os.path.exists(destpath) and not module.check_mode:
            try:
                os.makedirs(destpath)
            except Exception as e:
                module.fail_json(msg='Error creating %s Error code: %s Error description: %s' % (destpath, e[0], e[1]))
        original = None
        lines = []
    else:
        # Byte-oriented read; markers and block are compared as bytes below.
        f = open(path, 'rb')
        original = f.read()
        f.close()
        lines = original.splitlines()

    diff = {'before': '',
            'after': '',
            'before_header': '%s (content)' % path,
            'after_header': '%s (content)' % path}

    if module._diff and original:
        diff['before'] = original

    insertbefore = params['insertbefore']
    insertafter = params['insertafter']
    block = to_bytes(params['block'])
    marker = to_bytes(params['marker'])
    present = params['state'] == 'present'

    if not present and not path_exists:
        module.exit_json(changed=False, msg="File %s not present" % path)

    # Default placement is at end of file.
    if insertbefore is None and insertafter is None:
        insertafter = 'EOF'

    if insertafter not in (None, 'EOF'):
        insertre = re.compile(to_bytes(insertafter, errors='surrogate_or_strict'))
    elif insertbefore not in (None, 'BOF'):
        insertre = re.compile(to_bytes(insertbefore, errors='surrogate_or_strict'))
    else:
        insertre = None

    # Concrete begin/end marker lines derived from the '{mark}' template.
    marker0 = re.sub(b(r'{mark}'), b(params['marker_begin']), marker)
    marker1 = re.sub(b(r'{mark}'), b(params['marker_end']), marker)
    if present and block:
        # Escape seqeuences like '\n' need to be handled in Ansible 1.x
        # NOTE(review): re.sub('', block, '') inserts `block` into an empty
        # string — looks like a mangled escape-decode; verify against history.
        if module.ansible_version.startswith('1.'):
            block = re.sub('', block, '')
        blocklines = [marker0] + block.splitlines() + [marker1]
    else:
        blocklines = []

    # Locate any existing begin/end marker lines (last occurrence wins).
    n0 = n1 = None
    for i, line in enumerate(lines):
        if line == marker0:
            n0 = i
        if line == marker1:
            n1 = i

    if None in (n0, n1):
        # No complete existing block: choose the insertion point.
        n0 = None
        if insertre is not None:
            for i, line in enumerate(lines):
                if insertre.search(line):
                    n0 = i
            if n0 is None:
                n0 = len(lines)
            elif insertafter is not None:
                n0 += 1
        elif insertbefore is not None:
            n0 = 0  # insertbefore=BOF
        else:
            n0 = len(lines)  # insertafter=EOF
    elif n0 < n1:
        # Existing block found: remove it, then re-insert at its position.
        lines[n0:n1 + 1] = []
    else:
        # Markers reversed; remove the span and insert at the earlier one.
        lines[n1:n0 + 1] = []
        n0 = n1

    lines[n0:n0] = blocklines

    if lines:
        result = b('\n').join(lines)
        # Preserve a trailing newline when the original had one (or is new).
        if original is None or original.endswith(b('\n')):
            result += b('\n')
    else:
        # NOTE(review): empty result is a str '' compared against byte
        # `original` below — always unequal on py3; confirm intent.
        result = ''

    if module._diff:
        diff['after'] = result

    if original == result:
        msg = ''
        changed = False
    elif original is None:
        msg = 'File created'
        changed = True
    elif not blocklines:
        msg = 'Block removed'
        changed = True
    else:
        msg = 'Block inserted'
        changed = True

    if changed and not module.check_mode:
        if module.boolean(params['backup']) and path_exists:
            module.backup_local(path)
        # We should always follow symlinks so that we change the real file
        real_path = os.path.realpath(params['path'])
        write_changes(module, result, real_path)

    if module.check_mode and not path_exists:
        module.exit_json(changed=changed, msg=msg, diff=diff)

    attr_diff = {}
    msg, changed = check_file_attrs(module, changed, msg, attr_diff)

    attr_diff['before_header'] = '%s (file attributes)' % path
    attr_diff['after_header'] = '%s (file attributes)' % path

    difflist = [diff, attr_diff]
    module.exit_json(changed=changed, msg=msg, diff=difflist)
def basic_auth_header(username, password):
    """Return an HTTP Basic ``Authorization`` header value as bytes.

    The username and password are joined with a colon, byte-encoded, and
    base64-encoded, then prefixed with ``Basic ``.
    """
    userpass = "%s:%s" % (username, password)
    b_userpass = to_bytes(userpass, errors='surrogate_or_strict')
    return b("Basic %s") % base64.b64encode(b_userpass)
def present(module, dest, regexp, line, insertafter, insertbefore, create,
            backup, backrefs):
    """Ensure `line` is present in `dest` (lineinfile 'present' state).

    Replaces the last line matched by `regexp` (with optional backref
    expansion), or inserts `line` at the insertafter/insertbefore anchor,
    BOF, or EOF. Exits via exit_json/fail_json; does not return normally.
    """
    diff = {'before': '',
            'after': '',
            'before_header': '%s (content)' % dest,
            'after_header': '%s (content)' % dest}

    b_dest = to_bytes(dest, errors='surrogate_or_strict')
    if not os.path.exists(b_dest):
        if not create:
            module.fail_json(rc=257, msg='Destination %s does not exist !' % dest)
        b_destpath = os.path.dirname(b_dest)
        if not os.path.exists(b_destpath) and not module.check_mode:
            os.makedirs(b_destpath)
        b_lines = []
    else:
        # All file content is handled as bytes to avoid encoding surprises.
        f = open(b_dest, 'rb')
        b_lines = f.readlines()
        f.close()

    if module._diff:
        diff['before'] = to_native(b('').join(b_lines))

    if regexp is not None:
        bre_m = re.compile(to_bytes(regexp, errors='surrogate_or_strict'))

    if insertafter not in (None, 'BOF', 'EOF'):
        bre_ins = re.compile(to_bytes(insertafter, errors='surrogate_or_strict'))
    elif insertbefore not in (None, 'BOF'):
        bre_ins = re.compile(to_bytes(insertbefore, errors='surrogate_or_strict'))
    else:
        bre_ins = None

    # index[0] is the line num where regexp has been found
    # index[1] is the line num where insertafter/inserbefore has been found
    index = [-1, -1]
    m = None
    b_line = to_bytes(line, errors='surrogate_or_strict')
    # Single pass: record the last regexp/exact-line match and the last
    # insertafter/insertbefore anchor match.
    for lineno, b_cur_line in enumerate(b_lines):
        if regexp is not None:
            match_found = bre_m.search(b_cur_line)
        else:
            match_found = b_line == b_cur_line.rstrip(b('\r\n'))
        if match_found:
            index[0] = lineno
            m = match_found
        elif bre_ins is not None and bre_ins.search(b_cur_line):
            if insertafter:
                # + 1 for the next line
                index[1] = lineno + 1
            if insertbefore:
                # + 1 for the previous line
                index[1] = lineno

    msg = ''
    changed = False
    # Regexp matched a line in the file
    b_linesep = to_bytes(os.linesep, errors='surrogate_or_strict')
    if index[0] != -1:
        if backrefs:
            b_new_line = m.expand(b_line)
        else:
            # Don't do backref expansion if not asked.
            b_new_line = b_line

        if not b_new_line.endswith(b_linesep):
            b_new_line += b_linesep

        if b_lines[index[0]] != b_new_line:
            b_lines[index[0]] = b_new_line
            msg = 'line replaced'
            changed = True
    elif backrefs:
        # Do absolutely nothing, since it's not safe generating the line
        # without the regexp matching to populate the backrefs.
        pass
    # Add it to the beginning of the file
    elif insertbefore == 'BOF' or insertafter == 'BOF':
        b_lines.insert(0, b_line + b_linesep)
        msg = 'line added'
        changed = True
    # Add it to the end of the file if requested or
    # if insertafter/insertbefore didn't match anything
    # (so default behaviour is to add at the end)
    elif insertafter == 'EOF' or index[1] == -1:

        # If the file is not empty then ensure there's a newline before the added line
        if len(b_lines) > 0 and not b_lines[-1][-1:] in (b('\n'), b('\r')):
            b_lines.append(b_linesep)

        b_lines.append(b_line + b_linesep)
        msg = 'line added'
        changed = True
    # insert* matched, but not the regexp
    else:
        b_lines.insert(index[1], b_line + b_linesep)
        msg = 'line added'
        changed = True

    if module._diff:
        diff['after'] = to_native(b('').join(b_lines))

    backupdest = ""
    if changed and not module.check_mode:
        if backup and os.path.exists(b_dest):
            backupdest = module.backup_local(dest)
        write_changes(module, b_lines, dest)

    if module.check_mode and not os.path.exists(b_dest):
        module.exit_json(changed=changed, msg=msg, backup=backupdest, diff=diff)

    attr_diff = {}
    msg, changed = check_file_attrs(module, changed, msg, attr_diff)

    attr_diff['before_header'] = '%s (file attributes)' % dest
    attr_diff['after_header'] = '%s (file attributes)' % dest

    difflist = [diff, attr_diff]
    module.exit_json(changed=changed, msg=msg, backup=backupdest, diff=difflist)
HAS_MATCH_HOSTNAME = True # This is a dummy cacert provided for Mac OS since you need at least 1 # ca cert, regardless of validity, for Python on Mac OS to use the # keychain functionality in OpenSSL for validating SSL certificates. # See: http://mercurial.selenic.com/wiki/CACertificates#Mac_OS_X_10.6_and_higher b_DUMMY_CA_CERT = b("""-----BEGIN CERTIFICATE----- MIICvDCCAiWgAwIBAgIJAO8E12S7/qEpMA0GCSqGSIb3DQEBBQUAMEkxCzAJBgNV BAYTAlVTMRcwFQYDVQQIEw5Ob3J0aCBDYXJvbGluYTEPMA0GA1UEBxMGRHVyaGFt MRAwDgYDVQQKEwdBbnNpYmxlMB4XDTE0MDMxODIyMDAyMloXDTI0MDMxNTIyMDAy MlowSTELMAkGA1UEBhMCVVMxFzAVBgNVBAgTDk5vcnRoIENhcm9saW5hMQ8wDQYD VQQHEwZEdXJoYW0xEDAOBgNVBAoTB0Fuc2libGUwgZ8wDQYJKoZIhvcNAQEBBQAD gY0AMIGJAoGBANtvpPq3IlNlRbCHhZAcP6WCzhc5RbsDqyh1zrkmLi0GwcQ3z/r9 gaWfQBYhHpobK2Tiq11TfraHeNB3/VfNImjZcGpN8Fl3MWwu7LfVkJy3gNNnxkA1 4Go0/LmIvRFHhbzgfuo9NFgjPmmab9eqXJceqZIlz2C8xA7EeG7ku0+vAgMBAAGj gaswgagwHQYDVR0OBBYEFPnN1nPRqNDXGlCqCvdZchRNi/FaMHkGA1UdIwRyMHCA FPnN1nPRqNDXGlCqCvdZchRNi/FaoU2kSzBJMQswCQYDVQQGEwJVUzEXMBUGA1UE CBMOTm9ydGggQ2Fyb2xpbmExDzANBgNVBAcTBkR1cmhhbTEQMA4GA1UEChMHQW5z aWJsZYIJAO8E12S7/qEpMAwGA1UdEwQFMAMBAf8wDQYJKoZIhvcNAQEFBQADgYEA MUB80IR6knq9K/tY+hvPsZer6eFMzO3JGkRFBh2kn6JdMDnhYGX7AXVHGflrwNQH qFy+aenWXsC0ZvrikFxbQnX8GVtDADtVznxOi7XzFw7JOxdsVrpXgSN0eh0aMzvV zKPZsZ2miVGclicJHzm5q080b1p/sZtuKIEZk6vZqEg= -----END CERTIFICATE----- """) # # Exceptions # class ConnectionError(Exception):
def daemonize(module, cmd):
    '''
    Execute a command while detaching as a daemon, returns rc, stdout, and stderr.

    :arg module: is an AnsibleModule object, used for it's utility methods
    :arg cmd: is a list or string representing the command and options to run

    This is complex because daemonization is hard for people.
    What we do is daemonize a part of this module, the daemon runs the command,
    picks up the return code and output, and returns it to the main process.
    '''

    # init some vars
    chunk = 4096  # FIXME: pass in as arg?
    errors = 'surrogate_or_strict'

    # start it!
    try:
        pipe = os.pipe()
        pid = os.fork()
    except OSError:
        module.fail_json(msg="Error while attempting to fork: %s", exception=traceback.format_exc())

    # we don't do any locking as this should be a unique module/process
    if pid == 0:
        os.close(pipe[0])

        # Set stdin/stdout/stderr to /dev/null
        fd = os.open(os.devnull, os.O_RDWR)

        # clone stdin/out/err
        for num in range(3):
            if fd != num:
                os.dup2(fd, num)

        # close otherwise
        if fd not in range(3):
            os.close(fd)

        # Make us a daemon (classic double-fork + setsid dance).
        pid = os.fork()

        # end if not in child
        if pid > 0:
            os._exit(0)

        # get new process session and detach
        sid = os.setsid()
        if sid == -1:
            module.fail_json(msg="Unable to detach session while daemonizing")

        # avoid possible problems with cwd being removed
        os.chdir("/")

        pid = os.fork()
        if pid > 0:
            os._exit(0)

        # if command is string deal with py2 vs py3 conversions for shlex
        if not isinstance(cmd, list):
            if PY2:
                cmd = shlex.split(to_bytes(cmd, errors=errors))
            else:
                cmd = shlex.split(to_text(cmd, errors=errors))

        # make sure we always use byte strings
        run_cmd = []
        for c in cmd:
            run_cmd.append(to_bytes(c, errors=errors))

        # execute the command in forked process
        p = subprocess.Popen(run_cmd, shell=False, stdout=subprocess.PIPE,
                             stderr=subprocess.PIPE,
                             preexec_fn=lambda: os.close(pipe[1]))
        fds = [p.stdout, p.stderr]

        # loop reading output till its done
        # Fix: the stderr key was previously the misspelled `p.sterr`, which
        # raised AttributeError before any output was collected.
        output = {p.stdout: b(""), p.stderr: b("")}
        while fds:
            rfd, wfd, efd = select.select(fds, [], fds, 1)
            if (rfd + wfd + efd) or p.poll():
                for out in fds:
                    if out in rfd:
                        data = os.read(out.fileno(), chunk)
                        if not data:
                            fds.remove(out)
                        # os.read() already returns bytes; wrapping it in b()
                        # would try to encode bytes and fail on Python 3.
                        output[out] += data

        # even after fds close, we might want to wait for pid to die
        p.wait()

        # Return the pickled result to the parent over the pipe.
        return_data = pickle.dumps([
            p.returncode,
            to_text(output[p.stdout]),
            to_text(output[p.stderr])
        ])
        os.write(pipe[1], to_bytes(return_data, errors=errors))

        # clean up
        os.close(pipe[1])
        os._exit(0)

    elif pid == -1:
        module.fail_json(msg="Unable to fork, no exception thrown, probably due to lack of resources, check logs.")

    else:
        # in parent
        os.close(pipe[1])
        os.waitpid(pid, 0)

        # Grab response data after child finishes
        return_data = b("")
        while True:
            rfd, wfd, efd = select.select([pipe[0]], [], [pipe[0]])
            if pipe[0] in rfd:
                data = os.read(pipe[0], chunk)
                if not data:
                    break
                # os.read() returns bytes; append directly.
                return_data += data

        # Fix: pickle.loads() requires a bytes-like object; the previous
        # to_text() conversion raised TypeError on Python 3.
        return pickle.loads(to_bytes(return_data, errors=errors))
def main():
    """Run a command and report whether its rc, stdout, or stderr differs
    from exactly one expected (`want_*`) value; `changed` signals a mismatch
    and a unified-diff payload is attached for multi-line output."""
    # the command module is the one ansible module that does not take key=value args
    # hence don't copy this one if you are looking to build others!
    module = AnsibleModule(argument_spec=dict(
        cmd=dict(type='str', required=True),
        chdir=dict(type='path'),
        use_shell=dict(type='bool', default=False),
        want_rc=dict(type='int'),
        want_stdout=dict(type='str'),
        want_stderr=dict(type='str'),
    ),
        supports_check_mode=True)

    chdir = module.params['chdir']
    use_shell = module.params['use_shell']
    cmd = module.params['cmd']

    if cmd.strip() == '':
        module.fail_json(rc=256, msg="no command given")

    # Exactly one expectation must be supplied.
    want_count = 0
    if module.params['want_rc'] is not None:
        want_count += 1
    if module.params['want_stdout'] is not None:
        want_count += 1
    if module.params['want_stderr'] is not None:
        want_count += 1
    if want_count != 1:
        module.fail_json(rc=256, msg="one of want_rc, want_stdout and want_stderr must be given (not zero or two or more)")

    if chdir:
        chdir = os.path.abspath(chdir)
        os.chdir(chdir)

    if use_shell:
        args = cmd
    else:
        args = shlex.split(cmd)

    startd = datetime.datetime.now()

    # encoding=None: out/err are returned as raw bytes.
    rc, out, err = module.run_command(args, use_unsafe_shell=use_shell, encoding=None)

    endd = datetime.datetime.now()
    delta = endd - startd

    if out is None:
        out = b('')
    if err is None:
        err = b('')

    stdout = out.rstrip(b("\r\n"))
    stderr = err.rstrip(b("\r\n"))

    result = {
        'result': {
            'stdout': stdout,
            'stderr': stderr,
            'rc': rc,
        },
        'start': str(startd),
        'end': str(endd),
        'delta': str(delta)
    }

    changed = False
    if module.params['want_rc'] is not None:
        changed = (rc != module.params['want_rc'])
    elif module.params['want_stdout'] is not None:
        # Fix: the parameter arrives as text while the command output is
        # bytes (encoding=None); compare and concatenate in bytes, otherwise
        # the comparison is always unequal and `+ b("\n")` raises TypeError
        # on Python 3.
        want_stdout = to_bytes(module.params['want_stdout'])
        changed = (stdout != want_stdout)
        if changed and b("\n") in stdout:
            result['diff'] = {
                'before_header': 'want_stdout',
                'after_header': 'result.stdout',
                'before': want_stdout + b("\n"),
                'after': stdout + b("\n")
            }
    elif module.params['want_stderr'] is not None:
        # Same bytes-normalisation fix as for want_stdout above.
        want_stderr = to_bytes(module.params['want_stderr'])
        changed = (stderr != want_stderr)
        if changed and b("\n") in stderr:
            result['diff'] = {
                'before_header': 'want_stderr',
                'after_header': 'result.stderr',
                'before': want_stderr + b("\n"),
                'after': stderr + b("\n")
            }

    result['changed'] = changed

    module.exit_json(**result)
def main():
    """Entry point for the file module: manage files, directories, links,
    hard links, and touch/absent states, plus filesystem attributes."""
    module = AnsibleModule(
        argument_spec=dict(
            state=dict(choices=['file', 'directory', 'link', 'hard', 'touch', 'absent'], default=None),
            path=dict(aliases=['dest', 'name'], required=True, type='path'),
            original_basename=dict(required=False),  # Internal use only, for recursive ops
            recurse=dict(default=False, type='bool'),
            force=dict(required=False, default=False, type='bool'),
            diff_peek=dict(default=None),  # Internal use only, for internal checks in the action plugins
            validate=dict(required=False, default=None),  # Internal use only, for template and copy
            src=dict(required=False, default=None, type='path'),
        ),
        add_file_common_args=True,
        supports_check_mode=True
    )

    params = module.params
    state = params['state']
    force = params['force']
    diff_peek = params['diff_peek']
    src = params['src']
    b_src = to_bytes(src, errors='surrogate_or_strict')
    follow = params['follow']

    # modify source as we later reload and pass, specially relevant when used by other modules.
    path = params['path']
    b_path = to_bytes(path, errors='surrogate_or_strict')

    # short-circuit for diff_peek
    if diff_peek is not None:
        # Sniff the first 8K for a NUL byte to guess at binary content; any
        # read error is deliberately ignored (best-effort check).
        appears_binary = False
        try:
            f = open(b_path, 'rb')
            head = f.read(8192)
            f.close()
            if b("\x00") in head:
                appears_binary = True
        except:
            pass
        module.exit_json(path=path, changed=False, appears_binary=appears_binary)

    prev_state = get_state(b_path)

    # state should default to file, but since that creates many conflicts,
    # default to 'current' when it exists.
    if state is None:
        if prev_state != 'absent':
            state = prev_state
        else:
            state = 'file'

    # source is both the source of a symlink or an informational passing of the src for a template module
    # or copy module, even if this module never uses it, it is needed to key off some things
    if src is None:
        if state in ('link', 'hard'):
            if follow and state == 'link':
                # use the current target of the link as the source
                src = to_native(os.path.realpath(b_path), errors='strict')
            else:
                module.fail_json(msg='src and dest are required for creating links')

    # original_basename is used by other modules that depend on file.
    if os.path.isdir(b_path) and state not in ("link", "absent"):
        basename = None
        if params['original_basename']:
            basename = params['original_basename']
        elif src is not None:
            basename = os.path.basename(src)
        if basename:
            params['path'] = path = os.path.join(path, basename)
            b_path = to_bytes(path, errors='surrogate_or_strict')

    # make sure the target path is a directory when we're doing a recursive operation
    recurse = params['recurse']
    if recurse and state != 'directory':
        module.fail_json(path=path, msg="recurse option requires state to be 'directory'")

    file_args = module.load_file_common_arguments(params)

    changed = False
    diff = {'before': {'path': path},
            'after': {'path': path},
            }

    state_change = False
    if prev_state != state:
        diff['before']['state'] = prev_state
        diff['after']['state'] = state
        state_change = True

    if state == 'absent':
        if state_change:
            if not module.check_mode:
                if prev_state == 'directory':
                    try:
                        shutil.rmtree(b_path, ignore_errors=False)
                    except Exception:
                        e = get_exception()
                        module.fail_json(msg="rmtree failed: %s" % str(e))
                else:
                    try:
                        os.unlink(b_path)
                    except Exception:
                        e = get_exception()
                        module.fail_json(path=path, msg="unlinking failed: %s " % str(e))
            module.exit_json(path=path, changed=True, diff=diff)
        else:
            module.exit_json(path=path, changed=False)

    elif state == 'file':

        if state_change:
            if follow and prev_state == 'link':
                # follow symlink and operate on original
                b_path = os.path.realpath(b_path)
                path = to_native(b_path, errors='strict')
                prev_state = get_state(b_path)
                file_args['path'] = path

        if prev_state not in ('file', 'hard'):
            # file is not absent and any other state is a conflict
            module.fail_json(path=path, msg='file (%s) is %s, cannot continue' % (path, prev_state))

        changed = module.set_fs_attributes_if_different(file_args, changed, diff)
        module.exit_json(path=path, changed=changed, diff=diff)

    elif state == 'directory':
        if follow and prev_state == 'link':
            b_path = os.path.realpath(b_path)
            path = to_native(b_path, errors='strict')
            prev_state = get_state(b_path)

        if prev_state == 'absent':
            if module.check_mode:
                module.exit_json(changed=True, diff=diff)
            changed = True
            curpath = ''

            try:
                # Split the path so we can apply filesystem attributes recursively
                # from the root (/) directory for absolute paths or the base path
                # of a relative path.  We can then walk the appropriate directory
                # path to apply attributes.
                for dirname in path.strip('/').split('/'):
                    curpath = '/'.join([curpath, dirname])
                    # Remove leading slash if we're creating a relative path
                    if not os.path.isabs(path):
                        curpath = curpath.lstrip('/')
                    b_curpath = to_bytes(curpath, errors='surrogate_or_strict')
                    if not os.path.exists(b_curpath):
                        try:
                            os.mkdir(b_curpath)
                        except OSError:
                            ex = get_exception()
                            # Possibly something else created the dir since the os.path.exists
                            # check above. As long as it's a dir, we don't need to error out.
                            if not (ex.errno == errno.EEXIST and os.path.isdir(b_curpath)):
                                raise
                        tmp_file_args = file_args.copy()
                        tmp_file_args['path'] = curpath
                        changed = module.set_fs_attributes_if_different(tmp_file_args, changed, diff)
            except Exception:
                e = get_exception()
                module.fail_json(path=path, msg='There was an issue creating %s as requested: %s' % (curpath, str(e)))

        # We already know prev_state is not 'absent', therefore it exists in some form.
        elif prev_state != 'directory':
            module.fail_json(path=path, msg='%s already exists as a %s' % (path, prev_state))

        changed = module.set_fs_attributes_if_different(file_args, changed, diff)

        if recurse:
            changed |= recursive_set_attributes(module, to_bytes(file_args['path'], errors='surrogate_or_strict'), follow, file_args)

        module.exit_json(path=path, changed=changed, diff=diff)

    elif state in ('link', 'hard'):

        # Resolve the link source relative to the destination's directory.
        if os.path.isdir(b_path) and not os.path.islink(b_path):
            relpath = path
        else:
            b_relpath = os.path.dirname(b_path)
            relpath = to_native(b_relpath, errors='strict')

        absrc = os.path.join(relpath, src)
        b_absrc = to_bytes(absrc, errors='surrogate_or_strict')
        if not os.path.exists(b_absrc) and not force:
            module.fail_json(path=path, src=src, msg='src file does not exist, use "force=yes" if you really want to create the link: %s' % absrc)

        if state == 'hard':
            if not os.path.isabs(b_src):
                module.fail_json(msg="absolute paths are required")
        elif prev_state == 'directory':
            if not force:
                module.fail_json(path=path, msg='refusing to convert between %s and %s for %s' % (prev_state, state, path))
            elif len(os.listdir(b_path)) > 0:
                # refuse to replace a directory that has files in it
                module.fail_json(path=path, msg='the directory %s is not empty, refusing to convert it' % path)
        elif prev_state in ('file', 'hard') and not force:
            module.fail_json(path=path, msg='refusing to convert between %s and %s for %s' % (prev_state, state, path))

        # Decide whether the existing destination needs to be replaced.
        if prev_state == 'absent':
            changed = True
        elif prev_state == 'link':
            b_old_src = os.readlink(b_path)
            if b_old_src != b_src:
                changed = True
        elif prev_state == 'hard':
            if not (state == 'hard' and os.stat(b_path).st_ino == os.stat(b_src).st_ino):
                changed = True
                if not force:
                    module.fail_json(dest=path, src=src, msg='Cannot link, different hard link exists at destination')
        elif prev_state in ('file', 'directory'):
            changed = True
            if not force:
                module.fail_json(dest=path, src=src, msg='Cannot link, %s exists at destination' % prev_state)
        else:
            module.fail_json(dest=path, src=src, msg='unexpected position reached')

        if changed and not module.check_mode:
            if prev_state != 'absent':
                # try to replace atomically
                b_tmppath = to_bytes(os.path.sep).join(
                    [os.path.dirname(b_path), to_bytes(".%s.%s.tmp" % (os.getpid(), time.time()))]
                )
                try:
                    if prev_state == 'directory' and (state == 'hard' or state == 'link'):
                        os.rmdir(b_path)
                    if state == 'hard':
                        os.link(b_src, b_tmppath)
                    else:
                        os.symlink(b_src, b_tmppath)
                    os.rename(b_tmppath, b_path)
                except OSError:
                    e = get_exception()
                    if os.path.exists(b_tmppath):
                        os.unlink(b_tmppath)
                    module.fail_json(path=path, msg='Error while replacing: %s' % to_native(e, nonstring='simplerepr'))
            else:
                try:
                    if state == 'hard':
                        os.link(b_src, b_path)
                    else:
                        os.symlink(b_src, b_path)
                except OSError:
                    e = get_exception()
                    module.fail_json(path=path, msg='Error while linking: %s' % to_native(e, nonstring='simplerepr'))

        if module.check_mode and not os.path.exists(b_path):
            module.exit_json(dest=path, src=src, changed=changed, diff=diff)

        changed = module.set_fs_attributes_if_different(file_args, changed, diff)
        module.exit_json(dest=path, src=src, changed=changed, diff=diff)

    elif state == 'touch':
        if not module.check_mode:

            if prev_state == 'absent':
                try:
                    open(b_path, 'wb').close()
                except OSError:
                    e = get_exception()
                    module.fail_json(path=path, msg='Error, could not touch target: %s' % to_native(e, nonstring='simplerepr'))
            elif prev_state in ('file', 'directory', 'hard'):
                try:
                    os.utime(b_path, None)
                except OSError:
                    e = get_exception()
                    module.fail_json(path=path, msg='Error while touching existing target: %s' % to_native(e, nonstring='simplerepr'))
            else:
                module.fail_json(msg='Cannot touch other than files, directories, and hardlinks (%s is %s)' % (path, prev_state))

            try:
                module.set_fs_attributes_if_different(file_args, True, diff)
            except SystemExit:
                e = get_exception()
                if e.code:
                    # We take this to mean that fail_json() was called from
                    # somewhere in basic.py
                    if prev_state == 'absent':
                        # If we just created the file we can safely remove it
                        os.remove(b_path)
                raise e

        module.exit_json(dest=path, changed=True, diff=diff)

    module.fail_json(path=path, msg='unexpected position reached')
def daemonize(module, cmd):
    '''
    Execute a command while detaching as a daemon, returns rc, stdout, and stderr.

    :arg module: is an AnsibleModule object, used for it's utility methods
    :arg cmd: is a list or string representing the command and options to run

    This is complex because daemonization is hard for people.
    What we do is daemonize a part of this module, the daemon runs the
    command, picks up the return code and output, and returns it to the
    main process.
    '''

    # init some vars
    chunk = 4096  # FIXME: pass in as arg?
    errors = 'surrogate_or_strict'

    # start it!
    # The pipe carries the pickled (rc, stdout, stderr) result from the
    # forked child back to this parent process.
    try:
        pipe = os.pipe()
        pid = fork_process()
    except OSError:
        module.fail_json(msg="Error while attempting to fork: %s", exception=traceback.format_exc())
    except Exception as exc:
        module.fail_json(msg=to_text(exc), exception=traceback.format_exc())

    # we don't do any locking as this should be a unique module/process
    if pid == 0:
        # Child: close the read end; only the write end is needed here.
        os.close(pipe[0])

        # if command is string deal with py2 vs py3 conversions for shlex
        if not isinstance(cmd, list):
            if PY2:
                cmd = shlex.split(to_bytes(cmd, errors=errors))
            else:
                cmd = shlex.split(to_text(cmd, errors=errors))

        # make sure we always use byte strings
        run_cmd = []
        for c in cmd:
            run_cmd.append(to_bytes(c, errors=errors))

        # execute the command in forked process
        # preexec_fn closes the parent's pipe write-end in the grandchild so
        # the command itself cannot keep the result pipe open.
        p = subprocess.Popen(run_cmd, shell=False, stdout=subprocess.PIPE,
                             stderr=subprocess.PIPE,
                             preexec_fn=lambda: os.close(pipe[1]))
        fds = [p.stdout, p.stderr]

        # loop reading output till its done
        output = {p.stdout: b(""), p.stderr: b("")}
        while fds:
            rfd, wfd, efd = select.select(fds, [], fds, 1)
            # NOTE(review): p.poll() is falsy both while running (None) and
            # on rc==0, so this condition effectively keys off readable fds
            # until both streams hit EOF — confirm before restructuring.
            if (rfd + wfd + efd) or p.poll():
                for out in fds:
                    if out in rfd:
                        data = os.read(out.fileno(), chunk)
                        if not data:
                            # EOF on this stream; stop selecting on it.
                            fds.remove(out)
                        output[out] += b(data)

        # even after fds close, we might want to wait for pid to die
        p.wait()

        # Return a pickled data of parent
        return_data = pickle.dumps([
            p.returncode,
            to_text(output[p.stdout]),
            to_text(output[p.stderr])
        ], protocol=pickle.HIGHEST_PROTOCOL)
        os.write(pipe[1], to_bytes(return_data, errors=errors))

        # clean up
        os.close(pipe[1])
        os._exit(0)

    elif pid == -1:
        module.fail_json(
            msg="Unable to fork, no exception thrown, probably due to lack of resources, check logs.")

    else:
        # in parent
        os.close(pipe[1])
        os.waitpid(pid, 0)

        # Grab response data after child finishes
        return_data = b("")
        while True:
            rfd, wfd, efd = select.select([pipe[0]], [], [pipe[0]])
            if pipe[0] in rfd:
                data = os.read(pipe[0], chunk)
                if not data:
                    break
                return_data += b(data)

        # Note: no need to specify encoding on py3 as this module sends the
        # pickle to itself (thus same python interpreter so we aren't mixing
        # py2 and py3)
        return pickle.loads(to_bytes(return_data, errors=errors))
def main():
    """Entry point for the blockinfile-style module: insert, replace, or
    remove a marked block of text in a destination file."""
    module = AnsibleModule(
        argument_spec=dict(
            dest=dict(required=True, aliases=['name', 'destfile'], type='path'),
            state=dict(default='present', choices=['absent', 'present']),
            marker=dict(default='# {mark} ANSIBLE MANAGED BLOCK', type='str'),
            block=dict(default='', type='str', aliases=['content']),
            insertafter=dict(default=None),
            insertbefore=dict(default=None),
            create=dict(default=False, type='bool'),
            backup=dict(default=False, type='bool'),
            validate=dict(default=None, type='str'),
        ),
        mutually_exclusive=[['insertbefore', 'insertafter']],
        add_file_common_args=True,
        supports_check_mode=True
    )

    params = module.params
    dest = params['dest']
    # 'follow' is a file-common arg (add_file_common_args); resolve symlinks
    # so we edit the real target.
    if module.boolean(params.get('follow', None)):
        dest = os.path.realpath(dest)

    if os.path.isdir(dest):
        module.fail_json(rc=256, msg='Destination %s is a directory !' % dest)

    path_exists = os.path.exists(dest)
    if not path_exists:
        if not module.boolean(params['create']):
            module.fail_json(rc=257, msg='Destination %s does not exist !' % dest)
        original = None
        lines = []
    else:
        # Read as bytes; all subsequent comparisons/joins are byte-oriented.
        f = open(dest, 'rb')
        original = f.read()
        f.close()
        lines = original.splitlines()

    insertbefore = params['insertbefore']
    insertafter = params['insertafter']
    block = to_bytes(params['block'])
    marker = to_bytes(params['marker'])
    present = params['state'] == 'present'

    if not present and not path_exists:
        module.exit_json(changed=False, msg="File not present")

    # Default placement is end-of-file.
    if insertbefore is None and insertafter is None:
        insertafter = 'EOF'

    if insertafter not in (None, 'EOF'):
        insertre = re.compile(insertafter)
    elif insertbefore not in (None, 'BOF'):
        insertre = re.compile(insertbefore)
    else:
        insertre = None

    # Expand the {mark} placeholder into the BEGIN/END marker lines.
    marker0 = re.sub(b(r'{mark}'), b('BEGIN'), marker)
    marker1 = re.sub(b(r'{mark}'), b('END'), marker)
    if present and block:
        # Escape seqeuences like '\n' need to be handled in Ansible 1.x
        if module.ansible_version.startswith('1.'):
            # NOTE(review): re.sub('', block, '') returns `block` unchanged
            # (empty pattern replaced by `block` in an empty string) — this
            # looks like a vestigial/no-op line; confirm against upstream
            # history before touching it.
            block = re.sub('', block, '')
        blocklines = [marker0] + block.splitlines() + [marker1]
    else:
        blocklines = []

    # Locate existing BEGIN (n0) and END (n1) marker lines, if any.
    n0 = n1 = None
    for i, line in enumerate(lines):
        if line == marker0:
            n0 = i
        if line == marker1:
            n1 = i

    if None in (n0, n1):
        # No complete existing block: compute the insertion index.
        n0 = None
        if insertre is not None:
            # Last line matching insertafter/insertbefore wins.
            for i, line in enumerate(lines):
                if insertre.search(line):
                    n0 = i
            if n0 is None:
                n0 = len(lines)
            elif insertafter is not None:
                n0 += 1
        elif insertbefore is not None:
            n0 = 0  # insertbefore=BOF
        else:
            n0 = len(lines)  # insertafter=EOF
    elif n0 < n1:
        # Remove the existing block (inclusive of both markers).
        lines[n0:n1 + 1] = []
    else:
        # Markers out of order; remove the inverted span and insert at n1.
        lines[n1:n0 + 1] = []
        n0 = n1

    lines[n0:n0] = blocklines

    if lines:
        result = b('\n').join(lines)
        # Preserve (or add, for new files) a trailing newline.
        if original is None or original.endswith(b('\n')):
            result += b('\n')
    else:
        # NOTE(review): str '' here vs bytes everywhere else — works under
        # py2 string semantics; verify before running under py3.
        result = ''

    if original == result:
        msg = ''
        changed = False
    elif original is None:
        msg = 'File created'
        changed = True
    elif not blocklines:
        msg = 'Block removed'
        changed = True
    else:
        msg = 'Block inserted'
        changed = True

    if changed and not module.check_mode:
        if module.boolean(params['backup']) and path_exists:
            module.backup_local(dest)
        write_changes(module, result, dest)

    if module.check_mode and not path_exists:
        # Can't check attributes of a file that doesn't exist yet.
        module.exit_json(changed=changed, msg=msg)

    msg, changed = check_file_attrs(module, changed, msg)
    module.exit_json(changed=changed, msg=msg)
def http_request(self, req):
    """Pre-flight SSL validation handler: opens a throwaway connection to
    the target host (optionally through an HTTP CONNECT proxy), verifies
    the server certificate against the configured CA bundle, then returns
    the request unchanged for the normal opener to process.

    Raises ProxyError / ConnectionError, or calls
    build_ssl_validation_error() on certificate failure.
    """
    tmp_ca_cert_path, to_add_ca_cert_path, paths_checked = self.get_ca_certs()
    https_proxy = os.environ.get('https_proxy')
    context = None
    if HAS_SSLCONTEXT:
        context = self._make_context(to_add_ca_cert_path)

    # Detect if 'no_proxy' environment variable is set and if our URL is included
    use_proxy = self.detect_no_proxy(req.get_full_url())

    if not use_proxy:
        # ignore proxy settings for this host request
        return req

    try:
        s = socket.socket(socket.AF_INET, socket.SOCK_STREAM)
        if https_proxy:
            proxy_parts = generic_urlparse(urlparse(https_proxy))
            port = proxy_parts.get('port') or 443
            s.connect((proxy_parts.get('hostname'), port))
            if proxy_parts.get('scheme') == 'http':
                # Tunnel through the proxy with an HTTP CONNECT request.
                s.sendall(self.CONNECT_COMMAND % (self.hostname, self.port))
                if proxy_parts.get('username'):
                    credentials = "%s:%s" % (proxy_parts.get('username', ''), proxy_parts.get('password', ''))
                    s.sendall(b('Proxy-Authorization: Basic %s\r\n') % base64.b64encode(to_bytes(credentials, errors='surrogate_or_strict')).strip())
                s.sendall(b('\r\n'))
                # Read until end of the proxy's response headers.
                connect_result = b("")
                while connect_result.find(b("\r\n\r\n")) <= 0:
                    connect_result += s.recv(4096)
                    # 128 kilobytes of headers should be enough for everyone.
                    if len(connect_result) > 131072:
                        raise ProxyError('Proxy sent too verbose headers. Only 128KiB allowed.')
                self.validate_proxy_response(connect_result)
                if context:
                    ssl_s = context.wrap_socket(s, server_hostname=self.hostname)
                elif HAS_URLLIB3_SNI_SUPPORT:
                    ssl_s = ssl_wrap_socket(s, ca_certs=tmp_ca_cert_path, cert_reqs=ssl.CERT_REQUIRED, ssl_version=PROTOCOL, server_hostname=self.hostname)
                else:
                    # No SNI support: wrap and verify the hostname manually.
                    ssl_s = ssl.wrap_socket(s, ca_certs=tmp_ca_cert_path, cert_reqs=ssl.CERT_REQUIRED, ssl_version=PROTOCOL)
                    match_hostname(ssl_s.getpeercert(), self.hostname)
            else:
                raise ProxyError('Unsupported proxy scheme: %s. Currently ansible only supports HTTP proxies.' % proxy_parts.get('scheme'))
        else:
            s.connect((self.hostname, self.port))
            if context:
                ssl_s = context.wrap_socket(s, server_hostname=self.hostname)
            elif HAS_URLLIB3_SNI_SUPPORT:
                ssl_s = ssl_wrap_socket(s, ca_certs=tmp_ca_cert_path, cert_reqs=ssl.CERT_REQUIRED, ssl_version=PROTOCOL, server_hostname=self.hostname)
            else:
                # No SNI support: wrap and verify the hostname manually.
                ssl_s = ssl.wrap_socket(s, ca_certs=tmp_ca_cert_path, cert_reqs=ssl.CERT_REQUIRED, ssl_version=PROTOCOL)
                match_hostname(ssl_s.getpeercert(), self.hostname)
        # close the ssl connection
        #ssl_s.unwrap()
        s.close()
    except (ssl.SSLError, socket.error):
        e = get_exception()
        # fail if we tried all of the certs but none worked
        if 'connection refused' in str(e).lower():
            raise ConnectionError('Failed to connect to %s:%s.' % (self.hostname, self.port))
        else:
            build_ssl_validation_error(self.hostname, self.port, paths_checked)
    except CertificateError:
        build_ssl_validation_error(self.hostname, self.port, paths_checked)

    try:
        # cleanup the temp file created, don't worry
        # if it fails for some reason
        os.remove(tmp_ca_cert_path)
    except:
        pass

    try:
        # cleanup the temp file created, don't worry
        # if it fails for some reason
        if to_add_ca_cert_path:
            os.remove(to_add_ca_cert_path)
    except:
        pass

    return req
def zuul_run_command(self, args, zuul_log_id, check_rc=False, close_fds=True, executable=None,
                     data=None, binary_data=False, path_prefix=None, cwd=None,
                     use_unsafe_shell=False, prompt_regex=None, environ_update=None,
                     umask=None, encoding='utf-8', errors='surrogate_or_strict'):
    '''
    Execute a command, returns rc, stdout, and stderr.

    ZUUL variant of AnsibleModule.run_command: stderr is merged into stdout
    and all output is streamed to a console log (via the `follow` thread and
    `Console`), identified by `zuul_log_id`, instead of being collected in
    the select() loop of the upstream implementation.

    :arg args: is the command to run
        * If args is a list, the command will be run with shell=False.
        * If args is a string and use_unsafe_shell=False it will split args to a list and run with shell=False
        * If args is a string and use_unsafe_shell=True it runs with shell=True.
    :kw check_rc: Whether to call fail_json in case of non zero RC.
        Default False
    :kw close_fds: See documentation for subprocess.Popen(). Default True
    :kw executable: See documentation for subprocess.Popen(). Default None
    :kw data: If given, information to write to the stdin of the command
    :kw binary_data: If False, append a newline to the data.  Default False
    :kw path_prefix: If given, additional path to find the command in.
        This adds to the PATH environment vairable so helper commands in
        the same directory can also be found
    :kw cwd: If given, working directory to run the command inside
    :kw use_unsafe_shell: See `args` parameter.  Default False
    :kw prompt_regex: Regex string (not a compiled regex) which can be
        used to detect prompts in the stdout which would otherwise cause
        the execution to hang (especially if no input data is specified)
    :kw environ_update: dictionary to *update* os.environ with
    :kw umask: Umask to be used when running the command. Default None
    :kw encoding: Since we return native strings, on python3 we need to
        know the encoding to use to transform from bytes to text.  If you
        want to always get bytes back, use encoding=None.  The default is
        "utf-8".  This does not affect transformation of strings given as
        args.
    :kw errors: Since we return native strings, on python3 we need to
        transform stdout and stderr from bytes to text.  If the bytes are
        undecodable in the ``encoding`` specified, then use this error
        handler to deal with them.  The default is ``surrogate_or_strict``
        which means that the bytes will be decoded using the
        surrogateescape error handler if available (available on all
        python3 versions we support) otherwise a UnicodeError traceback
        will be raised.  This does not affect transformations of strings
        given as args.
    :returns: A 3-tuple of return code (integer), stdout (native string),
        and stderr (native string).  On python2, stdout and stderr are both
        byte strings.  On python3, stdout and stderr are text strings converted
        according to the encoding and errors parameters.  If you want byte
        strings on python3, use encoding=None to turn decoding to text off.
    '''

    if not isinstance(args, (list, binary_type, text_type)):
        msg = "Argument 'args' to run_command must be list or string"
        self.fail_json(rc=257, cmd=args, msg=msg)

    shell = False
    if use_unsafe_shell:

        # stringify args for unsafe/direct shell usage
        if isinstance(args, list):
            args = " ".join([shlex_quote(x) for x in args])

        # not set explicitly, check if set by controller
        if executable:
            args = [executable, '-c', args]
        elif self._shell not in (None, '/bin/sh'):
            args = [self._shell, '-c', args]
        else:
            shell = True
    else:
        # ensure args are a list
        if isinstance(args, (binary_type, text_type)):
            # On python2.6 and below, shlex has problems with text type
            # On python3, shlex needs a text type.
            if PY2:
                args = to_bytes(args, errors='surrogate_or_strict')
            elif PY3:
                args = to_text(args, errors='surrogateescape')
            args = shlex.split(args)

        # expand shellisms
        args = [os.path.expanduser(os.path.expandvars(x)) for x in args if x is not None]

    prompt_re = None
    if prompt_regex:
        if isinstance(prompt_regex, text_type):
            if PY3:
                prompt_regex = to_bytes(prompt_regex, errors='surrogateescape')
            elif PY2:
                prompt_regex = to_bytes(prompt_regex, errors='surrogate_or_strict')
        try:
            prompt_re = re.compile(prompt_regex, re.MULTILINE)
        except re.error:
            self.fail_json(msg="invalid prompt regular expression given to run_command")

    rc = 0
    msg = None
    st_in = None

    # Manipulate the environ we'll send to the new process
    old_env_vals = {}
    # We can set this from both an attribute and per call
    for key, val in self.run_command_environ_update.items():
        old_env_vals[key] = os.environ.get(key, None)
        os.environ[key] = val
    if environ_update:
        for key, val in environ_update.items():
            old_env_vals[key] = os.environ.get(key, None)
            os.environ[key] = val
    if path_prefix:
        old_env_vals['PATH'] = os.environ['PATH']
        os.environ['PATH'] = "%s:%s" % (path_prefix, os.environ['PATH'])

    # If using test-module and explode, the remote lib path will resemble ...
    #   /tmp/test_module_scratch/debug_dir/ansible/module_utils/basic.py
    # If using ansible or ansible-playbook with a remote system ...
    #   /tmp/ansible_vmweLQ/ansible_modlib.zip/ansible/module_utils/basic.py

    # Clean out python paths set by ansiballz
    if 'PYTHONPATH' in os.environ:
        pypaths = os.environ['PYTHONPATH'].split(':')
        pypaths = [x for x in pypaths
                   if not x.endswith('/ansible_modlib.zip') and
                   not x.endswith('/debug_dir')]
        os.environ['PYTHONPATH'] = ':'.join(pypaths)
        if not os.environ['PYTHONPATH']:
            del os.environ['PYTHONPATH']

    # create a printable version of the command for use
    # in reporting later, which strips out things like
    # passwords from the args list
    to_clean_args = args
    if PY2:
        if isinstance(args, text_type):
            to_clean_args = to_bytes(args)
    else:
        if isinstance(args, binary_type):
            to_clean_args = to_text(args)
    if isinstance(args, (text_type, binary_type)):
        to_clean_args = shlex.split(to_clean_args)

    clean_args = []
    is_passwd = False
    for arg in (to_native(a) for a in to_clean_args):
        if is_passwd:
            is_passwd = False
            clean_args.append('********')
            continue
        if PASSWD_ARG_RE.match(arg):
            sep_idx = arg.find('=')
            if sep_idx > -1:
                clean_args.append('%s=********' % arg[:sep_idx])
                continue
            else:
                is_passwd = True
        arg = heuristic_log_sanitize(arg, self.no_log_values)
        clean_args.append(arg)
    clean_args = ' '.join(shlex_quote(arg) for arg in clean_args)

    if data:
        st_in = subprocess.PIPE

    # ZUUL: changed stderr to follow stdout
    kwargs = dict(
        executable=executable,
        shell=shell,
        close_fds=close_fds,
        stdin=st_in,
        stdout=subprocess.PIPE,
        stderr=subprocess.STDOUT,
    )

    # store the pwd
    prev_dir = os.getcwd()

    # make sure we're in the right working directory
    if cwd and os.path.isdir(cwd):
        cwd = os.path.abspath(os.path.expanduser(cwd))
        kwargs['cwd'] = cwd
        try:
            os.chdir(cwd)
        except (OSError, IOError) as e:
            self.fail_json(rc=e.errno, msg="Could not open %s, %s" % (cwd, to_native(e)),
                           exception=traceback.format_exc())

    old_umask = None
    if umask:
        old_umask = os.umask(umask)

    t = None
    fail_json_kwargs = None

    try:
        if self._debug:
            self.log('Executing: ' + clean_args)

        # ZUUL: Replaced the execution loop with the zuul_runner run function
        cmd = subprocess.Popen(args, **kwargs)
        if self.no_log:
            t = None
        else:
            # Stream the merged stdout/stderr to the console log in a
            # daemon thread so this function never blocks on output.
            t = threading.Thread(target=follow, args=(cmd.stdout, zuul_log_id))
            t.daemon = True
            t.start()

        # ZUUL: Our log thread will catch the output so don't do that here.
        # # the communication logic here is essentially taken from that
        # # of the _communicate() function in ssh.py
        #
        # stdout = b('')
        # stderr = b('')
        #
        # # ZUUL: stderr follows stdout
        # rpipes = [cmd.stdout]
        #
        # if data:
        #     if not binary_data:
        #         data += '\n'
        #     if isinstance(data, text_type):
        #         data = to_bytes(data)
        #     cmd.stdin.write(data)
        #     cmd.stdin.close()
        #
        # while True:
        #     rfds, wfds, efds = select.select(rpipes, [], rpipes, 1)
        #     stdout += self._read_from_pipes(rpipes, rfds, cmd.stdout)
        #
        #     # ZUUL: stderr follows stdout
        #     # stderr += self._read_from_pipes(rpipes, rfds, cmd.stderr)
        #
        #     # if we're checking for prompts, do it now
        #     if prompt_re:
        #         if prompt_re.search(stdout) and not data:
        #             if encoding:
        #                 stdout = to_native(stdout, encoding=encoding, errors=errors)
        #             else:
        #                 stdout = stdout
        #             return (257, stdout, "A prompt was encountered while running a command, but no input data was specified")
        #     # only break out if no pipes are left to read or
        #     # the pipes are completely read and
        #     # the process is terminated
        #     if (not rpipes or not rfds) and cmd.poll() is not None:
        #         break
        #     # No pipes are left to read but process is not yet terminated
        #     # Only then it is safe to wait for the process to be finished
        #     # NOTE: Actually cmd.poll() is always None here if rpipes is empty
        #     elif not rpipes and cmd.poll() is None:
        #         cmd.wait()
        #         # The process is terminated. Since no pipes to read from are
        #         # left, there is no need to call select() again.
        #         break

        # ZUUL: If the console log follow thread *is* stuck in readline,
        # we can't close stdout (attempting to do so raises an
        # exception) , so this is disabled.
        # cmd.stdout.close()
        # cmd.stderr.close()

        rc = cmd.wait()

        # Give the thread that is writing the console log up to 10 seconds
        # to catch up and exit.  If it hasn't done so by then, it is very
        # likely stuck in readline() because it spawed a child that is
        # holding stdout or stderr open.
        if t:
            t.join(10)
            with Console(zuul_log_id) as console:
                if t.isAlive():
                    console.addLine("[Zuul] standard output/error still open "
                                    "after child exited")
            # ZUUL: stdout and stderr are in the console log file
            # ZUUL: return the saved log lines so we can ship them back
            stdout = b('').join(_log_lines)
        else:
            stdout = b('')
        stderr = b('')
    except (OSError, IOError) as e:
        self.log("Error Executing CMD:%s Exception:%s" % (clean_args, to_native(e)))
        # ZUUL: store fail_json_kwargs and fail later in finally
        fail_json_kwargs = dict(rc=e.errno, msg=to_native(e), cmd=clean_args)
    except Exception as e:
        self.log("Error Executing CMD:%s Exception:%s" % (clean_args, to_native(traceback.format_exc())))
        # ZUUL: store fail_json_kwargs and fail later in finally
        fail_json_kwargs = dict(rc=257, msg=to_native(e), exception=traceback.format_exc(), cmd=clean_args)
    finally:
        # Always record the task exit code (real or failure rc) in the
        # console log before potentially failing out below.
        if t:
            with Console(zuul_log_id) as console:
                if t.isAlive():
                    console.addLine("[Zuul] standard output/error still open "
                                    "after child exited")
                if fail_json_kwargs:
                    # we hit an exception and need to use the rc from
                    # fail_json_kwargs
                    rc = fail_json_kwargs['rc']
                console.addLine("[Zuul] Task exit code: %s\n" % rc)

        if fail_json_kwargs:
            self.fail_json(**fail_json_kwargs)

    # Restore env settings
    for key, val in old_env_vals.items():
        if val is None:
            del os.environ[key]
        else:
            os.environ[key] = val

    if old_umask:
        os.umask(old_umask)

    if rc != 0 and check_rc:
        msg = heuristic_log_sanitize(stderr.rstrip(), self.no_log_values)
        self.fail_json(cmd=clean_args, rc=rc, stdout=stdout, stderr=stderr, msg=msg)

    # reset the pwd
    os.chdir(prev_dir)

    if encoding is not None:
        return (rc, to_native(stdout, encoding=encoding, errors=errors),
                to_native(stderr, encoding=encoding, errors=errors))
    return (rc, stdout, stderr)
def daemonize(module, cmd):
    '''
    Execute a command while detaching as a daemon, returns rc, stdout, and stderr.

    :arg module: is an AnsibleModule object, used for it's utility methods
    :arg cmd: is a list or string representing the command and options to run

    This is complex because daemonization is hard for people.
    What we do is daemonize a part of this module, the daemon runs the
    command, picks up the return code and output, and returns it to the
    main process.
    '''

    # init some vars
    chunk = 4096  # FIXME: pass in as arg?
    errors = 'surrogate_or_strict'

    # start it!
    # The pipe carries the pickled (rc, stdout, stderr) result from the
    # daemonized child back to this parent process.
    try:
        pipe = os.pipe()
        pid = os.fork()
    except OSError:
        module.fail_json(msg="Error while attempting to fork: %s", exception=traceback.format_exc())

    # we don't do any locking as this should be a unique module/process
    if pid == 0:
        os.close(pipe[0])

        # Set stdin/stdout/stderr to /dev/null
        fd = os.open(os.devnull, os.O_RDWR)

        # clone stdin/out/err
        for num in range(3):
            if fd != num:
                os.dup2(fd, num)

        # close otherwise
        if fd not in range(3):
            os.close(fd)

        # Make us a daemon (classic double fork + setsid so the command
        # survives the parent and has no controlling terminal)
        pid = os.fork()

        # end if not in child
        if pid > 0:
            os._exit(0)

        # get new process session and detach
        sid = os.setsid()
        if sid == -1:
            module.fail_json(msg="Unable to detach session while daemonizing")

        # avoid possible problems with cwd being removed
        os.chdir("/")

        pid = os.fork()
        if pid > 0:
            os._exit(0)

        # if command is string deal with py2 vs py3 conversions for shlex
        if not isinstance(cmd, list):
            if PY2:
                cmd = shlex.split(to_bytes(cmd, errors=errors))
            else:
                cmd = shlex.split(to_text(cmd, errors=errors))

        # make sure we always use byte strings
        run_cmd = []
        for c in cmd:
            run_cmd.append(to_bytes(c, errors=errors))

        # execute the command in forked process
        p = subprocess.Popen(run_cmd, shell=False, stdout=subprocess.PIPE,
                             stderr=subprocess.PIPE,
                             preexec_fn=lambda: os.close(pipe[1]))
        fds = [p.stdout, p.stderr]

        # loop reading output till its done
        # BUGFIX: was `p.sterr` (AttributeError in the daemonized child);
        # must be `p.stderr`, matching the sibling daemonize() implementation.
        output = {p.stdout: b(""), p.stderr: b("")}
        while fds:
            rfd, wfd, efd = select.select(fds, [], fds, 1)
            if (rfd + wfd + efd) or p.poll():
                for out in fds:
                    if out in rfd:
                        data = os.read(out.fileno(), chunk)
                        if not data:
                            # EOF on this stream; stop selecting on it.
                            fds.remove(out)
                        output[out] += b(data)

        # even after fds close, we might want to wait for pid to die
        p.wait()

        # Return a pickled data of parent
        return_data = pickle.dumps([p.returncode, to_text(output[p.stdout]), to_text(output[p.stderr])],
                                   protocol=pickle.HIGHEST_PROTOCOL)
        os.write(pipe[1], to_bytes(return_data, errors=errors))

        # clean up
        os.close(pipe[1])
        os._exit(0)

    elif pid == -1:
        module.fail_json(msg="Unable to fork, no exception thrown, probably due to lack of resources, check logs.")

    else:
        # in parent
        os.close(pipe[1])
        os.waitpid(pid, 0)

        # Grab response data after child finishes
        return_data = b("")
        while True:
            rfd, wfd, efd = select.select([pipe[0]], [], [pipe[0]])
            if pipe[0] in rfd:
                data = os.read(pipe[0], chunk)
                if not data:
                    break
                return_data += b(data)

        # Note: no need to specify encoding on py3 as this module sends the
        # pickle to itself (thus same python interpreter so we aren't mixing
        # py2 and py3)
        return pickle.loads(to_bytes(return_data, errors=errors))
def http_request(self, req):
    """Pre-flight SSL validation handler (duplicate of the earlier variant):
    opens a throwaway connection to the target host (optionally through an
    HTTP CONNECT proxy), verifies the server certificate against the
    configured CA bundle, then returns the request unchanged.

    Raises ProxyError / ConnectionError, or calls
    build_ssl_validation_error() on certificate failure.
    """
    tmp_ca_cert_path, to_add_ca_cert_path, paths_checked = self.get_ca_certs()
    https_proxy = os.environ.get('https_proxy')
    context = None
    if HAS_SSLCONTEXT:
        context = self._make_context(to_add_ca_cert_path)

    # Detect if 'no_proxy' environment variable is set and if our URL is included
    use_proxy = self.detect_no_proxy(req.get_full_url())

    if not use_proxy:
        # ignore proxy settings for this host request
        return req

    try:
        s = socket.socket(socket.AF_INET, socket.SOCK_STREAM)
        if https_proxy:
            proxy_parts = generic_urlparse(urlparse(https_proxy))
            port = proxy_parts.get('port') or 443
            s.connect((proxy_parts.get('hostname'), port))
            if proxy_parts.get('scheme') == 'http':
                # Tunnel through the proxy with an HTTP CONNECT request.
                s.sendall(self.CONNECT_COMMAND % (self.hostname, self.port))
                if proxy_parts.get('username'):
                    credentials = "%s:%s" % (proxy_parts.get('username', ''),
                                             proxy_parts.get('password', ''))
                    s.sendall(
                        b('Proxy-Authorization: Basic %s\r\n') % base64.b64encode(
                            to_bytes(credentials, errors='surrogate_or_strict')).strip())
                s.sendall(b('\r\n'))
                # Read until end of the proxy's response headers.
                connect_result = b("")
                while connect_result.find(b("\r\n\r\n")) <= 0:
                    connect_result += s.recv(4096)
                    # 128 kilobytes of headers should be enough for everyone.
                    if len(connect_result) > 131072:
                        raise ProxyError('Proxy sent too verbose headers. Only 128KiB allowed.')
                self.validate_proxy_response(connect_result)
                if context:
                    ssl_s = context.wrap_socket(s, server_hostname=self.hostname)
                elif HAS_URLLIB3_SNI_SUPPORT:
                    ssl_s = ssl_wrap_socket(s, ca_certs=tmp_ca_cert_path,
                                            cert_reqs=ssl.CERT_REQUIRED,
                                            ssl_version=PROTOCOL,
                                            server_hostname=self.hostname)
                else:
                    # No SNI support: wrap and verify the hostname manually.
                    ssl_s = ssl.wrap_socket(s, ca_certs=tmp_ca_cert_path,
                                            cert_reqs=ssl.CERT_REQUIRED,
                                            ssl_version=PROTOCOL)
                    match_hostname(ssl_s.getpeercert(), self.hostname)
            else:
                raise ProxyError(
                    'Unsupported proxy scheme: %s. Currently ansible only supports HTTP proxies.'
                    % proxy_parts.get('scheme'))
        else:
            s.connect((self.hostname, self.port))
            if context:
                ssl_s = context.wrap_socket(s, server_hostname=self.hostname)
            elif HAS_URLLIB3_SNI_SUPPORT:
                ssl_s = ssl_wrap_socket(s, ca_certs=tmp_ca_cert_path,
                                        cert_reqs=ssl.CERT_REQUIRED,
                                        ssl_version=PROTOCOL,
                                        server_hostname=self.hostname)
            else:
                # No SNI support: wrap and verify the hostname manually.
                ssl_s = ssl.wrap_socket(s, ca_certs=tmp_ca_cert_path,
                                        cert_reqs=ssl.CERT_REQUIRED,
                                        ssl_version=PROTOCOL)
                match_hostname(ssl_s.getpeercert(), self.hostname)
        # close the ssl connection
        #ssl_s.unwrap()
        s.close()
    except (ssl.SSLError, socket.error):
        e = get_exception()
        # fail if we tried all of the certs but none worked
        if 'connection refused' in str(e).lower():
            raise ConnectionError('Failed to connect to %s:%s.' % (self.hostname, self.port))
        else:
            build_ssl_validation_error(self.hostname, self.port, paths_checked)
    except CertificateError:
        build_ssl_validation_error(self.hostname, self.port, paths_checked)

    try:
        # cleanup the temp file created, don't worry
        # if it fails for some reason
        os.remove(tmp_ca_cert_path)
    except:
        pass

    try:
        # cleanup the temp file created, don't worry
        # if it fails for some reason
        if to_add_ca_cert_path:
            os.remove(to_add_ca_cert_path)
    except:
        pass

    return req
def main():
    """Entry point for the file-style module: manage files, directories,
    symlinks, hardlinks, and touch/absent states on the remote host."""
    module = AnsibleModule(
        argument_spec=dict(
            state=dict(choices=['file', 'directory', 'link', 'hard', 'touch', 'absent'], default=None),
            path=dict(aliases=['dest', 'name'], required=True, type='path'),
            original_basename=dict(required=False),  # Internal use only, for recursive ops
            recurse=dict(default=False, type='bool'),
            force=dict(required=False, default=False, type='bool'),
            diff_peek=dict(default=None),  # Internal use only, for internal checks in the action plugins
            validate=dict(required=False, default=None),  # Internal use only, for template and copy
            src=dict(required=False, default=None, type='path'),
        ),
        add_file_common_args=True,
        supports_check_mode=True
    )

    params = module.params
    state = params['state']
    force = params['force']
    diff_peek = params['diff_peek']
    src = params['src']
    b_src = to_bytes(src, errors='surrogate_or_strict')
    follow = params['follow']

    # modify source as we later reload and pass, specially relevant when used by other modules.
    path = params['path']
    b_path = to_bytes(path, errors='surrogate_or_strict')

    # short-circuit for diff_peek: only report whether the file looks binary
    # (NUL byte in the first 8KiB) and exit without changing anything.
    if diff_peek is not None:
        appears_binary = False
        try:
            f = open(b_path, 'rb')
            head = f.read(8192)
            f.close()
            if b("\x00") in head:
                appears_binary = True
        except:
            pass
        module.exit_json(path=path, changed=False, appears_binary=appears_binary)

    prev_state = get_state(b_path)

    # state should default to file, but since that creates many conflicts,
    # default to 'current' when it exists.
    if state is None:
        if prev_state != 'absent':
            state = prev_state
        else:
            state = 'file'

    # source is both the source of a symlink or an informational passing of the src for a template module
    # or copy module, even if this module never uses it, it is needed to key off some things
    if src is None:
        if state in ('link', 'hard'):
            if follow and state == 'link':
                # use the current target of the link as the source
                src = to_native(os.path.realpath(b_path), errors='strict')
                b_src = to_bytes(os.path.realpath(b_path), errors='strict')
            else:
                module.fail_json(msg='src and dest are required for creating links')

    # original_basename is used by other modules that depend on file.
    if os.path.isdir(b_path) and state not in ("link", "absent"):
        basename = None
        if params['original_basename']:
            basename = params['original_basename']
        elif src is not None:
            basename = os.path.basename(src)
        if basename:
            params['path'] = path = os.path.join(path, basename)
            b_path = to_bytes(path, errors='surrogate_or_strict')

    # make sure the target path is a directory when we're doing a recursive operation
    recurse = params['recurse']
    if recurse and state != 'directory':
        module.fail_json(path=path, msg="recurse option requires state to be 'directory'")

    file_args = module.load_file_common_arguments(params)

    changed = False
    diff = {'before': {'path': path},
            'after': {'path': path},
            }

    state_change = False
    if prev_state != state:
        diff['before']['state'] = prev_state
        diff['after']['state'] = state
        state_change = True

    if state == 'absent':
        if state_change:
            if not module.check_mode:
                if prev_state == 'directory':
                    try:
                        shutil.rmtree(b_path, ignore_errors=False)
                    except Exception:
                        e = get_exception()
                        module.fail_json(msg="rmtree failed: %s" % str(e))
                else:
                    try:
                        os.unlink(b_path)
                    except Exception:
                        e = get_exception()
                        module.fail_json(path=path, msg="unlinking failed: %s " % str(e))
            module.exit_json(path=path, changed=True, diff=diff)
        else:
            module.exit_json(path=path, changed=False)

    elif state == 'file':
        if state_change:
            if follow and prev_state == 'link':
                # follow symlink and operate on original
                b_path = os.path.realpath(b_path)
                path = to_native(b_path, errors='strict')
                prev_state = get_state(b_path)
                file_args['path'] = path

        if prev_state not in ('file', 'hard'):
            # file is not absent and any other state is a conflict
            module.fail_json(path=path, msg='file (%s) is %s, cannot continue' % (path, prev_state))

        changed = module.set_fs_attributes_if_different(file_args, changed, diff)
        module.exit_json(path=path, changed=changed, diff=diff)

    elif state == 'directory':
        if follow and prev_state == 'link':
            b_path = os.path.realpath(b_path)
            path = to_native(b_path, errors='strict')
            prev_state = get_state(b_path)

        if prev_state == 'absent':
            if module.check_mode:
                module.exit_json(changed=True, diff=diff)
            changed = True
            curpath = ''

            try:
                # Split the path so we can apply filesystem attributes recursively
                # from the root (/) directory for absolute paths or the base path
                # of a relative path.  We can then walk the appropriate directory
                # path to apply attributes.
                for dirname in path.strip('/').split('/'):
                    curpath = '/'.join([curpath, dirname])
                    # Remove leading slash if we're creating a relative path
                    if not os.path.isabs(path):
                        curpath = curpath.lstrip('/')
                    b_curpath = to_bytes(curpath, errors='surrogate_or_strict')
                    if not os.path.exists(b_curpath):
                        try:
                            os.mkdir(b_curpath)
                        except OSError:
                            ex = get_exception()
                            # Possibly something else created the dir since the os.path.exists
                            # check above. As long as it's a dir, we don't need to error out.
                            if not (ex.errno == errno.EEXIST and os.path.isdir(b_curpath)):
                                raise
                        tmp_file_args = file_args.copy()
                        tmp_file_args['path'] = curpath
                        changed = module.set_fs_attributes_if_different(tmp_file_args, changed, diff)
            except Exception:
                e = get_exception()
                module.fail_json(path=path, msg='There was an issue creating %s as requested: %s' % (curpath, str(e)))

        # We already know prev_state is not 'absent', therefore it exists in some form.
        elif prev_state != 'directory':
            module.fail_json(path=path, msg='%s already exists as a %s' % (path, prev_state))

        changed = module.set_fs_attributes_if_different(file_args, changed, diff)

        if recurse:
            changed |= recursive_set_attributes(module, to_bytes(file_args['path'], errors='surrogate_or_strict'), follow, file_args)

        module.exit_json(path=path, changed=changed, diff=diff)

    elif state in ('link', 'hard'):

        # Resolve the directory that a relative src is interpreted against.
        if os.path.isdir(b_path) and not os.path.islink(b_path):
            relpath = path
        else:
            b_relpath = os.path.dirname(b_path)
            relpath = to_native(b_relpath, errors='strict')

        absrc = os.path.join(relpath, src)
        b_absrc = to_bytes(absrc, errors='surrogate_or_strict')
        if not os.path.exists(b_absrc) and not force:
            module.fail_json(path=path, src=src, msg='src file does not exist, use "force=yes" if you really want to create the link: %s' % absrc)

        if state == 'hard':
            if not os.path.isabs(b_src):
                module.fail_json(msg="absolute paths are required")
        elif prev_state == 'directory':
            if not force:
                module.fail_json(path=path, msg='refusing to convert between %s and %s for %s' % (prev_state, state, path))
            elif len(os.listdir(b_path)) > 0:
                # refuse to replace a directory that has files in it
                module.fail_json(path=path, msg='the directory %s is not empty, refusing to convert it' % path)
        elif prev_state in ('file', 'hard') and not force:
            module.fail_json(path=path, msg='refusing to convert between %s and %s for %s' % (prev_state, state, path))

        # Determine whether a change is needed based on what's at the
        # destination now.
        if prev_state == 'absent':
            changed = True
        elif prev_state == 'link':
            b_old_src = os.readlink(b_path)
            if b_old_src != b_src:
                changed = True
        elif prev_state == 'hard':
            if not (state == 'hard' and os.stat(b_path).st_ino == os.stat(b_src).st_ino):
                changed = True
                if not force:
                    module.fail_json(dest=path, src=src, msg='Cannot link, different hard link exists at destination')
        elif prev_state in ('file', 'directory'):
            changed = True
            if not force:
                module.fail_json(dest=path, src=src, msg='Cannot link, %s exists at destination' % prev_state)
        else:
            module.fail_json(dest=path, src=src, msg='unexpected position reached')

        if changed and not module.check_mode:
            if prev_state != 'absent':
                # try to replace atomically: build the link under a temp name
                # in the same directory, then rename over the destination.
                b_tmppath = to_bytes(os.path.sep).join(
                    [os.path.dirname(b_path), to_bytes(".%s.%s.tmp" % (os.getpid(), time.time()))]
                )
                try:
                    if prev_state == 'directory' and (state == 'hard' or state == 'link'):
                        os.rmdir(b_path)
                    if state == 'hard':
                        os.link(b_src, b_tmppath)
                    else:
                        os.symlink(b_src, b_tmppath)
                    os.rename(b_tmppath, b_path)
                except OSError:
                    e = get_exception()
                    if os.path.exists(b_tmppath):
                        os.unlink(b_tmppath)
                    module.fail_json(path=path, msg='Error while replacing: %s' % to_native(e, nonstring='simplerepr'))
            else:
                try:
                    if state == 'hard':
                        os.link(b_src, b_path)
                    else:
                        os.symlink(b_src, b_path)
                except OSError:
                    e = get_exception()
                    module.fail_json(path=path, msg='Error while linking: %s' % to_native(e, nonstring='simplerepr'))

        if module.check_mode and not os.path.exists(b_path):
            module.exit_json(dest=path, src=src, changed=changed, diff=diff)

        changed = module.set_fs_attributes_if_different(file_args, changed, diff)
        module.exit_json(dest=path, src=src, changed=changed, diff=diff)

    elif state == 'touch':
        if not module.check_mode:

            if prev_state == 'absent':
                try:
                    open(b_path, 'wb').close()
                except OSError:
                    e = get_exception()
                    module.fail_json(path=path, msg='Error, could not touch target: %s' % to_native(e, nonstring='simplerepr'))
            elif prev_state in ('file', 'directory', 'hard'):
                try:
                    os.utime(b_path, None)
                except OSError:
                    e = get_exception()
                    module.fail_json(path=path, msg='Error while touching existing target: %s' % to_native(e, nonstring='simplerepr'))
            else:
                module.fail_json(msg='Cannot touch other than files, directories, and hardlinks (%s is %s)' % (path, prev_state))

            try:
                module.set_fs_attributes_if_different(file_args, True, diff)
            except SystemExit:
                e = get_exception()
                if e.code:
                    # We take this to mean that fail_json() was called from
                    # somewhere in basic.py
                    if prev_state == 'absent':
                        # If we just created the file we can safely remove it
                        os.remove(b_path)
                raise e

        module.exit_json(dest=path, changed=True, diff=diff)

    module.fail_json(path=path, msg='unexpected position reached')
def run_module():
    """Run ``ansible-playbook`` for a playbook described by the module options.

    Translates the module parameters into ``ansible-playbook`` CLI flags,
    executes the command from the playbook's own directory (so relative
    roles/includes resolve), and exits via ``module.exit_json`` with the
    command line, stdout, stderr, return code and timing information.
    Fails via ``module.fail_json`` when the playbook or inventory path
    does not exist.
    """
    module = AnsibleModule(
        argument_spec=dict(
            path=dict(required=True, type='path', aliases=['name']),
            inventory_hosts=dict(type='list'),
            inventory_path=dict(type='path'),
            limit=dict(type='list'),
            extra_vars=dict(type='dict'),
            extra_vars_files=dict(type='list'),
            tags=dict(type='list'),
            skip_tags=dict(type='list'),
            connection_method=dict(type='str', choices=[
                'smart', 'buildah', 'chroot', 'docker', 'funcd', 'iocage',
                'jail', 'kubectl', 'libvirt_lxc', 'local', 'lxc', 'lxd',
                'netconf', 'network_cli', 'oc', 'paramiko_ssh', 'persistent',
                'saltstack', 'ssh', 'winrm', 'zone'
            ], default='smart'),
            connection_user=dict(type='str'),
            connection_timeout=dict(type='int', default=10),
            become=dict(type='bool', default=False),
            become_method=dict(type='str', choices=[
                'sudo', 'su', 'pbrun', 'pfexec', 'doas', 'dzdo', 'ksu',
                'runas', 'pmrun', 'enable'
            ], default='sudo'),
            become_user=dict(type='str', default='root'),
            ssh_private_key_file=dict(type='path'),
            ssh_common_args=dict(type='dict'),
            ssh_extra_args=dict(type='dict'),
            scp_extra_args=dict(type='dict'),
            sftp_extra_args=dict(type='dict'),
            module_path=dict(type='list'),
            vault_id=dict(type='str'),
            vault_password_file=dict(type='path'),
            flush_cache=dict(type='bool', default=False),
            force_handlers=dict(type='bool', default=False),
            start_at_task=dict(type='str'),
            forks=dict(type='int', default=5)),
        mutually_exclusive=[['inventory_hosts', 'inventory_path']],
        required_if=dict(),
        supports_check_mode=True)

    p = module.params
    path = p['path']

    if not os.path.exists(path):
        module.fail_json(rc=256, msg="Playbook '%s' does not exist." % path)

    def _ssh_option_string(arg_dict):
        # Render a dict of SSH client options as space-separated '-o k="v"'
        # pairs.  (The original concatenated the pairs with no separator,
        # producing a malformed string for more than one option.)
        return ' '.join('-o %s="%s"' % (k, v) for k, v in arg_dict.items())

    command = 'ansible-playbook ' + path

    if p['inventory_hosts']:
        # Trailing comma tells ansible this is an inline host list, not a file.
        command += ' --inventory ' + ','.join(p['inventory_hosts']) + ','
    if p['inventory_path']:
        if os.path.exists(p['inventory_path']):
            command += ' --inventory ' + p['inventory_path']
        else:
            module.fail_json(rc=256, msg="Inventory path is not a valid path.")
    if p['limit']:
        command += ' --limit ' + ','.join(p['limit'])
    if p['extra_vars']:
        for k, v in p['extra_vars'].items():
            command += " --extra-vars '%s=%s'" % (str(k), str(v))
    if p['extra_vars_files']:
        for evfile in p['extra_vars_files']:
            command += " --extra-vars @%s" % evfile
    if p['tags']:
        command += " --tags " + ','.join(p['tags'])
    if p['skip_tags']:
        command += " --skip-tags " + ','.join(p['skip_tags'])
    if p['connection_method']:
        command += " --connection %s" % p['connection_method']
    if p['connection_user']:
        command += " --user %s" % p['connection_user']
    if p['connection_timeout']:
        command += " --timeout %s" % p['connection_timeout']
    if p['become']:
        command += " --become --become-method %s --become-user %s" % (
            p['become_method'], p['become_user'])
    if p['ssh_private_key_file']:
        command += " --private-key %s" % p['ssh_private_key_file']
    # BUGFIX: the scp/sftp branches previously iterated ssh_common_args and
    # emitted --ssh-common-args (copy/paste error).  Each dict now feeds its
    # own flag, and the option string is quoted so embedded spaces survive
    # argument splitting.
    if p['ssh_common_args']:
        command += " --ssh-common-args '%s'" % _ssh_option_string(p['ssh_common_args'])
    if p['ssh_extra_args']:
        command += " --ssh-extra-args '%s'" % _ssh_option_string(p['ssh_extra_args'])
    if p['scp_extra_args']:
        command += " --scp-extra-args '%s'" % _ssh_option_string(p['scp_extra_args'])
    if p['sftp_extra_args']:
        command += " --sftp-extra-args '%s'" % _ssh_option_string(p['sftp_extra_args'])
    if p['module_path']:
        command += " --module-path %s" % ','.join(p['module_path'])
    if p['vault_id']:
        command += " --vault-id %s" % p['vault_id']
    if p['vault_password_file']:
        command += " --vault-password-file %s" % p['vault_password_file']
    if p['flush_cache']:
        command += " --flush-cache"
    if p['force_handlers']:
        command += " --force-handlers"
    if p['start_at_task']:
        command += " --start-at-task '%s'" % p['start_at_task']
    if p['forks']:
        # BUGFIX: the ansible-playbook flag is --forks, not --fork.
        command += " --forks %s" % str(p['forks'])

    # Execute from the playbook's directory so relative paths inside it work.
    os.chdir(os.path.dirname(os.path.realpath(path)))

    startd = datetime.datetime.now()
    rc, out, err = module.run_command(command)
    endd = datetime.datetime.now()

    # run_command may return None for missing streams; normalise to bytes.
    if out is None:
        out = b('')
    if err is None:
        err = b('')

    module.exit_json(
        cmd=command,
        stdout=out.rstrip(b("\r\n")),
        stderr=err.rstrip(b("\r\n")),
        rc=rc,
        start=str(startd),
        end=str(endd),
        delta=str(endd - startd),
        changed=True,
    )