Example #1
def main():

    module = AnsibleModule(
        argument_spec=dict(path=dict(type='path',
                                     required=True,
                                     aliases=['dest']),
                           section=dict(type='str', required=True),
                           option=dict(type='str'),
                           value=dict(type='str'),
                           backup=dict(type='bool', default=False),
                           state=dict(type='str',
                                      default='present',
                                      choices=['absent', 'present']),
                           no_extra_spaces=dict(type='bool', default=False),
                           allow_no_value=dict(type='bool', default=False),
                           create=dict(type='bool', default=True)),
        add_file_common_args=True,
        supports_check_mode=True,
    )

    path = module.params['path']
    section = module.params['section']
    option = module.params['option']
    value = module.params['value']
    state = module.params['state']
    backup = module.params['backup']
    no_extra_spaces = module.params['no_extra_spaces']
    allow_no_value = module.params['allow_no_value']
    create = module.params['create']

    if state == 'present' and not allow_no_value and value is None:
        module.fail_json(
            msg="Parameter 'value' must not be empty if state=present and allow_no_value=False"
        )

    (changed, backup_file, diff,
     msg) = do_ini(module, path, section, option, value, state, backup,
                   no_extra_spaces, create, allow_no_value)

    if not module.check_mode and os.path.exists(path):
        file_args = module.load_file_common_arguments(module.params)
        changed = module.set_fs_attributes_if_different(file_args, changed)

    results = dict(
        changed=changed,
        diff=diff,
        msg=msg,
        path=path,
    )
    if backup_file is not None:
        results['backup_file'] = backup_file

    # Mission complete
    module.exit_json(**results)
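A note on the recurring pattern above: nearly every example in this collection finishes by applying the common file arguments (owner, group, mode, and so on) after the content change, but only outside check mode and only when the file exists. A minimal sketch of that pattern as a reusable helper, assuming a standard AnsibleModule instance (the helper name is illustrative, not from the original source):

import os

def apply_file_attributes(module, path, changed):
    """Apply owner/group/mode and other file common args if the file exists."""
    if not module.check_mode and os.path.exists(path):
        file_args = module.load_file_common_arguments(module.params)
        # fold attribute changes into the module's overall changed flag
        changed = module.set_fs_attributes_if_different(file_args, changed)
    return changed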
Example #2
def main():

    # define available arguments/parameters a user can pass to the module
    module_args = dict(
        dest=dict(type='path', required=True, aliases=['path']),
        key=dict(type='str', required=True),
        value=dict(type='raw', required=False),
        state=dict(type='str',
                   required=False,
                   choices=['present', 'absent'],
                   default='present'),
        backup=dict(type='bool', required=False, default=False),
        create=dict(type='bool',
                    required=False,
                    default=True,
                    aliases=['force']),
    )

    # instantiate ansible module
    module = AnsibleModule(
        argument_spec=module_args,
        add_file_common_args=True,
        supports_check_mode=False  # TODO support check mode
    )

    # fail if any required external library failed to import
    if MISSING_LIB:
        module.fail_json(msg=missing_required_lib(MISSING_LIB.name),
                         exception=MISSING_LIB_ERROR)

    # do the stuff
    changed, backup_file, diff, msg = run_module(module, **module.params)

    # apply file system args e.g. owner/group/mode
    if os.path.exists(module.params['dest']):
        file_args = module.load_file_common_arguments(module.params)
        changed = module.set_fs_attributes_if_different(file_args, changed)

    # gather results
    results = dict(changed=changed,
                   diff=diff,
                   path=module.params['dest'],
                   msg=msg)

    if backup_file:
        results['backup_file'] = backup_file

    # Done!
    module.exit_json(**results)
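MISSING_LIB and MISSING_LIB_ERROR are not defined in this snippet; they are presumably set at import time by a try/except around a third-party import. A hedged sketch of such a guard (the tomlkit dependency and the _MissingLib namedtuple are illustrative assumptions, not the original module's names):

import traceback
from collections import namedtuple

_MissingLib = namedtuple('_MissingLib', ['name'])

MISSING_LIB = None
MISSING_LIB_ERROR = None
try:
    import tomlkit  # hypothetical third-party dependency
except ImportError:
    # record which library is missing, plus the traceback for fail_json()
    MISSING_LIB = _MissingLib(name='tomlkit')
    MISSING_LIB_ERROR = traceback.format_exc()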
Example #3
File: yaml_file.py  Project: 10sr/junks
def main():
    module = AnsibleModule(
        argument_spec=dict(
            path=dict(required=True, aliases=['dest'], type='path'),
            data=dict(required=True, aliases=['content'], type='raw'),
            indent_size=dict(default=2, type='int'),
            backup=dict(default=False, type='bool'),
            create=dict(default=True, type='bool')
        ),
        add_file_common_args=True,
        supports_check_mode=True
    )

    path = module.params['path']
    data = module.params['data']
    indent_size = module.params['indent_size']
    backup = module.params['backup']
    create = module.params['create']

    try:
        result = do_yaml(
            module=module,
            filename=path,
            data=data,
            indent_size=indent_size,
            backup=backup,
            create=create
        )
        if not module.check_mode and os.path.exists(path):
            file_args = module.load_file_common_arguments(module.params)
            result['changed'] = module.set_fs_attributes_if_different(
                file_args, result['changed']
            )

    except _TaskFailedException as e:
        module.fail_json(
            rc=e.rc,
            msg=e.msg
        )
        return

    module.exit_json(
        changed=result['changed'],
        msg=result['msg'],
        path=path,
        backup_file=result['backup_file']
    )
    return
Example #4
def main():
    module = AnsibleModule(
        argument_spec=dict(path=dict(type='path',
                                     required=True,
                                     aliases=['dest']),
                           patch=dict(type='list'),
                           resolve=dict(type='str'),
                           backup=dict(type='bool', default=False),
                           backup_file=dict(type='path', default=None),
                           create=dict(type='bool', default=True)),
        add_file_common_args=True,
        supports_check_mode=True,
    )

    path = module.params['path']
    patch = module.params['patch']
    resolve = module.params['resolve']
    create = module.params['create']
    backup = module.params['backup']
    backup_file = module.params['backup_file']
    changed, msg, diff, resolved = False, None, None, None

    if path and patch:
        changed, msg, diff, backup_file = do_json_config(
            module,
            path,
            patch,
            backup=backup,
            backup_file=backup_file,
            create=create)
    elif path and resolve:
        changed, msg, resolved = do_resolve_node(module, path, resolve)

    if not module.check_mode and os.path.exists(path):
        file_args = module.load_file_common_arguments(module.params)
        changed = module.set_fs_attributes_if_different(file_args, changed)

    results = dict(
        changed=changed,
        msg=msg,
        path=path,
        backup=backup,
        backup_file=backup_file,
        diff=diff,
        resolve=resolved,
    )

    module.exit_json(**results)
Example #5
def main():

    module = AnsibleModule(
        argument_spec=dict(
            path=dict(type='path', required=True, aliases=['dest']),
            section=dict(type='str', required=True),
            option=dict(type='str'),
            value=dict(type='str'),
            backup=dict(type='bool', default=False),
            state=dict(type='str', default='present', choices=['absent', 'present']),
            no_extra_spaces=dict(type='bool', default=False),
            allow_no_value=dict(type='bool', default=False, required=False),
            create=dict(type='bool', default=True)
        ),
        add_file_common_args=True,
        supports_check_mode=True,
    )

    path = module.params['path']
    section = module.params['section']
    option = module.params['option']
    value = module.params['value']
    state = module.params['state']
    backup = module.params['backup']
    no_extra_spaces = module.params['no_extra_spaces']
    allow_no_value = module.params['allow_no_value']
    create = module.params['create']

    (changed, backup_file, diff, msg) = do_ini(module, path, section, option, value, state, backup, no_extra_spaces, create, allow_no_value)

    if not module.check_mode and os.path.exists(path):
        file_args = module.load_file_common_arguments(module.params)
        changed = module.set_fs_attributes_if_different(file_args, changed)

    results = dict(
        changed=changed,
        diff=diff,
        msg=msg,
        path=path,
    )
    if backup_file is not None:
        results['backup_file'] = backup_file

    # Mission complete
    module.exit_json(**results)
Example #6
def main():
    module = AnsibleModule(
        argument_spec=dict(
            path=dict(type='path', required=True),
            state=dict(type='str', default='present',
                       choices=['absent', 'present', 'local', 'exported']),
            create=dict(type='bool', default=False),
            backup=dict(type='bool', default=False),
            var=dict(type='str', required=True, aliases=['name', 'variable']),
            val=dict(type='str', default='', aliases=['content', 'value']),
        ),
        add_file_common_args=True,
        supports_check_mode=True,
    )

    params = module.params
    create = params['create']
    backup = params['backup']
    path = params['path']
    var = params['var']
    val = params['val']
    state = params['state']

    b_path = to_bytes(path, errors='surrogate_or_strict')

    if os.path.isdir(b_path):
        module.fail_json(rc=256, msg='Path %s is a directory' % path)

    (changed, backup_file, diff, msg) = apply(module, path, var, val, state, create, backup)

    if not module.check_mode and os.path.exists(path):
        file_args = module.load_file_common_arguments(module.params)
        changed = module.set_fs_attributes_if_different(file_args, changed)

    results = dict(
        changed=changed,
        diff=diff,
        msg=msg,
        path=path,
    )

    if backup_file is not None:
        results['backup_file'] = backup_file

    module.exit_json(**results)
Example #7
def main():
    module = AnsibleModule(
        argument_spec={
            'dest': {'type': 'path'},
            'call_fs_attributes': {'type': 'bool', 'default': True},
        },
        add_file_common_args=True,
    )

    results = {}

    # delete=False: atomic_move() renames the temp file away, so the
    # NamedTemporaryFile cleanup must not try to unlink it on close
    with tempfile.NamedTemporaryFile(delete=False) as tf:
        file_args = module.load_file_common_arguments(module.params)
        module.atomic_move(tf.name, module.params['dest'])

        if module.params['call_fs_attributes']:
            results['changed'] = module.set_fs_attributes_if_different(file_args, True)

    module.exit_json(**results)
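This example leans on AnsibleModule.atomic_move(), which renames a fully written temporary file over the destination so readers never observe a half-written file. A minimal sketch of the same write pattern with actual content (the helper name is illustrative); the temp file is created in the destination directory so the final rename stays on one filesystem:

import os
import tempfile

def write_atomically(module, dest, data):
    """Write bytes to dest via a temp file in the same directory, then rename."""
    tmp = tempfile.NamedTemporaryFile(dir=os.path.dirname(dest) or '.', delete=False)
    try:
        tmp.write(data)
    finally:
        tmp.close()
    module.atomic_move(tmp.name, dest)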
Example #8
def main():
    # Module settings
    module = AnsibleModule(argument_spec=dict(
        alias=dict(type='list'),
        hostname=dict(),
        ip=dict(),
        path=dict(default='/etc/hosts'),
        state=dict(choices=['present', 'absent'], default='present')),
                           add_file_common_args=True,
                           supports_check_mode=True)

    # Make a shorter variable name
    state = module.params['state']

    # Check presence of the input options
    if (state == 'present' and
        (module.params['ip'] is None or module.params['hostname'] is None)):
        module.fail_json(msg="Options 'hostname' and 'ip' are required.")

    if (state == 'absent' and module.params['ip'] is None
            and module.params['hostname'] is None):
        module.fail_json(msg="Option 'hostname' or 'ip' is required.")

    # Read the hosts file
    hosts = EtcHosts(module)

    changed = False

    # Perform action depending on the state
    if state == 'present':
        changed, diff = hosts.add()
    elif state == 'absent':
        changed, diff = hosts.remove()

    # Change file attributes if needed
    if os.path.isfile(module.params['path']):
        file_args = module.load_file_common_arguments(module.params)
        changed = module.set_fs_attributes_if_different(file_args, changed)

    # Print status of the change
    module.exit_json(changed=changed, state=state, diff=diff)
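EtcHosts.add() and EtcHosts.remove() are not shown, but the diff they return is passed straight to exit_json, so it presumably follows Ansible's standard diff shape, which ansible-playbook --diff renders as a unified diff. A sketch of that structure (the values are illustrative):

# before/after hold the content prior to and following the change;
# the *_header keys label the two sides of the rendered diff
diff = {
    'before': '127.0.0.1 localhost\n',
    'after': '127.0.0.1 localhost\n192.0.2.10 web01\n',
    'before_header': '/etc/hosts',
    'after_header': '/etc/hosts',
}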
Example #9
File: ini_file2.py  Project: xfyecn/cuttle
def main():

    module = AnsibleModule(argument_spec=dict(
        path=dict(required=True, aliases=['dest'], type='path'),
        section=dict(required=True),
        option=dict(required=False),
        value=dict(required=False),
        backup=dict(default=False, type='bool'),
        state=dict(default='present', choices=['present', 'absent']),
        no_extra_spaces=dict(required=False, default=False, type='bool'),
        quote_value=dict(required=False, default=False, type='bool'),
        create=dict(default=True, type='bool')),
                           add_file_common_args=True,
                           supports_check_mode=True)

    path = module.params['path']
    section = module.params['section']
    option = module.params['option']
    value = module.params['value']
    state = module.params['state']
    backup = module.params['backup']
    no_extra_spaces = module.params['no_extra_spaces']
    create = module.params['create']
    quote_value = module.params['quote_value']

    (changed, backup_file, diff,
     msg) = do_ini(module, path, section, option, value, state, backup,
                   no_extra_spaces, create, quote_value)

    if not module.check_mode and os.path.exists(path):
        file_args = module.load_file_common_arguments(module.params)
        changed = module.set_fs_attributes_if_different(file_args, changed)

    results = {'changed': changed, 'msg': msg, 'path': path, 'diff': diff}
    if backup_file is not None:
        results['backup_file'] = backup_file

    # Mission complete
    module.exit_json(**results)
Example #10
def main():

    global module

    module = AnsibleModule(
        argument_spec=dict(
            state=dict(type='str',
                       choices=['absent', 'directory', 'file', 'hard', 'link', 'touch']),
            path=dict(type='path', required=True, aliases=['dest', 'name']),
            _original_basename=dict(type='str'),  # Internal use only, for recursive ops
            recurse=dict(type='bool', default=False),
            force=dict(type='bool', default=False),  # Note: Should not be in file_common_args in future
            follow=dict(type='bool', default=True),  # Note: Different default than file_common_args
            _diff_peek=dict(type='bool'),  # Internal use only, for internal checks in the action plugins
            src=dict(type='path'),  # Note: Should not be in file_common_args in future
            modification_time=dict(type='str'),
            modification_time_format=dict(type='str', default='%Y%m%d%H%M.%S'),
            access_time=dict(type='str'),
            access_time_format=dict(type='str', default='%Y%m%d%H%M.%S'),
        ),
        add_file_common_args=True,
        supports_check_mode=True,
    )

    # When we rewrite basic.py, we will do something similar to this on instantiating an AnsibleModule
    sys.excepthook = _ansible_excepthook
    additional_parameter_handling(module.params)
    params = module.params

    state = params['state']
    recurse = params['recurse']
    force = params['force']
    follow = params['follow']
    path = params['path']
    src = params['src']

    if module.check_mode and state != 'absent':
        file_args = module.load_file_common_arguments(module.params)
        if file_args['owner']:
            check_owner_exists(module, file_args['owner'])
        if file_args['group']:
            check_group_exists(module, file_args['group'])

    timestamps = {}
    timestamps['modification_time'] = keep_backward_compatibility_on_timestamps(
        params['modification_time'], state)
    timestamps['modification_time_format'] = params['modification_time_format']
    timestamps['access_time'] = keep_backward_compatibility_on_timestamps(
        params['access_time'], state)
    timestamps['access_time_format'] = params['access_time_format']

    # short-circuit for diff_peek
    if params['_diff_peek'] is not None:
        appears_binary = execute_diff_peek(
            to_bytes(path, errors='surrogate_or_strict'))
        module.exit_json(path=path,
                         changed=False,
                         appears_binary=appears_binary)

    if state == 'file':
        result = ensure_file_attributes(path, follow, timestamps)
    elif state == 'directory':
        result = ensure_directory(path, follow, recurse, timestamps)
    elif state == 'link':
        result = ensure_symlink(path, src, follow, force, timestamps)
    elif state == 'hard':
        result = ensure_hardlink(path, src, follow, force, timestamps)
    elif state == 'touch':
        result = execute_touch(path, follow, timestamps)
    elif state == 'absent':
        result = ensure_absent(path)

    module.exit_json(**result)
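execute_diff_peek() is not shown here, but Example #12 below contains the same check inline: read the first 8 KiB of the file and treat a NUL byte as evidence that it is binary. A sketch consistent with that inline version:

def execute_diff_peek(b_path):
    """Heuristic binary detection: a NUL byte within the first 8 KiB."""
    appears_binary = False
    try:
        with open(b_path, 'rb') as f:
            head = f.read(8192)
        appears_binary = b'\x00' in head
    except (IOError, OSError):
        pass
    return appears_binary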
Example #11
def main():
    argument_spec = url_argument_spec()
    argument_spec.update(
        dict(dest=dict(required=False, default=None, type='path'),
             url_username=dict(required=False, default=None, aliases=['user']),
             url_password=dict(required=False,
                               default=None,
                               aliases=['password']),
             body=dict(required=False, default=None, type='raw'),
             body_format=dict(required=False,
                              default='raw',
                              choices=['raw', 'json']),
             method=dict(required=False,
                         default='GET',
                         choices=[
                             'GET', 'POST', 'PUT', 'HEAD', 'DELETE', 'OPTIONS',
                             'PATCH', 'TRACE', 'CONNECT', 'REFRESH'
                         ]),
             return_content=dict(required=False, default=False, type='bool'),
             follow_redirects=dict(
                 required=False,
                 default='safe',
                 choices=['all', 'safe', 'none', 'yes', 'no']),
             creates=dict(required=False, default=None, type='path'),
             removes=dict(required=False, default=None, type='path'),
             status_code=dict(required=False, default=[200], type='list'),
             timeout=dict(required=False, default=30, type='int'),
             headers=dict(required=False, type='dict', default={})))

    module = AnsibleModule(argument_spec=argument_spec,
                           check_invalid_arguments=False,
                           add_file_common_args=True)

    url = module.params['url']
    body = module.params['body']
    body_format = module.params['body_format'].lower()
    method = module.params['method']
    dest = module.params['dest']
    return_content = module.params['return_content']
    creates = module.params['creates']
    removes = module.params['removes']
    status_code = [int(x) for x in list(module.params['status_code'])]
    socket_timeout = module.params['timeout']

    dict_headers = module.params['headers']

    if body_format == 'json':
        # Encode the body unless it's a string, then assume it is pre-formatted JSON
        if not isinstance(body, six.string_types):
            body = json.dumps(body)
        lower_header_keys = [key.lower() for key in dict_headers]
        if 'content-type' not in lower_header_keys:
            dict_headers['Content-Type'] = 'application/json'

    # Grab all the http headers. Need this hack since passing multi-values is
    # currently a bit ugly. (e.g. headers='{"Content-Type":"application/json"}')
    for key, value in six.iteritems(module.params):
        if key.startswith("HEADER_"):
            skey = key.replace("HEADER_", "")
            dict_headers[skey] = value

    if creates is not None:
        # do not run the command if the line contains creates=filename
        # and the filename already exists.  This allows idempotence
        # of uri executions.
        if os.path.exists(creates):
            module.exit_json(stdout="skipped, since %s exists" % creates,
                             changed=False,
                             stderr=False,
                             rc=0)

    if removes is not None:
        # do not run the command if the line contains removes=filename
        # and the filename does not exist.  This allows idempotence
        # of uri executions.
        if not os.path.exists(removes):
            module.exit_json(stdout="skipped, since %s does not exist" %
                             removes,
                             changed=False,
                             stderr=False,
                             rc=0)

    # Make the request
    resp, content, dest = uri(module, url, dest, body, body_format, method,
                              dict_headers, socket_timeout)
    resp['status'] = int(resp['status'])

    # Write the file out if requested
    if dest is not None:
        if resp['status'] == 304:
            changed = False
        else:
            write_file(module, url, dest, content)
            # allow file attribute changes
            changed = True
            module.params['path'] = dest
            file_args = module.load_file_common_arguments(module.params)
            file_args['path'] = dest
            changed = module.set_fs_attributes_if_different(file_args, changed)
        resp['path'] = dest
    else:
        changed = False

    # Transmogrify the headers, replacing '-' with '_', since variables don't
    # work with dashes.
    # In python3, the headers are title cased.  Lowercase them to be
    # compatible with the python2 behaviour.
    uresp = {}
    for key, value in six.iteritems(resp):
        ukey = key.replace("-", "_").lower()
        uresp[ukey] = value

    try:
        uresp['location'] = absolute_location(url, uresp['location'])
    except KeyError:
        pass

    # Default content_encoding to try
    content_encoding = 'utf-8'
    if 'content_type' in uresp:
        content_type, params = cgi.parse_header(uresp['content_type'])
        if 'charset' in params:
            content_encoding = params['charset']
        u_content = to_text(content, encoding=content_encoding)
        if 'application/json' in content_type or 'text/json' in content_type:
            try:
                js = json.loads(u_content)
                uresp['json'] = js
            except ValueError:
                pass
    else:
        u_content = to_text(content, encoding=content_encoding)

    if resp['status'] not in status_code:
        uresp['msg'] = 'Status code was not %s: %s' % (status_code,
                                                       uresp.get('msg', ''))
        module.fail_json(content=u_content, **uresp)
    elif return_content:
        module.exit_json(changed=changed, content=u_content, **uresp)
    else:
        module.exit_json(changed=changed, **uresp)
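absolute_location() is not shown; judging by its use above, it resolves a possibly relative Location header against the request URL. A hedged sketch of that behavior using the standard library:

from ansible.module_utils.six.moves.urllib.parse import urljoin

def absolute_location(url, location):
    """Return location as an absolute URL, resolving relative values against url."""
    if '://' in location:
        return location  # already absolute
    return urljoin(url, location)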
Example #12
def main():

    module = AnsibleModule(
        argument_spec=dict(
            state=dict(choices=['file', 'directory', 'link', 'hard', 'touch', 'absent'], default=None),
            path=dict(aliases=['dest', 'name'], required=True, type='path'),
            original_basename=dict(required=False),  # Internal use only, for recursive ops
            recurse=dict(default=False, type='bool'),
            force=dict(required=False, default=False, type='bool'),
            diff_peek=dict(default=None),  # Internal use only, for internal checks in the action plugins
            validate=dict(required=False, default=None),  # Internal use only, for template and copy
            src=dict(required=False, default=None, type='path'),
        ),
        add_file_common_args=True,
        supports_check_mode=True
    )

    params = module.params
    state = params['state']
    force = params['force']
    diff_peek = params['diff_peek']
    src = params['src']
    b_src = to_bytes(src, errors='surrogate_or_strict')
    follow = params['follow']

    # modify source as we later reload and pass, especially relevant when used by other modules.
    path = params['path']
    b_path = to_bytes(path, errors='surrogate_or_strict')

    # short-circuit for diff_peek
    if diff_peek is not None:
        appears_binary = False
        try:
            with open(b_path, 'rb') as f:
                head = f.read(8192)
            if b"\x00" in head:
                appears_binary = True
        except (IOError, OSError):
            pass
        module.exit_json(path=path, changed=False, appears_binary=appears_binary)

    prev_state = get_state(b_path)

    # state should default to 'file', but since that creates many conflicts,
    # default to the current state when the path already exists.
    if state is None:
        if prev_state != 'absent':
            state = prev_state
        else:
            state = 'file'

    # src is either the source of a symlink or an informational value passed along for the template
    # or copy modules; even if this module never uses it, it is needed to key off some behaviors
    if src is None:
        if state in ('link', 'hard'):
            if follow and state == 'link':
                # use the current target of the link as the source
                src = to_native(os.path.realpath(b_path), errors='strict')
            else:
                module.fail_json(msg='src and dest are required for creating links')

    # original_basename is used by other modules that depend on file.
    if os.path.isdir(b_path) and state not in ("link", "absent"):
        basename = None
        if params['original_basename']:
            basename = params['original_basename']
        elif src is not None:
            basename = os.path.basename(src)
        if basename:
            params['path'] = path = os.path.join(path, basename)
            b_path = to_bytes(path, errors='surrogate_or_strict')

    # make sure the target path is a directory when we're doing a recursive operation
    recurse = params['recurse']
    if recurse and state != 'directory':
        module.fail_json(path=path, msg="recurse option requires state to be 'directory'")

    file_args = module.load_file_common_arguments(params)

    changed = False
    diff = {'before': {'path': path},
            'after': {'path': path},
            }

    state_change = False
    if prev_state != state:
        diff['before']['state'] = prev_state
        diff['after']['state'] = state
        state_change = True

    if state == 'absent':
        if state_change:
            if not module.check_mode:
                if prev_state == 'directory':
                    try:
                        shutil.rmtree(b_path, ignore_errors=False)
                    except Exception:
                        e = get_exception()
                        module.fail_json(msg="rmtree failed: %s" % str(e))
                else:
                    try:
                        os.unlink(b_path)
                    except Exception:
                        e = get_exception()
                        module.fail_json(path=path, msg="unlinking failed: %s " % str(e))
            module.exit_json(path=path, changed=True, diff=diff)
        else:
            module.exit_json(path=path, changed=False)

    elif state == 'file':

        if state_change:
            if follow and prev_state == 'link':
                # follow symlink and operate on original
                b_path = os.path.realpath(b_path)
                path = to_native(b_path, errors='strict')
                prev_state = get_state(b_path)
                file_args['path'] = path

        if prev_state not in ('file', 'hard'):
            # file is not absent and any other state is a conflict
            module.fail_json(path=path, msg='file (%s) is %s, cannot continue' % (path, prev_state))

        changed = module.set_fs_attributes_if_different(file_args, changed, diff)
        module.exit_json(path=path, changed=changed, diff=diff)

    elif state == 'directory':
        if follow and prev_state == 'link':
            b_path = os.path.realpath(b_path)
            path = to_native(b_path, errors='strict')
            prev_state = get_state(b_path)

        if prev_state == 'absent':
            if module.check_mode:
                module.exit_json(changed=True, diff=diff)
            changed = True
            curpath = ''

            try:
                # Split the path so we can apply filesystem attributes recursively
                # from the root (/) directory for absolute paths or the base path
                # of a relative path.  We can then walk the appropriate directory
                # path to apply attributes.
                for dirname in path.strip('/').split('/'):
                    curpath = '/'.join([curpath, dirname])
                    # Remove leading slash if we're creating a relative path
                    if not os.path.isabs(path):
                        curpath = curpath.lstrip('/')
                    b_curpath = to_bytes(curpath, errors='surrogate_or_strict')
                    if not os.path.exists(b_curpath):
                        try:
                            os.mkdir(b_curpath)
                        except OSError:
                            ex = get_exception()
                            # Possibly something else created the dir since the os.path.exists
                            # check above. As long as it's a dir, we don't need to error out.
                            if not (ex.errno == errno.EEXIST and os.path.isdir(b_curpath)):
                                raise
                        tmp_file_args = file_args.copy()
                        tmp_file_args['path'] = curpath
                        changed = module.set_fs_attributes_if_different(tmp_file_args, changed, diff)
            except Exception:
                e = get_exception()
                module.fail_json(path=path, msg='There was an issue creating %s as requested: %s' % (curpath, str(e)))

        # We already know prev_state is not 'absent', therefore it exists in some form.
        elif prev_state != 'directory':
            module.fail_json(path=path, msg='%s already exists as a %s' % (path, prev_state))

        changed = module.set_fs_attributes_if_different(file_args, changed, diff)

        if recurse:
            changed |= recursive_set_attributes(module, to_bytes(file_args['path'], errors='surrogate_or_strict'), follow, file_args)

        module.exit_json(path=path, changed=changed, diff=diff)

    elif state in ('link', 'hard'):

        if os.path.isdir(b_path) and not os.path.islink(b_path):
            relpath = path
        else:
            b_relpath = os.path.dirname(b_path)
            relpath = to_native(b_relpath, errors='strict')

        absrc = os.path.join(relpath, src)
        b_absrc = to_bytes(absrc, errors='surrogate_or_strict')
        if not os.path.exists(b_absrc) and not force:
            module.fail_json(path=path, src=src, msg='src file does not exist, use "force=yes" if you really want to create the link: %s' % absrc)

        if state == 'hard':
            if not os.path.isabs(b_src):
                module.fail_json(msg="absolute paths are required")
        elif prev_state == 'directory':
            if not force:
                module.fail_json(path=path, msg='refusing to convert between %s and %s for %s' % (prev_state, state, path))
            elif len(os.listdir(b_path)) > 0:
                # refuse to replace a directory that has files in it
                module.fail_json(path=path, msg='the directory %s is not empty, refusing to convert it' % path)
        elif prev_state in ('file', 'hard') and not force:
            module.fail_json(path=path, msg='refusing to convert between %s and %s for %s' % (prev_state, state, path))

        if prev_state == 'absent':
            changed = True
        elif prev_state == 'link':
            b_old_src = os.readlink(b_path)
            if b_old_src != b_src:
                changed = True
        elif prev_state == 'hard':
            if not (state == 'hard' and os.stat(b_path).st_ino == os.stat(b_src).st_ino):
                changed = True
                if not force:
                    module.fail_json(dest=path, src=src, msg='Cannot link, different hard link exists at destination')
        elif prev_state in ('file', 'directory'):
            changed = True
            if not force:
                module.fail_json(dest=path, src=src, msg='Cannot link, %s exists at destination' % prev_state)
        else:
            module.fail_json(dest=path, src=src, msg='unexpected position reached')

        if changed and not module.check_mode:
            if prev_state != 'absent':
                # try to replace atomically
                b_tmppath = to_bytes(os.path.sep).join(
                    [os.path.dirname(b_path), to_bytes(".%s.%s.tmp" % (os.getpid(), time.time()))]
                )
                try:
                    if prev_state == 'directory' and (state == 'hard' or state == 'link'):
                        os.rmdir(b_path)
                    if state == 'hard':
                        os.link(b_src, b_tmppath)
                    else:
                        os.symlink(b_src, b_tmppath)
                    os.rename(b_tmppath, b_path)
                except OSError:
                    e = get_exception()
                    if os.path.exists(b_tmppath):
                        os.unlink(b_tmppath)
                    module.fail_json(path=path, msg='Error while replacing: %s' % to_native(e, nonstring='simplerepr'))
            else:
                try:
                    if state == 'hard':
                        os.link(b_src, b_path)
                    else:
                        os.symlink(b_src, b_path)
                except OSError:
                    e = get_exception()
                    module.fail_json(path=path, msg='Error while linking: %s' % to_native(e, nonstring='simplerepr'))

        if module.check_mode and not os.path.exists(b_path):
            module.exit_json(dest=path, src=src, changed=changed, diff=diff)

        changed = module.set_fs_attributes_if_different(file_args, changed, diff)
        module.exit_json(dest=path, src=src, changed=changed, diff=diff)

    elif state == 'touch':
        if not module.check_mode:

            if prev_state == 'absent':
                try:
                    open(b_path, 'wb').close()
                except OSError:
                    e = get_exception()
                    module.fail_json(path=path, msg='Error, could not touch target: %s' % to_native(e, nonstring='simplerepr'))
            elif prev_state in ('file', 'directory', 'hard'):
                try:
                    os.utime(b_path, None)
                except OSError:
                    e = get_exception()
                    module.fail_json(path=path, msg='Error while touching existing target: %s' % to_native(e, nonstring='simplerepr'))
            else:
                module.fail_json(msg='Cannot touch other than files, directories, and hardlinks (%s is %s)' % (path, prev_state))
            try:
                module.set_fs_attributes_if_different(file_args, True, diff)
            except SystemExit:
                e = get_exception()
                if e.code:
                    # We take this to mean that fail_json() was called from
                    # somewhere in basic.py
                    if prev_state == 'absent':
                        # If we just created the file we can safely remove it
                        os.remove(b_path)
                raise e

        module.exit_json(dest=path, changed=True, diff=diff)

    module.fail_json(path=path, msg='unexpected position reached')
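get_state() is not shown. From its uses above it maps a byte path to one of the module's state names ('absent', 'link', 'directory', 'hard', 'file'). A hedged sketch consistent with those uses:

import os

def get_state(b_path):
    """Classify what currently exists at b_path, mirroring the state choices."""
    if not os.path.lexists(b_path):
        return 'absent'
    if os.path.islink(b_path):
        return 'link'  # check before isdir(): isdir() follows symlinks
    if os.path.isdir(b_path):
        return 'directory'
    if os.stat(b_path).st_nlink > 1:
        return 'hard'
    return 'file'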
Example #13
File: copy.py  Project: pabelanger/ansible
def main():

    global module

    module = AnsibleModule(
        # not checking because of daisy chain to file module
        argument_spec=dict(
            src=dict(type='path'),
            _original_basename=dict(
                type='str'
            ),  # used to handle 'dest is a directory' via template, a slight hack
            content=dict(type='str', no_log=True),
            dest=dict(type='path', required=True),
            backup=dict(type='bool', default=False),
            force=dict(type='bool', default=True, aliases=['thirsty']),
            validate=dict(type='str'),
            directory_mode=dict(type='raw'),
            remote_src=dict(type='bool'),
            local_follow=dict(type='bool'),
            checksum=dict(type='str'),
            follow=dict(type='bool', default=False),
        ),
        add_file_common_args=True,
        supports_check_mode=True,
    )

    if module.params.get('thirsty'):
        module.deprecate(
            'The alias "thirsty" has been deprecated and will be removed, use "force" instead',
            version='2.13',
            collection_name='ansible.builtin')

    src = module.params['src']
    b_src = to_bytes(src, errors='surrogate_or_strict')
    dest = module.params['dest']
    # Make sure we always have a directory component for later processing
    if os.path.sep not in dest:
        dest = '.{0}{1}'.format(os.path.sep, dest)
    b_dest = to_bytes(dest, errors='surrogate_or_strict')
    backup = module.params['backup']
    force = module.params['force']
    _original_basename = module.params.get('_original_basename', None)
    validate = module.params.get('validate', None)
    follow = module.params['follow']
    local_follow = module.params['local_follow']
    mode = module.params['mode']
    owner = module.params['owner']
    group = module.params['group']
    remote_src = module.params['remote_src']
    checksum = module.params['checksum']

    if not os.path.exists(b_src):
        module.fail_json(msg="Source %s not found" % (src))
    if not os.access(b_src, os.R_OK):
        module.fail_json(msg="Source %s not readable" % (src))

    # Preserve is usually handled in the action plugin but mode + remote_src has to be done on the
    # remote host
    if module.params['mode'] == 'preserve':
        module.params['mode'] = '0%03o' % stat.S_IMODE(os.stat(b_src).st_mode)
    mode = module.params['mode']

    checksum_dest = None

    if os.path.isfile(src):
        checksum_src = module.sha1(src)
    else:
        checksum_src = None

    # Backwards compat only.  This will be None in FIPS mode
    try:
        if os.path.isfile(src):
            md5sum_src = module.md5(src)
        else:
            md5sum_src = None
    except ValueError:
        md5sum_src = None

    changed = False

    if checksum and checksum_src != checksum:
        module.fail_json(
            msg='Copied file does not match the expected checksum. Transfer failed.',
            checksum=checksum_src,
            expected_checksum=checksum)

    # Special handling for recursive copy - create intermediate dirs
    if dest.endswith(os.sep):
        if _original_basename:
            dest = os.path.join(dest, _original_basename)
        b_dest = to_bytes(dest, errors='surrogate_or_strict')
        dirname = os.path.dirname(dest)
        b_dirname = to_bytes(dirname, errors='surrogate_or_strict')
        if not os.path.exists(b_dirname):
            try:
                (pre_existing_dir,
                 new_directory_list) = split_pre_existing_dir(dirname)
            except AnsibleModuleError as e:
                e.results['msg'] += ' Could not copy to {0}'.format(dest)
                module.fail_json(**e.results)

            os.makedirs(b_dirname)
            directory_args = module.load_file_common_arguments(module.params)
            directory_mode = module.params["directory_mode"]
            if directory_mode is not None:
                directory_args['mode'] = directory_mode
            else:
                directory_args['mode'] = None
            adjust_recursive_directory_permissions(pre_existing_dir,
                                                   new_directory_list, module,
                                                   directory_args, changed)

    if os.path.isdir(b_dest):
        basename = os.path.basename(src)
        if _original_basename:
            basename = _original_basename
        dest = os.path.join(dest, basename)
        b_dest = to_bytes(dest, errors='surrogate_or_strict')

    if os.path.exists(b_dest):
        if os.path.islink(b_dest) and follow:
            b_dest = os.path.realpath(b_dest)
            dest = to_native(b_dest, errors='surrogate_or_strict')
        if not force:
            module.exit_json(msg="file already exists",
                             src=src,
                             dest=dest,
                             changed=False)
        if os.access(b_dest, os.R_OK) and os.path.isfile(b_dest):
            checksum_dest = module.sha1(dest)
    else:
        if not os.path.exists(os.path.dirname(b_dest)):
            try:
                # os.path.exists() can return false in some
                # circumstances where the directory does not have
                # the execute bit for the current user set, in
                # which case the stat() call will raise an OSError
                os.stat(os.path.dirname(b_dest))
            except OSError as e:
                if "permission denied" in to_native(e).lower():
                    module.fail_json(
                        msg="Destination directory %s is not accessible" %
                        (os.path.dirname(dest)))
            module.fail_json(msg="Destination directory %s does not exist" %
                             (os.path.dirname(dest)))

    if not os.access(os.path.dirname(b_dest),
                     os.W_OK) and not module.params['unsafe_writes']:
        module.fail_json(msg="Destination %s not writable" %
                         (os.path.dirname(dest)))

    backup_file = None
    if checksum_src != checksum_dest or os.path.islink(b_dest):
        if not module.check_mode:
            try:
                if backup:
                    if os.path.exists(b_dest):
                        backup_file = module.backup_local(dest)
                # allow for conversion from symlink.
                if os.path.islink(b_dest):
                    os.unlink(b_dest)
                    open(b_dest, 'w').close()
                if validate:
                    # if we have a mode, make sure we set it on the temporary
                    # file source as some validations may require it
                    if mode is not None:
                        module.set_mode_if_different(src, mode, False)
                    if owner is not None:
                        module.set_owner_if_different(src, owner, False)
                    if group is not None:
                        module.set_group_if_different(src, group, False)
                    if "%s" not in validate:
                        module.fail_json(msg="validate must contain %%s: %s" %
                                         (validate))
                    (rc, out, err) = module.run_command(validate % src)
                    if rc != 0:
                        module.fail_json(msg="failed to validate",
                                         exit_status=rc,
                                         stdout=out,
                                         stderr=err)
                b_mysrc = b_src
                if remote_src and os.path.isfile(b_src):
                    fd, b_mysrc = tempfile.mkstemp(dir=os.path.dirname(b_dest))
                    os.close(fd)  # only the path is needed; shutil.copyfile reopens it

                    shutil.copyfile(b_src, b_mysrc)
                    try:
                        shutil.copystat(b_src, b_mysrc)
                    except OSError as err:
                        if err.errno == errno.ENOSYS and mode == "preserve":
                            module.warn("Unable to copy stats {0}".format(
                                to_native(b_src)))
                        else:
                            raise

                # might be needed below
                if PY3 and hasattr(os, 'listxattr'):
                    try:
                        src_has_acls = 'system.posix_acl_access' in os.listxattr(
                            src)
                    except Exception as e:
                        # assume unwanted ACLs by default
                        src_has_acls = True

                module.atomic_move(
                    b_mysrc,
                    dest,
                    unsafe_writes=module.params['unsafe_writes'])

                if PY3 and hasattr(os, 'listxattr') and platform.system() == 'Linux' and not remote_src:
                    # atomic_move used above to copy src into dest might, in some cases,
                    # use shutil.copy2 which in turn uses shutil.copystat.
                    # Since Python 3.3, shutil.copystat copies file extended attributes:
                    # https://docs.python.org/3/library/shutil.html#shutil.copystat
                    # os.listxattr (along with others) was added to handle the operation.

                    # This means that on Python 3 we are copying the extended attributes which includes
                    # the ACLs on some systems - further limited to Linux as the documentation above claims
                    # that the extended attributes are copied only on Linux. Also, os.listxattr is only
                    # available on Linux.

                    # If not remote_src, then the file was copied from the controller. In that
                    # case, any filesystem ACLs are artifacts of the copy rather than preservation
                    # of existing attributes. Get rid of them:

                    if src_has_acls:
                        # FIXME If dest has any default ACLs, they are not applied to src now because
                        # they were overridden by copystat. Should/can we do anything about this?
                        # 'system.posix_acl_default' in os.listxattr(os.path.dirname(b_dest))

                        try:
                            clear_facls(dest)
                        except ValueError as e:
                            if 'setfacl' in to_native(e):
                                # No setfacl so we're okay.  The controller couldn't have set a facl
                                # without the setfacl command
                                pass
                            else:
                                raise
                        except RuntimeError as e:
                            # setfacl failed.
                            if 'Operation not supported' in to_native(e):
                                # The file system does not support ACLs.
                                pass
                            else:
                                raise

            except (IOError, OSError):
                module.fail_json(msg="failed to copy: %s to %s" % (src, dest),
                                 traceback=traceback.format_exc())
        changed = True
    else:
        changed = False

    # If neither has a checksum, both src and dest are directories.
    if checksum_src is None and checksum_dest is None:
        if remote_src and os.path.isdir(module.params['src']):
            b_src = to_bytes(module.params['src'],
                             errors='surrogate_or_strict')
            b_dest = to_bytes(module.params['dest'],
                              errors='surrogate_or_strict')

            if src.endswith(os.path.sep) and os.path.isdir(
                    module.params['dest']):
                diff_files_changed = copy_diff_files(b_src, b_dest, module)
                left_only_changed = copy_left_only(b_src, b_dest, module)
                common_dirs_changed = copy_common_dirs(b_src, b_dest, module)
                owner_group_changed = chown_recursive(b_dest, module)
                if diff_files_changed or left_only_changed or common_dirs_changed or owner_group_changed:
                    changed = True

            if src.endswith(
                    os.path.sep) and not os.path.exists(module.params['dest']):
                b_basename = to_bytes(os.path.basename(src),
                                      errors='surrogate_or_strict')
                b_dest = to_bytes(os.path.join(b_dest, b_basename),
                                  errors='surrogate_or_strict')
                b_src = to_bytes(os.path.join(module.params['src'], ""),
                                 errors='surrogate_or_strict')
                if not module.check_mode:
                    shutil.copytree(b_src, b_dest, symlinks=not (local_follow))
                chown_recursive(dest, module)
                changed = True

            if not src.endswith(os.path.sep) and os.path.isdir(
                    module.params['dest']):
                b_basename = to_bytes(os.path.basename(src),
                                      errors='surrogate_or_strict')
                b_dest = to_bytes(os.path.join(b_dest, b_basename),
                                  errors='surrogate_or_strict')
                b_src = to_bytes(os.path.join(module.params['src'], ""),
                                 errors='surrogate_or_strict')
                if not module.check_mode and not os.path.exists(b_dest):
                    shutil.copytree(b_src, b_dest, symlinks=not (local_follow))
                    changed = True
                    chown_recursive(dest, module)
                if module.check_mode and not os.path.exists(b_dest):
                    changed = True
                if os.path.exists(b_dest):
                    diff_files_changed = copy_diff_files(b_src, b_dest, module)
                    left_only_changed = copy_left_only(b_src, b_dest, module)
                    common_dirs_changed = copy_common_dirs(
                        b_src, b_dest, module)
                    owner_group_changed = chown_recursive(b_dest, module)
                    if diff_files_changed or left_only_changed or common_dirs_changed or owner_group_changed:
                        changed = True

            if not src.endswith(os.path.sep) and not os.path.exists(
                    module.params['dest']):
                b_basename = to_bytes(os.path.basename(module.params['src']),
                                      errors='surrogate_or_strict')
                b_dest = to_bytes(os.path.join(b_dest, b_basename),
                                  errors='surrogate_or_strict')
                if not module.check_mode and not os.path.exists(b_dest):
                    os.makedirs(b_dest)
                    b_src = to_bytes(os.path.join(module.params['src'], ""),
                                     errors='surrogate_or_strict')
                    diff_files_changed = copy_diff_files(b_src, b_dest, module)
                    left_only_changed = copy_left_only(b_src, b_dest, module)
                    common_dirs_changed = copy_common_dirs(
                        b_src, b_dest, module)
                    owner_group_changed = chown_recursive(b_dest, module)
                    if diff_files_changed or left_only_changed or common_dirs_changed or owner_group_changed:
                        changed = True
                if module.check_mode and not os.path.exists(b_dest):
                    changed = True

    res_args = dict(dest=dest,
                    src=src,
                    md5sum=md5sum_src,
                    checksum=checksum_src,
                    changed=changed)
    if backup_file:
        res_args['backup_file'] = backup_file

    if not module.check_mode:
        file_args = module.load_file_common_arguments(module.params, path=dest)
        res_args['changed'] = module.set_fs_attributes_if_different(
            file_args, res_args['changed'])

    module.exit_json(**res_args)
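clear_facls() is not shown. Based on the surrounding comments, it strips the POSIX ACLs that shutil.copystat() carried over as extended attributes; the ValueError/RuntimeError contract is inferred from the caller's except clauses. A hedged sketch using setfacl -b, written against this example's global module (the error messages are assumptions):

def clear_facls(path):
    """Remove all extended ACL entries from path, keeping the base mode bits."""
    setfacl = module.get_bin_path('setfacl')
    if setfacl is None:
        # the caller treats a missing setfacl binary as "nothing to clear"
        raise ValueError('setfacl not found in PATH')
    # -b removes every extended ACL entry while leaving the plain mode bits alone
    rc, out, err = module.run_command([setfacl, '-b', path])
    if rc != 0:
        raise RuntimeError('setfacl -b %s failed: %s' % (path, err))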
Example #14
def main():
    argument_spec = url_argument_spec()

    # setup aliases
    argument_spec['url_username']['aliases'] = ['username']
    argument_spec['url_password']['aliases'] = ['password']

    argument_spec.update(
        url=dict(type='str', required=True),
        dest=dict(type='path', required=True),
        backup=dict(type='bool'),
        sha256sum=dict(type='str', default=''),
        checksum=dict(type='str', default=''),
        timeout=dict(type='int', default=10),
        headers=dict(type='raw'),
        tmp_dest=dict(type='path'),
    )

    module = AnsibleModule(
        # not checking because of daisy chain to file module
        argument_spec=argument_spec,
        add_file_common_args=True,
        supports_check_mode=True,
        mutually_exclusive=[['checksum', 'sha256sum']],
    )

    url = module.params['url']
    dest = module.params['dest']
    backup = module.params['backup']
    force = module.params['force']
    sha256sum = module.params['sha256sum']
    checksum = module.params['checksum']
    use_proxy = module.params['use_proxy']
    timeout = module.params['timeout']
    tmp_dest = module.params['tmp_dest']

    result = dict(
        changed=False,
        checksum_dest=None,
        checksum_src=None,
        dest=dest,
        elapsed=0,
        url=url,
    )

    # Parse headers to dict
    if isinstance(module.params['headers'], dict):
        headers = module.params['headers']
    elif module.params['headers']:
        try:
            headers = dict(
                item.split(':', 1)
                for item in module.params['headers'].split(','))
            module.deprecate(
                'Supplying `headers` as a string is deprecated. Please use dict/hash format for `headers`',
                version='2.10')
        except Exception:
            module.fail_json(
                msg="The string representation for the `headers` parameter requires a key:value,key:value syntax to be properly parsed.",
                **result)
    else:
        headers = None

    dest_is_dir = os.path.isdir(dest)
    last_mod_time = None

    # workaround for usage of deprecated sha256sum parameter
    if sha256sum:
        checksum = 'sha256:%s' % (sha256sum)

    # checksum specified, parse for algorithm and checksum
    if checksum:
        try:
            algorithm, checksum = checksum.split(':', 1)
        except ValueError:
            module.fail_json(
                msg="The checksum parameter has to be in format <algorithm>:<checksum>",
                **result)

        if checksum.startswith('http://') or checksum.startswith(
                'https://') or checksum.startswith('ftp://'):
            checksum_url = checksum
            # download checksum file to checksum_tmpsrc
            checksum_tmpsrc, checksum_info = url_get(module, checksum_url,
                                                     dest, use_proxy,
                                                     last_mod_time, force,
                                                     timeout, headers,
                                                     tmp_dest)
            with open(checksum_tmpsrc) as f:
                lines = [line.rstrip('\n') for line in f]
            os.remove(checksum_tmpsrc)
            checksum_map = {}
            for line in lines:
                parts = line.split(None, 1)
                if len(parts) == 2:
                    checksum_map[parts[0]] = parts[1]
            filename = url_filename(url)

            # Look through each line in the checksum file for a hash corresponding to
            # the filename in the url, returning the first hash that is found.
            for cksum in (s for (s, f) in checksum_map.items()
                          if f.strip('./') == filename):
                checksum = cksum
                break
            else:
                checksum = None

            if checksum is None:
                module.fail_json(
                    msg="Unable to find a checksum for file '%s' in '%s'" %
                    (filename, checksum_url))
        # Remove any non-alphanumeric characters, including the infamous
        # Unicode zero-width space
        checksum = re.sub(r'\W+', '', checksum).lower()
        # Ensure the checksum portion is a hexdigest
        try:
            int(checksum, 16)
        except ValueError:
            module.fail_json(msg='The checksum format is invalid', **result)

    if not dest_is_dir and os.path.exists(dest):
        checksum_mismatch = False

        # If the download is not forced and there is a checksum, allow
        # checksum match to skip the download.
        if not force and checksum != '':
            destination_checksum = module.digest_from_file(dest, algorithm)

            if checksum != destination_checksum:
                checksum_mismatch = True

        # Not forcing redownload and the checksum matches, so the file
        # itself is untouched; only allow file attribute changes
        if not force and checksum and not checksum_mismatch:
            module.params['path'] = dest
            file_args = module.load_file_common_arguments(module.params)
            file_args['path'] = dest
            result['changed'] = module.set_fs_attributes_if_different(
                file_args, False)
            if result['changed']:
                module.exit_json(
                    msg="file already exists but file attributes changed",
                    **result)
            module.exit_json(msg="file already exists", **result)

        # If the file already exists, prepare the last modified time for the
        # request.
        mtime = os.path.getmtime(dest)
        last_mod_time = datetime.datetime.utcfromtimestamp(mtime)

        # If the checksum does not match we have to force the download
        # because last_mod_time may be newer than on remote
        if checksum_mismatch:
            force = True

    # download to tmpsrc
    start = datetime.datetime.utcnow()
    tmpsrc, info = url_get(module, url, dest, use_proxy, last_mod_time, force,
                           timeout, headers, tmp_dest)
    result['elapsed'] = (datetime.datetime.utcnow() - start).seconds
    result['src'] = tmpsrc

    # Now that the request has completed, we can finally generate the final
    # destination file name from the info dict.

    if dest_is_dir:
        filename = extract_filename_from_headers(info)
        if not filename:
            # Fall back to extracting the filename from the URL.
            # Pluck the URL from the info, since a redirect could have changed
            # it.
            filename = url_filename(info['url'])
        dest = os.path.join(dest, filename)
        result['dest'] = dest

    # raise an error if there is no tmpsrc file; there is nothing to
    # clean up, since the temporary file was never created
    if not os.path.exists(tmpsrc):
        module.fail_json(msg="Request failed",
                         status_code=info['status'],
                         response=info['msg'],
                         **result)
    if not os.access(tmpsrc, os.R_OK):
        os.remove(tmpsrc)
        module.fail_json(msg="Source %s is not readable" % (tmpsrc), **result)
    result['checksum_src'] = module.sha1(tmpsrc)

    # check if there is no dest file
    if os.path.exists(dest):
        # raise an error if copy has no permission on dest
        if not os.access(dest, os.W_OK):
            os.remove(tmpsrc)
            module.fail_json(msg="Destination %s is not writable" % (dest),
                             **result)
        if not os.access(dest, os.R_OK):
            os.remove(tmpsrc)
            module.fail_json(msg="Destination %s is not readable" % (dest),
                             **result)
        result['checksum_dest'] = module.sha1(dest)
    else:
        if not os.path.exists(os.path.dirname(dest)):
            os.remove(tmpsrc)
            module.fail_json(msg="Destination %s does not exist" %
                             (os.path.dirname(dest)),
                             **result)
        if not os.access(os.path.dirname(dest), os.W_OK):
            os.remove(tmpsrc)
            module.fail_json(msg="Destination %s is not writable" %
                             (os.path.dirname(dest)),
                             **result)

    if module.check_mode:
        if os.path.exists(tmpsrc):
            os.remove(tmpsrc)
        result['changed'] = ('checksum_dest' not in result or
                             result['checksum_src'] != result['checksum_dest'])
        module.exit_json(msg=info.get('msg', ''), **result)

    backup_file = None
    if result['checksum_src'] != result['checksum_dest']:
        try:
            if backup:
                if os.path.exists(dest):
                    backup_file = module.backup_local(dest)
            module.atomic_move(tmpsrc, dest)
        except Exception as e:
            if os.path.exists(tmpsrc):
                os.remove(tmpsrc)
            module.fail_json(msg="failed to copy %s to %s: %s" %
                             (tmpsrc, dest, to_native(e)),
                             exception=traceback.format_exc(),
                             **result)
        result['changed'] = True
    else:
        result['changed'] = False
        if os.path.exists(tmpsrc):
            os.remove(tmpsrc)

    if checksum != '':
        destination_checksum = module.digest_from_file(dest, algorithm)

        if checksum != destination_checksum:
            os.remove(dest)
            module.fail_json(
                msg="The checksum for %s did not match %s; it was %s." %
                (dest, checksum, destination_checksum),
                **result)

    # allow file attribute changes
    module.params['path'] = dest
    file_args = module.load_file_common_arguments(module.params)
    file_args['path'] = dest
    result['changed'] = module.set_fs_attributes_if_different(
        file_args, result['changed'])

    # Backwards compat only.  We'll return None on FIPS enabled systems
    try:
        result['md5sum'] = module.md5(dest)
    except ValueError:
        result['md5sum'] = None

    if backup_file:
        result['backup_file'] = backup_file

    # Mission complete
    module.exit_json(msg=info.get('msg', ''),
                     status_code=info.get('status', ''),
                     **result)
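The checksum handling in the example above packs two separate steps into one block: splitting the '<algorithm>:<checksum>' parameter, and resolving a digest out of a downloaded checksum file. Below is a minimal standalone sketch of both steps; the helper names are invented for illustration, assuming the same line formats the module expects.

import re


def parse_checksum_param(checksum):
    # '<algorithm>:<hexdigest>' -> ('sha256', 'abc123...'); raises
    # ValueError on bad input, mirroring the fail_json branches above
    algorithm, _, digest = checksum.partition(':')
    if not digest:
        raise ValueError("expected '<algorithm>:<checksum>'")
    # drop non-word characters (e.g. the zero-width space) and lowercase
    digest = re.sub(r'\W+', '', digest).lower()
    int(digest, 16)  # ensure the digest really is hexadecimal
    return algorithm, digest


def lookup_checksum(lines, filename):
    # checksum files carry '<digest> <name>' per line; return the first
    # digest whose name matches, ignoring leading './' as the module does
    for line in lines:
        parts = line.split(None, 1)
        if len(parts) == 2 and parts[1].strip('./') == filename:
            return parts[0]
    return None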
Example #15
def main():

    module = AnsibleModule(
        # not checking because of daisy chain to file module
        argument_spec = dict(
            src               = dict(required=False, type='path'),
            original_basename = dict(required=False),  # used to handle 'dest is a directory' via template, a slight hack
            content           = dict(required=False, no_log=True),
            dest              = dict(required=True, type='path'),
            backup            = dict(default=False, type='bool'),
            force             = dict(default=True, aliases=['thirsty'], type='bool'),
            validate          = dict(required=False, type='str'),
            directory_mode    = dict(required=False),
            remote_src        = dict(required=False, type='bool'),
        ),
        add_file_common_args=True,
        supports_check_mode=True,
    )

    src = module.params['src']
    b_src = to_bytes(src, errors='surrogate_or_strict')
    dest = module.params['dest']
    b_dest = to_bytes(dest, errors='surrogate_or_strict')
    backup = module.params['backup']
    force = module.params['force']
    original_basename = module.params.get('original_basename', None)
    validate = module.params.get('validate', None)
    follow = module.params['follow']
    mode = module.params['mode']
    remote_src = module.params['remote_src']

    if not os.path.exists(b_src):
        module.fail_json(msg="Source %s not found" % (src))
    if not os.access(b_src, os.R_OK):
        module.fail_json(msg="Source %s not readable" % (src))
    if os.path.isdir(b_src):
        module.fail_json(msg="Remote copy does not support recursive copy of directory: %s" % (src))

    checksum_src = module.sha1(src)
    checksum_dest = None
    # Backwards compat only.  This will be None in FIPS mode
    try:
        md5sum_src = module.md5(src)
    except ValueError:
        md5sum_src = None

    changed = False

    # Special handling for recursive copy - create intermediate dirs
    if original_basename and dest.endswith(os.sep):
        dest = os.path.join(dest, original_basename)
        b_dest = to_bytes(dest, errors='surrogate_or_strict')
        dirname = os.path.dirname(dest)
        b_dirname = to_bytes(dirname, errors='surrogate_or_strict')
        if not os.path.exists(b_dirname) and os.path.isabs(b_dirname):
            (pre_existing_dir, new_directory_list) = split_pre_existing_dir(dirname)
            os.makedirs(b_dirname)
            directory_args = module.load_file_common_arguments(module.params)
            directory_mode = module.params["directory_mode"]
            if directory_mode is not None:
                directory_args['mode'] = directory_mode
            else:
                directory_args['mode'] = None
            adjust_recursive_directory_permissions(pre_existing_dir, new_directory_list, module, directory_args, changed)

    if os.path.isdir(b_dest):
        basename = os.path.basename(src)
        if original_basename:
            basename = original_basename
        dest = os.path.join(dest, basename)
        b_dest = to_bytes(dest, errors='surrogate_or_strict')

    if os.path.exists(b_dest):
        if os.path.islink(b_dest) and follow:
            b_dest = os.path.realpath(b_dest)
            dest = to_native(b_dest, errors='surrogate_or_strict')
        if not force:
            module.exit_json(msg="file already exists", src=src, dest=dest, changed=False)
        if os.access(b_dest, os.R_OK):
            checksum_dest = module.sha1(dest)
    else:
        if not os.path.exists(os.path.dirname(b_dest)):
            try:
                # os.path.exists() can return false in some
                # circumstances where the directory does not have
                # the execute bit for the current user set, in
                # which case the stat() call will raise an OSError
                os.stat(os.path.dirname(b_dest))
            except OSError:
                e = get_exception()
                if "permission denied" in to_native(e).lower():
                    module.fail_json(msg="Destination directory %s is not accessible" % (os.path.dirname(dest)))
            module.fail_json(msg="Destination directory %s does not exist" % (os.path.dirname(dest)))
    if not os.access(os.path.dirname(b_dest), os.W_OK):
        module.fail_json(msg="Destination %s not writable" % (os.path.dirname(dest)))

    backup_file = None
    if checksum_src != checksum_dest or os.path.islink(b_dest):
        if not module.check_mode:
            try:
                if backup:
                    if os.path.exists(b_dest):
                        backup_file = module.backup_local(dest)
                # allow for conversion from symlink.
                if os.path.islink(b_dest):
                    os.unlink(b_dest)
                    open(b_dest, 'w').close()
                if validate:
                    # if we have a mode, make sure we set it on the temporary
                    # file source as some validations may require it
                    # FIXME: should we do the same for owner/group here too?
                    if mode is not None:
                        module.set_mode_if_different(src, mode, False)
                    if "%s" not in validate:
                        module.fail_json(msg="validate must contain %%s: %s" % (validate))
                    (rc, out, err) = module.run_command(validate % src)
                    if rc != 0:
                        module.fail_json(msg="failed to validate", exit_status=rc, stdout=out, stderr=err)
                b_mysrc = b_src
                if remote_src:
                    _, b_mysrc = tempfile.mkstemp(dir=os.path.dirname(b_dest))
                    shutil.copy2(b_src, b_mysrc)
                module.atomic_move(b_mysrc, dest, unsafe_writes=module.params['unsafe_writes'])
            except IOError:
                module.fail_json(msg="failed to copy: %s to %s" % (src, dest), traceback=traceback.format_exc())
        changed = True
    else:
        changed = False

    res_args = dict(
        dest=dest, src=src, md5sum=md5sum_src, checksum=checksum_src, changed=changed
    )
    if backup_file:
        res_args['backup_file'] = backup_file

    module.params['dest'] = dest
    if not module.check_mode:
        file_args = module.load_file_common_arguments(module.params)
        res_args['changed'] = module.set_fs_attributes_if_different(file_args, res_args['changed'])

    module.exit_json(**res_args)
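The validate handling in Example #15 is a pattern worth isolating: the command template must contain a literal %s, which is replaced with the path of the not-yet-moved temporary file before the copy is committed. A rough standard-library stand-in (the module itself goes through module.run_command):

import shlex
import subprocess


def validate_file(validate_cmd, src_path):
    # the template must contain %s for the file under test,
    # e.g. validate_file('visudo -cf %s', '/tmp/sudoers.new')
    if '%s' not in validate_cmd:
        raise ValueError('validate must contain %%s: %s' % validate_cmd)
    proc = subprocess.run(shlex.split(validate_cmd % src_path),
                          capture_output=True, text=True)
    return proc.returncode, proc.stdout, proc.stderr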
Example #16
def main():
    module = AnsibleModule(
        argument_spec = dict(
            path = dict(type='list', required=True),
            format  = dict(choices=['gz', 'bz2', 'zip', 'tar'], default='gz', required=False),
            dest = dict(required=False, type='path'),
            remove = dict(required=False, default=False, type='bool'),
        ),
        add_file_common_args=True,
        supports_check_mode=True,
    )

    params = module.params
    check_mode = module.check_mode
    paths = params['path']
    dest = params['dest']
    remove = params['remove']

    expanded_paths = []
    format = params['format']
    globby = False
    changed = False
    state = 'absent'

    # Simple or archive file compression (inapplicable with 'zip' since it's always an archive)
    archive = False
    successes = []

    for i, path in enumerate(paths):
        path = os.path.expanduser(os.path.expandvars(path))

        # Expand any glob characters. If found, add the expanded glob to the
        # list of expanded_paths, which might be empty.
        if ('*' in path or '?' in path):
            expanded_paths = expanded_paths + glob.glob(path)
            globby = True

        # If there are no glob characters the path is added to the expanded paths
        # whether the path exists or not
        else:
            expanded_paths.append(path)

    if len(expanded_paths) == 0:
        return module.fail_json(path=', '.join(paths), expanded_paths=', '.join(expanded_paths), msg='Error, no source paths were found')

    # If we actually matched multiple files or TRIED to, then
    # treat this as a multi-file archive
    archive = globby or os.path.isdir(expanded_paths[0]) or len(expanded_paths) > 1

    # Default created file name (for single-file archives) to
    # <file>.<format>
    if not dest and not archive:
        dest = '%s.%s' % (expanded_paths[0], format)

    # Force archives to specify 'dest'
    if archive and not dest:
        module.fail_json(dest=dest, path=', '.join(paths), msg='Error, must specify "dest" when archiving multiple files or trees')

    archive_paths = []
    missing = []
    arcroot = ''

    for path in expanded_paths:
        # Use the longest common directory name among all the files
        # as the archive root path
        if arcroot == '':
            arcroot = os.path.dirname(path) + os.sep
        else:
            for i in range(len(arcroot)):
                if path[i] != arcroot[i]:
                    break

            if i < len(arcroot):
                arcroot = os.path.dirname(arcroot[0:i+1])

            arcroot += os.sep

        # Don't allow archives to be created anywhere within paths to be removed
        if remove and os.path.isdir(path) and dest.startswith(path):
            module.fail_json(path=', '.join(paths), msg='Error, created archive can not be contained in source paths when remove=True')

        if os.path.lexists(path):
            archive_paths.append(path)
        else:
            missing.append(path)

    # No source files were found but the named archive exists: are we 'compress' or 'archive' now?
    if len(missing) == len(expanded_paths) and dest and os.path.exists(dest):
        # Just check the filename to know if it's an archive or simple compressed file
        if re.search(r'(\.tar|\.tar\.gz|\.tgz|\.tbz2|\.tar\.bz2|\.zip)$', os.path.basename(dest), re.IGNORECASE):
            state = 'archive'
        else:
            state = 'compress'

    # Multiple files, or globbiness
    elif archive:
        if len(archive_paths) == 0:
            # No source files were found, but the archive is there.
            if os.path.lexists(dest):
                state = 'archive'
        elif len(missing) > 0:
            # SOME source files were found, but not all of them
            state = 'incomplete'

        arcfile = None
        size = 0
        errors = []

        if os.path.lexists(dest):
            size = os.path.getsize(dest)

        if state != 'archive':
            if check_mode:
                changed = True

            else:
                try:
                    # Slightly more difficult (and less efficient!) compression using zipfile module
                    if format == 'zip':
                        arcfile = zipfile.ZipFile(dest, 'w', zipfile.ZIP_DEFLATED)

                    # Easier compression using tarfile module
                    elif format == 'gz' or format == 'bz2':
                        arcfile = tarfile.open(dest, 'w|' + format)

                    # Or plain tar archiving
                    elif format == 'tar':
                        arcfile = tarfile.open(dest, 'w')

                    match_root = re.compile('^%s' % re.escape(arcroot))
                    for path in archive_paths:
                        if os.path.isdir(path):
                            # Recurse into directories
                            for dirpath, dirnames, filenames in os.walk(path, topdown=True):
                                if not dirpath.endswith(os.sep):
                                    dirpath += os.sep

                                for dirname in dirnames:
                                    fullpath = dirpath + dirname
                                    arcname = match_root.sub('', fullpath)

                                    try:
                                        if format == 'zip':
                                            arcfile.write(fullpath, arcname)
                                        else:
                                            arcfile.add(fullpath, arcname, recursive=False)

                                    except Exception:
                                        e = get_exception()
                                        errors.append('%s: %s' % (fullpath, str(e)))

                                for filename in filenames:
                                    fullpath = dirpath + filename
                                    arcname = match_root.sub('', fullpath)

                                    if not filecmp.cmp(fullpath, dest):
                                        try:
                                            if format == 'zip':
                                                arcfile.write(fullpath, arcname)
                                            else:
                                                arcfile.add(fullpath, arcname, recursive=False)

                                            successes.append(fullpath)
                                        except Exception:
                                            e = get_exception()
                                            errors.append('Adding %s: %s' % (fullpath, str(e)))
                        else:
                            if format == 'zip':
                                arcfile.write(path, match_root.sub('', path))
                            else:
                                arcfile.add(path, match_root.sub('', path), recursive=False)

                            successes.append(path)

                except Exception:
                    e = get_exception()
                    return module.fail_json(msg='Error when writing %s archive at %s: %s' % (format == 'zip' and 'zip' or ('tar.' + format), dest, str(e)))

                if arcfile:
                    arcfile.close()
                    state = 'archive'

                if len(errors) > 0:
                    module.fail_json(msg='Errors when writing archive at %s: %s' % (dest, '; '.join(errors)))

        if state in ['archive', 'incomplete'] and remove:
            for path in successes:
                try:
                    if os.path.isdir(path):
                        shutil.rmtree(path)
                    elif not check_mode:
                        os.remove(path)
                except OSError:
                    e = get_exception()
                    errors.append(path)

            if len(errors) > 0:
                module.fail_json(dest=dest, msg='Error deleting some source files: ' + str(e), files=errors)

        # Rudimentary check: If size changed then file changed. Not perfect, but easy.
        if os.path.exists(dest) and os.path.getsize(dest) != size:
            changed = True

        if len(successes) and state != 'incomplete':
            state = 'archive'

    # Simple, single-file compression
    else:
        path = expanded_paths[0]

        # No source or compressed file
        if not (os.path.exists(path) or os.path.lexists(dest)):
            state = 'absent'

        # if it already exists and the source file isn't there, consider this done
        elif not os.path.lexists(path) and os.path.lexists(dest):
            state = 'compress'

        else:
            if module.check_mode:
                if not os.path.exists(dest):
                    changed = True
            else:
                size = 0
                f_in = f_out = arcfile = None

                if os.path.lexists(dest):
                    size = os.path.getsize(dest)

                try:
                    if format == 'zip':
                        arcfile = zipfile.ZipFile(dest, 'w', zipfile.ZIP_DEFLATED)
                        arcfile.write(path, path[len(arcroot):])
                        arcfile.close()
                        state = 'archive' # because all zip files are archives

                    else:
                        f_in = open(path, 'rb')

                        if format == 'gz':
                            f_out = gzip.open(dest, 'wb')
                        elif format == 'bz2':
                            f_out = bz2.BZ2File(dest, 'wb')
                        else:
                            raise OSError("Invalid format")

                        shutil.copyfileobj(f_in, f_out)

                    successes.append(path)

                except OSError:
                    e = get_exception()
                    module.fail_json(path=path, dest=dest, msg='Unable to write to compressed file: %s' % str(e))

                if arcfile:
                    arcfile.close()
                if f_in:
                    f_in.close()
                if f_out:
                    f_out.close()

                # Rudimentary check: If size changed then file changed. Not perfect, but easy.
                if os.path.getsize(dest) != size:
                    changed = True

            state = 'compress'

        if remove and not check_mode:
            try:
                os.remove(path)

            except OSError:
                e = get_exception()
                module.fail_json(path=path, msg='Unable to remove source file: %s' % str(e))

    params['path'] = dest
    file_args = module.load_file_common_arguments(params)

    changed = module.set_fs_attributes_if_different(file_args, changed)

    module.exit_json(archived=successes, dest=dest, changed=changed, state=state, arcroot=arcroot, missing=missing, expanded_paths=expanded_paths)
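The arcroot computation in Example #16 is a hand-rolled character-wise longest-common-prefix over the parent directories. The same idea expressed with the standard library (os.path.commonprefix is also character-based), as a sketch assuming a non-empty path list:

import os.path


def compute_arcroot(paths):
    # common prefix of the parent directories, trimmed back to a
    # separator so a path component is never split in half
    parents = [os.path.dirname(p) + os.sep for p in paths]
    prefix = os.path.commonprefix(parents)
    return prefix[:prefix.rfind(os.sep) + 1]


# compute_arcroot(['/data/a/x.txt', '/data/b/y.txt']) -> '/data/'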
Example #17
def main():
    # Module settings
    module = AnsibleModule(
        argument_spec=dict(
            # 'async' is a reserved word in Python 3.7+, so it cannot be
            # passed directly as a keyword argument
            **{'async': dict(type='bool')},
            bandwidth=dict(),
            baseurl=dict(),
            cost=dict(),
            deltarpm_metadata_percentage=dict(),
            deltarpm_percentage=dict(),
            description=dict(),
            enabled=dict(type='bool'),
            enablegroups=dict(type='bool'),
            exclude=dict(),
            failovermethod=dict(choices=['roundrobin', 'priority']),
            file=dict(),
            gpgcakey=dict(),
            gpgcheck=dict(type='bool'),
            gpgkey=dict(),
            http_caching=dict(choices=['all', 'packages', 'none']),
            include=dict(),
            includepkgs=dict(),
            ip_resolve=dict(choices=['4', '6', 'IPv4', 'IPv6', 'whatever']),
            keepalive=dict(type='bool'),
            keepcache=dict(choices=['0', '1']),
            metadata_expire=dict(),
            metadata_expire_filter=dict(
                choices=[
                    'never',
                    'read-only:past',
                    'read-only:present',
                    'read-only:future']),
            metalink=dict(),
            mirrorlist=dict(),
            mirrorlist_expire=dict(),
            name=dict(required=True),
            params=dict(type='dict'),
            password=dict(no_log=True),
            priority=dict(),
            protect=dict(type='bool'),
            proxy=dict(),
            proxy_password=dict(no_log=True),
            proxy_username=dict(),
            repo_gpgcheck=dict(type='bool'),
            reposdir=dict(default='/etc/yum.repos.d', type='path'),
            retries=dict(),
            s3_enabled=dict(type='bool'),
            skip_if_unavailable=dict(type='bool'),
            sslcacert=dict(),
            ssl_check_cert_permissions=dict(type='bool'),
            sslclientcert=dict(),
            sslclientkey=dict(),
            sslverify=dict(type='bool'),
            state=dict(choices=['present', 'absent'], default='present'),
            throttle=dict(),
            timeout=dict(),
            ui_repoid_vars=dict(),
            username=dict(),
        ),
        add_file_common_args=True,
        supports_check_mode=True,
    )

    # Update module parameters by user's parameters if defined
    if 'params' in module.params and isinstance(module.params['params'], dict):
        module.params.update(module.params['params'])
        # Remove the params
        module.params.pop('params', None)

    name = module.params['name']
    state = module.params['state']

    # Check if required parameters are present
    if state == 'present':
        if (
                module.params['baseurl'] is None and
                module.params['mirrorlist'] is None):
            module.fail_json(
                msg="Parameter 'baseurl' or 'mirrorlist' is required.")
        if module.params['description'] is None:
            module.fail_json(
                msg="Parameter 'description' is required.")

    # Rename "name" and "description" to ensure correct key sorting
    module.params['repoid'] = module.params['name']
    module.params['name'] = module.params['description']
    del module.params['description']

    # Define repo file name if it doesn't exist
    if module.params['file'] is None:
        module.params['file'] = module.params['repoid']

    # Instantiate the YumRepo object
    yumrepo = YumRepo(module)

    # Get repo status before change
    diff = {
        'before_header': yumrepo.params['dest'],
        'before': yumrepo.dump(),
        'after_header': yumrepo.params['dest'],
        'after': ''
    }

    # Perform action depending on the state
    if state == 'present':
        yumrepo.add()
    elif state == 'absent':
        yumrepo.remove()

    # Get repo status after change
    diff['after'] = yumrepo.dump()

    # Compare repo states
    changed = diff['before'] != diff['after']

    # Save the file only if not in check mode and if there was a change
    if not module.check_mode and changed:
        yumrepo.save()

    # Change file attributes if needed
    if os.path.isfile(module.params['dest']):
        file_args = module.load_file_common_arguments(module.params)
        changed = module.set_fs_attributes_if_different(file_args, changed)

    # Print status of the change
    module.exit_json(changed=changed, repo=name, state=state, diff=diff)
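The params handling at the top of Example #17, which folds a free-form dict into the regular module parameters, is a small reusable trick. Distilled into a hypothetical helper:

def merge_free_form_params(module_params):
    # fold user-supplied key/value pairs into the regular parameters,
    # then drop the 'params' container itself
    extra = module_params.get('params')
    if isinstance(extra, dict):
        module_params.update(extra)
        module_params.pop('params', None)
    return module_params


# merge_free_form_params({'name': 'x', 'params': {'enabled': True}})
# -> {'name': 'x', 'enabled': True}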
Example #18
def main():
    module = AnsibleModule(
        argument_spec=dict(
            group_id=dict(default=None),
            artifact_id=dict(default=None),
            version=dict(default="latest"),
            classifier=dict(default=''),
            extension=dict(default='jar'),
            repository_url=dict(default=None),
            username=dict(default=None, aliases=['aws_secret_key']),
            password=dict(default=None, no_log=True, aliases=['aws_secret_access_key']),
            state=dict(default="present", choices=["present", "absent"]),  # TODO - Implement a "latest" state
            timeout=dict(default=10, type='int'),
            dest=dict(type="path", default=None),
            validate_certs=dict(required=False, default=True, type='bool'),
            keep_name=dict(required=False, default=False, type='bool'),
            verify_checksum=dict(required=False, default='download', choices=['never', 'download', 'change', 'always'])
        ),
        add_file_common_args=True
    )

    if not HAS_LXML_ETREE:
        module.fail_json(msg='module requires the lxml python library installed on the managed machine')

    repository_url = module.params["repository_url"]
    if not repository_url:
        repository_url = "http://repo1.maven.org/maven2"
    try:
        parsed_url = urlparse(repository_url)
    except AttributeError as e:
        module.fail_json(msg='url parsing went wrong %s' % e)

    local = parsed_url.scheme == "file"

    if parsed_url.scheme == 's3' and not HAS_BOTO:
        module.fail_json(msg='boto3 required for this module, when using s3:// repository URLs')

    group_id = module.params["group_id"]
    artifact_id = module.params["artifact_id"]
    version = module.params["version"]
    classifier = module.params["classifier"]
    extension = module.params["extension"]
    state = module.params["state"]
    dest = module.params["dest"]
    b_dest = to_bytes(dest, errors='surrogate_or_strict')
    keep_name = module.params["keep_name"]
    verify_checksum = module.params["verify_checksum"]
    verify_download = verify_checksum in ['download', 'always']
    verify_change = verify_checksum in ['change', 'always']

    downloader = MavenDownloader(module, repository_url, local)

    try:
        artifact = Artifact(group_id, artifact_id, version, classifier, extension)
    except ValueError as e:
        module.fail_json(msg=e.args[0])

    changed = False
    prev_state = "absent"

    if dest.endswith(os.sep):
        b_dest = to_bytes(dest, errors='surrogate_or_strict')
        if not os.path.exists(b_dest):
            (pre_existing_dir, new_directory_list) = split_pre_existing_dir(dest)
            os.makedirs(b_dest)
            directory_args = module.load_file_common_arguments(module.params)
            directory_mode = module.params["directory_mode"]
            if directory_mode is not None:
                directory_args['mode'] = directory_mode
            else:
                directory_args['mode'] = None
            changed = adjust_recursive_directory_permissions(pre_existing_dir, new_directory_list, module, directory_args, changed)

    if os.path.isdir(b_dest):
        version_part = version
        if keep_name and version == 'latest':
            version_part = downloader.find_latest_version_available(artifact)

        if classifier:
            dest = posixpath.join(dest, "%s-%s-%s.%s" % (artifact_id, version_part, classifier, extension))
        else:
            dest = posixpath.join(dest, "%s-%s.%s" % (artifact_id, version_part, extension))
        b_dest = to_bytes(dest, errors='surrogate_or_strict')

    if os.path.lexists(b_dest) and ((not verify_change) or downloader.verify_md5(dest, downloader.find_uri_for_artifact(artifact))):
        prev_state = "present"

    if prev_state == "absent":
        try:
            download_error = downloader.download(artifact, verify_download, b_dest)
            if download_error is None:
                changed = True
            else:
                module.fail_json(msg="Cannot retrieve the artifact to destination: " + download_error)
        except ValueError as e:
            module.fail_json(msg=e.args[0])

    module.params['dest'] = dest
    file_args = module.load_file_common_arguments(module.params)
    changed = module.set_fs_attributes_if_different(file_args, changed)
    if changed:
        module.exit_json(state=state, dest=dest, group_id=group_id, artifact_id=artifact_id, version=version, classifier=classifier,
                         extension=extension, repository_url=repository_url, changed=changed)
    else:
        module.exit_json(state=state, dest=dest, changed=changed)
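When dest is a directory, Example #18 derives the destination file name from the artifact coordinates. The naming rule on its own, as a hypothetical helper:

import posixpath


def artifact_dest(dest_dir, artifact_id, version, extension, classifier=''):
    # '<artifact>-<version>[-<classifier>].<ext>' inside the directory
    if classifier:
        name = '%s-%s-%s.%s' % (artifact_id, version, classifier, extension)
    else:
        name = '%s-%s.%s' % (artifact_id, version, extension)
    return posixpath.join(dest_dir, name)


# artifact_dest('/opt/libs', 'guava', '31.1', 'jar') -> '/opt/libs/guava-31.1.jar'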
Example #19
File: file.py Project: 2ndQuadrant/ansible
def main():

    module = AnsibleModule(
        argument_spec=dict(
            state=dict(choices=['file', 'directory', 'link', 'hard', 'touch', 'absent'], default=None),
            path=dict(aliases=['dest', 'name'], required=True, type='path'),
            original_basename=dict(required=False),  # Internal use only, for recursive ops
            recurse=dict(default=False, type='bool'),
            force=dict(required=False, default=False, type='bool'),
            diff_peek=dict(default=None),  # Internal use only, for internal checks in the action plugins
            validate=dict(required=False, default=None),  # Internal use only, for template and copy
            src=dict(required=False, default=None, type='path'),
        ),
        add_file_common_args=True,
        supports_check_mode=True
    )

    params = module.params
    state = params['state']
    force = params['force']
    diff_peek = params['diff_peek']
    src = params['src']
    b_src = to_bytes(src, errors='surrogate_or_strict')
    follow = params['follow']

    # we may modify 'path' below and later reload and pass it on; this is
    # especially relevant when this module is used by other modules.
    path = params['path']
    b_path = to_bytes(path, errors='surrogate_or_strict')

    # short-circuit for diff_peek
    if diff_peek is not None:
        appears_binary = False
        try:
            f = open(b_path, 'rb')
            head = f.read(8192)
            f.close()
            if b("\x00") in head:
                appears_binary = True
        except:
            pass
        module.exit_json(path=path, changed=False, appears_binary=appears_binary)

    prev_state = get_state(b_path)

    # state should default to file, but since that creates many conflicts,
    # default to 'current' when it exists.
    if state is None:
        if prev_state != 'absent':
            state = prev_state
        else:
            state = 'file'

    # src is either the source of a symlink, or an informational pass-through of
    # the original src for the template or copy modules; even when this module
    # never uses it, it is needed to key off some behaviours
    if src is None:
        if state in ('link', 'hard'):
            if follow and state == 'link':
                # use the current target of the link as the source
                src = to_native(os.path.realpath(b_path), errors='strict')
                b_src = to_bytes(os.path.realpath(b_path), errors='strict')
            else:
                module.fail_json(msg='src and dest are required for creating links')

    # original_basename is used by other modules that depend on file.
    if os.path.isdir(b_path) and state not in ("link", "absent"):
        basename = None
        if params['original_basename']:
            basename = params['original_basename']
        elif src is not None:
            basename = os.path.basename(src)
        if basename:
            params['path'] = path = os.path.join(path, basename)
            b_path = to_bytes(path, errors='surrogate_or_strict')

    # make sure the target path is a directory when we're doing a recursive operation
    recurse = params['recurse']
    if recurse and state != 'directory':
        module.fail_json(path=path, msg="recurse option requires state to be 'directory'")

    file_args = module.load_file_common_arguments(params)

    changed = False
    diff = {'before': {'path': path},
            'after': {'path': path},
            }

    state_change = False
    if prev_state != state:
        diff['before']['state'] = prev_state
        diff['after']['state'] = state
        state_change = True

    if state == 'absent':
        if state_change:
            if not module.check_mode:
                if prev_state == 'directory':
                    try:
                        shutil.rmtree(b_path, ignore_errors=False)
                    except Exception:
                        e = get_exception()
                        module.fail_json(msg="rmtree failed: %s" % str(e))
                else:
                    try:
                        os.unlink(b_path)
                    except Exception:
                        e = get_exception()
                        module.fail_json(path=path, msg="unlinking failed: %s " % str(e))
            module.exit_json(path=path, changed=True, diff=diff)
        else:
            module.exit_json(path=path, changed=False)

    elif state == 'file':

        if state_change:
            if follow and prev_state == 'link':
                # follow symlink and operate on original
                b_path = os.path.realpath(b_path)
                path = to_native(b_path, errors='strict')
                prev_state = get_state(b_path)
                file_args['path'] = path

        if prev_state not in ('file', 'hard'):
            # file is not absent and any other state is a conflict
            module.fail_json(path=path, msg='file (%s) is %s, cannot continue' % (path, prev_state))

        changed = module.set_fs_attributes_if_different(file_args, changed, diff)
        module.exit_json(path=path, changed=changed, diff=diff)

    elif state == 'directory':
        if follow and prev_state == 'link':
            b_path = os.path.realpath(b_path)
            path = to_native(b_path, errors='strict')
            prev_state = get_state(b_path)

        if prev_state == 'absent':
            if module.check_mode:
                module.exit_json(changed=True, diff=diff)
            changed = True
            curpath = ''

            try:
                # Split the path so we can apply filesystem attributes recursively
                # from the root (/) directory for absolute paths or the base path
                # of a relative path.  We can then walk the appropriate directory
                # path to apply attributes.
                for dirname in path.strip('/').split('/'):
                    curpath = '/'.join([curpath, dirname])
                    # Remove leading slash if we're creating a relative path
                    if not os.path.isabs(path):
                        curpath = curpath.lstrip('/')
                    b_curpath = to_bytes(curpath, errors='surrogate_or_strict')
                    if not os.path.exists(b_curpath):
                        try:
                            os.mkdir(b_curpath)
                        except OSError:
                            ex = get_exception()
                            # Possibly something else created the dir since the os.path.exists
                            # check above. As long as it's a dir, we don't need to error out.
                            if not (ex.errno == errno.EEXIST and os.path.isdir(b_curpath)):
                                raise
                        tmp_file_args = file_args.copy()
                        tmp_file_args['path'] = curpath
                        changed = module.set_fs_attributes_if_different(tmp_file_args, changed, diff)
            except Exception:
                e = get_exception()
                module.fail_json(path=path, msg='There was an issue creating %s as requested: %s' % (curpath, str(e)))

        # We already know prev_state is not 'absent', therefore it exists in some form.
        elif prev_state != 'directory':
            module.fail_json(path=path, msg='%s already exists as a %s' % (path, prev_state))

        changed = module.set_fs_attributes_if_different(file_args, changed, diff)

        if recurse:
            changed |= recursive_set_attributes(module, to_bytes(file_args['path'], errors='surrogate_or_strict'), follow, file_args)

        module.exit_json(path=path, changed=changed, diff=diff)

    elif state in ('link', 'hard'):

        if os.path.isdir(b_path) and not os.path.islink(b_path):
            relpath = path
        else:
            b_relpath = os.path.dirname(b_path)
            relpath = to_native(b_relpath, errors='strict')

        absrc = os.path.join(relpath, src)
        b_absrc = to_bytes(absrc, errors='surrogate_or_strict')
        if not os.path.exists(b_absrc) and not force:
            module.fail_json(path=path, src=src, msg='src file does not exist, use "force=yes" if you really want to create the link: %s' % absrc)

        if state == 'hard':
            if not os.path.isabs(b_src):
                module.fail_json(msg="absolute paths are required")
        elif prev_state == 'directory':
            if not force:
                module.fail_json(path=path, msg='refusing to convert between %s and %s for %s' % (prev_state, state, path))
            elif len(os.listdir(b_path)) > 0:
                # refuse to replace a directory that has files in it
                module.fail_json(path=path, msg='the directory %s is not empty, refusing to convert it' % path)
        elif prev_state in ('file', 'hard') and not force:
            module.fail_json(path=path, msg='refusing to convert between %s and %s for %s' % (prev_state, state, path))

        if prev_state == 'absent':
            changed = True
        elif prev_state == 'link':
            b_old_src = os.readlink(b_path)
            if b_old_src != b_src:
                changed = True
        elif prev_state == 'hard':
            if not (state == 'hard' and os.stat(b_path).st_ino == os.stat(b_src).st_ino):
                changed = True
                if not force:
                    module.fail_json(dest=path, src=src, msg='Cannot link, different hard link exists at destination')
        elif prev_state in ('file', 'directory'):
            changed = True
            if not force:
                module.fail_json(dest=path, src=src, msg='Cannot link, %s exists at destination' % prev_state)
        else:
            module.fail_json(dest=path, src=src, msg='unexpected position reached')

        if changed and not module.check_mode:
            if prev_state != 'absent':
                # try to replace atomically
                b_tmppath = to_bytes(os.path.sep).join(
                    [os.path.dirname(b_path), to_bytes(".%s.%s.tmp" % (os.getpid(), time.time()))]
                )
                try:
                    if prev_state == 'directory' and (state == 'hard' or state == 'link'):
                        os.rmdir(b_path)
                    if state == 'hard':
                        os.link(b_src, b_tmppath)
                    else:
                        os.symlink(b_src, b_tmppath)
                    os.rename(b_tmppath, b_path)
                except OSError:
                    e = get_exception()
                    if os.path.exists(b_tmppath):
                        os.unlink(b_tmppath)
                    module.fail_json(path=path, msg='Error while replacing: %s' % to_native(e, nonstring='simplerepr'))
            else:
                try:
                    if state == 'hard':
                        os.link(b_src, b_path)
                    else:
                        os.symlink(b_src, b_path)
                except OSError:
                    e = get_exception()
                    module.fail_json(path=path, msg='Error while linking: %s' % to_native(e, nonstring='simplerepr'))

        if module.check_mode and not os.path.exists(b_path):
            module.exit_json(dest=path, src=src, changed=changed, diff=diff)

        changed = module.set_fs_attributes_if_different(file_args, changed, diff)
        module.exit_json(dest=path, src=src, changed=changed, diff=diff)

    elif state == 'touch':
        if not module.check_mode:

            if prev_state == 'absent':
                try:
                    open(b_path, 'wb').close()
                except OSError:
                    e = get_exception()
                    module.fail_json(path=path, msg='Error, could not touch target: %s' % to_native(e, nonstring='simplerepr'))
            elif prev_state in ('file', 'directory', 'hard'):
                try:
                    os.utime(b_path, None)
                except OSError:
                    e = get_exception()
                    module.fail_json(path=path, msg='Error while touching existing target: %s' % to_native(e, nonstring='simplerepr'))
            else:
                module.fail_json(msg='Cannot touch other than files, directories, and hardlinks (%s is %s)' % (path, prev_state))
            try:
                module.set_fs_attributes_if_different(file_args, True, diff)
            except SystemExit:
                e = get_exception()
                if e.code:
                    # We take this to mean that fail_json() was called from
                    # somewhere in basic.py
                    if prev_state == 'absent':
                        # If we just created the file we can safely remove it
                        os.remove(b_path)
                raise e

        module.exit_json(dest=path, changed=True, diff=diff)

    module.fail_json(path=path, msg='unexpected position reached')
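The replace-atomically branch of Example #19 builds the new link under a temporary name in the same directory and then rename()s it over the old path, so there is no window in which the path is missing. Reduced to its core (POSIX semantics assumed; rename is only atomic within one filesystem):

import os
import time


def replace_symlink_atomically(src, path):
    tmppath = os.path.join(os.path.dirname(path) or '.',
                           '.%s.%s.tmp' % (os.getpid(), time.time()))
    try:
        os.symlink(src, tmppath)
        os.rename(tmppath, path)  # atomically replaces the old entry
    except OSError:
        if os.path.lexists(tmppath):
            os.unlink(tmppath)
        raise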
Example #20
def run_module():
    module_args = dict(
        cluster=dict(type='str', required=False, default='ceph'),
        name=dict(type='str', required=False),
        state=dict(type='str', required=True),
        containerized=dict(type='str', required=False, default=None),
        caps=dict(type='dict', required=False, default=None),
        secret=dict(type='str', required=False, default=None),
        import_key=dict(type='bool', required=False, default=True),
        auid=dict(type='str', required=False, default=None),
        dest=dict(type='str', required=False, default='/etc/ceph/'),
    )

    module = AnsibleModule(
        argument_spec=module_args,
        supports_check_mode=True,
        add_file_common_args=True,
    )

    # Gather module parameters in variables
    state = module.params['state']
    name = module.params.get('name')
    cluster = module.params.get('cluster')
    containerized = module.params.get('containerized')
    caps = module.params.get('caps')
    secret = module.params.get('secret')
    import_key = module.params.get('import_key')
    auid = module.params.get('auid')
    dest = module.params.get('dest')

    result = dict(
        changed=False,
        stdout='',
        stderr='',
        rc='',
        start='',
        end='',
        delta='',
    )

    if module.check_mode:
        module.exit_json(**result)

    startd = datetime.datetime.now()

    # Test if the key exists, if it does we skip its creation
    # We only want to run this check when a key needs to be added
    # There is no guarantee that any cluster is running and we don't need one
    if import_key:
        rc, cmd, out, err = exec_commands(
            module, info_key(cluster, name, containerized))

    if state == "present":
        if not caps:
            fatal("Capabilities must be provided when state is 'present'", module)

        # We allow 'present' to override any existing key
        # ONLY if a secret is provided
        # if not we skip the creation
        if import_key:
            if rc == 0 and not secret:
                result["stdout"] = "skipped, since {0} already exists, if you want to update a key use 'state: update'".format(
                    name)
                result['rc'] = rc
                module.exit_json(**result)

        rc, cmd, out, err = exec_commands(module, create_key(
            module, result, cluster, name, secret, caps, import_key, auid, dest, containerized))

        file_path = os.path.join(
            dest, cluster + "." + name + ".keyring")
        file_args = module.load_file_common_arguments(module.params)
        file_args['path'] = file_path
        module.set_fs_attributes_if_different(file_args, False)
    elif state == "update":
        if not caps:
            fatal("Capabilities must be provided when state is 'update'", module)

        if rc != 0:
            result["stdout"] = "skipped, since {0} does not exist".format(name)
            result['rc'] = 0
            module.exit_json(**result)

        rc, cmd, out, err = exec_commands(
            module, update_key(cluster, name, caps, containerized))

    elif state == "absent":
        rc, cmd, out, err = exec_commands(
            module, delete_key(cluster, name, containerized))

    elif state == "info":
        if rc != 0:
            result["stdout"] = "skipped, since {0} does not exist".format(name)
            result['rc'] = 0
            module.exit_json(**result)

        rc, cmd, out, err = exec_commands(
            module, info_key(cluster, name, containerized))

    elif state == "list":
        rc, cmd, out, err = exec_commands(
            module, list_keys(cluster, containerized))

    else:
        module.fail_json(
            msg='State must either be "present" or "absent" or "update" or "list" or "info".', changed=False, rc=1)

    endd = datetime.datetime.now()
    delta = endd - startd

    result = dict(
        cmd=cmd,
        start=str(startd),
        end=str(endd),
        delta=str(delta),
        rc=rc,
        stdout=out.rstrip("\r\n"),
        stderr=err.rstrip("\r\n"),
        changed=True,
    )

    if rc != 0:
        module.fail_json(msg='non-zero return code', **result)

    module.exit_json(**result)
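Example #20 also shows the tail pattern shared by most examples in this collection: once the real output path is known, point the common file arguments (owner/group/mode, SELinux context, ...) at it and reconcile the attributes. As a distilled sketch around an existing AnsibleModule instance (the helper name is invented):

def apply_file_attributes(module, path, changed=False):
    # re-target the common file args at the file this run produced
    module.params['path'] = path
    file_args = module.load_file_common_arguments(module.params)
    file_args['path'] = path
    return module.set_fs_attributes_if_different(file_args, changed)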
Example #21
File: uri.py Project: awiddersheim/ansible
def main():
    argument_spec = url_argument_spec()
    argument_spec.update(dict(
        dest=dict(type='path'),
        url_username=dict(type='str', aliases=['user']),
        url_password=dict(type='str', aliases=['password'], no_log=True),
        body=dict(type='raw'),
        body_format=dict(type='str', default='raw', choices=['raw', 'json']),
        method=dict(type='str', default='GET', choices=['GET', 'POST', 'PUT', 'HEAD', 'DELETE', 'OPTIONS', 'PATCH', 'TRACE', 'CONNECT', 'REFRESH']),
        return_content=dict(type='bool', default='no'),
        follow_redirects=dict(type='str', default='safe', choices=['all', 'no', 'none', 'safe', 'urllib2', 'yes']),
        creates=dict(type='path'),
        removes=dict(type='path'),
        status_code=dict(type='list', default=[200]),
        timeout=dict(type='int', default=30),
        headers=dict(type='dict', default={})
    ))

    module = AnsibleModule(
        argument_spec=argument_spec,
        # TODO: Remove check_invalid_arguments in 2.9
        check_invalid_arguments=False,
        add_file_common_args=True
    )

    url = module.params['url']
    body = module.params['body']
    body_format = module.params['body_format'].lower()
    method = module.params['method']
    dest = module.params['dest']
    return_content = module.params['return_content']
    creates = module.params['creates']
    removes = module.params['removes']
    status_code = [int(x) for x in list(module.params['status_code'])]
    socket_timeout = module.params['timeout']

    dict_headers = module.params['headers']

    if body_format == 'json':
        # Encode the body unless it's a string, in which case assume it is pre-formatted JSON
        if not isinstance(body, six.string_types):
            body = json.dumps(body)
        lower_header_keys = [key.lower() for key in dict_headers]
        if 'content-type' not in lower_header_keys:
            dict_headers['Content-Type'] = 'application/json'

    # TODO: Deprecated section.  Remove in Ansible 2.9
    # Grab all the http headers. Need this hack since passing multi-values is
    # currently a bit ugly. (e.g. headers='{"Content-Type":"application/json"}')
    for key, value in six.iteritems(module.params):
        if key.startswith("HEADER_"):
            module.deprecate('Supplying headers via HEADER_* is deprecated. Please use `headers` to'
                             ' supply headers for the request', version='2.9')
            skey = key.replace("HEADER_", "")
            dict_headers[skey] = value
    # End deprecated section

    if creates is not None:
        # do not issue the request if 'creates' names a file that
        # already exists.  This allows idempotence of uri executions.
        if os.path.exists(creates):
            module.exit_json(stdout="skipped, since '%s' exists" % creates, changed=False, rc=0)

    if removes is not None:
        # do not issue the request if 'removes' names a file that
        # does not exist.  This allows idempotence of uri executions.
        if not os.path.exists(removes):
            module.exit_json(stdout="skipped, since '%s' does not exist" % removes, changed=False, rc=0)

    # Make the request
    resp, content, dest = uri(module, url, dest, body, body_format, method,
                              dict_headers, socket_timeout)
    resp['status'] = int(resp['status'])

    # Write the file out if requested
    if dest is not None:
        if resp['status'] == 304:
            changed = False
        else:
            write_file(module, url, dest, content)
            # allow file attribute changes
            changed = True
            module.params['path'] = dest
            file_args = module.load_file_common_arguments(module.params)
            file_args['path'] = dest
            changed = module.set_fs_attributes_if_different(file_args, changed)
        resp['path'] = dest
    else:
        changed = False

    # Transmogrify the headers, replacing '-' with '_', since variables don't
    # work with dashes.
    # In python3, the headers are title cased.  Lowercase them to be
    # compatible with the python2 behaviour.
    uresp = {}
    for key, value in six.iteritems(resp):
        ukey = key.replace("-", "_").lower()
        uresp[ukey] = value

    try:
        uresp['location'] = absolute_location(url, uresp['location'])
    except KeyError:
        pass

    # Default content_encoding to try
    content_encoding = 'utf-8'
    if 'content_type' in uresp:
        content_type, params = cgi.parse_header(uresp['content_type'])
        if 'charset' in params:
            content_encoding = params['charset']
        u_content = to_text(content, encoding=content_encoding)
        if any(candidate in content_type for candidate in JSON_CANDIDATES):
            try:
                js = json.loads(u_content)
                uresp['json'] = js
            except:
                pass
    else:
        u_content = to_text(content, encoding=content_encoding)

    if resp['status'] not in status_code:
        uresp['msg'] = 'Status code was %s and not %s: %s' % (resp['status'], status_code, uresp.get('msg', ''))
        module.fail_json(content=u_content, **uresp)
    elif return_content:
        module.exit_json(changed=changed, content=u_content, **uresp)
    else:
        module.exit_json(changed=changed, **uresp)
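The response-header normalization near the end of Example #21 ('-' to '_', lowercased keys) is what makes the headers addressable as Ansible variables. On its own:

def normalize_headers(resp):
    # 'Content-Type' -> 'content_type'; dashes are not valid in variable
    # names, and lowercasing smooths over py2/py3 title-casing differences
    return dict((key.replace('-', '_').lower(), value)
                for key, value in resp.items())


# normalize_headers({'Content-Type': 'text/html'}) -> {'content_type': 'text/html'}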
Example #22
def main():

    module = AnsibleModule(
        # not checking because of daisy chain to file module
        argument_spec=dict(
            src=dict(type='path'),
            _original_basename=dict(
                type='str'
            ),  # used to handle 'dest is a directory' via template, a slight hack
            content=dict(type='str', no_log=True),
            dest=dict(type='path', required=True),
            backup=dict(type='bool', default=False),
            force=dict(type='bool', default=True, aliases=['thirsty']),
            validate=dict(type='str'),
            directory_mode=dict(type='raw'),
            remote_src=dict(type='bool'),
            local_follow=dict(type='bool'),
            checksum=dict(),
        ),
        add_file_common_args=True,
        supports_check_mode=True,
    )

    src = module.params['src']
    b_src = to_bytes(src, errors='surrogate_or_strict')
    dest = module.params['dest']
    # Make sure we always have a directory component for later processing
    if os.path.sep not in dest:
        dest = '.{0}{1}'.format(os.path.sep, dest)
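    # e.g. dest='file.txt' becomes './file.txt', so os.path.dirname(dest)
    # below never returns the empty string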
    b_dest = to_bytes(dest, errors='surrogate_or_strict')
    backup = module.params['backup']
    force = module.params['force']
    _original_basename = module.params.get('_original_basename', None)
    validate = module.params.get('validate', None)
    follow = module.params['follow']
    local_follow = module.params['local_follow']
    mode = module.params['mode']
    owner = module.params['owner']
    group = module.params['group']
    remote_src = module.params['remote_src']
    checksum = module.params['checksum']

    if not os.path.exists(b_src):
        module.fail_json(msg="Source %s not found" % (src))
    if not os.access(b_src, os.R_OK):
        module.fail_json(msg="Source %s not readable" % (src))

    # Preserve is usually handled in the action plugin but mode + remote_src has to be done on the
    # remote host
    if module.params['mode'] == 'preserve':
        module.params['mode'] = '0%03o' % stat.S_IMODE(os.stat(b_src).st_mode)
    mode = module.params['mode']

    checksum_dest = None

    if os.path.isfile(src):
        checksum_src = module.sha1(src)
    else:
        checksum_src = None

    # Backwards compat only.  This will be None in FIPS mode
    try:
        if os.path.isfile(src):
            md5sum_src = module.md5(src)
        else:
            md5sum_src = None
    except ValueError:
        md5sum_src = None

    changed = False

    if checksum and checksum_src != checksum:
        module.fail_json(
            msg='Copied file does not match the expected checksum. Transfer failed.',
            checksum=checksum_src,
            expected_checksum=checksum)

    # Special handling for recursive copy - create intermediate dirs
    if _original_basename and dest.endswith(os.sep):
        dest = os.path.join(dest, _original_basename)
        b_dest = to_bytes(dest, errors='surrogate_or_strict')
        dirname = os.path.dirname(dest)
        b_dirname = to_bytes(dirname, errors='surrogate_or_strict')
        if not os.path.exists(b_dirname):
            try:
                (pre_existing_dir,
                 new_directory_list) = split_pre_existing_dir(dirname)
            except AnsibleModuleError as e:
                e.results['msg'] += ' Could not copy to {0}'.format(dest)
                module.fail_json(**e.results)

            os.makedirs(b_dirname)
            directory_args = module.load_file_common_arguments(module.params)
            directory_mode = module.params["directory_mode"]
            if directory_mode is not None:
                directory_args['mode'] = directory_mode
            else:
                directory_args['mode'] = None
            adjust_recursive_directory_permissions(pre_existing_dir,
                                                   new_directory_list, module,
                                                   directory_args, changed)

    if os.path.isdir(b_dest):
        basename = os.path.basename(src)
        if _original_basename:
            basename = _original_basename
        dest = os.path.join(dest, basename)
        b_dest = to_bytes(dest, errors='surrogate_or_strict')

    if os.path.exists(b_dest):
        if os.path.islink(b_dest) and follow:
            b_dest = os.path.realpath(b_dest)
            dest = to_native(b_dest, errors='surrogate_or_strict')
        if not force:
            module.exit_json(msg="file already exists",
                             src=src,
                             dest=dest,
                             changed=False)
        if os.access(b_dest, os.R_OK) and os.path.isfile(dest):
            checksum_dest = module.sha1(dest)
    else:
        if not os.path.exists(os.path.dirname(b_dest)):
            try:
                # os.path.exists() can return false in some
                # circumstances where the directory does not have
                # the execute bit for the current user set, in
                # which case the stat() call will raise an OSError
                os.stat(os.path.dirname(b_dest))
            except OSError as e:
                if "permission denied" in to_native(e).lower():
                    module.fail_json(
                        msg="Destination directory %s is not accessible" %
                        (os.path.dirname(dest)))
            module.fail_json(msg="Destination directory %s does not exist" %
                             (os.path.dirname(dest)))

    if not os.access(os.path.dirname(b_dest),
                     os.W_OK) and not module.params['unsafe_writes']:
        module.fail_json(msg="Destination %s not writable" %
                         (os.path.dirname(dest)))

    backup_file = None
    if checksum_src != checksum_dest or os.path.islink(b_dest):
        if not module.check_mode:
            try:
                if backup:
                    if os.path.exists(b_dest):
                        backup_file = module.backup_local(dest)
                # allow for conversion from symlink.
                if os.path.islink(b_dest):
                    os.unlink(b_dest)
                    open(b_dest, 'w').close()
                if validate:
                    # if we have a mode, make sure we set it on the temporary
                    # file source as some validations may require it
                    if mode is not None:
                        module.set_mode_if_different(src, mode, False)
                    if owner is not None:
                        module.set_owner_if_different(src, owner, False)
                    if group is not None:
                        module.set_group_if_different(src, group, False)
                    if "%s" not in validate:
                        module.fail_json(msg="validate must contain %%s: %s" %
                                         (validate))
                    (rc, out, err) = module.run_command(validate % src)
                    if rc != 0:
                        module.fail_json(msg="failed to validate",
                                         exit_status=rc,
                                         stdout=out,
                                         stderr=err)
                b_mysrc = b_src
                if remote_src and os.path.isfile(b_src):
                    _, b_mysrc = tempfile.mkstemp(dir=os.path.dirname(b_dest))

                    shutil.copyfile(b_src, b_mysrc)
                    try:
                        shutil.copystat(b_src, b_mysrc)
                    except OSError as err:
                        if err.errno == errno.ENOSYS and mode == "preserve":
                            module.warn("Unable to copy stats {0}".format(
                                to_native(b_src)))
                        else:
                            raise
                module.atomic_move(
                    b_mysrc,
                    dest,
                    unsafe_writes=module.params['unsafe_writes'])
            except (IOError, OSError):
                module.fail_json(msg="failed to copy: %s to %s" % (src, dest),
                                 traceback=traceback.format_exc())
        changed = True
    else:
        changed = False

    if checksum_src is None and checksum_dest is None:
        if remote_src and os.path.isdir(module.params['src']):
            b_src = to_bytes(module.params['src'],
                             errors='surrogate_or_strict')
            b_dest = to_bytes(module.params['dest'],
                              errors='surrogate_or_strict')

            if src.endswith(os.path.sep) and os.path.isdir(
                    module.params['dest']):
                diff_files_changed = copy_diff_files(b_src, b_dest, module)
                left_only_changed = copy_left_only(b_src, b_dest, module)
                common_dirs_changed = copy_common_dirs(b_src, b_dest, module)
                owner_group_changed = chown_recursive(b_dest, module)
                if diff_files_changed or left_only_changed or common_dirs_changed or owner_group_changed:
                    changed = True

            if src.endswith(
                    os.path.sep) and not os.path.exists(module.params['dest']):
                b_basename = to_bytes(os.path.basename(src),
                                      errors='surrogate_or_strict')
                b_dest = to_bytes(os.path.join(b_dest, b_basename),
                                  errors='surrogate_or_strict')
                b_src = to_bytes(os.path.join(module.params['src'], ""),
                                 errors='surrogate_or_strict')
                if not module.check_mode:
                    shutil.copytree(b_src, b_dest, symlinks=not local_follow)
                chown_recursive(dest, module)
                changed = True

            if not src.endswith(os.path.sep) and os.path.isdir(
                    module.params['dest']):
                b_basename = to_bytes(os.path.basename(src),
                                      errors='surrogate_or_strict')
                b_dest = to_bytes(os.path.join(b_dest, b_basename),
                                  errors='surrogate_or_strict')
                b_src = to_bytes(os.path.join(module.params['src'], ""),
                                 errors='surrogate_or_strict')
                if not module.check_mode and not os.path.exists(b_dest):
                    shutil.copytree(b_src, b_dest, symlinks=not local_follow)
                    changed = True
                    chown_recursive(dest, module)
                if module.check_mode and not os.path.exists(b_dest):
                    changed = True
                if os.path.exists(b_dest):
                    diff_files_changed = copy_diff_files(b_src, b_dest, module)
                    left_only_changed = copy_left_only(b_src, b_dest, module)
                    common_dirs_changed = copy_common_dirs(
                        b_src, b_dest, module)
                    owner_group_changed = chown_recursive(b_dest, module)
                    if diff_files_changed or left_only_changed or common_dirs_changed or owner_group_changed:
                        changed = True

            if not src.endswith(os.path.sep) and not os.path.exists(
                    module.params['dest']):
                b_basename = to_bytes(os.path.basename(module.params['src']),
                                      errors='surrogate_or_strict')
                b_dest = to_bytes(os.path.join(b_dest, b_basename),
                                  errors='surrogate_or_strict')
                if not module.check_mode and not os.path.exists(b_dest):
                    os.makedirs(b_dest)
                    b_src = to_bytes(os.path.join(module.params['src'], ""),
                                     errors='surrogate_or_strict')
                    diff_files_changed = copy_diff_files(b_src, b_dest, module)
                    left_only_changed = copy_left_only(b_src, b_dest, module)
                    common_dirs_changed = copy_common_dirs(
                        b_src, b_dest, module)
                    owner_group_changed = chown_recursive(b_dest, module)
                    if diff_files_changed or left_only_changed or common_dirs_changed or owner_group_changed:
                        changed = True
                if module.check_mode and not os.path.exists(b_dest):
                    changed = True

    res_args = dict(dest=dest,
                    src=src,
                    md5sum=md5sum_src,
                    checksum=checksum_src,
                    changed=changed)
    if backup_file:
        res_args['backup_file'] = backup_file

    module.params['dest'] = dest
    if not module.check_mode:
        file_args = module.load_file_common_arguments(module.params)
        res_args['changed'] = module.set_fs_attributes_if_different(
            file_args, res_args['changed'])

    module.exit_json(**res_args)
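
At its core, the copy above is checksum-gated and ends in an atomic rename. A reduced sketch of that pattern with hypothetical helper names (plain os.rename stands in for module.atomic_move, which additionally handles attributes):

import hashlib
import os
import shutil
import tempfile

def sha1_of(path):
    # Stream the file so large files are not loaded into memory at once.
    digest = hashlib.sha1()
    with open(path, 'rb') as f:
        for chunk in iter(lambda: f.read(65536), b''):
            digest.update(chunk)
    return digest.hexdigest()

def copy_if_changed(src, dest):
    if os.path.exists(dest) and sha1_of(src) == sha1_of(dest):
        return False  # already in the desired state, nothing to do
    # Stage next to the destination so the final rename stays on one
    # filesystem and is therefore atomic.
    fd, staged = tempfile.mkstemp(dir=os.path.dirname(os.path.abspath(dest)))
    os.close(fd)
    shutil.copyfile(src, staged)
    os.rename(staged, dest)
    return True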
Example #23
0
def main():
    argument_spec = url_argument_spec()
    argument_spec.update(
        url=dict(type='str', required=True),
        dest=dict(type='path', required=True),
        backup=dict(type='bool'),
        sha256sum=dict(type='str', default=''),
        checksum=dict(type='str', default=''),
        timeout=dict(type='int', default=10),
        headers=dict(type='raw'),
        tmp_dest=dict(type='path'),
    )

    module = AnsibleModule(
        # not checking because of daisy chain to file module
        argument_spec=argument_spec,
        add_file_common_args=True,
        supports_check_mode=True,
        mutually_exclusive=[['checksum', 'sha256sum']],
    )

    url = module.params['url']
    dest = module.params['dest']
    backup = module.params['backup']
    force = module.params['force']
    sha256sum = module.params['sha256sum']
    checksum = module.params['checksum']
    use_proxy = module.params['use_proxy']
    timeout = module.params['timeout']
    tmp_dest = module.params['tmp_dest']

    # Parse headers to dict
    if isinstance(module.params['headers'], dict):
        headers = module.params['headers']
    elif module.params['headers']:
        try:
            headers = dict(item.split(':', 1) for item in module.params['headers'].split(','))
            module.deprecate('Supplying `headers` as a string is deprecated. Please use dict/hash format for `headers`', version='2.10')
        except Exception:
            module.fail_json(msg="The string representation for the `headers` parameter requires a key:value,key:value syntax to be properly parsed.")
    else:
        headers = None

    dest_is_dir = os.path.isdir(dest)
    last_mod_time = None

    # workaround for usage of deprecated sha256sum parameter
    if sha256sum:
        checksum = 'sha256:%s' % (sha256sum)

    # checksum specified, parse for algorithm and checksum
    if checksum:
        try:
            algorithm, checksum = checksum.rsplit(':', 1)
            # Remove any non-alphanumeric characters, including the infamous
            # Unicode zero-width space
            checksum = re.sub(r'\W+', '', checksum).lower()
            # Ensure the checksum portion is a hexdigest
            int(checksum, 16)
        except ValueError:
            module.fail_json(msg="The checksum parameter has to be in format <algorithm>:<checksum>")

    if not dest_is_dir and os.path.exists(dest):
        checksum_mismatch = False

        # If the download is not forced and there is a checksum, allow
        # checksum match to skip the download.
        if not force and checksum != '':
            destination_checksum = module.digest_from_file(dest, algorithm)

            if checksum == destination_checksum:
                module.exit_json(msg="file already exists", dest=dest, url=url, changed=False)

            checksum_mismatch = True

        # Not forcing redownload, unless checksum does not match
        if not force and not checksum_mismatch:
            # allow file attribute changes
            module.params['path'] = dest
            file_args = module.load_file_common_arguments(module.params)
            file_args['path'] = dest
            changed = module.set_fs_attributes_if_different(file_args, False)

            if changed:
                module.exit_json(msg="file already exists but file attributes changed", dest=dest, url=url, changed=changed)
            module.exit_json(msg="file already exists", dest=dest, url=url, changed=changed)

        # If the file already exists, prepare the last modified time for the
        # request.
        mtime = os.path.getmtime(dest)
        last_mod_time = datetime.datetime.utcfromtimestamp(mtime)

        # If the checksum does not match we have to force the download
        # because last_mod_time may be newer than on remote
        if checksum_mismatch:
            force = True

    # download to tmpsrc
    tmpsrc, info = url_get(module, url, dest, use_proxy, last_mod_time, force, timeout, headers, tmp_dest)

    # Now the request has completed, we can finally generate the final
    # destination file name from the info dict.

    if dest_is_dir:
        filename = extract_filename_from_headers(info)
        if not filename:
            # Fall back to extracting the filename from the URL.
            # Pluck the URL from the info, since a redirect could have changed
            # it.
            filename = url_filename(info['url'])
        dest = os.path.join(dest, filename)

    checksum_src = None
    checksum_dest = None

    # If the remote URL exists, we're done with check mode
    if module.check_mode:
        os.remove(tmpsrc)
        res_args = dict(url=url, dest=dest, src=tmpsrc, changed=True, msg=info.get('msg', ''))
        module.exit_json(**res_args)

    # raise an error if there is no tmpsrc file
    if not os.path.exists(tmpsrc):
        module.fail_json(msg="Request failed", status_code=info['status'], response=info['msg'])
    if not os.access(tmpsrc, os.R_OK):
        os.remove(tmpsrc)
        module.fail_json(msg="Source %s is not readable" % (tmpsrc))
    checksum_src = module.sha1(tmpsrc)

    # check if there is no dest file
    if os.path.exists(dest):
        # raise an error if copy has no permission on dest
        if not os.access(dest, os.W_OK):
            os.remove(tmpsrc)
            module.fail_json(msg="Destination %s is not writable" % (dest))
        if not os.access(dest, os.R_OK):
            os.remove(tmpsrc)
            module.fail_json(msg="Destination %s is not readable" % (dest))
        checksum_dest = module.sha1(dest)
    else:
        if not os.path.exists(os.path.dirname(dest)):
            os.remove(tmpsrc)
            module.fail_json(msg="Destination %s does not exist" % (os.path.dirname(dest)))
        if not os.access(os.path.dirname(dest), os.W_OK):
            os.remove(tmpsrc)
            module.fail_json(msg="Destination %s is not writable" % (os.path.dirname(dest)))

    backup_file = None
    if checksum_src != checksum_dest:
        try:
            if backup:
                if os.path.exists(dest):
                    backup_file = module.backup_local(dest)
            module.atomic_move(tmpsrc, dest)
        except Exception as e:
            if os.path.exists(tmpsrc):
                os.remove(tmpsrc)
            module.fail_json(msg="failed to copy %s to %s: %s" % (tmpsrc, dest, to_native(e)),
                             exception=traceback.format_exc())
        changed = True
    else:
        changed = False
        if os.path.exists(tmpsrc):
            os.remove(tmpsrc)

    if checksum != '':
        destination_checksum = module.digest_from_file(dest, algorithm)

        if checksum != destination_checksum:
            os.remove(dest)
            module.fail_json(msg="The checksum for %s did not match %s; it was %s." % (dest, checksum, destination_checksum))

    # allow file attribute changes
    module.params['path'] = dest
    file_args = module.load_file_common_arguments(module.params)
    file_args['path'] = dest
    changed = module.set_fs_attributes_if_different(file_args, changed)

    # Backwards compat only.  We'll return None on FIPS enabled systems
    try:
        md5sum = module.md5(dest)
    except ValueError:
        md5sum = None

    res_args = dict(
        url=url, dest=dest, src=tmpsrc, md5sum=md5sum, checksum_src=checksum_src,
        checksum_dest=checksum_dest, changed=changed, msg=info.get('msg', ''), status_code=info.get('status', '')
    )
    if backup_file:
        res_args['backup_file'] = backup_file

    # Mission complete
    module.exit_json(**res_args)
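
A standalone sketch of the <algorithm>:<hexdigest> parsing above, including the non-word-character scrub (which also drops a pasted zero-width space) and the hexdigest sanity check:

import re

def parse_checksum(spec):
    try:
        algorithm, digest = spec.rsplit(':', 1)
    except ValueError:
        raise ValueError('checksum must be in format <algorithm>:<checksum>')
    # Remove any non-word characters, e.g. an accidental zero-width space.
    digest = re.sub(r'\W+', '', digest).lower()
    int(digest, 16)  # raises ValueError unless digest is valid hex
    return algorithm, digest

print(parse_checksum('sha256:DEADbeef01'))  # ('sha256', 'deadbeef01')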
Example #24
0
def main():
    argument_spec = url_argument_spec()

    # setup aliases
    argument_spec['url_username']['aliases'] = ['username']
    argument_spec['url_password']['aliases'] = ['password']

    argument_spec.update(
        url=dict(type='str', required=True),
        dest=dict(type='path', required=True),
        backup=dict(type='bool', default=False),
        checksum=dict(type='str', default=''),
        timeout=dict(type='int', default=10),
        headers=dict(type='dict'),
        tmp_dest=dict(type='path'),
        unredirected_headers=dict(type='list', elements='str', default=[]),
    )

    module = AnsibleModule(
        # not checking because of daisy chain to file module
        argument_spec=argument_spec,
        add_file_common_args=True,
        supports_check_mode=True,
    )

    url = module.params['url']
    dest = module.params['dest']
    backup = module.params['backup']
    force = module.params['force']
    checksum = module.params['checksum']
    use_proxy = module.params['use_proxy']
    timeout = module.params['timeout']
    headers = module.params['headers']
    tmp_dest = module.params['tmp_dest']
    unredirected_headers = module.params['unredirected_headers']

    result = dict(
        changed=False,
        checksum_dest=None,
        checksum_src=None,
        dest=dest,
        elapsed=0,
        url=url,
    )

    dest_is_dir = os.path.isdir(dest)
    last_mod_time = None

    # checksum specified, parse for algorithm and checksum
    if checksum:
        try:
            algorithm, checksum = checksum.split(':', 1)
        except ValueError:
            module.fail_json(
                msg="The checksum parameter has to be in format <algorithm>:<checksum>",
                **result)

        if is_url(checksum):
            checksum_url = checksum
            # download checksum file to checksum_tmpsrc
            checksum_tmpsrc, checksum_info = url_get(
                module,
                checksum_url,
                dest,
                use_proxy,
                last_mod_time,
                force,
                timeout,
                headers,
                tmp_dest,
                unredirected_headers=unredirected_headers)
            with open(checksum_tmpsrc) as f:
                lines = [line.rstrip('\n') for line in f]
            os.remove(checksum_tmpsrc)
            checksum_map = []
            filename = url_filename(url)
            if len(lines) == 1 and len(lines[0].split()) == 1:
                # Only a single line with a single string
                # treat it as a checksum only file
                checksum_map.append((lines[0], filename))
            else:
                # The assumption here is the file is in the format of
                # checksum filename
                for line in lines:
                    # Split by one whitespace to keep the leading type char ' ' (whitespace) for text and '*' for binary
                    parts = line.split(" ", 1)
                    if len(parts) == 2:
                        # Remove the leading type char, we expect
                        if parts[1].startswith((" ", "*")):
                            parts[1] = parts[1][1:]

                        # Append checksum and path without potential leading './'
                        checksum_map.append((parts[0], parts[1].lstrip("./")))

            # Look through each line in the checksum file for a hash corresponding to
            # the filename in the url, returning the first hash that is found.
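            # (for/else: the else branch below runs only when the loop
            # finished without a break, i.e. no entry matched the filename.)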
            for cksum in (s for (s, f) in checksum_map if f == filename):
                checksum = cksum
                break
            else:
                checksum = None

            if checksum is None:
                module.fail_json(
                    msg="Unable to find a checksum for file '%s' in '%s'" %
                    (filename, checksum_url))
        # Remove any non-alphanumeric characters, including the infamous
        # Unicode zero-width space
        checksum = re.sub(r'\W+', '', checksum).lower()
        # Ensure the checksum portion is a hexdigest
        try:
            int(checksum, 16)
        except ValueError:
            module.fail_json(msg='The checksum format is invalid', **result)

    if not dest_is_dir and os.path.exists(dest):
        checksum_mismatch = False

        # If the download is not forced and there is a checksum, allow
        # checksum match to skip the download.
        if not force and checksum != '':
            destination_checksum = module.digest_from_file(dest, algorithm)

            if checksum != destination_checksum:
                checksum_mismatch = True

        # Not forcing redownload, unless checksum does not match
        if not force and checksum and not checksum_mismatch:
            # allow file attribute changes
            file_args = module.load_file_common_arguments(module.params,
                                                          path=dest)
            result['changed'] = module.set_fs_attributes_if_different(
                file_args, False)
            if result['changed']:
                module.exit_json(
                    msg="file already exists but file attributes changed",
                    **result)
            module.exit_json(msg="file already exists", **result)

        # If the file already exists, prepare the last modified time for the
        # request.
        mtime = os.path.getmtime(dest)
        last_mod_time = datetime.datetime.utcfromtimestamp(mtime)

        # If the checksum does not match we have to force the download
        # because last_mod_time may be newer than on remote
        if checksum_mismatch:
            force = True

    # download to tmpsrc
    start = datetime.datetime.utcnow()
    method = 'HEAD' if module.check_mode else 'GET'
    tmpsrc, info = url_get(module,
                           url,
                           dest,
                           use_proxy,
                           last_mod_time,
                           force,
                           timeout,
                           headers,
                           tmp_dest,
                           method,
                           unredirected_headers=unredirected_headers)
    result['elapsed'] = (datetime.datetime.utcnow() - start).seconds
    result['src'] = tmpsrc

    # Now the request has completed, we can finally generate the final
    # destination file name from the info dict.

    if dest_is_dir:
        filename = extract_filename_from_headers(info)
        if not filename:
            # Fall back to extracting the filename from the URL.
            # Pluck the URL from the info, since a redirect could have changed
            # it.
            filename = url_filename(info['url'])
        dest = os.path.join(dest, filename)
        result['dest'] = dest

    # raise an error if there is no tmpsrc file
    if not os.path.exists(tmpsrc):
        module.fail_json(msg="Request failed",
                         status_code=info['status'],
                         response=info['msg'],
                         **result)
    if not os.access(tmpsrc, os.R_OK):
        os.remove(tmpsrc)
        module.fail_json(msg="Source %s is not readable" % (tmpsrc), **result)
    result['checksum_src'] = module.sha1(tmpsrc)

    # check if there is no dest file
    if os.path.exists(dest):
        # raise an error if copy has no permission on dest
        if not os.access(dest, os.W_OK):
            os.remove(tmpsrc)
            module.fail_json(msg="Destination %s is not writable" % (dest),
                             **result)
        if not os.access(dest, os.R_OK):
            os.remove(tmpsrc)
            module.fail_json(msg="Destination %s is not readable" % (dest),
                             **result)
        result['checksum_dest'] = module.sha1(dest)
    else:
        if not os.path.exists(os.path.dirname(dest)):
            os.remove(tmpsrc)
            module.fail_json(msg="Destination %s does not exist" %
                             (os.path.dirname(dest)),
                             **result)
        if not os.access(os.path.dirname(dest), os.W_OK):
            os.remove(tmpsrc)
            module.fail_json(msg="Destination %s is not writable" %
                             (os.path.dirname(dest)),
                             **result)

    if module.check_mode:
        if os.path.exists(tmpsrc):
            os.remove(tmpsrc)
        result['changed'] = ('checksum_dest' not in result or
                             result['checksum_src'] != result['checksum_dest'])
        module.exit_json(msg=info.get('msg', ''), **result)

    backup_file = None
    if result['checksum_src'] != result['checksum_dest']:
        try:
            if backup:
                if os.path.exists(dest):
                    backup_file = module.backup_local(dest)
            module.atomic_move(tmpsrc,
                               dest,
                               unsafe_writes=module.params['unsafe_writes'])
        except Exception as e:
            if os.path.exists(tmpsrc):
                os.remove(tmpsrc)
            module.fail_json(msg="failed to copy %s to %s: %s" %
                             (tmpsrc, dest, to_native(e)),
                             exception=traceback.format_exc(),
                             **result)
        result['changed'] = True
    else:
        result['changed'] = False
        if os.path.exists(tmpsrc):
            os.remove(tmpsrc)

    if checksum != '':
        destination_checksum = module.digest_from_file(dest, algorithm)

        if checksum != destination_checksum:
            os.remove(dest)
            module.fail_json(
                msg="The checksum for %s did not match %s; it was %s." %
                (dest, checksum, destination_checksum),
                **result)

    # allow file attribute changes
    file_args = module.load_file_common_arguments(module.params, path=dest)
    result['changed'] = module.set_fs_attributes_if_different(
        file_args, result['changed'])

    # Backwards compat only.  We'll return None on FIPS enabled systems
    try:
        result['md5sum'] = module.md5(dest)
    except ValueError:
        result['md5sum'] = None

    if backup_file:
        result['backup_file'] = backup_file

    # Mission complete
    module.exit_json(msg=info.get('msg', ''),
                     status_code=info.get('status', ''),
                     **result)
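
The checksum-file branch above accepts either a bare digest or a sha256sum-style manifest. The lookup in isolation, with the original for/else replaced by an explicit return:

def find_checksum(lines, filename):
    if len(lines) == 1 and len(lines[0].split()) == 1:
        # A single bare digest: assume it belongs to our file.
        return lines[0]
    for line in lines:
        parts = line.split(' ', 1)
        if len(parts) != 2:
            continue
        digest, name = parts
        # Drop the ' ' (text) or '*' (binary) marker, then any leading './'.
        if name[:1] in (' ', '*'):
            name = name[1:]
        if name.lstrip('./') == filename:
            return digest
    return None

lines = ['0a1b2c *release.tar.gz', '3d4e5f  ./README.md']
print(find_checksum(lines, 'README.md'))  # 3d4e5f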
Example #25
0
File: assemble.py  Project: ernstp/ansible
def main():

    module = AnsibleModule(
        # not checking because of daisy chain to file module
        argument_spec=dict(
            src=dict(required=True, type='path'),
            delimiter=dict(required=False),
            dest=dict(required=True, type='path'),
            backup=dict(default=False, type='bool'),
            remote_src=dict(default=False, type='bool'),
            regexp=dict(required=False),
            ignore_hidden=dict(default=False, type='bool'),
            validate=dict(required=False, type='str'),
        ),
        add_file_common_args=True
    )

    changed = False
    path_hash = None
    dest_hash = None
    src = module.params['src']
    dest = module.params['dest']
    backup = module.params['backup']
    delimiter = module.params['delimiter']
    regexp = module.params['regexp']
    compiled_regexp = None
    ignore_hidden = module.params['ignore_hidden']
    validate = module.params.get('validate', None)

    result = dict(src=src, dest=dest)
    if not os.path.exists(src):
        module.fail_json(msg="Source (%s) does not exist" % src)

    if not os.path.isdir(src):
        module.fail_json(msg="Source (%s) is not a directory" % src)

    if regexp is not None:
        try:
            compiled_regexp = re.compile(regexp)
        except re.error as e:
            module.fail_json(msg="Invalid Regexp (%s) in \"%s\"" % (to_native(e), regexp))

    if validate and "%s" not in validate:
        module.fail_json(msg="validate must contain %%s: %s" % validate)

    path = assemble_from_fragments(src, delimiter, compiled_regexp, ignore_hidden)
    path_hash = module.sha1(path)
    result['checksum'] = path_hash

    # Backwards compat.  This won't return data if FIPS mode is active
    try:
        pathmd5 = module.md5(path)
    except ValueError:
        pathmd5 = None
    result['md5sum'] = pathmd5

    if os.path.exists(dest):
        dest_hash = module.sha1(dest)

    if path_hash != dest_hash:
        if validate:
            (rc, out, err) = module.run_command(validate % path)
            result['validation'] = dict(rc=rc, stdout=out, stderr=err)
            if rc != 0:
                cleanup(path)
                module.fail_json(msg="failed to validate: rc:%s error:%s" % (rc, err))
        if backup and dest_hash is not None:
            result['backup_file'] = module.backup_local(dest)

        module.atomic_move(path, dest, unsafe_writes=module.params['unsafe_writes'])
        changed = True

    cleanup(path, result)

    # handle file permissions
    file_args = module.load_file_common_arguments(module.params)
    result['changed'] = module.set_fs_attributes_if_different(file_args, changed)

    # Mission complete
    result['msg'] = "OK"
    module.exit_json(**result)
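
assemble_from_fragments is defined elsewhere in the module; a plausible minimal sketch of such a helper, assuming fragments are concatenated in sorted filename order, optionally filtered by a compiled regexp, with an optional delimiter between them:

import os
import tempfile

def assemble_from_fragments(src_dir, delimiter=None, compiled_regexp=None,
                            ignore_hidden=False):
    fd, tmppath = tempfile.mkstemp()
    first = True
    with os.fdopen(fd, 'w') as out:
        for name in sorted(os.listdir(src_dir)):
            fragment = os.path.join(src_dir, name)
            if not os.path.isfile(fragment):
                continue
            if ignore_hidden and name.startswith('.'):
                continue
            if compiled_regexp and not compiled_regexp.search(name):
                continue
            if delimiter is not None and not first:
                out.write(delimiter + '\n')
            with open(fragment) as f:
                out.write(f.read())
            first = False
    return tmppath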
Example #26
0
def main():
    module = AnsibleModule(
        argument_spec=dict(
            path=dict(type='list', required=True),
            format=dict(choices=['gz', 'bz2', 'zip', 'tar'],
                        default='gz',
                        required=False),
            dest=dict(required=False, type='path'),
            remove=dict(required=False, default=False, type='bool'),
        ),
        add_file_common_args=True,
        supports_check_mode=True,
    )

    params = module.params
    check_mode = module.check_mode
    paths = params['path']
    dest = params['dest']
    remove = params['remove']

    expanded_paths = []
    format = params['format']
    globby = False
    changed = False
    state = 'absent'

    # Simple or archive file compression (inapplicable with 'zip' since it's always an archive)
    archive = False
    successes = []

    for i, path in enumerate(paths):
        path = os.path.expanduser(os.path.expandvars(path))

        # Expand any glob characters. If found, add the expanded glob to the
        # list of expanded_paths, which might be empty.
        if ('*' in path or '?' in path):
            expanded_paths = expanded_paths + glob.glob(path)
            globby = True

        # If there are no glob characters the path is added to the expanded paths
        # whether the path exists or not
        else:
            expanded_paths.append(path)

    if len(expanded_paths) == 0:
        return module.fail_json(path=', '.join(paths),
                                expanded_paths=', '.join(expanded_paths),
                                msg='Error, no source paths were found')

    # If we actually matched multiple files or TRIED to, then
    # treat this as a multi-file archive
    archive = globby or os.path.isdir(
        expanded_paths[0]) or len(expanded_paths) > 1

    # Default created file name (for single-file archives) to
    # <file>.<format>
    if not dest and not archive:
        dest = '%s.%s' % (expanded_paths[0], format)

    # Force archives to specify 'dest'
    if archive and not dest:
        module.fail_json(
            dest=dest,
            path=', '.join(paths),
            msg='Error, must specify "dest" when archiving multiple files or trees')

    archive_paths = []
    missing = []
    arcroot = ''

    for path in expanded_paths:
        # Use the longest common directory name among all the files
        # as the archive root path
        if arcroot == '':
            arcroot = os.path.dirname(path) + os.sep
        else:
            for i in range(len(arcroot)):
                if path[i] != arcroot[i]:
                    break

            if i < len(arcroot):
                arcroot = os.path.dirname(arcroot[0:i + 1])

            arcroot += os.sep

        # Don't allow archives to be created anywhere within paths to be removed
        if remove and os.path.isdir(path) and dest.startswith(path):
            module.fail_json(
                path=', '.join(paths),
                msg='Error, created archive can not be contained in source paths when remove=True')

        if os.path.lexists(path):
            archive_paths.append(path)
        else:
            missing.append(path)

    # No source files were found but the named archive exists: are we 'compress' or 'archive' now?
    if len(missing) == len(expanded_paths) and dest and os.path.exists(dest):
        # Just check the filename to know if it's an archive or simple compressed file
        if re.search(r'(\.tar|\.tar\.gz|\.tgz|\.tbz2|\.tar\.bz2|\.zip)$',
                     os.path.basename(dest), re.IGNORECASE):
            state = 'archive'
        else:
            state = 'compress'

    # Multiple files, or globbiness
    elif archive:
        if len(archive_paths) == 0:
            # No source files were found, but the archive is there.
            if os.path.lexists(dest):
                state = 'archive'
        elif len(missing) > 0:
            # SOME source files were found, but not all of them
            state = 'incomplete'

        archive = None
        size = 0
        errors = []

        if os.path.lexists(dest):
            size = os.path.getsize(dest)

        if state != 'archive':
            if check_mode:
                changed = True

            else:
                try:
                    # Slightly more difficult (and less efficient!) compression using zipfile module
                    if format == 'zip':
                        arcfile = zipfile.ZipFile(dest, 'w',
                                                  zipfile.ZIP_DEFLATED)

                    # Easier compression using tarfile module
                    elif format == 'gz' or format == 'bz2':
                        arcfile = tarfile.open(dest, 'w|' + format)

                    # Or plain tar archiving
                    elif format == 'tar':
                        arcfile = tarfile.open(dest, 'w')

                    match_root = re.compile('^%s' % re.escape(arcroot))
                    for path in archive_paths:
                        if os.path.isdir(path):
                            # Recurse into directories
                            for dirpath, dirnames, filenames in os.walk(
                                    path, topdown=True):
                                if not dirpath.endswith(os.sep):
                                    dirpath += os.sep

                                for dirname in dirnames:
                                    fullpath = dirpath + dirname
                                    arcname = match_root.sub('', fullpath)

                                    try:
                                        if format == 'zip':
                                            arcfile.write(fullpath, arcname)
                                        else:
                                            arcfile.add(fullpath,
                                                        arcname,
                                                        recursive=False)

                                    except Exception:
                                        e = get_exception()
                                        errors.append('%s: %s' %
                                                      (fullpath, str(e)))

                                for filename in filenames:
                                    fullpath = dirpath + filename
                                    arcname = match_root.sub('', fullpath)

                                    if not filecmp.cmp(fullpath, dest):
                                        try:
                                            if format == 'zip':
                                                arcfile.write(
                                                    fullpath, arcname)
                                            else:
                                                arcfile.add(fullpath,
                                                            arcname,
                                                            recursive=False)

                                            successes.append(fullpath)
                                        except Exception:
                                            e = get_exception()
                                            errors.append('Adding %s: %s' %
                                                          (path, str(e)))
                        else:
                            if format == 'zip':
                                arcfile.write(path, match_root.sub('', path))
                            else:
                                arcfile.add(path,
                                            match_root.sub('', path),
                                            recursive=False)

                            successes.append(path)

                except Exception:
                    e = get_exception()
                    return module.fail_json(
                        msg='Error when writing %s archive at %s: %s' %
                        (format == 'zip' and 'zip' or ('tar.' + format), dest,
                         str(e)))

                if arcfile:
                    arcfile.close()
                    state = 'archive'

                if len(errors) > 0:
                    module.fail_json(
                        msg='Errors when writing archive at %s: %s' %
                        (dest, '; '.join(errors)))

        if state in ['archive', 'incomplete'] and remove:
            for path in successes:
                try:
                    if os.path.isdir(path):
                        shutil.rmtree(path)
                    elif not check_mode:
                        os.remove(path)
                except OSError:
                    e = get_exception()
                    errors.append(path)

            if len(errors) > 0:
                module.fail_json(dest=dest,
                                 msg='Error deleting some source files: ' +
                                 str(e),
                                 files=errors)

        # Rudimentary check: If size changed then file changed. Not perfect, but easy.
        if os.path.getsize(dest) != size:
            changed = True

        if len(successes) and state != 'incomplete':
            state = 'archive'

    # Simple, single-file compression
    else:
        path = expanded_paths[0]

        # No source or compressed file
        if not (os.path.exists(path) or os.path.lexists(dest)):
            state = 'absent'

        # if it already exists and the source file isn't there, consider this done
        elif not os.path.lexists(path) and os.path.lexists(dest):
            state = 'compress'

        else:
            if module.check_mode:
                if not os.path.exists(dest):
                    changed = True
            else:
                size = 0
                f_in = f_out = arcfile = None

                if os.path.lexists(dest):
                    size = os.path.getsize(dest)

                try:
                    if format == 'zip':
                        arcfile = zipfile.ZipFile(dest, 'w',
                                                  zipfile.ZIP_DEFLATED)
                        arcfile.write(path, path[len(arcroot):])
                        arcfile.close()
                        state = 'archive'  # because all zip files are archives

                    else:
                        f_in = open(path, 'rb')

                        if format == 'gz':
                            f_out = gzip.open(dest, 'wb')
                        elif format == 'bz2':
                            f_out = bz2.BZ2File(dest, 'wb')
                        else:
                            raise OSError("Invalid format")

                        shutil.copyfileobj(f_in, f_out)

                    successes.append(path)

                except OSError:
                    e = get_exception()
                    module.fail_json(
                        path=path,
                        dest=dest,
                        msg='Unable to write to compressed file: %s' % str(e))

                if arcfile:
                    arcfile.close()
                if f_in:
                    f_in.close()
                if f_out:
                    f_out.close()

                # Rudimentary check: If size changed then file changed. Not perfect, but easy.
                if os.path.getsize(dest) != size:
                    changed = True

            state = 'compress'

        if remove and not check_mode:
            try:
                os.remove(path)

            except OSError:
                e = get_exception()
                module.fail_json(path=path,
                                 msg='Unable to remove source file: %s' %
                                 str(e))

    params['path'] = dest
    file_args = module.load_file_common_arguments(params)

    changed = module.set_fs_attributes_if_different(file_args, changed)

    module.exit_json(archived=successes,
                     dest=dest,
                     changed=changed,
                     state=state,
                     arcroot=arcroot,
                     missing=missing,
                     expanded_paths=expanded_paths)
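
The rolling character-comparison loop above derives the archive root; os.path.commonprefix captures the same idea more compactly. A sketch (commonprefix is character-based, so the result is trimmed back to a directory boundary):

import os

def archive_root(paths):
    prefix = os.path.commonprefix([os.path.dirname(p) + os.sep for p in paths])
    # Trim a partial component, e.g. '/srv/app' from '/srv/app1' + '/srv/app2'.
    return prefix[:prefix.rfind(os.sep) + 1]

print(archive_root(['/srv/app/logs/a.log', '/srv/app/conf/app.ini']))  # /srv/app/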
Example #27
0
def main():
    module = AnsibleModule(
        argument_spec=dict(
            group_id=dict(default=None),
            artifact_id=dict(default=None),
            version=dict(default="latest"),
            classifier=dict(default=''),
            extension=dict(default='jar'),
            repository_url=dict(default=None),
            username=dict(default=None, aliases=['aws_secret_key']),
            password=dict(default=None,
                          no_log=True,
                          aliases=['aws_secret_access_key']),
            state=dict(default="present",
                       choices=["present", "absent"
                                ]),  # TODO - Implement a "latest" state
            timeout=dict(default=10, type='int'),
            dest=dict(type="path", default=None),
            validate_certs=dict(required=False, default=True, type='bool'),
            keep_name=dict(required=False, default=False, type='bool'),
            verify_checksum=dict(
                required=False,
                default='download',
                choices=['never', 'download', 'change', 'always']),
        ),
        add_file_common_args=True)

    if not HAS_LXML_ETREE:
        module.fail_json(
            msg='module requires the lxml python library installed on the managed machine')

    repository_url = module.params["repository_url"]
    if not repository_url:
        repository_url = "http://repo1.maven.org/maven2"

    try:
        parsed_url = urlparse(repository_url)
    except AttributeError as e:
        module.fail_json(msg='url parsing went wrong %s' % e)

    if parsed_url.scheme == 's3' and not HAS_BOTO:
        module.fail_json(
            msg='boto3 required for this module, when using s3:// repository URLs')

    group_id = module.params["group_id"]
    artifact_id = module.params["artifact_id"]
    version = module.params["version"]
    classifier = module.params["classifier"]
    extension = module.params["extension"]
    state = module.params["state"]
    dest = module.params["dest"]
    b_dest = to_bytes(dest, errors='surrogate_or_strict')
    keep_name = module.params["keep_name"]
    verify_checksum = module.params["verify_checksum"]
    verify_download = verify_checksum in ['download', 'always']
    verify_change = verify_checksum in ['change', 'always']

    downloader = MavenDownloader(module, repository_url)

    try:
        artifact = Artifact(group_id, artifact_id, version, classifier,
                            extension)
    except ValueError as e:
        module.fail_json(msg=e.args[0])

    changed = False
    prev_state = "absent"

    if dest.endswith(os.sep):
        b_dest = to_bytes(dest, errors='surrogate_or_strict')
        if not os.path.exists(b_dest):
            (pre_existing_dir,
             new_directory_list) = split_pre_existing_dir(dest)
            os.makedirs(b_dest)
            directory_args = module.load_file_common_arguments(module.params)
            directory_mode = module.params["directory_mode"]
            if directory_mode is not None:
                directory_args['mode'] = directory_mode
            else:
                directory_args['mode'] = None
            changed = adjust_recursive_directory_permissions(
                pre_existing_dir, new_directory_list, module, directory_args,
                changed)

    if os.path.isdir(b_dest):
        version_part = version
        if keep_name and version == 'latest':
            version_part = downloader.find_latest_version_available(artifact)

        if classifier:
            dest = posixpath.join(
                dest, "%s-%s-%s.%s" %
                (artifact_id, version_part, classifier, extension))
        else:
            dest = posixpath.join(
                dest, "%s-%s.%s" % (artifact_id, version_part, extension))
        b_dest = to_bytes(dest, errors='surrogate_or_strict')

    if os.path.lexists(b_dest) and (
        (not verify_change) or downloader.verify_md5(
            dest,
            downloader.find_uri_for_artifact(artifact) + '.md5')):
        prev_state = "present"

    if prev_state == "absent":
        try:
            download_error = downloader.download(artifact, verify_download,
                                                 b_dest)
            if download_error is None:
                changed = True
            else:
                module.fail_json(
                    msg="Cannot download the artifact to destination: " +
                    download_error)
        except ValueError as e:
            module.fail_json(msg=e.args[0])

    module.params['dest'] = dest
    file_args = module.load_file_common_arguments(module.params)
    changed = module.set_fs_attributes_if_different(file_args, changed)
    if changed:
        module.exit_json(state=state,
                         dest=dest,
                         group_id=group_id,
                         artifact_id=artifact_id,
                         version=version,
                         classifier=classifier,
                         extension=extension,
                         repository_url=repository_url,
                         changed=changed)
    else:
        module.exit_json(state=state, dest=dest, changed=changed)
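
When dest is a directory, the example above derives the target filename from the Maven coordinates. The naming rule in isolation:

import posixpath

def artifact_dest(dest_dir, artifact_id, version, extension, classifier=''):
    if classifier:
        name = '%s-%s-%s.%s' % (artifact_id, version, classifier, extension)
    else:
        name = '%s-%s.%s' % (artifact_id, version, extension)
    return posixpath.join(dest_dir, name)

print(artifact_dest('/opt/libs', 'commons-io', '2.11.0', 'jar'))
# -> /opt/libs/commons-io-2.11.0.jar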
Example #28
0
def run_module():
    module_args = dict(
        cluster=dict(type='str', required=False, default='ceph'),
        name=dict(type='str', required=False),
        state=dict(type='str',
                   required=False,
                   default='present',
                   choices=[
                       'present', 'update', 'absent', 'list', 'info',
                       'fetch_initial_keys', 'generate_secret'
                   ]),
        caps=dict(type='dict', required=False, default=None),
        secret=dict(type='str', required=False, default=None, no_log=True),
        import_key=dict(type='bool', required=False, default=True),
        dest=dict(type='str', required=False, default='/etc/ceph/'),
        user=dict(type='str', required=False, default='client.admin'),
        user_key=dict(type='str', required=False, default=None),
        output_format=dict(type='str',
                           required=False,
                           default='json',
                           choices=['json', 'plain', 'xml', 'yaml']))

    module = AnsibleModule(
        argument_spec=module_args,
        supports_check_mode=True,
        add_file_common_args=True,
    )

    file_args = module.load_file_common_arguments(module.params)

    # Gather module parameters in variables
    state = module.params['state']
    name = module.params.get('name')
    cluster = module.params.get('cluster')
    caps = module.params.get('caps')
    secret = module.params.get('secret')
    import_key = module.params.get('import_key')
    dest = module.params.get('dest')
    user = module.params.get('user')
    user_key = module.params.get('user_key')
    output_format = module.params.get('output_format')

    changed = False

    result = dict(
        changed=changed,
        stdout='',
        stderr='',
        rc=0,
        start='',
        end='',
        delta='',
    )

    if module.check_mode:
        module.exit_json(**result)

    startd = datetime.datetime.now()

    # will return either the image name or None
    container_image = is_containerized()

    # Test if the key exists, if it does we skip its creation
    # We only want to run this check when a key needs to be added
    # There is no guarantee that any cluster is running and we don't need one
    _secret = secret
    _caps = caps
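    # rc-style flag: treated as "key not yet confirmed to exist" until
    # info_key returns 0 below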
    key_exist = 1

    if not user_key:
        user_key_filename = '{}.{}.keyring'.format(cluster, user)
        user_key_dir = '/etc/ceph'
        user_key_path = os.path.join(user_key_dir, user_key_filename)
    else:
        user_key_path = user_key

    if (state in ["present", "update"]):
        # if dest is not a directory, the user wants to change the file's name
        # (e,g: /etc/ceph/ceph.mgr.ceph-mon2.keyring)
        if not os.path.isdir(dest):
            file_path = dest
        else:
            if 'bootstrap' in dest:
                # Build a different path for bootstrap keys as there are stored
                # as /var/lib/ceph/bootstrap-rbd/ceph.keyring
                keyring_filename = cluster + '.keyring'
            else:
                keyring_filename = cluster + "." + name + ".keyring"
            file_path = os.path.join(dest, keyring_filename)

        file_args['path'] = file_path

        if import_key:
            _info_key = []
            rc, cmd, out, err = exec_commands(
                module,
                info_key(cluster, name, user, user_key_path, output_format,
                         container_image))  # noqa E501
            key_exist = rc
            if not caps and key_exist != 0:
                fatal("Capabilities must be provided when state is 'present'",
                      module)  # noqa E501
            if key_exist != 0 and secret is None and caps is None:
                fatal(
                    "Keyring doesn't exist, you must provide 'secret' and 'caps'",
                    module)  # noqa E501
            if key_exist == 0:
                _info_key = json.loads(out)
                if not secret:
                    secret = _info_key[0]['key']
                _secret = _info_key[0]['key']
                if not caps:
                    caps = _info_key[0]['caps']
                _caps = _info_key[0]['caps']
                if secret == _secret and caps == _caps:
                    if not os.path.isfile(file_path):
                        rc, cmd, out, err = exec_commands(
                            module,
                            get_key(cluster, user, user_key_path, name,
                                    file_path, container_image))  # noqa E501
                        result["rc"] = rc
                        if rc != 0:
                            result["stdout"] = "Couldn't fetch the key {0} at {1}.".format(name, file_path)  # noqa E501
                            module.exit_json(**result)
                        result["stdout"] = "fetched the key {0} at {1}.".format(name, file_path)  # noqa E501

                    result["stdout"] = "{0} already exists and doesn't need to be updated.".format(name)  # noqa E501
                    result["rc"] = 0
                    module.set_fs_attributes_if_different(file_args, False)
                    module.exit_json(**result)
        else:
            if os.path.isfile(file_path) and (not secret or not caps):
                result["stdout"] = "{0} already exists in {1} you must provide secret *and* caps when import_key is {2}".format(name, dest, import_key)  # noqa E501
                result["rc"] = 0
                module.exit_json(**result)
        if (key_exist == 0 and
            (secret != _secret
             or caps != _caps)) or key_exist != 0:  # noqa E501
            rc, cmd, out, err = exec_commands(
                module,
                create_key(module, result, cluster, user, user_key_path, name,
                           secret, caps, import_key, file_path,
                           container_image))  # noqa E501
            if rc != 0:
                result["stdout"] = "Couldn't create or update {0}".format(name)
                result["stderr"] = err
                module.exit_json(**result)
            module.set_fs_attributes_if_different(file_args, False)
            changed = True

    elif state == "absent":
        if key_exist == 0:
            rc, cmd, out, err = exec_commands(
                module,
                delete_key(cluster, user, user_key_path, name,
                           container_image))
            if rc == 0:
                changed = True
        else:
            rc = 0

    elif state == "info":
        rc, cmd, out, err = exec_commands(
            module,
            info_key(cluster, name, user, user_key_path, output_format,
                     container_image))  # noqa E501

    elif state == "list":
        rc, cmd, out, err = exec_commands(
            module, list_keys(cluster, user, user_key_path, container_image))

    elif state == "fetch_initial_keys":
        hostname = socket.gethostname().split('.', 1)[0]
        user = "******"
        keyring_filename = cluster + "-" + hostname + "/keyring"
        user_key_path = os.path.join("/var/lib/ceph/mon/", keyring_filename)
        rc, cmd, out, err = exec_commands(
            module, list_keys(cluster, user, user_key_path, container_image))
        if rc != 0:
            result["stdout"] = "failed to retrieve ceph keys"
            result["sdterr"] = err
            result['rc'] = 0
            module.exit_json(**result)

        entities = lookup_ceph_initial_entities(module, out)

        output_format = "plain"
        for entity in entities:
            key_path = build_key_path(cluster, entity)
            if key_path is None:
                fatal("Failed to build key path, no entity yet?", module)
            elif os.path.isfile(key_path):
                # if the key is already on the filesystem
                # there is no need to fetch it again
                continue

            extra_args = [
                '-o',
                key_path,
            ]

            info_cmd = info_key(cluster, entity, user, user_key_path,
                                output_format, container_image)
            # we use info_cmd[0] because info_cmd is an array made of an array
            info_cmd[0].extend(extra_args)
            rc, cmd, out, err = exec_commands(module, info_cmd)  # noqa E501

            file_args = module.load_file_common_arguments(module.params)
            file_args['path'] = key_path
            module.set_fs_attributes_if_different(file_args, False)
    elif state == "generate_secret":
        out = generate_secret().decode()
        cmd = ''
        rc = 0
        err = ''
        changed = True

    endd = datetime.datetime.now()
    delta = endd - startd

    result = dict(
        cmd=cmd,
        start=str(startd),
        end=str(endd),
        delta=str(delta),
        rc=rc,
        stdout=out.rstrip("\r\n"),
        stderr=err.rstrip("\r\n"),
        changed=changed,
    )

    if rc != 0:
        module.fail_json(msg='non-zero return code', **result)

    module.exit_json(**result)
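
The create-or-update decision above reduces to comparing the desired secret/caps
against what 'ceph auth get' returned. A minimal standalone sketch of that
predicate (key_needs_write is a hypothetical helper, not part of the module):

def key_needs_write(key_exist, secret, caps, current_secret, current_caps):
    """Return True when the key must be created or updated in Ceph.

    Mirrors: (key_exist == 0 and (secret != _secret or caps != _caps))
             or key_exist != 0
    """
    if key_exist != 0:  # key absent from Ceph: always create
        return True
    # key present: write only when secret or caps diverge
    return secret != current_secret or caps != current_caps

# usage
assert key_needs_write(1, 'AQ==', {'mon': 'allow r'}, None, None)
assert not key_needs_write(0, 'AQ==', {'mon': 'allow r'}, 'AQ==', {'mon': 'allow r'})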
Example #29
def main():
    '''
    This function is the main function of this module
    '''
    # argument_spec = postgres_common_argument_spec()
    argument_spec = dict()
    argument_spec.update(
        address=dict(type='str', default='samehost', aliases=['source', 'src']),
        backup_file=dict(type='str'),
        contype=dict(type='str', default=None, choices=PG_HBA_TYPES),
        create=dict(type='bool', default=False),
        databases=dict(type='str', default='all'),
        dest=dict(type='path', required=True),
        method=dict(type='str', default='md5', choices=PG_HBA_METHODS),
        netmask=dict(type='str'),
        options=dict(type='str'),
        order=dict(type='str', default="sdu", choices=PG_HBA_ORDERS),
        state=dict(type='str', default="present", choices=["absent", "present"]),
        users=dict(type='str', default='all')
    )
    module = AnsibleModule(
        argument_spec=argument_spec,
        add_file_common_args=True,
        supports_check_mode=True
    )
    if IPADDRESS_IMP_ERR is not None:
        module.fail_json(msg=missing_required_lib('ipaddress'), exception=IPADDRESS_IMP_ERR)

    contype = module.params["contype"]
    create = bool(module.params["create"] or module.check_mode)
    if module.check_mode:
        backup = False
    else:
        backup = module.params['backup']
        backup_file = module.params['backup_file']
    databases = module.params["databases"]
    dest = module.params["dest"]

    method = module.params["method"]
    netmask = module.params["netmask"]
    options = module.params["options"]
    order = module.params["order"]
    source = module.params["address"]
    state = module.params["state"]
    users = module.params["users"]

    ret = {'msgs': []}
    try:
        pg_hba = PgHba(dest, order, backup=backup, create=create)
    except PgHbaError as error:
        module.fail_json(msg='Error reading file:\n{0}'.format(error))

    if contype:
        try:
            for database in databases.split(','):
                for user in users.split(','):
                    rule = PgHbaRule(contype, database, user, source, netmask, method, options)
                    if state == "present":
                        ret['msgs'].append('Adding')
                        pg_hba.add_rule(rule)
                    else:
                        ret['msgs'].append('Removing')
                        pg_hba.remove_rule(rule)
        except PgHbaError as error:
            module.fail_json(msg='Error modifying rules:\n{0}'.format(error))
        file_args = module.load_file_common_arguments(module.params)
        ret['changed'] = changed = pg_hba.changed()
        if changed:
            ret['msgs'].append('Changed')
            ret['diff'] = pg_hba.diff

            if not module.check_mode:
                ret['msgs'].append('Writing')
                try:
                    if pg_hba.write(backup_file):
                        module.set_fs_attributes_if_different(file_args, True, pg_hba.diff,
                                                              expand=False)
                except PgHbaError as error:
                    module.fail_json(msg='Error writing file:\n{0}'.format(error))
                if pg_hba.last_backup:
                    ret['backup_file'] = pg_hba.last_backup

    ret['pg_hba'] = [rule for rule in pg_hba.get_rules()]
    module.exit_json(**ret)
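
PgHba and PgHbaRule are defined outside this excerpt; the add/remove loop stays
idempotent because the rule container only reports changed() when its contents
actually differ from what was read from disk. A rough sketch of that contract
(a simplified stand-in assuming hashable rule tuples, not the module's real class):

class RuleSet:
    """Simplified stand-in for PgHba: reports whether mutations changed it."""

    def __init__(self, rules=None):
        self._initial = set(rules or [])
        self._rules = set(self._initial)

    def add_rule(self, rule):
        self._rules.add(rule)

    def remove_rule(self, rule):
        self._rules.discard(rule)  # removing a missing rule is a no-op

    def changed(self):
        return self._rules != self._initial

rs = RuleSet({('local', 'all', 'all', 'peer')})
rs.add_rule(('local', 'all', 'all', 'peer'))  # already present: no change
assert not rs.changed()
rs.add_rule(('host', 'all', 'all', 'md5'))
assert rs.changed()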
Example #30
def main():
    argument_spec = url_argument_spec()
    argument_spec.update(
        url=dict(type='str', required=True),
        dest=dict(type='path', required=True),
        backup=dict(type='bool'),
        sha256sum=dict(type='str', default=''),
        checksum=dict(type='str', default=''),
        timeout=dict(type='int', default=10),
        headers=dict(type='raw'),
        tmp_dest=dict(type='path'),
    )

    module = AnsibleModule(
        # not checking because of daisy chain to file module
        argument_spec=argument_spec,
        add_file_common_args=True,
        supports_check_mode=True,
        mutually_exclusive=[['checksum', 'sha256sum']],
    )

    url = module.params['url']
    dest = module.params['dest']
    backup = module.params['backup']
    force = module.params['force']
    sha256sum = module.params['sha256sum']
    checksum = module.params['checksum']
    use_proxy = module.params['use_proxy']
    timeout = module.params['timeout']
    tmp_dest = module.params['tmp_dest']

    # Parse headers to dict
    if isinstance(module.params['headers'], dict):
        headers = module.params['headers']
    elif module.params['headers']:
        try:
            headers = dict(
                item.split(':', 1)
                for item in module.params['headers'].split(','))
            module.deprecate(
                'Supplying `headers` as a string is deprecated. Please use dict/hash format for `headers`',
                version='2.10')
        except Exception:
            module.fail_json(
                msg=
                "The string representation for the `headers` parameter requires a key:value,key:value syntax to be properly parsed."
            )
    else:
        headers = None

    dest_is_dir = os.path.isdir(dest)
    last_mod_time = None

    # workaround for usage of deprecated sha256sum parameter
    if sha256sum:
        checksum = 'sha256:%s' % (sha256sum)

    # checksum specified, parse for algorithm and checksum
    if checksum:
        try:
            algorithm, checksum = checksum.rsplit(':', 1)
            # Remove any non-alphanumeric characters, including the infamous
            # Unicode zero-width space
            checksum = re.sub(r'\W+', '', checksum).lower()
            # Ensure the checksum portion is a hexdigest
            int(checksum, 16)
        except ValueError:
            module.fail_json(
                msg=
                "The checksum parameter has to be in format <algorithm>:<checksum>"
            )

    if not dest_is_dir and os.path.exists(dest):
        checksum_mismatch = False

        # If the download is not forced and there is a checksum, allow
        # checksum match to skip the download.
        if not force and checksum != '':
            destination_checksum = module.digest_from_file(dest, algorithm)

            if checksum == destination_checksum:
                module.exit_json(msg="file already exists",
                                 dest=dest,
                                 url=url,
                                 changed=False)

            checksum_mismatch = True

        # Not forcing redownload, unless checksum does not match
        if not force and not checksum_mismatch:
            # allow file attribute changes
            module.params['path'] = dest
            file_args = module.load_file_common_arguments(module.params)
            file_args['path'] = dest
            changed = module.set_fs_attributes_if_different(file_args, False)

            if changed:
                module.exit_json(
                    msg="file already exists but file attributes changed",
                    dest=dest,
                    url=url,
                    changed=changed)
            module.exit_json(msg="file already exists",
                             dest=dest,
                             url=url,
                             changed=changed)

        # If the file already exists, prepare the last modified time for the
        # request.
        mtime = os.path.getmtime(dest)
        last_mod_time = datetime.datetime.utcfromtimestamp(mtime)

        # If the checksum does not match we have to force the download
        # because last_mod_time may be newer than on remote
        if checksum_mismatch:
            force = True

    # download to tmpsrc
    tmpsrc, info = url_get(module, url, dest, use_proxy, last_mod_time, force,
                           timeout, headers, tmp_dest)

    # Now the request has completed, we can finally generate the final
    # destination file name from the info dict.

    if dest_is_dir:
        filename = extract_filename_from_headers(info)
        if not filename:
            # Fall back to extracting the filename from the URL.
            # Pluck the URL from the info, since a redirect could have changed
            # it.
            filename = url_filename(info['url'])
        dest = os.path.join(dest, filename)

    checksum_src = None
    checksum_dest = None

    # If the remote URL exists, we're done with check mode
    if module.check_mode:
        os.remove(tmpsrc)
        res_args = dict(url=url,
                        dest=dest,
                        src=tmpsrc,
                        changed=True,
                        msg=info.get('msg', ''))
        module.exit_json(**res_args)

    # raise an error if there is no tmpsrc file
    if not os.path.exists(tmpsrc):
        module.fail_json(msg="Request failed",
                         status_code=info['status'],
                         response=info['msg'])
    if not os.access(tmpsrc, os.R_OK):
        os.remove(tmpsrc)
        module.fail_json(msg="Source %s is not readable" % (tmpsrc))
    checksum_src = module.sha1(tmpsrc)

    # check if there is no dest file
    if os.path.exists(dest):
        # raise an error if copy has no permission on dest
        if not os.access(dest, os.W_OK):
            os.remove(tmpsrc)
            module.fail_json(msg="Destination %s is not writable" % (dest))
        if not os.access(dest, os.R_OK):
            os.remove(tmpsrc)
            module.fail_json(msg="Destination %s is not readable" % (dest))
        checksum_dest = module.sha1(dest)
    else:
        if not os.path.exists(os.path.dirname(dest)):
            os.remove(tmpsrc)
            module.fail_json(msg="Destination %s does not exist" %
                             (os.path.dirname(dest)))
        if not os.access(os.path.dirname(dest), os.W_OK):
            os.remove(tmpsrc)
            module.fail_json(msg="Destination %s is not writable" %
                             (os.path.dirname(dest)))

    backup_file = None
    if checksum_src != checksum_dest:
        try:
            if backup:
                if os.path.exists(dest):
                    backup_file = module.backup_local(dest)
            module.atomic_move(tmpsrc, dest)
        except Exception as e:
            if os.path.exists(tmpsrc):
                os.remove(tmpsrc)
            module.fail_json(msg="failed to copy %s to %s: %s" %
                             (tmpsrc, dest, to_native(e)),
                             exception=traceback.format_exc())
        changed = True
    else:
        changed = False
        if os.path.exists(tmpsrc):
            os.remove(tmpsrc)

    if checksum != '':
        destination_checksum = module.digest_from_file(dest, algorithm)

        if checksum != destination_checksum:
            os.remove(dest)
            module.fail_json(
                msg="The checksum for %s did not match %s; it was %s." %
                (dest, checksum, destination_checksum))

    # allow file attribute changes
    module.params['path'] = dest
    file_args = module.load_file_common_arguments(module.params)
    file_args['path'] = dest
    changed = module.set_fs_attributes_if_different(file_args, changed)

    # Backwards compat only.  We'll return None on FIPS enabled systems
    try:
        md5sum = module.md5(dest)
    except ValueError:
        md5sum = None

    res_args = dict(url=url,
                    dest=dest,
                    src=tmpsrc,
                    md5sum=md5sum,
                    checksum_src=checksum_src,
                    checksum_dest=checksum_dest,
                    changed=changed,
                    msg=info.get('msg', ''),
                    status_code=info.get('status', ''))
    if backup_file:
        res_args['backup_file'] = backup_file

    # Mission complete
    module.exit_json(**res_args)
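
The '<algorithm>:<checksum>' handling above (rsplit on the last colon, strip
non-alphanumerics such as zero-width spaces, validate the remainder as hex) can
be exercised on its own. A sketch using only the standard library;
verify_file_checksum is a hypothetical helper name:

import hashlib
import re

def parse_checksum(spec):
    """Split 'sha256:abcd...' into (algorithm, normalized hex digest)."""
    algorithm, digest = spec.rsplit(':', 1)
    digest = re.sub(r'\W+', '', digest).lower()
    int(digest, 16)  # raises ValueError if not a hexdigest
    return algorithm, digest

def verify_file_checksum(path, spec):
    """Hash the file in chunks and compare against the expected digest."""
    algorithm, digest = parse_checksum(spec)
    h = hashlib.new(algorithm)
    with open(path, 'rb') as f:
        for chunk in iter(lambda: f.read(65536), b''):
            h.update(chunk)
    return h.hexdigest() == digest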
Example #31
def main():

    module = AnsibleModule(
        # not checking because of daisy chain to file module
        argument_spec=dict(
            src=dict(type='path'),
            original_basename=dict(type='str'),  # used to handle 'dest is a directory' via template, a slight hack
            content=dict(type='str', no_log=True),
            dest=dict(type='path', required=True),
            backup=dict(type='bool', default=False),
            force=dict(type='bool', default=True, aliases=['thirsty']),
            validate=dict(type='str'),
            directory_mode=dict(type='raw'),
            remote_src=dict(type='bool'),
            local_follow=dict(type='bool'),
            checksum=dict(),
        ),
        add_file_common_args=True,
        supports_check_mode=True,
    )

    src = module.params['src']
    b_src = to_bytes(src, errors='surrogate_or_strict')
    dest = module.params['dest']
    # Make sure we always have a directory component for later processing
    if os.path.sep not in dest:
        dest = '.{0}{1}'.format(os.path.sep, dest)
    b_dest = to_bytes(dest, errors='surrogate_or_strict')
    backup = module.params['backup']
    force = module.params['force']
    original_basename = module.params.get('original_basename', None)
    validate = module.params.get('validate', None)
    follow = module.params['follow']
    remote_src = module.params['remote_src']
    checksum = module.params['checksum']

    if not os.path.exists(b_src):
        module.fail_json(msg="Source %s not found" % (src))
    if not os.access(b_src, os.R_OK):
        module.fail_json(msg="Source %s not readable" % (src))
    if os.path.isdir(b_src):
        module.fail_json(msg="Remote copy does not support recursive copy of directory: %s" % (src))

    # Preserve is usually handled in the action plugin but mode + remote_src has to be done on the
    # remote host
    if module.params['mode'] == 'preserve':
        module.params['mode'] = '0%03o' % stat.S_IMODE(os.stat(b_src).st_mode)
    mode = module.params['mode']

    checksum_src = module.sha1(src)
    checksum_dest = None
    # Backwards compat only.  This will be None in FIPS mode
    try:
        md5sum_src = module.md5(src)
    except ValueError:
        md5sum_src = None

    changed = False

    if checksum and checksum_src != checksum:
        module.fail_json(
            msg='Copied file does not match the expected checksum. Transfer failed.',
            checksum=checksum_src,
            expected_checksum=checksum
        )

    # Special handling for recursive copy - create intermediate dirs
    if original_basename and dest.endswith(os.sep):
        dest = os.path.join(dest, original_basename)
        b_dest = to_bytes(dest, errors='surrogate_or_strict')
        dirname = os.path.dirname(dest)
        b_dirname = to_bytes(dirname, errors='surrogate_or_strict')
        if not os.path.exists(b_dirname) and os.path.isabs(b_dirname):
            (pre_existing_dir, new_directory_list) = split_pre_existing_dir(dirname)
            os.makedirs(b_dirname)
            directory_args = module.load_file_common_arguments(module.params)
            directory_mode = module.params["directory_mode"]
            if directory_mode is not None:
                directory_args['mode'] = directory_mode
            else:
                directory_args['mode'] = None
            changed = adjust_recursive_directory_permissions(pre_existing_dir, new_directory_list, module, directory_args, changed)

    if os.path.isdir(b_dest):
        basename = os.path.basename(src)
        if original_basename:
            basename = original_basename
        dest = os.path.join(dest, basename)
        b_dest = to_bytes(dest, errors='surrogate_or_strict')

    if os.path.exists(b_dest):
        if os.path.islink(b_dest) and follow:
            b_dest = os.path.realpath(b_dest)
            dest = to_native(b_dest, errors='surrogate_or_strict')
        if not force:
            module.exit_json(msg="file already exists", src=src, dest=dest, changed=False)
        if os.access(b_dest, os.R_OK):
            checksum_dest = module.sha1(dest)
    else:
        if not os.path.exists(os.path.dirname(b_dest)):
            try:
                # os.path.exists() can return false in some
                # circumstances where the directory does not have
                # the execute bit for the current user set, in
                # which case the stat() call will raise an OSError
                os.stat(os.path.dirname(b_dest))
            except OSError as e:
                if "permission denied" in to_native(e).lower():
                    module.fail_json(msg="Destination directory %s is not accessible" % (os.path.dirname(dest)))
            module.fail_json(msg="Destination directory %s does not exist" % (os.path.dirname(dest)))

    if not os.access(os.path.dirname(b_dest), os.W_OK) and not module.params['unsafe_writes']:
        module.fail_json(msg="Destination %s not writable" % (os.path.dirname(dest)))

    backup_file = None
    if checksum_src != checksum_dest or os.path.islink(b_dest):
        if not module.check_mode:
            try:
                if backup:
                    if os.path.exists(b_dest):
                        backup_file = module.backup_local(dest)
                # allow for conversion from symlink.
                if os.path.islink(b_dest):
                    os.unlink(b_dest)
                    open(b_dest, 'w').close()
                if validate:
                    # if we have a mode, make sure we set it on the temporary
                    # file source as some validations may require it
                    # FIXME: should we do the same for owner/group here too?
                    if mode is not None:
                        module.set_mode_if_different(src, mode, False)
                    if "%s" not in validate:
                        module.fail_json(msg="validate must contain %%s: %s" % (validate))
                    (rc, out, err) = module.run_command(validate % src)
                    if rc != 0:
                        module.fail_json(msg="failed to validate", exit_status=rc, stdout=out, stderr=err)
                b_mysrc = b_src
                if remote_src:
                    _, b_mysrc = tempfile.mkstemp(dir=os.path.dirname(b_dest))
                    shutil.copy2(b_src, b_mysrc)
                module.atomic_move(b_mysrc, dest, unsafe_writes=module.params['unsafe_writes'])
            except IOError:
                module.fail_json(msg="failed to copy: %s to %s" % (src, dest), traceback=traceback.format_exc())
        changed = True
    else:
        changed = False

    res_args = dict(
        dest=dest, src=src, md5sum=md5sum_src, checksum=checksum_src, changed=changed
    )
    if backup_file:
        res_args['backup_file'] = backup_file

    module.params['dest'] = dest
    if not module.check_mode:
        file_args = module.load_file_common_arguments(module.params)
        res_args['changed'] = module.set_fs_attributes_if_different(file_args, res_args['changed'])

    module.exit_json(**res_args)
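
module.atomic_move() is what keeps a half-written destination from ever being
visible. The core pattern, reduced to the standard library (a sketch only;
Ansible's implementation also handles SELinux contexts, permissions and
cross-filesystem moves):

import os
import tempfile

def atomic_write(dest, data):
    """Write data to a temp file next to dest, then rename over it."""
    fd, tmp = tempfile.mkstemp(dir=os.path.dirname(dest) or '.')
    try:
        with os.fdopen(fd, 'wb') as f:
            f.write(data)
        os.replace(tmp, dest)  # atomic on POSIX within one filesystem
    except BaseException:
        os.unlink(tmp)
        raise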
Example #32
def main():
    # Module settings
    argument_spec = dict(
        bandwidth=dict(),
        baseurl=dict(type='list'),
        cost=dict(),
        deltarpm_metadata_percentage=dict(),
        deltarpm_percentage=dict(),
        description=dict(),
        enabled=dict(type='bool'),
        enablegroups=dict(type='bool'),
        exclude=dict(type='list'),
        failovermethod=dict(choices=['roundrobin', 'priority']),
        file=dict(),
        gpgcakey=dict(),
        gpgcheck=dict(type='bool'),
        gpgkey=dict(type='list'),
        http_caching=dict(choices=['all', 'packages', 'none']),
        include=dict(),
        includepkgs=dict(type='list'),
        ip_resolve=dict(choices=['4', '6', 'IPv4', 'IPv6', 'whatever']),
        keepalive=dict(type='bool'),
        keepcache=dict(choices=['0', '1']),
        metadata_expire=dict(),
        metadata_expire_filter=dict(
            choices=[
                'never',
                'read-only:past',
                'read-only:present',
                'read-only:future']),
        metalink=dict(),
        mirrorlist=dict(),
        mirrorlist_expire=dict(),
        name=dict(required=True),
        params=dict(type='dict'),
        password=dict(no_log=True),
        priority=dict(),
        protect=dict(type='bool'),
        proxy=dict(),
        proxy_password=dict(no_log=True),
        proxy_username=dict(),
        repo_gpgcheck=dict(type='bool'),
        reposdir=dict(default='/etc/yum.repos.d', type='path'),
        retries=dict(),
        s3_enabled=dict(type='bool'),
        skip_if_unavailable=dict(type='bool'),
        sslcacert=dict(aliases=['ca_cert']),
        ssl_check_cert_permissions=dict(type='bool'),
        sslclientcert=dict(aliases=['client_cert']),
        sslclientkey=dict(aliases=['client_key']),
        sslverify=dict(type='bool', aliases=['validate_certs']),
        state=dict(choices=['present', 'absent'], default='present'),
        throttle=dict(),
        timeout=dict(),
        ui_repoid_vars=dict(),
        username=dict(),
    )

    argument_spec['async'] = dict(type='bool')

    module = AnsibleModule(
        argument_spec=argument_spec,
        add_file_common_args=True,
        supports_check_mode=True,
    )

    # Params was removed
    # https://meetbot.fedoraproject.org/ansible-meeting/2017-09-28/ansible_dev_meeting.2017-09-28-15.00.log.html
    if module.params['params']:
        module.fail_json(msg="The params option to yum_repository was removed in Ansible 2.5 since it circumvents Ansible's option handling")

    name = module.params['name']
    state = module.params['state']

    # Check if required parameters are present
    if state == 'present':
        if (
                module.params['baseurl'] is None and
                module.params['metalink'] is None and
                module.params['mirrorlist'] is None):
            module.fail_json(
                msg="Parameter 'baseurl', 'metalink' or 'mirrorlist' is required.")
        if module.params['description'] is None:
            module.fail_json(
                msg="Parameter 'description' is required.")

    # Rename "name" and "description" to ensure correct key sorting
    module.params['repoid'] = module.params['name']
    module.params['name'] = module.params['description']
    del module.params['description']

    # Change list type to string for baseurl and gpgkey
    for list_param in ['baseurl', 'gpgkey']:
        if (
                list_param in module.params and
                module.params[list_param] is not None):
            module.params[list_param] = "\n".join(module.params[list_param])

    # Define repo file name if it doesn't exist
    if module.params['file'] is None:
        module.params['file'] = module.params['repoid']

    # Instantiate the YumRepo object
    yumrepo = YumRepo(module)

    # Get repo status before change
    diff = {
        'before_header': yumrepo.params['dest'],
        'before': yumrepo.dump(),
        'after_header': yumrepo.params['dest'],
        'after': ''
    }

    # Perform action depending on the state
    if state == 'present':
        yumrepo.add()
    elif state == 'absent':
        yumrepo.remove()

    # Get repo status after change
    diff['after'] = yumrepo.dump()

    # Compare repo states
    changed = diff['before'] != diff['after']

    # Save the file only if not in check mode and if there was a change
    if not module.check_mode and changed:
        yumrepo.save()

    # Change file attributes if needed
    if os.path.isfile(module.params['dest']):
        file_args = module.load_file_common_arguments(module.params)
        changed = module.set_fs_attributes_if_different(file_args, changed)

    # Print status of the change
    module.exit_json(changed=changed, repo=name, state=state, diff=diff)
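
The changed detection above compares the rendered repo file before and after
instead of tracking individual options. A minimal sketch of that
dump-and-compare idea with configparser (illustrative only; YumRepo itself is
defined outside this excerpt and writes real .repo files):

import configparser
import io

def dump_repo(repoid, options):
    """Render one [repoid] section roughly the way a .repo file stores it."""
    parser = configparser.ConfigParser()
    parser.add_section(repoid)
    for key in sorted(options):  # stable key order gives stable diffs
        parser.set(repoid, key, str(options[key]))
    buf = io.StringIO()
    parser.write(buf)
    return buf.getvalue()

before = dump_repo('epel', {'name': 'EPEL', 'enabled': 1})
after = dump_repo('epel', {'name': 'EPEL', 'enabled': 0})
changed = before != after  # True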
Example #33
def run_module():
    module_args = dict(
        cluster=dict(type='str', required=False, default='ceph'),
        name=dict(type='str', required=False),
        state=dict(type='str', required=True),
        caps=dict(type='dict', required=False, default=None),
        secret=dict(type='str', required=False, default=None),
        import_key=dict(type='bool', required=False, default=True),
        dest=dict(type='str', required=False, default='/etc/ceph/'),
    )

    module = AnsibleModule(
        argument_spec=module_args,
        supports_check_mode=True,
        add_file_common_args=True,
    )

    # Gather module parameters in variables
    state = module.params['state']
    name = module.params.get('name')
    cluster = module.params.get('cluster')
    caps = module.params.get('caps')
    secret = module.params.get('secret')
    import_key = module.params.get('import_key')
    dest = module.params.get('dest')

    result = dict(
        changed=False,
        stdout='',
        stderr='',
        rc='',
        start='',
        end='',
        delta='',
    )

    if module.check_mode:
        return result

    startd = datetime.datetime.now()

    # will return either the image name or None
    container_image = is_containerized()

    # Test if the key exists, if it does we skip its creation
    # We only want to run this check when a key needs to be added
    # There is no guarantee that any cluster is running and we don't need one
    if import_key:
        user = "******"
        keyring_filename = cluster + '.' + user + '.keyring'
        user_key = os.path.join("/etc/ceph/", keyring_filename)
        output_format = "json"
        rc, cmd, out, err = exec_commands(
            module,
            info_key(cluster, name, user, user_key, output_format,
                     container_image))  # noqa E501

    if state == "present":
        if not caps:
            fatal("Capabilities must be provided when state is 'present'",
                  module)  # noqa E501

        # if dest is not a directory, the user wants to change the file's name
        # (e,g: /etc/ceph/ceph.mgr.ceph-mon2.keyring)
        if not os.path.isdir(dest):
            file_path = dest
        elif 'bootstrap' in dest:
            # Build a different path for bootstrap keys as there are stored as
            # /var/lib/ceph/bootstrap-rbd/ceph.keyring
            keyring_filename = cluster + '.keyring'
            file_path = os.path.join(dest, keyring_filename)
        else:
            keyring_filename = cluster + "." + name + ".keyring"
            file_path = os.path.join(dest, keyring_filename)

        # We allow 'present' to override any existing key
        # ONLY if a secret is provided
        # if not we skip the creation
        if import_key:
            if rc == 0 and not secret:
                # If the key exists in Ceph we must fetch it on the system
                # because nothing tells us it exists on the fs or not
                rc, cmd, out, err = exec_commands(
                    module, get_key(cluster, name, file_path,
                                    container_image))  # noqa E501
                result[
                    "stdout"] = "skipped, since {0} already exists, we only fetched the key at {1}. If you want to update a key use 'state: update'".format(  # noqa E501
                        name, file_path)
                result['rc'] = rc
                module.exit_json(**result)

        rc, cmd, out, err = exec_commands(
            module,
            create_key(module, result, cluster, name, secret, caps, import_key,
                       file_path, container_image))  # noqa E501

        file_args = module.load_file_common_arguments(module.params)
        file_args['path'] = file_path
        module.set_fs_attributes_if_different(file_args, False)
    elif state == "update":
        if not caps:
            fatal("Capabilities must be provided when state is 'update'",
                  module)  # noqa E501

        if rc != 0:
            result["stdout"] = "skipped, since {0} does not exist".format(name)
            result['rc'] = 0
            module.exit_json(**result)

        rc, cmd, out, err = exec_commands(
            module, update_key(cluster, name, caps, container_image))
        # After the update we don't need to overwrite the key on the filesystem
        # since the secret has not changed

    elif state == "absent":
        rc, cmd, out, err = exec_commands(
            module, delete_key(cluster, name, container_image))

    elif state == "info":
        if rc != 0:
            result["stdout"] = "skipped, since {0} does not exist".format(name)
            result['rc'] = 0
            module.exit_json(**result)

        user = "******"
        keyring_filename = cluster + '.' + user + '.keyring'
        user_key = os.path.join("/etc/ceph/", keyring_filename)
        output_format = "json"
        rc, cmd, out, err = exec_commands(
            module,
            info_key(cluster, name, user, user_key, output_format,
                     container_image))  # noqa E501

    elif state == "list":
        user = "******"
        keyring_filename = cluster + '.' + user + '.keyring'
        user_key = os.path.join("/etc/ceph/", keyring_filename)
        rc, cmd, out, err = exec_commands(
            module, list_keys(cluster, user, user_key, container_image))

    elif state == "fetch_initial_keys":
        hostname = socket.gethostname().split('.', 1)[0]
        user = "******"
        keyring_filename = cluster + "-" + hostname + "/keyring"
        user_key = os.path.join("/var/lib/ceph/mon/", keyring_filename)
        rc, cmd, out, err = exec_commands(
            module, list_keys(cluster, user, user_key, container_image))
        if rc != 0:
            result["stdout"] = "failed to retrieve ceph keys".format(name)
            result["sdterr"] = err
            result['rc'] = 0
            module.exit_json(**result)

        entities = lookup_ceph_initial_entities(module, out)

        output_format = "plain"
        for entity in entities:
            key_path = build_key_path(cluster, entity)
            if key_path is None:
                fatal("Failed to build key path, no entity yet?", module)
            elif os.path.isfile(key_path):
                # if the key is already on the filesystem
                # there is no need to fetch it again
                continue

            extra_args = [
                '-o',
                key_path,
            ]

            info_cmd = info_key(cluster, entity, user, user_key, output_format,
                                container_image)
            # we use info_cmd[0] because info_cmd is an array made of an array
            info_cmd[0].extend(extra_args)
            rc, cmd, out, err = exec_commands(module, info_cmd)  # noqa E501

            file_args = module.load_file_common_arguments(module.params)
            file_args['path'] = key_path
            module.set_fs_attributes_if_different(file_args, False)
    else:
        module.fail_json(
            msg='State must be one of "present", "absent", "update", "list", "info" or "fetch_initial_keys".',  # noqa E501
            changed=False,
            rc=1)

    endd = datetime.datetime.now()
    delta = endd - startd

    result = dict(
        cmd=cmd,
        start=str(startd),
        end=str(endd),
        delta=str(delta),
        rc=rc,
        stdout=out.rstrip("\r\n"),
        stderr=err.rstrip("\r\n"),
        changed=True,
    )

    if rc != 0:
        module.fail_json(msg='non-zero return code', **result)

    module.exit_json(**result)
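
The file_path selection near the top of the 'present' branch (explicit file
name, bootstrap directory, or <cluster>.<name>.keyring under dest) is a pure
function of the parameters. Sketched standalone, with resolve_keyring_path as a
hypothetical name:

import os

def resolve_keyring_path(cluster, name, dest, dest_is_dir):
    """Mirror the dest handling: exact file vs bootstrap vs regular keyring."""
    if not dest_is_dir:  # caller picked an exact file name
        return dest
    if 'bootstrap' in dest:  # e.g. /var/lib/ceph/bootstrap-rbd/
        return os.path.join(dest, cluster + '.keyring')
    return os.path.join(dest, '%s.%s.keyring' % (cluster, name))

assert resolve_keyring_path('ceph', 'client.rgw', '/etc/ceph/', True) \
    == '/etc/ceph/ceph.client.rgw.keyring'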
Example #34
def main():
    argument_spec = url_argument_spec()
    argument_spec.update(
        dest=dict(type='path'),
        url_username=dict(type='str', aliases=['user']),
        url_password=dict(type='str', aliases=['password'], no_log=True),
        body=dict(type='raw'),
        body_format=dict(type='str',
                         default='raw',
                         choices=['form-urlencoded', 'json', 'raw']),
        src=dict(type='path'),
        method=dict(type='str', default='GET'),
        return_content=dict(type='bool', default=False),
        follow_redirects=dict(
            type='str',
            default='safe',
            choices=['all', 'no', 'none', 'safe', 'urllib2', 'yes']),
        creates=dict(type='path'),
        removes=dict(type='path'),
        status_code=dict(type='list', default=[200]),
        timeout=dict(type='int', default=30),
        headers=dict(type='dict', default={}),
        unix_socket=dict(type='path'),
    )

    module = AnsibleModule(
        argument_spec=argument_spec,
        # TODO: Remove check_invalid_arguments in 2.9
        check_invalid_arguments=False,
        add_file_common_args=True,
        mutually_exclusive=[['body', 'src']],
    )

    url = module.params['url']
    body = module.params['body']
    body_format = module.params['body_format'].lower()
    method = module.params['method'].upper()
    dest = module.params['dest']
    return_content = module.params['return_content']
    creates = module.params['creates']
    removes = module.params['removes']
    status_code = [int(x) for x in list(module.params['status_code'])]
    socket_timeout = module.params['timeout']

    dict_headers = module.params['headers']

    if not re.match('^[A-Z]+$', method):
        module.fail_json(
            msg=
            "Parameter 'method' needs to be a single word in uppercase, like GET or POST."
        )

    if body_format == 'json':
        # Encode the body unless its a string, then assume it is pre-formatted JSON
        if not isinstance(body, string_types):
            body = json.dumps(body)
        if 'content-type' not in [header.lower() for header in dict_headers]:
            dict_headers['Content-Type'] = 'application/json'
    elif body_format == 'form-urlencoded':
        if not isinstance(body, string_types):
            try:
                body = form_urlencoded(body)
            except ValueError as e:
                module.fail_json(
                    msg='failed to parse body as form_urlencoded: %s' %
                    to_native(e),
                    elapsed=0)
        if 'content-type' not in [header.lower() for header in dict_headers]:
            dict_headers['Content-Type'] = 'application/x-www-form-urlencoded'

    # TODO: Deprecated section.  Remove in Ansible 2.9
    # Grab all the http headers. Need this hack since passing multi-values is
    # currently a bit ugly. (e.g. headers='{"Content-Type":"application/json"}')
    for key, value in iteritems(module.params):
        if key.startswith("HEADER_"):
            module.deprecate(
                'Supplying headers via HEADER_* is deprecated. Please use `headers` to'
                ' supply headers for the request',
                version='2.9')
            skey = key.replace("HEADER_", "")
            dict_headers[skey] = value
    # End deprecated section

    if creates is not None:
        # do not run the command if the line contains creates=filename
        # and the filename already exists.  This allows idempotence
        # of uri executions.
        if os.path.exists(creates):
            module.exit_json(stdout="skipped, since '%s' exists" % creates,
                             changed=False)

    if removes is not None:
        # do not run the command if the line contains removes=filename
        # and the filename does not exist.  This allows idempotence
        # of uri executions.
        if not os.path.exists(removes):
            module.exit_json(stdout="skipped, since '%s' does not exist" %
                             removes,
                             changed=False)

    # Make the request
    start = datetime.datetime.utcnow()
    resp, content, dest = uri(module, url, dest, body, body_format, method,
                              dict_headers, socket_timeout)
    resp['elapsed'] = (datetime.datetime.utcnow() - start).seconds
    resp['status'] = int(resp['status'])

    # Write the file out if requested
    if dest is not None:
        if resp['status'] == 304:
            resp['changed'] = False
        else:
            write_file(module, url, dest, content, resp)
            # allow file attribute changes
            resp['changed'] = True
            module.params['path'] = dest
            file_args = module.load_file_common_arguments(module.params)
            file_args['path'] = dest
            resp['changed'] = module.set_fs_attributes_if_different(
                file_args, resp['changed'])
        resp['path'] = dest
    else:
        resp['changed'] = False

    # Transmogrify the headers, replacing '-' with '_', since variables don't
    # work with dashes.
    # In python3, the headers are title cased.  Lowercase them to be
    # compatible with the python2 behaviour.
    uresp = {}
    for key, value in iteritems(resp):
        ukey = key.replace("-", "_").lower()
        uresp[ukey] = value

    if 'location' in uresp:
        uresp['location'] = absolute_location(url, uresp['location'])

    # Default content_encoding to try
    content_encoding = 'utf-8'
    if 'content_type' in uresp:
        content_type, params = cgi.parse_header(uresp['content_type'])
        if 'charset' in params:
            content_encoding = params['charset']
        u_content = to_text(content, encoding=content_encoding)
        if any(candidate in content_type for candidate in JSON_CANDIDATES):
            try:
                js = json.loads(u_content)
                uresp['json'] = js
            except Exception:
                if PY2:
                    sys.exc_clear()  # Avoid false positive traceback in fail_json() on Python 2
    else:
        u_content = to_text(content, encoding=content_encoding)

    if resp['status'] not in status_code:
        uresp['msg'] = 'Status code was %s and not %s: %s' % (
            resp['status'], status_code, uresp.get('msg', ''))
        module.fail_json(content=u_content, **uresp)
    elif return_content:
        module.exit_json(content=u_content, **uresp)
    else:
        module.exit_json(**uresp)
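
The response post-processing above rewrites header names so they are usable as
playbook variables (dashes are not allowed, and Python 3 title-cases headers).
That transform in isolation (sketch):

def normalize_headers(resp):
    """Replace '-' with '_' and lowercase keys, as the loop above does."""
    return {k.replace('-', '_').lower(): v for k, v in resp.items()}

assert normalize_headers({'Content-Type': 'text/html', 'status': 200}) \
    == {'content_type': 'text/html', 'status': 200}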
Example #35
def main():
    module = AnsibleModule(
        # not checking because of daisy chain to file module
        argument_spec=dict(
            src=dict(type='path', required=True),
            original_basename=dict(type='str'),  # used to handle 'dest is a directory' via template, a slight hack
            dest=dict(type='path', required=True),
            remote_src=dict(type='bool', default=False),
            creates=dict(type='path'),
            list_files=dict(type='bool', default=False),
            keep_newer=dict(type='bool', default=False),
            exclude=dict(type='list', default=[]),
            extra_opts=dict(type='list', default=[]),
            validate_certs=dict(type='bool', default=True),
        ),
        add_file_common_args=True,
        # check-mode only works for zip files, we cover that later
        supports_check_mode=True,
    )

    src = module.params['src']
    dest = module.params['dest']
    remote_src = module.params['remote_src']
    file_args = module.load_file_common_arguments(module.params)

    # did tar file arrive?
    if not os.path.exists(src):
        if not remote_src:
            module.fail_json(msg="Source '%s' failed to transfer" % src)
        # If remote_src=true, and src= contains ://, try and download the file to a temp directory.
        elif '://' in src:
            tempdir = os.path.dirname(os.path.realpath(__file__))
            package = os.path.join(tempdir, str(src.rsplit('/', 1)[1]))
            try:
                rsp, info = fetch_url(module, src)
                # If download fails, raise a proper exception
                if rsp is None:
                    raise Exception(info['msg'])

                # open in binary mode for python3
                f = open(package, 'wb')
                # Read 1kb at a time to save on ram
                while True:
                    data = rsp.read(BUFSIZE)
                    data = to_bytes(data, errors='surrogate_or_strict')

                    if len(data) < 1:
                        break  # End of file, break while loop

                    f.write(data)
                f.close()
                src = package
            except Exception as e:
                module.fail_json(msg="Failure downloading %s, %s" % (src, to_native(e)))
        else:
            module.fail_json(msg="Source '%s' does not exist" % src)
    if not os.access(src, os.R_OK):
        module.fail_json(msg="Source '%s' not readable" % src)

    # skip working with 0 size archives
    try:
        if os.path.getsize(src) == 0:
            module.fail_json(msg="Invalid archive '%s', the file is 0 bytes" % src)
    except Exception as e:
        module.fail_json(msg="Source '%s' not readable, %s" % (src, to_native(e)))

    # is dest OK to receive tar file?
    if not os.path.isdir(dest):
        module.fail_json(msg="Destination '%s' is not a directory" % dest)

    handler = pick_handler(src, dest, file_args, module)

    res_args = dict(handler=handler.__class__.__name__, dest=dest, src=src)

    # do we need to do unpack?
    check_results = handler.is_unarchived()

    # DEBUG
    # res_args['check_results'] = check_results

    if module.check_mode:
        res_args['changed'] = not check_results['unarchived']
    elif check_results['unarchived']:
        res_args['changed'] = False
    else:
        # do the unpack
        try:
            res_args['extract_results'] = handler.unarchive()
            if res_args['extract_results']['rc'] != 0:
                module.fail_json(msg="failed to unpack %s to %s" % (src, dest), **res_args)
        except IOError:
            module.fail_json(msg="failed to unpack %s to %s" % (src, dest), **res_args)
        else:
            res_args['changed'] = True

    # Get diff if required
    if check_results.get('diff', False):
        res_args['diff'] = {'prepared': check_results['diff']}

    # Run only if we found differences (idempotence) or diff was missing
    if res_args.get('diff', True) and not module.check_mode:
        # do we need to change perms?
        for filename in handler.files_in_archive:
            file_args['path'] = os.path.join(dest, filename)

            try:
                res_args['changed'] = module.set_fs_attributes_if_different(file_args, res_args['changed'], expand=False)
            except (IOError, OSError) as e:
                module.fail_json(msg="Unexpected error when accessing exploded file: %s" % to_native(e), **res_args)

    if module.params['list_files']:
        res_args['files'] = handler.files_in_archive

    module.exit_json(**res_args)
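
The read loop above pulls the response body in BUFSIZE chunks rather than
loading it into memory at once. The same pattern with plain urllib (a sketch;
fetch_url itself adds proxy, auth and TLS handling on top):

import urllib.request

BUFSIZE = 65536

def download_to(url, path):
    """Stream url to path in BUFSIZE chunks to bound memory use."""
    with urllib.request.urlopen(url) as rsp, open(path, 'wb') as f:
        while True:
            data = rsp.read(BUFSIZE)
            if not data:
                break  # end of stream
            f.write(data)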
Example #36
def main():
    module = AnsibleModule(
        # not checking because of daisy chain to file module
        argument_spec=dict(
            src=dict(type='path', required=True),
            dest=dict(type='path', required=True),
            remote_src=dict(type='bool', default=False),
            creates=dict(type='path'),
            list_files=dict(type='bool', default=False),
            keep_newer=dict(type='bool', default=False),
            exclude=dict(type='list', default=[]),
            extra_opts=dict(type='list', default=[]),
            validate_certs=dict(type='bool', default=True),
        ),
        add_file_common_args=True,
        # check-mode only works for zip files, we cover that later
        supports_check_mode=True,
    )

    src = module.params['src']
    dest = module.params['dest']
    b_dest = to_bytes(dest, errors='surrogate_or_strict')
    remote_src = module.params['remote_src']
    file_args = module.load_file_common_arguments(module.params)

    # did tar file arrive?
    if not os.path.exists(src):
        if not remote_src:
            module.fail_json(msg="Source '%s' failed to transfer" % src)
        # If remote_src=true, and src= contains ://, try and download the file to a temp directory.
        elif '://' in src:
            src = fetch_file(module, src)
        else:
            module.fail_json(msg="Source '%s' does not exist" % src)
    if not os.access(src, os.R_OK):
        module.fail_json(msg="Source '%s' not readable" % src)

    # skip working with 0 size archives
    try:
        if os.path.getsize(src) == 0:
            module.fail_json(msg="Invalid archive '%s', the file is 0 bytes" %
                             src)
    except Exception as e:
        module.fail_json(msg="Source '%s' not readable, %s" %
                         (src, to_native(e)))

    # is dest OK to receive tar file?
    if not os.path.isdir(b_dest):
        module.fail_json(msg="Destination '%s' is not a directory" % dest)

    handler = pick_handler(src, b_dest, file_args, module)

    res_args = dict(handler=handler.__class__.__name__, dest=dest, src=src)

    # do we need to do unpack?
    check_results = handler.is_unarchived()

    # DEBUG
    # res_args['check_results'] = check_results

    if module.check_mode:
        res_args['changed'] = not check_results['unarchived']
    elif check_results['unarchived']:
        res_args['changed'] = False
    else:
        # do the unpack
        try:
            res_args['extract_results'] = handler.unarchive()
            if res_args['extract_results']['rc'] != 0:
                module.fail_json(msg="failed to unpack %s to %s" % (src, dest),
                                 **res_args)
        except IOError:
            module.fail_json(msg="failed to unpack %s to %s" % (src, dest),
                             **res_args)
        else:
            res_args['changed'] = True

    # Get diff if required
    if check_results.get('diff', False):
        res_args['diff'] = {'prepared': check_results['diff']}

    # Run only if we found differences (idempotence) or diff was missing
    if res_args.get('diff', True) and not module.check_mode:
        # do we need to change perms?
        for filename in handler.files_in_archive:
            file_args['path'] = os.path.join(
                b_dest, to_bytes(filename, errors='surrogate_or_strict'))

            try:
                res_args['changed'] = module.set_fs_attributes_if_different(
                    file_args, res_args['changed'], expand=False)
            except (IOError, OSError) as e:
                module.fail_json(
                    msg="Unexpected error when accessing exploded file: %s" %
                    to_native(e),
                    **res_args)

    if module.params['list_files']:
        res_args['files'] = handler.files_in_archive

    module.exit_json(**res_args)
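
pick_handler() is defined outside this excerpt and chooses an extractor per
archive type. The dispatch idea, reduced to stdlib probing (a sketch, not the
module's real handler classes, which also shell out to tar/unzip and compute
diffs):

import tarfile
import zipfile

def pick_extractor(src):
    """Map an archive path to a stdlib extraction callable."""
    if zipfile.is_zipfile(src):
        def extract(dest):
            with zipfile.ZipFile(src) as zf:
                zf.extractall(dest)
        return extract
    if tarfile.is_tarfile(src):
        def extract(dest):
            with tarfile.open(src) as tf:
                tf.extractall(dest)
        return extract
    raise ValueError('no handler for %s' % src)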
Example #37
def main():
    module = AnsibleModule(
        argument_spec=dict(
            group_id=dict(required=True),
            artifact_id=dict(required=True),
            version=dict(default=None),
            version_by_spec=dict(default=None),
            classifier=dict(default=''),
            extension=dict(default='jar'),
            repository_url=dict(default='https://repo1.maven.org/maven2'),
            username=dict(default=None, aliases=['aws_secret_key']),
            password=dict(default=None, no_log=True, aliases=['aws_secret_access_key']),
            headers=dict(type='dict'),
            force_basic_auth=dict(default=False, type='bool'),
            state=dict(default="present", choices=["present", "absent"]),  # TODO - Implement a "latest" state
            timeout=dict(default=10, type='int'),
            dest=dict(type="path", required=True),
            validate_certs=dict(required=False, default=True, type='bool'),
            client_cert=dict(type="path", required=False),
            client_key=dict(type="path", required=False),
            keep_name=dict(required=False, default=False, type='bool'),
            verify_checksum=dict(required=False, default='download', choices=['never', 'download', 'change', 'always']),
            checksum_alg=dict(required=False, default='md5', choices=['md5', 'sha1']),
            directory_mode=dict(type='str'),
        ),
        add_file_common_args=True,
        mutually_exclusive=([('version', 'version_by_spec')])
    )

    if not HAS_LXML_ETREE:
        module.fail_json(msg=missing_required_lib('lxml'), exception=LXML_ETREE_IMP_ERR)

    if module.params['version_by_spec'] and not HAS_SEMANTIC_VERSION:
        module.fail_json(msg=missing_required_lib('semantic_version'), exception=SEMANTIC_VERSION_IMP_ERR)

    repository_url = module.params["repository_url"]
    if not repository_url:
        repository_url = "https://repo1.maven.org/maven2"
    try:
        parsed_url = urlparse(repository_url)
    except AttributeError as e:
        module.fail_json(msg='URL parsing went wrong: %s' % e)

    local = parsed_url.scheme == "file"

    if parsed_url.scheme == 's3' and not HAS_BOTO:
        module.fail_json(msg=missing_required_lib('boto3', reason='when using s3:// repository URLs'),
                         exception=BOTO_IMP_ERR)

    group_id = module.params["group_id"]
    artifact_id = module.params["artifact_id"]
    version = module.params["version"]
    version_by_spec = module.params["version_by_spec"]
    classifier = module.params["classifier"]
    extension = module.params["extension"]
    headers = module.params['headers']
    state = module.params["state"]
    dest = module.params["dest"]
    b_dest = to_bytes(dest, errors='surrogate_or_strict')
    keep_name = module.params["keep_name"]
    verify_checksum = module.params["verify_checksum"]
    verify_download = verify_checksum in ['download', 'always']
    verify_change = verify_checksum in ['change', 'always']
    checksum_alg = module.params["checksum_alg"]

    downloader = MavenDownloader(module, repository_url, local, headers)

    if not version_by_spec and not version:
        version = "latest"

    try:
        artifact = Artifact(group_id, artifact_id, version, version_by_spec, classifier, extension)
    except ValueError as e:
        module.fail_json(msg=e.args[0])

    changed = False
    prev_state = "absent"

    if dest.endswith(os.sep):
        b_dest = to_bytes(dest, errors='surrogate_or_strict')
        if not os.path.exists(b_dest):
            (pre_existing_dir, new_directory_list) = split_pre_existing_dir(dest)
            os.makedirs(b_dest)
            directory_args = module.load_file_common_arguments(module.params)
            directory_mode = module.params["directory_mode"]
            if directory_mode is not None:
                directory_args['mode'] = directory_mode
            else:
                directory_args['mode'] = None
            changed = adjust_recursive_directory_permissions(pre_existing_dir, new_directory_list, module, directory_args, changed)

    if os.path.isdir(b_dest):
        version_part = version
        if version == 'latest':
            version_part = downloader.find_latest_version_available(artifact)
        elif version_by_spec:
            version_part = downloader.find_version_by_spec(artifact)

        filename = "{artifact_id}{version_part}{classifier}.{extension}".format(
            artifact_id=artifact_id,
            version_part="-{0}".format(version_part) if keep_name else "",
            classifier="-{0}".format(classifier) if classifier else "",
            extension=extension
        )
        dest = posixpath.join(dest, filename)

        b_dest = to_bytes(dest, errors='surrogate_or_strict')

    if os.path.lexists(b_dest) and ((not verify_change) or not downloader.is_invalid_checksum(dest, downloader.find_uri_for_artifact(artifact), checksum_alg)):
        prev_state = "present"

    if prev_state == "absent":
        try:
            download_error = downloader.download(module.tmpdir, artifact, verify_download, b_dest, checksum_alg)
            if download_error is None:
                changed = True
            else:
                module.fail_json(msg="Cannot retrieve the artifact to destination: " + download_error)
        except ValueError as e:
            module.fail_json(msg=e.args[0])

    try:
        file_args = module.load_file_common_arguments(module.params, path=dest)
    except TypeError:
        # The path argument is only supported in Ansible-base 2.10+. Fall back to
        # pre-2.10 behavior for older Ansible versions.
        module.params['path'] = dest
        file_args = module.load_file_common_arguments(module.params)
    changed = module.set_fs_attributes_if_different(file_args, changed)
    if changed:
        module.exit_json(state=state, dest=dest, group_id=group_id, artifact_id=artifact_id, version=version, classifier=classifier,
                         extension=extension, repository_url=repository_url, changed=changed)
    else:
        module.exit_json(state=state, dest=dest, changed=changed)
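
The destination filename assembled in the isdir branch above depends on
keep_name and classifier. As a pure function (artifact_filename is a
hypothetical name; the format string is taken from the code above):

def artifact_filename(artifact_id, version_part, classifier, extension, keep_name):
    """Build e.g. 'lib-1.2.3-sources.jar' the way the dest join above does."""
    return "{artifact_id}{version_part}{classifier}.{extension}".format(
        artifact_id=artifact_id,
        version_part="-{0}".format(version_part) if keep_name else "",
        classifier="-{0}".format(classifier) if classifier else "",
        extension=extension,
    )

assert artifact_filename('lib', '1.2.3', 'sources', 'jar', True) == 'lib-1.2.3-sources.jar'
assert artifact_filename('lib', '1.2.3', '', 'jar', False) == 'lib.jar'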
Example #38
def main():
    argument_spec = dict(
        action=dict(type='str', default='export', choices=['export', 'parse']),
        other_certificates=dict(type='list',
                                elements='path',
                                aliases=['ca_certificates']),
        certificate_path=dict(type='path'),
        force=dict(type='bool', default=False),
        friendly_name=dict(type='str', aliases=['name']),
        iter_size=dict(type='int', default=2048),
        maciter_size=dict(type='int', default=1),
        passphrase=dict(type='str', no_log=True),
        path=dict(type='path', required=True),
        privatekey_passphrase=dict(type='str', no_log=True),
        privatekey_path=dict(type='path'),
        state=dict(type='str',
                   default='present',
                   choices=['absent', 'present']),
        src=dict(type='path'),
        backup=dict(type='bool', default=False),
    )

    required_if = [
        ['action', 'parse', ['src']],
    ]

    module = AnsibleModule(
        add_file_common_args=True,
        argument_spec=argument_spec,
        required_if=required_if,
        supports_check_mode=True,
    )

    if not pyopenssl_found:
        module.fail_json(msg=missing_required_lib('pyOpenSSL'),
                         exception=PYOPENSSL_IMP_ERR)

    base_dir = os.path.dirname(module.params['path']) or '.'
    if not os.path.isdir(base_dir):
        module.fail_json(
            name=base_dir,
            msg=
            "The directory '%s' does not exist or the path is not a directory"
            % base_dir)

    try:
        pkcs12 = Pkcs(module)
        changed = False

        if module.params['state'] == 'present':
            if module.check_mode:
                result = pkcs12.dump()
                result['changed'] = module.params['force'] or not pkcs12.check(
                    module)
                module.exit_json(**result)

            if not pkcs12.check(
                    module, perms_required=False) or module.params['force']:
                if module.params['action'] == 'export':
                    if not module.params['friendly_name']:
                        module.fail_json(msg="The 'friendly_name' parameter is required when action is 'export'")
                    pkcs12_content = pkcs12.generate(module)
                    pkcs12.write(module, pkcs12_content, 0o600)
                    changed = True
                else:
                    pkey, cert, other_certs, friendly_name = pkcs12.parse()
                    dump_content = '%s%s%s' % (to_native(pkey), to_native(cert),
                                               to_native(b''.join(other_certs)))
                    pkcs12.write(module, to_bytes(dump_content))
                    changed = True

            file_args = module.load_file_common_arguments(module.params)
            if module.set_fs_attributes_if_different(file_args, changed):
                changed = True
        else:
            if module.check_mode:
                result = pkcs12.dump()
                result['changed'] = os.path.exists(module.params['path'])
                module.exit_json(**result)

            if os.path.exists(module.params['path']):
                pkcs12.remove(module)
                changed = True

        result = pkcs12.dump()
        result['changed'] = changed
        if os.path.exists(module.params['path']):
            file_mode = "%04o" % stat.S_IMODE(
                os.stat(module.params['path']).st_mode)
            result['mode'] = file_mode

        module.exit_json(**result)
    except crypto_utils.OpenSSLObjectError as exc:
        module.fail_json(msg=to_native(exc))
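
The check-mode branches above follow a common early-exit pattern: compute the change that would happen, report it, and return before touching the filesystem. A minimal sketch of that pattern, with obj standing in for the Pkcs instance (any object exposing dump() and check() works):

def exit_early_in_check_mode(module, obj):
    # In check mode, report the would-be change and stop; exit_json()
    # terminates the module, so nothing below it runs.
    if module.check_mode:
        result = obj.dump()
        result['changed'] = module.params['force'] or not obj.check(module)
        module.exit_json(**result)
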
Example #39
def main():
    argument_spec = url_argument_spec()
    argument_spec.update(
        dest=dict(type='path'),
        url_username=dict(type='str', aliases=['user']),
        url_password=dict(type='str', aliases=['password'], no_log=True),
        body=dict(type='raw'),
        body_format=dict(type='str',
                         default='raw',
                         choices=['form-urlencoded', 'json', 'raw']),
        src=dict(type='path'),
        method=dict(type='str', default='GET'),
        return_content=dict(type='bool', default=False),
        follow_redirects=dict(
            type='str',
            default='safe',
            choices=['all', 'no', 'none', 'safe', 'urllib2', 'yes']),
        creates=dict(type='path'),
        removes=dict(type='path'),
        status_code=dict(type='list', default=[200]),
        timeout=dict(type='int', default=30),
        headers=dict(type='dict', default={}),
        unix_socket=dict(type='path'),
        remote_src=dict(type='bool', default=False),
    )

    module = AnsibleModule(
        argument_spec=argument_spec,
        add_file_common_args=True,
        mutually_exclusive=[['body', 'src']],
    )

    if module.params.get('thirsty'):
        module.deprecate(
            'The alias "thirsty" has been deprecated and will be removed, use "force" instead',
            version='2.13')

    url = module.params['url']
    body = module.params['body']
    body_format = module.params['body_format'].lower()
    method = module.params['method'].upper()
    dest = module.params['dest']
    return_content = module.params['return_content']
    creates = module.params['creates']
    removes = module.params['removes']
    status_code = [int(x) for x in list(module.params['status_code'])]
    socket_timeout = module.params['timeout']

    dict_headers = module.params['headers']

    if not re.match('^[A-Z]+$', method):
        module.fail_json(
            msg="Parameter 'method' needs to be a single word in uppercase, like GET or POST.")

    if body_format == 'json':
        # Encode the body unless it's a string, then assume it is pre-formatted JSON
        if not isinstance(body, string_types):
            body = json.dumps(body)
        if 'content-type' not in [header.lower() for header in dict_headers]:
            dict_headers['Content-Type'] = 'application/json'
    elif body_format == 'form-urlencoded':
        if not isinstance(body, string_types):
            try:
                body = form_urlencoded(body)
            except ValueError as e:
                module.fail_json(
                    msg='failed to parse body as form_urlencoded: %s' %
                    to_native(e),
                    elapsed=0)
        if 'content-type' not in [header.lower() for header in dict_headers]:
            dict_headers['Content-Type'] = 'application/x-www-form-urlencoded'

    if creates is not None:
        # Do not make the request if `creates` points to a file that already
        # exists.  This allows idempotent uri executions.
        if os.path.exists(creates):
            module.exit_json(stdout="skipped, since '%s' exists" % creates,
                             changed=False)

    if removes is not None:
        # Do not make the request if `removes` points to a file that does not
        # exist.  This allows idempotent uri executions.
        if not os.path.exists(removes):
            module.exit_json(stdout="skipped, since '%s' does not exist" %
                             removes,
                             changed=False)

    # Make the request
    start = datetime.datetime.utcnow()
    resp, content, dest = uri(module, url, dest, body, body_format, method,
                              dict_headers, socket_timeout)
    resp['elapsed'] = (datetime.datetime.utcnow() - start).seconds
    resp['status'] = int(resp['status'])
    resp['changed'] = False

    # Write the file out if requested
    if dest is not None:
        if resp['status'] in status_code and resp['status'] != 304:
            write_file(module, url, dest, content, resp)
            # allow file attribute changes
            resp['changed'] = True
            file_args = module.load_file_common_arguments(module.params,
                                                          path=dest)
            resp['changed'] = module.set_fs_attributes_if_different(
                file_args, resp['changed'])
        resp['path'] = dest

    # Transmogrify the headers, replacing '-' with '_', since variables don't
    # work with dashes.
    # In python3, the headers are title cased.  Lowercase them to be
    # compatible with the python2 behaviour.
    uresp = {}
    for key, value in iteritems(resp):
        ukey = key.replace("-", "_").lower()
        uresp[ukey] = value

    if 'location' in uresp:
        uresp['location'] = absolute_location(url, uresp['location'])

    # Default content_encoding to try
    content_encoding = 'utf-8'
    if 'content_type' in uresp:
        # Handle multiple Content-Type headers
        charsets = []
        content_types = []
        for value in uresp['content_type'].split(','):
            ct, params = cgi.parse_header(value)
            if ct not in content_types:
                content_types.append(ct)
            if 'charset' in params:
                if params['charset'] not in charsets:
                    charsets.append(params['charset'])

        if content_types:
            content_type = content_types[0]
            if len(content_types) > 1:
                module.warn(
                    'Received multiple conflicting Content-Type values (%s), using %s'
                    % (', '.join(content_types), content_type))
        if charsets:
            content_encoding = charsets[0]
            if len(charsets) > 1:
                module.warn(
                    'Received multiple conflicting charset values (%s), using %s'
                    % (', '.join(charsets), content_encoding))

        u_content = to_text(content, encoding=content_encoding)
        if any(candidate in content_type for candidate in JSON_CANDIDATES):
            try:
                js = json.loads(u_content)
                uresp['json'] = js
            except Exception:
                if PY2:
                    # Avoid false positive traceback in fail_json() on Python 2
                    sys.exc_clear()
    else:
        u_content = to_text(content, encoding=content_encoding)

    if resp['status'] not in status_code:
        uresp['msg'] = 'Status code was %s and not %s: %s' % (
            resp['status'], status_code, uresp.get('msg', ''))
        module.fail_json(content=u_content, **uresp)
    elif return_content:
        module.exit_json(content=u_content, **uresp)
    else:
        module.exit_json(**uresp)
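
The charset selection above can be exercised on its own. A standalone sketch using the same stdlib helper as the module (cgi.parse_header, which was removed in Python 3.13; email.message offers a replacement there):

import cgi

def pick_encoding(content_type_header, default='utf-8'):
    # Collect distinct charsets across possibly multiple Content-Type values
    charsets = []
    for value in content_type_header.split(','):
        _ct, params = cgi.parse_header(value)
        if 'charset' in params and params['charset'] not in charsets:
            charsets.append(params['charset'])
    # The first charset wins, mirroring the module's behaviour
    return charsets[0] if charsets else default

# pick_encoding('text/html; charset=ISO-8859-1') -> 'ISO-8859-1'
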
Example #40
def main():
    module = AnsibleModule(
        # not checking because of daisy chain to file module
        argument_spec=dict(
            src=dict(type='path', required=True),
            # original_basename is used to handle 'dest is a directory' via
            # template, a slight hack
            original_basename=dict(type='str'),
            dest=dict(type='path', required=True),
            remote_src=dict(type='bool', default=False),
            creates=dict(type='path'),
            list_files=dict(type='bool', default=False),
            keep_newer=dict(type='bool', default=False),
            exclude=dict(type='list', default=[]),
            extra_opts=dict(type='list', default=[]),
            validate_certs=dict(type='bool', default=True),
        ),
        add_file_common_args=True,
        # check-mode only works for zip files, we cover that later
        supports_check_mode=True,
    )

    src = module.params['src']
    dest = module.params['dest']
    remote_src = module.params['remote_src']
    file_args = module.load_file_common_arguments(module.params)

    # did tar file arrive?
    if not os.path.exists(src):
        if not remote_src:
            module.fail_json(msg="Source '%s' failed to transfer" % src)
        # If remote_src=true and src contains ://, try to download the file to a temp directory.
        elif '://' in src:
            tempdir = os.path.dirname(os.path.realpath(__file__))
            package = os.path.join(tempdir, str(src.rsplit('/', 1)[1]))
            try:
                rsp, info = fetch_url(module, src)
                # If download fails, raise a proper exception
                if rsp is None:
                    raise Exception(info['msg'])

                # open in binary mode for python3
                f = open(package, 'wb')
                # Read BUFSIZE bytes at a time to keep memory use low
                while True:
                    data = rsp.read(BUFSIZE)
                    data = to_bytes(data, errors='surrogate_or_strict')

                    if len(data) < 1:
                        break  # End of file, break while loop

                    f.write(data)
                f.close()
                src = package
            except Exception as e:
                module.fail_json(msg="Failure downloading %s, %s" %
                                 (src, to_native(e)))
        else:
            module.fail_json(msg="Source '%s' does not exist" % src)
    if not os.access(src, os.R_OK):
        module.fail_json(msg="Source '%s' not readable" % src)

    # skip working with 0 size archives
    try:
        if os.path.getsize(src) == 0:
            module.fail_json(msg="Invalid archive '%s', the file is 0 bytes" %
                             src)
    except Exception as e:
        module.fail_json(msg="Source '%s' not readable, %s" %
                         (src, to_native(e)))

    # is dest OK to receive tar file?
    if not os.path.isdir(dest):
        module.fail_json(msg="Destination '%s' is not a directory" % dest)

    handler = pick_handler(src, dest, file_args, module)

    res_args = dict(handler=handler.__class__.__name__, dest=dest, src=src)

    # do we need to do unpack?
    check_results = handler.is_unarchived()

    # DEBUG
    # res_args['check_results'] = check_results

    if module.check_mode:
        res_args['changed'] = not check_results['unarchived']
    elif check_results['unarchived']:
        res_args['changed'] = False
    else:
        # do the unpack
        try:
            res_args['extract_results'] = handler.unarchive()
            if res_args['extract_results']['rc'] != 0:
                module.fail_json(msg="failed to unpack %s to %s" % (src, dest),
                                 **res_args)
        except IOError:
            module.fail_json(msg="failed to unpack %s to %s" % (src, dest),
                             **res_args)
        else:
            res_args['changed'] = True

    # Get diff if required
    if check_results.get('diff', False):
        res_args['diff'] = {'prepared': check_results['diff']}

    # Run only if we found differences (idempotence) or diff was missing
    if res_args.get('diff', True) and not module.check_mode:
        # do we need to change perms?
        for filename in handler.files_in_archive:
            file_args['path'] = os.path.join(dest, filename)
            try:
                res_args['changed'] = module.set_fs_attributes_if_different(
                    file_args, res_args['changed'], expand=False)
            except (IOError, OSError) as e:
                module.fail_json(
                    msg="Unexpected error when accessing exploded file: %s" %
                    to_native(e),
                    **res_args)

    if module.params['list_files']:
        res_args['files'] = handler.files_in_archive

    module.exit_json(**res_args)
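
The chunked download loop in this example is independent of Ansible's fetch_url and can be sketched standalone. A minimal Python 3 version; the BUFSIZE value here is an assumption for the sketch, since the module defines its own elsewhere in the file:

from urllib.request import urlopen

BUFSIZE = 65536  # assumed for the sketch; the module defines its own value

def download_to(url, dest):
    # Stream the response to disk in BUFSIZE chunks to bound memory use
    with urlopen(url) as rsp, open(dest, 'wb') as f:
        while True:
            data = rsp.read(BUFSIZE)
            if not data:
                break  # end of stream
            f.write(data)
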
Example #41
def main():
    '''
    Entry point: parse the module parameters and apply the requested
    pg_hba rule changes.
    '''
    module = AnsibleModule(
        argument_spec=dict(
            backup=dict(type='bool', default=False),
            contype=dict(type='str', default=None, choices=PG_HBA_TYPES + ['']),
            create=dict(type='bool', default=False),
            databases=dict(type='str', default='all'),
            dest=dict(type='path', required=True),
            method=dict(type='str', default='md5', choices=PG_HBA_METHODS),
            netmask=dict(type='str', default=''),
            options=dict(type='str', default=''),
            order=dict(type='str', default="sdu", choices=PG_HBA_ORDERS),
            state=dict(type='str',
                       default="present",
                       choices=["absent", "present"]),
            src=dict(type='str', default='samehost', aliases=['source']),
            users=dict(type='str', default='all'),
        ),
        add_file_common_args=True,
        supports_check_mode=True,
    )
    if not HAS_IPADDRESS:
        module.fail_json(msg='Missing required library: ipaddress.')

    contype = module.params["contype"]
    create = module.params["create"]
    if module.check_mode:
        backup = False
    else:
        backup = module.params['backup']
    databases = module.params["databases"]
    dest = module.params["dest"]

    method = module.params["method"]
    netmask = module.params["netmask"]
    options = module.params["options"]
    order = module.params["order"]
    source = module.params["src"]
    state = module.params["state"]
    users = module.params["users"]

    ret = {'msgs': []}

    pg_hba = PgHba(dest, order, backup=backup, create=create)

    if contype:
        for rule in new_rules(contype, databases, users, source, netmask,
                              method, options):
            if state == "present":
                ret['msgs'].append('Adding')
                pg_hba.add_rule(rule)
            else:
                ret['msgs'].append('Removing')
                pg_hba.remove_rule(rule)

        file_args = module.load_file_common_arguments(module.params)
        ret['changed'] = changed = pg_hba.changed()
        if changed:
            ret['msgs'].append('Changed')
            ret['diff'] = pg_hba.diff

        if not module.check_mode:
            ret['msgs'].append('Writing')
            pg_hba.write()
            module.set_fs_attributes_if_different(file_args,
                                                  True,
                                                  pg_hba.diff,
                                                  expand=False)
            if pg_hba.last_backup:
                ret['backup_file'] = pg_hba.last_backup

    ret['pg_hba'] = list(pg_hba.get_rules())
    module.exit_json(**ret)
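
The add/remove-then-write flow above keeps check mode safe by computing the changed state before any write. A minimal sketch of that flow, with store standing in for the PgHba object (add_rule/remove_rule/changed/write are the same method names used above):

def apply_rules(module, store, rules, state):
    # Mutate the in-memory representation first ...
    for rule in rules:
        if state == 'present':
            store.add_rule(rule)
        else:
            store.remove_rule(rule)
    changed = store.changed()
    # ... and only touch the file outside check mode
    if changed and not module.check_mode:
        store.write()
    return changed
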
Example #42
def main():

    module = AnsibleModule(
        # not checking because of daisy chain to file module
        argument_spec=dict(
            src=dict(required=True, type='path'),
            delimiter=dict(required=False),
            dest=dict(required=True, type='path'),
            backup=dict(default=False, type='bool'),
            remote_src=dict(default=False, type='bool'),
            regexp=dict(required=False),
            ignore_hidden=dict(default=False, type='bool'),
            validate=dict(required=False, type='str'),
        ),
        add_file_common_args=True,
    )

    changed = False
    path_hash = None
    dest_hash = None
    src = module.params['src']
    dest = module.params['dest']
    backup = module.params['backup']
    delimiter = module.params['delimiter']
    regexp = module.params['regexp']
    compiled_regexp = None
    ignore_hidden = module.params['ignore_hidden']
    validate = module.params.get('validate', None)

    result = dict(src=src, dest=dest)
    if not os.path.exists(src):
        module.fail_json(msg="Source (%s) does not exist" % src)

    if not os.path.isdir(src):
        module.fail_json(msg="Source (%s) is not a directory" % src)

    if regexp is not None:
        try:
            compiled_regexp = re.compile(regexp)
        except re.error as e:
            module.fail_json(msg="Invalid Regexp (%s) in \"%s\"" %
                             (to_native(e), regexp))

    if validate and "%s" not in validate:
        module.fail_json(msg="validate must contain %%s: %s" % validate)

    path = assemble_from_fragments(src, delimiter, compiled_regexp,
                                   ignore_hidden, module.tmpdir)
    path_hash = module.sha1(path)
    result['checksum'] = path_hash

    # Backwards compat.  This won't return data if FIPS mode is active
    try:
        pathmd5 = module.md5(path)
    except ValueError:
        pathmd5 = None
    result['md5sum'] = pathmd5

    if os.path.exists(dest):
        dest_hash = module.sha1(dest)

    if path_hash != dest_hash:
        if validate:
            (rc, out, err) = module.run_command(validate % path)
            result['validation'] = dict(rc=rc, stdout=out, stderr=err)
            if rc != 0:
                cleanup(path)
                module.fail_json(msg="failed to validate: rc:%s error:%s" %
                                 (rc, err))
        if backup and dest_hash is not None:
            result['backup_file'] = module.backup_local(dest)

        module.atomic_move(path,
                           dest,
                           unsafe_writes=module.params['unsafe_writes'])
        changed = True

    cleanup(path, result)

    # handle file permissions
    file_args = module.load_file_common_arguments(module.params)
    result['changed'] = module.set_fs_attributes_if_different(
        file_args, changed)

    # Mission complete
    result['msg'] = "OK"
    module.exit_json(**result)
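
The validate contract in this example (a command template that must contain %s, substituted with the assembled temp file before it replaces dest) can be shown outside Ansible. A standalone sketch; run_validate is an illustrative name:

import shlex
import subprocess

def run_validate(validate_cmd, path):
    # Mirror the module's guard: the template must have a %s placeholder
    if '%s' not in validate_cmd:
        raise ValueError('validate must contain %%s: %s' % validate_cmd)
    # module.run_command() splits the string itself; shlex does the same here
    cmd = shlex.split(validate_cmd % path)
    proc = subprocess.run(cmd, capture_output=True, text=True)
    return proc.returncode, proc.stdout, proc.stderr

# e.g. run_validate('visudo -cf %s', '/tmp/sudoers.new')
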
Example #43
def run_module():
    module_args = dict(
        cluster=dict(type='str', required=False, default='ceph'),
        name=dict(type='str', required=False),
        state=dict(type='str', required=True),
        caps=dict(type='dict', required=False, default=None),
        secret=dict(type='str', required=False, default=None),
        import_key=dict(type='bool', required=False, default=True),
        dest=dict(type='str', required=False, default='/etc/ceph/'),
    )

    module = AnsibleModule(
        argument_spec=module_args,
        supports_check_mode=True,
        add_file_common_args=True,
    )

    # Gather module parameters in variables
    state = module.params['state']
    name = module.params.get('name')
    cluster = module.params.get('cluster')
    caps = module.params.get('caps')
    secret = module.params.get('secret')
    import_key = module.params.get('import_key')
    dest = module.params.get('dest')

    result = dict(
        changed=False,
        stdout='',
        stderr='',
        rc='',
        start='',
        end='',
        delta='',
    )

    if module.check_mode:
        return result

    startd = datetime.datetime.now()

    # will return either the image name or None
    container_image = is_containerized()

    # Test if the key exists, if it does we skip its creation
    # We only want to run this check when a key needs to be added
    # There is no guarantee that any cluster is running and we don't need one
    if import_key:
        user = "******"
        keyring_filename = cluster + '.' + user + '.keyring'
        user_key = os.path.join("/etc/ceph/", keyring_filename)
        output_format = "json"
        rc, cmd, out, err = exec_commands(
            module, info_key(cluster, name, user, user_key, output_format, container_image))  # noqa E501

    if state == "present":
        if not caps:
            fatal("Capabilities must be provided when state is 'present'", module)  # noqa E501

        # if dest is not a directory, the user wants to change the file's name
        # (e.g. /etc/ceph/ceph.mgr.ceph-mon2.keyring)
        if not os.path.isdir(dest):
            file_path = dest
        elif 'bootstrap' in dest:
            # Build a different path for bootstrap keys as they are stored as
            # /var/lib/ceph/bootstrap-rbd/ceph.keyring
            keyring_filename = cluster + '.keyring'
            file_path = os.path.join(dest, keyring_filename)
        else:
            keyring_filename = cluster + "." + name + ".keyring"
            file_path = os.path.join(dest, keyring_filename)

        # We allow 'present' to override any existing key
        # ONLY if a secret is provided
        # if not we skip the creation
        if import_key:
            if rc == 0 and not secret:
                # If the key exists in Ceph we must fetch it on the system
                # because nothing tells us it exists on the fs or not
                rc, cmd, out, err = exec_commands(module, get_key(cluster, name, file_path, container_image))  # noqa E501
                result["stdout"] = "skipped, since {0} already exists, we only fetched the key at {1}. If you want to update a key use 'state: update'".format(  # noqa E501
                    name, file_path)
                result['rc'] = rc
                module.exit_json(**result)

        rc, cmd, out, err = exec_commands(module, create_key(
            module, result, cluster, name, secret, caps, import_key, file_path, container_image))  # noqa E501

        file_args = module.load_file_common_arguments(module.params)
        file_args['path'] = file_path
        module.set_fs_attributes_if_different(file_args, False)
    elif state == "update":
        if not caps:
            fatal("Capabilities must be provided when state is 'update'", module)  # noqa E501

        if rc != 0:
            result["stdout"] = "skipped, since {0} does not exist".format(name)
            result['rc'] = 0
            module.exit_json(**result)

        rc, cmd, out, err = exec_commands(
            module, update_key(cluster, name, caps, container_image))
        # After the update we don't need to overwrite the key on the filesystem
        # since the secret has not changed

    elif state == "absent":
        rc, cmd, out, err = exec_commands(
            module, delete_key(cluster, name, container_image))

    elif state == "info":
        if rc != 0:
            result["stdout"] = "skipped, since {0} does not exist".format(name)
            result['rc'] = 0
            module.exit_json(**result)

        user = "******"
        keyring_filename = cluster + '.' + user + '.keyring'
        user_key = os.path.join("/etc/ceph/", keyring_filename)
        output_format = "json"
        rc, cmd, out, err = exec_commands(
            module, info_key(cluster, name, user, user_key, output_format, container_image))  # noqa E501

    elif state == "list":
        user = "******"
        keyring_filename = cluster + '.' + user + '.keyring'
        user_key = os.path.join("/etc/ceph/", keyring_filename)
        rc, cmd, out, err = exec_commands(
            module, list_keys(cluster, user, user_key, container_image))

    elif state == "fetch_initial_keys":
        hostname = socket.gethostname().split('.', 1)[0]
        user = "******"
        keyring_filename = cluster + "-" + hostname + "/keyring"
        user_key = os.path.join("/var/lib/ceph/mon/", keyring_filename)
        rc, cmd, out, err = exec_commands(
            module, list_keys(cluster, user, user_key, container_image))
        if rc != 0:
            result["stdout"] = "failed to retrieve ceph keys".format(name)
            result["sdterr"] = err
            result['rc'] = 0
            module.exit_json(**result)

        entities = lookup_ceph_initial_entities(module, out)

        output_format = "plain"
        for entity in entities:
            key_path = build_key_path(cluster, entity)
            if key_path is None:
                fatal("Failed to build key path, no entity yet?", module)
            elif os.path.isfile(key_path):
                # if the key is already on the filesystem
                # there is no need to fetch it again
                continue

            extra_args = [
                '-o',
                key_path,
            ]

            info_cmd = info_key(cluster, entity, user,
                                user_key, output_format, container_image)
            # info_cmd is a list wrapping a single command list, so extend
            # the inner command (info_cmd[0]) with the extra output args
            info_cmd[0].extend(extra_args)
            rc, cmd, out, err = exec_commands(module, info_cmd)

            file_args = module.load_file_common_arguments(module.params)
            file_args['path'] = key_path
            module.set_fs_attributes_if_different(file_args, False)
    else:
        module.fail_json(
            msg='State must be one of "present", "absent", "update", "list", "info" or "fetch_initial_keys".', changed=False, rc=1)  # noqa E501

    endd = datetime.datetime.now()
    delta = endd - startd

    result = dict(
        cmd=cmd,
        start=str(startd),
        end=str(endd),
        delta=str(delta),
        rc=rc,
        stdout=out.rstrip("\r\n"),
        stderr=err.rstrip("\r\n"),
        changed=True,
    )

    if rc != 0:
        module.fail_json(msg='non-zero return code', **result)

    module.exit_json(**result)
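
The timing bookkeeping in this example (start/end captured around the external commands, with the delta reported as strings) is generic enough to sketch on its own. A minimal version; run is a stand-in for whatever callable returns (rc, out, err):

import datetime

def timed_result(run):
    startd = datetime.datetime.now()
    rc, out, err = run()
    endd = datetime.datetime.now()
    # Report times as strings, matching the result dict shape used above
    return dict(start=str(startd), end=str(endd), delta=str(endd - startd),
                rc=rc, stdout=out.rstrip('\r\n'), stderr=err.rstrip('\r\n'))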