Example No. 1
def main():
    module = AssibleModule(argument_spec=dict(
        state=dict(type='str', default='file', choices=['file', 'directory']),
        path=dict(type='path'),
        prefix=dict(type='str', default='assible.'),
        suffix=dict(type='str', default=''),
    ), )

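    # create a temporary file or directory, depending on the requested state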
    try:
        if module.params['state'] == 'file':
            handle, path = mkstemp(
                prefix=module.params['prefix'],
                suffix=module.params['suffix'],
                dir=module.params['path'],
            )
            close(handle)
        elif module.params['state'] == 'directory':
            path = mkdtemp(
                prefix=module.params['prefix'],
                suffix=module.params['suffix'],
                dir=module.params['path'],
            )

        module.exit_json(changed=True, path=path)
    except Exception as e:
        module.fail_json(msg=to_native(e), exception=format_exc())
Example No. 2
def main():
    module = AssibleModule(
        argument_spec=dict(
            database=dict(type='str', required=True),
            key=dict(type='str'),
            service=dict(type='str'),
            split=dict(type='str'),
            fail_key=dict(type='bool', default=True),
        ),
        supports_check_mode=True,
    )

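    # databases whose getent output is colon-delimited by default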
    colon = ['passwd', 'shadow', 'group', 'gshadow']

    database = module.params['database']
    key = module.params.get('key')
    split = module.params.get('split')
    service = module.params.get('service')
    fail_key = module.params.get('fail_key')

    getent_bin = module.get_bin_path('getent', True)

    if key is not None:
        cmd = [getent_bin, database, key]
    else:
        cmd = [getent_bin, database]

    if service is not None:
        cmd.extend(['-s', service])

    if split is None and database in colon:
        split = ':'

    try:
        rc, out, err = module.run_command(cmd)
    except Exception as e:
        module.fail_json(msg=to_native(e), exception=traceback.format_exc())

    msg = "Unexpected failure!"
    dbtree = 'getent_%s' % database
    results = {dbtree: {}}

    if rc == 0:
        for line in out.splitlines():
            record = line.split(split)
            results[dbtree][record[0]] = record[1:]

        module.exit_json(assible_facts=results)

    elif rc == 1:
        msg = "Missing arguments, or database unknown."
    elif rc == 2:
        msg = "One or more supplied key could not be found in the database."
        if not fail_key:
            results[dbtree][key] = None
            module.exit_json(assible_facts=results, msg=msg)
    elif rc == 3:
        msg = "Enumeration not supported on this database."

    module.fail_json(msg=msg)
Example No. 3
def main():
    if "--interactive" in sys.argv:
        import assible.module_utils.basic
        assible.module_utils.basic._ASSIBLE_ARGS = json.dumps(
            dict(ASSIBLE_MODULE_ARGS=dict(fail_mode="graceful")))

    module = AssibleModule(argument_spec=dict(
        fail_mode=dict(type='list', default=['success'])))

    result = dict(changed=True)

    fail_mode = module.params['fail_mode']

    try:
        if 'leading_junk' in fail_mode:
            print("leading junk before module output")

        if 'graceful' in fail_mode:
            module.fail_json(msg="failed gracefully")

        if 'exception' in fail_mode:
            raise Exception('failing via exception')

        if 'stderr' in fail_mode:
            print('printed to stderr', file=sys.stderr)

        module.exit_json(**result)

    finally:
        if 'trailing_junk' in fail_mode:
            print("trailing junk after module output")
Example No. 4
def main():

    module = AssibleModule(argument_spec=dict(
        jid=dict(type='str', required=True),
        mode=dict(type='str', default='status', choices=['cleanup', 'status']),
        # passed in from the async_status action plugin
        _async_dir=dict(type='path', required=True),
    ))

    mode = module.params['mode']
    jid = module.params['jid']
    async_dir = module.params['_async_dir']

    # setup logging directory
    logdir = os.path.expanduser(async_dir)
    log_path = os.path.join(logdir, jid)

    if not os.path.exists(log_path):
        module.fail_json(msg="could not find job",
                         assible_job_id=jid,
                         started=1,
                         finished=1)

    if mode == 'cleanup':
        os.unlink(log_path)
        module.exit_json(assible_job_id=jid, erased=log_path)

    # NOT in cleanup mode, assume regular status mode
    # no remote kill mode currently exists, but probably should
    # consider log_path + ".pid" file and also unlink that above

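    # try to read and parse the job's result file; if it cannot be parsed yet, the job is still running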
    data = None
    try:
        with open(log_path) as f:
            data = json.loads(f.read())
    except Exception:
        if not data:
            # file not written yet?  That means it is running
            module.exit_json(results_file=log_path,
                             assible_job_id=jid,
                             started=1,
                             finished=0)
        else:
            module.fail_json(assible_job_id=jid,
                             results_file=log_path,
                             msg="Could not parse job output: %s" % data,
                             started=1,
                             finished=1)

    if 'started' not in data:
        data['finished'] = 1
        data['assible_job_id'] = jid
    elif 'finished' not in data:
        data['finished'] = 0

    # Fix error: TypeError: exit_json() keywords must be strings
    data = dict([(to_native(k), v) for k, v in iteritems(data)])

    module.exit_json(**data)
Example No. 5
def main():
    module = AssibleModule(
        argument_spec=dict(
            state=dict(default="present", choices=["present", "latest", "absent"], required=False),
            name=dict(aliases=["pkg"], required=True, type='list'),
            cached=dict(default=False, type='bool'),
            annotation=dict(default="", required=False),
            pkgsite=dict(default="", required=False),
            rootdir=dict(default="", required=False, type='path'),
            chroot=dict(default="", required=False, type='path'),
            jail=dict(default="", required=False, type='str'),
            autoremove=dict(default=False, type='bool')),
        supports_check_mode=True,
        mutually_exclusive=[["rootdir", "chroot", "jail"]])

    pkgng_path = module.get_bin_path('pkg', True)

    p = module.params

    pkgs = p["name"]

    changed = False
    msgs = []
    dir_arg = ""

    if p["rootdir"] != "":
        old_pkgng = pkgng_older_than(module, pkgng_path, [1, 5, 0])
        if old_pkgng:
            module.fail_json(msg="To use option 'rootdir' pkg version must be 1.5 or greater")
        else:
            dir_arg = "--rootdir %s" % (p["rootdir"])

    if p["chroot"] != "":
        dir_arg = '--chroot %s' % (p["chroot"])

    if p["jail"] != "":
        dir_arg = '--jail %s' % (p["jail"])

    if p["state"] in ("present", "latest"):
        _changed, _msg = install_packages(module, pkgng_path, pkgs, p["cached"], p["pkgsite"], dir_arg, p["state"])
        changed = changed or _changed
        msgs.append(_msg)

    elif p["state"] == "absent":
        _changed, _msg = remove_packages(module, pkgng_path, pkgs, dir_arg)
        changed = changed or _changed
        msgs.append(_msg)

    if p["autoremove"]:
        _changed, _msg = autoremove_packages(module, pkgng_path, dir_arg)
        changed = changed or _changed
        msgs.append(_msg)

    if p["annotation"]:
        _changed, _msg = annotate_packages(module, pkgng_path, pkgs, p["annotation"], dir_arg)
        changed = changed or _changed
        msgs.append(_msg)

    module.exit_json(changed=changed, msg=", ".join(msgs))
Example No. 6
def main():
    """main entry point for module execution
    """
    argument_spec = dict(
        commands=dict(type="list", required=True),
        wait_for=dict(type="list", aliases=["waitfor"]),
        match=dict(default="all", choices=["all", "any"]),
        retries=dict(default=10, type="int"),
        interval=dict(default=1, type="int"),
    )

    argument_spec.update(ios_argument_spec)

    module = AssibleModule(argument_spec=argument_spec,
                           supports_check_mode=True)

    warnings = list()
    result = {"changed": False, "warnings": warnings}
    commands = parse_commands(module, warnings)
    wait_for = module.params["wait_for"] or list()

    try:
        conditionals = [Conditional(c) for c in wait_for]
    except AttributeError as exc:
        module.fail_json(msg=to_text(exc))

    retries = module.params["retries"]
    interval = module.params["interval"]
    match = module.params["match"]

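    # re-run the commands until every wait_for conditional is satisfied or retries are exhausted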
    while retries > 0:
        responses = run_commands(module, commands)

        for item in list(conditionals):
            if item(responses):
                if match == "any":
                    conditionals = list()
                    break
                conditionals.remove(item)

        if not conditionals:
            break

        time.sleep(interval)
        retries -= 1

    if conditionals:
        failed_conditions = [item.raw for item in conditionals]
        msg = "One or more conditional statements have not been satisfied"
        module.fail_json(msg=msg, failed_conditions=failed_conditions)

    result.update({
        "stdout": responses,
        "stdout_lines": list(to_lines(responses))
    })

    module.exit_json(**result)
Example No. 7
def main():
    module = AssibleModule(
        argument_spec=dict(
            name=dict(required=True, type="str"),
        ),
        supports_check_mode=True,
    )
    if not HAS_PSUTIL:
        module.fail_json(msg="Missing required 'psutil' python module. Try installing it with: pip install psutil")
    name = module.params["name"]
    response = dict(pids=get_pid(name))
    module.exit_json(**response)
Example No. 8
def main():
    module = AssibleModule(
        argument_spec=dict(
            path=dict(type='path', required=True, aliases=['dest', 'destfile', 'name']),
            state=dict(type='str', default='present', choices=['absent', 'present']),
            regexp=dict(type='str', aliases=['regex']),
            line=dict(type='str', aliases=['value']),
            insertafter=dict(type='str'),
            insertbefore=dict(type='str'),
            backrefs=dict(type='bool', default=False),
            create=dict(type='bool', default=False),
            backup=dict(type='bool', default=False),
            firstmatch=dict(type='bool', default=False),
            validate=dict(type='str'),
        ),
        mutually_exclusive=[['insertbefore', 'insertafter']],
        add_file_common_args=True,
        supports_check_mode=True,
    )

    params = module.params
    create = params['create']
    backup = params['backup']
    backrefs = params['backrefs']
    path = params['path']
    firstmatch = params['firstmatch']
    regexp = params['regexp']
    line = params['line']

    if regexp == '':
        module.warn(
            "The regular expression is an empty string, which will match every line in the file. "
            "This may have unintended consequences, such as replacing the last line in the file rather than appending. "
            "If this is desired, use '^' to match every line in the file and avoid this warning.")

    b_path = to_bytes(path, errors='surrogate_or_strict')
    if os.path.isdir(b_path):
        module.fail_json(rc=256, msg='Path %s is a directory !' % path)

    if params['state'] == 'present':
        if backrefs and regexp is None:
            module.fail_json(msg='regexp is required with backrefs=true')

        if line is None:
            module.fail_json(msg='line is required with state=present')

        # Deal with the insertafter default value manually, to avoid errors
        # because of the mutually_exclusive mechanism.
        ins_bef, ins_aft = params['insertbefore'], params['insertafter']
        if ins_bef is None and ins_aft is None:
            ins_aft = 'EOF'

        present(module, path, regexp, line,
                ins_aft, ins_bef, create, backup, backrefs, firstmatch)
    else:
        if regexp is None and line is None:
            module.fail_json(msg='one of line or regexp is required with state=absent')

        absent(module, path, regexp, line, backup)
Example No. 9
def run_module():
    # define available arguments/parameters a user can pass to the module
    module_args = dict(name=dict(type='str', required=True),
                       new=dict(type='bool', required=False, default=False))

    # seed the result dict in the object
    # we primarily care about changed and state
    # changed is if this module effectively modified the target
    # state will include any data that you want your module to pass back
    # for consumption, for example, in a subsequent task
    result = dict(changed=False, original_message='', message='')

    # the AssibleModule object will be our abstraction working with Assible
    # this includes instantiation, a couple of common attr would be the
    # args/params passed to the execution, as well as if the module
    # supports check mode
    module = AssibleModule(argument_spec=module_args, supports_check_mode=True)

    # if the user is working with this module in only check mode we do not
    # want to make any changes to the environment, just return the current
    # state with no modifications
    if module.check_mode:
        module.exit_json(**result)

    # manipulate or modify the state as needed (this is going to be the
    # part where your module will do what it needs to do)
    result['original_message'] = module.params['name']
    result['message'] = 'goodbye'

    # use whatever logic you need to determine whether or not this module
    # made any modifications to your target
    if module.params['new']:
        result['changed'] = True

    # during the execution of the module, if there is an exception or a
    # conditional state that effectively causes a failure, run
    # AssibleModule.fail_json() to pass in the message and the result
    if module.params['name'] == 'fail me':
        module.fail_json(msg='You requested this to fail', **result)

    # in the event of a successful module execution, you will want to
    # simple AssibleModule.exit_json(), passing the key/value results
    module.exit_json(**result)
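
A minimal sketch of the entry-point wrapper that usually accompanies this run_module() boilerplate; the main() name and the __main__ guard follow the conventional pattern and are assumed here, not taken from the snippet above:

def main():
    # delegate to run_module() so the module body stays testable
    run_module()


if __name__ == '__main__':
    main()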
Example No. 10
def main():
    module = AssibleModule(
        argument_spec=dict(src=dict(type='path',
                                    required=True,
                                    aliases=['path']), ),
        supports_check_mode=True,
    )
    source = module.params['src']

    if not os.path.exists(source):
        module.fail_json(msg="file not found: %s" % source)
    if not os.access(source, os.R_OK):
        module.fail_json(msg="file is not readable: %s" % source)

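    # read the file and return its contents base64-encoded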
    with open(source, 'rb') as source_fh:
        source_content = source_fh.read()
    data = base64.b64encode(source_content)

    module.exit_json(content=data, source=source, encoding='base64')
Example No. 11
def main():
    if "--interactive" in sys.argv:
        import assible.module_utils.basic
        assible.module_utils.basic._ASSIBLE_ARGS = json.dumps(dict(
            ASSIBLE_MODULE_ARGS=dict(
                fail_mode="graceful"
            )
        ))

    module = AssibleModule(
        argument_spec=dict(
            fail_mode=dict(type='list', default=['success'])
        )
    )

    result = dict(changed=True)

    fail_mode = module.params['fail_mode']

    try:
        if 'leading_junk' in fail_mode:
            print("leading junk before module output")

        if 'graceful' in fail_mode:
            module.fail_json(msg="failed gracefully")

        if 'exception' in fail_mode:
            raise Exception('failing via exception')

        if 'recovered_fail' in fail_mode:
            result = {"msg": "succeeded", "failed": False, "changed": True}
            # Wait in the middle to setup a race where the controller reads incomplete data from our
            # special async_status the first poll
            time.sleep(5)

        module.exit_json(**result)

    finally:
        if 'trailing_junk' in fail_mode:
            print("trailing junk after module output")
Example No. 12
def main():
    module = AssibleModule(
        argument_spec=dict(
            path=dict(type='path'),
            content=dict(type='str'),
        ),
        required_one_of=(['path', 'content'], ),
        mutually_exclusive=(['path', 'content'], ),
        supports_check_mode=True,
    )

    if not CRYPTOGRAPHY_FOUND:
        module.fail_json(msg=missing_required_lib(
            'cryptography >= {0}'.format(MINIMAL_CRYPTOGRAPHY_VERSION)),
                         exception=CRYPTOGRAPHY_IMP_ERR)

    try:
        crl = CRLInfo(module)
        result = crl.get_info()
        module.exit_json(**result)
    except crypto_utils.OpenSSLObjectError as e:
        module.fail_json(msg=to_native(e))
Example No. 13
def main():
    module = AssibleModule(
        argument_spec=dict(
            ignore_selinux_state=dict(type='bool', default=False),
            login=dict(type='str', required=True),
            seuser=dict(type='str'),
            selevel=dict(type='str', aliases=['serange'], default='s0'),
            state=dict(type='str', default='present', choices=['absent', 'present']),
            reload=dict(type='bool', default=True),
        ),
        required_if=[
            ["state", "present", ["seuser"]]
        ],
        supports_check_mode=True
    )
    if not HAVE_SELINUX:
        module.fail_json(msg=missing_required_lib("libselinux"), exception=SELINUX_IMP_ERR)

    if not HAVE_SEOBJECT:
        module.fail_json(msg=missing_required_lib("seobject from policycoreutils"), exception=SEOBJECT_IMP_ERR)

    ignore_selinux_state = module.params['ignore_selinux_state']

    if not get_runtime_status(ignore_selinux_state):
        module.fail_json(msg="SELinux is disabled on this host.")

    login = module.params['login']
    seuser = module.params['seuser']
    serange = module.params['selevel']
    state = module.params['state']
    do_reload = module.params['reload']

    result = {
        'login': login,
        'seuser': seuser,
        'serange': serange,
        'state': state,
    }

    if state == 'present':
        result['changed'] = semanage_login_add(module, login, seuser, do_reload, serange)
    elif state == 'absent':
        result['changed'] = semanage_login_del(module, login, seuser, do_reload)
    else:
        module.fail_json(msg='Invalid value of argument "state": {0}'.format(state))

    module.exit_json(**result)
Example No. 14
def main():
    module = AssibleModule(
        argument_spec=dict(
            ignore_selinux_state=dict(type='bool', default=False),
            target=dict(type='str', required=True, aliases=['path']),
            ftype=dict(type='str', default='a', choices=option_to_file_type_str.keys()),
            setype=dict(type='str', required=True),
            seuser=dict(type='str'),
            selevel=dict(type='str', aliases=['serange']),
            state=dict(type='str', default='present', choices=['absent', 'present']),
            reload=dict(type='bool', default=True),
        ),
        supports_check_mode=True,
    )
    if not HAVE_SELINUX:
        module.fail_json(msg=missing_required_lib("libselinux-python"), exception=SELINUX_IMP_ERR)

    if not HAVE_SEOBJECT:
        module.fail_json(msg=missing_required_lib("policycoreutils-python"), exception=SEOBJECT_IMP_ERR)

    ignore_selinux_state = module.params['ignore_selinux_state']

    if not get_runtime_status(ignore_selinux_state):
        module.fail_json(msg="SELinux is disabled on this host.")

    target = module.params['target']
    ftype = module.params['ftype']
    setype = module.params['setype']
    seuser = module.params['seuser']
    serange = module.params['selevel']
    state = module.params['state']
    do_reload = module.params['reload']

    result = dict(target=target, ftype=ftype, setype=setype, state=state)

    if state == 'present':
        semanage_fcontext_modify(module, result, target, ftype, setype, do_reload, serange, seuser)
    elif state == 'absent':
        semanage_fcontext_delete(module, result, target, ftype, do_reload)
    else:
        module.fail_json(msg='Invalid value of argument "state": {0}'.format(state))
Example No. 15
def main():
    module = AssibleModule(
        argument_spec=dict(
            name=dict(type='str', required=True),
            state=dict(type='str', default='present', choices=['absent', 'present']),
        ),
        supports_check_mode=True,
    )

    name = module.params['name']
    state = module.params['state']

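    # detect which locale management scheme this system uses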
    if not os.path.exists("/etc/locale.gen"):
        if os.path.exists("/var/lib/locales/supported.d/"):
            # Ubuntu created its own system to manage locales.
            ubuntuMode = True
        else:
            module.fail_json(msg="/etc/locale.gen and /var/lib/locales/supported.d/local are missing. Is the package \"locales\" installed?")
    else:
        # We found the common way to manage locales.
        ubuntuMode = False

    if not is_available(name, ubuntuMode):
        module.fail_json(msg="The locale you've entered is not available "
                             "on your system.")

    if is_present(name):
        prev_state = "present"
    else:
        prev_state = "absent"
    changed = (prev_state != state)

    if module.check_mode:
        module.exit_json(changed=changed)
    else:
        if changed:
            try:
                if ubuntuMode is False:
                    apply_change(state, name)
                else:
                    apply_change_ubuntu(state, name)
            except EnvironmentError as e:
                module.fail_json(msg=to_native(e), exitValue=e.errno)

        module.exit_json(name=name, changed=changed, msg="OK")
Example No. 16
def main():
    argument_spec = postgres_common_argument_spec()
    argument_spec.update(
        database=dict(required=True, aliases=['db', 'login_db']),
        state=dict(default='present', choices=['present', 'absent']),
        privs=dict(required=False, aliases=['priv']),
        type=dict(default='table',
                  choices=[
                      'table',
                      'sequence',
                      'function',
                      'database',
                      'schema',
                      'language',
                      'tablespace',
                      'group',
                      'default_privs',
                      'foreign_data_wrapper',
                      'foreign_server',
                      'type',
                  ]),
        objs=dict(required=False, aliases=['obj']),
        schema=dict(required=False),
        roles=dict(required=True, aliases=['role']),
        session_role=dict(required=False),
        target_roles=dict(required=False),
        grant_option=dict(required=False,
                          type='bool',
                          aliases=['admin_option']),
        host=dict(default='', aliases=['login_host']),
        unix_socket=dict(default='', aliases=['login_unix_socket']),
        login=dict(default='postgres', aliases=['login_user']),
        password=dict(default='', aliases=['login_password'], no_log=True),
        fail_on_role=dict(type='bool', default=True),
    )

    module = AssibleModule(
        argument_spec=argument_spec,
        supports_check_mode=True,
    )

    fail_on_role = module.params['fail_on_role']

    # Create type object as namespace for module params
    p = type('Params', (), module.params)
    # param "schema": default, allowed depends on param "type"
    if p.type in ['table', 'sequence', 'function', 'type', 'default_privs']:
        p.schema = p.schema or 'public'
    elif p.schema:
        module.fail_json(msg='Argument "schema" is not allowed '
                         'for type "%s".' % p.type)

    # param "objs": default, required depends on param "type"
    if p.type == 'database':
        p.objs = p.objs or p.database
    elif not p.objs:
        module.fail_json(msg='Argument "objs" is required '
                         'for type "%s".' % p.type)

    # param "privs": allowed, required depends on param "type"
    if p.type == 'group':
        if p.privs:
            module.fail_json(msg='Argument "privs" is not allowed '
                             'for type "group".')
    elif not p.privs:
        module.fail_json(msg='Argument "privs" is required '
                         'for type "%s".' % p.type)

    # Connect to Database
    if not psycopg2:
        module.fail_json(msg=missing_required_lib('psycopg2'),
                         exception=PSYCOPG2_IMP_ERR)
    try:
        conn = Connection(p, module)
    except psycopg2.Error as e:
        module.fail_json(msg='Could not connect to database: %s' %
                         to_native(e),
                         exception=traceback.format_exc())
    except TypeError as e:
        if 'sslrootcert' in e.args[0]:
            module.fail_json(
                msg=
                'Postgresql server must be at least version 8.4 to support sslrootcert'
            )
        module.fail_json(msg="unable to connect to database: %s" %
                         to_native(e),
                         exception=traceback.format_exc())
    except ValueError as e:
        # We raise this when the psycopg library is too old
        module.fail_json(msg=to_native(e))

    if p.session_role:
        try:
            conn.cursor.execute('SET ROLE "%s"' % p.session_role)
        except Exception as e:
            module.fail_json(msg="Could not switch to role %s: %s" %
                             (p.session_role, to_native(e)),
                             exception=traceback.format_exc())

    try:
        # privs
        if p.privs:
            privs = frozenset(pr.upper() for pr in p.privs.split(','))
            if not privs.issubset(VALID_PRIVS):
                module.fail_json(msg='Invalid privileges specified: %s' %
                                 privs.difference(VALID_PRIVS))
        else:
            privs = None
        # objs:
        if p.type == 'table' and p.objs == 'ALL_IN_SCHEMA':
            objs = conn.get_all_tables_in_schema(p.schema)
        elif p.type == 'sequence' and p.objs == 'ALL_IN_SCHEMA':
            objs = conn.get_all_sequences_in_schema(p.schema)
        elif p.type == 'function' and p.objs == 'ALL_IN_SCHEMA':
            objs = conn.get_all_functions_in_schema(p.schema)
        elif p.type == 'default_privs':
            if p.objs == 'ALL_DEFAULT':
                objs = frozenset(VALID_DEFAULT_OBJS.keys())
            else:
                objs = frozenset(obj.upper() for obj in p.objs.split(','))
                if not objs.issubset(VALID_DEFAULT_OBJS):
                    module.fail_json(
                        msg='Invalid Object set specified: %s' %
                        objs.difference(VALID_DEFAULT_OBJS.keys()))
            # Again, do we have valid privs specified for object type:
            valid_objects_for_priv = frozenset(
                obj for obj in objs if privs.issubset(VALID_DEFAULT_OBJS[obj]))
            if not valid_objects_for_priv == objs:
                module.fail_json(
                    msg=
                    'Invalid priv specified. Valid object for priv: {0}. Objects: {1}'
                    .format(valid_objects_for_priv, objs))
        else:
            objs = p.objs.split(',')

            # function signatures are encoded using ':' to separate args
            if p.type == 'function':
                objs = [obj.replace(':', ',') for obj in objs]

        # roles
        if p.roles == 'PUBLIC':
            roles = 'PUBLIC'
        else:
            roles = p.roles.split(',')

            if len(roles) == 1 and not role_exists(module, conn.cursor,
                                                   roles[0]):
                # fail or warn first; calling exit_json before this check
                # would make the fail_on_role handling unreachable
                if fail_on_role:
                    module.fail_json(msg="Role '%s' does not exist" %
                                     roles[0].strip())
                else:
                    module.warn("Role '%s' does not exist, nothing to do" %
                                roles[0].strip())

                module.exit_json(changed=False)

        # check if target_roles is set with type: default_privs
        if p.target_roles and not p.type == 'default_privs':
            module.warn(
                '"target_roles" will be ignored '
                'Argument "type: default_privs" is required for usage of "target_roles".'
            )

        # target roles
        if p.target_roles:
            target_roles = p.target_roles.split(',')
        else:
            target_roles = None

        changed = conn.manipulate_privs(
            obj_type=p.type,
            privs=privs,
            objs=objs,
            roles=roles,
            target_roles=target_roles,
            state=p.state,
            grant_option=p.grant_option,
            schema_qualifier=p.schema,
            fail_on_role=fail_on_role,
        )

    except Error as e:
        conn.rollback()
        module.fail_json(msg=e.message, exception=traceback.format_exc())

    except psycopg2.Error as e:
        conn.rollback()
        module.fail_json(msg=to_native(e.message))

    if module.check_mode:
        conn.rollback()
    else:
        conn.commit()
    module.exit_json(changed=changed, queries=executed_queries)
Example No. 17
def main():
    module = AssibleModule(
        # not checking because of daisy chain to file module
        argument_spec=dict(
            src=dict(type='path', required=True),
            dest=dict(type='path', required=True),
            remote_src=dict(type='bool', default=False),
            creates=dict(type='path'),
            list_files=dict(type='bool', default=False),
            keep_newer=dict(type='bool', default=False),
            exclude=dict(type='list', elements='str', default=[]),
            extra_opts=dict(type='list', elements='str', default=[]),
            validate_certs=dict(type='bool', default=True),
        ),
        add_file_common_args=True,
        # check-mode only works for zip files, we cover that later
        supports_check_mode=True,
    )

    src = module.params['src']
    dest = module.params['dest']
    b_dest = to_bytes(dest, errors='surrogate_or_strict')
    remote_src = module.params['remote_src']
    file_args = module.load_file_common_arguments(module.params)

    # did tar file arrive?
    if not os.path.exists(src):
        if not remote_src:
            module.fail_json(msg="Source '%s' failed to transfer" % src)
        # If remote_src=true, and src= contains ://, try and download the file to a temp directory.
        elif '://' in src:
            src = fetch_file(module, src)
        else:
            module.fail_json(msg="Source '%s' does not exist" % src)
    if not os.access(src, os.R_OK):
        module.fail_json(msg="Source '%s' not readable" % src)

    # skip working with 0 size archives
    try:
        if os.path.getsize(src) == 0:
            module.fail_json(msg="Invalid archive '%s', the file is 0 bytes" % src)
    except Exception as e:
        module.fail_json(msg="Source '%s' not readable, %s" % (src, to_native(e)))

    # is dest OK to receive tar file?
    if not os.path.isdir(b_dest):
        module.fail_json(msg="Destination '%s' is not a directory" % dest)

    handler = pick_handler(src, b_dest, file_args, module)

    res_args = dict(handler=handler.__class__.__name__, dest=dest, src=src)

    # do we need to do unpack?
    check_results = handler.is_unarchived()

    # DEBUG
    # res_args['check_results'] = check_results

    if module.check_mode:
        res_args['changed'] = not check_results['unarchived']
    elif check_results['unarchived']:
        res_args['changed'] = False
    else:
        # do the unpack
        try:
            res_args['extract_results'] = handler.unarchive()
            if res_args['extract_results']['rc'] != 0:
                module.fail_json(msg="failed to unpack %s to %s" % (src, dest), **res_args)
        except IOError:
            module.fail_json(msg="failed to unpack %s to %s" % (src, dest), **res_args)
        else:
            res_args['changed'] = True

    # Get diff if required
    if check_results.get('diff', False):
        res_args['diff'] = {'prepared': check_results['diff']}

    # Run only if we found differences (idempotence) or diff was missing
    if res_args.get('diff', True) and not module.check_mode:
        # do we need to change perms?
        for filename in handler.files_in_archive:
            file_args['path'] = os.path.join(b_dest, to_bytes(filename, errors='surrogate_or_strict'))

            try:
                res_args['changed'] = module.set_fs_attributes_if_different(file_args, res_args['changed'], expand=False)
            except (IOError, OSError) as e:
                module.fail_json(msg="Unexpected error when accessing exploded file: %s" % to_native(e), **res_args)

    if module.params['list_files']:
        res_args['files'] = handler.files_in_archive

    module.exit_json(**res_args)
Example No. 18
def main():
    module = AssibleModule(argument_spec=dict(
        login_user=dict(default=None),
        login_password=dict(default=None, no_log=True),
        login_host=dict(default='localhost'),
        login_port=dict(default='27017'),
        login_database=dict(default=None),
        replica_set=dict(default=None),
        database=dict(required=True, aliases=['db']),
        name=dict(required=True, aliases=['user']),
        password=dict(aliases=['pass'], no_log=True),
        ssl=dict(default=False, type='bool'),
        roles=dict(default=None, type='list', elements='raw'),
        state=dict(default='present', choices=['absent', 'present']),
        update_password=dict(default="always", choices=["always",
                                                        "on_create"]),
        ssl_cert_reqs=dict(
            default='CERT_REQUIRED',
            choices=['CERT_NONE', 'CERT_OPTIONAL', 'CERT_REQUIRED']),
    ),
                           supports_check_mode=True)

    if not pymongo_found:
        module.fail_json(msg=missing_required_lib('pymongo'))

    login_user = module.params['login_user']
    login_password = module.params['login_password']
    login_host = module.params['login_host']
    login_port = module.params['login_port']
    login_database = module.params['login_database']

    replica_set = module.params['replica_set']
    db_name = module.params['database']
    user = module.params['name']
    password = module.params['password']
    ssl = module.params['ssl']
    roles = module.params['roles'] or []
    state = module.params['state']
    update_password = module.params['update_password']

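    # build the connection parameters and authenticate when credentials are available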
    try:
        connection_params = {
            "host": login_host,
            "port": int(login_port),
        }

        if replica_set:
            connection_params["replicaset"] = replica_set

        if ssl:
            connection_params["ssl"] = ssl
            connection_params["ssl_cert_reqs"] = getattr(
                ssl_lib, module.params['ssl_cert_reqs'])

        client = MongoClient(**connection_params)

        # NOTE: this check must be done ASAP.
        # We don't need to be authenticated (this ability was lost in PyMongo 3.6)
        if LooseVersion(PyMongoVersion) <= LooseVersion('3.5'):
            check_compatibility(module, client)

        if login_user is None and login_password is None:
            mongocnf_creds = load_mongocnf()
            if mongocnf_creds is not False:
                login_user = mongocnf_creds['user']
                login_password = mongocnf_creds['password']
        elif login_password is None or login_user is None:
            module.fail_json(
                msg=
                'when supplying login arguments, both login_user and login_password must be provided'
            )

        if login_user is not None and login_password is not None:
            client.admin.authenticate(login_user,
                                      login_password,
                                      source=login_database)
        elif LooseVersion(PyMongoVersion) >= LooseVersion('3.0'):
            if db_name != "admin":
                module.fail_json(
                    msg=
                    'The localhost login exception only allows the first admin account to be created'
                )
            # else: this has to be the first admin user added

    except Exception as e:
        module.fail_json(msg='unable to connect to database: %s' %
                         to_native(e),
                         exception=traceback.format_exc())

    if state == 'present':
        if password is None and update_password == 'always':
            module.fail_json(
                msg=
                'password parameter required when adding a user unless update_password is set to on_create'
            )

        try:
            if update_password != 'always':
                uinfo = user_find(client, user, db_name)
                if uinfo:
                    password = None
                    if not check_if_roles_changed(uinfo, roles, db_name):
                        module.exit_json(changed=False, user=user)

            if module.check_mode:
                module.exit_json(changed=True, user=user)

            user_add(module, client, db_name, user, password, roles)
        except Exception as e:
            module.fail_json(msg='Unable to add or update user: %s' %
                             to_native(e),
                             exception=traceback.format_exc())
        finally:
            try:
                client.close()
            except Exception:
                pass
            # Here we could check for a password change if mongo provided a query for that: https://jira.mongodb.org/browse/SERVER-22848
            # newuinfo = user_find(client, user, db_name)
            # if uinfo['role'] == newuinfo['role'] and CheckPasswordHere:
            #    module.exit_json(changed=False, user=user)

    elif state == 'absent':
        try:
            user_remove(module, client, db_name, user)
        except Exception as e:
            module.fail_json(msg='Unable to remove user: %s' % to_native(e),
                             exception=traceback.format_exc())
        finally:
            try:
                client.close()
            except Exception:
                pass
    module.exit_json(changed=True, user=user)
Example No. 19
def main():
    module = AssibleModule(argument_spec=dict(
        command=dict(required=True),
        chdir=dict(type='path'),
        creates=dict(type='path'),
        removes=dict(type='path'),
        responses=dict(type='dict', required=True),
        timeout=dict(type='int', default=30),
        echo=dict(type='bool', default=False),
    ))

    if not HAS_PEXPECT:
        module.fail_json(msg=missing_required_lib("pexpect"),
                         exception=PEXPECT_IMP_ERR)

    chdir = module.params['chdir']
    args = module.params['command']
    creates = module.params['creates']
    removes = module.params['removes']
    responses = module.params['responses']
    timeout = module.params['timeout']
    echo = module.params['echo']

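    # map each expected prompt to a response string, or to a closure when a list of responses is given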
    events = dict()
    for key, value in responses.items():
        if isinstance(value, list):
            response = response_closure(module, key, value)
        else:
            response = u'%s\n' % to_text(value).rstrip(u'\n')

        events[to_text(key)] = response

    if args.strip() == '':
        module.fail_json(rc=256, msg="no command given")

    if chdir:
        chdir = os.path.abspath(chdir)
        os.chdir(chdir)

    if creates:
        # do not run the command if the line contains creates=filename
        # and the filename already exists.  This allows idempotence
        # of command executions.
        if os.path.exists(creates):
            module.exit_json(cmd=args,
                             stdout="skipped, since %s exists" % creates,
                             changed=False,
                             rc=0)

    if removes:
        # do not run the command if the line contains removes=filename
        # and the filename does not exist.  This allows idempotence
        # of command executions.
        if not os.path.exists(removes):
            module.exit_json(cmd=args,
                             stdout="skipped, since %s does not exist" %
                             removes,
                             changed=False,
                             rc=0)

    startd = datetime.datetime.now()

    try:
        try:
            # Prefer pexpect.run from pexpect>=4
            out, rc = pexpect.run(args,
                                  timeout=timeout,
                                  withexitstatus=True,
                                  events=events,
                                  cwd=chdir,
                                  echo=echo,
                                  encoding='utf-8')
        except TypeError:
            # Use pexpect.runu in pexpect>=3.3,<4
            out, rc = pexpect.runu(args,
                                   timeout=timeout,
                                   withexitstatus=True,
                                   events=events,
                                   cwd=chdir,
                                   echo=echo)
    except (TypeError, AttributeError) as e:
        # This should catch all insufficient versions of pexpect
        # We deem them insufficient for their lack of ability to specify
        # to not echo responses via the run/runu functions, which would
        # potentially leak sensitive information
        module.fail_json(msg='Insufficient version of pexpect installed '
                         '(%s), this module requires pexpect>=3.3. '
                         'Error was %s' % (pexpect.__version__, to_native(e)))
    except pexpect.ExceptionPexpect as e:
        module.fail_json(msg='%s' % to_native(e),
                         exception=traceback.format_exc())

    endd = datetime.datetime.now()
    delta = endd - startd

    if out is None:
        out = ''

    result = dict(
        cmd=args,
        stdout=out.rstrip('\r\n'),
        rc=rc,
        start=str(startd),
        end=str(endd),
        delta=str(delta),
        changed=True,
    )

    if rc is None:
        module.fail_json(msg='command exceeded timeout', **result)
    elif rc != 0:
        module.fail_json(msg='non-zero return code', **result)

    module.exit_json(**result)
Example No. 20
def main():
    module = AssibleModule(
        argument_spec=dict(
            vg=dict(type='str', required=True),
            pvs=dict(type='list'),
            pesize=dict(type='str', default='4'),
            pv_options=dict(type='str', default=''),
            vg_options=dict(type='str', default=''),
            state=dict(type='str',
                       default='present',
                       choices=['absent', 'present']),
            force=dict(type='bool', default=False),
        ),
        supports_check_mode=True,
    )

    vg = module.params['vg']
    state = module.params['state']
    force = module.boolean(module.params['force'])
    pesize = module.params['pesize']
    pvoptions = module.params['pv_options'].split()
    vgoptions = module.params['vg_options'].split()

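    # physical volumes must be supplied when the volume group should be present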
    dev_list = []
    if module.params['pvs']:
        dev_list = list(module.params['pvs'])
    elif state == 'present':
        module.fail_json(msg="No physical volumes given.")

    # LVM always uses real paths not symlinks so replace symlinks with actual path
    for idx, dev in enumerate(dev_list):
        dev_list[idx] = os.path.realpath(dev)

    if state == 'present':
        # check given devices
        for test_dev in dev_list:
            if not os.path.exists(test_dev):
                module.fail_json(msg="Device %s not found." % test_dev)

        # get pv list
        pvs_cmd = module.get_bin_path('pvs', True)
        if dev_list:
            pvs_filter_pv_name = ' || '.join(
                'pv_name = {0}'.format(x)
                for x in itertools.chain(dev_list, module.params['pvs']))
            pvs_filter_vg_name = 'vg_name = {0}'.format(vg)
            pvs_filter = "--select '{0} || {1}' ".format(
                pvs_filter_pv_name, pvs_filter_vg_name)
        else:
            pvs_filter = ''
        rc, current_pvs, err = module.run_command(
            "%s --noheadings -o pv_name,vg_name --separator ';' %s" %
            (pvs_cmd, pvs_filter))
        if rc != 0:
            module.fail_json(msg="Failed executing pvs command.",
                             rc=rc,
                             err=err)

        # check pv for devices
        pvs = parse_pvs(module, current_pvs)
        used_pvs = [
            pv for pv in pvs
            if pv['name'] in dev_list and pv['vg_name'] and pv['vg_name'] != vg
        ]
        if used_pvs:
            module.fail_json(msg="Device %s is already in %s volume group." %
                             (used_pvs[0]['name'], used_pvs[0]['vg_name']))

    vgs_cmd = module.get_bin_path('vgs', True)
    rc, current_vgs, err = module.run_command(
        "%s --noheadings -o vg_name,pv_count,lv_count --separator ';'" %
        vgs_cmd)

    if rc != 0:
        module.fail_json(msg="Failed executing vgs command.", rc=rc, err=err)

    changed = False

    vgs = parse_vgs(current_vgs)

    for test_vg in vgs:
        if test_vg['name'] == vg:
            this_vg = test_vg
            break
    else:
        this_vg = None

    if this_vg is None:
        if state == 'present':
            # create VG
            if module.check_mode:
                changed = True
            else:
                # create PV
                pvcreate_cmd = module.get_bin_path('pvcreate', True)
                for current_dev in dev_list:
                    rc, _, err = module.run_command(
                        [pvcreate_cmd] + pvoptions +
                        ['-f', str(current_dev)])
                    if rc == 0:
                        changed = True
                    else:
                        module.fail_json(
                            msg="Creating physical volume '%s' failed" %
                            current_dev,
                            rc=rc,
                            err=err)
                vgcreate_cmd = module.get_bin_path('vgcreate')
                rc, _, err = module.run_command([vgcreate_cmd] + vgoptions +
                                                ['-s', pesize, vg] + dev_list)
                if rc == 0:
                    changed = True
                else:
                    module.fail_json(msg="Creating volume group '%s' failed" %
                                     vg,
                                     rc=rc,
                                     err=err)
    else:
        if state == 'absent':
            if module.check_mode:
                module.exit_json(changed=True)
            else:
                if this_vg['lv_count'] == 0 or force:
                    # remove VG
                    vgremove_cmd = module.get_bin_path('vgremove', True)
                    rc, _, err = module.run_command("%s --force %s" %
                                                    (vgremove_cmd, vg))
                    if rc == 0:
                        module.exit_json(changed=True)
                    else:
                        module.fail_json(
                            msg="Failed to remove volume group %s" % (vg),
                            rc=rc,
                            err=err)
                else:
                    module.fail_json(
                        msg=
                        "Refuse to remove non-empty volume group %s without force=yes"
                        % (vg))

        # resize VG
        current_devs = [
            os.path.realpath(pv['name']) for pv in pvs if pv['vg_name'] == vg
        ]
        devs_to_remove = list(set(current_devs) - set(dev_list))
        devs_to_add = list(set(dev_list) - set(current_devs))

        if devs_to_add or devs_to_remove:
            if module.check_mode:
                changed = True
            else:
                if devs_to_add:
                    devs_to_add_string = ' '.join(devs_to_add)
                    # create PV
                    pvcreate_cmd = module.get_bin_path('pvcreate', True)
                    for current_dev in devs_to_add:
                        rc, _, err = module.run_command(
                            [pvcreate_cmd] + pvoptions +
                            ['-f', str(current_dev)])
                        if rc == 0:
                            changed = True
                        else:
                            module.fail_json(
                                msg="Creating physical volume '%s' failed" %
                                current_dev,
                                rc=rc,
                                err=err)
                    # add PV to our VG
                    vgextend_cmd = module.get_bin_path('vgextend', True)
                    rc, _, err = module.run_command(
                        "%s %s %s" % (vgextend_cmd, vg, devs_to_add_string))
                    if rc == 0:
                        changed = True
                    else:
                        module.fail_json(msg="Unable to extend %s by %s." %
                                         (vg, devs_to_add_string),
                                         rc=rc,
                                         err=err)

                # remove some PV from our VG
                if devs_to_remove:
                    devs_to_remove_string = ' '.join(devs_to_remove)
                    vgreduce_cmd = module.get_bin_path('vgreduce', True)
                    rc, _, err = module.run_command(
                        "%s --force %s %s" %
                        (vgreduce_cmd, vg, devs_to_remove_string))
                    if rc == 0:
                        changed = True
                    else:
                        module.fail_json(msg="Unable to reduce %s by %s." %
                                         (vg, devs_to_remove_string),
                                         rc=rc,
                                         err=err)

    module.exit_json(changed=changed)
Example No. 21
def main():
    module = AssibleModule(
        argument_spec=dict(
            paths=dict(type='list',
                       required=True,
                       aliases=['name', 'path'],
                       elements='str'),
            patterns=dict(type='list',
                          default=['*'],
                          aliases=['pattern'],
                          elements='str'),
            excludes=dict(type='list', aliases=['exclude'], elements='str'),
            contains=dict(type='str'),
            read_whole_file=dict(type='bool', default=False),
            file_type=dict(type='str',
                           default="file",
                           choices=['any', 'directory', 'file', 'link']),
            age=dict(type='str'),
            age_stamp=dict(type='str',
                           default="mtime",
                           choices=['atime', 'ctime', 'mtime']),
            size=dict(type='str'),
            recurse=dict(type='bool', default=False),
            hidden=dict(type='bool', default=False),
            follow=dict(type='bool', default=False),
            get_checksum=dict(type='bool', default=False),
            use_regex=dict(type='bool', default=False),
            depth=dict(type='int'),
        ),
        supports_check_mode=True,
    )

    params = module.params

    filelist = []

    if params['age'] is None:
        age = None
    else:
        # convert age to seconds:
        m = re.match(r"^(-?\d+)(s|m|h|d|w)?$", params['age'].lower())
        seconds_per_unit = {
            "s": 1,
            "m": 60,
            "h": 3600,
            "d": 86400,
            "w": 604800
        }
        if m:
            age = int(m.group(1)) * seconds_per_unit.get(m.group(2), 1)
        else:
            module.fail_json(age=params['age'], msg="failed to process age")

    if params['size'] is None:
        size = None
    else:
        # convert size to bytes:
        m = re.match(r"^(-?\d+)(b|k|m|g|t)?$", params['size'].lower())
        bytes_per_unit = {
            "b": 1,
            "k": 1024,
            "m": 1024**2,
            "g": 1024**3,
            "t": 1024**4
        }
        if m:
            size = int(m.group(1)) * bytes_per_unit.get(m.group(2), 1)
        else:
            module.fail_json(size=params['size'], msg="failed to process size")

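    # walk each requested path and collect entries that match all of the filters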
    now = time.time()
    msg = ''
    looked = 0
    for npath in params['paths']:
        npath = os.path.expanduser(os.path.expandvars(npath))
        if os.path.isdir(npath):
            for root, dirs, files in os.walk(npath,
                                             followlinks=params['follow']):
                looked = looked + len(files) + len(dirs)
                for fsobj in (files + dirs):
                    fsname = os.path.normpath(os.path.join(root, fsobj))
                    if params['depth']:
                        wpath = npath.rstrip(os.path.sep) + os.path.sep
                        depth = int(fsname.count(os.path.sep)) - int(
                            wpath.count(os.path.sep)) + 1
                        if depth > params['depth']:
                            continue
                    if os.path.basename(fsname).startswith(
                            '.') and not params['hidden']:
                        continue

                    try:
                        st = os.lstat(fsname)
                    except Exception:
                        msg += "%s was skipped as it does not seem to be a valid file or it cannot be accessed\n" % fsname
                        continue

                    r = {'path': fsname}
                    if params['file_type'] == 'any':
                        if pfilter(fsobj, params['patterns'],
                                   params['excludes'],
                                   params['use_regex']) and agefilter(
                                       st, now, age, params['age_stamp']):

                            r.update(statinfo(st))
                            if stat.S_ISREG(
                                    st.st_mode) and params['get_checksum']:
                                r['checksum'] = module.sha1(fsname)
                            filelist.append(r)

                    elif stat.S_ISDIR(
                            st.st_mode) and params['file_type'] == 'directory':
                        if pfilter(fsobj, params['patterns'],
                                   params['excludes'],
                                   params['use_regex']) and agefilter(
                                       st, now, age, params['age_stamp']):

                            r.update(statinfo(st))
                            filelist.append(r)

                    elif stat.S_ISREG(
                            st.st_mode) and params['file_type'] == 'file':
                        if pfilter(fsobj, params['patterns'], params['excludes'], params['use_regex']) and \
                           agefilter(st, now, age, params['age_stamp']) and \
                           sizefilter(st, size) and contentfilter(fsname, params['contains'], params['read_whole_file']):

                            r.update(statinfo(st))
                            if params['get_checksum']:
                                r['checksum'] = module.sha1(fsname)
                            filelist.append(r)

                    elif stat.S_ISLNK(
                            st.st_mode) and params['file_type'] == 'link':
                        if pfilter(fsobj, params['patterns'],
                                   params['excludes'],
                                   params['use_regex']) and agefilter(
                                       st, now, age, params['age_stamp']):

                            r.update(statinfo(st))
                            filelist.append(r)

                if not params['recurse']:
                    break
        else:
            msg += "%s was skipped as it does not seem to be a valid directory or it cannot be accessed\n" % npath

    matched = len(filelist)
    module.exit_json(files=filelist,
                     changed=False,
                     msg=msg,
                     matched=matched,
                     examined=looked)
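
The depth limit in this example is enforced by comparing path-separator counts instead of walking the tree twice. A minimal standalone sketch of that arithmetic (the helper name is hypothetical, not part of the module):

import os

def within_depth(base, candidate, max_depth):
    # Depth 1 means a direct child of base; mirrors the wpath/fsname
    # separator-count comparison used in the example above.
    base = base.rstrip(os.path.sep) + os.path.sep
    depth = candidate.count(os.path.sep) - base.count(os.path.sep) + 1
    return depth <= max_depth

print(within_depth('/tmp/data', '/tmp/data/a.txt', 1))       # True
print(within_depth('/tmp/data', '/tmp/data/sub/b.txt', 1))   # False
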
Example No. 22
def main():
    module = AssibleModule(
        argument_spec=dict(
            state=dict(type='str',
                       default='present',
                       choices=['absent', 'present']),
            name=dict(type='str', required=True),
            gid=dict(type='int'),
            system=dict(type='bool', default=False),
            local=dict(type='bool', default=False),
            non_unique=dict(type='bool', default=False),
        ),
        supports_check_mode=True,
        required_if=[
            ['non_unique', True, ['gid']],
        ],
    )

    group = Group(module)

    module.debug('Group instantiated - platform %s' % group.platform)
    if group.distribution:
        module.debug('Group instantiated - distribution %s' %
                     group.distribution)

    rc = None
    out = ''
    err = ''
    result = {}
    result['name'] = group.name
    result['state'] = group.state

    if group.state == 'absent':

        if group.group_exists():
            if module.check_mode:
                module.exit_json(changed=True)
            (rc, out, err) = group.group_del()
            if rc != 0:
                module.fail_json(name=group.name, msg=err)

    elif group.state == 'present':

        if not group.group_exists():
            if module.check_mode:
                module.exit_json(changed=True)
            (rc, out, err) = group.group_add(gid=group.gid,
                                             system=group.system)
        else:
            (rc, out, err) = group.group_mod(gid=group.gid)

        if rc is not None and rc != 0:
            module.fail_json(name=group.name, msg=err)

    if rc is None:
        result['changed'] = False
    else:
        result['changed'] = True
    if out:
        result['stdout'] = out
    if err:
        result['stderr'] = err

    if group.group_exists():
        info = group.group_info()
        result['system'] = group.system
        result['gid'] = info[2]

    module.exit_json(**result)
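
The changed flag above follows a simple convention: rc stays None when no group command was run, so None means "no change" (non-zero rc paths fail earlier). A minimal sketch of that result assembly, with an assumed helper name:

def summarize(rc, out, err):
    # rc is None when no command ran; any executed command implies a change.
    result = {'changed': rc is not None}
    if out:
        result['stdout'] = out
    if err:
        result['stderr'] = err
    return result

print(summarize(None, '', ''))  # {'changed': False}
print(summarize(0, 'ok', ''))   # {'changed': True, 'stdout': 'ok'}
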
Example No. 23
def main():
    module = AssibleModule(
        argument_spec=dict(
            name=dict(type='str', required=True, aliases=['pkg']),
            question=dict(type='str', aliases=['selection', 'setting']),
            vtype=dict(type='str',
                       choices=[
                           'boolean', 'error', 'multiselect', 'note',
                           'password', 'seen', 'select', 'string', 'text',
                           'title'
                       ]),
            value=dict(type='str', aliases=['answer']),
            unseen=dict(type='bool'),
        ),
        required_together=(['question', 'vtype', 'value'], ),
        supports_check_mode=True,
    )

    # TODO: enable passing array of options and/or debconf file from get-selections dump
    pkg = module.params["name"]
    question = module.params["question"]
    vtype = module.params["vtype"]
    value = module.params["value"]
    unseen = module.params["unseen"]

    prev = get_selections(module, pkg)

    changed = False
    msg = ""

    if question is not None:
        if vtype is None or value is None:
            module.fail_json(msg="when supplying a question you must supply a valid vtype and value")

        # if question doesn't exist, value cannot match
        if question not in prev:
            changed = True
        else:

            existing = prev[question]

            # ensure we compare booleans supplied to the way debconf sees them (true/false strings)
            if vtype == 'boolean':
                value = to_text(value).lower()
                existing = to_text(prev[question]).lower()

            if value != existing:
                changed = True

    if changed:
        if not module.check_mode:
            rc, msg, e = set_selection(module, pkg, question, vtype, value,
                                       unseen)
            if rc:
                module.fail_json(msg=e)

        curr = {question: value}
        if question in prev:
            prev = {question: prev[question]}
        else:
            prev[question] = ''
        if module._diff:
            after = prev.copy()
            after.update(curr)
            diff_dict = {'before': prev, 'after': after}
        else:
            diff_dict = {}

        module.exit_json(changed=changed,
                         msg=msg,
                         current=curr,
                         previous=prev,
                         diff=diff_dict)

    module.exit_json(changed=changed, msg=msg, current=prev)
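
Change detection above hinges on normalizing booleans, since debconf stores them as the lower-case strings 'true'/'false'. A small sketch of that comparison, with to_text approximated by str for illustration:

def needs_change(prev, question, vtype, value):
    # A question that is not registered yet can never match the desired value.
    if question not in prev:
        return True
    existing = prev[question]
    if vtype == 'boolean':
        # Compare booleans the way debconf sees them: lower-case strings.
        value = str(value).lower()
        existing = str(existing).lower()
    return value != existing

print(needs_change({'pkg/enable': 'true'}, 'pkg/enable', 'boolean', 'True'))   # False
print(needs_change({'pkg/enable': 'true'}, 'pkg/enable', 'boolean', 'False'))  # True
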
Example No. 24
class AssibleDockerClient(Client):
    def __init__(self,
                 argument_spec=None,
                 supports_check_mode=False,
                 mutually_exclusive=None,
                 required_together=None,
                 required_if=None,
                 min_docker_version=MIN_DOCKER_VERSION,
                 min_docker_api_version=None,
                 option_minimal_versions=None,
                 option_minimal_versions_ignore_params=None,
                 fail_results=None):

        # Modules can put information in here which will always be returned
        # in case client.fail() is called.
        self.fail_results = fail_results or {}

        merged_arg_spec = dict()
        merged_arg_spec.update(DOCKER_COMMON_ARGS)
        if argument_spec:
            merged_arg_spec.update(argument_spec)
            self.arg_spec = merged_arg_spec

        mutually_exclusive_params = []
        mutually_exclusive_params += DOCKER_MUTUALLY_EXCLUSIVE
        if mutually_exclusive:
            mutually_exclusive_params += mutually_exclusive

        required_together_params = []
        required_together_params += DOCKER_REQUIRED_TOGETHER
        if required_together:
            required_together_params += required_together

        self.module = AssibleModule(
            argument_spec=merged_arg_spec,
            supports_check_mode=supports_check_mode,
            mutually_exclusive=mutually_exclusive_params,
            required_together=required_together_params,
            required_if=required_if)

        NEEDS_DOCKER_PY2 = (LooseVersion(min_docker_version) >=
                            LooseVersion('2.0.0'))

        self.docker_py_version = LooseVersion(docker_version)

        if HAS_DOCKER_MODELS and HAS_DOCKER_SSLADAPTER:
            self.fail(
                "Cannot have both the docker-py and docker python modules (old and new version of Docker "
                "SDK for Python) installed together as they use the same namespace and cause a corrupt "
                "installation. Please uninstall both packages, and re-install only the docker-py or docker "
                "python module (for %s's Python %s). It is recommended to install the docker module if no "
                "support for Python 2.6 is required. Please note that simply uninstalling one of the modules "
                "can leave the other module in a broken state." %
                (platform.node(), sys.executable))

        if not HAS_DOCKER_PY:
            if NEEDS_DOCKER_PY2:
                msg = missing_required_lib("Docker SDK for Python: docker")
                msg = msg + ", for example via `pip install docker`. The error was: %s"
            else:
                msg = missing_required_lib(
                    "Docker SDK for Python: docker (Python >= 2.7) or docker-py (Python 2.6)"
                )
                msg = msg + ", for example via `pip install docker` or `pip install docker-py` (Python 2.6). The error was: %s"
            self.fail(msg % HAS_DOCKER_ERROR)

        if self.docker_py_version < LooseVersion(min_docker_version):
            msg = "Error: Docker SDK for Python version is %s (%s's Python %s). Minimum version required is %s."
            if not NEEDS_DOCKER_PY2:
                # The minimal required version is < 2.0 (and the current version as well).
                # Advertise docker (instead of docker-py) for non-Python-2.6 users.
                msg += DOCKERPYUPGRADE_RECOMMEND_DOCKER
            elif docker_version < LooseVersion('2.0'):
                msg += DOCKERPYUPGRADE_SWITCH_TO_DOCKER
            else:
                msg += DOCKERPYUPGRADE_UPGRADE_DOCKER
            self.fail(msg % (docker_version, platform.node(), sys.executable,
                             min_docker_version))

        self.debug = self.module.params.get('debug')
        self.check_mode = self.module.check_mode
        self._connect_params = get_connect_params(self.auth_params,
                                                  fail_function=self.fail)

        try:
            super(AssibleDockerClient, self).__init__(**self._connect_params)
            self.docker_api_version_str = self.version()['ApiVersion']
        except APIError as exc:
            self.fail("Docker API error: %s" % exc)
        except Exception as exc:
            self.fail("Error connecting: %s" % exc)

        self.docker_api_version = LooseVersion(self.docker_api_version_str)
        if min_docker_api_version is not None:
            if self.docker_api_version < LooseVersion(min_docker_api_version):
                self.fail(
                    'Docker API version is %s. Minimum version required is %s.'
                    % (self.docker_api_version_str, min_docker_api_version))

        if option_minimal_versions is not None:
            self._get_minimal_versions(option_minimal_versions,
                                       option_minimal_versions_ignore_params)

    def log(self, msg, pretty_print=False):
        pass
        # if self.debug:
        #     log_file = open('docker.log', 'a')
        #     if pretty_print:
        #         log_file.write(json.dumps(msg, sort_keys=True, indent=4, separators=(',', ': ')))
        #         log_file.write(u'\n')
        #     else:
        #         log_file.write(msg + u'\n')

    def fail(self, msg, **kwargs):
        self.fail_results.update(kwargs)
        self.module.fail_json(msg=msg, **sanitize_result(self.fail_results))

    @staticmethod
    def _get_value(param_name, param_value, env_variable, default_value):
        if param_value is not None:
            # take module parameter value
            if param_value in BOOLEANS_TRUE:
                return True
            if param_value in BOOLEANS_FALSE:
                return False
            return param_value

        if env_variable is not None:
            env_value = os.environ.get(env_variable)
            if env_value is not None:
                # take the env variable value
                if param_name == 'cert_path':
                    return os.path.join(env_value, 'cert.pem')
                if param_name == 'cacert_path':
                    return os.path.join(env_value, 'ca.pem')
                if param_name == 'key_path':
                    return os.path.join(env_value, 'key.pem')
                if env_value in BOOLEANS_TRUE:
                    return True
                if env_value in BOOLEANS_FALSE:
                    return False
                return env_value

        # take the default
        return default_value

    @property
    def auth_params(self):
        # Get authentication credentials.
        # Precedence: module parameters-> environment variables-> defaults.

        self.log('Getting credentials')

        params = dict()
        for key in DOCKER_COMMON_ARGS:
            params[key] = self.module.params.get(key)

        if self.module.params.get('use_tls'):
            # support use_tls option in docker_image.py. This will be deprecated.
            use_tls = self.module.params.get('use_tls')
            if use_tls == 'encrypt':
                params['tls'] = True
            if use_tls == 'verify':
                params['validate_certs'] = True

        result = dict(
            docker_host=self._get_value('docker_host', params['docker_host'],
                                        'DOCKER_HOST', DEFAULT_DOCKER_HOST),
            tls_hostname=self._get_value('tls_hostname',
                                         params['tls_hostname'],
                                         'DOCKER_TLS_HOSTNAME',
                                         DEFAULT_TLS_HOSTNAME),
            api_version=self._get_value('api_version', params['api_version'],
                                        'DOCKER_API_VERSION', 'auto'),
            cacert_path=self._get_value('cacert_path', params['ca_cert'],
                                        'DOCKER_CERT_PATH', None),
            cert_path=self._get_value('cert_path', params['client_cert'],
                                      'DOCKER_CERT_PATH', None),
            key_path=self._get_value('key_path', params['client_key'],
                                     'DOCKER_CERT_PATH', None),
            ssl_version=self._get_value('ssl_version', params['ssl_version'],
                                        'DOCKER_SSL_VERSION', None),
            tls=self._get_value('tls', params['tls'], 'DOCKER_TLS',
                                DEFAULT_TLS),
            tls_verify=self._get_value('tls_verify', params['validate_certs'],
                                       'DOCKER_TLS_VERIFY',
                                       DEFAULT_TLS_VERIFY),
            timeout=self._get_value('timeout', params['timeout'],
                                    'DOCKER_TIMEOUT', DEFAULT_TIMEOUT_SECONDS),
        )

        update_tls_hostname(result)

        return result

    def _handle_ssl_error(self, error):
        match = re.match(r"hostname.*doesn\'t match (\'.*\')", str(error))
        if match:
            self.fail(
                "You asked for verification that Docker daemons certificate's hostname matches %s. "
                "The actual certificate's hostname is %s. Most likely you need to set DOCKER_TLS_HOSTNAME "
                "or pass `tls_hostname` with a value of %s. You may also use TLS without verification by "
                "setting the `tls` parameter to true." %
                (self.auth_params['tls_hostname'], match.group(1),
                 match.group(1)))
        self.fail("SSL Exception: %s" % (error))

    def _get_minimal_versions(self,
                              option_minimal_versions,
                              ignore_params=None):
        self.option_minimal_versions = dict()
        for option in self.module.argument_spec:
            if ignore_params is not None:
                if option in ignore_params:
                    continue
            self.option_minimal_versions[option] = dict()
        self.option_minimal_versions.update(option_minimal_versions)

        for option, data in self.option_minimal_versions.items():
            # Test whether option is supported, and store result
            support_docker_py = True
            support_docker_api = True
            if 'docker_py_version' in data:
                support_docker_py = self.docker_py_version >= LooseVersion(
                    data['docker_py_version'])
            if 'docker_api_version' in data:
                support_docker_api = self.docker_api_version >= LooseVersion(
                    data['docker_api_version'])
            data['supported'] = support_docker_py and support_docker_api
            # Fail if option is not supported but used
            if not data['supported']:
                # Test whether option is specified
                if 'detect_usage' in data:
                    used = data['detect_usage'](self)
                else:
                    used = self.module.params.get(option) is not None
                    if used and 'default' in self.module.argument_spec[option]:
                        used = self.module.params[option] != self.module.argument_spec[option]['default']
                if used:
                    # If the option is used, compose error message.
                    if 'usage_msg' in data:
                        usg = data['usage_msg']
                    else:
                        usg = 'set %s option' % (option, )
                    if not support_docker_api:
                        msg = 'Docker API version is %s. Minimum version required is %s to %s.'
                        msg = msg % (self.docker_api_version_str,
                                     data['docker_api_version'], usg)
                    elif not support_docker_py:
                        msg = "Docker SDK for Python version is %s (%s's Python %s). Minimum version required is %s to %s. "
                        if LooseVersion(data['docker_py_version']
                                        ) < LooseVersion('2.0.0'):
                            msg += DOCKERPYUPGRADE_RECOMMEND_DOCKER
                        elif self.docker_py_version < LooseVersion('2.0.0'):
                            msg += DOCKERPYUPGRADE_SWITCH_TO_DOCKER
                        else:
                            msg += DOCKERPYUPGRADE_UPGRADE_DOCKER
                        msg = msg % (docker_version, platform.node(),
                                     sys.executable, data['docker_py_version'],
                                     usg)
                    else:
                        # should not happen
                        msg = 'Cannot %s with your configuration.' % (usg, )
                    self.fail(msg)

    def get_container_by_id(self, container_id):
        try:
            self.log("Inspecting container Id %s" % container_id)
            result = self.inspect_container(container=container_id)
            self.log("Completed container inspection")
            return result
        except NotFound as dummy:
            return None
        except Exception as exc:
            self.fail("Error inspecting container: %s" % exc)

    def get_container(self, name=None):
        '''
        Lookup a container and return the inspection results.
        '''
        if name is None:
            return None

        search_name = name
        if not name.startswith('/'):
            search_name = '/' + name

        result = None
        try:
            for container in self.containers(all=True):
                self.log("testing container: %s" % (container['Names']))
                if isinstance(container['Names'],
                              list) and search_name in container['Names']:
                    result = container
                    break
                if container['Id'].startswith(name):
                    result = container
                    break
                if container['Id'] == name:
                    result = container
                    break
        except SSLError as exc:
            self._handle_ssl_error(exc)
        except Exception as exc:
            self.fail("Error retrieving container list: %s" % exc)

        if result is None:
            return None

        return self.get_container_by_id(result['Id'])

    def get_network(self, name=None, network_id=None):
        '''
        Lookup a network and return the inspection results.
        '''
        if name is None and network_id is None:
            return None

        result = None

        if network_id is None:
            try:
                for network in self.networks():
                    self.log("testing network: %s" % (network['Name']))
                    if name == network['Name']:
                        result = network
                        break
                    if network['Id'].startswith(name):
                        result = network
                        break
            except SSLError as exc:
                self._handle_ssl_error(exc)
            except Exception as exc:
                self.fail("Error retrieving network list: %s" % exc)

        if result is not None:
            network_id = result['Id']

        if network_id is not None:
            try:
                self.log("Inspecting network Id %s" % network_id)
                result = self.inspect_network(network_id)
                self.log("Completed network inspection")
            except NotFound as dummy:
                return None
            except Exception as exc:
                self.fail("Error inspecting network: %s" % exc)

        return result

    def find_image(self, name, tag):
        '''
        Lookup an image (by name and tag) and return the inspection results.
        '''
        if not name:
            return None

        self.log("Find image %s:%s" % (name, tag))
        images = self._image_lookup(name, tag)
        if not images:
            # In API <= 1.20 seeing 'docker.io/<name>' as the name of images pulled from docker hub
            registry, repo_name = auth.resolve_repository_name(name)
            if registry == 'docker.io':
                # If docker.io is explicitly there in name, the image
                # isn't found in some cases (#41509)
                self.log("Check for docker.io image: %s" % repo_name)
                images = self._image_lookup(repo_name, tag)
                if not images and repo_name.startswith('library/'):
                    # Sometimes library/xxx images are not found
                    lookup = repo_name[len('library/'):]
                    self.log("Check for docker.io image: %s" % lookup)
                    images = self._image_lookup(lookup, tag)
                if not images:
                    # Last case: if docker.io wasn't there, it can be that
                    # the image wasn't found either (#15586)
                    lookup = "%s/%s" % (registry, repo_name)
                    self.log("Check for docker.io image: %s" % lookup)
                    images = self._image_lookup(lookup, tag)

        if len(images) > 1:
            self.fail("Registry returned more than one result for %s:%s" %
                      (name, tag))

        if len(images) == 1:
            try:
                inspection = self.inspect_image(images[0]['Id'])
            except Exception as exc:
                self.fail("Error inspecting image %s:%s - %s" %
                          (name, tag, str(exc)))
            return inspection

        self.log("Image %s:%s not found." % (name, tag))
        return None

    def find_image_by_id(self, image_id):
        '''
        Lookup an image (by ID) and return the inspection results.
        '''
        if not image_id:
            return None

        self.log("Find image %s (by ID)" % image_id)
        try:
            inspection = self.inspect_image(image_id)
        except Exception as exc:
            self.fail("Error inspecting image ID %s - %s" %
                      (image_id, str(exc)))
        return inspection

    def _image_lookup(self, name, tag):
        '''
        Including a tag in the name parameter sent to the Docker SDK for Python images method
        does not work consistently. Instead, get the result set for name and manually check
        if the tag exists.
        '''
        try:
            response = self.images(name=name)
        except Exception as exc:
            self.fail("Error searching for image %s - %s" % (name, str(exc)))
        images = response
        if tag:
            lookup = "%s:%s" % (name, tag)
            lookup_digest = "%s@%s" % (name, tag)
            images = []
            for image in response:
                tags = image.get('RepoTags')
                digests = image.get('RepoDigests')
                if (tags and lookup in tags) or (digests and lookup_digest in digests):
                    images = [image]
                    break
        return images

    def pull_image(self, name, tag="latest"):
        '''
        Pull an image
        '''
        self.log("Pulling image %s:%s" % (name, tag))
        old_tag = self.find_image(name, tag)
        try:
            for line in self.pull(name, tag=tag, stream=True, decode=True):
                self.log(line, pretty_print=True)
                if line.get('error'):
                    if line.get('errorDetail'):
                        error_detail = line.get('errorDetail')
                        self.fail("Error pulling %s - code: %s message: %s" %
                                  (name, error_detail.get('code'),
                                   error_detail.get('message')))
                    else:
                        self.fail("Error pulling %s - %s" %
                                  (name, line.get('error')))
        except Exception as exc:
            self.fail("Error pulling image %s:%s - %s" % (name, tag, str(exc)))

        new_tag = self.find_image(name, tag)

        return new_tag, old_tag == new_tag

    def report_warnings(self, result, warnings_key=None):
        '''
        Checks result of client operation for warnings, and if present, outputs them.

        warnings_key should be a list of keys used to crawl the result dictionary.
        For example, if warnings_key == ['a', 'b'], the function will consider
        result['a']['b'] if these keys exist. If the result is a non-empty string, it
        will be reported as a warning. If the result is a list, every entry will be
        reported as a warning.

        In most cases (if warnings are returned at all), warnings_key should be
        ['Warnings'] or ['Warning']. The default value (if not specified) is ['Warnings'].
        '''
        if warnings_key is None:
            warnings_key = ['Warnings']
        for key in warnings_key:
            if not isinstance(result, Mapping):
                return
            result = result.get(key)
        if isinstance(result, Sequence):
            for warning in result:
                self.module.warn('Docker warning: {0}'.format(warning))
        elif isinstance(result, string_types) and result:
            self.module.warn('Docker warning: {0}'.format(result))

    def inspect_distribution(self, image, **kwargs):
        '''
        Get image digest by directly calling the Docker API when running Docker SDK < 4.0.0
        since prior versions did not support accessing private repositories.
        '''
        if self.docker_py_version < LooseVersion('4.0.0'):
            registry = auth.resolve_repository_name(image)[0]
            header = auth.get_config_header(self, registry)
            if header:
                return self._result(
                    self._get(self._url('/distribution/{0}/json', image),
                              headers={'X-Registry-Auth': header}),
                    json=True)
        return super(AssibleDockerClient,
                     self).inspect_distribution(image, **kwargs)
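
_get_minimal_versions above gates each module option on both the SDK and the API version. A reduced sketch of that per-option support check (standalone, using the same LooseVersion comparison as the class):

from distutils.version import LooseVersion

def option_supported(data, docker_py_version, docker_api_version):
    # An option is usable only when every declared minimum is satisfied;
    # a missing key means "no constraint".
    ok_py = ('docker_py_version' not in data
             or docker_py_version >= LooseVersion(data['docker_py_version']))
    ok_api = ('docker_api_version' not in data
              or docker_api_version >= LooseVersion(data['docker_api_version']))
    return ok_py and ok_api

print(option_supported({'docker_py_version': '2.0.0', 'docker_api_version': '1.25'},
                       LooseVersion('3.5.0'), LooseVersion('1.30')))  # True
print(option_supported({'docker_py_version': '4.0.0'},
                       LooseVersion('3.5.0'), LooseVersion('1.30')))  # False
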
Example No. 25
def main():
    argument_spec = pgutils.postgres_common_argument_spec()
    argument_spec.update(
        db=dict(type='str', required=True, aliases=['name']),
        owner=dict(type='str', default=''),
        template=dict(type='str', default=''),
        encoding=dict(type='str', default=''),
        lc_collate=dict(type='str', default=''),
        lc_ctype=dict(type='str', default=''),
        state=dict(type='str', default='present', choices=['absent', 'dump', 'present', 'restore']),
        target=dict(type='path', default=''),
        target_opts=dict(type='str', default=''),
        maintenance_db=dict(type='str', default="postgres"),
        session_role=dict(type='str'),
        conn_limit=dict(type='str', default=''),
        tablespace=dict(type='path', default=''),
        dump_extra_args=dict(type='str', default=None),
    )

    module = AssibleModule(
        argument_spec=argument_spec,
        supports_check_mode=True
    )

    db = module.params["db"]
    owner = module.params["owner"]
    template = module.params["template"]
    encoding = module.params["encoding"]
    lc_collate = module.params["lc_collate"]
    lc_ctype = module.params["lc_ctype"]
    target = module.params["target"]
    target_opts = module.params["target_opts"]
    state = module.params["state"]
    changed = False
    maintenance_db = module.params['maintenance_db']
    session_role = module.params["session_role"]
    conn_limit = module.params['conn_limit']
    tablespace = module.params['tablespace']
    dump_extra_args = module.params['dump_extra_args']

    raw_connection = state in ("dump", "restore")

    if not raw_connection:
        pgutils.ensure_required_libs(module)

    # To use defaults values, keyword arguments must be absent, so
    # check which values are empty and don't include in the **kw
    # dictionary
    params_map = {
        "login_host": "host",
        "login_user": "******",
        "login_password": "******",
        "port": "port",
        "ssl_mode": "sslmode",
        "ca_cert": "sslrootcert"
    }
    kw = dict((params_map[k], v) for (k, v) in iteritems(module.params)
              if k in params_map and v != '' and v is not None)

    # If a login_unix_socket is specified, incorporate it here.
    is_localhost = "host" not in kw or kw["host"] == "" or kw["host"] == "localhost"

    if is_localhost and module.params["login_unix_socket"] != "":
        kw["host"] = module.params["login_unix_socket"]

    if target == "":
        target = "{0}/{1}.sql".format(os.getcwd(), db)
        target = os.path.expanduser(target)

    if not raw_connection:
        try:
            db_connection = psycopg2.connect(database=maintenance_db, **kw)

            # Enable autocommit so we can create databases
            if psycopg2.__version__ >= '2.4.2':
                db_connection.autocommit = True
            else:
                db_connection.set_isolation_level(psycopg2.extensions.ISOLATION_LEVEL_AUTOCOMMIT)
            cursor = db_connection.cursor(cursor_factory=psycopg2.extras.DictCursor)

        except TypeError as e:
            if 'sslrootcert' in e.args[0]:
                module.fail_json(msg='Postgresql server must be at least version 8.4 to support sslrootcert. Exception: {0}'.format(to_native(e)),
                                 exception=traceback.format_exc())
            module.fail_json(msg="unable to connect to database: %s" % to_native(e), exception=traceback.format_exc())

        except Exception as e:
            module.fail_json(msg="unable to connect to database: %s" % to_native(e), exception=traceback.format_exc())

        if session_role:
            try:
                cursor.execute('SET ROLE "%s"' % session_role)
            except Exception as e:
                module.fail_json(msg="Could not switch role: %s" % to_native(e), exception=traceback.format_exc())

    try:
        if module.check_mode:
            if state == "absent":
                changed = db_exists(cursor, db)
            elif state == "present":
                changed = not db_matches(cursor, db, owner, template, encoding, lc_collate, lc_ctype, conn_limit, tablespace)
            module.exit_json(changed=changed, db=db, executed_commands=executed_commands)

        if state == "absent":
            try:
                changed = db_delete(cursor, db)
            except SQLParseError as e:
                module.fail_json(msg=to_native(e), exception=traceback.format_exc())

        elif state == "present":
            try:
                changed = db_create(cursor, db, owner, template, encoding, lc_collate, lc_ctype, conn_limit, tablespace)
            except SQLParseError as e:
                module.fail_json(msg=to_native(e), exception=traceback.format_exc())

        elif state in ("dump", "restore"):
            method = db_dump if state == "dump" else db_restore
            try:
                if state == 'dump':
                    rc, stdout, stderr, cmd = method(module, target, target_opts, db, dump_extra_args, **kw)
                else:
                    rc, stdout, stderr, cmd = method(module, target, target_opts, db, **kw)

                if rc != 0:
                    module.fail_json(msg=stderr, stdout=stdout, rc=rc, cmd=cmd)
                else:
                    module.exit_json(changed=True, msg=stdout, stderr=stderr, rc=rc, cmd=cmd,
                                     executed_commands=executed_commands)
            except SQLParseError as e:
                module.fail_json(msg=to_native(e), exception=traceback.format_exc())

    except NotSupportedError as e:
        module.fail_json(msg=to_native(e), exception=traceback.format_exc())
    except SystemExit:
        # Avoid catching this on Python 2.4
        raise
    except Exception as e:
        module.fail_json(msg="Database query failed: %s" % to_native(e), exception=traceback.format_exc())

    module.exit_json(changed=changed, db=db, executed_commands=executed_commands)
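
The params_map filtering above only forwards non-empty parameters, so the connection library's defaults still apply for anything the user left blank. A minimal sketch of that keyword construction:

def build_connect_kwargs(params, params_map):
    # Drop empty strings and None so absent keywords fall back to
    # psycopg2/libpq defaults.
    return dict((params_map[k], v) for k, v in params.items()
                if k in params_map and v != '' and v is not None)

params_map = {'login_host': 'host', 'login_user': 'user', 'port': 'port'}
print(build_connect_kwargs(
    {'login_host': '', 'login_user': 'admin', 'port': 5432, 'state': 'present'},
    params_map))
# {'user': 'admin', 'port': 5432}
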
Example No. 26
def main():

    module = AssibleModule(
        # not checking because of daisy chain to file module
        argument_spec=dict(
            src=dict(type='path', required=True),
            delimiter=dict(type='str'),
            dest=dict(type='path', required=True),
            backup=dict(type='bool', default=False),
            remote_src=dict(type='bool', default=True),
            regexp=dict(type='str'),
            ignore_hidden=dict(type='bool', default=False),
            validate=dict(type='str'),
        ),
        add_file_common_args=True,
    )

    changed = False
    path_hash = None
    dest_hash = None
    src = module.params['src']
    dest = module.params['dest']
    backup = module.params['backup']
    delimiter = module.params['delimiter']
    regexp = module.params['regexp']
    compiled_regexp = None
    ignore_hidden = module.params['ignore_hidden']
    validate = module.params.get('validate', None)

    result = dict(src=src, dest=dest)
    if not os.path.exists(src):
        module.fail_json(msg="Source (%s) does not exist" % src)

    if not os.path.isdir(src):
        module.fail_json(msg="Source (%s) is not a directory" % src)

    if regexp is not None:
        try:
            compiled_regexp = re.compile(regexp)
        except re.error as e:
            module.fail_json(msg="Invalid Regexp (%s) in \"%s\"" %
                             (to_native(e), regexp))

    if validate and "%s" not in validate:
        module.fail_json(msg="validate must contain %%s: %s" % validate)

    path = assemble_from_fragments(src, delimiter, compiled_regexp,
                                   ignore_hidden, module.tmpdir)
    path_hash = module.sha1(path)
    result['checksum'] = path_hash

    # Backwards compat.  This won't return data if FIPS mode is active
    try:
        pathmd5 = module.md5(path)
    except ValueError:
        pathmd5 = None
    result['md5sum'] = pathmd5

    if os.path.exists(dest):
        dest_hash = module.sha1(dest)

    if path_hash != dest_hash:
        if validate:
            (rc, out, err) = module.run_command(validate % path)
            result['validation'] = dict(rc=rc, stdout=out, stderr=err)
            if rc != 0:
                cleanup(path)
                module.fail_json(msg="failed to validate: rc:%s error:%s" %
                                 (rc, err))
        if backup and dest_hash is not None:
            result['backup_file'] = module.backup_local(dest)

        module.atomic_move(path,
                           dest,
                           unsafe_writes=module.params['unsafe_writes'])
        changed = True

    cleanup(path, result)

    # handle file permissions
    file_args = module.load_file_common_arguments(module.params)
    result['changed'] = module.set_fs_attributes_if_different(
        file_args, changed)

    # Mission complete
    result['msg'] = "OK"
    module.exit_json(**result)
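
Whether the assembled file replaces the destination is decided by comparing SHA-1 checksums. A rough stand-in for module.sha1() to show what that comparison rests on (hypothetical helper, not the module's implementation):

import hashlib

def sha1_of(path):
    # Hash in chunks so large assembled files are not read into memory at once.
    digest = hashlib.sha1()
    with open(path, 'rb') as handle:
        for chunk in iter(lambda: handle.read(65536), b''):
            digest.update(chunk)
    return digest.hexdigest()

# The example only moves the temp file when sha1_of(path) != sha1_of(dest);
# dest_hash stays None when the destination does not exist yet.
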
Example No. 27
def main():
    module = AssibleModule(
        argument_spec=dict(
            repo=dict(type='str', required=True),
            state=dict(type='str',
                       default='present',
                       choices=['absent', 'present']),
            mode=dict(type='raw'),
            update_cache=dict(type='bool',
                              default=True,
                              aliases=['update-cache']),
            update_cache_retries=dict(type='int', default=5),
            update_cache_retry_max_delay=dict(type='int', default=12),
            filename=dict(type='str'),
            # This should not be needed, but exists as a failsafe
            install_python_apt=dict(type='bool', default=True),
            validate_certs=dict(type='bool', default=True),
            codename=dict(type='str'),
        ),
        supports_check_mode=True,
    )

    params = module.params
    repo = module.params['repo']
    state = module.params['state']
    update_cache = module.params['update_cache']
    # Note: mode is referenced in SourcesList class via the passed in module (self here)

    sourceslist = None

    if not HAVE_PYTHON_APT:
        if params['install_python_apt']:
            install_python_apt(module)
        else:
            module.fail_json(
                msg='%s is not installed, and install_python_apt is False' %
                PYTHON_APT)

    if not repo:
        module.fail_json(
            msg='Please set argument \'repo\' to a non-empty value')

    if isinstance(distro, aptsources_distro.Distribution):
        sourceslist = UbuntuSourcesList(
            module,
            add_ppa_signing_keys_callback=get_add_ppa_signing_key_callback(
                module))
    else:
        module.fail_json(
            msg='Module apt_repository is not supported on target.')

    sourceslist_before = copy.deepcopy(sourceslist)
    sources_before = sourceslist.dump()

    try:
        if state == 'present':
            sourceslist.add_source(repo)
        elif state == 'absent':
            sourceslist.remove_source(repo)
    except InvalidSource as err:
        module.fail_json(msg='Invalid repository string: %s' % to_native(err))

    sources_after = sourceslist.dump()
    changed = sources_before != sources_after

    if changed and module._diff:
        diff = []
        for filename in set(sources_before.keys()).union(sources_after.keys()):
            diff.append({
                'before': sources_before.get(filename, ''),
                'after': sources_after.get(filename, ''),
                'before_header': (filename, '/dev/null')[filename not in sources_before],
                'after_header': (filename, '/dev/null')[filename not in sources_after]
            })
    else:
        diff = {}

    if changed and not module.check_mode:
        try:
            sourceslist.save()
            if update_cache:
                err = ''
                update_cache_retries = module.params.get(
                    'update_cache_retries')
                update_cache_retry_max_delay = module.params.get(
                    'update_cache_retry_max_delay')
                randomize = random.randint(0, 1000) / 1000.0

                for retry in range(update_cache_retries):
                    try:
                        cache = apt.Cache()
                        cache.update()
                        break
                    except apt.cache.FetchFailedException as e:
                        err = to_native(e)

                    # Use exponential backoff with a max fail count, plus a little bit of randomness
                    delay = 2**retry + randomize
                    if delay > update_cache_retry_max_delay:
                        delay = update_cache_retry_max_delay + randomize
                    time.sleep(delay)
                else:
                    revert_sources_list(sources_before, sources_after,
                                        sourceslist_before)
                    module.fail_json(msg='Failed to update apt cache: %s' %
                                     (err if err else 'unknown reason'))

        except (OSError, IOError) as err:
            revert_sources_list(sources_before, sources_after,
                                sourceslist_before)
            module.fail_json(msg=to_native(err))

    module.exit_json(changed=changed, repo=repo, state=state, diff=diff)
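
The cache-update retry loop above uses exponential backoff capped at update_cache_retry_max_delay, plus a small constant jitter. A standalone sketch of the delay schedule it produces:

import random

def backoff_delays(retries, max_delay):
    # 1s, 2s, 4s, ... capped at max_delay; the same jitter is added to
    # every delay, matching the loop in the example above.
    jitter = random.randint(0, 1000) / 1000.0
    for retry in range(retries):
        delay = 2 ** retry + jitter
        yield min(delay, max_delay + jitter)

print(list(backoff_delays(5, 12)))  # e.g. [1.4, 2.4, 4.4, 8.4, 12.4]
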
Example No. 28
def main():
    module = AssibleModule(
        argument_spec=dict(
            state=dict(type='str',
                       default='present',
                       choices=['present', 'absent']),
            mode=dict(type='str',
                      default='generate',
                      choices=['generate', 'update']),
            force=dict(type='bool', default=False),
            backup=dict(type='bool', default=False),
            path=dict(type='path', required=True),
            privatekey_path=dict(type='path'),
            privatekey_content=dict(type='str'),
            privatekey_passphrase=dict(type='str', no_log=True),
            issuer=dict(type='dict'),
            last_update=dict(type='str', default='+0s'),
            next_update=dict(type='str'),
            digest=dict(type='str', default='sha256'),
            ignore_timestamps=dict(type='bool', default=False),
            return_content=dict(type='bool', default=False),
            revoked_certificates=dict(
                type='list',
                elements='dict',
                options=dict(
                    path=dict(type='path'),
                    content=dict(type='str'),
                    serial_number=dict(type='int'),
                    revocation_date=dict(type='str', default='+0s'),
                    issuer=dict(type='list', elements='str'),
                    issuer_critical=dict(type='bool', default=False),
                    reason=dict(type='str',
                                choices=[
                                    'unspecified', 'key_compromise',
                                    'ca_compromise', 'affiliation_changed',
                                    'superseded', 'cessation_of_operation',
                                    'certificate_hold', 'privilege_withdrawn',
                                    'aa_compromise', 'remove_from_crl'
                                ]),
                    reason_critical=dict(type='bool', default=False),
                    invalidity_date=dict(type='str'),
                    invalidity_date_critical=dict(type='bool', default=False),
                ),
                required_one_of=[['path', 'content', 'serial_number']],
                mutually_exclusive=[['path', 'content', 'serial_number']],
            ),
        ),
        required_if=[
            ('state', 'present', ['privatekey_path',
                                  'privatekey_content'], True),
            ('state', 'present',
             ['issuer', 'next_update', 'revoked_certificates'], False),
        ],
        mutually_exclusive=(['privatekey_path', 'privatekey_content'], ),
        supports_check_mode=True,
        add_file_common_args=True,
    )

    if not CRYPTOGRAPHY_FOUND:
        module.fail_json(msg=missing_required_lib(
            'cryptography >= {0}'.format(MINIMAL_CRYPTOGRAPHY_VERSION)),
                         exception=CRYPTOGRAPHY_IMP_ERR)

    try:
        crl = CRL(module)

        if module.params['state'] == 'present':
            if module.check_mode:
                result = crl.dump(check_mode=True)
                result['changed'] = module.params['force'] or not crl.check()
                module.exit_json(**result)

            crl.generate()
        else:
            if module.check_mode:
                result = crl.dump(check_mode=True)
                result['changed'] = os.path.exists(module.params['path'])
                module.exit_json(**result)

            crl.remove()

        result = crl.dump()
        module.exit_json(**result)
    except crypto_utils.OpenSSLObjectError as exc:
        module.fail_json(msg=to_native(exc))
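
In check mode the module never touches the filesystem; it only predicts the change. A compact sketch of that prediction (function name is illustrative):

def predicted_change(state, force, file_exists, crl_up_to_date):
    # present: change when regeneration is forced or the on-disk CRL no
    # longer matches the requested contents.
    # absent: change only if there is a file to remove.
    if state == 'present':
        return force or not crl_up_to_date
    return file_exists

print(predicted_change('present', False, True, True))   # False
print(predicted_change('absent', False, True, True))    # True
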
Example No. 29
def main():
    arg_spec = dict(
        path=dict(required=True, aliases=["dest", "destfile"]),
        name=dict(required=True, aliases=["username"]),
        password=dict(required=False, default=None, no_log=True),
        crypt_scheme=dict(required=False, default="apr_md5_crypt"),
        state=dict(required=False, default="present"),
        create=dict(type='bool', default='yes'),
    )
    module = AssibleModule(argument_spec=arg_spec,
                           add_file_common_args=True,
                           supports_check_mode=True)

    path = module.params['path']
    username = module.params['name']
    password = module.params['password']
    crypt_scheme = module.params['crypt_scheme']
    state = module.params['state']
    create = module.params['create']
    check_mode = module.check_mode

    if not passlib_installed:
        module.fail_json(msg=missing_required_lib("passlib"),
                         exception=PASSLIB_IMP_ERR)

    # Check file for blank lines in effort to avoid "need more than 1 value to unpack" error.
    try:
        f = open(path, "r")
    except IOError:
        # No preexisting file to remove blank lines from
        f = None
    else:
        try:
            lines = f.readlines()
        finally:
            f.close()

        # If the file gets edited, it returns true, so only edit the file if it has blank lines
        strip = False
        for line in lines:
            if not line.strip():
                strip = True
                break

        if strip:
            # If check mode, create a temporary file
            if check_mode:
                temp = tempfile.NamedTemporaryFile()
                path = temp.name
            f = open(path, "w")
            try:
                for line in lines:
                    if line.strip():
                        f.write(line)
            finally:
                f.close()

    try:
        if state == 'present':
            (msg, changed) = present(path, username, password, crypt_scheme,
                                     create, check_mode)
        elif state == 'absent':
            if not os.path.exists(path):
                module.exit_json(msg="%s not present" % username,
                                 warnings="%s does not exist" % path,
                                 changed=False)
            (msg, changed) = absent(path, username, check_mode)
        else:
            module.fail_json(msg="Invalid state: %s" % state)

        check_file_attrs(module, changed, msg)
        module.exit_json(msg=msg, changed=changed)
    except Exception as e:
        module.fail_json(msg=to_native(e))
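
The pre-processing step above exists only to drop blank lines, which would otherwise break 'user:hash' parsing later on. A sketch of that filter on its own:

def strip_blank_lines(lines):
    # Keep only lines with visible content; order is preserved.
    return [line for line in lines if line.strip()]

print(strip_blank_lines(['alice:x\n', '\n', '   \n', 'bob:y\n']))
# ['alice:x\n', 'bob:y\n']
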
Example No. 30
def main():
    module = AssibleModule(
        supports_check_mode=True,
        argument_spec=dict(
            table=dict(type='str',
                       default='filter',
                       choices=['filter', 'nat', 'mangle', 'raw', 'security']),
            state=dict(type='str',
                       default='present',
                       choices=['absent', 'present']),
            action=dict(type='str',
                        default='append',
                        choices=['append', 'insert']),
            ip_version=dict(type='str',
                            default='ipv4',
                            choices=['ipv4', 'ipv6']),
            chain=dict(type='str'),
            rule_num=dict(type='str'),
            protocol=dict(type='str'),
            wait=dict(type='str'),
            source=dict(type='str'),
            to_source=dict(type='str'),
            destination=dict(type='str'),
            to_destination=dict(type='str'),
            match=dict(type='list', elements='str', default=[]),
            tcp_flags=dict(type='dict',
                           options=dict(flags=dict(type='list',
                                                   elements='str'),
                                        flags_set=dict(type='list',
                                                       elements='str'))),
            jump=dict(type='str'),
            gateway=dict(type='str'),
            log_prefix=dict(type='str'),
            log_level=dict(
                type='str',
                choices=[
                    '0', '1', '2', '3', '4', '5', '6', '7', 'emerg', 'alert',
                    'crit', 'error', 'warning', 'notice', 'info', 'debug'
                ],
                default=None,
            ),
            goto=dict(type='str'),
            in_interface=dict(type='str'),
            out_interface=dict(type='str'),
            fragment=dict(type='str'),
            set_counters=dict(type='str'),
            source_port=dict(type='str'),
            destination_port=dict(type='str'),
            to_ports=dict(type='str'),
            set_dscp_mark=dict(type='str'),
            set_dscp_mark_class=dict(type='str'),
            comment=dict(type='str'),
            ctstate=dict(type='list', elements='str', default=[]),
            src_range=dict(type='str'),
            dst_range=dict(type='str'),
            limit=dict(type='str'),
            limit_burst=dict(type='str'),
            uid_owner=dict(type='str'),
            gid_owner=dict(type='str'),
            reject_with=dict(type='str'),
            icmp_type=dict(type='str'),
            syn=dict(type='str',
                     default='ignore',
                     choices=['ignore', 'match', 'negate']),
            flush=dict(type='bool', default=False),
            policy=dict(type='str',
                        choices=['ACCEPT', 'DROP', 'QUEUE', 'RETURN']),
        ),
        mutually_exclusive=(
            ['set_dscp_mark', 'set_dscp_mark_class'],
            ['flush', 'policy'],
        ),
        required_if=[
            ['jump', 'TEE', ['gateway']],
            ['jump', 'tee', ['gateway']],
        ])
    args = dict(
        changed=False,
        failed=False,
        ip_version=module.params['ip_version'],
        table=module.params['table'],
        chain=module.params['chain'],
        flush=module.params['flush'],
        rule=' '.join(construct_rule(module.params)),
        state=module.params['state'],
    )

    ip_version = module.params['ip_version']
    iptables_path = module.get_bin_path(BINS[ip_version], True)

    # Check if chain option is required
    if args['flush'] is False and args['chain'] is None:
        module.fail_json(
            msg="Either chain or flush parameter must be specified.")

    if module.params.get('log_prefix', None) or module.params.get('log_level', None):
        if module.params['jump'] is None:
            module.params['jump'] = 'LOG'
        elif module.params['jump'] != 'LOG':
            module.fail_json(
                msg="Logging options can only be used with the LOG jump target."
            )

    # Check if wait option is supported
    iptables_version = LooseVersion(get_iptables_version(
        iptables_path, module))

    if iptables_version >= LooseVersion(IPTABLES_WAIT_SUPPORT_ADDED):
        if iptables_version < LooseVersion(
                IPTABLES_WAIT_WITH_SECONDS_SUPPORT_ADDED):
            module.params['wait'] = ''
    else:
        module.params['wait'] = None

    # Flush the table
    if args['flush'] is True:
        args['changed'] = True
        if not module.check_mode:
            flush_table(iptables_path, module, module.params)

    # Set the policy
    elif module.params['policy']:
        current_policy = get_chain_policy(iptables_path, module, module.params)
        if not current_policy:
            module.fail_json(msg='Can\'t detect current policy')

        changed = current_policy != module.params['policy']
        args['changed'] = changed
        if changed and not module.check_mode:
            set_chain_policy(iptables_path, module, module.params)

    else:
        insert = (module.params['action'] == 'insert')
        rule_is_present = check_present(iptables_path, module, module.params)
        should_be_present = (args['state'] == 'present')

        # Check if target is up to date
        args['changed'] = (rule_is_present != should_be_present)
        if args['changed'] is False:
            # Target is already up to date
            module.exit_json(**args)

        # Check only; don't modify
        if not module.check_mode:
            if should_be_present:
                if insert:
                    insert_rule(iptables_path, module, module.params)
                else:
                    append_rule(iptables_path, module, module.params)
            else:
                remove_rule(iptables_path, module, module.params)

    module.exit_json(**args)
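
The wait handling above collapses to three cases depending on the detected iptables version. A standalone sketch of that normalization; the threshold values here are assumptions standing in for IPTABLES_WAIT_SUPPORT_ADDED and IPTABLES_WAIT_WITH_SECONDS_SUPPORT_ADDED:

from distutils.version import LooseVersion

def normalize_wait(wait, iptables_version, wait_added='1.4.20', seconds_added='1.6.0'):
    # Older iptables has no -w at all (None), a middle range accepts only the
    # bare flag (''), and newer releases accept -w with a seconds value.
    version = LooseVersion(iptables_version)
    if version < LooseVersion(wait_added):
        return None
    if version < LooseVersion(seconds_added):
        return ''
    return wait

print(normalize_wait('10', '1.4.7'))   # None
print(normalize_wait('10', '1.4.21'))  # ''
print(normalize_wait('10', '1.8.4'))   # '10'
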