Code Example #1
    def _validate_data_set_or_path(self, path):
        arg_defs = dict(
            path=dict(arg_type="data_set_or_path"),
        )
        parser = BetterArgParser(arg_defs)
        parsed_args = parser.parse_args({"path": path})
        return parsed_args.get("path")
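Note: every example on this page follows the same BetterArgParser pattern: declare an arg_defs dict, construct the parser, call parse_args, and read the normalized values back with get. The standalone sketch below illustrates that flow; the import path and the validate_volume helper are assumptions for illustration, not code taken from the collection.

# Minimal sketch of the shared pattern, assuming BetterArgParser is importable
# from the collection's module_utils (the exact path may differ).
from ansible_collections.ibm.ibm_zos_core.plugins.module_utils.better_arg_parser import (
    BetterArgParser,
)


def validate_volume(volume):
    # Declare the expected shape of the input, then let the parser normalize it.
    arg_defs = dict(
        volume=dict(arg_type="volume", required=False),
    )
    parser = BetterArgParser(arg_defs)
    parsed_args = parser.parse_args({"volume": volume})
    return parsed_args.get("volume")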
Code Example #2
def run_module():
    module_args = dict(commands=dict(type="raw",
                                     required=True,
                                     aliases=["command"]), )

    module = AnsibleModule(argument_spec=module_args, supports_check_mode=True)
    result = dict(changed=False, )

    arg_defs = dict(commands=dict(type=list_or_str_type,
                                  required=True,
                                  aliases=["command"]), )
    try:
        parser = BetterArgParser(arg_defs)
        parsed_args = parser.parse_args(module.params)
    except ValueError as e:
        module.fail_json(msg=repr(e), **result)

    commands = parsed_args.get("commands")

    try:
        result["output"] = run_tso_command(commands, module)
        for cmd in result.get("output"):
            if cmd.get("rc") != 0:
                module.fail_json(msg='The TSO command "' +
                                 cmd.get("command", "") +
                                 '" execution failed.',
                                 **result)

        result["changed"] = True
        module.exit_json(**result)

    except Exception as e:
        module.fail_json(msg="An unexpected error occurred: {0}".format(
            repr(e)),
                         **result)
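Code Example #2 passes a callable, list_or_str_type, as the arg type instead of a built-in type name. The sketch below shows one plausible shape for such a callable; the (contents, dependencies) signature and the normalization logic are assumptions, not the collection's actual implementation.

# Hypothetical custom arg_type callable for BetterArgParser (assumed contract:
# receive the raw value plus any resolved dependencies, return the normalized
# value, or raise ValueError to reject it).
def list_or_str_type(contents, dependencies):
    if contents is None:
        return None
    if isinstance(contents, list):
        return [str(item) for item in contents]
    return str(contents)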
Code Example #3
    def _validate_data_set_name(self, ds):
        arg_defs = dict(
            ds=dict(arg_type="data_set"),
        )
        parser = BetterArgParser(arg_defs)
        parsed_args = parser.parse_args({"ds": ds})
        return parsed_args.get("ds")
Code Example #4
    def _validate_encoding(self, encoding):
        arg_defs = dict(
            encoding=dict(arg_type="encoding"),
        )
        parser = BetterArgParser(arg_defs)
        parsed_args = parser.parse_args({"encoding": encoding})
        return parsed_args.get("encoding")
Code Example #5
File: job.py  Project: thedoubl3j/ibm_zos_core
def job_output(module, job_id=None, owner=None, job_name=None, dd_name=None):
    """Get the output from a z/OS job based on various search criteria.

    Arguments:
        module {AnsibleModule} -- The AnsibleModule object from the running module.

    Keyword Arguments:
        job_id {str} -- The job ID to search for (default: {None})
        owner {str} -- The owner of the job (default: {None})
        job_name {str} -- The job name to search for (default: {None})
        dd_name {str} -- The data definition to retrieve (default: {None})

    Raises:
        RuntimeError: When job output cannot be retrieved successfully but job exists.
        RuntimeError: When no job output is found

    Returns:
        dict[str, list[dict]] -- The output information for a given job.
    """

    arg_defs = dict(
        job_id=dict(arg_type="qualifier_pattern"),
        owner=dict(arg_type="qualifier_pattern"),
        job_name=dict(arg_type="qualifier_pattern"),
        dd_name=dict(arg_type=_ddname_pattern),
    )

    parser = BetterArgParser(arg_defs)
    parsed_args = parser.parse_args(
        {"job_id": job_id, "owner": owner, "job_name": job_name, "dd_name": dd_name}
    )

    job_id = parsed_args.get("job_id") or ""
    job_name = parsed_args.get("job_name") or ""
    owner = parsed_args.get("owner") or ""
    dd_name = parsed_args.get("dd_name") or ""

    job_detail_json = {}
    rc, out, err = _get_job_json_str(module, job_id, owner, job_name, dd_name)
    if rc != 0:
        raise RuntimeError(
            "Failed to retrieve job output. RC: {0} Error: {1}".format(
                str(rc), str(err)
            )
        )
    if not out:
        raise RuntimeError("Failed to retrieve job output. No job output found.")
    job_detail_json = json.loads(out, strict=False)
    for job in job_detail_json.get("jobs"):
        job["ret_code"] = {} if job.get("ret_code") is None else job.get("ret_code")
        job["ret_code"]["code"] = _get_return_code_num(
            job.get("ret_code", {}).get("msg", "")
        )
        job["ret_code"]["msg_code"] = _get_return_code_str(
            job.get("ret_code", {}).get("msg", "")
        )
        job["ret_code"]["msg_txt"] = ""
    return job_detail_json
Code Example #6
def parse_params(params):
    arg_defs = dict(
        cmd=dict(arg_type="str", required=True),
        verbose=dict(arg_type="bool", required=False),
        debug=dict(arg_type="bool", required=False),
    )
    parser = BetterArgParser(arg_defs)
    new_params = parser.parse_args(params)
    return new_params
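A hedged usage example for the parse_params helper above: the command string is invented for illustration, but it shows the dict-in / normalized-dict-out contract.

# Hypothetical call; parse_params returns a dict of validated values.
params = {"cmd": "D U,ALL", "verbose": True, "debug": False}
parsed = parse_params(params)
print(parsed.get("cmd"), parsed.get("verbose"), parsed.get("debug"))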
Code Example #7
def parse_params(params):
    arg_defs = dict(
        system=dict(arg_type=system_type, required=False),
        message_id=dict(arg_type=message_id_type, required=False),
        job_name=dict(arg_type=job_name_type, required=False),
    )
    parser = BetterArgParser(arg_defs)
    new_params = parser.parse_args(params)
    return new_params
Code Example #8
def parse_and_validate_args(params):
    """Parse and validate arguments to be used by remainder of module.

    Args:
        params (dict): The params as returned from AnsibleModule instantiation.

    Returns:
        dict: The updated params after additional parsing and validation.
    """
    arg_defs = dict(
        operation=dict(type="str",
                       required=True,
                       choices=["backup", "restore"]),
        data_sets=dict(
            required=False,
            type="dict",
            options=dict(
                include=dict(type=data_set_pattern_type, required=False),
                exclude=dict(type=data_set_pattern_type, required=False),
            ),
        ),
        space=dict(
            type=space_type,
            required=False,
            aliases=["size"],
            dependencies=["full_volume"],
        ),
        space_type=dict(
            type=space_type_type,
            required=False,
            aliases=["unit"],
            dependencies=["full_volume"],
        ),
        volume=dict(type="volume", required=False, dependencies=["data_sets"]),
        full_volume=dict(type=full_volume_type,
                         default=False,
                         dependencies=["volume"]),
        temp_volume=dict(type="volume",
                         required=False,
                         aliases=["dest_volume"]),
        backup_name=dict(type=backup_name_type, required=False),
        recover=dict(type="bool", default=False),
        overwrite=dict(type="bool", default=False),
        sms_storage_class=dict(type=sms_type, required=False),
        sms_management_class=dict(type=sms_type, required=False),
        hlq=dict(type=hlq_type,
                 default=hlq_default,
                 dependencies=["operation"]),
    )

    parsed_args = BetterArgParser(arg_defs).parse_args(params)
    parsed_args = {
        key: value
        for key, value in parsed_args.items() if value is not None
    }
    return parsed_args
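A hedged usage sketch for parse_and_validate_args above: the final dict comprehension drops keys whose value is None, so only options the caller actually set survive. The params below are invented, and the custom type callables (space_type, backup_name_type, and so on) must already be defined for the parse to run.

# Hypothetical invocation; "backup" is one of the documented choices for operation.
params = {
    "operation": "backup",
    "data_sets": {"include": ["USER.**"], "exclude": None},
    "recover": True,
}
parsed = parse_and_validate_args(params)
# Keys that parsed to None (for example space or volume) are absent from the result.
print(sorted(parsed.keys()))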
Code Example #9
File: job.py  Project: feynmanliang/ibm_zos_core
def job_output(job_id=None, owner=None, job_name=None, dd_name=None):
    """Get the output from a z/OS job based on various search criteria.

    Keyword Arguments:
        job_id {str} -- The job ID to search for (default: {None})
        owner {str} -- The owner of the job (default: {None})
        job_name {str} -- The job name to search for (default: {None})
        dd_name {str} -- The data definition to retrieve (default: {None})

    Raises:
        RuntimeError: When job output cannot be retrieved successfully but job exists.
        RuntimeError: When no job output is found

    Returns:
        list[dict] -- The output information for a list of jobs matching specified criteria.
    """
    arg_defs = dict(
        job_id=dict(arg_type="qualifier_pattern"),
        owner=dict(arg_type="qualifier_pattern"),
        job_name=dict(arg_type="qualifier_pattern"),
        dd_name=dict(arg_type=_ddname_pattern),
    )

    parser = BetterArgParser(arg_defs)
    parsed_args = parser.parse_args(
        {"job_id": job_id, "owner": owner, "job_name": job_name, "dd_name": dd_name}
    )

    job_id = parsed_args.get("job_id") or "*"
    job_name = parsed_args.get("job_name") or "*"
    owner = parsed_args.get("owner") or "*"
    dd_name = parsed_args.get("dd_name") or ""

    job_detail_json = _get_job_output(job_id, owner, job_name, dd_name)
    if len(job_detail_json) == 0:
        # some systems have issues with "*" while some require it to see results
        job_id = "" if job_id == "*" else job_id
        owner = "" if owner == "*" else owner
        job_name = "" if job_name == "*" else job_name
        job_detail_json = _get_job_output(job_id, owner, job_name, dd_name)

    for job in job_detail_json:
        job["ret_code"] = {} if job.get("ret_code") is None else job.get("ret_code")
        job["ret_code"]["code"] = _get_return_code_num(
            job.get("ret_code").get("msg", "")
        )
        job["ret_code"]["msg_code"] = _get_return_code_str(
            job.get("ret_code").get("msg", "")
        )
        job["ret_code"]["msg_txt"] = ""
        if job.get("ret_code").get("msg", "") == "":
            job["ret_code"]["msg"] = "AC"
    return job_detail_json
Code Example #10
File: job.py  Project: tdanekkb/ibm_zos_core
def job_status(job_id=None, owner=None, job_name=None):
    """Get the status information of a z/OS job based on various search criteria.

    Keyword Arguments:
        job_id {str} -- The job ID to search for (default: {None})
        owner {str} -- The owner of the job (default: {None})
        job_name {str} -- The job name to search for (default: {None})

    Raises:
        RuntimeError: When job status cannot be retrieved successfully but job exists.
        RuntimeError: When no job status is found.

    Returns:
        list[dict] -- The status information for a list of jobs matching search criteria.
    """
    arg_defs = dict(
        job_id=dict(arg_type="qualifier_pattern"),
        owner=dict(arg_type="qualifier_pattern"),
        job_name=dict(arg_type="qualifier_pattern"),
    )

    parser = BetterArgParser(arg_defs)
    parsed_args = parser.parse_args({
        "job_id": job_id,
        "owner": owner,
        "job_name": job_name
    })

    job_id = parsed_args.get("job_id") or ""
    job_name = parsed_args.get("job_name") or ""
    owner = parsed_args.get("owner") or ""

    job_status_json = {}
    rc, out, err = _get_job_status_str(job_id, owner, job_name)
    if rc != 0:
        raise RuntimeError(
            "Failed to retrieve job status. RC: {0} Error: {1}".format(
                str(rc), str(err)))
    if not out:
        raise RuntimeError(
            "Failed to retrieve job status. No job status found.")
    job_status_json = json.loads(out, strict=False)
    for job in job_status_json:
        job["ret_code"] = {} if job.get("ret_code") is None else job.get(
            "ret_code")
        job["ret_code"]["code"] = _get_return_code_num(
            job.get("ret_code").get("msg", ""))
        job["ret_code"]["msg_code"] = _get_return_code_str(
            job.get("ret_code").get("msg", ""))
        job["ret_code"]["msg_txt"] = ""
        if job.get("ret_code").get("msg", "") == "":
            job["ret_code"]["msg"] = "AC"
    return job_status_json
Code Example #11
File: job.py  Project: feynmanliang/ibm_zos_core
def job_status(job_id=None, owner=None, job_name=None):
    """Get the status information of a z/OS job based on various search criteria.

    Keyword Arguments:
        job_id {str} -- The job ID to search for (default: {None})
        owner {str} -- The owner of the job (default: {None})
        job_name {str} -- The job name to search for (default: {None})

    Raises:
        RuntimeError: When job status cannot be retrieved successfully but job exists.
        RuntimeError: When no job status is found.

    Returns:
        list[dict] -- The status information for a list of jobs matching search criteria.
    """
    arg_defs = dict(
        job_id=dict(arg_type="qualifier_pattern"),
        owner=dict(arg_type="qualifier_pattern"),
        job_name=dict(arg_type="qualifier_pattern"),
    )

    parser = BetterArgParser(arg_defs)
    parsed_args = parser.parse_args(
        {"job_id": job_id, "owner": owner, "job_name": job_name}
    )

    job_id = parsed_args.get("job_id") or "*"
    job_name = parsed_args.get("job_name") or "*"
    owner = parsed_args.get("owner") or "*"

    job_status_json = _get_job_status(job_id, owner, job_name)
    if len(job_status_json) == 0:
        job_id = "" if job_id == "*" else job_id
        job_name = "" if job_name == "*" else job_name
        owner = "" if owner == "*" else owner
        job_status_json = _get_job_status(job_id, owner, job_name)

    for job in job_status_json:
        job["ret_code"] = {} if job.get("ret_code") is None else job.get("ret_code")
        job["ret_code"]["code"] = _get_return_code_num(
            job.get("ret_code").get("msg", "")
        )
        job["ret_code"]["msg_code"] = _get_return_code_str(
            job.get("ret_code").get("msg", "")
        )
        job["ret_code"]["msg_txt"] = ""
        if job.get("ret_code").get("msg", "") == "":
            job["ret_code"]["msg"] = "AC"
    return job_status_json
Code Example #12
File: zos_find.py  Project: feynmanliang/ibm_zos_core
def main():
    module = AnsibleModule(argument_spec=dict(
        age=dict(type="str", required=False),
        age_stamp=dict(type="str",
                       required=False,
                       choices=["creation_date", "ref_date"],
                       default="creation_date"),
        contains=dict(type="str", required=False),
        excludes=dict(type="list", required=False, aliases=["exclude"]),
        patterns=dict(type="list", required=True),
        size=dict(type="str", required=False),
        pds_patterns=dict(
            type="list", required=False, aliases=["pds_pattern", "pds_paths"]),
        resource_type=dict(type="str",
                           required=False,
                           default="nonvsam",
                           choices=["cluster", "data", "index", "nonvsam"]),
        volume=dict(type="list", required=False, aliases=["volumes"])))

    arg_def = dict(age=dict(arg_type="str", required=False),
                   age_stamp=dict(arg_type="str",
                                  required=False,
                                  choices=["creation_date", "ref_date"],
                                  default="creation_date"),
                   contains=dict(arg_type="str", required=False),
                   excludes=dict(arg_type="list",
                                 required=False,
                                 aliases=["exclude"]),
                   patterns=dict(arg_type="list", required=True),
                   size=dict(arg_type="str", required=False),
                   pds_patterns=dict(arg_type="list",
                                     required=False,
                                     aliases=["pds_pattern", "pds_paths"]),
                   resource_type=dict(
                       arg_type="str",
                       required=False,
                       default="nonvsam",
                       choices=["cluster", "data", "index", "nonvsam"]),
                   volume=dict(arg_type="list",
                               required=False,
                               aliases=["volumes"]))
    try:
        BetterArgParser(arg_def).parse_args(module.params)
    except ValueError as err:
        module.fail_json(msg="Parameter verification failed", stderr=str(err))
    module.exit_json(**run_module(module))
Code Example #13
def run_module():
    global module

    module_args = dict(
        command_input=dict(type="str",
                           required=True,
                           choices=["build", "BUILD", "delete", "DELETE"]),
        compression=dict(type="str",
                         required=False,
                         choices=[
                             "precomp", "postcomp", "precomp,postcomp",
                             "PRECOMP", "POSTCOMP", "PRECOMP,POSTCOMP"
                         ]),
        psb_name=dict(type="list", elements="str", required=False),
        dbd_name=dict(type="list", elements="str", required=False),
        acb_lib=dict(type="str", required=True),
        psb_lib=dict(type="list", required=True),
        dbd_lib=dict(type="list", required=True),
        reslib=dict(type="list", required=False),
        steplib=dict(type="list", required=False),
        build_psb=dict(type="bool", required=False, default=True),
    )

    result = dict(changed=True, msg='', content='', rc='', debug='')

    module = AnsibleModule(argument_spec=module_args, supports_check_mode=True)

    # Retrieve properties set by the user
    module_defs = dict(
        command_input=dict(arg_type="str", required=True),
        compression=dict(arg_type="str", required=False, default=""),
        psb_name=dict(arg_type=str_or_list_of_str, required=False),
        dbd_name=dict(arg_type=str_or_list_of_str, required=False),
        acb_lib=dict(arg_type="str", required=True),
        psb_lib=dict(arg_type="list", elements="str", required=True),
        dbd_lib=dict(arg_type="list", elements="str", required=True),
        reslib=dict(arg_type="list", elements="str", required=False),
        steplib=dict(arg_type="list", elements="str", required=False),
        build_psb=dict(arg_type="bool", required=False, default=True),
    )

    # Parse the properties
    parser = BetterArgParser(module_defs)
    parsed_args = parser.parse_args(module.params)

    command_input = parsed_args.get("command_input")
    compression = parsed_args.get("compression")
    psb_name = parsed_args.get("psb_name")
    dbd_name = parsed_args.get("dbd_name")
    acb_lib = parsed_args.get("acb_lib")
    psb_lib = parsed_args.get("psb_lib")
    dbd_lib = parsed_args.get("dbd_lib")
    reslib = parsed_args.get("reslib")
    steplib = parsed_args.get("steplib")
    build_psb = parsed_args.get("build_psb")

    if not steplib:
        try:
            steplib = []
            steplib_str = env_fallback('STEPLIB')
            list_str = steplib_str.split(" ")
            steplib += list_str
        except AnsibleFallbackNotFound as e:
            module.fail_json(
                msg=
                "The input option 'steplib' is not provided. Please provide it in the environment "
                "variables 'STEPLIB', or in the module input option 'steplib'. ",
                **result)

    try:
        acbgen_obj = acbgen(command_input, compression, psb_name, dbd_name,
                            acb_lib, psb_lib, dbd_lib, reslib, steplib,
                            build_psb)
        response = acbgen_obj.execute()

        if response.get('rc') and int(response.get('rc')) > 4:
            result['changed'] = False
            result['content'] = response.get('output', "")
            result['msg'] = em.FAILURE_MSG
            result['debug'] = response.get('error', "")
            result['rc'] = response.get('rc')
        else:
            result['changed'] = True
            result['content'] = response.get('output', "")
            result['debug'] = response.get('error', "")
            result['msg'] = em.SUCCESS_MSG
            if response.get('rc', 8) <= 4:
                result['rc'] = 0

    except Exception as e:
        result['msg'] = repr(e)
        module.fail_json(**result)
    finally:
        pass

    module.exit_json(**result)
Code Example #14
def run_module():

    module_args = dict(
        src=dict(type="str", required=True),
        wait=dict(type="bool", required=False),
        location=dict(
            type="str",
            default="DATA_SET",
            choices=["DATA_SET", "USS", "LOCAL"],
        ),
        encoding=dict(
            type="str",
            default="UTF-8",
            choices=[
                "UTF-8", "ASCII", "ISO-8859-1", "EBCDIC", "IBM-037", "IBM-1047"
            ],
        ),
        volume=dict(type="str", required=False),
        return_output=dict(type="bool", required=False, default=True),
        wait_time_s=dict(type="int", default=60),
        max_rc=dict(type="int", required=False),
        temp_file=dict(type="path", required=False),
    )

    module = AnsibleModule(argument_spec=module_args, supports_check_mode=True)

    arg_defs = dict(
        src=dict(arg_type=data_set_or_path_type, required=True),
        wait=dict(arg_type="bool", required=False),
        location=dict(
            arg_type="str",
            default="DATA_SET",
            choices=["DATA_SET", "USS", "LOCAL"],
        ),
        encoding=dict(arg_type=encoding_type, default="UTF-8"),
        volume=dict(arg_type="volume", required=False),
        return_output=dict(arg_type="bool", default=True),
        wait_time_s=dict(arg_type="int", required=False, default=60),
        max_rc=dict(arg_type="int", required=False),
        temp_file=dict(arg_type="path", required=False),
    )

    parser = BetterArgParser(arg_defs)
    parsed_args = parser.parse_args(module.params)

    result = dict(changed=False)

    location = parsed_args.get("location")
    volume = parsed_args.get("volume")
    wait = parsed_args.get("wait")
    src = parsed_args.get("src")
    return_output = parsed_args.get("return_output")
    wait_time_s = parsed_args.get("wait_time_s")
    max_rc = parsed_args.get("max_rc")
    # get temporary file names for copied files
    temp_file = parsed_args.get("temp_file")
    if temp_file:
        temp_file_2 = NamedTemporaryFile(delete=True)

    if wait_time_s <= 0:
        module.fail_json(
            msg=
            "The option wait_time_s is not valid it just be greater than 0.",
            **result)

    DSN_REGEX = r"^(([A-Z]{1}[A-Z0-9]{0,7})([.]{1})){1,21}[A-Z]{1}[A-Z0-9]{0,7}([(]([A-Z]{1}[A-Z0-9]{0,7})[)]){0,1}?$"

    # calculate the job elapsed time
    duration = 0
    try:
        if location == "DATA_SET":
            data_set_name_pattern = re.compile(DSN_REGEX, re.IGNORECASE)
            check = data_set_name_pattern.fullmatch(src)
            if check:
                if volume is None or volume == "":
                    jobId = submit_pds_jcl(src)
                else:
                    jobId = submit_jcl_in_volume(src, volume, module)
            else:
                module.fail_json(
                    msg=
                    "The parameter src for data set is not a valid name pattern. Please check the src input.",
                    **result)
        elif location == "USS":
            jobId = submit_uss_jcl(src, module)
        else:
            # For local file, it has been copied to the temp directory in action plugin.
            encoding = parsed_args.get("encoding")
            if encoding == "EBCDIC" or encoding == "IBM-037" or encoding == "IBM-1047":
                jobId = submit_uss_jcl(temp_file, module)
            # 'UTF-8' 'ASCII' encoding will be converted.
            elif (encoding == "UTF-8" or encoding == "ISO-8859-1"
                  or encoding == "ASCII" or encoding is None):
                (conv_rc, stdout, stderr) = module.run_command(
                    "iconv -f ISO8859-1 -t IBM-1047 %s > %s" %
                    (quote(temp_file), quote(temp_file_2.name)),
                    use_unsafe_shell=True,
                )
                if conv_rc == 0:
                    jobId = submit_uss_jcl(temp_file_2.name, module)
                else:
                    module.fail_json(
                        msg=
                        "The Local file encoding conversion failed. Please check the source file."
                        + stderr,
                        **result)
            else:
                module.fail_json(msg=(
                    "The Local file encoding format is not supported."
                    "The supported encoding is UTF-8, ASCII, ISO-8859-1, EBCDIC, IBM-037, IBM-1047. Default is UTF-8."
                ),
                                 **result)
    except SubmitJCLError as e:
        module.fail_json(msg=repr(e), **result)
    if jobId is None or jobId == "":
        result["job_id"] = jobId
        module.fail_json(
            msg=
            "JOB ID RETURNED IS None. PLEASE CHECK WHETHER THE JCL IS CORRECT.",
            **result)

    result["job_id"] = jobId
    if wait is True:
        try:
            waitJob = query_jobs_status(jobId)
        except SubmitJCLError as e:
            module.fail_json(msg=repr(e), **result)
        while waitJob[0].get("status") == "AC":  # AC means in progress
            sleep(1)
            duration = duration + 1
            waitJob = Jobs.list(job_id=jobId)
            if waitJob[0].get("status") == "CC":  # CC means completed
                break
            if duration == wait_time_s:  # Long running task. timeout return
                break

    try:
        result = get_job_info(module, jobId, return_output)
        if wait is True and return_output is True and max_rc is not None:
            assert_valid_return_code(
                max_rc,
                result.get("jobs")[0].get("ret_code").get("code"))
    except SubmitJCLError as e:
        module.fail_json(msg=repr(e), **result)
    except Exception as e:
        module.fail_json(msg=repr(e), **result)
    finally:
        if temp_file:
            remove(temp_file)
    result["duration"] = duration
    if duration == wait_time_s:
        result["message"] = {
            "stdout":
            "Submit JCL operation succeeded but it is a long running job. Timeout is "
            + str(wait_time_s) + " seconds."
        }
    else:
        result["message"] = {"stdout": "Submit JCL operation succeeded."}
    result["changed"] = True
    module.exit_json(**result)
Code Example #15
    def _validate_common_input(self):
        try:
            module_defs = dict(
                online_batch=dict(arg_type="bool", required=False),
                ims_id=dict(arg_type="str", required=False),
                dbrc=dict(arg_type="bool", required=False),
                irlm_id=dict(arg_type="str", required=False),
                reslib=dict(arg_type="list",
                            elements="data_set",
                            required=False),
                buffer_pool_param_dataset=dict(arg_type="data_set",
                                               required=False),
                primary_log_dataset=dict(
                    arg_type="dict",
                    options=dict(
                        dataset_name=dict(arg_type="data_set", required=True),
                        disposition=dict(arg_type="str",
                                         required=False,
                                         choices=['EXCL', 'OLD', 'SHR',
                                                  'NEW']),
                        primary=dict(arg_type="int", required=False),
                        primary_unit=dict(arg_type="str",
                                          required=False,
                                          choices=[
                                              'K', 'KB', 'M', 'MB', 'G', 'GB',
                                              'C', 'CYL', 'T', 'TRK'
                                          ]),
                        secondary=dict(arg_type="int", required=False),
                        secondary_unit=dict(arg_type="str",
                                            required=False,
                                            choices=[
                                                'K', 'KB', 'M', 'MB', 'G',
                                                'GB', 'C', 'CYL', 'T', 'TRK'
                                            ]),
                        normal_disposition=dict(
                            arg_type="str",
                            required=False,
                            choices=['KEEP', 'DELETE', 'CATLG', 'UNCATLG']),
                        conditional_disposition=dict(
                            arg_type="str",
                            required=False,
                            choices=['KEEP', 'DELETE', 'CATLG', 'UNCATLG']),
                        record_format=dict(
                            arg_type="str",
                            required=False,
                            choices=['FB', 'VB', 'FBA', 'VBA', 'U']),
                        record_length=dict(arg_type="int", required=False),
                        block_size=dict(arg_type="int", required=False),
                        type=dict(arg_type="str",
                                  required=False,
                                  choices=[
                                      'SEQ', 'BASIC', 'LARGE', 'PDS', 'PDSE',
                                      'LIBRARY', 'LDS', 'RRDS', 'ESDS', 'KSDS'
                                  ]),
                        volumes=dict(type="list",
                                     required=False,
                                     elements="str"),
                        storage_class=dict(type="str", required=False),
                        management_class=dict(type="str", required=False),
                        data_class=dict(type="str", required=False)),
                    required=False),
                psb_lib=dict(arg_type="list",
                             elements="data_set",
                             required=True),
                dbd_lib=dict(arg_type="list",
                             elements="data_set",
                             required=True),
                proclib=dict(arg_type="list",
                             elements="data_set",
                             required=True),
                steplib=dict(arg_type="list",
                             elements="data_set",
                             required=False),
                sysprint=dict(arg_type="data_set", required=False),
            )

            parser = BetterArgParser(module_defs)
            self.parsed_args = parser.parse_args(self.params)

        except ValueError as error:
            self.result['msg'] = error.args
            self.result['rc'] = 1
            self.module.fail_json(**self.result)
Code Example #16
def parse_and_validate_args(params):
    params = fix_old_size_arg(params)

    arg_defs = dict(
        # Used for batch data set args
        batch=dict(
            type="list",
            elements="dict",
            options=dict(
                name=dict(
                    type=data_set_name,
                    default=data_set_name,
                    dependencies=["type", "state"],
                ),
                state=dict(
                    type="str",
                    default="present",
                    choices=["present", "absent", "cataloged", "uncataloged"],
                ),
                type=dict(type=data_set_type,
                          required=False,
                          dependencies=["state"]),
                space_type=dict(type=space_type,
                                required=False,
                                dependencies=["state"]),
                space_primary=dict(type="int",
                                   required=False,
                                   dependencies=["state"]),
                space_secondary=dict(type="int",
                                     required=False,
                                     dependencies=["state"]),
                record_format=dict(
                    type=record_format,
                    required=False,
                    dependencies=["state"],
                    aliases=["format"],
                ),
                sms_management_class=dict(type=sms_class,
                                          required=False,
                                          dependencies=["state"]),
                # I know this alias is odd, ZOAU used to document they supported
                # SMS data class when they were actually passing as storage class
                # support for backwards compatibility with previous module versions
                sms_storage_class=dict(
                    type=sms_class,
                    required=False,
                    dependencies=["state"],
                    aliases=["data_class"],
                ),
                sms_data_class=dict(type=sms_class,
                                    required=False,
                                    dependencies=["state"]),
                block_size=dict(
                    type=valid_when_state_present,
                    required=False,
                    dependencies=["state"],
                ),
                directory_blocks=dict(
                    type=valid_when_state_present,
                    required=False,
                    dependencies=["state"],
                ),
                record_length=dict(
                    type=record_length,
                    required=False,
                    dependencies=["state", "record_format"],
                ),
                key_offset=dict(
                    type=valid_when_state_present,
                    required=False,
                    dependencies=["state", "type", "key_length"],
                ),
                key_length=dict(
                    type=valid_when_state_present,
                    required=False,
                    dependencies=["state", "type"],
                ),
                replace=dict(
                    type="bool",
                    default=False,
                ),
                volumes=dict(
                    type=volumes,
                    required=False,
                    aliases=["volume"],
                    dependencies=["state"],
                ),
            ),
        ),
        # For individual data set args
        name=dict(
            type=data_set_name,
            default=data_set_name,
            required=False,
            dependencies=["type", "state", "batch"],
        ),
        state=dict(
            type="str",
            default="present",
            choices=["present", "absent", "cataloged", "uncataloged"],
        ),
        type=dict(type=data_set_type, required=False, dependencies=["state"]),
        space_type=dict(type=space_type,
                        required=False,
                        dependencies=["state"]),
        space_primary=dict(type="int", required=False, dependencies=["state"]),
        space_secondary=dict(type="int",
                             required=False,
                             dependencies=["state"]),
        record_format=dict(
            type=record_format,
            required=False,
            dependencies=["state"],
            aliases=["format"],
        ),
        sms_management_class=dict(type=sms_class,
                                  required=False,
                                  dependencies=["state"]),
        # I know this alias is odd, ZOAU used to document they supported
        # SMS data class when they were actually passing as storage class
        # support for backwards compatibility with previous module versions
        sms_storage_class=dict(
            type=sms_class,
            required=False,
            dependencies=["state"],
            aliases=["data_class"],
        ),
        sms_data_class=dict(type=sms_class,
                            required=False,
                            dependencies=["state"]),
        block_size=dict(
            type=valid_when_state_present,
            required=False,
            dependencies=["state"],
        ),
        directory_blocks=dict(
            type=valid_when_state_present,
            required=False,
            dependencies=["state"],
        ),
        record_length=dict(
            type=record_length,
            required=False,
            dependencies=["state", "record_format"],
        ),
        key_offset=dict(type=valid_when_state_present, required=False),
        key_length=dict(type=valid_when_state_present, required=False),
        replace=dict(
            type="bool",
            default=False,
        ),
        volumes=dict(
            type=volumes,
            required=False,
            aliases=["volume"],
            dependencies=["state"],
        ),
        mutually_exclusive=[
            ["batch", "name"],
            # ["batch", "state"],
            # ["batch", "space_type"],
            ["batch", "space_primary"],
            ["batch", "space_secondary"],
            ["batch", "record_format"],
            ["batch", "sms_management_class"],
            ["batch", "sms_storage_class"],
            ["batch", "sms_data_class"],
            ["batch", "block_size"],
            ["batch", "record_length"],
            ["batch", "key_offset"],
            ["batch", "key_length"],
            # ["batch", "replace"],
            ["batch", "volumes"],
        ],
    )
    parser = BetterArgParser(arg_defs)
    parsed_args = parser.parse_args(params)
    parsed_args = {
        key: value
        for key, value in parsed_args.items() if value is not None
    }
    return parsed_args
Code Example #17
def run_module():
    module_args = dict(
        src=dict(type="str", required=True),
        wait=dict(type="bool", required=False),
        location=dict(
            type="str",
            default="DATA_SET",
            choices=["DATA_SET", "USS", "LOCAL"],
        ),
        encoding=dict(type="dict", required=False),
        volume=dict(type="str", required=False),
        return_output=dict(type="bool", required=False, default=True),
        wait_time_s=dict(type="int", default=60),
        max_rc=dict(type="int", required=False),
        temp_file=dict(type="path", required=False),
    )

    module = AnsibleModule(argument_spec=module_args, supports_check_mode=True)
    encoding = module.params.get("encoding")
    if encoding is None:
        encoding = {
            "from": Defaults.DEFAULT_ASCII_CHARSET,
            "to": Defaults.get_default_system_charset(),
        }
    if encoding.get("from") is None:
        encoding["from"] = Defaults.DEFAULT_ASCII_CHARSET
    if encoding.get("to") is None:
        encoding["to"] = Defaults.get_default_system_charset()

    arg_defs = dict(
        src=dict(arg_type="data_set_or_path", required=True),
        wait=dict(arg_type="bool", required=False),
        location=dict(
            arg_type="str",
            default="DATA_SET",
            choices=["DATA_SET", "USS", "LOCAL"],
        ),
        from_encoding=dict(arg_type="encoding",
                           default=Defaults.DEFAULT_ASCII_CHARSET),
        to_encoding=dict(arg_type="encoding",
                         default=Defaults.DEFAULT_EBCDIC_USS_CHARSET),
        volume=dict(arg_type="volume", required=False),
        return_output=dict(arg_type="bool", default=True),
        wait_time_s=dict(arg_type="int", required=False, default=60),
        max_rc=dict(arg_type="int", required=False),
        temp_file=dict(arg_type="path", required=False),
    )

    result = dict(changed=False)
    module.params.update(
        dict(
            from_encoding=encoding.get("from"),
            to_encoding=encoding.get("to"),
        ))
    try:
        parser = BetterArgParser(arg_defs)
        parsed_args = parser.parse_args(module.params)
    except ValueError as err:
        module.fail_json(msg=str(err), **result)

    location = parsed_args.get("location")
    volume = parsed_args.get("volume")
    wait = parsed_args.get("wait")
    src = parsed_args.get("src")
    return_output = parsed_args.get("return_output")
    wait_time_s = parsed_args.get("wait_time_s")
    max_rc = parsed_args.get("max_rc")
    # get temporary file names for copied files
    temp_file = parsed_args.get("temp_file")
    if temp_file:
        temp_file_2 = NamedTemporaryFile(delete=True)

    if wait_time_s <= 0:
        module.fail_json(
            msg=
            "The option wait_time_s is not valid it just be greater than 0.",
            **result)

    DSN_REGEX = r"^(?:(?:[A-Z$#@]{1}[A-Z0-9$#@-]{0,7})(?:[.]{1})){1,21}[A-Z$#@]{1}[A-Z0-9$#@-]{0,7}(?:\([A-Z$#@]{1}[A-Z0-9$#@]{0,7}\)){0,1}$"
    try:
        if location == "DATA_SET":
            data_set_name_pattern = re.compile(DSN_REGEX, re.IGNORECASE)
            check = data_set_name_pattern.fullmatch(src)
            if check:
                if volume is None or volume == "":
                    jobId = submit_pds_jcl(src, module)
                else:
                    jobId = submit_jcl_in_volume(src, volume, module)
            else:
                module.fail_json(
                    msg=
                    "The parameter src for data set is not a valid name pattern. Please check the src input.",
                    **result)
        elif location == "USS":
            jobId = submit_uss_jcl(src, module)
        else:
            # For local file, it has been copied to the temp directory in action plugin.
            from_encoding = encoding.get("from")
            to_encoding = encoding.get("to")
            (conv_rc, stdout, stderr) = module.run_command(
                "iconv -f {0} -t {1} {2} > {3}".format(
                    from_encoding,
                    to_encoding,
                    quote(temp_file),
                    quote(temp_file_2.name),
                ),
                use_unsafe_shell=True,
            )
            if conv_rc == 0:
                jobId = submit_uss_jcl(temp_file_2.name, module)
            else:
                module.fail_json(
                    msg=
                    "The Local file encoding conversion failed. Please check the source file."
                    + (stderr or ""),
                    **result)
    except SubmitJCLError as e:
        module.fail_json(msg=repr(e), **result)
    if jobId is None or jobId == "":
        result["job_id"] = jobId
        module.fail_json(
            msg=
            "JOB ID RETURNED IS None. PLEASE CHECK WHETHER THE JCL IS CORRECT.",
            **result)

    result["job_id"] = jobId
    duration = 0
    if wait is True:
        # calculate the job elapsed time
        try:
            waitJob = query_jobs_status(module, jobId)
            job_msg = waitJob[0].get("ret_code").get("msg")
        except SubmitJCLError as e:
            module.fail_json(msg=repr(e), **result)
        # while (job_msg.startswith("CC") or job_msg.startswith("ABEND")) is False:
        while not re.search(
                "^(?:{0})".format("|".join(JOB_COMPLETION_MESSAGES)), job_msg):
            sleep(1)
            duration = duration + 1
            waitJob = job_output(job_id=jobId)
            job_msg = waitJob[0].get("ret_code").get("msg")
            if re.search("^(?:{0})".format("|".join(JOB_COMPLETION_MESSAGES)),
                         job_msg):
                break
            if duration == wait_time_s:  # Long running task. timeout return
                break

    try:
        result = get_job_info(module, jobId, return_output)
        if wait is True and return_output is True and max_rc is not None:
            assert_valid_return_code(
                max_rc,
                result.get("jobs")[0].get("ret_code").get("code"))
    except SubmitJCLError as e:
        module.fail_json(msg=repr(e), **result)
    except Exception as e:
        module.fail_json(msg=repr(e), **result)
    finally:
        if temp_file:
            remove(temp_file)
    result["duration"] = duration
    if duration == wait_time_s:
        result["message"] = {
            "stdout":
            "Submit JCL operation succeeded but it is a long running job. Timeout is "
            + str(wait_time_s) + " seconds."
        }
    else:
        result["message"] = {"stdout": "Submit JCL operation succeeded."}
    result["changed"] = True
    module.exit_json(**result)
Code Example #18
    def validate_purge_input(self):

        self._validate_common_input()

        try:
            module_defs = dict(
                mode=dict(arg_type="str",
                          required=True,
                          choices=['ANALYSIS', 'PURGE', 'BOTH']),
                delete_dbd_by_version=dict(
                    arg_type="list",
                    elements="dict",
                    required=False,
                    options=dict(member_name=dict(arg_type="str",
                                                  required=True),
                                 version_number=dict(arg_type="int",
                                                     required=True))),
                update_retention_criteria=dict(
                    arg_type="list",
                    elements="dict",
                    required=False,
                    options=dict(resource=dict(arg_type="str",
                                               required=True,
                                               choices=['DBD', 'PSB']),
                                 member_name=dict(arg_type="str",
                                                  required=True),
                                 instances=dict(arg_type="int", required=True),
                                 days=dict(arg_type="int", required=False)),
                ),
                delete=dict(arg_type="list",
                            elements="dict",
                            required=False,
                            options=dict(resource=dict(arg_type="str",
                                                       required=True,
                                                       choices=['DBD', 'PSB']),
                                         member_name=dict(arg_type="str",
                                                          required=True),
                                         time_stamp=dict(arg_type="str",
                                                         required=True))),
                managed_acbs=dict(arg_type="bool", required=False),
                resource_chkp_freq=dict(arg_type="int", required=False),
                sysut1=dict(
                    arg_type="dict",
                    options=dict(
                        dataset_name=dict(arg_type="data_set", required=True),
                        disposition=dict(arg_type="str",
                                         required=False,
                                         choices=['EXCL', 'OLD', 'SHR',
                                                  'NEW']),
                        primary=dict(arg_type="int", required=False),
                        primary_unit=dict(arg_type="str",
                                          required=False,
                                          choices=[
                                              'K', 'KB', 'M', 'MB', 'G', 'GB',
                                              'C', 'CYL', 'T', 'TRK'
                                          ]),
                        secondary=dict(arg_type="int", required=False),
                        secondary_unit=dict(arg_type="str",
                                            required=False,
                                            choices=[
                                                'K', 'KB', 'M', 'MB', 'G',
                                                'GB', 'C', 'CYL', 'T', 'TRK'
                                            ]),
                        normal_disposition=dict(arg_type="str",
                                                required=False,
                                                choices=[
                                                    'KEEP', 'DELETE', 'CATLG',
                                                    'CATALOG', 'UNCATLG'
                                                ]),
                        conditional_disposition=dict(arg_type="str",
                                                     required=False,
                                                     choices=[
                                                         'KEEP', 'DELETE',
                                                         'CATLG', 'CATALOG',
                                                         'UNCATLG'
                                                     ]),
                        block_size=dict(arg_type="int", required=False),
                        type=dict(arg_type="str",
                                  required=False,
                                  choices=[
                                      'SEQ', 'BASIC', 'LARGE', 'PDS', 'PDSE',
                                      'LIBRARY', 'LDS', 'RRDS', 'ESDS', 'KSDS'
                                  ]),
                        volumes=dict(type="list",
                                     required=False,
                                     elements="str"),
                        storage_class=dict(type="str", required=False),
                        management_class=dict(type="str", required=False),
                        data_class=dict(type="str", required=False)),
                    required=False))

            parser = BetterArgParser(module_defs)
            self.parsed_args.update(parser.parse_args(self.params))

            if self.parsed_args.get(
                    "mode") == "ANALYSIS" or self.parsed_args.get(
                        "mode") == "BOTH":
                if self.parsed_args.get("delete") is not None:
                    self.result[
                        'msg'] = "Cannot specify delete parameters with 'ANALYSIS' or 'BOTH' mode"
                    self.result['rc'] = 1
                    self.module.fail_json(**self.result)

            if self.parsed_args.get("mode") == "PURGE":
                if self.parsed_args.get(
                        "update_retention_criteria") is not None:
                    self.result[
                        'msg'] = "Cannot specify update_retention_criteria parameter with 'PURGE' mode"
                    self.result['rc'] = 1
                    self.module.fail_json(**self.result)

        except ValueError as error:
            self.result['msg'] = error.args
            self.result['rc'] = 1
            self.module.fail_json(**self.result)

        return self.parsed_args
Code Example #19
    def validate_populate_input(self):

        self._validate_common_input()

        try:
            module_defs = dict(
                modstat=dict(arg_type="data_set", required=False),
                mode=dict(arg_type="str",
                          required=True,
                          choices=['LOAD', 'UPDATE', 'READ']),
                check_timestamp=dict(arg_type="bool", required=False),
                secondary_log_dataset=dict(
                    arg_type="dict",
                    options=dict(
                        dataset_name=dict(arg_type="data_set", required=True),
                        disposition=dict(arg_type="str",
                                         required=False,
                                         choices=['EXCL', 'OLD', 'SHR',
                                                  'NEW']),
                        primary=dict(arg_type="int", required=False),
                        primary_unit=dict(arg_type="str",
                                          required=False,
                                          choices=[
                                              'K', 'KB', 'M', 'MB', 'G', 'GB',
                                              'C', 'CYL', 'T', 'TRK'
                                          ]),
                        secondary=dict(arg_type="int", required=False),
                        secondary_unit=dict(arg_type="str",
                                            required=False,
                                            choices=[
                                                'K', 'KB', 'M', 'MB', 'G',
                                                'GB', 'C', 'CYL', 'T', 'TRK'
                                            ]),
                        normal_disposition=dict(arg_type="str",
                                                required=False,
                                                choices=[
                                                    'KEEP', 'DELETE', 'CATLG',
                                                    'CATALOG', 'UNCATLG'
                                                ]),
                        conditional_disposition=dict(arg_type="str",
                                                     required=False,
                                                     choices=[
                                                         'KEEP', 'DELETE',
                                                         'CATLG', 'CATALOG',
                                                         'UNCATLG'
                                                     ]),
                        record_format=dict(
                            arg_type="str",
                            required=False,
                            choices=['FB', 'VB', 'FBA', 'VBA', 'U']),
                        record_length=dict(arg_type="int", required=False),
                        block_size=dict(arg_type="int", required=False),
                        type=dict(arg_type="str",
                                  required=False,
                                  choices=[
                                      'SEQ', 'BASIC', 'LARGE', 'PDS', 'PDSE',
                                      'LIBRARY', 'LDS', 'RRDS', 'ESDS', 'KSDS'
                                  ]),
                        volumes=dict(type="list",
                                     required=False,
                                     elements="str"),
                        storage_class=dict(type="str", required=False),
                        management_class=dict(type="str", required=False),
                        data_class=dict(type="str", required=False)),
                    required=False),
                acb_lib=dict(arg_type="list",
                             elements="data_set",
                             required=True),
                bootstrap_dataset=dict(
                    arg_type="dict",
                    required=False,
                    options=dict(
                        dataset_name=dict(arg_type="data_set", required=True),
                        disposition=dict(arg_type="str",
                                         required=False,
                                         choices=['EXCL', 'OLD', 'SHR',
                                                  'NEW']),
                        block_size=dict(arg_type="int", required=False),
                        primary=dict(arg_type="int", required=False),
                        primary_unit=dict(arg_type="str",
                                          required=False,
                                          choices=[
                                              'K', 'KB', 'M', 'MB', 'G', 'GB',
                                              'C', 'CYL', 'T', 'TRK'
                                          ]),
                        secondary=dict(arg_type="int", required=False),
                        secondary_unit=dict(arg_type="str",
                                            required=False,
                                            choices=[
                                                'K', 'KB', 'M', 'MB', 'G',
                                                'GB', 'C', 'CYL', 'T', 'TRK'
                                            ]),
                        normal_disposition=dict(arg_type="str",
                                                required=False,
                                                choices=[
                                                    'KEEP', 'DELETE', 'CATLG',
                                                    'CATALOG', 'UNCATLG'
                                                ]),
                        conditional_disposition=dict(arg_type="str",
                                                     required=False,
                                                     choices=[
                                                         'KEEP', 'DELETE',
                                                         'CATLG', 'CATALOG',
                                                         'UNCATLG'
                                                     ]),
                        storage_class=dict(type="str", required=False),
                        management_class=dict(type="str", required=False),
                        data_class=dict(type="str", required=False),
                        volumes=dict(type="list",
                                     required=False,
                                     elements="str"))),
                directory_datasets=dict(
                    arg_type="list",
                    elements="dict",
                    required=False,
                    options=dict(
                        dataset_name=dict(arg_type="data_set", required=True),
                        disposition=dict(arg_type="str",
                                         required=False,
                                         choices=['EXCL', 'OLD', 'SHR',
                                                  'NEW']),
                        primary=dict(arg_type="int", required=False),
                        primary_unit=dict(arg_type="str",
                                          required=False,
                                          choices=[
                                              'K', 'KB', 'M', 'MB', 'G', 'GB',
                                              'C', 'CYL', 'T', 'TRK'
                                          ]),
                        secondary=dict(arg_type="int", required=False),
                        secondary_unit=dict(arg_type="str",
                                            required=False,
                                            choices=[
                                                'K', 'KB', 'M', 'MB', 'G',
                                                'GB', 'C', 'CYL', 'T', 'TRK'
                                            ]),
                        normal_disposition=dict(arg_type="str",
                                                required=False,
                                                choices=[
                                                    'KEEP', 'DELETE', 'CATLG',
                                                    'CATALOG', 'UNCATLG'
                                                ]),
                        conditional_disposition=dict(arg_type="str",
                                                     required=False,
                                                     choices=[
                                                         'KEEP', 'DELETE',
                                                         'CATLG', 'CATALOG',
                                                         'UNCATLG'
                                                     ]),
                        storage_class=dict(arg_type="str", required=False),
                        management_class=dict(arg_type="str", required=False),
                        data_class=dict(arg_type="str", required=False),
                        volumes=dict(arg_type="list",
                                     required=False,
                                     elements="str"))),
                temp_acb_dataset=dict(
                    arg_type="dict",
                    required=False,
                    options=dict(dataset_name=dict(arg_type="data_set",
                                                   required=True),
                                 disposition=dict(
                                     arg_type="str",
                                     required=False,
                                     choices=['EXCL', 'OLD', 'SHR', 'NEW']),
                                 primary=dict(arg_type="int", required=False),
                                 primary_unit=dict(arg_type="str",
                                                   required=False,
                                                   choices=[
                                                       'K', 'KB', 'M', 'MB',
                                                       'G', 'GB', 'C', 'CYL',
                                                       'T', 'TRK'
                                                   ]),
                                 secondary=dict(arg_type="int",
                                                required=False),
                                 secondary_unit=dict(arg_type="str",
                                                     required=False,
                                                     choices=[
                                                         'K', 'KB', 'M', 'MB',
                                                         'G', 'GB', 'C', 'CYL',
                                                         'T', 'TRK'
                                                     ]),
                                 normal_disposition=dict(arg_type="str",
                                                         required=False,
                                                         choices=[
                                                             'KEEP', 'DELETE',
                                                             'CATLG',
                                                             'CATALOG',
                                                             'UNCATLG'
                                                         ]),
                                 conditional_disposition=dict(arg_type="str",
                                                              required=False,
                                                              choices=[
                                                                  'KEEP',
                                                                  'DELETE',
                                                                  'CATLG',
                                                                  'CATALOG',
                                                                  'UNCATLG'
                                                              ]),
                                 volumes=dict(arg_type="list",
                                              required=False,
                                              elements="str"),
                                 storage_class=dict(arg_type="str",
                                                    required=False),
                                 management_class=dict(arg_type="str",
                                                       required=False),
                                 data_class=dict(arg_type="str",
                                                 required=False))),
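                # directory_staging_dataset: optional single data set with the
                # same allocation options as each directory_datasets entry.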
                directory_staging_dataset=dict(
                    arg_type="dict",
                    required=False,
                    options=dict(
                        dataset_name=dict(arg_type="data_set", required=True),
                        disposition=dict(arg_type="str",
                                         required=False,
                                         choices=['EXCL', 'OLD', 'SHR',
                                                  'NEW']),
                        primary=dict(arg_type="int", required=False),
                        primary_unit=dict(arg_type="str",
                                          required=False,
                                          choices=[
                                              'K', 'KB', 'M', 'MB', 'G', 'GB',
                                              'C', 'CYL', 'T', 'TRK'
                                          ]),
                        secondary=dict(arg_type="int", required=False),
                        secondary_unit=dict(arg_type="str",
                                            required=False,
                                            choices=[
                                                'K', 'KB', 'M', 'MB', 'G',
                                                'GB', 'C', 'CYL', 'T', 'TRK'
                                            ]),
                        normal_disposition=dict(arg_type="str",
                                                required=False,
                                                choices=[
                                                    'KEEP', 'DELETE', 'CATLG',
                                                    'CATALOG', 'UNCATLG'
                                                ]),
                        conditional_disposition=dict(arg_type="str",
                                                     required=False,
                                                     choices=[
                                                         'KEEP', 'DELETE',
                                                         'CATLG', 'CATALOG',
                                                         'UNCATLG'
                                                     ]),
                        storage_class=dict(arg_type="str", required=False),
                        management_class=dict(arg_type="str", required=False),
                        data_class=dict(arg_type="str", required=False),
                        volumes=dict(arg_type="list",
                                     required=False,
                                     elements="str"))),
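                # sysabend: optional data set for SYSABEND dump output.
                # control_statements: optional utility tuning flags plus the
                # nested managed_acbs setup/stage/update options.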
                sysabend=dict(arg_type="data_set", required=False),
                control_statements=dict(
                    arg_type="dict",
                    options=dict(
                        print_duplicate_resources=dict(
                            arg_type="bool",
                            required=False),
                        max_error_msgs=dict(arg_type="int", required=False),
                        resource_chkp_freq=dict(arg_type="int",
                                                required=False),
                        segment_chkp_freq=dict(arg_type="int", required=False),
                        print_inserted_resources=dict(
                            arg_type="bool",
                            required=False),
                        managed_acbs=dict(
                            arg_type="dict",
                            required=False,
                            options=dict(
                                setup=dict(arg_type="bool", required=False),
                                stage=dict(arg_type="dict",
                                           required=False,
                                           options=dict(
                                               save_acb=dict(arg_type="str",
                                                             required=False,
                                                             choices=[
                                                                 'LATEST',
                                                                 'UNCOND'
                                                             ]),
                                               clean_staging_dataset=dict(
                                                   arg_type="bool",
                                                   required=False,
                                                   default=False),
                                               gsampcb=dict(arg_type="bool",
                                                            required=False,
                                                            default=False),
                                               gsamdbd=dict(arg_type="str",
                                                            required=False))),
                                update=dict(
                                    arg_type="dict",
                                    required=False,
                                    options=dict(
                                        replace_acb=dict(
                                            arg_type="str",
                                            required=False,
                                            choices=['LATEST', 'UNCOND']),
                                        share_mode=dict(arg_type="bool",
                                                        required=False,
                                                        default=False),
                                        gsampcb=dict(arg_type="bool",
                                                     required=False,
                                                     default=False),
                                        gsamdbd=dict(arg_type="str",
                                                     required=False)))))),
                    required=False))

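            # Run the raw module parameters through BetterArgParser; parse_args
            # validates each field and the parsed values are merged into
            # self.parsed_args.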
            parser = BetterArgParser(module_defs)
            self.parsed_args.update(parser.parse_args(self.params))

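            # When directory data sets are supplied, run the extra directory
            # staging data set validation.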
            if self.parsed_args.get('directory_datasets') is not None:
                self.directory_datasets = self.parsed_args.get(
                    'directory_datasets')
                self._validate_directory_staging_dataset()

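            # Remaining validations cover the optional data sets and the ACB
            # mode.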
            self._validate_optional_datasets()
            self._validate_acb_mode()

        except ValueError as error:
            self.result['msg'] = repr(error)
            self.result['rc'] = 1
            self.module.fail_json(**self.result)

        return self.parsed_args
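

# --- Usage sketch (not part of the module above) ---------------------------
# A minimal, self-contained illustration of how BetterArgParser handles the
# kind of nested definition built above: a list of data sets plus a dict
# option with its own nested options. Assumptions: the import path follows
# the standard ibm_zos_core collection layout, and the option names below
# (acb_lib, bootstrap_dataset, ...) are reduced stand-ins rather than the
# module's full argument spec.
from ansible_collections.ibm.ibm_zos_core.plugins.module_utils.better_arg_parser import (
    BetterArgParser,
)


def sketch_parse(params):
    arg_defs = dict(
        acb_lib=dict(arg_type="list", elements="data_set", required=True),
        bootstrap_dataset=dict(
            arg_type="dict",
            required=False,
            options=dict(
                dataset_name=dict(arg_type="data_set", required=True),
                disposition=dict(arg_type="str",
                                 required=False,
                                 choices=['EXCL', 'OLD', 'SHR', 'NEW']),
                primary=dict(arg_type="int", required=False),
            ),
        ),
    )
    parser = BetterArgParser(arg_defs)
    # parse_args checks types, choices and nested options, raising ValueError
    # on a validation failure (handled above by turning it into fail_json).
    return parser.parse_args(params)


if __name__ == '__main__':
    parsed = sketch_parse({
        "acb_lib": ["IMS1.ACBLIB.A", "IMS1.ACBLIB.B"],
        "bootstrap_dataset": {
            "dataset_name": "IMS1.BOOTSTRP",
            "disposition": "SHR",
            "primary": 5,
        },
    })
    print(parsed.get("bootstrap_dataset"))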