Ejemplo n.º 1
0
def check_mvs_dataset(ds):
    """Verify that an MVS data set exists and determine its type.

    Arguments:
        ds {str} -- Name of the MVS data set to check.

    Raises:
        EncodeError: If the data set is not cataloged, or its type
            cannot be determined.

    Returns:
        tuple -- (True, data set type string) when the data set exists.
    """
    check_rc = False
    ds_type = None
    if not data_set.DataSet.data_set_exists(ds):
        # Fixed: the two adjacent string literals previously concatenated
        # without a separating space ("...provided inthe src option.").
        raise EncodeError(
            "Data set {0} is not cataloged, please check data set provided in "
            "the src option.".format(ds))
    else:
        check_rc = True
        ds_type = data_set.DataSetUtils(ds).ds_type()
        if not ds_type:
            raise EncodeError(
                "Unable to determine data set type of {0}".format(ds))
    return check_rc, ds_type
Ejemplo n.º 2
0
def backupOper(module, src, backup):
    """Create a backup of a USS file or MVS data set.

    Arguments:
        module {AnsibleModule} -- Module object, used to report failures.
        src {str} -- USS file path or MVS data set name to back up.
        backup {bool or str} -- True to let the Backup module pick a name,
            or a non-empty string naming the backup destination.

    Returns:
        str -- Name of the created backup.
    """
    # Determine whether src is a USS file or a supported MVS data set type.
    file_type = data_set.DataSetUtils(src).ds_type()
    if file_type != 'USS' and file_type not in DS_TYPE:
        message = "{0} data set type is NOT supported".format(str(file_type))
        module.fail_json(msg=message)

    # backup can be True(bool) or none-zero length string. string indicates that backup_name was provided.
    # setting backup to None if backup_name wasn't provided. if backup=None, Backup module will use
    # pre-defined naming scheme and return the created destination name.
    if isinstance(backup, bool):
        backup = None
    try:
        if file_type == 'USS':
            backup_name = Backup.uss_file_backup(
                src, backup_name=backup, compress=False)
        else:
            backup_name = Backup.mvs_file_backup(dsn=src, bk_dsn=backup)
    except Exception:
        module.fail_json(msg="creating backup has failed")

    return backup_name
Ejemplo n.º 3
0
def run_module():
    """Fetch a z/OS resource (USS file, sequential/partitioned data set,
    PDS(E) member, or VSAM data set) for transfer to the controller.

    Validates module parameters, determines the source's type, then
    dispatches to the matching FetchHandler method. Exits via
    module.exit_json with:
        remote_path -- Remote path holding the fetched data.
        file -- The resolved data set / file name.
        ds_type -- Detected source type ("PS", "PO", "USS", "VSAM").
    """
    # ********************************************************** #
    #                Module initialization                       #
    # ********************************************************** #
    module = AnsibleModule(argument_spec=dict(
        src=dict(required=True, type="str"),
        dest=dict(required=True, type="path"),
        fail_on_missing=dict(required=False, default=True, type="bool"),
        flat=dict(required=False, default=True, type="bool"),
        is_binary=dict(required=False, default=False, type="bool"),
        use_qualifier=dict(required=False, default=False, type="bool"),
        validate_checksum=dict(required=False, default=True, type="bool"),
        encoding=dict(required=False, type="dict"),
        sftp_port=dict(type="int", required=False),
        ignore_sftp_stderr=dict(type="bool", default=False, required=False),
        local_charset=dict(type="str"),
    ))

    src = module.params.get("src")
    # Prepend the default high level qualifier when use_qualifier=true.
    # Note: module.params is mutated in place so later parsing sees the
    # qualified name.
    if module.params.get("use_qualifier"):
        module.params["src"] = datasets.hlq() + "." + src

    # ********************************************************** #
    #                   Verify parameter validity                #
    # ********************************************************** #

    arg_def = dict(
        src=dict(arg_type="data_set_or_path", required=True),
        dest=dict(arg_type="path", required=True),
        fail_on_missing=dict(arg_type="bool", required=False, default=True),
        is_binary=dict(arg_type="bool", required=False, default=False),
        use_qualifier=dict(arg_type="bool", required=False, default=False),
    )

    # No explicit encoding and not a binary transfer: default the
    # conversion pair. MVS sources are assumed EBCDIC; USS sources use the
    # remote system's charset. The target is the controller's charset
    # (passed in as local_charset).
    if not module.params.get("encoding") and not module.params.get(
            "is_binary"):
        mvs_src = data_set.is_data_set(src)
        remote_charset = encode.Defaults.get_default_system_charset()

        module.params["encoding"] = {
            "from": encode.Defaults.DEFAULT_EBCDIC_MVS_CHARSET
            if mvs_src else remote_charset,
            "to": module.params.get("local_charset"),
        }

    # Flatten the encoding dict into from_encoding/to_encoding so the
    # argument parser can validate each half individually.
    if module.params.get("encoding"):
        module.params.update(
            dict(
                from_encoding=module.params.get("encoding").get("from"),
                to_encoding=module.params.get("encoding").get("to"),
            ))
        arg_def.update(
            dict(
                from_encoding=dict(arg_type="encoding"),
                to_encoding=dict(arg_type="encoding"),
            ))

    fetch_handler = FetchHandler(module)
    try:
        parser = better_arg_parser.BetterArgParser(arg_def)
        parsed_args = parser.parse_args(module.params)
    except ValueError as err:
        module.fail_json(msg="Parameter verification failed", stderr=str(err))
    src = parsed_args.get("src")
    b_src = to_bytes(src)
    fail_on_missing = boolean(parsed_args.get("fail_on_missing"))
    is_binary = boolean(parsed_args.get("is_binary"))
    encoding = module.params.get("encoding")

    # ********************************************************** #
    #  Check for data set existence and determine its type       #
    # ********************************************************** #

    res_args = dict()
    # "DSN(MEMBER)" syntax means a single PDS(E) member is requested;
    # strip the member part to get the data set name for existence checks.
    _fetch_member = "(" in src and src.endswith(")")
    ds_name = src if not _fetch_member else src[:src.find("(")]
    try:
        ds_utils = data_set.DataSetUtils(ds_name)
        if not ds_utils.exists():
            if fail_on_missing:
                module.fail_json(msg=("The source '{0}' does not exist or is "
                                      "uncataloged".format(ds_name)))
            # fail_on_missing=false: exit cleanly with a note instead.
            module.exit_json(
                note=("Source '{0}' was not found. No data was fetched".format(
                    ds_name)))
        ds_type = ds_utils.ds_type()
        if not ds_type:
            module.fail_json(msg="Unable to determine data set type")

    except Exception as err:
        module.fail_json(msg="Error while gathering data set information",
                         stderr=str(err))

    # ********************************************************** #
    #                  Fetch a sequential data set               #
    # ********************************************************** #

    if ds_type == "PS":
        file_path = fetch_handler._fetch_mvs_data(src, is_binary, encoding)
        res_args["remote_path"] = file_path

    # ********************************************************** #
    #    Fetch a partitioned data set or one of its members      #
    # ********************************************************** #

    elif ds_type == "PO":
        if _fetch_member:
            # Extract MEMBER from "DSN(MEMBER)" and verify it exists
            # before attempting the fetch.
            member_name = src[src.find("(") + 1:src.find(")")]
            if not ds_utils.member_exists(member_name):
                module.fail_json(
                    msg=("The data set member '{0}' was not found inside data "
                         "set '{1}'").format(member_name, ds_name))
            file_path = fetch_handler._fetch_mvs_data(src, is_binary, encoding)
            res_args["remote_path"] = file_path
        else:
            res_args["remote_path"] = fetch_handler._fetch_pdse(
                src, is_binary, encoding)

    # ********************************************************** #
    #                  Fetch a USS file                          #
    # ********************************************************** #

    elif ds_type == "USS":
        if not os.access(b_src, os.R_OK):
            module.fail_json(
                msg="File '{0}' does not have appropriate read permission".
                format(src))
        file_path = fetch_handler._fetch_uss_file(src, is_binary, encoding)
        res_args["remote_path"] = file_path

    # ********************************************************** #
    #                  Fetch a VSAM data set                     #
    # ********************************************************** #

    elif ds_type == "VSAM":
        file_path = fetch_handler._fetch_vsam(src, is_binary, encoding)
        res_args["remote_path"] = file_path

    res_args["file"] = ds_name
    res_args["ds_type"] = ds_type
    module.exit_json(**res_args)
Ejemplo n.º 4
0
def main():
    """Insert, replace, or remove a line in a USS file or MVS data set.

    Validates parameters, optionally creates a backup, then runs the
    dsed-backed present()/absent() helpers and parses their JSON output.
    Exits via module.exit_json with changed/cmd/found (plus backup_name
    when a backup was made).
    """
    module_args = dict(
        src=dict(type='str',
                 aliases=['path', 'destfile', 'name'],
                 required=True),
        state=dict(type='str',
                   default='present',
                   choices=['absent', 'present']),
        regexp=dict(type='str'),
        line=dict(type='str'),
        insertafter=dict(type='str', ),
        insertbefore=dict(type='str', ),
        backrefs=dict(type='bool', default=False),
        backup=dict(type='bool', default=False),
        backup_name=dict(type='str', required=False, default=None),
        firstmatch=dict(type='bool', default=False),
        encoding=dict(type='str', default="IBM-1047"),
    )
    module = AnsibleModule(argument_spec=module_args, supports_check_mode=True)
    result = dict(changed=False, cmd='', found=0)

    # Second-pass validation with BetterArgParser (richer arg_types such as
    # data_set_or_path that AnsibleModule cannot check).
    arg_defs = dict(
        src=dict(arg_type="data_set_or_path",
                 aliases=['path', 'destfile', 'name'],
                 required=True),
        state=dict(arg_type="str",
                   default='present',
                   choices=['absent', 'present']),
        regexp=dict(arg_type="str", required=False),
        line=dict(arg_type="str", required=False),
        insertafter=dict(arg_type="str", required=False),
        insertbefore=dict(arg_type="str", required=False),
        encoding=dict(arg_type="str", default="IBM-1047", required=False),
        backup=dict(arg_type="bool", default=False, required=False),
        backup_name=dict(arg_type="data_set_or_path",
                         required=False,
                         default=None),
        firstmatch=dict(arg_type="bool", required=False, default=False),
        backrefs=dict(arg_type="bool",
                      dependencies=['regexp'],
                      required=False,
                      default=False),
        mutually_exclusive=[["insertbefore", "insertafter"]],
    )

    try:
        parser = better_arg_parser.BetterArgParser(arg_defs)
        parsed_args = parser.parse_args(module.params)
    except ValueError as err:
        module.fail_json(msg="Parameter verification failed", stderr=str(err))

    backup = parsed_args.get('backup')
    # if backup_name is provided, update backup variable
    # (backup then carries either True or the explicit name string).
    if parsed_args.get('backup_name') and backup:
        backup = parsed_args.get('backup_name')
    backrefs = parsed_args.get('backrefs')
    src = parsed_args.get('src')
    firstmatch = parsed_args.get('firstmatch')
    regexp = parsed_args.get('regexp')
    line = parsed_args.get('line')
    ins_aft = parsed_args.get('insertafter')
    ins_bef = parsed_args.get('insertbefore')
    encoding = parsed_args.get('encoding')

    # Cross-parameter rules that depend on state.
    if parsed_args.get('state') == 'present':
        if backrefs and regexp is None:
            module.fail_json(msg='regexp is required with backrefs=true')
        if line is None:
            module.fail_json(msg='line is required with state=present')
        # set the default to EOF, if regexp/insertafter/insertbefore are None
        if regexp is None and ins_aft is None and ins_bef is None:
            ins_aft = "EOF"
    else:
        if regexp is None and line is None:
            module.fail_json(
                msg='one of line or regexp is required with state=absent')

    # analysis the file type: file_type becomes 1 for USS, 0 for a
    # supported MVS data set type; anything else is rejected.
    ds_utils = data_set.DataSetUtils(src)
    file_type = ds_utils.ds_type()
    if file_type == 'USS':
        file_type = 1
    else:
        if file_type not in DS_TYPE:
            message = "{0} data set type is NOT supported".format(
                str(file_type))
            module.fail_json(msg=message)
        file_type = 0
    # make sure the default encoding is set if null was passed
    if not encoding:
        encoding = "IBM-1047"
    if backup:
        # backup can be True(bool) or none-zero length string. string indicates that backup_name was provided.
        # setting backup to None if backup_name wasn't provided. if backup=None, Backup module will use
        # pre-defined naming scheme and return the created destination name.
        if isinstance(backup, bool):
            backup = None
        try:
            if file_type:
                result['backup_name'] = Backup.uss_file_backup(
                    src, backup_name=backup, compress=False)
            else:
                result['backup_name'] = Backup.mvs_file_backup(dsn=src,
                                                               bk_dsn=backup)
        except Exception:
            module.fail_json(msg="creating backup has failed")
    # state=present, insert/replace a line with matching regex pattern
    # state=absent, delete lines with matching regex pattern
    # NOTE(review): quotedString presumably escapes quoting for the dsed
    # command line — confirm against its definition.
    if parsed_args.get('state') == 'present':
        return_content = present(src, quotedString(line), quotedString(regexp),
                                 quotedString(ins_aft), quotedString(ins_bef),
                                 encoding, firstmatch, backrefs)
    else:
        return_content = absent(src, quotedString(line), quotedString(regexp),
                                encoding)
    stdout = return_content.stdout_response
    stderr = return_content.stderr_response
    rc = return_content.rc
    try:
        # change the return string to be loadable by json.loads():
        # escape the sed-style backslash sequences dsed echoes back, then
        # re-quote any user-supplied values embedded in the output.
        stdout = stdout.replace('/c\\', '/c\\\\')
        stdout = stdout.replace('/a\\', '/a\\\\')
        stdout = stdout.replace('/i\\', '/i\\\\')
        stdout = stdout.replace('$ a\\', '$ a\\\\')
        stdout = stdout.replace('1 i\\', '1 i\\\\')
        if line:
            stdout = stdout.replace(line, quotedString(line))
        if regexp:
            stdout = stdout.replace(regexp, quotedString(regexp))
        if ins_aft:
            stdout = stdout.replace(ins_aft, quotedString(ins_aft))
        if ins_bef:
            stdout = stdout.replace(ins_bef, quotedString(ins_bef))
        # Try to extract information from return_content
        ret = json.loads(stdout)
        result['cmd'] = ret['cmd']
        result['changed'] = ret['changed']
        result['found'] = ret['found']
    except Exception:
        messageDict = dict(msg="dsed return content is NOT in json format",
                           stdout=str(stdout),
                           stderr=str(stderr),
                           rc=rc)
        if result.get('backup_name'):
            messageDict['backup_name'] = result['backup_name']
        module.fail_json(**messageDict)
    module.exit_json(**result)
Ejemplo n.º 5
0
def main():
    """Insert, replace, or remove a marked block of lines in a USS file
    or MVS data set.

    Validates parameters, optionally creates a backup, then runs the
    ZOAU dmod-backed present()/absent() helpers and parses their JSON
    output. Exits via module.exit_json with changed/cmd/found (plus
    backup_name when a backup was made).
    """
    module = AnsibleModule(
        argument_spec=dict(
            src=dict(type='str',
                     required=True,
                     aliases=['path', 'destfile', 'name']),
            state=dict(type='str',
                       default='present',
                       choices=['absent', 'present']),
            marker=dict(type='str', default='# {mark} ANSIBLE MANAGED BLOCK'),
            block=dict(type='str', default='', aliases=['content']),
            insertafter=dict(type='str'),
            insertbefore=dict(type='str'),
            marker_begin=dict(type='str', default='BEGIN'),
            marker_end=dict(type='str', default='END'),
            backup=dict(type='bool', default=False),
            backup_name=dict(type='str', required=False, default=None),
            encoding=dict(type='str', default='IBM-1047'),
        ),
        mutually_exclusive=[['insertbefore', 'insertafter']],
    )

    # Second-pass validation with BetterArgParser (richer arg_types such
    # as data_set_or_path that AnsibleModule cannot check).
    arg_defs = dict(
        src=dict(arg_type='data_set_or_path',
                 aliases=['path', 'destfile', 'name'],
                 required=True),
        state=dict(arg_type='str',
                   default='present',
                   choices=['absent', 'present']),
        marker=dict(arg_type='str',
                    default='# {mark} ANSIBLE MANAGED BLOCK',
                    required=False),
        block=dict(arg_type='str',
                   default='',
                   aliases=['content'],
                   required=False),
        insertafter=dict(arg_type='str', required=False),
        insertbefore=dict(arg_type='str', required=False),
        marker_begin=dict(arg_type='str', default='BEGIN', required=False),
        marker_end=dict(arg_type='str', default='END', required=False),
        encoding=dict(arg_type='str', default='IBM-1047', required=False),
        backup=dict(arg_type='bool', default=False, required=False),
        # Fixed: arg_type was misspelled 'data_set_or_pat', which would
        # prevent backup_name from being validated as a data set or path.
        backup_name=dict(arg_type='data_set_or_path',
                         required=False,
                         default=None),
        mutually_exclusive=[['insertbefore', 'insertafter']],
    )
    result = dict(changed=False, cmd='', found=0)
    try:
        parser = better_arg_parser.BetterArgParser(arg_defs)
        parsed_args = parser.parse_args(module.params)
    except ValueError as err:
        module.fail_json(msg="Parameter verification failed", stderr=str(err))

    backup = parsed_args.get('backup')
    # If an explicit backup_name was given, backup carries that string
    # instead of the boolean flag.
    if parsed_args.get('backup_name') and backup:
        backup = parsed_args.get('backup_name')
    src = parsed_args.get('src')
    ins_aft = parsed_args.get('insertafter')
    ins_bef = parsed_args.get('insertbefore')
    encoding = parsed_args.get('encoding')
    block = parsed_args.get('block')
    marker = parsed_args.get('marker')
    marker_begin = parsed_args.get('marker_begin')
    marker_end = parsed_args.get('marker_end')

    if not block and parsed_args.get('state') == 'present':
        module.fail_json(msg='block is required with state=present')
    # Fall back to the documented defaults if empty values were passed.
    if not marker:
        marker = '# {mark} ANSIBLE MANAGED BLOCK'
    if "{mark}" not in marker:
        module.fail_json(msg='marker should have {mark}')
    # make sure the default encoding is set if empty string was passed
    if not encoding:
        encoding = "IBM-1047"
    if not ins_aft and not ins_bef and parsed_args.get('state') == 'present':
        ins_aft = "EOF"
    if not marker_begin:
        marker_begin = 'BEGIN'
    if not marker_end:
        marker_end = 'END'

    # Combine begin/end/template markers and join the block lines using
    # literal "\n" sequences, the format the dmod helpers consume.
    marker = "{0}\\n{1}\\n{2}".format(marker_begin, marker_end, marker)
    blocklines = block.splitlines()
    block = '\\n'.join(blocklines)

    # analysis the file type: file_type becomes 1 for USS, 0 for a
    # supported MVS data set type; anything else is rejected.
    ds_utils = data_set.DataSetUtils(src)
    if not ds_utils.exists():
        message = "{0} does NOT exist".format(str(src))
        module.fail_json(msg=message)
    file_type = ds_utils.ds_type()
    if file_type == 'USS':
        file_type = 1
    else:
        if file_type not in DS_TYPE:
            message = "{0} data set type is NOT supported".format(
                str(file_type))
            module.fail_json(msg=message)
        file_type = 0

    if backup:
        # backup can be True(bool) or none-zero length string. string indicates that backup_name was provided.
        # setting backup to None if backup_name wasn't provided. if backup=None, Backup module will use
        # pre-defined naming scheme and return the created destination name.
        if isinstance(backup, bool):
            backup = None
        try:
            if file_type:
                result['backup_name'] = Backup.uss_file_backup(
                    src, backup_name=backup, compress=False)
            else:
                result['backup_name'] = Backup.mvs_file_backup(dsn=src,
                                                               bk_dsn=backup)
        except Exception:
            module.fail_json(msg="creating backup has failed")
    # state=present, insert/replace a block with matching regex pattern
    # state=absent, delete blocks with matching regex pattern
    if parsed_args.get('state') == 'present':
        return_content = present(src, quotedString(block),
                                 quotedString(marker), quotedString(ins_aft),
                                 quotedString(ins_bef), encoding)
    else:
        return_content = absent(src, quotedString(marker), encoding)
    stdout = return_content.stdout_response
    stderr = return_content.stderr_response
    rc = return_content.rc
    try:
        # change the return string to be loadable by json.loads():
        # escape the sed-style backslash sequences dmod echoes back, then
        # re-quote any user-supplied values embedded in the output.
        stdout = stdout.replace('/c\\', '/c\\\\')
        stdout = stdout.replace('/a\\', '/a\\\\')
        stdout = stdout.replace('/i\\', '/i\\\\')
        stdout = stdout.replace('$ a\\', '$ a\\\\')
        stdout = stdout.replace('1 i\\', '1 i\\\\')
        if block:
            stdout = stdout.replace(block, quotedString(block))
        if ins_aft:
            stdout = stdout.replace(ins_aft, quotedString(ins_aft))
        if ins_bef:
            stdout = stdout.replace(ins_bef, quotedString(ins_bef))
        # Try to extract information from stdout
        ret = json.loads(stdout)
        result['cmd'] = ret['cmd']
        result['changed'] = ret['changed']
        result['found'] = ret['found']
    except Exception:
        messageDict = dict(
            msg="ZOAU dmod return content is NOT in json format",
            stdout=str(stdout),
            stderr=str(stderr),
            rc=rc)
        if result.get('backup_name'):
            messageDict['backup_name'] = result['backup_name']
        module.fail_json(**messageDict)
    module.exit_json(**result)