Example #1
    def temp_data_set(self, reclen, space_u):
        """Creates a temporary data set with the given record length and size

        Arguments:
            size {str} -- The size of the data set
            lrecl {int} -- The record length of the data set

        Returns:
            str -- Name of the allocated data set

        Raises:
            OSError: When any exception is raised during the data set allocation
        """
        size = str(space_u * 2) + "K"
        hlq = datasets.hlq()
        temp_ps = datasets.tmp_name(hlq)
        response = datasets._create(
            name=temp_ps,
            type="SEQ",
            primary_space=size,
            record_format="VB",
            record_length=reclen,
        )
        if response.rc:
            raise OSError("Failed when allocating temporary sequential data set!")
        return temp_ps
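A hedged usage sketch follows. It assumes ZOAU's zoautil_py is available (as in the snippet above) and that "copy_handler" stands for whatever object defines temp_data_set; the names and values are illustrative only.

# Hedged usage sketch -- "copy_handler" and the argument values are illustrative.
from zoautil_py import datasets

temp_ps = copy_handler.temp_data_set(reclen=1028, space_u=1024)
try:
    # Stage some content in the temporary data set.
    datasets.write(dataset=temp_ps, content="staged content")
finally:
    # Remove the temporary data set once it is no longer needed.
    datasets.delete(temp_ps)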
Example #2
    def __init__(self):
        """VIO DD type to be used in a DDStatement.

        VIO uses DASD space and system I/O more efficiently than other temporary data sets.
        A temporary data set will be created for use in cases where VIO is unavailable.
        Defaults for VIODefinition should be sufficient.
        """
        hlq = datasets.hlq()
        name = datasets.tmp_name(hlq)
        super().__init__(name)
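A hedged sketch of attaching this definition to a DD statement; the DDStatement(name, definition) constructor shape is an assumption based on how DD helpers are used elsewhere in this collection.

# Hedged sketch -- treat the DDStatement constructor signature as an assumption.
vio_dd = DDStatement("sysut2", VIODefinition())  # VIO-backed scratch DD for a utility step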
Example #3
def hlq_default(contents, dependencies):
    """Sets the default HLQ to use if none is provided.

    Args:
        contents (str): The HLQ to use
        dependencies (dict): Any dependent arguments

    Returns:
        str: The HLQ to use
    """
    hlq = None
    if dependencies.get("operation") == "restore":
        hlq = datasets.hlq()
    return hlq
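Illustrative calls showing the intent: a default HLQ is looked up only for restore operations, otherwise None is returned.

# Illustrative only -- the HLQ returned for "restore" is system dependent.
print(hlq_default(None, {"operation": "restore"}))  # e.g. "OMVSADM"
print(hlq_default(None, {"operation": "backup"}))   # None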
Example #4
def run_sample():

    dsn_sample_jcl = datasets.hlq() + ".SAMPLE.JCL"
    dsn_with_mem_sample_jcl = dsn_sample_jcl + "(UPTIME)"

    # NOTE - data set does NOT need to exist prior to running this sample.

    # create and write JCL to data set
    datasets.write(dataset=dsn_with_mem_sample_jcl, content=jcl_sample)

    # submit job
    job_sample = jobs.submit(dsn_with_mem_sample_jcl)

    print("Details - sample job")
    print("id:", job_sample.id)
    print("name:", job_sample.name)
    print("owner:", job_sample.owner)
    print("status:", job_sample.status)
    print("rc:", job_sample.rc)

    print("Waiting for job completion, then refresh and print status, rc...")

    job_sample.wait()
    job_sample.refresh()

    print("status: ", job_sample.status)
    print("rc: ", job_sample.rc)

    dd_stdout = jobs.read_output(job_sample.id, 'UPTIME', 'STDOUT')

    # print the stdout produced by job
    print("The contents of the STDOUT DD:")
    print(dd_stdout)

    # cleanup:
    # cancels and removes job from jes system
    job_sample.purge()

    # delete data set
    datasets.delete(dsn_sample_jcl)
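The sample references a jcl_sample string defined elsewhere. A minimal illustrative definition that would produce an UPTIME job with a STDOUT DD might look like the sketch below; it is not the original sample's exact JCL, and the JOB card must be adapted to your site.

# Illustrative JCL only -- adjust the JOB card and accounting data for your system.
jcl_sample = """//UPTIME   JOB (ACCT),'UPTIME SAMPLE',MSGCLASS=X,NOTIFY=&SYSUID
//UPTIME   EXEC PGM=BPXBATCH
//STDPARM  DD *
SH uptime
//STDIN    DD DUMMY
//STDOUT   DD SYSOUT=*
//STDERR   DD SYSOUT=*
//"""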
Example #5
def mvs_file_backup(dsn, bk_dsn=None):
    """Create a backup data set for an MVS data set

    Arguments:
        dsn {str} -- The name of the data set to backup.
                        It could be an MVS PS/PDS/PDSE/VSAM(KSDS), etc.
        bk_dsn {str} -- The name of the backup data set (optional; a name is
                        generated when omitted).

    Returns:
        str -- The name of the backup data set.

    Raises:
        BackupError: When backup data set exists.
        BackupError: When creation of backup data set fails.
    """
    dsn = _validate_data_set_name(dsn).upper()
    if is_member(dsn):
        if not bk_dsn:
            bk_dsn = extract_dsname(dsn) + "({0})".format(temp_member_name())
        bk_dsn = _validate_data_set_name(bk_dsn).upper()
        response = datasets._copy(dsn, bk_dsn)
        if response.rc != 0:
            raise BackupError("Unable to backup {0} to {1}".format(
                dsn, bk_dsn),
                              rc=response.rc,
                              stdout=response.stdout_response,
                              stderr=response.stderr_response)
    else:
        if not bk_dsn:
            bk_dsn = datasets.tmp_name(datasets.hlq())
        bk_dsn = _validate_data_set_name(bk_dsn).upper()
        cp_rc = _copy_ds(dsn, bk_dsn)
        if cp_rc == 12:  # The data set is probably a PDS or PDSE
            # Delete allocated backup that was created when attempting to use _copy_ds()
            # Safe to delete because _copy_ds() would have raised an exception if it did
            # not successfully create the backup data set, so no risk of it predating module invocation
            datasets.delete(bk_dsn)
            _allocate_model(bk_dsn, dsn)
            rc, out, err = _copy_pds(dsn, bk_dsn)
            if rc != 0:
                raise BackupError(
                    "Unable to backup data set {0} to {1}".format(dsn, bk_dsn))
    return bk_dsn
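Hedged usage sketches; the data set names below are placeholders.

# Illustrative calls -- data set names are placeholders.
bk_member = mvs_file_backup("USER.ORIG.PDS(MEMBER1)")               # backup name is generated
bk_ps = mvs_file_backup("USER.ORIG.PS", bk_dsn="USER.ORIG.PS.BKP")  # explicit backup name
print(bk_member, bk_ps)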
Example #6
def run_module():
    # ********************************************************** #
    #                Module initialization                       #
    # ********************************************************** #
    module = AnsibleModule(argument_spec=dict(
        src=dict(required=True, type="str"),
        dest=dict(required=True, type="path"),
        fail_on_missing=dict(required=False, default=True, type="bool"),
        flat=dict(required=False, default=True, type="bool"),
        is_binary=dict(required=False, default=False, type="bool"),
        use_qualifier=dict(required=False, default=False, type="bool"),
        validate_checksum=dict(required=False, default=True, type="bool"),
        encoding=dict(required=False, type="dict"),
        sftp_port=dict(type="int", required=False),
        ignore_sftp_stderr=dict(type="bool", default=False, required=False),
        local_charset=dict(type="str"),
    ))

    src = module.params.get("src")
    if module.params.get("use_qualifier"):
        module.params["src"] = datasets.hlq() + "." + src

    # ********************************************************** #
    #                   Verify parameter validity                #
    # ********************************************************** #

    arg_def = dict(
        src=dict(arg_type="data_set_or_path", required=True),
        dest=dict(arg_type="path", required=True),
        fail_on_missing=dict(arg_type="bool", required=False, default=True),
        is_binary=dict(arg_type="bool", required=False, default=False),
        use_qualifier=dict(arg_type="bool", required=False, default=False),
    )

    if not module.params.get("encoding") and not module.params.get(
            "is_binary"):
        mvs_src = data_set.is_data_set(src)
        remote_charset = encode.Defaults.get_default_system_charset()

        module.params["encoding"] = {
            "from": encode.Defaults.DEFAULT_EBCDIC_MVS_CHARSET
            if mvs_src else remote_charset,
            "to": module.params.get("local_charset"),
        }

    if module.params.get("encoding"):
        module.params.update(
            dict(
                from_encoding=module.params.get("encoding").get("from"),
                to_encoding=module.params.get("encoding").get("to"),
            ))
        arg_def.update(
            dict(
                from_encoding=dict(arg_type="encoding"),
                to_encoding=dict(arg_type="encoding"),
            ))

    fetch_handler = FetchHandler(module)
    try:
        parser = better_arg_parser.BetterArgParser(arg_def)
        parsed_args = parser.parse_args(module.params)
    except ValueError as err:
        module.fail_json(msg="Parameter verification failed", stderr=str(err))
    src = parsed_args.get("src")
    b_src = to_bytes(src)
    fail_on_missing = boolean(parsed_args.get("fail_on_missing"))
    is_binary = boolean(parsed_args.get("is_binary"))
    encoding = module.params.get("encoding")

    # ********************************************************** #
    #  Check for data set existence and determine its type       #
    # ********************************************************** #

    res_args = dict()
    _fetch_member = "(" in src and src.endswith(")")
    ds_name = src if not _fetch_member else src[:src.find("(")]
    try:
        ds_utils = data_set.DataSetUtils(ds_name)
        if not ds_utils.exists():
            if fail_on_missing:
                module.fail_json(msg=("The source '{0}' does not exist or is "
                                      "uncataloged".format(ds_name)))
            module.exit_json(
                note=("Source '{0}' was not found. No data was fetched".format(
                    ds_name)))
        ds_type = ds_utils.ds_type()
        if not ds_type:
            module.fail_json(msg="Unable to determine data set type")

    except Exception as err:
        module.fail_json(msg="Error while gathering data set information",
                         stderr=str(err))

    # ********************************************************** #
    #                  Fetch a sequential data set               #
    # ********************************************************** #

    if ds_type == "PS":
        file_path = fetch_handler._fetch_mvs_data(src, is_binary, encoding)
        res_args["remote_path"] = file_path

    # ********************************************************** #
    #    Fetch a partitioned data set or one of its members      #
    # ********************************************************** #

    elif ds_type == "PO":
        if _fetch_member:
            member_name = src[src.find("(") + 1:src.find(")")]
            if not ds_utils.member_exists(member_name):
                module.fail_json(
                    msg=("The data set member '{0}' was not found inside data "
                         "set '{1}'").format(member_name, ds_name))
            file_path = fetch_handler._fetch_mvs_data(src, is_binary, encoding)
            res_args["remote_path"] = file_path
        else:
            res_args["remote_path"] = fetch_handler._fetch_pdse(
                src, is_binary, encoding)

    # ********************************************************** #
    #                  Fetch a USS file                          #
    # ********************************************************** #

    elif ds_type == "USS":
        if not os.access(b_src, os.R_OK):
            module.fail_json(
                msg="File '{0}' does not have appropriate read permission".
                format(src))
        file_path = fetch_handler._fetch_uss_file(src, is_binary, encoding)
        res_args["remote_path"] = file_path

    # ********************************************************** #
    #                  Fetch a VSAM data set                     #
    # ********************************************************** #

    elif ds_type == "VSAM":
        file_path = fetch_handler._fetch_vsam(src, is_binary, encoding)
        res_args["remote_path"] = file_path

    res_args["file"] = ds_name
    res_args["ds_type"] = ds_type
    module.exit_json(**res_args)