def copy_pds2uss(src, dest, is_binary=False):
    """Copy a whole PDS(E) data set into a USS directory.

    Arguments:
        src: {str} -- The MVS data set to be copied; must be a PDS(E) data set.
        dest: {str} -- The destination USS path.

    Keyword Arguments:
        is_binary: {bool} -- Whether the members to be copied contain binary data.

    Raises:
        USSCmdExecError: When any exception is raised during the conversion.

    Returns:
        boolean -- The return code after the USS command executed successfully
        str -- The stdout after the USS command executed successfully
        str -- The stderr after the USS command executed successfully
    """
    helper = AnsibleModuleHelper(argument_spec={})
    src = _validate_data_set_name(src)
    dest = _validate_path(dest)
    # 'rec' keeps record boundaries; 'bin' copies raw bytes.
    copy_mode = "bin" if is_binary else "rec"
    copy_cmd = "cp -U -F {0} \"//'{1}'\" {2}".format(copy_mode, src, quote(dest))
    rc, out, err = helper.run_command(copy_cmd)
    if rc:
        raise USSCmdExecError(copy_cmd, rc, out, err)
    return rc, out, err
def _copy_ds(ds, bk_ds):
    """Copy the contents of one data set into another.

    Arguments:
        ds {str} -- The source data set to be copied from. Should be SEQ or VSAM.
        bk_ds {str} -- The destination data set to copy to.

    Raises:
        BackupError: When copying data fails.
    """
    helper = AnsibleModuleHelper(argument_spec={})
    _allocate_model(bk_ds, ds)
    # IDCAMS REPRO copies the records from ds into the freshly allocated bk_ds.
    repro_cmd = """  REPRO -
    INDATASET({0}) -
    OUTDATASET({1})""".format(ds, bk_ds)
    rc, out, err = helper.run_command(
        "mvscmdauth --pgm=idcams --sysprint=* --sysin=stdin", data=repro_cmd)
    # RC 12 may only mean the source data set was empty; anything else is fatal.
    if rc not in (0, 12):
        Datasets.delete(bk_ds)
        raise BackupError(
            "Unable to backup data set {0}; stdout: {1}; stderr: {2}".format(
                ds, out, err))
    if rc and is_empty(ds):
        rc = 0
    return rc
def copy_uss2mvs(src, dest, ds_type, is_binary=False):
    """Copy a USS file or directory into an MVS data set.

    Arguments:
        src: {str} -- The USS file or path to be copied.
        dest: {str} -- The destination MVS data set; must be a PS or PDS(E).
        ds_type: {str} -- The dsorg of the destination.

    Keyword Arguments:
        is_binary: {bool} -- Whether the file to be copied contains binary data.

    Raises:
        USSCmdExecError: When any exception is raised during the conversion.

    Returns:
        boolean -- The return code after the copy command executed successfully
        str -- The stdout after the copy command executed successfully
        str -- The stderr after the copy command executed successfully
    """
    helper = AnsibleModuleHelper(argument_spec={})
    src = _validate_path(src)
    dest = _validate_data_set_name(dest)
    copy_mode = "bin" if is_binary else "rec"
    # -CM copies each file of a directory as a member of the target PDS(E).
    member_opt = "-CM " if ds_type == "PO" else ""
    copy_cmd = "cp {0}-F {1} {2} \"//'{3}'\"".format(
        member_opt, copy_mode, quote(src), dest)
    rc, out, err = helper.run_command(copy_cmd)
    if rc:
        raise USSCmdExecError(copy_cmd, rc, out, err)
    return rc, out, err
def copy_mvs2mvs(src, dest, is_binary=False):
    """Copy an MVS data set to another MVS data set.

    Arguments:
        src: {str} -- Name of the source data set.
        dest: {str} -- Name of the destination data set.

    Keyword Arguments:
        is_binary: {bool} -- Whether the data set to be copied contains binary data.

    Raises:
        USSCmdExecError: When any exception is raised during the conversion.

    Returns:
        boolean -- The return code after the USS command executed successfully
        str -- The stdout after the USS command executed successfully
        str -- The stderr after the USS command executed successfully
    """
    helper = AnsibleModuleHelper(argument_spec={})
    src = _validate_data_set_name(src)
    dest = _validate_data_set_name(dest)
    copy_mode = "bin" if is_binary else "rec"
    copy_cmd = "cp -F {0} \"//'{1}'\" \"//'{2}'\"".format(copy_mode, src, dest)
    rc, out, err = helper.run_command(copy_cmd)
    if rc:
        raise USSCmdExecError(copy_cmd, rc, out, err)
    return rc, out, err
def _run_mvs_command(pgm, cmd, dd=None, authorized=False):
    """Run a particular MVS program through ZOAU's mvscmd/mvscmdauth.

    Arguments:
        pgm {str} -- The MVS program to run.
        cmd {str} -- The input command to pass to the program via stdin.

    Keyword Arguments:
        dd {dict} -- The DD definitions required by the program. (Default {None})
        authorized {bool} -- Whether the MVS program should run APF-authorized.
            (Default {False})

    Returns:
        tuple[int, str, str] -- A tuple of return code, stdout and stderr.
    """
    helper = AnsibleModuleHelper(argument_spec={})
    program = pgm.upper()
    # IKJEFT01 (the TSO/E batch monitor) uses SYSTSPRT/SYSTSIN rather than
    # the usual SYSPRINT/SYSIN DD names.
    if program == "IKJEFT01":
        out_dd, in_dd = "systsprt", "systsin"
    else:
        out_dd, in_dd = "sysprint", "sysin"
    launcher = "mvscmdauth" if authorized else "mvscmd"
    full_cmd = "{0} --pgm={1} --{2}=* --{3}=stdin".format(
        launcher, program, out_dd, in_dd)
    for dd_name, dd_value in (dd or {}).items():
        full_cmd += " --{0}={1}".format(dd_name, dd_value)
    return helper.run_command(full_cmd, data=cmd)
def __init__(self):
    """Initialize the helper used to drive the USS 'iconv' coded character
    set conversion utility.

    Attributes:
        module -- AnsibleModuleHelper instance used to run USS commands.
    """
    self.module = AnsibleModuleHelper(argument_spec={})
def run_operator_command(params):
    """Run a z/OS operator command via the ZOAU 'opercmd' utility.

    Arguments:
        params {dict} -- Holds "cmd" plus optional truthy "verbose"/"debug" flags.

    Raises:
        OperatorCmdError: When opercmd exits with a non-zero return code.

    Returns:
        dict -- The return code and the combined stdout/stderr message.
    """
    helper = AnsibleModuleHelper(argument_spec={})
    command = params.get("cmd")
    verbose_opt = "-v" if params.get("verbose") else ""
    debug_opt = "-d" if params.get("debug") else ""
    rc, stdout, stderr = helper.run_command(
        "opercmd {0} {1} {2}".format(verbose_opt, debug_opt, command),
    )
    message = stdout + stderr
    if rc > 0:
        # Pass the message as a list of lines when there is any output.
        raise OperatorCmdError(
            command, rc, message.split("\n") if message else message)
    return {"rc": rc, "message": message}
def _iehlist(dd, stdin):
    """Call the IEHLIST utility program.

    Arguments:
        dd {str} -- Volume information to pass as a DD statement.
        stdin {str} -- Input to pass to the program's stdin.

    Returns:
        str -- The sysprint response of IEHLIST, or None on failure.
    """
    helper = AnsibleModuleHelper(argument_spec={})
    rc, stdout, stderr = helper.run_command(
        "mvscmd --pgm=iehlist --sysprint=* --dd={0} --sysin=stdin ".format(dd),
        data=stdin,
    )
    # Only surface output when IEHLIST completed successfully.
    return stdout if rc == 0 else None
def execute_authorized(pgm, dds, parm="", debug=False, verbose=False):
    """Execute an MVS program APF-authorized via mvscmdauth.

    Args:
        pgm (str): The name of the program to execute.
        dds (list[DDStatement]): A list of DDStatement objects.
        parm (str, optional): Argument string if required by the program.
            Defaults to "".
        debug (bool, optional): Pass the mvscmdauth debug flag. Defaults to False.
        verbose (bool, optional): Pass the mvscmdauth verbose flag. Defaults to False.

    Returns:
        MVSCmdResponse: The response of the command.
    """
    helper = AnsibleModuleHelper(argument_spec={})
    debug_opt = "-d" if debug else ""
    verbose_opt = "-v" if verbose else ""
    command = "mvscmdauth {0} {1} {2} ".format(
        debug_opt,
        verbose_opt,
        MVSCmd._build_command(pgm, dds, parm),
    )
    rc, out, err = helper.run_command(command)
    return MVSCmdResponse(rc, out, err)
def _vls_wrapper(pattern, details=False, verbose=False):
    """A wrapper for the ZOAU 'vls' shell command."""
    options = ""
    if details:
        options += " -l"
    if verbose:
        options += " -v"
    vls_cmd = "vls{0} {1}".format(options, quote(pattern))
    return AnsibleModuleHelper(argument_spec={}).run_command(vls_cmd)
def _allocate_model(ds, model):
    """Allocate a data set using the allocation parameters of a model data set.

    Arguments:
        ds {str} -- The name of the data set to be allocated.
        model {str} -- The name of the data set whose allocation parameters
            should be used.

    Raises:
        BackupError: When allocation fails.

    Returns:
        int -- The return code of the allocation command.
    """
    helper = AnsibleModuleHelper(argument_spec={})
    # TSO ALLOCATE with LIKE() clones the model data set's attributes.
    alloc_cmd = """  ALLOC -
    DS('{0}') -
    LIKE('{1}')""".format(ds, model)
    rc, out, err = helper.run_command(
        "mvscmdauth --pgm=ikjeft01 --systsprt=* --systsin=stdin",
        data=alloc_cmd)
    if rc != 0:
        raise BackupError(
            "Unable to allocate data set {0}; stdout: {1}; stderr: {2}".format(
                ds, out, err))
    return rc
def copy_uss2uss_binary(src, dest):
    """Copy a USS file to another USS location in binary mode.

    Arguments:
        src: {str} -- The source USS path.
        dest: {str} -- The destination USS path.

    Raises:
        USSCmdExecError: When any exception is raised during the conversion.

    Returns:
        boolean -- The return code after the USS command executed successfully
        str -- The stdout after the USS command executed successfully
        str -- The stderr after the USS command executed successfully
    """
    helper = AnsibleModuleHelper(argument_spec={})
    src = _validate_path(src)
    dest = _validate_path(dest)
    # -F bin copies raw bytes with no codepage conversion.
    copy_cmd = "cp -F bin {0} {1}".format(quote(src), quote(dest))
    rc, out, err = helper.run_command(copy_cmd)
    if rc:
        raise USSCmdExecError(copy_cmd, rc, out, err)
    return rc, out, err
def copy_vsam_ps(src, dest):
    """Copy a VSAM(KSDS) data set to a PS data set, or vice versa.

    Arguments:
        src: {str} -- The VSAM(KSDS) or PS data set to be copied.
        dest: {str} -- The target PS or VSAM(KSDS) data set.

    Raises:
        USSCmdExecError: When any exception is raised during the conversion.

    Returns:
        boolean -- The return code after the USS command executed successfully
        str -- The stdout after the USS command executed successfully
        str -- The stderr after the USS command executed successfully
    """
    helper = AnsibleModuleHelper(argument_spec={})
    src = _validate_data_set_name(src)
    dest = _validate_data_set_name(dest)
    # IDCAMS REPRO copies records between VSAM and sequential data sets.
    repro_cmd = REPRO.format(src, dest)
    cmd = "mvscmdauth --pgm=idcams --sysprint=stdout --sysin=stdin"
    rc, out, err = helper.run_command(cmd, data=repro_cmd)
    if rc:
        raise USSCmdExecError(cmd, rc, out, err)
    return rc, out, err
def _dgrep_wrapper(data_set_pattern, content, ignore_case=False, line_num=False, verbose=False, context=None):
    """A wrapper for the ZOAU 'dgrep' shell command."""
    options = ""
    if ignore_case:
        options += " -i"
    if line_num:
        options += " -n"
    if verbose:
        options += " -v"
    if context:
        options += " -C{0}".format(context)
    dgrep_cmd = "dgrep{0} {1} {2}".format(
        options, quote(content), quote(data_set_pattern))
    return AnsibleModuleHelper(argument_spec={}).run_command(dgrep_cmd)
def _dls_wrapper(data_set_pattern, list_details=False, u_time=False, size=False, verbose=False, migrated=False):
    """A wrapper for the ZOAU 'dls' shell command."""
    options = ""
    if migrated:
        # A migrated-only listing ignores the detail-related options.
        options += " -m"
    else:
        if list_details:
            options += " -l"
        if u_time:
            options += " -u"
        if size:
            options += " -s"
    if verbose:
        options += " -v"
    dls_cmd = "dls{0} {1}".format(options, quote(data_set_pattern))
    return AnsibleModuleHelper(argument_spec={}).run_command(dls_cmd)
def _get_job_output_str(job_id="*", owner="*", job_name="*", dd_name=""):
    """Generate JSON output string containing Job info from SDSF.
    Writes a temporary REXX script to the USS filesystem to gather output.

    Keyword Arguments:
        job_id {str} -- The job ID to search for (default: {'*'})
        owner {str} -- The owner of the job (default: {'*'})
        job_name {str} -- The job name search for (default: {'*'})
        dd_name {str} -- The data definition to retrieve (default: {''})

    Returns:
        tuple[int, str, str] -- RC, STDOUT, and STDERR from the REXX script.
    """
    # REXX script that drives SDSF (via the isfcalls interface) and prints a
    # hand-built JSON array: one object per matching job, each containing a
    # "ddnames" list with the browsed content of every requested DD.
    get_job_detail_json_rexx = """/* REXX */
arg options
parse var options param
upper param
parse var param 'JOBID=' jobid ' OWNER=' owner,
' JOBNAME=' jobname ' DDNAME=' ddname
rc=isfcalls('ON')
jobid = strip(jobid,'L')
if (jobid <> '') then do
ISFFILTER='JobID EQ '||jobid
end
owner = strip(owner,'L')
if (owner <> '') then do
ISFOWNER=owner
end
jobname = strip(jobname,'L')
if (jobname <> '') then do
ISFPREFIX=jobname
end
ddname = strip(ddname,'L')
if (ddname == '?') then do
ddname = ''
end
Address SDSF "ISFEXEC ST (ALTERNATE DELAYED)"
if rc<>0 then do
Say '[]'
Exit 0
end
if isfrows == 0 then do
Say '[]'
end
else do
Say '['
do ix=1 to isfrows
linecount = 0
if ix<>1 then do
Say ','
end
Say '{'
Say '"'||'job_id'||'":"'||value('JOBID'||"."||ix)||'",'
Say '"'||'job_name'||'":"'||value('JNAME'||"."||ix)||'",'
Say '"'||'subsystem'||'":"'||value('ESYSID'||"."||ix)||'",'
Say '"'||'owner'||'":"'||value('OWNERID'||"."||ix)||'",'
Say '"'||'ret_code'||'":{"'||'msg'||'":"'||value('RETCODE'||"."||ix)||'"},'
Say '"'||'class'||'":"'||value('JCLASS'||"."||ix)||'",'
Say '"'||'content_type'||'":"'||value('JTYPE'||"."||ix)||'",'
Address SDSF "ISFACT ST TOKEN('"TOKEN.ix"') PARM(NP ?)",
"("prefix JDS_
lrc=rc
if lrc<>0 | JDS_DDNAME.0 == 0 then do
Say '"ddnames":[]'
end
else do
Say '"ddnames":['
do jx=1 to JDS_DDNAME.0
if jx<>1 & ddname == '' then do
Say ','
end
if ddname == '' | ddname == value('JDS_DDNAME'||"."||jx) then do
Say '{'
Say '"'||'ddname'||'":"'||value('JDS_DDNAME'||"."||jx)||'",'
Say '"'||'record_count'||'":"'||value('JDS_RECCNT'||"."||jx)||'",'
Say '"'||'id'||'":"'||value('JDS_DSID'||"."||jx)||'",'
Say '"'||'stepname'||'":"'||value('JDS_STEPN'||"."||jx)||'",'
Say '"'||'procstep'||'":"'||value('JDS_PROCS'||"."||jx)||'",'
Say '"'||'byte_count'||'":"'||value('JDS_BYTECNT'||"."||jx)||'",'
Say '"'||'content'||'":['
Address SDSF "ISFBROWSE ST TOKEN('"token.ix"')"
untilline = linecount + JDS_RECCNT.jx
startingcount = linecount + 1
do kx=linecount+1 to untilline
if kx<>startingcount then do
Say ','
end
linecount = linecount + 1
Say '"'||escapeNewLine(escapeDoubleQuote(isfline.kx))||'"'
end
Say ']'
Say '}'
end
else do
linecount = linecount + JDS_RECCNT.jx
end
end
Say ']'
end
Say '}'
end
Say ']'
end
rc=isfcalls('OFF')
return 0
escapeDoubleQuote: Procedure
Parse Arg string
out=''
Do While Pos('"',string)<>0
Parse Var string prefix '"' string
out=out||prefix||'\\"'
End
Return out||string
escapeNewLine: Procedure
Parse Arg string
Return translate(string, '4040'x, '1525'x)
"""
    try:
        module = AnsibleModuleHelper(argument_spec={})
        # SDSF uses '?' to request the DD list itself; normalize it (and
        # None) to "" so the REXX returns every DD.
        if dd_name is None or dd_name == "?":
            dd_name = ""
        jobid_param = "jobid=" + job_id
        owner_param = "owner=" + owner
        jobname_param = "jobname=" + job_name
        ddname_param = "ddname=" + dd_name
        # Write the REXX to a temp file and mark it executable so it can be
        # invoked directly as a command.
        tmp = NamedTemporaryFile(delete=True)
        with open(tmp.name, "w") as f:
            f.write(get_job_detail_json_rexx)
        chmod(tmp.name, S_IEXEC | S_IREAD | S_IWRITE)
        args = [jobid_param, owner_param, jobname_param, ddname_param]
        cmd = [tmp.name, " ".join(args)]
        rc, out, err = module.run_command(args=cmd)
    except Exception:
        raise
    return rc, out, err
def _get_job_status_str(job_id="*", owner="*", job_name="*"):
    """Generate JSON output string containing Job status info from SDSF.
    Writes a temporary REXX script to the USS filesystem to gather output.

    Keyword Arguments:
        job_id {str} -- The job ID to search for (default: {'*'})
        owner {str} -- The owner of the job (default: {'*'})
        job_name {str} -- The job name search for (default: {'*'})

    Returns:
        tuple[int, str, str] -- RC, STDOUT, and STDERR from the REXX script.
    """
    # REXX script that drives SDSF (via the isfcalls interface) and prints a
    # hand-built JSON array with one status object per matching job. Unlike
    # the job-output variant, it does not browse DD contents.
    get_job_status_json_rexx = """/* REXX */
arg options
parse var options param
upper param
parse var param 'JOBID=' jobid ' OWNER=' owner,
' JOBNAME=' jobname
rc=isfcalls('ON')
jobid = strip(jobid,'L')
if (jobid <> '') then do
ISFFILTER='JobID EQ '||jobid
end
owner = strip(owner,'L')
if (owner <> '') then do
ISFOWNER=owner
end
jobname = strip(jobname,'L')
if (jobname <> '') then do
ISFPREFIX=jobname
end
Address SDSF "ISFEXEC ST (ALTERNATE DELAYED)"
if rc<>0 then do
Say '[]'
Exit 0
end
if isfrows == 0 then do
Say '[]'
end
else do
Say '['
do ix=1 to isfrows
linecount = 0
if ix<>1 then do
Say ','
end
Say '{'
Say '"'||'job_id'||'":"'||value('JOBID'||"."||ix)||'",'
Say '"'||'job_name'||'":"'||value('JNAME'||"."||ix)||'",'
Say '"'||'subsystem'||'":"'||value('ESYSID'||"."||ix)||'",'
Say '"'||'system'||'":"'||value('SYSNAME'||"."||ix)||'",'
Say '"'||'owner'||'":"'||value('OWNERID'||"."||ix)||'",'
Say '"'||'ret_code'||'":{"'||'msg'||'":"'||value('RETCODE'||"."||ix)||'"},'
Say '"'||'class'||'":"'||value('JCLASS'||"."||ix)||'",'
Say '"'||'content_type'||'":"'||value('JTYPE'||"."||ix)||'"'
Say '}'
end
Say ']'
end
rc=isfcalls('OFF')
return 0
escapeDoubleQuote: Procedure
Parse Arg string
out=''
Do While Pos('"',string)<>0
Parse Var string prefix '"' string
out=out||prefix||'\\"'
End
Return out||string
escapeNewLine: Procedure
Parse Arg string
Return translate(string, '4040'x, '1525'x)
"""
    try:
        module = AnsibleModuleHelper(argument_spec={})
        jobid_param = "jobid=" + job_id
        owner_param = "owner=" + owner
        jobname_param = "jobname=" + job_name
        # Write the REXX to a temp file and mark it executable so it can be
        # invoked directly as a command.
        tmp = NamedTemporaryFile(delete=True)
        with open(tmp.name, "w") as f:
            f.write(get_job_status_json_rexx)
        chmod(tmp.name, S_IEXEC | S_IREAD | S_IWRITE)
        args = [jobid_param, owner_param, jobname_param]
        cmd = [tmp.name, " ".join(args)]
        rc, out, err = module.run_command(args=cmd)
    except Exception:
        raise
    return rc, out, err
def uss_file_backup(path, backup_name=None, compress=False):
    """Create a backup of a USS file or directory.

    Arguments:
        path {str} -- The name of the USS file or path to back up.
        backup_name {str} -- The name of the backup file (default: {None},
            which generates "<abs_path>@<timestamp>-bak").

    Keyword Arguments:
        compress {bool} -- Determines if the backup is a tar archive.
            (default: {False})

    Raises:
        BackupError: When the path does not exist, or when creating the
            compressed backup fails.

    Returns:
        str -- Name of the backup file.
    """
    abs_path = os.path.abspath(path)
    if not os.path.exists(abs_path):
        raise BackupError("Path to be backed up does not exist.")
    module = AnsibleModuleHelper(argument_spec={})
    ext = time.strftime("%Y-%m-%d-%H-%M-%S", time.localtime()).lower()
    # os.path.abspath() never returns a trailing "/", so the same default name
    # works for both files and directories. (The previous code sliced the last
    # character off directory paths -- abs_path[:-1] -- which dropped a real
    # character of the name since abspath had already removed any trailing
    # slash, e.g. /tmp/mydir -> /tmp/mydi@...-bak.)
    default_backup_name = "{0}@{1}-bak".format(abs_path, ext)
    backup_base = os.path.basename(default_backup_name)
    backup_name_provided = True
    if not backup_name:
        backup_name = default_backup_name
        backup_name_provided = False
    # For an uncompressed directory backup, make sure the target is a
    # directory that mirrors the source's permissions.
    if os.path.isdir(abs_path) and backup_name[-1] != "/" and not compress:
        backup_name += "/"
        make_dirs(backup_name, mode_from=abs_path)
    if backup_name[-1] == "/" and not os.path.isdir(backup_name):
        make_dirs(backup_name)
    elif os.path.isdir(backup_name) and backup_name[-1] != "/":
        backup_name += "/"
    if compress:
        # When the caller supplied an existing directory, place the archive
        # inside it under the generated base name.
        if backup_name_provided and os.path.isdir(backup_name):
            backup_name += backup_base
        bk_cmd = "tar -cpf {0}.tar {1}".format(quote(backup_name), quote(abs_path))
        rc, out, err = module.run_command(bk_cmd)
        if rc:
            raise BackupError(err)
        backup_name += ".tar"
    else:
        if os.path.isdir(abs_path):
            # Replace any stale backup directory with a fresh tree copy.
            if os.path.exists(backup_name):
                rmtree(backup_name)
            copytree(abs_path, backup_name)
        elif not os.path.isdir(abs_path) and os.path.isdir(backup_name):
            backup_name = backup_name + os.path.basename(abs_path)
            copy2(abs_path, backup_name)
        else:
            copy2(abs_path, backup_name)
    return backup_name
class EncodeUtils(object):
    def __init__(self):
        """Call the coded character set conversion utility iconv to convert
        a USS file from one coded character set to another.
        """
        # Helper used to run USS/MVS shell commands.
        self.module = AnsibleModuleHelper(argument_spec={})

    def _validate_data_set_name(self, ds):
        """Validate and normalize an MVS data set name."""
        arg_defs = dict(ds=dict(arg_type="data_set"), )
        parser = BetterArgParser(arg_defs)
        parsed_args = parser.parse_args({"ds": ds})
        return parsed_args.get("ds")

    def _validate_path(self, path):
        """Validate and normalize a USS path."""
        arg_defs = dict(path=dict(arg_type="path"), )
        parser = BetterArgParser(arg_defs)
        parsed_args = parser.parse_args({"path": path})
        return parsed_args.get("path")

    def _validate_data_set_or_path(self, path):
        """Validate a value that may be either a data set name or a USS path."""
        arg_defs = dict(path=dict(arg_type="data_set_or_path"), )
        parser = BetterArgParser(arg_defs)
        parsed_args = parser.parse_args({"path": path})
        return parsed_args.get("path")

    def _validate_encoding(self, encoding):
        """Validate a code set (encoding) name."""
        arg_defs = dict(encoding=dict(arg_type="encoding"), )
        parser = BetterArgParser(arg_defs)
        parsed_args = parser.parse_args({"encoding": encoding})
        return parsed_args.get("encoding")

    def listdsi_data_set(self, ds):
        """Invoke IDCAMS LISTCAT to get the record length and estimate the
        space used by a VSAM data set.

        Arguments:
            ds: {str} -- The VSAM data set to be checked.

        Raises:
            EncodeError: When the LISTCAT command fails.

        Returns:
            int -- The maximum record length of the VSAM data set.
            int -- The space used by the VSAM data set (KB).
        """
        ds = self._validate_data_set_name(ds)
        reclen = 80
        space_u = 1024
        listcat_cmd = LISTCAT.format(ds)
        cmd = "mvscmdauth --pgm=ikjeft01 --systsprt=stdout --systsin=stdin"
        rc, out, err = self.module.run_command(cmd, data=listcat_cmd)
        if rc:
            raise EncodeError(err)
        if out:
            # Benign defaults guard against NameError/ZeroDivisionError when
            # the LISTCAT output is missing any of the expected attributes
            # (previously any missing field crashed the estimation).
            cisize = recnum = freeci = freeca = cioca = trkoca = 0
            find_reclen = re.findall(r"MAXLRECL-*\d+", out)
            find_cisize = re.findall(r"CISIZE-*\d+", out)
            find_recnum = re.findall(r"REC-TOTAL-*\d+", out)
            find_freeci = re.findall(r"FREESPACE-%CI-*\d+", out)
            find_freeca = re.findall(r"FREESPACE-%CA-*\d+", out)
            find_cioca = re.findall(r"CI/CA-*\d+", out)
            find_trkoca = re.findall(r"TRACKS/CA-*\d+", out)
            if find_reclen:
                reclen = int("".join(re.findall(r"\d+", find_reclen[0])))
            if find_cisize:
                cisize = int("".join(re.findall(r"\d+", find_cisize[0])))
            if find_recnum:
                recnum = int("".join(re.findall(r"\d+", find_recnum[0])))
            if find_freeci:
                freeci = int("".join(re.findall(r"\d+", find_freeci[0])))
            if find_freeca:
                freeca = int("".join(re.findall(r"\d+", find_freeca[0])))
            if find_cioca:
                cioca = int("".join(re.findall(r"\d+", find_cioca[0])))
            if find_trkoca:
                trkoca = int("".join(re.findall(r"\d+", find_trkoca[0])))
            # VSAM space evaluation:
            #   Step 1. Records that fit in each VSAM CI.
            #   Step 2. Number of CIs used by the data set.
            #   Step 3. Number of CAs used by the data set.
            #   Step 4. Space (KB) from the CA count; this sizes the
            #           temporary PS data set used when copying a VSAM
            #           data set.
            # NOTE(review): freeci/freeca come from FREESPACE-%CI/%CA and so
            # look like percentages (0-100), but the formula uses them as
            # fractions -- confirm against LISTCAT field semantics.
            rec_in_ci = floor((cisize - cisize * freeci - 10) / reclen)
            ca_free_factor = cioca * (1 - freeca)
            if rec_in_ci > 0 and ca_free_factor > 0:
                ci_num = ceil(recnum / rec_in_ci)
                ca_num = ceil(ci_num / ca_free_factor)
                if ca_num > 0:
                    # A 3390 DASD track holds 56,664 bytes per track; the
                    # previous constant 566664 was a 10x typo (the original
                    # comment itself said 56664).
                    space_u = ceil(ca_num * trkoca * 56664 / 1024)
        return reclen, space_u

    def temp_data_set(self, reclen, space_u):
        """Create a temporary sequential data set with the given record
        length and size.

        Arguments:
            reclen {int} -- The record length of the data set.
            space_u {int} -- The size (KB) from which the allocation is derived.

        Returns:
            str -- Name of the allocated data set.

        Raises:
            OSError: When any exception is raised during the data set allocation.
        """
        # Double the estimate to leave headroom for the copy.
        size = str(space_u * 2) + "K"
        hlq = Datasets.hlq()
        temp_ps = Datasets.temp_name(hlq)
        rc = Datasets.create(temp_ps, "SEQ", size, "VB", "", reclen)
        if rc:
            raise OSError(
                "Failed when allocating temporary sequential data set!")
        return temp_ps

    def get_codeset(self):
        """Get the list of supported encodings from the USS command 'iconv -l'.

        Raises:
            EncodeError: When the iconv command fails.

        Returns:
            list -- The code sets supported on the current USS platform.
        """
        code_set = None
        iconv_list_cmd = ["iconv", "-l"]
        rc, out, err = self.module.run_command(iconv_list_cmd)
        if rc:
            raise EncodeError(err)
        if out:
            # Split on newlines and tabs. (The earlier pattern [\n|\t] also
            # split on a literal '|', which never occurs in code set names.)
            code_set_list = list(filter(None, re.split(r"[\n\t]", out)))
            # The output alternates labels and values; keep every second
            # entry after the header.
            code_set = [
                c for i, c in enumerate(code_set_list) if i > 0 and i % 2 == 0
            ]
        return code_set

    def string_convert_encoding(self, src, from_encoding, to_encoding):
        """Convert the encoding of an in-memory string.

        Arguments:
            src: {str} -- The input string content.
            from_encoding: {str} -- The source code set of the string.
            to_encoding: {str} -- The destination code set for the string.

        Raises:
            EncodeError: When the iconv command fails.

        Returns:
            str -- The string content after the conversion.
        """
        from_encoding = self._validate_encoding(from_encoding)
        to_encoding = self._validate_encoding(to_encoding)
        # printf '%s' emits src literally; without the explicit format string
        # the shell printf would interpret '%' and '\\' sequences inside src.
        iconv_cmd = "printf '%s' {0} | iconv -f {1} -t {2}".format(
            quote(src), quote(from_encoding), quote(to_encoding))
        rc, out, err = self.module.run_command(iconv_cmd, use_unsafe_shell=True)
        if rc:
            raise EncodeError(err)
        return out

    def uss_convert_encoding(self, src, dest, from_code, to_code):
        """Convert the encoding of the data in a USS file.

        Arguments:
            src: {str} -- The input file name; it should be a USS file.
            dest: {str} -- The output file name; it should be a USS file.
            from_code: {str} -- The source code set of the input file.
            to_code: {str} -- The destination code set for the output file.

        Raises:
            EncodeError: When any exception is raised during the conversion.
            MoveFileError: When any exception is raised during moving files.

        Returns:
            boolean -- Indicate whether the conversion is successful or not.
        """
        src = self._validate_path(src)
        dest = self._validate_path(dest)
        from_code = self._validate_encoding(from_code)
        to_code = self._validate_encoding(to_code)
        convert_rc = False
        temp_fo = None
        # When converting in place, write to a temp file first so iconv never
        # reads and writes the same file.
        if not src == dest:
            temp_fi = dest
        else:
            temp_fo, temp_fi = mkstemp()
        iconv_cmd = "iconv -f {0} -t {1} {2} > {3}".format(
            quote(from_code), quote(to_code), quote(src), quote(temp_fi))
        try:
            rc, out, err = self.module.run_command(iconv_cmd, use_unsafe_shell=True)
            if rc:
                raise EncodeError(err)
            if dest == temp_fi:
                convert_rc = True
            else:
                try:
                    # Preserve the source file's permission bits on the result.
                    src_mode = os.stat(src).st_mode
                    temp_mode = os.stat(temp_fi).st_mode
                    if src_mode != temp_mode:
                        os.chmod(temp_fi, src_mode)
                    shutil.move(temp_fi, dest)
                    convert_rc = True
                except (OSError, IOError) as e:
                    raise MoveFileError(src, dest, e)
        except Exception:
            raise
        finally:
            if temp_fo:
                try:
                    os.close(temp_fo)
                    unlink(temp_fi)
                except OSError as e:
                    # The temp file may already have been moved to dest.
                    if e.errno != errno.ENOENT:
                        raise
        return convert_rc

    def uss_convert_encoding_prev(self, src, dest, from_code, to_code):
        """For multiple-file conversion, such as a USS path or an MVS PDS
        data set split to USS, walk the tree and convert each file via
        uss_convert_encoding.

        Arguments:
            src: {str} -- The input USS path or a file.
            dest: {str} -- The output USS path or a file.
            from_code: {str} -- The source code set of the input path.
            to_code: {str} -- The destination code set for the output path.

        Raises:
            EncodeError: When the directory is empty, or when copying
                multiple files to a single file is requested.

        Returns:
            boolean -- Indicate whether the conversion is successful or not.
        """
        src = self._validate_path(src)
        dest = self._validate_path(dest)
        from_code = self._validate_encoding(from_code)
        to_code = self._validate_encoding(to_code)
        convert_rc = False
        file_list = list()
        if path.isdir(src):
            for (dir, subdir, files) in walk(src):
                for file in files:
                    file_list.append(path.join(dir, file))
            if len(file_list) == 0:
                raise EncodeError(
                    "Directory {0} is empty. Please check the path.".format(
                        src))
            elif len(file_list) == 1:
                if path.isdir(dest):
                    file_name = path.basename(file_list[0])
                    src_f = path.join(src, file_name)
                    dest_f = path.join(dest, file_name)
                    convert_rc = self.uss_convert_encoding(
                        src_f, dest_f, from_code, to_code)
            else:
                if path.isfile(dest):
                    raise EncodeError(
                        "Can't convert multiple files (src) {0} to a single file"
                        " (dest) {1}.".format(src, dest))
                else:
                    for file in file_list:
                        # Mirror the source tree under dest, creating
                        # intermediate directories as needed.
                        if dest == src:
                            dest_f = file
                        else:
                            dest_f = file.replace(src, dest, 1)
                        dest_dir = path.dirname(dest_f)
                        if not path.exists(dest_dir):
                            makedirs(dest_dir)
                        convert_rc = self.uss_convert_encoding(
                            file, dest_f, from_code, to_code)
        else:
            if path.isdir(dest):
                file_name = path.basename(path.abspath(src))
                dest = path.join(dest, file_name)
            convert_rc = self.uss_convert_encoding(src, dest, from_code,
                                                   to_code)
        return convert_rc

    def mvs_convert_encoding(self, src, dest, from_code, to_code, src_type=None, dest_type=None):
        """Convert the encoding of the data from 1) USS to MVS(PS, PDS/E,
        VSAM) 2) MVS to USS 3) MVS to MVS.

        Arguments:
            src: {str} -- The input MVS data set or USS path to be converted.
            dest: {str} -- The output MVS data set or USS path to be converted.
            from_code: {str} -- The source code set of the input.
            to_code: {str} -- The destination code set of the output.

        Keyword Arguments:
            src_type {str} -- The input MVS data set type: PS, PDS, PDSE,
                VSAM(KSDS), or None for a USS path. (default: {None})
            dest_type {str} -- The output MVS data set type. (default: {None})

        Returns:
            boolean -- Indicate whether the conversion is successful or not.
        """
        src = self._validate_data_set_or_path(src)
        dest = self._validate_data_set_or_path(dest)
        from_code = self._validate_encoding(from_code)
        to_code = self._validate_encoding(to_code)
        convert_rc = False
        temp_ps = None
        temp_src = src
        temp_dest = dest
        try:
            # Stage MVS sources into temporary USS files/directories.
            if src_type == "PS":
                temp_src_fo = NamedTemporaryFile()
                temp_src = temp_src_fo.name
                rc, out, err = copy.copy_ps2uss(src, temp_src)
            if src_type == "PO":
                temp_src_fo = TemporaryDirectory()
                temp_src = temp_src_fo.name
                rc, out, err = copy.copy_pds2uss(src, temp_src)
            if src_type == "VSAM":
                # VSAM goes through a temporary PS data set first.
                reclen, space_u = self.listdsi_data_set(src.upper())
                temp_ps = self.temp_data_set(reclen, space_u)
                rc, out, err = copy.copy_vsam_ps(src.upper(), temp_ps)
                temp_src_fo = NamedTemporaryFile()
                temp_src = temp_src_fo.name
                rc, out, err = copy.copy_ps2uss(temp_ps, temp_src)
            # Stage MVS destinations as temporary USS targets.
            if dest_type == "PS" or dest_type == "VSAM":
                temp_dest_fo = NamedTemporaryFile()
                temp_dest = temp_dest_fo.name
            if dest_type == "PO":
                temp_dest_fo = TemporaryDirectory()
                temp_dest = temp_dest_fo.name
            rc = self.uss_convert_encoding_prev(temp_src, temp_dest,
                                                from_code, to_code)
            if rc:
                if not dest_type:
                    convert_rc = True
                else:
                    if dest_type == "VSAM":
                        reclen, space_u = self.listdsi_data_set(dest.upper())
                        temp_ps = self.temp_data_set(reclen, space_u)
                        rc, out, err = copy.copy_uss2mvs(
                            temp_dest, temp_ps, "PS")
                        rc, out, err = copy.copy_vsam_ps(temp_ps, dest.upper())
                        convert_rc = True
                    elif dest_type == "PO":
                        # Copy each converted file back as a PDS(E) member.
                        for (dir, subdir, files) in walk(temp_dest):
                            for file in files:
                                temp_file = path.join(dir, file)
                                rc, out, err = copy.copy_uss2mvs(
                                    temp_file, dest, "PO")
                        convert_rc = True
                    else:
                        rc, out, err = copy.copy_uss2mvs(
                            temp_dest, dest, dest_type)
                        convert_rc = True
        except Exception:
            raise
        finally:
            # Always clean up the intermediate PS data set, if one was made.
            if temp_ps:
                Datasets.delete(temp_ps)
        return convert_rc