def mvs_file_backup(dsn, bk_dsn=None):
    """Create a backup data set for an MVS data set.

    Arguments:
        dsn {str} -- The name of the data set to backup.
            It could be an MVS PS/PDS/PDSE/VSAM(KSDS), etc.
        bk_dsn {str} -- The name of the backup data set. When empty or None,
            a temporary name under the default HLQ is generated.

    Raises:
        BackupError: When backup data set exists.
        BackupError: When creation of backup data set fails.
    """
    dsn = _validate_data_set_name(dsn).upper()
    # Generate the backup name BEFORE validating it; the original validated
    # first, which would reject an empty/None bk_dsn instead of generating one.
    if not bk_dsn:
        hlq = Datasets.hlq()
        bk_dsn = Datasets.temp_name(hlq)
    bk_dsn = _validate_data_set_name(bk_dsn).upper()

    cp_rc = _copy_ds(dsn, bk_dsn)
    if cp_rc == 12:  # The data set is probably a PDS or PDSE
        # Delete allocated backup that was created when attempting to use
        # _copy_ds(). Safe to delete because _copy_ds() would have raised an
        # exception if it did not successfully create the backup data set,
        # so no risk of it predating module invocation.
        Datasets.delete(bk_dsn)
        if Datasets.move(dsn, bk_dsn) == 0:
            _allocate_model(dsn, bk_dsn)
        else:
            raise BackupError(
                "Unable to backup data set {0} to {1}".format(dsn, bk_dsn))
def _uncatalog_vsam_data_set(name):
    """Uncatalog a VSAM data set.

    Arguments:
        name {str} -- The name of the data set to uncatalog.

    Raises:
        DatasetUncatalogError: When uncataloging fails.
    """
    idcams_input = VSAM_UNCATALOG_COMMAND.format(name)
    temp_data_set_name = None
    try:
        temp_data_set_name = _create_temp_data_set(name.split(".")[0])
        _write_data_set(temp_data_set_name, idcams_input)
        dd_statements = [
            types.DDStatement(ddName="sysin", dataset=temp_data_set_name),
            types.DDStatement(ddName="sysprint", dataset="*"),
        ]
        rc = MVSCmd.execute_authorized(pgm="idcams", args="", dds=dd_statements)
        if rc != 0:
            raise DatasetUncatalogError(name, rc)
    finally:
        # Guard the cleanup: if _create_temp_data_set() itself raised, the
        # original code hit a NameError here, masking the real exception.
        if temp_data_set_name:
            Datasets.delete(temp_data_set_name)
    return
def _copy_ds(ds, bk_ds):
    """Copy the contents of a data set to another via IDCAMS REPRO.

    Arguments:
        ds {str} -- The source data set to be copied from. Should be SEQ or VSAM.
        bk_ds {str} -- The destination data set to copy to.

    Returns:
        int -- The REPRO return code: 0 on success, or 12 when the source is
            likely a PDS/PDSE and must be backed up another way.

    Raises:
        BackupError: When copying data fails.
    """
    module = AnsibleModule(argument_spec={}, check_invalid_arguments=False)
    _allocate_model(bk_ds, ds)
    repro_cmd = """ REPRO - INDATASET({0}) - OUTDATASET({1})""".format(ds, bk_ds)
    rc, out, err = module.run_command(
        "mvscmdauth --pgm=idcams --sysprint=* --sysin=stdin", data=repro_cmd
    )
    if rc not in (0, 12):
        # REPRO failed outright: discard the backup allocated above.
        Datasets.delete(bk_ds)
        raise BackupError(
            "Unable to backup data set {0}; stdout: {1}; stderr: {2}".format(
                ds, out, err
            )
        )
    # A nonzero rc on an empty VSAM source is not a real failure.
    if rc != 0 and _vsam_empty(ds):
        rc = 0
    return rc
def _catalog_non_vsam_data_set(name, volume):
    """Catalog a non-VSAM data set.

    Arguments:
        name {str} -- The data set to catalog.
        volume {str} -- The volume the data set resides on.

    Raises:
        DatasetCatalogError: When attempt at catalog fails.
    """
    iehprogm_input = _build_non_vsam_catalog_command(name, volume)
    temp_data_set_name = None
    try:
        temp_data_set_name = _create_temp_data_set(name.split(".")[0])
        _write_data_set(temp_data_set_name, iehprogm_input)
        # NOTE(review): `module` is not defined in this function's scope —
        # presumably a module-level AnsibleModule; confirm it exists at runtime.
        rc, stdout, stderr = module.run_command(
            "mvscmdauth --pgm=iehprogm --sysprint=* --sysin={0}".format(
                temp_data_set_name
            )
        )
        if rc != 0 or "NORMAL END OF TASK RETURNED" not in stdout:
            raise DatasetCatalogError(name, volume, rc)
    finally:
        # Guard the cleanup: if _create_temp_data_set() itself raised, the
        # original code hit a NameError here, masking the real exception.
        if temp_data_set_name:
            Datasets.delete(temp_data_set_name)
    return
def _uncatalog_non_vsam_data_set(name):
    """Uncatalog a non-VSAM data set.

    Arguments:
        name {str} -- The name of the data set to uncatalog.

    Raises:
        DatasetUncatalogError: When uncataloging fails.
    """
    iehprogm_input = NON_VSAM_UNCATALOG_COMMAND.format(name)
    temp_data_set_name = None
    try:
        temp_data_set_name = _create_temp_data_set(name.split(".")[0])
        _write_data_set(temp_data_set_name, iehprogm_input)
        # NOTE(review): `module` is not defined in this function's scope —
        # presumably a module-level AnsibleModule; confirm it exists at runtime.
        rc, stdout, stderr = module.run_command(
            "mvscmdauth --pgm=iehprogm --sysprint=* --sysin={0}".format(
                temp_data_set_name
            )
        )
        if rc != 0 or "NORMAL END OF TASK RETURNED" not in stdout:
            raise DatasetUncatalogError(name, rc)
    finally:
        # Guard the cleanup: if _create_temp_data_set() itself raised, the
        # original code hit a NameError here, masking the real exception.
        if temp_data_set_name:
            Datasets.delete(temp_data_set_name)
    return
def _catalog_vsam_data_set(name, volume):
    """Catalog a VSAM data set.

    Arguments:
        name {str} -- The data set to catalog.
        volume {str} -- The volume the data set resides on.

    Raises:
        DatasetCatalogError: When attempt at catalog fails.
    """
    data_set_name = name.upper()
    data_set_volume = volume.upper()
    success = False
    temp_data_set_name = None
    try:
        temp_data_set_name = _create_temp_data_set(name.split(".")[0])
        command_rc = 0
        # The VSAM organization is not known up front, so try each candidate
        # type in turn until one DEFINE succeeds.
        for data_set_type in ["", "LINEAR", "INDEXED", "NONINDEXED", "NUMBERED"]:
            if data_set_type != "INDEXED":
                command = VSAM_CATALOG_COMMAND_NOT_INDEXED.format(
                    data_set_name, data_set_volume, data_set_type
                )
            else:
                command = VSAM_CATALOG_COMMAND_INDEXED.format(
                    data_set_name, data_set_volume, data_set_type
                )
            _write_data_set(temp_data_set_name, command)
            dd_statements = [
                types.DDStatement(ddName="sysin", dataset=temp_data_set_name),
                types.DDStatement(ddName="sysprint", dataset="*"),
            ]
            command_rc = MVSCmd.execute_authorized(
                pgm="idcams", args="", dds=dd_statements
            )
            if command_rc == 0:
                success = True
                break
        if not success:
            raise DatasetCatalogError(
                name,
                volume,
                command_rc,
                "Attempt to catalog VSAM data set failed.",
            )
    finally:
        # Guard the cleanup: if _create_temp_data_set() itself raised, the
        # original code hit a NameError here, masking the real exception.
        if temp_data_set_name:
            Datasets.delete(temp_data_set_name)
    return
def _delete_data_set(name):
    """A wrapper around zoautil_py Dataset.delete() to raise exceptions on failure.

    Arguments:
        name {str} -- The name of the data set to delete.

    Raises:
        DatasetDeleteError: When data set deletion fails.
    """
    return_code = Datasets.delete(name)
    if return_code > 0:
        raise DatasetDeleteError(name, return_code)
def _fetch_vsam(self, src, is_binary, encoding=None):
    """Copy the contents of a VSAM to a sequential data set,
    then copy that data set to a USS file and return its path.
    """
    staging_ds = self._copy_vsam_to_temp_data_set(src)
    file_path = self._fetch_mvs_data(staging_ds, is_binary, encoding)
    delete_rc = Datasets.delete(staging_ds)
    if delete_rc != 0:
        # Cleanup failed: remove the fetched file before failing the module.
        os.remove(file_path)
        self._fail_json(
            msg="Unable to delete temporary data set {0}".format(staging_ds),
            rc=delete_rc,
        )
    return file_path
def mvs_file_backup(dsn, bk_dsn=None):
    """Create a backup data set for an MVS data set.

    Arguments:
        dsn {str} -- The name of the data set to backup.
            It could be an MVS PS/PDS/PDSE/VSAM(KSDS), etc.
        bk_dsn {str} -- The name of the backup data set; generated when None.

    Returns:
        str -- The name of the backup data set.

    Raises:
        BackupError: When backup data set exists.
        BackupError: When creation of backup data set fails.
    """
    dsn = _validate_data_set_name(dsn).upper()

    # Member backup: copy the member into another member of the same PDS/E.
    if is_member(dsn):
        if not bk_dsn:
            bk_dsn = "{0}({1})".format(extract_dsname(dsn), temp_member_name())
        bk_dsn = _validate_data_set_name(bk_dsn).upper()
        if Datasets.copy(dsn, bk_dsn) != 0:
            raise BackupError("Unable to backup {0} to {1}".format(dsn, bk_dsn))
        return bk_dsn

    # Whole-data-set backup.
    if not bk_dsn:
        bk_dsn = Datasets.temp_name(Datasets.hlq())
    bk_dsn = _validate_data_set_name(bk_dsn).upper()
    cp_rc = _copy_ds(dsn, bk_dsn)
    if cp_rc == 12:  # The data set is probably a PDS or PDSE
        # Delete allocated backup that was created when attempting to use
        # _copy_ds(). Safe to delete because _copy_ds() would have raised an
        # exception if it did not successfully create the backup data set,
        # so no risk of it predating module invocation.
        Datasets.delete(bk_dsn)
        _allocate_model(bk_dsn, dsn)
        rc, out, err = _copy_pds(dsn, bk_dsn)
        if rc != 0:
            raise BackupError(
                "Unable to backup data set {0} to {1}".format(dsn, bk_dsn))
    return bk_dsn
def _copy_vsam_to_temp_data_set(self, ds_name):
    """Copy VSAM data set to a temporary sequential data set.

    Arguments:
        ds_name {str} -- The VSAM data set to copy.

    Returns:
        str -- The name of the temporary sequential data set holding the copy.
    """
    mvs_rc = 0
    vsam_size = self._get_vsam_size(ds_name)
    sysprint = sysin = out_ds_name = None
    try:
        sysin = data_set.DataSet.create_temp("MVSTMP")
        sysprint = data_set.DataSet.create_temp("MVSTMP")
        out_ds_name = data_set.DataSet.create_temp(
            "MSVTMP", space_primary=vsam_size, space_type="K"
        )
        repro_sysin = " REPRO INFILE(INPUT) OUTFILE(OUTPUT) "
        Datasets.write(sysin, repro_sysin)
        dd_statements = [
            types.DDStatement(ddName="sysin", dataset=sysin),
            types.DDStatement(ddName="input", dataset=ds_name),
            types.DDStatement(ddName="output", dataset=out_ds_name),
            types.DDStatement(ddName="sysprint", dataset=sysprint),
        ]
        mvs_rc = MVSCmd.execute_authorized(
            pgm="idcams", args="", dds=dd_statements
        )
    except OSError as err:
        self._fail_json(msg=str(err))
    except Exception as err:
        # Guard: out_ds_name may still be None if create_temp() raised.
        if out_ds_name and Datasets.exists(out_ds_name):
            Datasets.delete(out_ds_name)
        if mvs_rc != 0:
            self._fail_json(
                msg=(
                    "Non-zero return code received while executing MVSCmd "
                    "to copy VSAM data set {0}".format(ds_name)
                ),
                rc=mvs_rc,
            )
        self._fail_json(
            msg=(
                "Failed to call IDCAMS to copy VSAM data set {0} to a temporary"
                " sequential data set".format(ds_name)
            ),
            stderr=str(err),
            rc=mvs_rc,
        )
    finally:
        # Only delete data sets that were actually created; the original
        # unconditionally called Datasets.delete(None) when creation failed.
        if sysprint:
            Datasets.delete(sysprint)
        if sysin:
            Datasets.delete(sysin)
    return out_ds_name
def delete_data_set(name):
    """A wrapper around zoautil_py data set delete to raise exceptions on failure.

    Arguments:
        name {str} -- The name of the data set to delete.

    Raises:
        DatasetDeleteError: When data set deletion fails.
    """
    delete_rc = Datasets.delete(name)
    if delete_rc > 0:
        raise DatasetDeleteError(name, delete_rc)
def mvs_convert_encoding(self, src, dest, from_code, to_code, src_type=None, dest_type=None):
    """Convert the encoding of the data from
       1) USS to MVS(PS, PDS/E VSAM)
       2) MVS to USS
       3) MVS to MVS

    Arguments:
        src: {str} -- The input MVS data set or USS path to be converted
        dest: {str} -- The output MVS data set or USS path to be converted
        from_code: {str} -- The source code set of the input MVS data set
        to_code: {str} -- The destination code set of the output MVS data set

    Keyword Arguments:
        src_type {[type]} -- The input MVS data set or type: PS, PDS, PDSE, VSAM(KSDS) (default: {None})
        dest_type {[type]} -- The output MVS data set type (default: {None})

    Returns:
        boolean -- Indicate whether the conversion is successful or not
    """
    src = self._validate_data_set_or_path(src)
    dest = self._validate_data_set_or_path(dest)
    from_code = self._validate_encoding(from_code)
    to_code = self._validate_encoding(to_code)
    convert_rc = False
    temp_ps = None
    temp_src = src
    temp_dest = dest
    try:
        # Stage the MVS source into a USS temp file/directory.
        if src_type == "PS":
            temp_src_fo = NamedTemporaryFile()
            temp_src = temp_src_fo.name
            rc, out, err = copy.copy_ps2uss(src, temp_src)
        if src_type == "PO":
            temp_src_fo = TemporaryDirectory()
            temp_src = temp_src_fo.name
            rc, out, err = copy.copy_pds2uss(src, temp_src)
        if src_type == "VSAM":
            # VSAM is staged through a sequential data set first.
            reclen, space_u = self.listdsi_data_set(src.upper())
            temp_ps = self.temp_data_set(reclen, space_u)
            rc, out, err = copy.copy_vsam_ps(src.upper(), temp_ps)
            temp_src_fo = NamedTemporaryFile()
            temp_src = temp_src_fo.name
            rc, out, err = copy.copy_ps2uss(temp_ps, temp_src)
        # Prepare a USS temp destination when the target is an MVS data set.
        if dest_type == "PS" or dest_type == "VSAM":
            temp_dest_fo = NamedTemporaryFile()
            temp_dest = temp_dest_fo.name
        if dest_type == "PO":
            temp_dest_fo = TemporaryDirectory()
            temp_dest = temp_dest_fo.name
        rc = self.uss_convert_encoding_prev(temp_src, temp_dest, from_code, to_code)
        if rc:
            if not dest_type:
                # Destination is a USS path: conversion wrote it directly.
                convert_rc = True
            elif dest_type == "VSAM":
                reclen, space_u = self.listdsi_data_set(dest.upper())
                temp_ps = self.temp_data_set(reclen, space_u)
                rc, out, err = copy.copy_uss2mvs(temp_dest, temp_ps, "PS")
                rc, out, err = copy.copy_vsam_ps(temp_ps, dest.upper())
                convert_rc = True
            elif dest_type == "PO":
                # Renamed from `dir`/`file` to avoid shadowing builtins.
                for dir_name, subdirs, files in walk(temp_dest):
                    for file_name in files:
                        temp_file = path.join(dir_name, file_name)
                        rc, out, err = copy.copy_uss2mvs(temp_file, dest, "PO")
                        convert_rc = True
            else:
                rc, out, err = copy.copy_uss2mvs(temp_dest, dest, dest_type)
                convert_rc = True
    finally:
        # USS temp files/dirs clean themselves up; the staging PS does not.
        if temp_ps:
            Datasets.delete(temp_ps)
    return convert_rc
# Import the Z Open Automation Utilities libraries we need
from zoautil_py import MVSCmd, Datasets
from zoautil_py.types import DDStatement
# Import datetime, needed so we can format the report
from datetime import datetime
# Import os, needed to get the environment variables
import os

# Take the contents of this data set and read it into cc_contents
cc_contents = Datasets.read("MTM2020.PUBLIC.CUST16")
USERID = os.getenv("USER")
output_dataset = USERID + ".OUTPUT.CCINVALD"
# Delete the output dataset if it already exists
if Datasets.exists(output_dataset):
    Datasets.delete(output_dataset)
# Create a new SEQUENTIAL DATA SET with the name of output_dataset
Datasets.create(output_dataset, "SEQ")


def is_even(num_to_check):
    """Return True if num_to_check is even, False otherwise.

    Bug fix: the original set result = False on the odd branch but never
    returned it, so odd numbers yielded None instead of False.
    """
    return (num_to_check % 2) == 0
logging.config.fileConfig('logging.conf')

# DD statements wiring the input, control, and output data sets for MVSCmd.
dd_statements = [
    DDStatement(ddName="sortin01", dataset="USR.MVSCMD.DFSORT.MASTER"),
    DDStatement(ddName="sortin02", dataset="USR.MVSCMD.DFSORT.NEW"),
    DDStatement(ddName="sysin", dataset="USR.MVSCMD.DFSORT.CMD"),
    DDStatement(ddName="sortout", dataset="USR.MVSCMD.DFSORT.MERGE"),
    DDStatement(ddName="sysout", dataset="*"),
]

# Remove any leftovers from a previous run, then allocate fresh data sets.
Datasets.delete("USR.MVSCMD.DFSORT.*")
for ds_name in ("USR.MVSCMD.DFSORT.MASTER",
                "USR.MVSCMD.DFSORT.NEW",
                "USR.MVSCMD.DFSORT.CMD",
                "USR.MVSCMD.DFSORT.MERGE"):
    Datasets.create(ds_name, type="SEQ")

# Write the DFSORT merge control statement.
Datasets.write("USR.MVSCMD.DFSORT.CMD", " MERGE FORMAT=CH,FIELDS=(1,9,A)")

# Seed the master data set with example records.
Datasets.write("USR.MVSCMD.DFSORT.MASTER", "Chang Joe 278 232 6043")
Datasets.write("USR.MVSCMD.DFSORT.MASTER", "DeBeer Jo 348 132 6023", append=True)