def test_catalog_load_managed_acbs(ansible_zos_module):
    """Load the IMS catalog with managed-ACB setup enabled, then purge it.

    The load expects message DFS4533I in the job output; the purge runs
    with ``managed_acbs=True`` so the directory data sets are removed too.
    """
    hosts = ansible_zos_module

    load_catalog(
        hosts,
        psb_lib=cp.PSBLIB,
        dbd_lib=cp.DBDLIB,
        acb_lib=cp.ACBLIB,
        steplib=cp.STEPLIB,
        reslib=cp.RESLIB,
        proclib=cp.PROCLIB,
        primary_log_dataset=cp.PRIMARYLOG,
        buffer_pool_param_dataset=cp.BUFFERPOOL,
        mode=cp.LOADMODE,
        validation_msg="DFS4533I",
        control_statements={'managed_acbs': {"setup": True}},
    )

    # Clean up everything the load created.
    purge_catalog(
        hosts,
        psb_lib=cp.PSBLIB,
        dbd_lib=cp.DBDLIB,
        steplib=cp.STEPLIB,
        reslib=cp.RESLIB,
        proclib=cp.PROCLIB,
        primary_log_dataset=cp.PRIMARYLOG,
        buffer_pool_param_dataset=cp.BUFFERPOOL,
        mode=cp.PURGEMODE,
        validation_msg="",
        delete=cp.DELETES,
        managed_acbs=True,
    )
def test_catalog_load_simple(ansible_zos_module):
    """Plain catalog load (no managed ACBs) followed by a purge.

    The load expects message DFS4434I and the purge expects DFS4518I.
    """
    hosts = ansible_zos_module

    load_catalog(
        hosts,
        psb_lib=cp.PSBLIB,
        dbd_lib=cp.DBDLIB,
        acb_lib=cp.ACBLIB,
        steplib=cp.STEPLIB,
        reslib=cp.RESLIB,
        proclib=cp.PROCLIB,
        modstat=cp.MODSTAT,
        primary_log_dataset=cp.PRIMARYLOG,
        buffer_pool_param_dataset=cp.BUFFERPOOL,
        mode=cp.LOADMODE,
        validation_msg="DFS4434I",
    )

    purge_catalog(
        hosts,
        psb_lib=cp.PSBLIB,
        dbd_lib=cp.DBDLIB,
        steplib=cp.STEPLIB,
        reslib=cp.RESLIB,
        proclib=cp.PROCLIB,
        primary_log_dataset=cp.PRIMARYLOG,
        buffer_pool_param_dataset=cp.BUFFERPOOL,
        mode=cp.PURGEMODE,
        validation_msg="DFS4518I",
        sysut1=cp.SYSUT1,
        delete=cp.DELETES,
    )
def test_creation_of_temp_acb_dataset_with_managed_acbs(ansible_zos_module):
    """Verify the temporary ACB data set is created with the requested
    allocation (200 tracks) during a managed-ACB catalog load."""
    hosts = ansible_zos_module

    # Ensure the temporary ACB data set does not exist before the test.
    response = hosts.all.zos_data_set(name=cp.TEMP_ACB, state="absent")
    for result in response.contacted.values():
        assert result['message'] == ''

    temp_acb_data_set = {
        'dataset_name': cp.TEMP_ACB,
        'disposition': 'NEW',
        'normal_disposition': 'CATLG',
        'primary': 200,
        'volumes': ['222222'],
    }

    load_catalog(
        hosts,
        psb_lib=cp.PSBLIB,
        dbd_lib=cp.DBDLIB,
        acb_lib=cp.ACBLIB,
        steplib=cp.STEPLIB,
        reslib=cp.RESLIB,
        proclib=cp.PROCLIB,
        primary_log_dataset=cp.PRIMARYLOG,
        temp_acb_dataset=temp_acb_data_set,
        buffer_pool_param_dataset=cp.BUFFERPOOL,
        mode=cp.LOADMODE,
        validation_msg="DFS4533I",
        control_statements={'managed_acbs': {'setup': True}},
    )

    # `dls -s` reports the estimated size in bytes as the last token of
    # each output line; the value from the final line is the one checked.
    estimated_size_in_bytes = 0
    response = hosts.all.command("dls -s " + cp.TEMP_ACB)
    for result in response.contacted.values():
        for line in result.get("stdout_lines", []):
            tokens = line.split()
            estimated_size_in_bytes = int(tokens[-1])
    estimated_size_in_unit = bytes_to_unit(estimated_size_in_bytes, "TRK")
    assert estimated_size_in_unit == 200

    purge_catalog(
        hosts,
        psb_lib=cp.PSBLIB,
        dbd_lib=cp.DBDLIB,
        steplib=cp.STEPLIB,
        reslib=cp.RESLIB,
        proclib=cp.PROCLIB,
        primary_log_dataset=cp.PRIMARYLOG,
        buffer_pool_param_dataset=cp.BUFFERPOOL,
        mode=cp.PURGEMODE,
        validation_msg="",
        delete=cp.DELETES,
        managed_acbs=True,
    )

    # Delete the temporary ACB data set; it must have existed (changed).
    response = hosts.all.zos_data_set(name=cp.TEMP_ACB, state="absent")
    for result in response.contacted.values():
        assert result['changed'] == True
        assert result['message'] == ''
def test_catalog_update_managed_acbs_stage_and_update(ansible_zos_module):
    """Stage ACBs into the staging data set, apply them to the catalog
    directory, then purge.

    Expected messages: DFS4536I for the stage, DFS4534I for the update.
    """
    hosts = ansible_zos_module

    # Step 1: stage the ACBs (save unconditionally, clean staging first).
    load_catalog(
        hosts,
        psb_lib=cp.PSBLIB,
        dbd_lib=cp.DBDLIB,
        acb_lib=cp.ACBLIB,
        steplib=cp.STEPLIB,
        reslib=cp.RESLIB,
        proclib=cp.PROCLIB,
        primary_log_dataset=cp.PRIMARYLOG,
        buffer_pool_param_dataset=cp.BUFFERPOOL,
        mode=cp.UPDATEMODE,
        validation_msg="DFS4536I",
        control_statements={
            'managed_acbs': {
                'stage': {
                    'save_acb': "UNCOND",
                    'clean_staging_dataset': True,
                }
            }
        },
    )

    # Step 2: apply the staged ACBs to the catalog directory.
    load_catalog(
        hosts,
        psb_lib=cp.PSBLIB,
        dbd_lib=cp.DBDLIB,
        acb_lib=cp.ACBLIB,
        steplib=cp.STEPLIB,
        reslib=cp.RESLIB,
        proclib=cp.PROCLIB,
        primary_log_dataset=cp.PRIMARYLOG,
        buffer_pool_param_dataset=cp.BUFFERPOOL,
        mode=cp.UPDATEMODE,
        validation_msg="DFS4534I",
        control_statements={
            'managed_acbs': {
                'update': {
                    'replace_acb': "UNCOND",
                }
            }
        },
    )

    # Step 3: purge the catalog and its directory data sets.
    purge_catalog(
        hosts,
        psb_lib=cp.PSBLIB,
        dbd_lib=cp.DBDLIB,
        steplib=cp.STEPLIB,
        reslib=cp.RESLIB,
        proclib=cp.PROCLIB,
        primary_log_dataset=cp.PRIMARYLOG,
        buffer_pool_param_dataset=cp.BUFFERPOOL,
        mode=cp.PURGEMODE,
        validation_msg="",
        delete=cp.DELETES,
        managed_acbs=True,
    )
def test_catalog_purge_analysis(ansible_zos_module):
    """Run the purge utility in analysis-only mode.

    Analysis mode must not modify anything (``changed=False``); the job
    output is expected to contain DFS4430I.
    """
    hosts = ansible_zos_module

    purge_catalog(
        hosts,
        psb_lib=cp.PSBLIB,
        dbd_lib=cp.DBDLIB,
        steplib=cp.STEPLIB,
        reslib=cp.RESLIB,
        proclib=cp.PROCLIB,
        primary_log_dataset=cp.PRIMARYLOG,
        buffer_pool_param_dataset=cp.BUFFERPOOL,
        mode=cp.ANALYSISMODE,
        validation_msg="DFS4430I",
        sysut1=cp.SYSUT1,
        changed=False,
    )
def test_catalog_update_retention(ansible_zos_module):
    """Load the catalog, update retention criteria via an analysis-mode
    purge (non-destructive), then purge for real."""
    hosts = ansible_zos_module

    load_catalog(
        hosts,
        psb_lib=cp.PSBLIB,
        dbd_lib=cp.DBDLIB,
        acb_lib=cp.ACBLIB,
        steplib=cp.STEPLIB,
        reslib=cp.RESLIB,
        proclib=cp.PROCLIB,
        primary_log_dataset=cp.PRIMARYLOG,
        buffer_pool_param_dataset=cp.BUFFERPOOL,
        mode=cp.LOADMODE,
        validation_msg="DFS4434I",
    )

    # Analysis mode with retention criteria: generates "UPDATE DBD"
    # statements but must not change anything.
    purge_catalog(
        hosts,
        psb_lib=cp.PSBLIB,
        dbd_lib=cp.DBDLIB,
        steplib=cp.STEPLIB,
        reslib=cp.RESLIB,
        proclib=cp.PROCLIB,
        primary_log_dataset=cp.PRIMARYLOG,
        buffer_pool_param_dataset=cp.BUFFERPOOL,
        mode=cp.ANALYSISMODE,
        update_retention_criteria=cp.RETENTION,
        validation_msg="UPDATE DBD",
        sysut1=cp.SYSUT1,
        changed=False,
    )

    # Now purge for real; DFS4518I confirms the purge ran.
    purge_catalog(
        hosts,
        psb_lib=cp.PSBLIB,
        dbd_lib=cp.DBDLIB,
        steplib=cp.STEPLIB,
        reslib=cp.RESLIB,
        proclib=cp.PROCLIB,
        primary_log_dataset=cp.PRIMARYLOG,
        buffer_pool_param_dataset=cp.BUFFERPOOL,
        mode=cp.PURGEMODE,
        validation_msg="DFS4518I",
        sysut1=cp.SYSUT1,
        delete=cp.DELETES,
    )
def _assert_allocated_trks(hosts, dataset_name, expected_trks):
    """Assert *dataset_name* was allocated with *expected_trks* tracks.

    Runs ``dls -s`` on the managed nodes; the byte count is the last
    token of each output line (the final line's value is checked), and
    is converted to tracks with ``bytes_to_unit``.
    """
    estimated_size_in_bytes = 0
    response = hosts.all.command("dls -s " + dataset_name)
    for result in response.contacted.values():
        for line in result.get("stdout_lines", []):
            estimated_size_in_bytes = int(line.split()[-1])
    assert bytes_to_unit(estimated_size_in_bytes, "TRK") == expected_trks


def test_catalog_define_directory(ansible_zos_module):
    """Load the catalog while explicitly defining the directory data sets,
    verify each was allocated with the requested 200 tracks, then purge
    and delete them again."""
    hosts = ansible_zos_module

    # Delete the directory datasets first so the load has to define them.
    response = hosts.all.zos_data_set(batch=cp.DIR_BATCH)
    for result in response.contacted.values():
        assert result['message'] == ''
        if result['changed'] == False:
            # NOTE(review): cp.DIR_BATCH is passed to `name=` here but to
            # `batch=` everywhere else — confirm this fallback is intended.
            response = hosts.all.zos_data_set(
                name=cp.DIR_BATCH, state="absent", volume="SCR03")

    # Load catalog while defining the directory datasets.
    load_catalog(
        hosts,
        psb_lib=cp.PSBLIB,
        dbd_lib=cp.DBDLIB,
        acb_lib=cp.ACBLIB,
        steplib=cp.STEPLIB,
        reslib=cp.RESLIB,
        proclib=cp.PROCLIB,
        primary_log_dataset=cp.PRIMARYLOG,
        buffer_pool_param_dataset=cp.BUFFERPOOL,
        mode=cp.LOADMODE,
        validation_msg="DFS4533I",
        directory_datasets=[
            {
                'dataset_name': cp.DIR1,
                'disposition': 'NEW',
                'normal_disposition': 'CATLG',
                'primary': 200,
                'volumes': ['222222'],
            },
            {
                'dataset_name': cp.DIR2,
                'disposition': 'NEW',
                'normal_disposition': 'CATLG',
                'primary': 200,
                'volumes': ['222222'],
            },
        ],
        control_statements={'managed_acbs': {"setup": True}},
    )

    # Verify the directory datasets were created with the specified size.
    _assert_allocated_trks(hosts, cp.DIR1, 200)
    _assert_allocated_trks(hosts, cp.DIR2, 200)

    # Purge the catalog (managed_acbs=True removes the directory too).
    purge_catalog(
        hosts,
        psb_lib=cp.PSBLIB,
        dbd_lib=cp.DBDLIB,
        steplib=cp.STEPLIB,
        reslib=cp.RESLIB,
        proclib=cp.PROCLIB,
        primary_log_dataset=cp.PRIMARYLOG,
        buffer_pool_param_dataset=cp.BUFFERPOOL,
        mode=cp.PURGEMODE,
        validation_msg="",
        delete=cp.DELETES,
        managed_acbs=True,
    )

    # Finally delete the directory datasets; they must have existed.
    response = hosts.all.zos_data_set(batch=cp.DIR_BATCH)
    for result in response.contacted.values():
        assert result['changed'] == True
        assert result['message'] == ''
def test_gen_vsam_acb_stage_import(ansible_zos_module):
    """End-to-end: load the catalog, generate a VSAM DBD and PSB, build
    their ACB, stage it into the catalog, apply the staged ACBs, purge."""
    hosts = ansible_zos_module

    # Load the catalog with managed-ACB setup.
    load_catalog(
        hosts,
        psb_lib=cp.PSBLIB,
        dbd_lib=cp.DBDLIB,
        acb_lib=cp.ACBLIB,
        steplib=cp.STEPLIB,
        reslib=cp.RESLIB,
        proclib=cp.PROCLIB,
        primary_log_dataset=cp.PRIMARYLOG,
        buffer_pool_param_dataset=cp.BUFFERPOOL,
        mode=cp.LOADMODE,
        validation_msg="DFS4434I",
        control_statements={'managed_acbs': {'setup': True}},
    )

    # Generate the VSAM DBD.
    response = hosts.all.ims_dbd_gen(
        src=cp.DBDSOURCE,
        location="DATA_SET",
        replace=True,
        member_list=['DGSAM1'],
        dbd_name=None,
        dest=cp.DBDDEST,
        sys_lib=["IMSBLD.I15RTSMM.SDFSMAC", "SYS1.MACLIB"],
    )
    for result in response.contacted.values():
        assert result['changed'] == True
        assert result['rc'] == 0
        # Check for success message (if we remove return codes)
        assert result['msg'] == 'DBDGEN execution was successful.'

    # Generate the VSAM PSB.
    response = hosts.all.ims_psb_gen(
        src=cp.PSBSOURCE,
        location="DATA_SET",
        replace=True,
        member_list=['PGSAM1'],
        psb_name=None,
        dest=cp.PSBDEST,
        sys_lib=["IMSBLD.I15RTSMM.SDFSMAC", "SYS1.MACLIB"],
    )
    for result in response.contacted.values():
        assert result['changed'] == True
        assert result['rc'] == 0
        # Check for success message (if we remove return codes)
        assert result['msg'] == 'PSBGEN execution was successful.'

    # Build the ACB from the generated members.
    validate_acbgen(
        hosts,
        command_input=ac.COMMAND_INPUT_BUILD,
        psb_name=cp.PSB_NAME,
        psb_lib=cp.PSBLIB,
        dbd_lib=cp.DBDLIB,
        acb_lib=cp.ACBDEST,
        steplib=cp.STEPLIB,
        reslib=cp.RESLIB,
    )

    # Stage the new ACB into the catalog staging data set.
    load_catalog(
        hosts,
        psb_lib=cp.PSBLIB,
        dbd_lib=cp.DBDLIB,
        acb_lib=cp.ACBLIB,
        steplib=cp.STEPLIB,
        reslib=cp.RESLIB,
        proclib=cp.PROCLIB,
        primary_log_dataset=cp.PRIMARYLOG,
        buffer_pool_param_dataset=cp.BUFFERPOOL,
        mode=cp.UPDATEMODE,
        validation_msg="DFS4536I",
        control_statements={'managed_acbs': {'stage': {'save_acb': "LATEST"}}},
    )

    # Apply the staged ACBs to the catalog directory data sets.
    load_catalog(
        hosts,
        psb_lib=cp.PSBLIB,
        dbd_lib=cp.DBDLIB,
        acb_lib=cp.ACBLIB,
        steplib=cp.STEPLIB,
        reslib=cp.RESLIB,
        proclib=cp.PROCLIB,
        primary_log_dataset=cp.PRIMARYLOG,
        buffer_pool_param_dataset=cp.BUFFERPOOL,
        mode=cp.UPDATEMODE,
        validation_msg="DFS4534I",
        control_statements={'managed_acbs': {'update': {'replace_acb': "LATEST"}}},
    )

    # Purge the catalog and its directory.
    purge_catalog(
        hosts,
        psb_lib=cp.PSBLIB,
        dbd_lib=cp.DBDLIB,
        steplib=cp.STEPLIB,
        reslib=cp.RESLIB,
        proclib=cp.PROCLIB,
        primary_log_dataset=cp.PRIMARYLOG,
        buffer_pool_param_dataset=cp.BUFFERPOOL,
        mode=cp.PURGEMODE,
        validation_msg="",
        delete=cp.DELETES,
        managed_acbs=True,
    )