def setup_module():
    # enable generic search for test_cics_filter_by_transaction
    z = zOSMFConnector(hostname, username, password)
    user_profile = f"{rtes[rte]['rte_hlq']}.UKOBDATF({username})"
    member_text = z.read_ds(user_profile)
    text_to_add = 'SET KCP_GENERIC_FIND=YES'
    if text_to_add not in member_text:
        member_text = member_text.replace('<CUADATA>', f'{text_to_add}\n<CUADATA>')
        z.write_ds(user_profile, member_text)
def wait_job_to_finish(hostname, username, password, job):
    z = zOSMFConnector(hostname, username, password)
    maxtries = 10
    tries = 1
    while is_job_active(z.list_jobs(prefix=job, owner='*')) and tries < maxtries:
        time.sleep(30)
        tries += 1
    assert tries < maxtries, f'{job} did not stop'
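# Usage sketch for wait_job_to_finish (hypothetical host and started-task names; the helper
# above polls every 30 seconds and raises AssertionError after 10 unsuccessful tries):
def _example_wait_for_stc_to_stop():
    wait_job_to_finish('rsd4', username, password, 'ITE4M2RC')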
def __init__(self, username: str, password: str, hlq: str, rte: str, is_new: bool = False,
             rte_model: str = '', rte_products: List[str] = None, rte_type: int = None):
    """
    :param username: TSO username
    :param password: TSO password
    :param hlq: hlq of the csi, e.g. itm.itd
    :param rte: rte name
    :param is_new: new or old
    :param rte_model: model rte name
    :param rte_products: product codes to configure in the rte
    :param rte_type: rte type, defaults to rtes[rte]['type']
    """
    self.username = username.upper()
    self.password = password
    self.hlq = hlq.upper()
    self.rte = rte.upper()
    self.is_new = is_new
    self.original_config = ''
    self.variables_config = ''
    # fall back to the rte definition when no explicit type is given
    self.rte_type = rte_type if rte_type is not None else rtes[rte]['type']
    self.time = datetime.datetime.now().strftime('%Y/%m/%d')
    self.z = zOSMFConnector(self.exec_host, username, password)
    self.jobname = None
    self.step_results: Dict[str, Dict[str, int]] = {}
    self.ispf = None
    self.d = None
    self.all_jobs = set()
    self.changed_parameters = []
    self.rte_model = rte_model
    self.middle_hlq = self.hlq.split('.')[1]
    if rte_products is None:
        self.rte_products = []
    else:
        self.rte_products = rte_products
    self.CONFIG_FOR_PRODUCTS = {
        'KC5': self.update_config_for_kc5,
        'KDS': self.update_config_for_kds,
        'KD5': self.update_config_for_kd5,
        'KGW': self.update_config_for_kgw,
        'KI5': self.update_config_for_ki5,
        'KJJ': self.update_config_for_kjj,
        'KMQ': self.update_config_for_kmq,
        'KM5': self.update_config_for_km5,
        'KN3': self.update_config_for_kn3,
        'KOB': self.update_config_for_kob,
        'KQI': self.update_config_for_kqi,
        'KS3': self.update_config_for_ks3,
    }
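# Sketch of how the CONFIG_FOR_PRODUCTS dispatch table could be driven from rte_products
# (hypothetical method name; the real class may call the per-product updaters differently):
def update_config_for_products(self):
    # run the config updater for every product code requested for this rte,
    # skipping codes that have no registered updater
    for product in self.rte_products:
        updater = self.CONFIG_FOR_PRODUCTS.get(product.upper())
        if updater is not None:
            updater()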
def upload_member_in_ds(hostname, username, password, datasets: List, data: str = ''):
    z = zOSMFConnector(hostname, username, password)
    for dataset in datasets:
        try:
            z.read_ds(dataset)
        except Exception:
            # the member does not exist yet, create it with the provided data
            z.write_ds(data=data, dataset=dataset)
def set_security(hostname, username, password, rte_hlq, rte_name, omegamon, members_dir,
                 jobs_to_submit: Tuple, stc_job: str = None, delay=30):
    # stop the server
    z1 = JESAdapter(hostname, username, password)
    if stc_job is not None:
        z1.submit_jcl(text=stop_start.replace('#CMD#', 'P').replace('#JOB#', stc_job))
    z = zOSMFConnector(hostname, username, password)
    source_dir = os.path.join(root, 'resources', 'members', rte_name, members_dir, omegamon)
    dest_hlq = rte_hlq
    for dir_name in listdir(source_dir):
        dir_path = os.path.join(source_dir, dir_name)
        if os.path.isdir(dir_path):
            for member in listdir(dir_path):
                with open(os.path.join(dir_path, member), 'r') as member_in_dataset:
                    text_in_member = member_in_dataset.read()
                proclib = 'ROCKET.USER.PROCLIB'
                clist = 'ITM.ITE.QA.CLIST'
                if dir_name.upper() == 'PROCLIB':
                    print(f'uploading to {proclib}({member})')
                    z.write_ds(dataset=f'{proclib}({member})', data=text_in_member)
                elif dir_name.upper() == 'CLIST':
                    print(f'uploading to {clist}({member})')
                    z.write_ds(dataset=f'{clist}({member})', data=text_in_member)
                else:
                    print(f'uploading to {dest_hlq}.{dir_name}({member})')
                    z.write_ds(dataset=f'{dest_hlq}.{dir_name}({member})', data=text_in_member)
    for job in jobs_to_submit:
        z1.submit_jcl(f'{dest_hlq}.RKANSAMU({job})')
    if stc_job is not None:
        wait_job_to_finish(hostname, username, password, stc_job)
        z1.submit_jcl(text=stop_start.replace('#CMD#', 'S').replace('#JOB#', stc_job))
        time.sleep(delay)
def delete_libs(hostname, username, password, src: str, dst: str = 'ITM.ITE.DEV.PROCLIB',
                member_mask: str = None, members: Union[str, List[str]] = None,
                libraries: Union[str, List[str]] = None):
    """
    Delete developers libraries from jobs in proclib.

    :param hostname: FTP host name
    :param username: userid
    :param password: password
    :param src: main proclib
    :param dst: developers itm proclib
    :param member_mask: pattern for jobs from the proclib
    :param members: list of members or string of member
    :param libraries: list of libraries or string of library
    :return: nothing
    """
    z = zOSMFConnector(hostname, username, password)
    if member_mask is not None:
        list_job = z.list(src, member_pattern=member_mask)
    elif isinstance(members, str):
        list_job = [members]
    else:
        list_job = members
    if isinstance(libraries, str):
        libs = [libraries]
    else:
        libs = libraries
    for job in list_job:
        data = z.read_ds(f'{dst}({job})')
        count = 0
        for library in libs:
            if library in data:
                if 'APF' not in job:
                    data = data.replace(f'// DSN={library}\n// DD DISP=SHR,\n', '')
                else:
                    data = data.replace(f'// SETPROG APF,ADD,\n// DSNAME={library},SMS\n', '')
                count += 1
        if count > 0:
            z.write_ds(f'{dst}({job})', data)
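# Usage sketch for delete_libs (hypothetical library names and member mask): remove a
# developer TKANMOD library from every matching started task in the dev proclib, selecting
# the members by mask instead of an explicit member list.
def _example_delete_dev_libs():
    delete_libs(hostname, username, password,
                src='ROCKET.USER.PROCLIB',
                member_mask='ITE*',
                libraries='ITM.DEV.TKANMOD')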
def rte_setup(request):
    rte = request.param
    omegamon = 'zos'
    hostname = rtes[rte]['hostname']
    # for these tests we don't need the STC, but we start it just to make sure the libs are APF authorized
    utils.set_security(hostname, username, password, rtes[rte]['rte_hlq'], rte, omegamon,
                       'basic', ('KOMSUPD', 'KOMRACFA'), f'{rte}M2RC')
    # prepare ispf logon:
    # copy KOMSPF, KOMSPFU, KOMSPFSC, KOMSPFSI, KOMSPFSX for the new ispf logon
    # from *.RKANSAMU to ITM.ITE.QA.CLIST
    z = zOSMFConnector(hostname, username, password)
    for mem in ('KOMSPF', 'KOMSPFU', 'KOMSPFSC', 'KOMSPFSI', 'KOMSPFSX'):
        read_mem = z.read_ds(f"{rtes[rte]['rte_hlq']}.RKANSAMU({mem})")
        z.write_ds(f'ITM.ITE.QA.CLIST({mem})', read_mem)
    return rte
from taf.zos.zosmflib import zOSMFConnector
from libs.creds import *

# This is needed for TSO/ISPF tests after a reIPL
z = zOSMFConnector('rsd1', username, password)
z.issue_command('VARY NET,ACT,ID=KOBVT1AP')
def test_compare_rte(rte_name):
    jes_adapter = JESAdapter(hostname, username, password)
    zosmf = zOSMFConnector(hostname, username, password)
    datasets_back = zosmf.list_ds(ds_mask=f'ITM.PARM.ARCH.{rte_name.upper()}.*')
    last_gdg = []
    ps_ds_back = []
    exclude_ds = {}
    old_ds = {}
    datasets_new = [
        dataset for dataset in
        zosmf.list_ds(ds_mask=f'{rtes[rte_name]["rte_hlq"]}.R*') +
        zosmf.list_ds(ds_mask=f'{rtes[rte_name]["rte_hlq"]}.W*') +
        zosmf.list_ds(ds_mask=f'{rtes[rte_name]["rte_hlq"][:-4]}BASE.R*')
    ]
    # find VSAM datasets and collect their names
    vsam_ds_new = [
        re.findall(r'(?<=[\w+].)\w+', datasets_new.pop(datasets_new.index(dataset) - 1))[-1]
        for dataset in datasets_new
        if re.findall(r'(?<=[\w+].)\w+', dataset)[-1] == 'DATA'
    ]
    vsam_ds_back = [
        re.findall(r'(?<=[\w+].)\w+', ds)[-2]
        for ds in list(zosmf.list_ds(ds_mask=f'ITM.PARM.ARCH.{rte_name.upper()}.*.VSAM'))
    ]
    for dataset in datasets_back:
        if dataset == re.findall(r'\w+.\w+.\w+.\w+.\w+', dataset)[0] and dataset not in vsam_ds_back:
            try:
                last_gdg.append(zosmf.list_ds(ds_mask=f'{dataset}(0)')[0])
            except Exception:
                logging.error(f'Generation not found for {dataset}')
    gdg_name = re.findall(r'(?<=[\w+].)\w+', last_gdg[0])[-1]
    for dataset in last_gdg:
        members = zosmf.list(dataset=dataset)
        # check datasets for PO and PS
        if members:
            old_ds.update({re.findall(r'(?<=[\w+].)\w+', dataset)[-2]: members})
        else:
            ps_ds_back.append(re.findall(r'(?<=[\w+].)\w+', dataset)[-2])
    for dataset in datasets_new:
        # check all datasets except VSAM
        if dataset == re.findall(r'\w+.\w+.\w+.\w+', dataset)[0]:
            members = zosmf.list(dataset=dataset)
            name = re.findall(r'(?<=[\w+].)\w+', dataset)[-1]
            if members:
                try:
                    diff = list(set(members) - set(old_ds[name]))
                    if diff:
                        show_new_datasets(dataset, diff, 'PO')
                    diff = list(set(old_ds[name]) - set(members))
                    if diff:
                        exclude_ds.update({dataset: diff})
                    res = jes_adapter.submit_jcl(text=jcl_compare_word.replace(
                        '#OLDDS#', f'ITM.PARM.ARCH.{rte_name.upper()}.{name}.{gdg_name}').replace(
                        '#NEWDS#', f'{dataset}'))
                    if res.rc != '0000':
                        allure.attach(
                            jes_adapter.get_job_output(job_name=res.jobname, job_id=res.jobid,
                                                       utf_8_errors='ignore'),
                            name, allure.attachment_type.TEXT)
                except KeyError:
                    show_new_datasets(dataset, members, 'PO')
            else:
                # check for new datasets added after the RTE update
                if name not in ps_ds_back:
                    show_new_datasets(dataset, '', 'PS')
    for member in set(vsam_ds_new) - set(vsam_ds_back):
        show_new_datasets(f'{rtes[rte_name]["rte_hlq"].upper()}.{member}', '', 'VSAM')
    for member in set(vsam_ds_back) - set(vsam_ds_new):
        exclude_ds.update({f'{rtes[rte_name]["rte_hlq"].upper()}.{member} -- VSAM': ''})
    if exclude_ds:
        show_exclude_datasets(
            ',\n '.join("Dataset: {!s} \n Members: {!r}".format(ds, memb)
                        for (ds, memb) in exclude_ds.items()), rte_name)
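# A minimal sketch of what the jcl_compare_word template could look like, assuming it drives
# the SuperC compare utility (ISRSUPC) with a word-level compare; the real template, job card
# and compare options live elsewhere and may differ. test_compare_rte only relies on the
# #OLDDS#/#NEWDS# placeholders being substituted before submission.
_example_jcl_compare_word = """//COMPARE  JOB (ACCT),'RTE COMPARE',CLASS=A,MSGCLASS=X
//STEP1    EXEC PGM=ISRSUPC,PARM=(DELTAL,WORDCMP)
//NEWDD    DD DISP=SHR,DSN=#NEWDS#
//OLDDD    DD DISP=SHR,DSN=#OLDDS#
//OUTDD    DD SYSOUT=*
"""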
logger = logging.getLogger(__name__)
logging.getLogger().setLevel(logging.INFO)

jubula_agent = 'localhost'
host_teps = 'WALDEVITMZQA03'
hostname = 'RSD4'
plex = 'RSPLEXL4'
rte = 'ite4'
tep = None
jubula_agent = os.environ.get('jubula_agent', jubula_agent) + ':60000'
hostname = os.environ.get('hostname', hostname)
plex = os.environ.get('plex', plex).upper()
z = zOSMFConnector(hostname, username, password)
# we need the base username (not ending with 'a') as it has authorization for the $KOBSEC class
username = os.environ.get('mf_user', username[:-1])
password = os.environ.get('mf_password', password)
host_teps = os.environ.get('host_teps', host_teps)
tems = rtes[rte]['tems'].upper()
clean_racf_cmds = [
    f'RDELETE $KOBSEC ({tems}.KGLUMAP.*)',
    f'RDELETE $KOBSEC ({tems}.KGLUMAP.TS5813)',
    f'RDELETE $KOBSEC ({tems}.KGLUMAP.TS*)',
    f'RDELETE $KOBSEC ({tems}.KGLCMAP.*)',
    f'RDELETE $KOBSEC ({tems}.KGLCMAP.%LU)',
    f'rdelete $kobsec ({tems}.KDS_VALIDATE)',
def update_stcs(hostname, username, password, src: str, dst: str = 'ITM.ITE.DEV.PROCLIB',
                member_mask: str = None, members: Union[str, List[str]] = None,
                libraries: Union[str, List[str]] = None):
    """
    Load developers libraries in jobs from proclib.

    :param hostname: FTP host name
    :param username: userid
    :param password: password
    :param src: source library
    :param dst: destination library
    :param member_mask: pattern for jobs from the src lib to update
    :param members: list of members or string of member to update
    :param libraries: list of libraries or string of library to add
    :return: nothing
    """
    z = zOSMFConnector(hostname, username, password)
    zftp = ZFTP(hostname, username, password)
    if member_mask is not None:
        list_job = z.list(src, member_pattern=member_mask)
    elif isinstance(members, str):
        list_job = [members]
    else:
        list_job = members
    if isinstance(libraries, str):
        libs = [libraries]
    else:
        libs = libraries
    for job in list_job:
        data = z.read_ds(f'{src}({job})')
        for library in libs:
            if library not in data:
                name_lib = re.findall(r'(?<=\.\w)(\w+)(?!\.)', library)[-1]
                # only patch members that still reference the base library
                if f'&BASEHLEV.{name_lib}' in data:
                    if 'APF' in job:
                        entries = re.findall(rf'//\s+DSNAME=&BASEHLEV.{name_lib},', data)
                        start = 0
                        for entry in entries:
                            index_start = re.search(entry, data[start:]).start() + start
                            data = (data[:index_start] +
                                    f'// DSNAME={library},SMS\n// SETPROG APF,ADD,\n' +
                                    data[index_start:])
                            start = re.search(entry, data).end()
                    else:
                        entries = re.findall(rf'//\s+DSN=&BASEHLEV.{name_lib}\s', data)
                        start = 0
                        for entry in entries:
                            index_start = re.search(entry, data[start:]).start() + start
                            data = (data[:index_start] +
                                    f'// DSN={library}\n// DD DISP=SHR,\n' +
                                    data[index_start:])
                            start = re.search(entry, data).end()
        count = 0
        for library in libs:
            if library in data:
                count += 1
                logger.info(f"{library} added to {job}")
        if count == 0:
            logger.info(f"{job} doesn't have libraries")
        elif count > 0:
            logger.info(f"Writing {dst}({job})")
            zftp.upload_ds(text=data, dest=f'{dst}({job})')
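# Usage sketch for update_stcs (hypothetical library names and member mask): insert a
# developer TKANMOD build ahead of the base library in every matching started task, writing
# the patched members into the dev proclib given by dst.
def _example_update_stcs_with_dev_lib():
    update_stcs(hostname, username, password,
                src='ROCKET.USER.PROCLIB',
                dst='ITM.ITE.DEV.PROCLIB',
                member_mask='ITE*',
                libraries=['ITM.DEV.TKANMOD'])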