Example No. 1
    def _build_utility_statements(self):
        """Builds the list DDStatements that will be provided to the zos_raw execute function
        based on the user input.

        Returns:
            (list[DDStatement]): List of DDStatements
        """
        dbrc_utility_fields = []
        steplib_data_set_definitions = [
            DatasetDefinition(steplib) for steplib in self.steplib_list
        ]
        if self.dynamic_allocation_dataset:
            steplib_data_set_definitions.append(
                DatasetDefinition(self.dynamic_allocation_dataset))
        steplib = DDStatement("steplib", steplib_data_set_definitions)
        dbrc_utility_fields.append(steplib)
        self._add_utility_statement("recon1", self.recon1, dbrc_utility_fields)
        self._add_utility_statement("recon2", self.recon2, dbrc_utility_fields)
        self._add_utility_statement("recon3", self.recon3, dbrc_utility_fields)
        self._add_utility_statement("jclpds", self.genjcl_input_dataset,
                                    dbrc_utility_fields)
        self._add_utility_statement("genjcl", self.genjcl_output_dataset,
                                    dbrc_utility_fields)
        self._add_utility_statement("ims", self.dbd_lib, dbrc_utility_fields)
        dbrc_commands = StdinDefinition("\n".join(self.commands))
        sysin = DDStatement("sysin", dbrc_commands)
        sysprint = DDStatement("sysprint", StdoutDefinition())
        dbrc_utility_fields.extend([sysin, sysprint])
        return dbrc_utility_fields
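The list returned above is meant to be handed to an MVS program executor so the DBRC batch utility can read SYSIN and write SYSPRINT. The sketch below is a hedged illustration of that hand-off only: execute_mvs_program is a hypothetical stand-in for whatever helper your environment provides (for example, the MVSCmd utility in the ibm_zos_core module_utils), and "dspurx00" is the DBRC batch command utility these DD statements are built for.

def run_dbrc_commands(dbrc_module, execute_mvs_program):
    """Hedged illustration: run DSPURX00 against the DD list built above."""
    dd_list = dbrc_module._build_utility_statements()
    # SYSIN carries the DBRC commands, SYSPRINT captures the utility report.
    return execute_mvs_program(program="dspurx00", dd_statements=dd_list)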
Example No. 2
    def _build_compress_statements(self):
        """Builds the list of DDStatements that will be provided to the zos_mvs_raw to execute IEBCOPY
         based on the user input.

        Returns:
          (list[DDStatement]): List of DDStatements
        """
        iebcopy_utility_fields = []
        sysprint = DDStatement("SYSPRINT", StdoutDefinition())
        iebcopy_utility_fields.append(sysprint)
        compctl_stdin_definitions = DDStatement(
            "SYSIN", StdinDefinition("  COPY  INDD=IMSACB,OUTDD=IMSACB"))
        iebcopy_utility_fields.append(compctl_stdin_definitions)
        acblib_data_set_definitions = DDStatement(
            "IMSACB", DatasetDefinition(self.acb_lib, disposition="old"))
        iebcopy_utility_fields.append(acblib_data_set_definitions)
        return iebcopy_utility_fields
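Because INDD and OUTDD in the SYSIN control statement both name IMSACB, this DD list drives IEBCOPY as an in-place compress of the ACB library, and disposition "old" serializes access to the data set while the copy runs. For orientation, a hand-written JCL approximation of the step these three DD statements describe (not generated by this code) follows:

# Hand-written approximation of the equivalent JCL step; <acb_lib> is a placeholder.
IEBCOPY_COMPRESS_JCL = """\
//COMPRESS EXEC PGM=IEBCOPY
//SYSPRINT DD SYSOUT=*
//IMSACB   DD DSN=<acb_lib>,DISP=OLD
//SYSIN    DD *
  COPY  INDD=IMSACB,OUTDD=IMSACB
/*
"""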
Example No. 3
    def _build_acbgen_statements(self):
        """Builds the list of DDStatements that will be provided to the zos_mvs_raw to execute DFSRRC00
         based on the user input.

        Returns:
          (list[DDStatement]): List of DDStatements
        """
        acbgen_utility_fields = []
        ims_dataset_list = []
        sysprint = DDStatement("SYSPRINT", StdoutDefinition())

        acbgen_utility_fields.append(sysprint)

        if self.steplib:
            steplib_data_set_definitions = [
                DatasetDefinition(steplib) for steplib in self.steplib]
            steplib = DDStatement("STEPLIB", steplib_data_set_definitions)
            acbgen_utility_fields.append(steplib)

        # Fall back to the STEPLIB data sets when no RESLIB is provided.
        if self.reslib:
            reslib = self.reslib
        else:
            reslib = self.steplib

        if reslib:
            reslib_data_set_definitions = [
                DatasetDefinition(reslib_dataset) for reslib_dataset in reslib]
            reslib_dd_statement = DDStatement("DFSRESLB", reslib_data_set_definitions)
            acbgen_utility_fields.append(reslib_dd_statement)
        if self.psb_lib:
            for psblib in self.psb_lib:
                ims_dataset_list.append(DatasetDefinition(psblib))
        if self.dbd_lib:
            for dbdlib in self.dbd_lib:
                ims_dataset_list.append(DatasetDefinition(dbdlib))

        if ims_dataset_list:
            ims_data_set_definitions = DDStatement("IMS", ims_dataset_list)
            acbgen_utility_fields.append(ims_data_set_definitions)

        if self.acb_lib:
            acblib_data_set_definitions = DDStatement(
                "IMSACB", DatasetDefinition(self.acb_lib, disposition="old"))
            acbgen_utility_fields.append(acblib_data_set_definitions)

        commandList = []
        # Build the ACBGEN control statements and wrap them in a SYSIN DD statement
        if self.command_input:
            psb_name_str = ""
            if self.psb_name:
                psb_name_str = " " + self._split_lines_psb()
                commandList.append(psb_name_str)
            dbd_name_str = ""
            if self.dbd_name:
                dbd_name_str = " " + self._split_lines_dbd()
                commandList.append(dbd_name_str)

        command_stdin_definitions = DDStatement(
            "SYSIN", StdinDefinition("\n".join(commandList)))
        acbgen_utility_fields.append(command_stdin_definitions)

        return acbgen_utility_fields
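The SYSIN content assembled above depends on _split_lines_psb() and _split_lines_dbd(), which are not shown in this example. As a hedged illustration only (not the output of those helpers), typical ACBGEN control statements built from PSB and DBD name lists look like this:

def build_acbgen_sysin(psb_names, dbd_names):
    # Illustrative ACBGEN control statements; each line starts with a blank,
    # mirroring the leading space the code above prepends to its command strings.
    statements = []
    if psb_names:
        statements.append(" BUILD PSB=({0})".format(",".join(psb_names)))
    if dbd_names:
        statements.append(" BUILD DBD=({0})".format(",".join(dbd_names)))
    return "\n".join(statements)

# build_acbgen_sysin(["PSB1"], ["DBD1", "DBD2"]) returns:
# " BUILD PSB=(PSB1)\n BUILD DBD=(DBD1,DBD2)"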
Example No. 4
    def _constructCatalogDDStatements(self):
        dDStatementList = []
        acbDatasetList = []

        # Generate DD statements for the ACB libraries. Behavior differs depending on check_timestamp.
        if self.parsed_args.get('acb_lib') is not None:
            # Check if check_timestamp is false. If so, include all the data sets in a single DD statement.
            if self.parsed_args.get('check_timestamp') is False:
                for i in self.parsed_args.get('acb_lib'):
                    acbDataset = DatasetDefinition(i)
                    acbDatasetList.append(acbDataset)
                acbDDStatement = DDStatement("IMSACBA", acbDatasetList)
                dDStatementList.append(acbDDStatement)
            # If check_timestamp is true, generate a DD statement for each data set.
            else:
                acbCount = 1
                for i in self.parsed_args.get('acb_lib'):
                    if acbCount >= 10:
                        acbDDStatement = DDStatement(
                            "IMSACB{0}".format(acbCount), DatasetDefinition(i))
                        dDStatementList.append(acbDDStatement)
                        acbCount += 1
                    else:
                        acbDDStatement = DDStatement(
                            "IMSACB0{0}".format(acbCount),
                            DatasetDefinition(i))
                        dDStatementList.append(acbDDStatement)
                        acbCount += 1
                acbCount = 1

        if self.parsed_args.get('secondary_log_dataset') is not None:
            iefrder2DDStatement = DDStatement(
                "IEFRDER2",
                DatasetDefinition(
                    **{
                        k: v
                        for k, v in self.parsed_args.get(
                            'secondary_log_dataset').items() if v is not None
                    }))
            dDStatementList.append(iefrder2DDStatement)

        if self.parsed_args.get('modstat') is not None:
            modstatDDStatement = DDStatement(
                "MODSTAT", DatasetDefinition(self.parsed_args.get('modstat')))
            dDStatementList.append(modstatDDStatement)

        if self.parsed_args.get('bootstrap_dataset') is not None:
            bootParams = {
                "record_length": 96,
                "record_format": "FB",
                "type": "SEQ"
            }
            bootParams.update(self.parsed_args.get('bootstrap_dataset'))
            btstrDataset = DDStatement(
                "IMSDBSDS",
                DatasetDefinition(
                    **{k: v
                       for k, v in bootParams.items() if v is not None}))
            dDStatementList.append(btstrDataset)

        if self.parsed_args.get('directory_datasets') is not None:
            dirParams = {
                "block_size": 32760,
                "record_length": 0,
                "record_format": "U",
                "type": "PDSE"
            }
            directoryCount = 1
            for i in self.parsed_args.get('directory_datasets'):
                dirParams.update(i)
                if directoryCount >= 10:
                    directoryDDStatement = DDStatement(
                        "IMSD00{0}".format(directoryCount),
                        DatasetDefinition(**{
                            k: v
                            for k, v in dirParams.items() if v is not None
                        }))
                    dDStatementList.append(directoryDDStatement)
                    directoryCount = directoryCount + 1
                else:
                    directoryDDStatement = DDStatement(
                        "IMSD000{0}".format(directoryCount),
                        DatasetDefinition(**{
                            k: v
                            for k, v in dirParams.items() if v is not None
                        }))
                    dDStatementList.append(directoryDDStatement)
                    directoryCount = directoryCount + 1

        if self.parsed_args.get('temp_acb_dataset') is not None:
            tempParams = {
                "block_size": 32760,
                "record_length": 80,
                "record_format": "U",
                "type": "PDSE"
            }
            tempParams.update(self.parsed_args.get('temp_acb_dataset'))
            tempDDStatement = DDStatement(
                "IMSDG001",
                DatasetDefinition(
                    **{k: v
                       for k, v in tempParams.items() if v is not None}))
            dDStatementList.append(tempDDStatement)

        if self.parsed_args.get('directory_staging_dataset') is not None:
            stagingParams = {
                "block_size": 32760,
                "record_length": 0,
                "record_format": "U",
                "type": "PDSE"
            }
            stagingParams.update(
                self.parsed_args.get('directory_staging_dataset'))
            dirDDStatement = DDStatement(
                "IMSDSTAG",
                DatasetDefinition(**{
                    k: v
                    for k, v in stagingParams.items() if v is not None
                }))
            dDStatementList.append(dirDDStatement)

        # Add a dummy DD statement
        dummyDDStatement = DDStatement("ACBCATWK", DummyDefinition())
        dDStatementList.append(dummyDDStatement)

        # Add the SYSABEND DD statement
        if self.parsed_args.get('sysabend') is None:
            sysDefinition = StdoutDefinition()
        else:
            sysDefinition = DatasetDefinition(self.parsed_args['sysabend'])
        sysabendDDStatement = DDStatement("SYSABEND", sysDefinition)
        dDStatementList.append(sysabendDDStatement)

        controlList = []
        if self.parsed_args.get('control_statements') is not None:
            controlList = self._parse_control_statements()
            ctrlStateDDStatement = DDStatement("SYSINP",
                                               StdinDefinition(controlList))
            dDStatementList.append(ctrlStateDDStatement)

        irlm_id = ""
        irlm_flag = "N"
        if self.parsed_args.get('irlm_id') is not None:
            irlm_id = self.parsed_args.get('irlm_id')
            irlm_flag = "Y"

        dbrc = "N"
        if self.parsed_args.get("dbrc"):
            dbrc = "Y"
            if self.parsed_args.get("primary_log_dataset") is None:
                self.result[
                    'msg'] = "You must specify a primary log dataset if dbrc is set to true"
                self.result['rc'] = 1
                self.module.fail_json(**self.result)

        mode = ""
        mode_param = self.parsed_args.get('mode')
        if mode_param == 'LOAD':
            mode = 'DFSCPL00'
        elif mode_param == 'UPDATE':
            mode = "DFSCP001"
            if self.parsed_args.get("primary_log_dataset") is None:
                self.result[
                    'msg'] = "You must specify a primary log dataset in UPDATE mode"
                self.result['rc'] = 1
                self.module.fail_json(**self.result)
        elif mode_param == 'READ':
            mode = "DFSCP000"

        imsid = ""

        if self.parsed_args.get("online_batch"):
            if self.parsed_args.get("ims_id") is not None:
                imsid = self.parsed_args.get("ims_id")
                self.paramString = "BMP,DFS3PU00,DFSCP001,,,,,,,,,,,{0},,,,,,".format(
                    imsid)
            else:
                self.result[
                    'msg'] = "You must specify an ims_id when running in a BMP region (online_batch=true)"
                self.result['rc'] = 1
                self.module.fail_json(**self.result)
        else:
            if self.parsed_args.get("buffer_pool_param_dataset") is None:
                self.result[
                    'msg'] = "You must specify a buffer pool parameter dataset when running as DLI."
                self.result['rc'] = 1
                self.module.fail_json(**self.result)
            else:
                self.paramString = "DLI,DFS3PU00,{0},,,,,,,,,,,{1},{2},{3},,,,,,,,,,,'DFSDF=CAT'".format(
                    mode, dbrc, irlm_flag, irlm_id)

        self.dDStatements = self.dDStatements + dDStatementList
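The two acb_lib branches above differ only in zero padding: they produce IMSACB01 through IMSACB09 and then IMSACB10 onward. A zero-padded format spec yields the same ddnames without the count check; the loop below is an equivalent sketch (acb_lib_datasets is a stand-in for parsed_args.get('acb_lib'), and DDStatement/DatasetDefinition are the same classes used throughout these examples), not the module's actual implementation:

acb_lib_datasets = ["IMS.ACBLIB.A", "IMS.ACBLIB.B"]  # stand-in for parsed_args.get('acb_lib')
acb_dd_statements = []
for count, dataset in enumerate(acb_lib_datasets, start=1):
    dd_name = "IMSACB{0:02d}".format(count)  # IMSACB01, IMSACB02, ..., IMSACB10, ...
    acb_dd_statements.append(DDStatement(dd_name, DatasetDefinition(dataset)))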
Example No. 5
    def _constructPurgeDDStatements(self):
        dDStatementList = []

        sysinList = self._parse_sysin()
        sysInDDStatement = DDStatement("SYSIN", StdinDefinition(sysinList))
        dDStatementList.append(sysInDDStatement)

        if self.parsed_args.get("delete") is not None:
            sysut1List = self._parse_sysut1()
            sysut1DDStatement = DDStatement("SYSUT1",
                                            StdinDefinition(sysut1List))
        else:
            if self.parsed_args.get("sysut1") is not None:
                sysut1DDStatement = DDStatement(
                    "SYSUT1",
                    DatasetDefinition(
                        **{
                            k: v
                            for k, v in self.parsed_args.get('sysut1').items()
                            if v is not None
                        }))
            else:
                sysut1DDStatement = DDStatement("SYSUT1", StdoutDefinition())
        dDStatementList.append(sysut1DDStatement)

        irlm_id = ""
        irlm_flag = "N"
        if self.parsed_args.get('irlm_id') is not None:
            irlm_id = self.parsed_args.get('irlm_id')
            irlm_flag = "Y"

        dbrc = "N"
        if self.parsed_args.get("dbrc"):
            dbrc = "Y"
            if self.parsed_args.get("primary_log_dataset") is None:
                self.result[
                    'msg'] = "You must specify a primary log dataset if dbrc is set to true"
                self.result['rc'] = 1
                self.module.fail_json(**self.result)

        imsid = ""
        if self.parsed_args.get("online_batch"):
            if self.parsed_args.get("ims_id") is not None:
                imsid = self.parsed_args.get("ims_id")
                self.paramString = "BMP,DFS3PU10,DFSCP001,,,,,,,,,,,{0},,,,,,".format(
                    imsid)
            else:
                self.result[
                    'msg'] = "You must specify an ims_id when running in a BMP region (online_batch=true)"
                self.result['rc'] = 1
                self.module.fail_json(**self.result)
        else:
            if self.parsed_args.get("buffer_pool_param_dataset") is None:
                self.result[
                    'msg'] = "You must specify a buffer pool parameter dataset when running as DLI."
                self.result['rc'] = 1
                self.module.fail_json(**self.result)
            else:
                self.paramString = "DLI,DFS3PU10,DFSCP001,,,,,,,,,,,{0},{1},{2},,,,,,,,,,,'DFSDF=CAT'".format(
                    dbrc, irlm_flag, irlm_id)

        self.dDStatements = self.dDStatements + dDStatementList
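Both the catalog and purge builders repeat the same dictionary comprehension to drop unset (None) sub-options before expanding user-supplied data set parameters into DatasetDefinition keyword arguments. A hypothetical helper (not part of the original module) that captures the pattern:

def _omit_none(params):
    """Return a copy of params without keys whose value is None."""
    return {k: v for k, v in params.items() if v is not None}

# Usage, assuming the same parsed_args structure as the examples above:
#   sysut1_dd = DDStatement(
#       "SYSUT1", DatasetDefinition(**_omit_none(self.parsed_args.get('sysut1'))))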