Example no. 1
def deploy_artifact(type, object, importcontrol):
    thisproc = 'deployArtifact'
    supporting.log(logger, logging.DEBUG, thisproc,
                   'started deploy for object >' + object + '<.')

    result = getInformaticaArtifact(object)
    if result.rc != 0:
        supporting.log(
            logger, logging.ERROR, thisproc,
            'getInformaticaArtifact failed with >' + result.message + "<.")
        return result

    if type == 'PROJECT':
        result = informatica.import_infadeveloper(
            Domain=infaSettings.targetDomain,
            Repository=infaSettings.targetModelRepository,
            Project=object,
            FilePath=generalSettings.artifactDir + "/" + object + ".xml",
            ExportRefData=infaSettings.targetExportRefData)
    elif type == 'CONTROLFILE':
        result = informatica.import_infadeveloper(
            Domain=infaSettings.targetDomain,
            Repository=infaSettings.targetModelRepository,
            Project=object,
            FilePath=generalSettings.artifactDir + "/" + object + ".xml",
            ControlFilePath=importcontrol)
    else:
        result = errorcodes.NOT_IMPLEMENTED

    return result
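
A hypothetical invocation of deploy_artifact as shown above; the project name and the check on the returned result object are illustrative assumptions, not taken from the source.

# Hypothetical usage sketch; 'DemoProject' is an invented project name.
result = deploy_artifact('PROJECT', 'DemoProject', importcontrol=None)
if result.rc != 0:
    print('Deploy failed: ' + result.message)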
Example no. 2
def outschedulerenvvars():
    thisproc = "outschedulerenvvars"
    supporting.log(logger, logging.INFO, thisproc, 'schedulerdeploylist is >' + schedulerdeploylist + "<.")
    supporting.log(logger, logging.INFO, thisproc, 'sourceschedulerdir is >' + sourceschedulerdir + "<.")
    supporting.log(logger, logging.INFO, thisproc, 'sourceschedulertypedir is >' + sourceschedulertypedir + "<.")
    supporting.log(logger, logging.INFO, thisproc, 'targetschedulerdir is >' + targetschedulerdir + "<.")
    supporting.log(logger, logging.INFO, thisproc, 'targetschedulertypedir is >' + targetschedulertypedir + "<.")
Example no. 3
def additemto_zip(zipObj, basedirectory, item, filter='*', suppress_extension='7Al!#%ˆˆ'):
    thisproc = "additemto_zip"

    supporting.log(logger, logging.DEBUG, thisproc,
                   "Adding >" + item + "< ...")

    for folderName, subfolders, filenames in os.walk(item):
        for filename in filenames:
            if filename.endswith('.' + suppress_extension):
                supporting.log(logger, logging.DEBUG, thisproc,
                               "Ignoring >" + filename + "< as it has the extension >" + suppress_extension + "<.")
            else:
                if fnmatch.fnmatch(filename, filter):
                    filePath = os.path.join(folderName, filename)
                    # Add file to zip
                    archive_name = filePath[len(basedirectory):]
                    supporting.log(logger, logging.DEBUG, thisproc,
                                   "Adding >" + filePath + "< to zip as >" + archive_name + "<.")
                    zipObj.write(filePath, archive_name)
                else:
                    supporting.log(logger, logging.DEBUG, thisproc,
                                   ">" + filename + "< was not added to zip as it does not match pattern >" + filter + "<.")

    supporting.log(logger, logging.DEBUG, thisproc,
                   "Done adding >" + item + "< ...")

    return err.OK
Example no. 4
    def runit(self, arguments):
        """Creates a scheduler artifact.
        """
        thisproc = "runit"

        args = self.parse_the_arguments(arguments)

        generalSettings.getenvvars()

        supporting.log(self.logger, logging.DEBUG, thisproc, 'Started')
        supporting.log(self.logger, logging.DEBUG, thisproc, 'logDir is >' + generalSettings.logDir + "<.")

        # Check requirements for artifact generation
        generalSettings.getenvvars()
        settings.getschedulerenvvars()
        settings.outschedulerenvvars()

        result = schedulerChecks.schedulerartifactchecks()
        if result.rc == err.IGNORE.rc:
            # deploylist is not mandatory since 2020-02-09
            supporting.log(self.logger, result.level, thisproc, 'Artifact ignored.')
            result = err.OK
        else:
            if result.rc != 0:
                supporting.log(self.logger, logging.ERROR, thisproc,
                               'Scheduler Artifact Checks failed with >' + result.message + "<.")
                supporting.exitscript(self.resultlogger, result)
            else:
                result = artifact.processList(settings.schedulerdeploylist)

        supporting.log(self.logger, logging.DEBUG, thisproc, 'Completed with return code >' + str(result.rc)
                       + '< and result code >' + result.code + "<.")
        #    supporting.writeresult(resultlogger, result)
        return result
Example no. 5
def deploy_artifact(type, object, import_control, import_filename="export"):
    thisproc = 'deploy_artifact'
    supporting.log(logger, logging.DEBUG, thisproc,
                   'started deploy for object >' + object + '<.')

    #    workspace = get_workspace()
    workspace = infaSettings.target_informatica_dir

    if type == 'PROJECT':
        result = informatica.import_infadeveloper(
            Domain=infaSettings.targetDomain,
            Repository=infaSettings.targetModelRepository,
            Project=object,
            ImportFilePath=workspace + "/" + object + "." + import_filename +
            ".xml",
            ExportRefData=infaSettings.targetExportRefData)
    elif type == 'CONTROLFILE':
        result = informatica.import_infadeveloper(
            Domain=infaSettings.targetDomain,
            Repository=infaSettings.targetModelRepository,
            ImportFilePath=workspace + "/" + object + "_" + str(entrynr) +
            "." + import_filename + ".xml",
            ControlFilePath=import_control)
    else:
        result = errorcodes.NOT_IMPLEMENTED

    return result
Example no. 6
def CheckInMutiple(**KeyWordArguments):
    """ Check in Multiple IDQ components """
    thisproc = "CheckInMultiple"
    ObjectPaths = KeyWordArguments["MultipleObjectPaths"]

    KeyWordArguments["Tool"] = "CheckIn"

    CheckInCommands = []
    for ObjectPathName in ObjectPaths:
        KeyWordArguments["ObjectPathName"] = ObjectPathName
        CheckInCommands.append(buildCommand.build(**KeyWordArguments))

    CheckInAllCommand = "\n".join(CheckInCommands)

    timebefore = datetime.datetime.now()
    output, error = executeInfacmd.execute(CheckInAllCommand)
    timeafter = datetime.datetime.now()
    duration = timeafter - timebefore

    supporting.log(
        logging.DEBUG, thisproc, "Infacmd took " + str(duration.total_seconds()) +
        " seconds to check-in " + str(len(ObjectPaths)) + " objects")

    # output, error = (CheckInAllCommand, 0)

    return (output, error)
Example no. 7
def ListCheckedOutObjects(**KeyWordArguments):
    """ List Components that are currently checked out """
    thisproc = "ListCheckedOutObjects"

    KeyWordArguments["Tool"] = "ListCheckOutObjects"
    ListCheckedOutCommand = buildCommand.build(**KeyWordArguments)
    output, error = executeInfacmd.execute(ListCheckedOutCommand)

    # The output is in the form of one object per line, with properties separated by a comma + space.
    # To filter out irrelevant lines, such as "Command successful", we keep only lines that start with "MRS_PATH="
    OutputLines = output.splitlines()
    OutputKeyValuePairLines = [
        Properties.split(", ") for Properties in OutputLines
        if Properties.startswith("MRS_PATH=")
    ]

    # ObjectsOLD = [[KVPair.split("=", 1) for KVPair in Line] for Line in OutputKeyValuePairLines]

    # Each object is a dictionary, with properties as keys
    # Since the date field has a comma in it, it's not parsed properly. For this reason we need the len == 2 filter
    # If the date is required, the parsing of the output should be adjusted
    Objects = [
        dict(
            KVPair.split("=") for KVPair in Line
            if len(KVPair.split("=")) == 2) for Line in OutputKeyValuePairLines
    ]

    supporting.log(logging.DEBUG, thisproc, output)

    return (Objects)
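
The comments above describe how the infacmd output is reduced to a list of dictionaries. The following self-contained sketch walks through that parsing step on an invented two-line output; only the MRS_PATH property appears in the function above, the other property names are assumptions.

# Parsing sketch with invented sample output; only MRS_PATH is known from
# ListCheckedOutObjects above, the other properties are assumptions.
sample_output = (
    "Command ran successfully.\n"
    "MRS_PATH=/Project_X/Folder_A/m_load_customers, CHECKED_OUT_BY=jdoe, DATE=Jan 1, 2020"
)

key_value_lines = [
    line.split(", ") for line in sample_output.splitlines()
    if line.startswith("MRS_PATH=")
]
objects = [
    dict(kv.split("=") for kv in line if len(kv.split("=")) == 2)
    for line in key_value_lines
]
print(objects)
# [{'MRS_PATH': '/Project_X/Folder_A/m_load_customers', 'CHECKED_OUT_BY': 'jdoe', 'DATE': 'Jan 1'}]
# The trailing "2020" fragment contains no "=", so the len == 2 filter drops it
# and the date is truncated, which is the caveat noted in the comments above.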
Example no. 8
def create_artifact(type, object, exportcontrol="default.ecf"):
    thisproc = 'create_artifact'
    supporting.log(
        logger, logging.DEBUG, thisproc, "Creating artifact for object >" +
        object + "< of type >" + type + "<.")

    if type == 'PROJECT':
        result = informatica.export_infadeveloper(
            Domain=infaSettings.sourceDomain,
            Repository=infaSettings.sourceModelRepository,
            Project=object,
            FilePath=generalSettings.artifactDir + "/" + object + ".xml",
            OverwriteExportFile=infaSettings.overwriteExportFile,
            ExportRefData=infaSettings.sourceExportRefData)
    elif type == 'CONTROLFILE':
        result = informatica.export_infadeveloper(
            Domain=infaSettings.sourceDomain,
            Repository=infaSettings.sourceModelRepository,
            Project=object,
            FilePath=generalSettings.artifactDir + "/" + object + ".xml",
            OverwriteExportFile=infaSettings.overwriteExportFile,
            ControlFilePath=exportcontrol)
    else:
        result = errorcodes.NOT_IMPLEMENTED

    return result
Example no. 9
def execute(command, env, pre_command=None):
    """Execute the command on the underlying system, capturing the output generated by the command."""
    thisproc = "execute"
    process = ""
    result = errorcodes.OK

    if pre_command is None:
        log(logger, logging.INFO, thisproc, "No pre_command provided.")
    else:
        log(logger, logging.INFO, thisproc,
            "pre_command is >" + pre_command + "<.")
        command = pre_command + ' && ' + command

    log(logger, logging.INFO, thisproc,
        "Executing command >" + mask_password(command) + "<.")

    pipes = subprocess.Popen(command,
                             stdout=subprocess.PIPE,
                             stderr=subprocess.PIPE,
                             shell=True,
                             env=env)
    pipes.wait()
    std_out, std_err = pipes.communicate()

    if pipes.returncode == 0:
        log(logger, logging.INFO, thisproc, std_out.decode("utf-8"))
    else:
        result = errorcodes.COMMAND_FAILED
        result.message = std_out.decode("utf-8") + std_err.decode("utf-8")
        log(logger, logging.ERROR, thisproc, result.message)

    return result
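
For comparison only: the same capture-and-check pattern written with subprocess.run (Python 3.7+). This is a generic sketch, not part of the project, and it sidesteps a subtle point in the code above: calling wait() before communicate() on a process whose output goes to PIPE can block if the command writes a large amount of output, so communicate() alone (or run()) is usually preferred.

# Generic sketch; the function name and the RuntimeError are illustrative and
# do not correspond to the project's errorcodes/result objects.
import subprocess

def run_and_capture(command, env=None, pre_command=None):
    if pre_command:
        command = pre_command + ' && ' + command
    completed = subprocess.run(command, shell=True, env=env,
                               capture_output=True, text=True)
    if completed.returncode != 0:
        # Mirror execute(): combine stdout and stderr into one failure message.
        raise RuntimeError(completed.stdout + completed.stderr)
    return completed.stdout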
Example no. 10
def getinfaenvvars_old():
    thisproc = "getinfaenvvars"
    global deploylist
    supporting.log(logger, logging.DEBUG, thisproc, 'started')

    deploylist = os.environ.get(constants.varDeveloperDeployList, constants.DEFAULT_DEVELOPER_DEPLOYLIST)

    supporting.log(logger, logging.DEBUG, thisproc, 'completed')
    return
Example no. 11
def outinfaenvvars():
    thisproc = "outinfaenvvars"
    supporting.log(logger, logging.DEBUG, thisproc, 'started')
    # no need to show source env vars when deploying
    # supporting.log(logger, logging.INFO, thisproc, constants.varSourceInfaHome + ' =>' + sourceInfaHome + "<.")
    ##
    # etc. etc.

    supporting.log(logger, logging.DEBUG, thisproc, 'completed')
    return
Example no. 12
def infadeploychecks():
    thisproc = "infadeploychecks"
    supporting.log(logger, logging.DEBUG, thisproc, 'started')

    # for now the checks for deploys and artifacts are the same
    result = infaartifactchecks()

    supporting.log(logger, logging.DEBUG, thisproc,
                   'completed with >' + str(result.rc) + "<.")
    return result
Example no. 13
def create_iar_file(**KeyWordArguments):
    thisproc = "create_iar_file"

    KeyWordArguments["Tool"] = "CreateIAR"
    create_command = buildCommand.build(**KeyWordArguments)

    supporting.log(logger, logging.INFO, thisproc,
                   "Command is >" + create_command + "<.")
    result = executeInfacmd.execute(create_command, constants.CREATEARTIFACT)

    return result
Example no. 14
def redeploy_iar_file(**KeyWordArguments):
    thisproc = "redeploy_iar_file"

    KeyWordArguments["Tool"] = "RedeployIAR"
    deploy_command = buildCommand.build(**KeyWordArguments)

    supporting.log(logger, logging.INFO, thisproc,
                   "Command is >" + deploy_command + "<.")
    result = executeInfacmd.execute(deploy_command, constants.DEPLOYARTIFACT)

    return result
Example no. 15
def set_app_privileges(**KeyWordArguments):
    thisproc = "set_app_privileges"

    KeyWordArguments["Tool"] = "AppPrivileges"
    deploy_command = buildCommand.build(**KeyWordArguments)

    supporting.log(logger, logging.INFO, thisproc,
                   "Command is >" + deploy_command + "<.")
    result = executeInfacmd.execute(deploy_command, constants.DEPLOYARTIFACT)

    return result
Example no. 16
def get_artifact(artifact_name):
    thisproc = "get_artifact"
    global workspace
    # something like this:    artifact = repositorytools.Artifact("group","demoArtifact","1.0.0","classifier","zip")
    # at the moment cicd runs in Azure DevOps and artifacts are stored within the pipeline.

    workspace = get_workspace()
    supporting.log(logger, logging.DEBUG, thisproc,
                   'workspace is >' + workspace + "<.")

    return workspace + "/" + artifact_name
Example no. 17
def export_infadeveloper(**KeyWordArguments):
    thisproc = "export_infadeveloper"

    KeyWordArguments["Tool"] = "Export"
    ExportCommand = buildCommand.build(**KeyWordArguments)

    supporting.log(logger, logging.INFO, thisproc,
                   "ExportCommand is >" + ExportCommand + "<.")
    result = executeInfacmd.execute(ExportCommand)

    return (result)
Example no. 18
def generate_zip(basedirectory, directory, zipFileName, filter='*', suppress_extension='7Al!#%ˆˆ'):
    thisproc = "generate_zip"

    supporting.log(logger, logging.DEBUG, thisproc,
                   "Creating new zip >" + zipFileName + "<...")
    # create a ZipFile object
    with ZipFile(zipFileName, 'w') as zipObj:
        result = additemto_zip(zipObj, basedirectory, directory, filter, suppress_extension)
    supporting.log(logger, logging.DEBUG, thisproc,
                   "Done. Result is: " + result.code)

    return result
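
A hypothetical call to generate_zip; the directories and the pattern below are illustrative. Because additemto_zip strips basedirectory from each file path, the archive members end up with paths relative to that base (here "scheduler/...").

# Hypothetical paths; only the generate_zip signature comes from the code above.
result = generate_zip(basedirectory="/tmp/build/",
                      directory="/tmp/build/scheduler",
                      zipFileName="/tmp/scheduler_artifact.zip",
                      filter="*.json")
print(result.code)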
Example no. 19
    def manage(self):
        RunCommand = buildCommand.build(**self.keyword_arguments)

        log(self.logger, logging.INFO, __name__, "RunCommand is >" + RunCommand + "<.")
        result = executeInfacmd.execute(RunCommand)

        if result.rc != errorcodes.OK.rc:
            oldResult = result.message
            result = self.keyword_arguments["OnError"]
            result.message = oldResult

        return (result)
Example no. 20
def processList(what, deployFile):
    this_function = "processList"
    latest_result = err.OK
    overall_result = err.OK

    supporting.log(logger, logging.DEBUG, this_function,
                   "deployfile is >" + deployFile + "<.")
    result, deploy_items = supporting.deploylist.getWorkitemList(deployFile)
    if result.rc == 0:
        if what == infaConstants.CREATEARTIFACT or what == infaConstants.DEPLOYARTIFACT:
            if what == infaConstants.CREATEARTIFACT:
                supporting.log(
                    logger, logging.DEBUG, this_function,
                    "Copying files in >" + os.path.dirname(deployFile) +
                    "< to artifact.")
                copy_files(os.path.dirname(deployFile),
                           generalSettings.artifactDir)

            # the following also executes if what = deploy artifact
            for deployEntry in deploy_items:
                latest_result = processEntry(what, deployEntry)
                if latest_result.rc != err.OK.rc:
                    overall_result = latest_result
            return overall_result
        else:
            supporting.log(logger, logging.ERROR, this_function,
                           "INTERNAL ERROR: Unexpected 'what' >" + what + "<.")
            # Assumption: report an unexpected 'what' as NOT_IMPLEMENTED so the caller gets a non-zero result.
            return errorcodes.NOT_IMPLEMENTED
    else:
        supporting.log(logger, logging.ERROR, this_function,
                       "Could not get deploylist")
        return errorcodes.FILE_NF
Example no. 21
def main(argv):
    """Deploys an Informatica Platform artifact
    Usage: deployInformaticaPlatformArtifact.py [-h]
    The module uses environment variables to steer the import on the target environment
    """
    thisproc = "MAIN"
    mainProc = 'deployInformaticaPlatformArtifact'

    resultlogger = supporting.configurelogger(mainProc)
    logger = logging.getLogger(mainProc)

    args = parse_the_arguments(argv)

    supporting.log(logger, logging.DEBUG, thisproc, 'Started')
    supporting.log(logger, logging.DEBUG, thisproc,
                   'logDir is >' + logDir + "<.")

    infaSettings.getinfaenvvars()
    infaSettings.outinfaenvvars()

    # Check requirements for artifact generation
    result = infaArtifactChecks.infadeploychecks()
    if result.rc != 0:
        supporting.log(logger, logging.ERROR, thisproc,
                       'INFA Checks failed with >' + result.message + "<.")
        supporting.exitscript(resultlogger, result)

    #    result = informatica.import_infadeveloper(infaConstants.DEPLOYARTIFACT, infaSettings.infadeploylist)

    supporting.log(
        logger, logging.DEBUG, thisproc, 'Completed with return code >' +
        str(result.rc) + '< and result code >' + result.code + "<.")
    supporting.exitscript(resultlogger, result)
Example no. 22
    def run_sqlplus(self, sqlfile):
        thisproc = "run_sqlplus"
        result = err.OK

        try:
            log(logger, logging.INFO, thisproc, "Running script >" + sqlfile + "< on database >" + self.database_connection + "<.")
            p = Popen([dbSettings.sqlplus_command, "-s", "/NOLOG"], universal_newlines=True, stdin=PIPE, stdout=PIPE,
                                stderr=PIPE)
            stdoutput = p.communicate(input=self.error_clause_sql
                                            +"\n" + self.error_clause_os
                                            +"\n" + self.spool_clause_on
                                            +"\n" + self.connect_string
                                            +"\n" + "@" + sqlfile
                                            +"\n" + self.spool_clause_off
                                            +"\n" + "exit 0")[0]
            log(logger, logging.INFO, thisproc, "SQLPlus output: " + stdoutput)
            if p.returncode == 0:
                return err.OK
            else:
                if self.on_sql_error == "ABORT":
                    err.SQLPLUS_ERROR.message = stdoutput
                    return err.SQLPLUS_ERROR
                else:
                    log(logger, logging.WARNING, thisproc, "Errors occurred but were ignored as on_sql_error is >"
                        + self.on_sql_error + "<. You may want to check >" + self.output_file + "<.")
                    return err.OK

        except FileNotFoundError as e:
            log(logger, logging.ERROR, thisproc, e.strerror + ": " + dbSettings.sqlplus_command)
            return err.SQLFILE_NF
Example no. 23
def processList(what, deployFile):
    thisproc = "processList"
    latestResult = err.OK
    supporting.log(logger, logging.DEBUG, thisproc,
                   "deployfile is >" + deployFile + "<.")
    result, deployItems = supporting.deploylist.getWorkitemList(deployFile)
    if result.rc == 0:
        for deployEntry in deployItems:
            latestResult = processEntry(what, deployEntry)
        return latestResult
    else:
        supporting.log(logger, logging.ERROR, thisproc,
                       "Could not get deploylist")
        return errorcodes.FILE_NF
Example no. 24
    def runit(self, arguments):
        """Deploys a scheduler artifact.
        """
        thisproc = "runit"

        args = self.parse_the_arguments(arguments)

        generalSettings.getenvvars()

        supporting.log(self.logger, logging.DEBUG, thisproc, 'DUMMY DUMMY Started')
        supporting.log(self.logger, logging.DEBUG, thisproc, 'logDir is >' + generalSettings.logDir + "<.")

        # Check requirements for artifact generation
        generalSettings.getenvvars()
        settings.getschedulerenvvars()
        settings.outschedulerenvvars()

        result = schedulerChecks.schedulerartifactchecks()
        if result.rc != 0:
            supporting.log(self.logger, logging.ERROR, thisproc,
                           'Scheduler Artifact Checks failed with >' + result.message + "<.")
            supporting.exitscript(self.resultlogger, result)

        #        result = artifact.processList(settings.schedulerdeploylist)

        supporting.log(self.logger, logging.DEBUG, thisproc, 'Completed with return code >' + str(result.rc)
                       + '< and result code >' + result.code + "<.")
        #    supporting.writeresult(resultlogger, result)
        return result
Example no. 25
def generate_orderedsql(sourcesqldir, schema, input_sqlfile):
    thisproc = "generate_orderedsql"
    global entrynr, previous_schema
    result = err.OK
    supporting.log(logger, logging.DEBUG, thisproc, "Started to work on sql file >" + input_sqlfile + "< in schema >" +schema +"<.")
    supporting.log(logger, logging.DEBUG, thisproc, "settings.targetsqldir is >" + settings.targetsqldir + "<.")

    the_source_sqlfile = input_sqlfile
    if schema == previous_schema:
        entrynr = entrynr + 1
    else:
        entrynr = 1

    prefixReleaseID = settings.sqlprefix + "%02d" % entrynr

    orderedsqlfilename = settings.targetsqldir + "/" + schema + "/" + prefixReleaseID + "_" + schema + "_" + generalSettings.releaseID + ".sql"
    create_directory(settings.targetsqldir + "/" + schema)
    supporting.log(logger, logging.INFO, thisproc,
                   "orderedsqlfilename is >" + orderedsqlfilename + "<. Based on prefixReleaseID >" + prefixReleaseID + "<, settings.targetsqldir >"
                   + settings.targetsqldir + "<, schema >" + schema + "< and generalSettings.releaseID >" + generalSettings.releaseID + "<.")

    filehandling.removefile(orderedsqlfilename)
    global level
    level = 0
    result = processlines(sourcesqldir, schema, the_source_sqlfile, orderedsqlfilename)

    supporting.log(logger, logging.DEBUG, thisproc,
                   "Completed with rc >" + str(result.rc) + "< and code >" + result.code + "<.")

    previous_schema = schema
    return result
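
A worked example of the filename composition above, with hypothetical settings (sqlprefix "V", targetsqldir "/target/sql", releaseID "R2020_06") and the third script of schema HR:

# All values below are invented; only the composition rule comes from
# generate_orderedsql above.
entrynr = 3
prefixReleaseID = "V" + "%02d" % entrynr                     # "V03"
orderedsqlfilename = ("/target/sql" + "/" + "HR" + "/" +
                      prefixReleaseID + "_" + "HR" + "_" + "R2020_06" + ".sql")
print(orderedsqlfilename)  # /target/sql/HR/V03_HR_R2020_06.sql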
Example no. 26
def outschemaenvvars():
    thisproc = "outdbenvvars"
    supporting.log(logger, logging.DEBUG, thisproc,
                   'database_tns_name is >' + database_tns_name + "<.")
    supporting.log(logger, logging.DEBUG, thisproc,
                   'database_schema is >' + database_schema + "<.")
    supporting.log(logger, logging.DEBUG, thisproc,
                   'database user is >' + database_user + "<.")
    if database_user_password is None:
        supporting.log(logger, logging.WARNING, thisproc,
                       'database_user_password is empty')
    else:
        supporting.log(logger, logging.DEBUG, thisproc,
                       'database_user_password has been determined.')
Example no. 27
def main(argv):
    """Generate an Informatica Platform artifact based on environment variables
    """
    thisproc = "MAIN"
    mainProc = 'CreateInformaticaPlatformArtifact'

    resultlogger = supporting.configurelogger(mainProc)
    logger = logging.getLogger(mainProc)

    args = parse_the_arguments(argv)

    supporting.log(logger, logging.DEBUG, thisproc, 'Started')
    supporting.log(logger, logging.DEBUG, thisproc,
                   'logDir is >' + generalsettings.logDir + "<.")

    settings.getinfaenvvars()
    settings.outinfaenvvars()

    # Check requirements for artifact generation
    result = infachecks.infaartifactchecks()
    if result.rc != 0:
        supporting.log(logger, logging.ERROR, thisproc,
                       'INFA Checks failed with >' + result.message + "<.")
        supporting.exitscript(resultlogger, result)

    result = artifact.processList(infaConstants.CREATEARTIFACT,
                                  settings.infadeploylist)

    supporting.log(
        logger, logging.DEBUG, thisproc, 'Completed with return code >' +
        str(result.rc) + '< and result code >' + result.code + "<.")
    #    supporting.writeresult(resultlogger, result)
    supporting.exitscript(resultlogger, result)
Example no. 28
def processlines(the_source_sqldir, schema, the_source_sqlfile, orderedsqlfilename):
    result = err.OK
    global level
    level += 1
    thisproc = "processlines-" + "%03d" % level
    supporting.log(logger, logging.DEBUG, thisproc, "level is >" + "%03d" % level + "<.")

    try:
        with open(the_source_sqldir + the_source_sqlfile) as thesql:
            for line in thesql:
                if ignoreline(line):
                    continue
                if calltosubsql(line):
                    supporting.log(logger, logging.DEBUG, thisproc, "Found '@@', a call to another script.")
                    write2file(orderedsqlfilename, "-- Start expansion -- " + line)
                    subsql = line[2:-1].split(' ', 1)[0]
                    completepathsql = the_source_sqldir + subsql
                    supporting.log(logger, logging.DEBUG, thisproc, "Sub file name determined as >" + subsql +"<. Complete file path >"
                                   + completepathsql +"<.")
                    processlines(the_source_sqldir, schema, schema +"/" + subsql, orderedsqlfilename)
                    write2file(orderedsqlfilename, "-- End expansion -- " + line)
                else:
                    write2file(orderedsqlfilename, line)

    except IOError:
        supporting.log(logger, logging.ERROR, thisproc, "Could not find file >" + the_source_sqlfile + "<.")
        write2file(orderedsqlfilename, "ERROR: Could not find file >" + the_source_sqlfile + "<.")
        result = err.FILE_NF

    return result
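
To make the '@@' handling above concrete, this is how the sub-script name is taken from a call line (the line itself is invented):

# Hypothetical input line; the slicing mirrors processlines above.
line = "@@create_tables.sql some_argument\n"
subsql = line[2:-1].split(' ', 1)[0]   # drop "@@" and the trailing newline, keep the first token
print(subsql)  # create_tables.sql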
Example no. 29
def determineSourceDirectory(directory, type):
    thisproc = "determineSourceDirectory"

    # type_path = directory + "/" + type
    type_path = directory
    directoryPath = Path(type_path)
    if directoryPath.is_dir():
        supporting.log(logger, logging.DEBUG, thisproc,
                       'Found directory >' + type_path + "<.")
        directory = type_path
    else:
        sourceDir = determinebaseSourceDirectory(type) + "/"
        supporting.log(
            logger, logging.DEBUG, thisproc, 'directory >' + type_path +
            '< not found. Trying >' + sourceDir + type_path + '<...')
        type_path = sourceDir + type_path
        directoryPath = Path(type_path)
        if directoryPath.is_dir():
            supporting.log(logger, logging.DEBUG, thisproc,
                           'Found directory >' + type_path + "<.")
        else:
            supporting.log(
                logger, err.DIRECTORY_NF.level, thisproc, "directory checked >" +
                type_path + "<. " + err.DIRECTORY_NF.message)
            result = err.DIRECTORY_NF
            return constants.NOT_SET, result

    return type_path, err.OK
Example no. 30
def getschemaenvvars(schema):
    thisproc = "getschemaenvvars"
    global database_user, database_schema, database_user_password, database_tns_name
    # Database user etc.
    database_user = os.environ.get(
        schema + '_' + constants.varOracleDatabaseUser, constants.NOT_SET)
    database_user_password = os.environ.get(
        schema + '_' + constants.varDatabaseUserPassword, None)
    database_schema = os.environ.get(
        schema + '_' + constants.varOracleSchemaName, constants.NOT_SET)
    database_tns_name = os.environ.get(
        schema + '_' + constants.varOracleTNSName, constants.NOT_SET)

    supporting.log(logger, logging.DEBUG, thisproc, 'completed')
    return database_tns_name, database_schema, database_user, database_user_password
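
The lookups above build each variable name from the schema prefix and a constant. An illustrative sketch, assuming constants.varOracleDatabaseUser resolves to "DATABASE_USER" (the real constant value is not shown in the source):

import os

# Assumed constant value, for illustration only.
varOracleDatabaseUser = "DATABASE_USER"
os.environ["HR_DATABASE_USER"] = "hr_deploy_user"   # hypothetical variable

schema = "HR"
database_user = os.environ.get(schema + "_" + varOracleDatabaseUser, "NOT_SET")
print(database_user)  # hr_deploy_user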