Code example #1
def psLocalTest():
	try:
		thisPath = os.path.dirname(os.path.abspath(__file__))
		basePath = os.path.abspath(os.path.join(thisPath, '..'))
		if basePath not in sys.path:
			sys.path.append(basePath)
		import env
		env.addLibPath()
		import utils
		import twisted.logger

		## Setup requested log handlers
		globalSettings = utils.loadSettings(os.path.join(env.configPath, 'globalSettings.json'))
		logFiles = utils.setupLogFile('JobDetail', env, globalSettings['fileContainingContentGatheringLogSettings'], directoryName='client')
		logObserver = utils.setupObservers(logFiles, 'JobDetail', env, globalSettings['fileContainingContentGatheringLogSettings'])
		logger = twisted.logger.Logger(observer=logObserver, namespace='JobDetail')

		client = PwshLocal(logger)
		version = client.open()
		logger.debug('version: {version!r}', version=version)

		logger.debug('sleep should timeout and reinitialize shell...')
		results = client.run('sleep 5', timeout=2)
		logger.debug('sleep output: {results!r}', results=results)

		osAttrDict = {}
		queryOperatingSystem(client, logger, osAttrDict)
		logger.debug('osAttrDict: {osAttrDict!r}', osAttrDict=osAttrDict)
		client.close()

	except:
		stacktrace = traceback.format_exception(sys.exc_info()[0], sys.exc_info()[1], sys.exc_info()[2])
		print('Main Exception: {}'.format(stacktrace))
		## 'client' may not exist if the failure happened before the shell was opened
		try:
			client.close()
		except Exception:
			pass
Code example #2
def main():
    """Entry point for this utility.

    Usage::

      $ python createApiUser.py

    """
    try:
        ## Setup requested log handlers
        globalSettings = utils.loadSettings(
            os.path.join(env.configPath, "globalSettings.json"))
        logFiles = utils.setupLogFile(
            "ApiApplication",
            env,
            globalSettings['fileContainingServiceLogSettings'],
            directoryName='service')
        logObserver = utils.setupObservers(
            logFiles, "ApiApplication", env,
            globalSettings['fileContainingServiceLogSettings'])
        logger = twisted.logger.Logger(observer=logObserver,
                                       namespace="ApiApplication")
        logger.info('Starting createApiUser')

        ## Connect to database
        dbClient = DatabaseClient(logger)
        if dbClient is None:
            raise SystemError(
                'Failed to connect to database; unable to initialize tables.')

        ## Get list of valid users
        users = {}
        getUsers(dbClient, logger, users)

        ## Create and insert a new credential
        createUserEntry(dbClient, logger, users)

        ## Cleanup
        dbClient.session.remove()
        dbClient.close()
        logger.info('Exiting createApiUser utility.')

    except:
        stacktrace = traceback.format_exception(sys.exc_info()[0],
                                                sys.exc_info()[1],
                                                sys.exc_info()[2])
        ## The basic print is here for a console message in case we weren't
        ## able to use the logging mechanism before encountering the failure.
        print('Failure in createApiUser: {}'.format(stacktrace))
        try:
            logger.debug('Failure in createApiUser: {}'.format(stacktrace))
        except:
            pass

    ## end main
    return
Code example #3
    def __init__(self, serviceName, globalSettings, canceledEvent,
                 shutdownEvent):
        """Constructor for the Query service."""
        self.canceledEvent = canceledEvent
        self.shutdownEvent = shutdownEvent
        self.logFiles = utils.setupLogFile(
            serviceName,
            env,
            globalSettings['fileContainingServiceLogSettings'],
            directoryName='service')
        self.logObserver = utils.setupObservers(
            self.logFiles, serviceName, env,
            globalSettings['fileContainingServiceLogSettings'])
        self.logger = twisted.logger.Logger(observer=self.logObserver,
                                            namespace=serviceName)
        self.logger.info('Started logger for {serviceName!r}',
                         serviceName=serviceName)

        ## Allow the dbClient to get created in the main thread, to reuse pool
        self.dbClient = None
        super().__init__(serviceName, globalSettings, getDbClient=True)
        self.dbClient.session.close()

        self.localSettings = utils.loadSettings(
            os.path.join(env.configPath,
                         globalSettings['fileContainingQuerySettings']))
        self.logger.info(
            'waitSecondsBetweenCacheCleanupJobs: {secs!r}',
            secs=self.localSettings['waitSecondsBetweenCacheCleanupJobs'])

        ## Twisted import here to avoid issues with epoll on Linux
        from twisted.internet import task, threads

        ## TODO: modify looping calls to use threads.deferToThread(); avoid
        ## time delays/waits from being blocking to the main reactor thread
        self.loopingCleanUpCache = task.LoopingCall(self.cleanUpCache)
        self.loopingCleanUpCache.start(
            self.localSettings['waitSecondsBetweenCacheCleanupJobs'])

        ## Make checking kafka and processing results a looping call, to give a
        ## break to the main reactor thread; otherwise it blocks other looping
        ## calls, like those in coreService for health and environment details:
        self.kafkaConsumer = self.createKafkaConsumer(
            globalSettings['kafkaQueryTopic'])
        self.loopingGetKafkaResults = task.LoopingCall(self.getKafkaResults,
                                                       self.kafkaConsumer)
        ## Give a one-second break before starting the main LoopingCall
        self.loopingGetKafkaResults.start(1, now=False)
        self.logger.debug('Leaving Query constructor')
Code example #4
def setupLogging(globalSettings):
    """Logger for the parent process."""
    logFiles = setupLogFile('Main', env,
                            globalSettings['fileContainingServiceLogSettings'])
    logObserver = setupObservers(
        logFiles, 'Main', env,
        globalSettings['fileContainingServiceLogSettings'])
    logger = twisted.logger.Logger(observer=logObserver, namespace='Main')
    if not os.path.exists(env.logPath):
        os.makedirs(env.logPath)
    logger.info('Starting Open Content Platform.')
    logger.info(' Main-process identifier (PID): {}.'.format(os.getpid()))
    logger.info(' Started on the command line; press Ctrl+C to exit.')

    ## end setupLogging
    return logger
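
A hypothetical call site for setupLogging above, shown only for context; it assumes env and utils are importable exactly as in the other examples, and the log message is a placeholder:

## Hypothetical driver (not from the source): load the shared settings file,
## then build the parent-process logger via setupLogging above
import os
import env    ## assumed importable, as in examples #1 and #9
import utils  ## assumed to expose loadSettings, as in the other examples

globalSettings = utils.loadSettings(os.path.join(env.configPath, 'globalSettings.json'))
logger = setupLogging(globalSettings)
logger.info('Parent-process logging is ready.')  ## placeholder message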
Code example #5
    def __init__(self, serviceName, globalSettings, canceledEvent,
                 shutdownEvent):
        """Constructor for the ResultProcessingFactory."""
        self.canceledEvent = canceledEvent
        self.shutdownEvent = shutdownEvent
        self.logFiles = utils.setupLogFile(
            serviceName,
            env,
            globalSettings['fileContainingServiceLogSettings'],
            directoryName='service')
        self.logObserver = utils.setupObservers(
            self.logFiles, serviceName, env,
            globalSettings['fileContainingServiceLogSettings'])
        self.logger = twisted.logger.Logger(observer=self.logObserver,
                                            namespace=serviceName)
        self.logger.info('Started logger for {serviceName!r}',
                         serviceName=serviceName)
        self.localSettings = utils.loadSettings(
            os.path.join(
                env.configPath,
                globalSettings['fileContainingResultProcessingSettings']))
        self.globalSettings = globalSettings
        self.clientEndpointTable = platformSchema.ServiceResultProcessingEndpoint
        self.serviceJobTable = None
        self.serviceHealthTable = platformSchema.ServiceResultProcessingHealth
        self.validActions = [
            'connectionRequest', 'healthResponse', 'cacheResponse',
            'getKafkaPartitionCount', 'kafkaHealth'
        ]
        self.actionMethods = [
            'doConnectionRequest', 'doHealthResponse', 'doCacheResponse',
            'doGetKafkaPartitionCount', 'doKafkaHealth'
        ]
        super().__init__(serviceName, globalSettings)
        if self.canceledEvent.is_set() or self.shutdownEvent.is_set():
            self.logger.error('Cancelling startup of {serviceName!r}',
                              serviceName=serviceName)
            return

        ## Twisted import here to avoid issues with epoll on Linux
        from twisted.internet import task

        self.loopingGetKafkaPartitionCount = task.LoopingCall(
            self.doGetKafkaPartitionCount)
        self.loopingGetKafkaPartitionCount.start(
            self.localSettings['waitSecondsBetweenRequestingKafkaPartitionCount'])
Code example #6
    def getLocalLogger(self):
        """Setup a log handler."""
        logFiles = setupLogFile(
            self.serviceName,
            env,
            self.globalSettings['fileContainingServiceLogSettings'],
            directoryName='service')
        logObserver = setupObservers(
            logFiles, 'ApiService', env,
            self.globalSettings['fileContainingServiceLogSettings'])
        self.logger = twisted.logger.Logger(observer=logObserver,
                                            namespace='ApiService')
        self.logger.info('Started logger for {serviceName!r}',
                         serviceName=self.serviceName)

        ## end getLocalLogger
        return
Code example #7
    def __init__(self, serviceName, globalSettings, canceledEvent,
                 shutdownEvent):
        """Constructor for the LogCollectionForJobs service."""
        self.canceledEvent = canceledEvent
        self.shutdownEvent = shutdownEvent
        self.logFiles = setupLogFile(
            serviceName,
            env,
            globalSettings['fileContainingServiceLogSettings'],
            directoryName='service')
        self.logObserver = setupObservers(
            self.logFiles, serviceName, env,
            globalSettings['fileContainingServiceLogSettings'])
        self.logger = twisted.logger.Logger(observer=self.logObserver,
                                            namespace=serviceName)
        self.logger.info('Started logger for {serviceName!r}',
                         serviceName=serviceName)
        super().__init__(serviceName, globalSettings)
        self.localSettings = loadSettings(
            os.path.join(
                env.configPath,
                globalSettings['fileContainingLogCollectionForJobsSettings']))
        self.secondsBetweenLogCleanup = int(
            self.localSettings['waitHoursBetweenLogCleanupChecks']) * 60 * 60
        self.secondsToRetainLogFiles = int(
            self.localSettings['numberOfHoursToRetainLogFiles']) * 60 * 60

        ## Twisted import here to avoid issues with epoll on Linux
        from twisted.internet import task

        ## Make checking kafka and processing results a looping call, to give a
        ## break to the main reactor thread; otherwise it blocks other looping
        ## calls, like those in coreService for health and environment details:
        self.kafkaConsumer = self.createKafkaConsumer(
            globalSettings['kafkaLogForJobsTopic'])
        self.loopingGetKafkaResults = task.LoopingCall(self.getKafkaResults,
                                                       self.kafkaConsumer)
        ## Give a one-second break before starting the main LoopingCall
        self.loopingGetKafkaResults.start(1, now=False)
        ## Regularly check logs for cleanup; avoid filling disk with old logs
        self.loopingCleanupLogs = task.LoopingCall(self.deferCleanupLogs)
        self.loopingCleanupLogs.start(self.secondsBetweenLogCleanup)
        self.logger.debug('Leaving LogCollectionForJobs constructor')
Code example #8
	def __init__(self, serviceName, globalSettings, canceledEvent, shutdownEvent):
		"""Constructor for the ResultProcessingClientFactory.

		Arguments:
		  serviceName (str)     : class name of the client ('ResultProcessingClient')
		  globalSettings (dict) : global settings
		"""
		try:
			self.canceledEvent = canceledEvent
			self.shutdownEvent = shutdownEvent
			self.logFiles = utils.setupLogFile(serviceName, env, globalSettings['fileContainingClientLogSettings'], directoryName='client')
			self.logObserver = utils.setupObservers(self.logFiles, serviceName, env, globalSettings['fileContainingClientLogSettings'])
			self.logger = twisted.logger.Logger(observer=self.logObserver, namespace=serviceName)
			self.globalSettings = globalSettings
			self.localSettings = utils.loadSettings(os.path.join(env.configPath, globalSettings['fileContainingResultProcessingClientSettings']))
			self.dbClient = None
			self.validActions = ['connectionResponse', 'healthRequest', 'tokenExpired', 'unauthorized', 'partitionCountResponse']
			self.actionMethods = ['doConnectionResponse', 'doHealthRequest', 'doTokenExpired', 'doUnauthorized', 'doPartitionCountResponse']
			self.kafkaErrorCount = 0
			self.kafkaErrorLimit = 5
			self.kafkaConsumer = None
			self.partitionCount = 0
			self.connectedToKafkaConsumer = False
			self.resultProcessingUtility = None
			self.maintenanceMode = True
			self.pauseKafkaProcessing = True
			super().__init__(serviceName, globalSettings)
			self.initialize(True)
			## Looping call to build objectCache and start kafka processing
			self.loopingStartProcessing = task.LoopingCall(self.startProcessing)
			self.loopingStartProcessing.start(int(self.localSettings['waitSecondsBetweenRequestingFullSyncCacheUpdates'])).addErrback(self.logger.error)
			## Looping call to delta update (in-place) the objectCache
			self.loopingDeltaSync = task.LoopingCall(self.updateObjectCache)
			self.loopingDeltaSync.start(int(self.localSettings['waitSecondsBetweenRequestingDeltaSyncCacheUpdates'])).addErrback(self.logger.error)

		except:
			exception = traceback.format_exception(sys.exc_info()[0], sys.exc_info()[1], sys.exc_info()[2])
			print('Exception in ResultProcessingClientFactory constructor: {}'.format(str(exception)))
			with suppress(Exception):
				self.logger.error('Exception in ResultProcessingClientFactory: {exception!r}', exception=exception)
			self.logToKafka(sys.exc_info()[1])
			self.shutdownEvent.set()
			reactor.stop()
Code example #9
def powerShell():
    import sys
    import traceback
    import os
    import re
    try:
        ## Add openContentPlatform directories onto the sys path
        thisPath = os.path.dirname(os.path.abspath(__file__))
        basePath = os.path.abspath(os.path.join(thisPath, '..'))
        if basePath not in sys.path:
            sys.path.append(basePath)
        import env
        env.addLibPath()
        env.addDatabasePath()
        env.addExternalPath()
        ## utils lives on the lib path added above; import it before use below
        import utils

        ## Setup requested log handlers
        globalSettings = utils.loadSettings(
            os.path.join(env.configPath, 'globalSettings.json'))
        logEntity = 'Protocols'
        logger = utils.setupLogger(logEntity, env, 'logSettingsCore.json')
        logger.info('Starting protocolWrapperPowershell...')

        import twisted.logger
        logFiles = utils.setupLogFile(
            'JobDetail',
            env,
            globalSettings['fileContainingContentGatheringLogSettings'],
            directoryName='client')
        logObserver = utils.setupObservers(
            logFiles, 'JobDetail', env,
            globalSettings['fileContainingContentGatheringLogSettings'])
        logger = twisted.logger.Logger(observer=logObserver,
                                       namespace='JobDetail')

        from remoteRuntime import Runtime
        runtime = Runtime(logger, env, 'TestPkg', 'TestJob', 'endpoint', {},
                          None, {}, None, {}, None)

        ## Manual creation of a protocol via protocolHandler
        externalProtocolHandler = utils.loadExternalLibrary(
            'externalProtocolHandler', env, globalSettings)
        protocolHandler = externalProtocolHandler.ProtocolHandler(
            None, globalSettings, env, logger)
        protocolType = 'ProtocolPowerShell'
        protocolData = {'user': '******', 'password': '******'}
        protocolHandler.createManual(runtime, protocolType, protocolData)
        protocol = externalProtocolHandler.getProtocolObject(runtime, 1)
        print('protocol to use: {}'.format(protocol))
        print('protocols: {}'.format(
            externalProtocolHandler.getProtocolObjects(runtime)))

        endpoint = '192.168.1.100'
        client = PowerShell(runtime, logger, endpoint, 1, protocol)
        client.open()

        osAttrDict = {}
        queryOperatingSystem(client, logger, osAttrDict)
        logger.debug('osAttrDict: {osAttrDict!r}', osAttrDict=osAttrDict)
        client.close()

    except:
        stacktrace = traceback.format_exception(sys.exc_info()[0],
                                                sys.exc_info()[1],
                                                sys.exc_info()[2])
        msg = str(sys.exc_info()[1])
        ## Cleanup message when we know what it is:
        ## "<x_wmi: The RPC server is unavailable.  (-2147023174, 'The RPC server is unavailable. ', (0, None, 'The RPC server is unavailable. ', None, None, -2147023174), None)>"
        if re.search(
                'The client cannot connect to the destination specified in the request',
                msg, re.I):
            ## Remove the rest of the fluff
            msg = 'The client cannot connect to the destination specified in the request. Verify that the service on the destination is running and is accepting requests.'
            logger.debug('Main Exception: {exception!r}', exception=msg)
        else:
            logger.debug('Main Exception: {exception!r}', exception=stacktrace)
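
Every example above repeats the same logging bootstrap: load globalSettings.json, create file handlers with setupLogFile, wrap them with setupObservers, and pass the observer to twisted.logger.Logger under a namespace. The condensed sketch below is not taken from the source; it only reuses calls that appear in the examples, and the entity name and settings key in the usage note are placeholders.

import os
import twisted.logger


def buildLogger(entityName, settingsKey, directoryName='service'):
    """Sketch of the logging bootstrap shared by the examples above.

    Assumes env and utils are importable, e.g. after the sys.path and
    env.addLibPath() setup shown in examples #1 and #9.
    """
    import env
    import utils
    ## Load the platform-wide settings, as in examples #1 and #2
    globalSettings = utils.loadSettings(os.path.join(env.configPath, 'globalSettings.json'))
    ## Create file handlers and observers for this entity
    logFiles = utils.setupLogFile(entityName, env, globalSettings[settingsKey],
                                  directoryName=directoryName)
    logObserver = utils.setupObservers(logFiles, entityName, env, globalSettings[settingsKey])
    ## Namespaced twisted logger backed by those observers
    return twisted.logger.Logger(observer=logObserver, namespace=entityName)


## Placeholder usage:
## logger = buildLogger('MyService', 'fileContainingServiceLogSettings')
## logger.info('Started logger for {name!r}', name='MyService')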