Example #1
0
    def __init__(self, taskQueueName: str, sink: AbstractSink,
                 workerName: str):
        """Initialize the worker process and attach it to its task queue.

        taskQueueName -- name of the shared queue this worker consumes from
        sink          -- AbstractSink that receives processed tasks
        workerName    -- label used to identify this worker
        """
        Process.__init__(self)
        self.__sink = sink
        self.__workerName = workerName

        ## Queue Manager needs to be implemented for this
        # NOTE(review): SharedBaseQueue(name) appears to attach to an
        # existing named queue set -- confirm against its implementation.
        self.__SBQ = SharedBaseQueue(taskQueueName)
        self.__taskQueue = self.__getQueue()
Example #2
0
	def __init__(self, level : Levels, configuration):
		"""Initialize the logger for a single log level.

		level         -- the Levels value stamped on every record
		configuration -- dict with 'queueName' and 'tsFormat'; may also
		                 contain 'threadModel' ('MULTI') and 'numberOfQueues'

		Bug fix: the MULTI branch previously assigned a *local*
		``numberOfQueues`` instead of ``self.numberOfQueues``, so the
		configured queue count was silently ignored and one queue was
		always created.
		"""
		self.__configuration = configuration
		self.__checkConfiguration()

		self.queueName = configuration['queueName']
		self.tsFormat = configuration['tsFormat']
		self.level = level

		# Default to a single queue; a MULTI thread model may request more.
		# .get() avoids a KeyError when 'threadModel' is absent, since
		# __checkConfiguration does not require it.
		self.numberOfQueues = 1
		if configuration.get('threadModel') == 'MULTI':
			self.numberOfQueues = configuration['numberOfQueues']

		## Queue Manager needs to be implemented for this
		self.__SBQ = SharedBaseQueue(self.queueName, self.numberOfQueues)
		self.__logQueue = self.__getQueue()
Example #3
0
class LevelLogger():
	"""Logger bound to a single log level.

	The queue for the logger is fetched from configuration while
	constructing the logger; log records are then pushed onto it via
	``log``/``pushLog``.
	"""

	def __init__(self, level : Levels, configuration):
		"""Build the logger.

		level         -- the Levels value stamped on every record
		configuration -- dict with 'queueName' and 'tsFormat'; may also
		                 contain 'threadModel' ('MULTI') and 'numberOfQueues'
		"""
		self.__configuration = configuration
		self.__checkConfiguration()

		self.queueName = configuration['queueName']
		self.tsFormat = configuration['tsFormat']
		self.level = level

		# Default to a single queue; a MULTI thread model may request more.
		# Bug fix: this previously assigned a *local* numberOfQueues, so
		# the configured value was silently ignored. .get() also avoids a
		# KeyError when 'threadModel' is absent.
		self.numberOfQueues = 1
		if configuration.get('threadModel') == 'MULTI':
			self.numberOfQueues = configuration['numberOfQueues']

		## Queue Manager needs to be implemented for this
		self.__SBQ = SharedBaseQueue(self.queueName, self.numberOfQueues)
		self.__logQueue = self.__getQueue()

	def __checkConfiguration(self):
		"""Validate that required configuration keys are present.

		NOTE: ``assert`` is stripped under ``python -O``; raise
		ValueError instead if validation must always run.
		"""
		assert('queueName' in self.__configuration), 'queue name not found in configuration'
		# NOTE(review): 'workerName' is required here but never read by
		# this class -- confirm it is actually needed.
		assert('workerName' in self.__configuration), 'worker name not found in configuration'
		## and many more assertion to be included

	def __getQueue(self, queueNumber = -1):
		"""Return a queue from the shared set; -1 means the default queue."""
		if queueNumber == -1:
			return self.__SBQ.getQueue()
		return self.__SBQ.getQueue(queueNumber)

	def pushLog(self, logData):
		"""Blocking put of a prepared LogData record onto the log queue."""
		self.__logQueue.put(logData)

	def log(self, messageNamespace, messageContent):
		"""Build a LogData record for this logger's level and enqueue it.

		Currently it is a blocking put, but it should be put_no_wait
		with a timeout based on settings, depending on the importance
		of the logs.
		"""
		logData = LogData(self.level, messageNamespace, messageContent, self.tsFormat)
		self.pushLog(logData)
## Database sink configuration.
# NOTE(review): DB_CONFIGS must be defined earlier in the file; only
# its fields are populated here.
DB_CONFIGS['host'] = '127.0.0.1'
DB_CONFIGS['username'] = '******'  # credentials redacted in this example
DB_CONFIGS['password'] = ''
DB_CONFIGS['database'] = 'logdb'
DB_SINK_CONFIGURATIONS = {'database_configurations': DB_CONFIGS}

qConfiguration = {'queueName': 'netBankingLogs'}

nLogger = NetBankingLogger()
# nLogger.info('mesg1', ['adffffa','fads'])
# nLogger.info('mesg2', ['adffffa','fads'])
# nLogger.info('mesg3', ['adffffa','fads'])

#net banking queue configuration
sink = DatabaseSink(DB_SINK_CONFIGURATIONS)
SBQ = SharedBaseQueue(qConfiguration['queueName'])
print("number of queues: ", SBQ.getCurrentNumberOfQueues())
taskQueue = SBQ.getQueue()
workerName = 'NetBankingWorker'

# ADDING worker processes
# NOTE(review): range(10) spawns 10 workers, not 5 as the original
# comment claimed.

workerList = []
for i in range(10):
    #net banking queue configuration
    sink = DatabaseSink(DB_SINK_CONFIGURATIONS)

    SBQ = SharedBaseQueue(qConfiguration['queueName'])
    # print("number of queues: ", SBQ.getCurrentNumberOfQueues())

    taskQueue = SBQ.getQueue()
    # NOTE(review): the loop body appears truncated here -- worker
    # construction and workerList.append presumably follow.
Example #5
0
## Manual smoke test for SharedBaseQueue: queue creation, dynamic
## addition of queues, and index wrap-around in getQueue().
from modules.Queues.SharedBaseQueue import SharedBaseQueue

SBQ = SharedBaseQueue('q1')

print("number of queues: ", SBQ.getCurrentNumberOfQueues())

print("TESTING ADDITION OF QUEUES")
SBQ.addQueues(2)
print("number of queues: ", SBQ.getCurrentNumberOfQueues())

## Adding an element to a queue
q0 = SBQ.getQueue()
q0.put(0)
q0.get()

SBQ.addQueues(3)
print("number of queues: ", SBQ.getCurrentNumberOfQueues())

# use of modulus
# NOTE(review): getQueue(n) presumably maps n modulo the current queue
# count (out-of-range and negative indices below must not raise) --
# confirm against SharedBaseQueue.getQueue.
q = SBQ.getQueue(3)
q = SBQ.getQueue(100)
q = SBQ.getQueue(-11)

q = SBQ.getQueue(0)
q.put(0)
q = SBQ.getQueue(4)  # 4%4 = 0 so points to same queue
q.get()
Example #6
0
class BaseWorker(Process):
    """Parent class for other workers.

    Subclasses set the queue and execute the consume operation. The
    consumer does blocking pop operations from the queue, and each
    worker process listens to only one of the queues. Shutdown hooks
    are implemented to report the start and stop of the worker and any
    other errors.

    Currently multiprocessing manager queues are used directly, but a
    wrapper needs to be implemented for them; this worker should then
    only take the queue of that wrapper, leaving the specific detail
    of the queues encapsulated.
    """

    def __init__(self, taskQueueName: str, sink: AbstractSink,
                 workerName: str):
        """Attach the worker to its named shared queue and sink.

        taskQueueName -- name of the shared queue to consume from
        sink          -- AbstractSink that receives processed tasks
        workerName    -- label used in log output
        """
        Process.__init__(self)
        self.__sink = sink
        self.__workerName = workerName

        ## Queue Manager needs to be implemented for this
        self.__SBQ = SharedBaseQueue(taskQueueName)
        self.__taskQueue = self.__getQueue()

    def __getQueue(self, queueNumber=-1):
        """Return a queue from the shared set; -1 means the default queue."""
        if queueNumber == -1:
            return self.__SBQ.getQueue()
        return self.__SBQ.getQueue(queueNumber)

    def getTaskQueue(self):
        """Return the queue this worker consumes from."""
        return self.__taskQueue

    ## blocking pop from the task queue
    def getTask(self):
        return self.getTaskQueue().get()

    ## returns the name of the spawned worker
    def getWorkerName(self):
        return self.__workerName

    ## worker specific function
    def processTask(self, nextTask):
        """Default processing: print the task and forward it to the sink.

        Subclasses are expected to override this.
        """
        print("task processed at worker : ", self.getWorkerName(),
              nextTask.getAllData())
        self.__sink.logData(nextTask)
        # raise NotImplementedError("processTask not implemented for the worker")

    ## hook reporting the reason the worker stopped
    # Bug fix: with @atexit.register alone, this stayed a plain
    # zero-argument function, so the self.addShutDownHook() call in
    # run() raised TypeError (self was passed as an argument).
    # @staticmethod keeps that call site working while preserving the
    # atexit registration at class-definition time.
    @staticmethod
    @atexit.register
    def addShutDownHook():
        print(" Worker Exited successfully!!! ")

    def run(self):
        """Consume tasks in a loop until a ``None`` poison pill arrives."""
        print(self.getWorkerName(), " Worker started running successfully!!!")

        while True:
            nextTask = self.getTask()
            if nextTask is None:
                # Poison pill
                print(self.getWorkerName(),
                      " Worker stopped successfully from poison pill!!!")
                self.addShutDownHook()
                break

            self.processTask(nextTask)

        return