def processRecords(self):
    """
    Consume all failover messages from the MQ and insert them into the db.

    Records whose insertion fails are published back to the MQ so they are
    not lost.

    :return: S_OK() on success, S_ERROR/the failing result otherwise
    """
    result = createConsumer(self.__monitoringType)
    if not result["OK"]:
        gLogger.error("Fail to create Consumer: %s" % result["Message"])
        return S_ERROR("Fail to create Consumer: %s" % result["Message"])
    mqConsumer = result["Value"]

    # Records that could not be inserted into the db. They are republished
    # only AFTER the consumer is closed; otherwise the consumer could pick
    # them up again within this very loop.
    failedRecords = []
    result = S_OK()
    try:
        while result["OK"]:
            # we consume all messages from the consumer internal queue
            result = mqConsumer.get()
            if result["OK"]:
                records = json.loads(result["Value"])
                retVal = monitoringDB.put(list(records), self.__monitoringType)
                if not retVal["OK"]:
                    failedRecords.append(records)
    finally:
        # FIX: the original closed the consumer on every loop iteration
        # (immediately after the first get); close it exactly once, and
        # even if json.loads / put raises.
        mqConsumer.close()  # make sure that we will not process any more messages

    # the db was not available: publish the data back to the MQ
    for records in failedRecords:
        res = self.publishRecords(records)
        if not res["OK"]:
            return res
    return S_OK()
def processRecords(self):
    """
    Consume all failover messages from the MQ and insert them into the db.

    Records whose insertion fails are published back to the MQ so they are
    not lost.

    :return: S_OK() on success, S_ERROR/the failing result otherwise
    """
    result = createConsumer(self.__monitoringType)
    if not result['OK']:
        gLogger.error("Fail to create Consumer: %s" % result['Message'])
        return S_ERROR("Fail to create Consumer: %s" % result['Message'])
    mqConsumer = result['Value']

    # Records that could not be inserted into the db. They are republished
    # only AFTER the consumer is closed; otherwise the consumer could pick
    # them up again within this very loop.
    failedRecords = []
    result = S_OK()
    try:
        while result['OK']:
            # we consume all messages from the consumer internal queue
            result = mqConsumer.get()
            if result['OK']:
                records = json.loads(result['Value'])
                retVal = monitoringDB.put(list(records), self.__monitoringType)
                if not retVal['OK']:
                    failedRecords.append(records)
    finally:
        # FIX: the original closed the consumer on every loop iteration
        # (immediately after the first get); close it exactly once, and
        # even if json.loads / put raises.
        mqConsumer.close()  # make sure that we will not process any more messages

    # the db was not available: publish the data back to the MQ
    for records in failedRecords:
        res = self.publishRecords(records)
        if not res['OK']:
            return res
    return S_OK()
def commit(self):
    """
    Insert the accumulated documents into the db.

    In case of failure the documents are kept in memory (put back on
    ``self.__documents``) or published to the MQ as failover.

    :return: S_OK(number of records sent) or S_ERROR
    """
    # before we try to insert the data to the db, we process all the data
    # which are already in the queue
    mqProducer = self.__createProducer()  # we are sure that we can connect to MQ
    if mqProducer:
        result = self.processRecords()
        if not result['OK']:
            gLogger.error("Unable to insert data to the db:", result['Message'])

    # take ownership of the pending documents under the lock
    self.__documentLock.acquire()
    documents = self.__documents
    self.__documents = []
    self.__documentLock.release()

    recordSent = 0
    try:
        while documents:
            recordsToSend = documents[:self.__maxRecordsInABundle]
            retVal = monitoringDB.put(recordsToSend, self.__monitoringType)
            if retVal['OK']:
                recordSent += len(recordsToSend)
                del documents[:self.__maxRecordsInABundle]
                gLogger.info("%d records inserted to the db" % (recordSent))
            elif mqProducer:
                res = self.publishRecords(recordsToSend, mqProducer)
                # if we managed to publish the records we can delete from the list
                if res['OK']:
                    recordSent += len(recordsToSend)
                    del documents[:self.__maxRecordsInABundle]
                else:
                    return res  # in case of MQ problem
            else:
                # FIX: db is down and there is no MQ fallback -- the original
                # retried the same bundle forever (busy loop because documents
                # never shrinks). Keep the rest for the next commit instead.
                gLogger.warn("Failed to insert the records:", retVal['Message'])
                break
    except Exception as e:  # pylint: disable=broad-except
        gLogger.exception("Error committing", lException=e)
        return S_ERROR("Error committing %s" % repr(e).replace(',)', ')'))
    finally:
        if mqProducer:
            mqProducer.close()
        # put back whatever could not be sent
        self.__documents.extend(documents)
    return S_OK(recordSent)
def commit(self):
    """
    Insert the accumulated documents into the db.

    In case of failure the documents are kept in memory (put back on
    ``self.__documents``) or published to the MQ as failover.

    :return: S_OK(number of records sent) or S_ERROR
    """
    # before we try to insert the data to the db, we process all the data
    # which are already in the queue
    mqProducer = self.__createProducer()  # we are sure that we can connect to MQ
    if mqProducer is not None:
        result = self.processRecords()
        if not result['OK']:
            gLogger.error("Unable to insert data to the db:", result['Message'])

    # take ownership of the pending documents under the lock
    self.__documentLock.acquire()
    documents = self.__documents
    self.__documents = []
    self.__documentLock.release()

    recordSent = 0
    try:
        while documents:
            recordsToSend = documents[:self.__maxRecordsInABundle]
            retVal = monitoringDB.put(recordsToSend, self.__monitoringType)
            if retVal['OK']:
                recordSent += len(recordsToSend)
                del documents[:self.__maxRecordsInABundle]
                gLogger.info("%d records inserted to the db" % (recordSent))
            elif mqProducer is not None:
                res = self.publishRecords(recordsToSend, mqProducer)
                # if we managed to publish the records we can delete from the list
                if res['OK']:
                    recordSent += len(recordsToSend)
                    del documents[:self.__maxRecordsInABundle]
                else:
                    return res  # in case of MQ problem
            else:
                # FIX: db is down and there is no MQ fallback -- the original
                # retried the same bundle forever (busy loop because documents
                # never shrinks). Keep the rest for the next commit instead.
                gLogger.warn("Failed to insert the records:", retVal['Message'])
                break
    except Exception as e:  # pylint: disable=broad-except
        gLogger.exception("Error committing", lException=e)
        return S_ERROR("Error committing %s" % repr(e).replace(',)', ')'))
    finally:
        if mqProducer is not None:
            mqProducer.close()
            mqProducer = None
        # put back whatever could not be sent
        self.__documents.extend(documents)
    return S_OK(recordSent)
def commit(self):
    """
    Insert the accumulated documents into the db.

    In case of failure the documents are kept in memory (put back on
    ``self.__documents``) or published to the MQ as failover.

    :return: S_OK(number of records sent) or S_ERROR
    """
    # take ownership of the pending documents under the lock
    self.__documentLock.acquire()
    documents = self.__documents
    self.__documents = []
    self.__documentLock.release()

    recordSent = 0
    try:
        while documents:
            recordsToSend = documents[: self.__maxRecordsInABundle]
            retVal = monitoringDB.put(recordsToSend, self.__monitoringType)
            if retVal["OK"]:
                recordSent += len(recordsToSend)
                del documents[: self.__maxRecordsInABundle]
                gLogger.info("%d records inserted to the db" % (recordSent))
            elif self.__mq:
                res = self.publishRecords(recordsToSend)
                # if we managed to publish the records we can delete from the list
                if res["OK"]:
                    recordSent += len(recordsToSend)
                    del documents[: self.__maxRecordsInABundle]
                else:
                    return res  # in case of MQ problem
            else:
                # FIX: db is down and there is no MQ fallback -- the original
                # retried the same bundle forever (busy loop because documents
                # never shrinks). Keep the rest for the next commit instead.
                gLogger.warn("Failed to insert the records: %s", retVal["Message"])
                break
    except Exception as e:  # pylint: disable=broad-except
        gLogger.exception("Error committing", lException=e)
        return S_ERROR("Error committing %s" % repr(e).replace(",)", ")"))
    finally:
        # put back whatever could not be sent, then drain the failover MQ
        self.__documents.extend(documents)
        if self.__mq:
            result = self.processRecords()
            if not result["OK"]:
                gLogger.error("Unable to insert data from the MQ", result["Message"])
    return S_OK(recordSent)
def processRecords(self):
    """
    Consume all failover messages from the MQ and insert them into the db.

    Records whose insertion fails are published back to the MQ (after the
    consumer is closed) so they are not lost.

    :return: S_OK() on success, S_ERROR/the failing result otherwise
    """
    # if the db is not accessible, the records will not be processed from MQ
    retVal = monitoringDB.pingDB()
    # Value is False when we can not connect to the db; in both the error
    # and the unreachable case we bail out with the ping result unchanged.
    if not retVal['OK'] or not retVal['Value']:
        return retVal

    result = createConsumer("Monitoring::Queue::%s" % self.__monitoringType)
    if not result['OK']:
        gLogger.error("Fail to create Consumer: %s" % result['Message'])
        return S_ERROR("Fail to create Consumer: %s" % result['Message'])
    mqConsumer = result['Value']

    failedToProcess = []
    result = S_OK()
    try:
        while result['OK']:
            # we consume all messages from the consumer internal queue
            result = mqConsumer.get()
            if result['OK']:
                records = json.loads(result['Value'])
                retVal = monitoringDB.put(list(records), self.__monitoringType)
                if not retVal['OK']:
                    failedToProcess.append(records)
    finally:
        # FIX: close in a finally so an exception from json.loads / put
        # cannot leak the consumer; also makes sure that we will not
        # process any more messages.
        mqConsumer.close()

    # the db is not available and we publish again the data to MQ
    for records in failedToProcess:
        res = self.publishRecords(records)
        if not res['OK']:
            return res
    return S_OK()
def processRecords(self):
    """
    Consume all failover messages from the MQ and insert them into the db.

    Records whose insertion fails are published back to the MQ (after the
    consumer is closed) so they are not lost.

    :return: S_OK() on success, S_ERROR/the failing result otherwise
    """
    # if the db is not accessible, the records will not be processed from MQ
    retVal = monitoringDB.pingDB()
    # Value is False when we can not connect to the db; in both the error
    # and the unreachable case we bail out with the ping result unchanged.
    if not retVal['OK'] or not retVal['Value']:
        return retVal

    result = createConsumer("Monitoring::Queue::%s" % self.__failoverQueueName)
    if not result['OK']:
        gLogger.error("Fail to create Consumer: %s" % result['Message'])
        return S_ERROR("Fail to create Consumer: %s" % result['Message'])
    mqConsumer = result['Value']

    failedToProcess = []
    result = S_OK()
    try:
        while result['OK']:
            # we consume all messages from the consumer internal queue
            result = mqConsumer.get()
            if result['OK']:
                records = json.loads(result['Value'])
                retVal = monitoringDB.put(list(records), self.__monitoringType)
                if not retVal['OK']:
                    failedToProcess.append(records)
    finally:
        # FIX: close in a finally so an exception from json.loads / put
        # cannot leak the consumer; also makes sure that we will not
        # process any more messages.
        mqConsumer.close()

    # the db is not available and we publish again the data to MQ
    for records in failedToProcess:
        res = self.publishRecords(records)
        if not res['OK']:
            return res
    return S_OK()