def test_queue_queue(self):
    """ Test queue 2 queue. """
    print("checking queue 2 queue use case")
    src_path = self.path + "/mq1"
    dst_path = self.path + "/mq2"
    src_queue = DQS(path=src_path)
    count = 10
    # Pre-fill the source queue and remember every body we enqueued.
    bodies = ["hello world %s" % (i, ) for i in range(count)]
    for body in bodies:
        src_queue.add_message(Message(body=body))
    self.assertEqual(count, src_queue.count())
    # Shovel all messages from the source to the destination queue.
    cmd = "python bin/amqpclt --incoming-queue path=%s" \
        " --outgoing-queue path=%s --remove --loglevel debug" \
        % (src_path, dst_path)
    (ret, out, err) = proc.timed_process(cmd.split())
    self.assertEqual(0, ret, "out: %s\nerr: %s" % (out, err))
    # Every body must show up exactly once on the destination side.
    dst_queue = DQS(path=dst_path)
    for name in dst_queue:
        if dst_queue.lock(name):
            bodies.remove(dst_queue.get_message(name).body)
    self.assertEqual(count, dst_queue.count())
    self.assertEqual(0, len(bodies))
    print("checking queue 2 queue use case OK")
def __init__(self, name, configFile):
    """ Initializer for the object.

    Reads the UDP endpoint and local-queue path from the service
    configuration, opens the directory queue and starts a threaded UDP
    server that serves requests in a background thread.
    """
    Service.__init__(self, name, configFile)
    self.id = self.param('id')
    self.udp_port = int(self.param('udp_port'))
    self.udp_host = self.param('udp_host')
    # The literal value 'hostname' means "bind to this machine's name".
    if self.udp_host == 'hostname':
        self.udp_host = socket.gethostname()
    try:
        # The UDP handler class reaches the queue through this module-level
        # global rather than through the service instance.
        global gmq
        gmq = DQS(path=self.param('localQueue'))
        self._logger.info("Created connection to local queue %s" % self.param('localQueue'))
    except Exception as err:
        self._logger.error("Failing connection to local queue %s" % (err))
        # Re-raise the original exception: the previous `raise Exception`
        # replaced it with a bare Exception, losing the type and traceback.
        raise
    self._logger.info('UDP listener on %s:%s' % (self.udp_host, self.udp_port))
    self.server = ThreadedUDPServer((self.udp_host, self.udp_port), CMSSWUDPHandler)
    self._logger.info('created server. going to serve_forever in thread')
    # serve_forever blocks, so it runs in its own thread.
    self.server_thread = threading.Thread(target=self.server.serve_forever)
    self.server_thread.start()
    self._logger.info('Server loop running in thread: %s' % self.server_thread.name)
def __init__(self, name, configFile):
    """ Initializer for the object.

    Loads transfer-related configuration (DB table names, bulk-insert
    buffer size, purge interval) and opens the local directory queue.
    """
    Service.__init__(self, name, configFile)
    # Hourly purge
    self.PURGE_INTERVAL = 3600
    # DB Table where to store the messages
    self.transfers_db_table = self.param('transfersDBTable')
    self.transfers_db_table_rejected = self.param('rejectedDBTable')
    # Maximum number of messages in the buffer when making a bulk insert
    self.buffer_size = int(self.param('bufferSize'))
    self.id = self.param('id')
    self.dbsection = self.param('dbsection')
    self._next_purge = time.time() + self.PURGE_INTERVAL
    # Try to read the local queue
    try:
        self.localQueue = DQS(path=self.param('localQueue'))
    except Exception as e:
        # Log the actual error (it was previously swallowed unused).
        # NOTE(review): self.localQueue stays unset on failure, so later
        # accesses will raise AttributeError — confirm this is intended.
        self._logger.error("connection to the local queue failed: %s", e)
def main():
    """ Consume messages from a directory queue, once or in a polling loop.

    Drops privileges to --runas, optionally purges the queue first, then
    consumes up to --num messages; with --sleep > 0 it keeps polling at
    that interval until interrupted.
    """
    parser = argparse.ArgumentParser()
    parser.add_argument('--sleep', required=False, default=0, type=float)
    parser.add_argument('--queue', required=False, default=default_queue, type=str)
    parser.add_argument('--runas', required=False, default=default_user, type=str)
    parser.add_argument('--purge', required=False, action='store_true', default=False)
    parser.add_argument('--noout', required=False, action='store_true', default=False)
    parser.add_argument('--num', required=False, default=0, type=int)
    # Parsed options are shared with helpers (e.g. consume_queue) via a global.
    global args
    args = parser.parse_args()
    seteuser(pwd.getpwnam(args.runas))
    mq = DQS(path=args.queue)
    try:
        if args.purge:
            mq.purge()
        if args.sleep > 0:
            # Poll forever at the requested interval until interrupted.
            while True:
                consume_queue(mq, args.num)
                time.sleep(args.sleep)
        else:
            consume_queue(mq, args.num)
    except KeyboardInterrupt:
        raise SystemExit(0)
def test_full_chain(self):
    """ Test kombu full chain. """
    print("checking kombu full chain")
    try:
        import kombu
    except ImportError:
        print("kombu is not available, skipping it")
        return
    src_path = self.path + "/mq1"
    dst_path = self.path + "/mq2"
    src_queue = DQS(path=src_path)
    count = 10
    dest = "/queue/test%s" % (rndstr(10), )
    # Pre-fill the source queue, remembering every body for later checks.
    bodies = ["hello world %s" % (index, ) for index in range(count)]
    for payload in bodies:
        message = Message(body=payload)
        message.header = {"destination": dest}
        src_queue.add_message(message)
    self.assertEqual(count, src_queue.count())
    # Stage 1: drain the source queue into the broker via kombu.
    cmd1 = "python bin/amqpclt --incoming-queue path=%s" \
        " --outgoing-broker-uri %s " \
        " --outgoing-broker-module kombu " \
        " --outgoing-broker-auth plain,name=guest,pass=guest" \
        " --remove --loglevel debug" \
        % (src_path, self.broker)
    (ret, out, err) = proc.timed_process(cmd1.split())
    self.assertEqual(0, ret, "out: %s\nerr: %s" % (out, err))
    # Stage 2: subscribe on the broker and fill the destination queue.
    cmd2 = "python bin/amqpclt --incoming-broker-uri %s" \
        " --incoming-broker-module kombu" \
        " --incoming-broker-auth plain,name=guest,pass=guest" \
        " --subscribe destination=%s" \
        " --outgoing-queue path=%s --count %d --reliable " \
        "--loglevel debug" \
        % (self.broker, dest, dst_path, count)
    (ret, out, err) = proc.timed_process(cmd2.split())
    self.assertEqual(0, ret, "out: %s\nerr: %s" % (out, err))
    # Every body must arrive exactly once; the source must be emptied.
    dst_queue = DQS(path=dst_path)
    for name in dst_queue:
        if dst_queue.lock(name):
            bodies.remove(dst_queue.get_message(name).body)
    self.assertEqual(count, dst_queue.count())
    self.assertEqual(0, len(bodies))
    self.assertEqual(0, src_queue.count())
    print("checking kombu fullchain OK")
def main():
    # Build a single alarm message from the CLI arguments and enqueue it
    # on every directory queue named with --queue.
    parser = argparse.ArgumentParser()
    lobj = log.Logger(sys.argv[0])
    logger = lobj.get()
    confopts = config.parse_config(logger)
    nagioshost = confopts['general']['host']
    tz = pytz.timezone(confopts['general']['timezone'])
    # Timestamp is taken once, so all queues get the same value.
    timestamp = datetime.datetime.now(tz).strftime('%Y-%m-%dT%H:%M:%SZ')
    parser.add_argument('--queue', required=True, nargs='+')
    # msg headers
    parser.add_argument('--service', required=True, type=str)
    parser.add_argument('--hostname', required=True, type=str)
    parser.add_argument('--testname', required=True, type=str)
    parser.add_argument('--status', required=True, type=str)
    # msg body
    parser.add_argument('--details', required=False, type=str)
    parser.add_argument('--vo', required=False, type=str)
    parser.add_argument('--site', required=False, type=str)
    parser.add_argument('--roc', required=False, type=str)
    parser.add_argument('--urlhistory', required=False, type=str)
    parser.add_argument('--urlhelp', required=False, type=str)
    args = parser.parse_args()
    seteuser(pwd.getpwnam(confopts['general']['runasuser']))
    try:
        for q in args.queue:
            # Per-queue granularity lookup; raises KeyError when the
            # directory is not present in the configuration.
            granularity = config.get_queue_granul(q)
            mq = DQS(path=q, granularity=granularity)
            msg = build_msg(args, timestamp, args.service, args.hostname,
                            args.testname, args.status, nagioshost)
            mq.add_message(msg)
    except MessageError as e:
        # NOTE(review): unlike the branches below, this path does not exit
        # with a non-zero status — confirm whether that is intentional.
        logger.error('Error constructing alarm - %s', repr(e))
    except KeyError:
        # `q` is the queue that triggered the lookup failure; remaining
        # queues in args.queue are not processed.
        logger.error('No configured Queue for directory %s' % q)
        queue_paths = list()
        for (k, v) in confopts['queues'].items():
            queue_paths.append('{0} - {1}'.format(k, v['directory']))
        logger.error('Queues and directories found in config: %s' % ', '.join(queue_paths))
        raise SystemExit(1)
    except (OSError, IOError) as e:
        logger.error(e)
        raise SystemExit(1)
def enqueue(dirq, destination, event):
    """ Serialize *event* as JSON and add it to the directory queue at *dirq*.

    The message header carries this host as the measurement agent plus the
    given *destination*. If the event has no 'timestamp' key, the current
    time is stamped in (mutating the caller's dict).
    """
    mq_header = {
        'measurement_agent': socket.gethostname(),
        'destination': destination
    }
    # Stamp the event with the current time if the producer did not.
    # (Idiomatic membership test instead of `event.keys()`.)
    if 'timestamp' not in event:
        event['timestamp'] = time.time()
    mq_body = json.dumps(event)
    msg = Message(body=mq_body, header=mq_header)
    msg.is_text = True
    mq = DQS(path=dirq)
    mq.add_message(msg)
def main():
    """ Generate test messages and push them onto a directory queue.

    Drops privileges to --runas, then produces either --num messages, or
    an endless stream paced by --sleep seconds. Exits 1 on an unknown
    --timezone and 0 on Ctrl-C.
    """
    parser = argparse.ArgumentParser()
    parser.add_argument('--session', required=False, default=str(), type=str)
    parser.add_argument('--num', required=False, default=0, type=int)
    parser.add_argument('--queue', required=False, default=default_queue, type=str)
    parser.add_argument('--granularity', required=False, default=60, type=int)
    parser.add_argument('--runas', required=False, default=default_user, type=str)
    parser.add_argument('--noout', required=False, action='store_true', default=False)
    parser.add_argument('--sleep', required=False, default=0, type=float)
    parser.add_argument('--bodysize', required=False, default=40, type=int)
    parser.add_argument('--timezone', required=False, default='UTC', type=str)
    args = parser.parse_args()
    seteuser(pwd.getpwnam(args.runas))
    try:
        tz = timezone(args.timezone)
    except UnknownTimeZoneError:
        print("Timezone not correct")
        raise SystemExit(1)
    mq = DQS(path=args.queue, granularity=args.granularity)

    def _produce_one():
        # Build one message, enqueue it and optionally echo it.
        # (Was duplicated verbatim in both branches below.)
        msg = construct_msg(args.session, args.bodysize, tz)
        queue_msg(msg, mq)
        if not args.noout:
            print(msg)

    try:
        if args.num:
            for _ in range(args.num):
                _produce_one()
        else:
            # Produce indefinitely, pacing with --sleep when given.
            while True:
                _produce_one()
                if args.sleep:
                    time.sleep(args.sleep)
    except KeyboardInterrupt:
        raise SystemExit(0)
def __init__(self, start=1600, connect='iut2-net3.iu.edu', metricName='org.osg.general-perfsonar-simple.conf'):
    """ Initializer for the object.

    Reads message-queue settings from the config file and, when a
    directory-queue path is configured, opens a DQS handle for publishing.
    """
    Uploader.__init__(self, start, connect, metricName)
    self.maxMQmessageSize = self.readConfigFile('mq-max-message-size')
    # Code to allow publishing data to the mq
    self.mq = None
    self.dq = self.readConfigFile('directoryqueue')
    self.granularity = int(self.readConfigFile('granularity'))
    # The config may yield the literal string 'None' to disable publishing;
    # use `is not None` for the identity check (was `!= None`).
    if self.dq is not None and self.dq != 'None':
        try:
            self.mq = DQS(path=self.dq, granularity=self.granularity)
        except Exception as e:
            # Best-effort: publishing stays disabled (self.mq is None).
            self.add2log("Unable to create dirq %s, exception was %s, " % (self.dq, e))
def setup(self):
    """ Open the directory queue, derive the publish loop count and build the publisher. """
    self.dirq = DQS(path=self.shared.queue['directory'])
    bulk = self.shared.topic['bulk']
    rate = self.shared.queue['rate']
    # One pass suffices when bulk size is 1 or already covers the whole
    # rate; otherwise drain the rate in bulk-sized chunks.
    if bulk == 1 or bulk >= rate:
        self.pubnumloop = 1
    else:
        self.pubnumloop = int(rate / bulk)
    self.shared.runtime.update(inmemq=self.inmemq,
                               pubnumloop=self.pubnumloop,
                               dirq=self.dirq,
                               filepublisher=False)
    self.publisher = self.shared.runtime['publisher'](self.events, worker=self.name)