def start(self):
    """Consume pulse messages, backing off on repeated failures.

    If ``self.message`` is set, it is treated as the path of a JSON
    file holding a single test message: the file is loaded, handed to
    ``on_pulse_message``, and the method returns without contacting
    the pulse server.

    Otherwise listen forever; if ``pulse.listen()`` raises five times
    within one minute, sleep five minutes before reconnecting.
    """
    if self.message:
        # Handle a test message from a local file.  Use a context
        # manager so the handle is closed even if json.load() raises
        # (the original left the file open).
        with open(self.message) as json_data:
            data = json.load(json_data)
        self.on_pulse_message(data)
        return

    # Start listening for pulse messages. If 5 failures in a
    # minute, wait 5 minutes before retrying.
    failures = []
    while True:
        pulse = consumers.BuildConsumer(applabel=self.label,
                                        connect=False)
        pulse.configure(topic=['#.finished', '#.log_uploaded'],
                        callback=self.on_pulse_message,
                        durable=self.durable)
        if self.consumer_cfg:
            pulse.config = self.consumer_cfg
        try:
            pulse.listen()
        except Exception:
            self.error_logger.exception(
                "Error occurred during pulse.listen()")
        # Keep only failures from the last minute; once five have
        # accumulated, reset and back off for five minutes.
        now = datetime.datetime.now()
        failures = [
            x for x in failures
            if now - x < datetime.timedelta(seconds=60)
        ]
        failures.append(now)
        if len(failures) >= 5:
            failures = []
            time.sleep(5 * 60)
def main():
    """Relay pulse change messages to the buildbot master named on
    the command line via sendchange."""
    import sys

    buildmaster = sys.argv[1]

    def on_change(data, message):
        # Forward every received change to the master.
        sendchange(buildmaster, data, message)

    consumer = consumers.BuildConsumer(applabel=str(uuid.uuid4()))
    consumer.configure(topic='change.*.added', callback=on_change)
    consumer.listen()
def main():
    """Entry point for the shipit notifier.

    Parses the command line, validates the configuration file, then
    listens for ``build.#.finished`` pulse messages and hands each one
    to ``receive_message``.
    """
    parser = OptionParser()
    parser.add_option("-c", "--config", dest="config",
                      help="Configuration file")
    options = parser.parse_args()[0]

    # Validate the option before touching the file: previously a
    # missing -c fell through to config.read(None) and was misreported
    # as an unreadable configuration file.
    if not options.config:
        parser.error('Configuration file is required')

    config = ConfigParser()
    try:
        config.read(options.config)
    except Exception:
        # Narrowed from a bare except so SystemExit/KeyboardInterrupt
        # are not swallowed.
        parser.error("Could not open configuration file")

    def got_message(data, message):
        # Always ack, even when processing fails, so the message is
        # not redelivered forever.
        try:
            receive_message(config, data, message)
        finally:
            message.ack()

    if not all([config.has_section('pulse'),
                config.has_option('pulse', 'user'),
                config.has_option('pulse', 'password')]):
        log.critical('Config file must have a [pulse] section containing at '
                     'least "user" and "password" options.')
        exit(1)

    verbosity = {True: log.DEBUG, False: log.WARN}
    log.basicConfig(
        format='%(asctime)s %(message)s',
        level=verbosity[config.getboolean('shipit-notifier', 'verbose')]
    )

    pulse_cfg = pconf.PulseConfiguration.read_from_config(config)

    # Adjust applabel when wanting to run shipit on multiple machines
    pulse = consumers.BuildConsumer(applabel='shipit-notifier',
                                    connect=False)
    pulse.config = pulse_cfg
    pulse.configure(topic='build.#.finished', durable=True,
                    callback=got_message)
    log.info('listening for pulse messages')
    pulse.listen()
def main():
    """Connect to the autoland database, then consume
    ``build.#.finished`` pulse messages forever, restarting the
    listener on recoverable transport errors."""
    global auth
    global dbconn
    global logger

    default_dsn = 'dbname=autoland user=autoland host=localhost password=autoland'
    parser = argparse.ArgumentParser()
    parser.add_argument('--dsn', default=default_dsn,
                        help='Postgresql DSN connection string')
    parser.add_argument('--message-log-path', default=None,
                        help='Path to which to log received messages')
    commandline.add_logging_group(parser)
    args = parser.parse_args()

    logging.basicConfig()
    logger = commandline.setup_logging('autoland-pulse', vars(args), {})
    logger.info('starting pulse listener')

    # Poll until postgres accepts the connection.
    while not dbconn:
        try:
            dbconn = psycopg2.connect(args.dsn)
        except psycopg2.OperationalError:
            time.sleep(0.1)

    user, password = read_credentials()
    unique_label = 'autoland-%s' % platform.node()
    consumer = consumers.BuildConsumer(applabel=unique_label, user=user,
                                       password=password)
    consumer.configure(topic=['build.#.finished'], callback=handle_message)
    logger.debug('applabel: %s' % unique_label)

    # Recoverable transport errors are logged and the listener is
    # restarted; anything else propagates.
    while True:
        try:
            consumer.listen()
        except (amqp.exceptions.ConnectionForced, IOError) as e:
            logger.error('pulse error: ' + str(e))
def run():
    """Entry point for the trigger-bot service: set up logging and
    auth, then consume build started/finished pulse messages forever."""
    global logger
    global tw
    global is_triggerbot_user

    parser = argparse.ArgumentParser()
    parser.add_argument('--log-dir')
    parser.add_argument('--no-log-stderr', dest='log_stderr',
                        action='store_false', default=True)
    args = parser.parse_args(sys.argv[1:])

    service_name = 'trigger-bot'
    logger = setup_logging(service_name, args.log_dir, args.log_stderr)
    logger.info('starting listener')

    ldap_auth = read_ldap_auth()
    user, pw = read_pulse_auth()
    get_users()
    tw = TreeWatcher(ldap_auth)
    consumer = consumers.BuildConsumer(applabel=service_name, user=user,
                                       password=pw)
    consumer.configure(topic=['build.#.started', 'build.#.finished'],
                       callback=handle_message)

    while True:
        try:
            consumer.listen()
        except KeyboardInterrupt:
            raise
        except IOError:
            # Transient socket trouble; just reconnect.
            pass
        except Exception:
            # Narrowed from a bare except: so SystemExit/GeneratorExit
            # are no longer swallowed; everything else is logged and
            # the listener restarted.
            logger.exception("Received an unexpected exception")
        'Offline testing, uses named file instead of Pulse server'),
}

if __name__ == '__main__':
    # Bootstrap: parse options, initialize logging, connect to the
    # Redis datastore, then either replay a test file or consume live
    # pulse messages.
    options = initOptions(params=_defaultOptions)
    initLogs(options)
    log.info('Starting')

    log.info('Connecting to datastore')
    db = dbRedis(options)

    # ZeroMQ event handling runs in its own process, fed by eventQueue.
    log.info('Creating ZeroMQ handler')
    Process(name='zmq', target=handleZMQ, args=(options, eventQueue, db)).start()

    if options.testfile:
        # Offline mode: replay messages from the named file.
        OfflineTest(options)
    else:
        try:
            log.info('Connecting to Mozilla Pulse with topic "%s"' % options.topic)
            pulse = consumers.BuildConsumer(applabel=options.appinfo)
            pulse.configure(topic=options.topic, callback=cbMessage)
            log.debug('Starting pulse.listen()')
            pulse.listen()
        except:
            log.error('Pulse Exception', exc_info=True)
            # Tell the ZeroMQ process to shut down.
            # NOTE(review): collapsed source is ambiguous about whether
            # this put() sits inside the except block or after the try;
            # placed inside here -- confirm against the original file.
            eventQueue.put(('exit', ))
def __init__(self, rawdata=None, outfile=None, durable=False,
             context='dataadapter', logdir='logs'):
    """Build a pulse data adapter.

    Args:
        rawdata: when true, presumably emit raw pulse payloads rather
            than the extracted attribute set -- stored only; TODO
            confirm against the consuming code.
        outfile: 'stdout', a file path opened for writing, or None for
            no output stream.
        durable: passed through to the pulse consumer configuration.
        context: label fragment used to build the consumer applabel.
        logdir: directory for log output (currently only stored --
            see TODO below).
    """
    self.data = {}

    ####
    # TODO: Put appropriate data in logdir
    ####
    self.logdir = logdir
    self.context = context
    self.durable = durable
    self.rawdata = rawdata

    # Set the output stream to write to: stdout, a named file, or
    # none at all.
    self.outstream = None
    if outfile:
        if outfile == 'stdout':
            outfile = sys.stdout
        else:
            outfile = open(outfile, 'w')
        self.outstream = outfile

    # Setup signal handlers for SIGINT/SIGTERM.
    # NOTE(review): self.signal_handler is defined elsewhere in this
    # class and is not visible here.
    signal.signal(signal.SIGINT, self.signal_handler)
    signal.signal(signal.SIGTERM, self.signal_handler)

    """
    data_attributes description

        key - set of '.' delimited keys in the raw pulse stream

        processor - function reference called with the data structure
            specified in the key.

            Ex: processor(attr_table, pulse_data, data)

        attr_table - List of attributes to process in the data
            structure specified by the key.

            attr - The attribute name in the raw pulse stream.

            attr_test - A list of strings to match against the
                attribute in the raw pulse stream.

            cb - function reference that's called instead of executing
                the default behavior.  Use this when special
                processing of the raw data for an attribute is
                required.

                Ex: cb(attr, pulse_value, data)
    """
    self.data_attributes = {
        '_meta': {
            'processor': self.process_raw_data_dict,
            'attr_table': [
                {
                    'attr': 'routing_key',
                    'cb': self.get_routing_key_data
                }
            ]
        },
        'payload.build': {
            'processor': self.process_raw_data_dict,
            'attr_table': [
                {'attr': 'results'},
                {'attr': 'slave'},
                {'attr': 'times', 'cb': self.get_times_data},
                {'attr': 'blame'},
                {'attr': 'reason'},
            ]
        },
        'payload.build.sourceStamp.changes': {
            'processor': self.process_sourcestamp_changes_list,
            'attr_table': [
                {'attr': 'who'},
                {'attr': 'when'},
                {'attr': 'comments'},
            ]
        },
        'payload.build.properties': {
            'processor': self.process_property_list,
            'attr_table': [
                {'attr': 'revision'},
                {'attr': 'product'},
                {'attr': 'branch'},
                {'attr': 'platform'},
                {'attr': 'buildid'},
                {'attr': 'log_url'},
                {'attr': 'buildername', 'cb': self.get_buildername_data},
                {'attr': 'slavename'},
                {'attr': 'request_ids'},
                {'attr': 'request_times'},
                {
                    # 'buildurl' may appear under several property
                    # names in the raw stream; attr_test lists them.
                    'attr': 'buildurl',
                    'attr_test': ['packageUrl', 'build_url', 'fileURL']
                },
            ],
        }
    }

    # Build the set of required attributes for data validation.
    self.required_attributes = set(
        # These attributes are set outside of the attr_tables in
        # self.data_attributes.
        ['os', 'os_platform', 'arch', 'vm', 'buildtype', 'test_name']
    )

    for key in self.data_attributes:
        for attr_dict in self.data_attributes[key]['attr_table']:
            self.required_attributes.add(attr_dict['attr'])

    ##
    # TODO: This list of routing key strings were excluded from
    #       processing in the current PulseBuildbotTranslator. Confirm
    #       if we need to exclude any of these and then use or remove
    #self.exclude_routing_key_regex = re.compile(
    #    r'[schedulers|tag|submitter|final_verification|fuzzer|source|repack|jetpack|finished]'
    #    )

    # Set the pulse consumer label, unique per context and host so
    # multiple adapters can run concurrently.
    app_label_base = 'pulse-{0}-consumer-{1}-{2}'
    self.buildapp_label = app_label_base.format(
        'build',
        self.context,
        socket.gethostname()
    )

    # Initialize the consumer.
    self.pulse = consumers.BuildConsumer(
        applabel=self.buildapp_label
    )

    # Configure the consumer.
    self.pulse.configure(
        #####
        # TODO: Register a specialized adapter for #.finished
        #       to record the heartbeat of the push. This will
        #       require adding the request_ids and request_times
        #       to the .finished data structure.
        #
        #topic=['#.finished', '#.log_uploaded'],
        #####
        topic=['#.log_uploaded'],
        callback=self.process_data,
        durable=self.durable
    )