Example #1
def worker(jobQueue, graphite, db):
    log.info('starting')

    metrics = Metric(graphite, db)

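    # Main loop: non-blocking poll of the queue; each job is a JSON-encoded
    # list of (metric type, data) pairs.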
    while True:
        try:
            job = jobQueue.get(False)
        except Empty:
            job = None

        if job is not None:
            try:
                jobs = json.loads(job)

                for item in jobs:
                    metric, data = item
                    if metric == METRICS_COUNT:
                        group = data[0]
                        key   = data[1]

                        metrics.count('count')
                        metrics.count(group)
                        metrics.count('%s.%s' % (group, key))

                    elif metric == METRICS_LIST:
                        metrics.count('list')
                        if len(data) == 2:
                            key   = data[0]
                            value = data[1]
                            db.rpush(key, value)

                    elif metric == METRICS_SET:
                        metrics.count('set')
                        if len(data) == 2:
                            key   = data[0]
                            value = data[1]
                            db.sadd(key, value)

                    elif metric == METRICS_HASH:
                        metrics.count('hash')
                        if len(data) == 3:
                            hashName = data[0]
                            key      = data[1]
                            value    = data[2]
                            db.hset(hashName, key, value)
                            db.sadd('metrics.hashes', hashName)

            except Exception:
                log.error('Error converting incoming job', exc_info=True)

            metrics.check()

    log.info('done')
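
For reference, a minimal sketch of the producer side this worker seems to expect: a JSON-encoded list of (metric type, data) pairs pushed onto the shared queue. The METRICS_* values and the queue class are assumptions here, since the module that defines them is not shown.

import json
from multiprocessing import Queue

METRICS_COUNT = 'count'   # assumed value; the worker only compares for equality
METRICS_SET   = 'set'     # assumed value

jobQueue = Queue()        # assumed queue type; anything with get(False)/put() works

# one job = a JSON list of (metric type, data) items, matching
# "metric, data = item" in the worker loop above
batch = [
    (METRICS_COUNT, ('pulse', 'build.finished')),
    (METRICS_SET,   ('metrics.known_slaves', 'slave-001')),
]
jobQueue.put(json.dumps(batch))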
Example #2
def worker(jobs, db, archivePath, statsdServer):
    log.info('starting')

    aCount  = 0
    archive = getArchive(archivePath)

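    # pulse payload properties worth persisting when they show up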
    pNames = ('branch', 'product', 'platform', 'revision', 'request_ids',
              'builduid', 'buildnumber', 'buildid', 'statusdb_id',
              'build_url', 'log_url', 'pgo_build', 'scheduler', 'who',
             )

    metric = Metric(statsdServer, archivePath)

    while True:
        try:
            entry = jobs.get(False)
        except Empty:
            entry = None

        if entry is not None:
            try:
                item = json.loads(entry)

                event    = item['event']
                key      = item['pulse_key']
                master   = item['master'].partition(':')[0].partition('.')[0]
                ts       = item['time']
                entryKey = key.split('.')[1]

                log.debug('Job: %s %s %s' % (event, key, ts))

                metric.incr('pulse', 1)

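                # 'source' events describe an incoming change (push); record it under change:<builduid>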
                if event == 'source':
                    properties = { 'revision':  None,
                                   'builduid':  None,
                                 }
                    try:
                        for p in item['pulse']['payload']['change']['properties']:
                            pName, pValue, _ = p
                            if pName in pNames:
                                properties[pName] = pValue
                    except Exception:
                        log.error('exception extracting properties from build step', exc_info=True)

                    if properties['revision'] is None:
                        properties['revision'] = item['pulse']['payload']['change']['revision']

                    builduid  = properties['builduid']
                    changeKey = 'change:%s' % builduid

                    db.hset(changeKey, 'master',   master)
                    db.hset(changeKey, 'comments', item['pulse']['payload']['change']['comments'])
                    db.hset(changeKey, 'project',  item['pulse']['payload']['change']['project'])
                    db.hset(changeKey, 'branch',   item['pulse']['payload']['change']['branch'])

                    for p in properties:
                        db.hset(changeKey, p, properties[p])

                    tsDate, tsTime = ts.split('T')
                    tsHour         = tsTime[:2]

                    db.sadd('change:%s'    % tsDate,           changeKey)
                    db.sadd('change:%s.%s' % (tsDate, tsHour), changeKey)

                    metric.incr('change', 1)

                elif event == 'slave connect':
                    slave = item['slave']
                    metric.incr('machines.connect', 1)
                    metric.incr('machine.connect.%s' % slave, 1)

                elif event == 'slave disconnect':
                    slave = item['slave']
                    metric.incr('machines.disconnect', 1)
                    metric.incr('machine.disconnect.%s' % slave, 1)

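                # 'build' events carry per-job status; jobs are keyed by builduid, master, and buildnumber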
                elif event == 'build':
                    items      = key.split('.')
                    buildEvent = items[-1]
                    project    = items[1]
                    slave      = item['slave']
                    properties = { 'branch':    None,
                                   'product':   None,
                                   'revision':  None,
                                   'builduid':  None,
                                 }
                    try:
                        for p in item['pulse']['payload']['build']['properties']:
                            pName, pValue, _ = p
                            if pName in pNames:
                                properties[pName] = pValue
                    except Exception:
                        log.error('exception extracting properties from build step', exc_info=True)

                    product = properties['product']

                    if product in ('seamonkey',):
                        print('skipping', product, event)
                    else:
                        tStart    = item['time']
                        branch    = properties['branch']
                        builduid  = properties['builduid']
                        number    = properties['buildnumber']
                        buildKey  = 'build:%s'     % builduid
                        jobKey    = 'job:%s.%s.%s' % (builduid, master, number)
                        jobResult = item['pulse']['payload']['build']['results']

                        db.hset(jobKey, 'slave',   slave)
                        db.hset(jobKey, 'master',  master)
                        db.hset(jobKey, 'results', jobResult)

                        db.lpush('build:slave:jobs:%s' % slave, jobKey)
                        db.ltrim('build:slave:jobs:%s' % slave, 0, 20)

                        print(jobKey, 'results', jobResult)

                        for p in properties:
                            db.hset(jobKey, p, properties[p])

                        if 'scheduler' in properties:
                            scheduler = properties['scheduler']
                        else:
                            scheduler = 'None'
                        if 'platform' in properties:
                            platform = properties['platform']
                        else:
                            platform = 'None'

                        # statskey layout: :product.:platform.:scheduler.:master.:slave.:branch.:buildUID
                        statskey = '%s.%s.%s.%s.%s.%s.%s' % (product, platform, scheduler, master, slave, branch, builduid)

                        if product == 'firefox':
                            metric.incr('jobs.results.%s.%s' % (statskey, jobResult), 1)

                        if buildEvent == 'started':
                            db.hset(jobKey, 'started', tStart)
                            if product == 'firefox':
                                metric.incr('jobs.start.%s' % statskey, 1)

                        elif buildEvent == 'finished':
                            if product == 'firefox':
                                metric.incr('jobs.end.%s' % statskey, 1)

                            # if a start time was recorded earlier, use it for the elapsed calculation
                            ts = db.hget(jobKey, 'started')
                            if ts is not None:
                                tStart = ts

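                            # timestamps carry a trailing UTC-offset suffix (e.g. '-07:00'); drop it before parsing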
                            dStarted   = datetime.strptime(tStart[:-6],       '%Y-%m-%dT%H:%M:%S')
                            dFinished  = datetime.strptime(item['time'][:-6], '%Y-%m-%dT%H:%M:%S')
                            tdElapsed  = dFinished - dStarted
                            secElapsed = (tdElapsed.days * 86400) + tdElapsed.seconds

                            db.hset(jobKey, 'finished', item['time'])
                            db.hset(jobKey, 'elapsed',  secElapsed)
                            if product == 'firefox':
                                #metric.time('build.%s' % statskey, secElapsed)
                                metric.incr('build.time.%s' % statskey, secElapsed)

                            builderName = item['pulse']['payload']['build']['builderName']
                            steps       = []
                            worksteps = getWorksteps(builderName)
                            for step in item['pulse']['payload']['build']['steps']:
                                if worksteps is not None and step['name'] in worksteps:
                                    steps.append(step)
                            for step in steps:
                                stepStart = step['times'][0]
                                stepStop  = step['times'][1]
                                stepKey   = 'step:%s' % step['name']
                                db.hset(jobKey, '%s:start'   % stepKey, stepStart)
                                db.hset(jobKey, '%s:stop'    % stepKey, stepStop)
                                db.hset(jobKey, '%s:elapsed' % stepKey, stepStop - stepStart)

                        elif buildEvent == 'log_uploaded':
                            if 'request_ids' in properties:
                                db.hset(jobKey, 'request_ids', properties['request_ids'])

                        tsDate, tsTime = tStart.split('T')
                        tsHour         = tsTime[:2]

                        db.sadd('build:%s'    % tsDate,           buildKey)
                        db.sadd('build:%s.%s' % (tsDate, tsHour), buildKey)
                        db.sadd(buildKey, jobKey)

            except Exception:
                log.error('Error converting incoming job', exc_info=True)

            if archive is not None:
                archive.write('%s\n' % entry)

            aCount += 1
            if aCount > ARCHIVE_CHUNK:
                if archive is not None:
                    archive.close()
                archive = getArchive(archivePath)
                aCount  = 0

    if archive is not None:
        archive.close()

    log.info('done')
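
A rough sketch of the entry shape this worker reads, reconstructed only from the keys it accesses; every value below is illustrative, and the real pulse payloads carry many more fields.

import json

entry = json.dumps({
    'event':     'build',
    'pulse_key': 'build.mozilla-central.12345.finished',
    'master':    'buildbot-master01.build.example.com:8001',
    'time':      '2012-06-01T12:34:56-07:00',   # trailing offset is sliced off before strptime
    'slave':     'bld-linux64-001',
    'pulse': {
        'payload': {
            'build': {
                'results':     0,
                'builderName': 'Linux mozilla-central build',
                'steps':       [],
                'properties': [                  # (name, value, source) triples, as unpacked above
                    ['product',     'firefox',          'source'],
                    ['branch',      'mozilla-central',  'source'],
                    ['builduid',    'abcdef0123456789', 'source'],
                    ['buildnumber', 42,                 'source'],
                ],
            },
        },
    },
})
# jobs.put(entry) hands this to the worker loop above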
Example #3
def worker(jobQueue, graphite, db):
    log.info('starting')

    metrics = Metric(graphite, db)

    while True:
        try:
            job = jobQueue.get(False)
        except Empty:
            job = None

        if job is not None:
            try:
                jobs = json.loads(job)

                for item in jobs:
                    metric, data = item
                    if metric == METRICS_COUNT:
                        group = data[0]
                        key = data[1]

                        metrics.count('count')
                        metrics.count(group)
                        metrics.count('%s.%s' % (group, key))

                    elif metric == METRICS_LIST:
                        metrics.count('list')
                        if len(data) == 2:
                            key = data[0]
                            value = data[1]
                            db.rpush(key, value)

                    elif metric == METRICS_SET:
                        metrics.count('set')
                        if len(data) == 2:
                            key = data[0]
                            value = data[1]
                            db.sadd(key, value)

                    elif metric == METRICS_HASH:
                        metrics.count('hash')
                        if len(data) == 3:
                            hashName = data[0]
                            key = data[1]
                            value = data[2]
                            db.hset(hashName, key, value)
                            db.sadd('metrics.hashes', hashName)
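                    # raw pass-through: data is (metric name, integer value); emit a Carbon plaintext line "name value timestamp"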
                    elif metric == METRICS_RAW:
                        metrics.carbon('%s %d %s\n' %
                                       (data[0], data[1], time.time()))

            except Exception:
                log.error('Error converting incoming job', exc_info=True)

            metrics.check()

    log.info('done')
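
This variant differs from Example #1 only in the METRICS_RAW branch, which forwards a (name, value) pair straight to Carbon. A sketch of one such job, with the constant's value assumed:

import json

METRICS_RAW = 'raw'   # assumed value; defined elsewhere in the original module

job = json.dumps([(METRICS_RAW, ('builds.pending', 17))])   # metric name, integer value
# the worker then emits the Carbon plaintext line "builds.pending 17 <unix time>"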
Example #4
def worker(jobs, db, archivePath, statsdServer):
    log.info('starting')

    aCount  = 0
    archive = getArchive(archivePath)

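    # pulse payload properties worth persisting when they show up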
    pNames = ('branch', 'product', 'platform', 'revision', 'request_ids',
              'builduid', 'buildnumber', 'buildid', 'statusdb_id',
              'build_url', 'log_url', 'pgo_build', 'scheduler', 'who',
             )

    metric = Metric(statsdServer, archivePath)

    while True:
        try:
            entry = jobs.get(False)
        except Empty:
            entry = None

        if entry is not None:
            try:
                item = json.loads(entry)

                event    = item['event']
                key      = item['pulse_key']
                master   = item['master'].partition(':')[0].partition('.')[0]
                ts       = item['time']
                entryKey = key.split('.')[1]

                log.debug('Job: %s %s %s' % (event, key, ts))

                metric.incr('pulse', 1)

                if event == 'source':
                    properties = { 'revision':  None,
                                   'builduid':  None,
                                 }
                    try:
                        for p in item['pulse']['payload']['change']['properties']:
                            pName, pValue, _ = p
                            if pName in pNames:
                                properties[pName] = pValue
                    except Exception:
                        log.error('exception extracting properties from build step', exc_info=True)

                    if properties['revision'] is None:
                        properties['revision'] = item['pulse']['payload']['change']['revision']

                    builduid  = properties['builduid']
                    changeKey = 'change:%s' % builduid

                    db.hset(changeKey, 'master',   master)
                    db.hset(changeKey, 'comments', item['pulse']['payload']['change']['comments'])
                    db.hset(changeKey, 'project',  item['pulse']['payload']['change']['project'])
                    db.hset(changeKey, 'branch',   item['pulse']['payload']['change']['branch'])

                    for p in properties:
                        db.hset(changeKey, p, properties[p])

                    tsDate, tsTime = ts.split('T')
                    tsHour         = tsTime[:2]

                    db.sadd('change:%s'    % tsDate,           changeKey)
                    db.sadd('change:%s.%s' % (tsDate, tsHour), changeKey)

                    metric.incr('change', 1)

                elif event == 'slave connect':
                    slave = item['slave']
                    metric.incr('machines.connect', 1)
                    metric.incr('machine.connect.%s' % slave, 1)

                elif event == 'slave disconnect':
                    slave = item['slave']
                    metric.incr('machines.disconnect', 1)
                    metric.incr('machine.disconnect.%s' % slave, 1)

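                # 'build' events carry per-job status; jobs are keyed by builduid, master, and buildnumber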
                elif event == 'build':
                    items      = key.split('.')
                    buildEvent = items[-1]
                    project    = items[1]
                    slave      = item['slave']
                    properties = { 'branch':    None,
                                   'product':   None,
                                   'revision':  None,
                                   'builduid':  None,
                                 }
                    try:
                        for p in item['pulse']['payload']['build']['properties']:
                            pName, pValue, _ = p
                            if pName in pNames:
                                properties[pName] = pValue
                    except Exception:
                        log.error('exception extracting properties from build step', exc_info=True)

                    product = properties['product']

                    if product in ('seamonkey',):
                        print('skipping', product, event)
                    else:
                        tStart    = item['time']
                        branch    = properties['branch']
                        builduid  = properties['builduid']
                        number    = properties['buildnumber']
                        buildKey  = 'build:%s'     % builduid
                        jobKey    = 'job:%s.%s.%s' % (builduid, master, number)
                        jobResult = item['pulse']['payload']['build']['results']

                        db.hset(jobKey, 'slave',   slave)
                        db.hset(jobKey, 'master',  master)
                        db.hset(jobKey, 'results', jobResult)

                        db.lpush('build:slave:jobs:%s' % slave, jobKey)
                        db.ltrim('build:slave:jobs:%s' % slave, 0, 20)

                        print(jobKey, 'results', jobResult)

                        for p in properties:
                            db.hset(jobKey, p, properties[p])

                        if 'scheduler' in properties:
                            scheduler = properties['scheduler']
                        else:
                            scheduler = 'None'
                        if 'platform' in properties:
                            platform = properties['platform']
                        else:
                            platform = 'None'

                        # statskey layout: :product.:platform.:scheduler.:master.:slave.:branch.:buildUID
                        statskey = '%s.%s.%s.%s.%s.%s.%s' % (product, platform, scheduler, master, slave, branch, builduid)

                        if product == 'firefox':
                            metric.incr('jobs.results.%s.%s' % (statskey, jobResult), 1)

                        if buildEvent == 'started':
                            db.hset(jobKey, 'started', tStart)
                            if product == 'firefox':
                                metric.incr('jobs.start.%s' % statskey, 1)

                        elif buildEvent == 'finished':
                            if product == 'firefox':
                                metric.incr('jobs.end.%s' % statskey, 1)

                            # if a start time was recorded earlier, use it for the elapsed calculation
                            ts = db.hget(jobKey, 'started')
                            if ts is not None:
                                tStart = ts

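                            # timestamps carry a trailing UTC-offset suffix (e.g. '-07:00'); drop it before parsing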
                            dStarted   = datetime.strptime(tStart[:-6],       '%Y-%m-%dT%H:%M:%S')
                            dFinished  = datetime.strptime(item['time'][:-6], '%Y-%m-%dT%H:%M:%S')
                            tdElapsed  = dFinished - dStarted
                            secElapsed = (tdElapsed.days * 86400) + tdElapsed.seconds

                            db.hset(jobKey, 'finished', item['time'])
                            db.hset(jobKey, 'elapsed',  secElapsed)
                            if product == 'firefox':
                                #metric.time('build.%s' % statskey, secElapsed)
                                metric.incr('build.time.%s' % statskey, secElapsed)

                        elif buildEvent == 'log_uploaded':
                            if 'request_ids' in properties:
                                db.hset(jobKey, 'request_ids', properties['request_ids'])

                        tsDate, tsTime = tStart.split('T')
                        tsHour         = tsTime[:2]

                        db.sadd('build:%s'    % tsDate,           buildKey)
                        db.sadd('build:%s.%s' % (tsDate, tsHour), buildKey)
                        db.sadd(buildKey, jobKey)

            except Exception:
                log.error('Error converting incoming job', exc_info=True)

            if archive is not None:
                archive.write('%s\n' % entry)

            aCount += 1
            if aCount > ARCHIVE_CHUNK:
                if archive is not None:
                    archive.close()
                archive = getArchive(archivePath)
                aCount  = 0

    if archive is not None:
        archive.close()

    log.info('done')
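
Both this example and Example #2 archive every raw entry and rotate the archive after ARCHIVE_CHUNK entries via getArchive(). The real helper and constant are defined elsewhere; a minimal sketch of the shape they would need, assuming a timestamped file opened for appending:

import os
import time

ARCHIVE_CHUNK = 1000   # assumed rotation threshold

def getArchive(archivePath):
    # return None when archiving is disabled, matching the "archive is not None" checks above
    if not archivePath:
        return None
    name = 'pulse-%s.log' % time.strftime('%Y%m%d%H%M%S')
    return open(os.path.join(archivePath, name), 'a')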