예제 #1
0
    def __init__(self, options):
        """Configure the runner from parsed command-line options."""
        # Where the command queue lives on disk.
        self.queuedir = options.queuedir
        self.q = QueueDir('commands', self.queuedir)

        # Run/retry tunables copied off the options object.
        self.concurrency = options.concurrency
        self.retry_time = options.retry_time
        self.max_retries = options.max_retries
        self.max_time = options.max_time

        # Jobs that have been started and not yet finished.
        self.active = []

        # Processes scheduled for a signal: (signal_time, level, proc)
        self.to_kill = []
예제 #2
0
    def __init__(self, queuedir, publisher, max_idle_time=300,
                 max_connect_time=600, retry_time=60, max_retries=5):
        """Set up the pulse queue and connection/retry bookkeeping."""
        self.queuedir = QueueDir('pulse', queuedir)
        self.publisher = publisher
        self.max_idle_time = max_idle_time
        self.max_connect_time = max_connect_time
        self.retry_time = retry_time
        self.max_retries = max_retries

        # Absolute time at which we should next disconnect, or None.
        self._disconnect_timer = None
        # Timestamp of the most recent send activity.
        self._last_activity = None
        # Timestamp of when the current connection was established.
        self._last_connection = None
예제 #3
0
    def __init__(self, options):
        """Initialize the command runner from parsed options."""
        self.queuedir = options.queuedir
        # The incoming command queue lives under queuedir.
        self.q = QueueDir('commands', self.queuedir)

        # Concurrency limit and retry/timeout settings.
        self.concurrency = options.concurrency
        self.retry_time = options.retry_time
        self.max_retries = options.max_retries
        self.max_time = options.max_time

        # Currently running jobs.
        self.active = []

        # List of (signal_time, level, proc)
        self.to_kill = []
예제 #4
0
    def __init__(self,
                 queuedir,
                 publisher,
                 max_idle_time=300,
                 max_connect_time=600,
                 retry_time=60,
                 max_retries=5):
        """Open the pulse queue and reset connection-tracking state."""
        self.queuedir = QueueDir('pulse', queuedir)
        self.publisher = publisher
        self.max_idle_time = max_idle_time
        self.max_connect_time = max_connect_time
        self.retry_time = retry_time
        self.max_retries = max_retries

        # When we should next disconnect (absolute time), or None.
        self._disconnect_timer = None
        # When we last had send activity.
        self._last_activity = None
        # When we last connected.
        self._last_connection = None
예제 #5
0
class CommandRunner(object):
    """Run commands popped from a QueueDir, with a concurrency limit.

    Jobs that fail to start (OSError) or exit non-zero are requeued with
    a retry delay, up to `max_retries` attempts.
    """

    def __init__(self, options):
        self.queuedir = options.queuedir
        self.q = QueueDir('commands', self.queuedir)
        self.concurrency = options.concurrency
        self.retry_time = options.retry_time
        self.max_retries = options.max_retries
        self.max_time = options.max_time

        # Jobs that have been started and not yet finished.
        self.active = []

        # List of (signal_time, level, proc)
        self.to_kill = []

    def run(self, job):
        """
        Runs the given job
        """
        log.info("Running %s", job.cmd)
        try:
            job.start()
            self.active.append(job)
        except OSError:
            job.log.write("\nFailed with OSError; requeuing in %i seconds\n" %
                          self.retry_time)
            # Wait to requeue it
            # If we die, then it's still in cur, and will be moved back into
            # 'new' eventually
            self.q.requeue(job.item_id, self.retry_time, self.max_retries)

    def monitor(self):
        """
        Monitor running jobs, removing finished ones from the active list
        and requeuing failures.
        """
        # Iterate over a copy since we remove finished jobs as we go.
        for job in self.active[:]:
            self.q.touch(job.item_id)
            result = job.check()

            if result is not None:
                self.active.remove(job)
                if result == 0:
                    self.q.remove(job.item_id)
                else:
                    # log.warn is a deprecated alias of log.warning
                    log.warning("%s failed; requeuing", job.item_id)
                    # Requeue it!
                    self.q.requeue(
                        job.item_id, self.retry_time, self.max_retries)

    def loop(self):
        """
        Main processing loop. Read new items from the queue and run them!
        """
        while True:
            self.monitor()
            if len(self.active) >= self.concurrency:
                # At capacity; wait for a child to finish.
                time.sleep(1)
                continue

            while len(self.active) < self.concurrency:
                item = self.q.pop()
                if not item:
                    # Don't wait for very long, since we have to check up on
                    # our children
                    if self.active:
                        self.q.wait(1)
                    else:
                        self.q.wait()
                    break

                item_id, fp = item
                try:
                    command = json.load(fp)
                    job = Job(command, item_id, self.q.getlog(item_id))
                    job.max_time = self.max_time
                    self.run(job)
                except ValueError:
                    # Couldn't parse it as json
                    # There's no hope!
                    self.q.log(item_id, "Couldn't load json; murdering")
                    self.q.murder(item_id)
                finally:
                    fp.close()
예제 #6
0
class PulsePusher(object):
    """
    Publish buildbot events via pulse.

    `queuedir`         - a directory to look for incoming events being written
                         by a buildbot master

    `publisher`        - an instance of mozillapulse.GenericPublisher indicating where
                         these messages should be sent

    `max_idle_time`    - number of seconds since last activity after which we'll
                         disconnect. Set to None/0 to disable

    `max_connect_time` - number of seconds since we last connected after which
                         we'll disconnect. Set to None/0 to disable

    `retry_time`       - time in seconds to wait between retries

    `max_retries`      - how many times to retry
    """
    def __init__(self, queuedir, publisher, max_idle_time=300,
                 max_connect_time=600, retry_time=60, max_retries=5):
        self.queuedir = QueueDir('pulse', queuedir)
        self.publisher = publisher
        self.max_idle_time = max_idle_time
        self.max_connect_time = max_connect_time
        self.retry_time = retry_time
        self.max_retries = max_retries

        # When should we next disconnect
        self._disconnect_timer = None
        # When did we last have activity
        self._last_activity = None
        # When did we last connect
        self._last_connection = None

    def send(self, events):
        """
        Send events to pulse

        `events` - a list of buildbot event dicts
        """
        if not self._last_connection and self.max_connect_time:
            self._last_connection = time.time()
        log.debug("Sending %i messages", len(events))
        start = time.time()
        skipped = 0
        sent = 0
        for e in events:
            routing_key = e['event']
            if any(exp.search(routing_key) for exp in skip_exps):
                skipped += 1
                log.debug("Skipping event %s", routing_key)
                continue
            else:
                log.debug("Sending event %s", routing_key)
            msg = BuildMessage(transform_times(e))
            self.publisher.publish(msg)
            sent += 1
        end = time.time()
        log.info("Sent %i messages in %.2fs (skipped %i)", sent,
                 end - start, skipped)
        self._last_activity = time.time()

        # Update our disconnect timer: the earlier of the connect-time
        # and idle-time deadlines, whichever limits are enabled.
        t = 0
        if self.max_connect_time:
            t = self._last_connection + self.max_connect_time
        if self.max_idle_time:
            if t:
                t = min(t, self._last_activity + self.max_idle_time)
            else:
                t = self._last_activity + self.max_idle_time
        if t:
            self._disconnect_timer = t

    def maybe_disconnect(self):
        "Disconnect from pulse if our timer has expired"
        now = time.time()
        if self._disconnect_timer and now > self._disconnect_timer:
            log.info("Disconnecting")
            self.publisher.disconnect()
            self._disconnect_timer = None
            self._last_connection = None
            self._last_activity = None

    def loop(self):
        """
        Main processing loop. Read new items from the queue, push them to
        pulse, remove processed items, and then wait for more.
        """
        while True:
            self.maybe_disconnect()

            # Grab any new events
            item_ids = []
            events = []
            come_back_soon = False
            try:
                while True:
                    item = self.queuedir.pop()
                    if not item:
                        break
                    if len(events) > 50:
                        come_back_soon = True
                        break

                    try:
                        item_id, fp = item
                        item_ids.append(item_id)
                        log.debug("Loading %s", item)
                        events.extend(json.load(fp))
                    # Catch Exception rather than a bare except so that
                    # KeyboardInterrupt/SystemExit still propagate.
                    except Exception:
                        log.exception("Error loading %s", item_id)
                        raise
                    finally:
                        fp.close()
                log.info("Loaded %i events", len(events))
                self.send(events)
                for item_id in item_ids:
                    log.info("Removing %s", item_id)
                    try:
                        self.queuedir.remove(item_id)
                    except OSError:
                        # Somebody (re-)moved it already, that's ok!
                        pass
            # A bare except here would also swallow KeyboardInterrupt and
            # SystemExit, making the service impossible to stop cleanly.
            except Exception:
                log.exception("Error processing messages")
                # Don't try again soon, something has gone horribly wrong!
                come_back_soon = False
                for item_id in item_ids:
                    self.queuedir.requeue(
                        item_id, self.retry_time, self.max_retries)

            if come_back_soon:
                # Let's do more right now!
                log.info("Doing more!")
                continue

            # Wait for more
            # don't wait more than our max_idle/max_connect_time
            now = time.time()
            to_wait = None
            if self._disconnect_timer:
                to_wait = self._disconnect_timer - now
                if to_wait < 0:
                    to_wait = None
            log.info("Waiting for %s", to_wait)
            self.queuedir.wait(to_wait)
예제 #7
0
class PulsePusher(object):
    """
    Publish buildbot events via pulse.

    `queuedir`         - a directory to look for incoming events being written
                         by a buildbot master

    `publisher`        - an instance of mozillapulse.GenericPublisher indicating where
                         these messages should be sent

    `max_idle_time`    - number of seconds since last activity after which we'll
                         disconnect. Set to None/0 to disable

    `max_connect_time` - number of seconds since we last connected after which
                         we'll disconnect. Set to None/0 to disable

    `retry_time`       - time in seconds to wait between retries

    `max_retries`      - how many times to retry
    """
    def __init__(self,
                 queuedir,
                 publisher,
                 max_idle_time=300,
                 max_connect_time=600,
                 retry_time=60,
                 max_retries=5):
        self.queuedir = QueueDir('pulse', queuedir)
        self.publisher = publisher
        self.max_idle_time = max_idle_time
        self.max_connect_time = max_connect_time
        self.retry_time = retry_time
        self.max_retries = max_retries

        # When should we next disconnect
        self._disconnect_timer = None
        # When did we last have activity
        self._last_activity = None
        # When did we last connect
        self._last_connection = None

    def send(self, events):
        """
        Send events to pulse

        `events` - a list of buildbot event dicts
        """
        if not self._last_connection and self.max_connect_time:
            self._last_connection = time.time()
        log.debug("Sending %i messages", len(events))
        start = time.time()
        skipped = 0
        sent = 0
        for e in events:
            routing_key = e['event']
            if any(exp.search(routing_key) for exp in skip_exps):
                skipped += 1
                log.debug("Skipping event %s", routing_key)
                continue
            else:
                log.debug("Sending event %s", routing_key)
            msg = BuildMessage(transform_times(e))
            self.publisher.publish(msg)
            sent += 1
        end = time.time()
        log.info("Sent %i messages in %.2fs (skipped %i)", sent, end - start,
                 skipped)
        self._last_activity = time.time()

        # Update our disconnect timer: the earlier of the connect-time
        # and idle-time deadlines, whichever limits are enabled.
        t = 0
        if self.max_connect_time:
            t = self._last_connection + self.max_connect_time
        if self.max_idle_time:
            if t:
                t = min(t, self._last_activity + self.max_idle_time)
            else:
                t = self._last_activity + self.max_idle_time
        if t:
            self._disconnect_timer = t

    def maybe_disconnect(self):
        "Disconnect from pulse if our timer has expired"
        now = time.time()
        if self._disconnect_timer and now > self._disconnect_timer:
            log.info("Disconnecting")
            self.publisher.disconnect()
            self._disconnect_timer = None
            self._last_connection = None
            self._last_activity = None

    def loop(self):
        """
        Main processing loop. Read new items from the queue, push them to
        pulse, remove processed items, and then wait for more.
        """
        while True:
            self.maybe_disconnect()

            # Grab any new events
            item_ids = []
            events = []
            come_back_soon = False
            try:
                while True:
                    item = self.queuedir.pop()
                    if not item:
                        break
                    if len(events) > 50:
                        come_back_soon = True
                        break

                    try:
                        item_id, fp = item
                        item_ids.append(item_id)
                        log.debug("Loading %s", item)
                        events.extend(json.load(fp))
                    # Catch Exception rather than a bare except so that
                    # KeyboardInterrupt/SystemExit still propagate.
                    except Exception:
                        log.exception("Error loading %s", item_id)
                        raise
                    finally:
                        fp.close()
                log.info("Loaded %i events", len(events))
                self.send(events)
                for item_id in item_ids:
                    log.info("Removing %s", item_id)
                    try:
                        self.queuedir.remove(item_id)
                    except OSError:
                        # Somebody (re-)moved it already, that's ok!
                        pass
            # A bare except here would also swallow KeyboardInterrupt and
            # SystemExit, making the service impossible to stop cleanly.
            except Exception:
                log.exception("Error processing messages")
                # Don't try again soon, something has gone horribly wrong!
                come_back_soon = False
                for item_id in item_ids:
                    self.queuedir.requeue(item_id, self.retry_time,
                                          self.max_retries)

            if come_back_soon:
                # Let's do more right now!
                log.info("Doing more!")
                continue

            # Wait for more
            # don't wait more than our max_idle/max_connect_time
            now = time.time()
            to_wait = None
            if self._disconnect_timer:
                to_wait = self._disconnect_timer - now
                if to_wait < 0:
                    to_wait = None
            log.info("Waiting for %s", to_wait)
            self.queuedir.wait(to_wait)
예제 #8
0
    def __init__(self, config):
        """Remember the config dict and open the outgoing queues."""
        self.config = config
        # Queues consumed by the command runner and the pulse pusher.
        self.command_queue = QueueDir('commands', config['command_queue'])
        self.pulse_queue = QueueDir('pulse', config['pulse_queue'])
예제 #9
0
class PostRunner(object):
    """Post-processes a finished build: uploads its log, mails try
    results, records the build in statusdb, and publishes a pulse
    message. Each stage re-enqueues a command for the next stage."""

    def __init__(self, config):
        self.config = config

        self.command_queue = QueueDir('commands', config['command_queue'])
        self.pulse_queue = QueueDir('pulse', config['pulse_queue'])

    def uploadLog(self, build):
        """Uploads the build log, and returns the URL to it"""
        builder = build.builder

        info = self.getBuildInfo(build)
        branch = info['branch']
        product = info['product']
        platform = info['platform']

        upload_args = ['-r', '2', '-t', '10', '--master-name',
                       self.config['statusdb.master_name']]
        if "nightly" in builder.name:
            upload_args.append("--nightly")
        if builder.name.startswith("release-"):
            upload_args.append("--release")
            upload_args.append(
                "%s/%s" % (info.get('version'), info.get('build_number')))

        if branch and 'try' in branch:
            upload_args.append("--try")
        elif branch == 'shadow-central':
            upload_args.append("--shadow")

        if 'l10n' in builder.name:
            upload_args.append("--l10n")

        if product:
            upload_args.extend(["--product", product])

        if platform:
            upload_args.extend(["--platform", platform])
        else:
            upload_args.extend(["--platform", 'noarch'])

        if branch:
            upload_args.extend(["--branch", branch])

        upload_args.extend(self.getUploadArgs(build, product))
        upload_args.extend([builder.basedir, str(build.number)])

        my_dir = os.path.abspath(os.path.dirname(__file__))
        cmd = [sys.executable, "%s/log_uploader.py" % my_dir] + upload_args

        log.info("Running %s", cmd)

        # Close the devnull handle when done (the original leaked it).
        devnull = open(os.devnull)
        try:
            output = get_output(cmd, stdin=devnull)
        finally:
            devnull.close()

        # Look for URLs (raw string so \S is a regex escape, not a
        # string escape)
        url = re.search(r"http(s)?://\S+", output)
        if url:
            return url.group()
        return None

    def mailResults(self, build, log_url):
        """Queue a try_mailer.py command to mail results for `build`,
        pointing recipients at `log_url`."""
        my_dir = os.path.abspath(os.path.dirname(__file__))
        cmd = [sys.executable, "%s/try_mailer.py" % my_dir,
               "--log-url", log_url,
               ]

        cmd.extend(['-f', self.config.get(
            'mail_notifier_sender', '*****@*****.**')])
        if self.config.get('mail_real_author'):
            cmd.append('--to-author')

        for r in self.config.get('mail_extra_people', []):
            cmd.extend(['-t', r])

        cmd.extend([build.builder.basedir, str(build.number)])

        # Add the command to our queue
        self.command_queue.add(json.dumps(cmd))

    def getUploadArgs(self, build, product):
        """Return extra log_uploader.py args (user, ssh key, host)
        resolved per-product and per-branch from config['ssh_info']."""
        ssh_info = self.config['ssh_info']
        # "*" acts as the fallback key at each level.
        ssh_info = ssh_info.get(product, ssh_info["*"])

        branch = self.getBuildInfo(build).get('branch')
        ssh_info = ssh_info.get(branch, ssh_info["*"])

        retval = ['--user', ssh_info['user']]
        if 'sshkey' in ssh_info:
            retval.extend(["-i", ssh_info['sshkey']])
        retval.append(ssh_info['host'])
        return retval

    def getBuild(self, build_path):
        """Load and return the pickled build at `build_path`, attaching
        a minimal fake builder derived from the pickle's directory."""
        log.info("Loading build pickle")
        if not os.path.exists(build_path):
            raise ValueError("Couldn't find %s" % build_path)

        builder_path = os.path.dirname(build_path)

        class FakeBuilder:
            basedir = builder_path
            name = os.path.basename(builder_path)

        # NOTE: pickle.load is unsafe on untrusted data; build pickles
        # are assumed to come from our own buildbot master.
        # Close the file promptly instead of relying on GC.
        fp = open(build_path)
        try:
            build = pickle.load(fp)
        finally:
            fp.close()
        build.builder = FakeBuilder()
        return build

    def getBuildInfo(self, build):
        """
        Returns a dictionary with
        'branch', 'platform', 'product'
        set as appropriate
        """
        props = build.getProperties()
        retval = {}
        # Prefer the stage_* property when set, fall back to the plain
        # property, else None.
        if props.getProperty('stage_platform') is not None:
            retval['platform'] = props['stage_platform']
        elif props.getProperty('platform') is not None:
            retval['platform'] = props['platform']
        else:
            retval['platform'] = None

        if props.getProperty('stage_product') is not None:
            retval['product'] = props['stage_product']
        elif props.getProperty('product') is not None:
            retval['product'] = props['product']
        else:
            retval['product'] = None

        # fennec builds are called 'fennec' by buildbot, but are 'mobile' as
        # far as post_upload.py is concerned.
        if retval['product'] == 'fennec':
            retval['product'] = 'mobile'

        if props.getProperty('branch') is not None:
            retval['branch'] = props['branch']
        else:
            retval['branch'] = None

        # version/build_number are only present for release builds.
        if props.getProperty('version') is not None:
            retval['version'] = props['version']

        if props.getProperty('build_number') is not None:
            retval['build_number'] = props['build_number']

        log.debug("Build info: %s", retval)
        return retval

    def writePulseMessage(self, options, build, build_id):
        """Queue a 'log_uploaded' pulse event for `build`."""
        builder_name = build.builder.name
        msg = {
            'event': 'build.%s.%s.log_uploaded' % (builder_name, build.number),
            'payload': {"build": build.asDict()},
            'master_name': options.master_name,
            'master_incarnation': options.master_incarnation,
            'id': build_id,
        }
        self.pulse_queue.add(json.dumps([msg]))

    def updateStatusDB(self, build, request_ids):
        """Insert or update this build in statusdb and link it to its
        scheduler requests. Returns the statusdb build id."""
        log.info("Updating statusdb")
        session = model.connect(self.config['statusdb.url'])()
        master = model.Master.get(session, self.config['statusdb.master_url'])
        master.name = unicode(self.config['statusdb.master_name'])

        if not master.id:
            log.debug("added master")
            session.add(master)
            session.commit()

        builder_name = build.builder.name
        db_builder = model.Builder.get(session, builder_name, master.id)
        db_builder.category = unicode(build.getProperty('branch'))

        starttime = None
        if build.started:
            starttime = datetime.utcfromtimestamp(build.started)

        log.debug("searching for build")
        q = session.query(model.Build).filter_by(
            master_id=master.id,
            builder=db_builder,
            buildnumber=build.number,
            starttime=starttime,
        )
        db_build = q.first()
        if not db_build:
            log.debug("creating new build")
            db_build = model.Build.fromBBBuild(
                session, build, builder_name, master.id)
        else:
            log.debug("updating old build")
            db_build.updateFromBBBuild(session, build)
        session.commit()
        log.debug("committed")

        log.debug("updating schedulerdb_requests table")

        schedulerdb = sa.create_engine(self.config['schedulerdb.url'])

        for i in request_ids:
            # See if we already have this row
            q = model.schedulerdb_requests.select()
            q = q.where(
                model.schedulerdb_requests.c.status_build_id == db_build.id)
            q = q.where(model.schedulerdb_requests.c.scheduler_request_id == i)
            q = q.limit(1).execute()
            if not q.fetchone():
                # Find the schedulerdb build id for this
                bid = schedulerdb.execute(
                    sa.text('select id from builds where brid=:brid and number=:number'),
                    brid=i, number=build.number
                ).fetchone()
                if bid is not None:
                    bid = bid[0]
                    log.debug("bid for %s is %s", i, bid)
                    model.schedulerdb_requests.insert().execute(
                        status_build_id=db_build.id,
                        scheduler_request_id=i,
                        scheduler_build_id=bid,
                    )
        log.debug("build id is %s", db_build.id)
        return db_build.id

    def getRequestTimes(self, request_ids):
        """Returns a dictionary of request_id => submitted_at (as an epoch
        time)"""
        schedulerdb = sa.create_engine(self.config['schedulerdb.url'])
        retval = {}
        for i in request_ids:
            submitted_at = schedulerdb.execute(
                sa.text(
                    "select submitted_at from buildrequests where id=:brid"),
                brid=i,
            ).fetchone()
            if submitted_at is not None:
                retval[i] = submitted_at[0]
        return retval

    def processBuild(self, options, build_path, request_ids):
        """Run the next post-processing stage for the build pickle at
        `build_path`. The stage is chosen from which options are already
        set: upload log -> statusdb -> pulse; each stage requeues the
        command with the extra flag for the following stage."""
        build = self.getBuild(build_path)
        info = self.getBuildInfo(build)
        if not options.log_url:
            log.info("uploading log")
            log_url = self.uploadLog(build)
            if log_url is None:
                # 'null' is a sentinel so the requeued command line still
                # carries a --log-url value.
                log_url = 'null'
            cmd = [sys.executable] + sys.argv + ["--log-url", log_url]
            self.command_queue.add(json.dumps(cmd))
            # If this is for try, Mail the try user as well
            if info['branch'] in self.config['mail_notifier_branches']:
                self.mailResults(build, log_url)
        elif not options.statusdb_id:
            log.info("adding to statusdb")
            log_url = options.log_url
            if log_url == 'null':
                log_url = None
            log.debug("adding properties")
            build.properties.setProperty('log_url', log_url, 'postrun.py')
            build.properties.setProperty(
                'request_ids', [int(i) for i in request_ids], 'postrun.py')
            build.properties.setProperty('request_times', self.getRequestTimes(
                request_ids), 'postrun.py')
            build_id = self.updateStatusDB(build, request_ids)

            cmd = [sys.executable] + sys.argv + [
                "--statusdb-id", str(build_id)]
            self.command_queue.add(json.dumps(cmd))
        else:
            log.info("publishing to pulse")
            log_url = options.log_url
            build_id = options.statusdb_id
            build.properties.setProperty('log_url', log_url, 'postrun.py')
            build.properties.setProperty('statusdb_id', build_id, 'postrun.py')
            build.properties.setProperty(
                'request_ids', [int(i) for i in request_ids], 'postrun.py')
            build.properties.setProperty('request_times', self.getRequestTimes(
                request_ids), 'postrun.py')
            self.writePulseMessage(options, build, build_id)
예제 #10
0
class CommandRunner(object):
    """Run commands popped from a QueueDir, with a concurrency limit.

    Jobs that fail to start (OSError) or exit non-zero are requeued with
    a retry delay, up to `max_retries` attempts.
    """

    def __init__(self, options):
        self.queuedir = options.queuedir
        self.q = QueueDir('commands', self.queuedir)
        self.concurrency = options.concurrency
        self.retry_time = options.retry_time
        self.max_retries = options.max_retries
        self.max_time = options.max_time

        # Jobs that have been started and not yet finished.
        self.active = []

        # List of (signal_time, level, proc)
        self.to_kill = []

    def run(self, job):
        """
        Runs the given job
        """
        log.info("Running %s", job.cmd)
        try:
            job.start()
            self.active.append(job)
        except OSError:
            job.log.write("\nFailed with OSError; requeuing in %i seconds\n" %
                          self.retry_time)
            # Wait to requeue it
            # If we die, then it's still in cur, and will be moved back into
            # 'new' eventually
            self.q.requeue(job.item_id, self.retry_time, self.max_retries)

    def monitor(self):
        """
        Monitor running jobs, removing finished ones from the active list
        and requeuing failures.
        """
        # Iterate over a copy since we remove finished jobs as we go.
        for job in self.active[:]:
            self.q.touch(job.item_id)
            result = job.check()

            if result is not None:
                self.active.remove(job)
                if result == 0:
                    self.q.remove(job.item_id)
                else:
                    # log.warn is a deprecated alias of log.warning
                    log.warning("%s failed; requeuing", job.item_id)
                    # Requeue it!
                    self.q.requeue(job.item_id, self.retry_time,
                                   self.max_retries)

    def loop(self):
        """
        Main processing loop. Read new items from the queue and run them!
        """
        while True:
            self.monitor()
            if len(self.active) >= self.concurrency:
                # At capacity; wait for a child to finish.
                time.sleep(1)
                continue

            while len(self.active) < self.concurrency:
                item = self.q.pop()
                if not item:
                    # Don't wait for very long, since we have to check up on
                    # our children
                    if self.active:
                        self.q.wait(1)
                    else:
                        self.q.wait()
                    break

                item_id, fp = item
                try:
                    command = json.load(fp)
                    job = Job(command, item_id, self.q.getlog(item_id))
                    job.max_time = self.max_time
                    self.run(job)
                except ValueError:
                    # Couldn't parse it as json
                    # There's no hope!
                    self.q.log(item_id, "Couldn't load json; murdering")
                    self.q.murder(item_id)
                finally:
                    fp.close()
    def __init__(self, config):
        # NOTE(review): this __init__ sits inside CommandRunner after
        # loop() and shadows the __init__(self, options) defined earlier
        # in the class — it looks like a paste artifact; confirm which
        # constructor is intended before relying on either.
        self.config = config

        self.command_queue = QueueDir('commands', config['command_queue'])
        self.pulse_queue = QueueDir('pulse', config['pulse_queue'])
class PostRunner(object):
    def __init__(self, config):
        """Set up the post-runner: keep the config and attach to the
        command and pulse queue directories it names."""
        self.config = config

        # Queue of commands to (re-)execute, and of pulse messages
        # awaiting publication.
        self.command_queue = QueueDir('commands', config['command_queue'])
        self.pulse_queue = QueueDir('pulse', config['pulse_queue'])

    def uploadLog(self, build):
        """Uploads the build log, and returns the URL to it.

        Assembles the argument list for log_uploader.py from the build's
        branch/product/platform info, runs it as a subprocess, and scrapes
        the first URL out of its output. Returns None if no URL is found.
        """
        builder = build.builder

        info = self.getBuildInfo(build)
        branch = info['branch']
        # NOTE(review): getBuildInfo can return product=None, which would
        # raise AttributeError here — confirm callers guarantee a product.
        product = info['product'].lower()
        platform = info['platform']

        upload_args = [
            '-r', '2', '-t', '10', '--master-name',
            self.config['statusdb.master_name']
        ]
        if "nightly" in builder.name:
            upload_args.append("--nightly")
        if builder.name.startswith("release-") and \
                not info['release_promotion']:
            upload_args.append("--release")
            upload_args.append("%s/%s" %
                               (info.get('version'), info.get('build_number')))

        if branch and 'try' in branch:
            upload_args.append("--try")
        elif branch == 'shadow-central':
            upload_args.append("--shadow")

        if 'l10n' in builder.name and not info['release_promotion']:
            upload_args.append("--l10n")

        if product:
            upload_args.extend(["--product", product])

        if platform:
            upload_args.extend(["--platform", platform])
        else:
            upload_args.extend(["--platform", 'noarch'])

        if branch:
            upload_args.extend(["--branch", branch])

        upload_args.extend(self.getUploadArgs(build, product))
        upload_args.extend([builder.basedir, str(build.number)])

        my_dir = os.path.abspath(os.path.dirname(__file__))
        cmd = [sys.executable, "%s/log_uploader.py" % my_dir] + upload_args

        log.info("Running %s", cmd)

        # Use a context manager so the devnull handle is always closed
        # (previously it was opened and leaked).
        with open(os.devnull) as devnull:
            output = get_output(cmd, stdin=devnull)

        # Look for URLs (raw string so the \S escape is explicit).
        url = re.search(r"http(s)?://\S+", output)
        if url:
            return url.group()
        return None

    def mailResults(self, build, log_url):
        my_dir = os.path.abspath(os.path.dirname(__file__))
        cmd = [
            sys.executable,
            "%s/try_mailer.py" % my_dir,
            "--log-url",
            log_url,
        ]

        cmd.extend([
            '-f',
            self.config.get('mail_notifier_sender',
                            '*****@*****.**')
        ])
        if self.config.get('mail_real_author'):
            cmd.append('--to-author')

        for r in self.config.get('mail_extra_people', []):
            cmd.extend(['-t', r])

        cmd.extend([build.builder.basedir, str(build.number)])

        # Add the command to our queue
        self.command_queue.add(json.dumps(cmd))

    def getUploadArgs(self, build, product):
        ssh_info = self.config['ssh_info']
        ssh_info = ssh_info.get(product, ssh_info["*"])

        branch = self.getBuildInfo(build).get('branch')
        ssh_info = ssh_info.get(branch, ssh_info["*"])

        retval = ['--user', ssh_info['user']]
        if 'sshkey' in ssh_info:
            retval.extend(["-i", ssh_info['sshkey']])
        retval.append(ssh_info['host'])
        return retval

    def getBuild(self, build_path):
        """Load a pickled buildbot build from build_path and attach a
        minimal stand-in builder whose basedir/name come from the pickle's
        directory.

        Raises ValueError if build_path does not exist.
        """
        log.info("Loading build pickle")
        if not os.path.exists(build_path):
            raise ValueError("Couldn't find %s" % build_path)

        builder_path = os.path.dirname(build_path)

        class FakeBuilder:
            basedir = builder_path
            name = os.path.basename(builder_path)

        # Close the handle deterministically (it was previously leaked)
        # and open in binary mode, which pickle expects.
        # NOTE: pickle.load executes arbitrary code from the file — this
        # assumes build pickles are trusted, locally-produced files.
        with open(build_path, 'rb') as fp:
            build = pickle.load(fp)
        build.builder = FakeBuilder()
        return build

    def getBuildInfo(self, build):
        """
        Returns a dictionary with
        'branch', 'platform', 'product'
        set as appropriate

        'version' and 'build_number' are included only when those
        properties are set; 'release_promotion' is always included
        (possibly None).
        """
        props = build.getProperties()
        info = {}

        # Platform: the staged value wins over the plain one.
        info['platform'] = None
        for key in ('stage_platform', 'platform'):
            if props.getProperty(key) is not None:
                info['platform'] = props[key]
                break

        # Product: staged value is taken verbatim; the plain one is
        # lowercased.
        if props.getProperty('stage_product') is not None:
            info['product'] = props['stage_product']
        elif props.getProperty('product') is None:
            info['product'] = None
        else:
            info['product'] = props['product'].lower()

        # fennec builds are called 'fennec' by buildbot, but are 'mobile' as
        # far as post_upload.py is concerned.
        if info['product'] == 'fennec':
            info['product'] = 'mobile'

        if props.getProperty('branch') is None:
            info['branch'] = None
        else:
            info['branch'] = props['branch']

        # Optional keys: only present when the property is set.
        for key in ('version', 'build_number'):
            if props.getProperty(key) is not None:
                info[key] = props[key]

        info['release_promotion'] = props.getProperty('release_promotion')

        log.debug("Build info: %s", info)
        return info

    def writePulseMessage(self, options, build, build_id):
        builder_name = build.builder.name
        msg = {
            'event': 'build.%s.%s.log_uploaded' % (builder_name, build.number),
            'payload': {
                "build": build.asDict()
            },
            'master_name': options.master_name,
            'master_incarnation': options.master_incarnation,
            'id': build_id,
        }
        self.pulse_queue.add(json.dumps([msg]))

    def updateStatusDB(self, build, request_ids):
        """Insert or update this build in statusdb and link it to its
        scheduler requests.

        Ensures the master and builder rows exist, upserts the build row
        (matched by master/builder/number/starttime), then records the
        real start/end times and fills the schedulerdb_requests mapping
        table for each request id. Returns the statusdb build id.
        """
        log.info("Updating statusdb")
        # model.connect returns a session factory; call it to get a session.
        session = model.connect(self.config['statusdb.url'])()
        master = model.Master.get(session, self.config['statusdb.master_url'])
        master.name = unicode(self.config['statusdb.master_name'])

        # A falsy id means the master row isn't persisted yet.
        if not master.id:
            log.debug("added master")
            session.add(master)
            session.commit()

        builder_name = build.builder.name
        db_builder = model.Builder.get(session, builder_name, master.id)
        db_builder.category = unicode(build.getProperty('branch'))

        starttime = None
        if build.started:
            # build.started is an epoch timestamp.
            starttime = datetime.utcfromtimestamp(build.started)

        log.debug("searching for build")
        q = session.query(model.Build).filter_by(
            master_id=master.id,
            builder=db_builder,
            buildnumber=build.number,
            starttime=starttime,
        )
        db_build = q.first()
        # Force start/endtime to None initially so that we get all the
        # properties, steps, etc. populated
        old_times = build.started, build.finished
        build.started, build.finished = None, None
        if not db_build:
            log.debug("creating new build")
            db_build = model.Build.fromBBBuild(session, build, builder_name,
                                               master.id)
        else:
            log.debug("updating old build")
            db_build.updateFromBBBuild(session, build)
        session.commit()
        log.debug("committed")

        # Now we can record the actual build times
        log.debug("updating times")
        # ts2dt converts the saved epoch timestamps to datetimes.
        old_times = [ts2dt(t) for t in old_times]
        db_build.starttime, db_build.endtime = old_times
        session.commit()

        log.debug("updating schedulerdb_requests table")

        schedulerdb = sa.create_engine(self.config['schedulerdb.url'])

        for i in request_ids:
            # See if we already have this row
            q = model.schedulerdb_requests.select()
            q = q.where(
                model.schedulerdb_requests.c.status_build_id == db_build.id)
            q = q.where(model.schedulerdb_requests.c.scheduler_request_id == i)
            q = q.limit(1).execute()
            if not q.fetchone():
                # Find the schedulerdb build id for this
                bid = schedulerdb.execute(sa.text(
                    'select id from builds where brid=:brid and number=:number'
                ),
                                          brid=i,
                                          number=build.number).fetchone()
                if bid is not None:
                    bid = bid[0]
                    log.debug("bid for %s is %s", i, bid)
                    model.schedulerdb_requests.insert().execute(
                        status_build_id=db_build.id,
                        scheduler_request_id=i,
                        scheduler_build_id=bid,
                    )
        log.debug("build id is %s", db_build.id)
        return db_build.id

    def getRequestTimes(self, request_ids):
        """Returns a dictionary of request_id => submitted_at (as an epoch
        time)

        Request ids with no matching buildrequests row are omitted.
        """
        schedulerdb = sa.create_engine(self.config['schedulerdb.url'])
        # Build the query once; only the bound parameter varies per id.
        query = sa.text(
            "select submitted_at from buildrequests where id=:brid")
        times = {}
        for request_id in request_ids:
            row = schedulerdb.execute(query, brid=request_id).fetchone()
            if row is not None:
                times[request_id] = row[0]
        return times

    def processBuild(self, options, build_path, request_ids):
        """Process one finished build in up to three self-requeuing phases.

        Each phase does its work and then re-enqueues this same command
        (sys.argv) with an extra flag, so the next invocation runs the
        next phase:
          1. no --log-url yet: upload the log (and mail try results);
          2. no --statusdb-id yet: record the build in statusdb;
          3. otherwise: publish the log_uploaded event to pulse.
        """
        build = self.getBuild(build_path)
        info = self.getBuildInfo(build)
        if not options.log_url:
            log.info("uploading log")
            log_url = self.uploadLog(build)
            if log_url is None:
                # 'null' is the on-the-wire placeholder for "no URL";
                # phase 2 translates it back to None.
                log_url = 'null'
            cmd = [sys.executable] + sys.argv + ["--log-url", log_url]
            self.command_queue.add(json.dumps(cmd))
            # If this is for try, Mail the try user as well
            if info['branch'] in self.config['mail_notifier_branches']:
                self.mailResults(build, log_url)
        elif not options.statusdb_id:
            log.info("adding to statusdb")
            log_url = options.log_url
            if log_url == 'null':
                log_url = None
            log.debug("adding properties")
            build.properties.setProperty('log_url', log_url, 'postrun.py')
            build.properties.setProperty('request_ids',
                                         [int(i) for i in request_ids],
                                         'postrun.py')
            build.properties.setProperty('request_times',
                                         self.getRequestTimes(request_ids),
                                         'postrun.py')
            build_id = self.updateStatusDB(build, request_ids)

            # Requeue ourselves with the statusdb id so the next run
            # publishes to pulse.
            cmd = [sys.executable] + sys.argv + [
                "--statusdb-id", str(build_id)
            ]
            self.command_queue.add(json.dumps(cmd))
        else:
            log.info("publishing to pulse")
            log_url = options.log_url
            build_id = options.statusdb_id
            # Attach everything the pulse consumers expect to see on the
            # build before serializing it.
            build.properties.setProperty('log_url', log_url, 'postrun.py')
            build.properties.setProperty('statusdb_id', build_id, 'postrun.py')
            build.properties.setProperty('request_ids',
                                         [int(i) for i in request_ids],
                                         'postrun.py')
            build.properties.setProperty('request_times',
                                         self.getRequestTimes(request_ids),
                                         'postrun.py')
            self.writePulseMessage(options, build, build_id)