Beispiel #1
0
    def getAllChanges(self, request, status, debug_info):
        """Fetch the most recent changes from the master's database.

        Runs as a Twisted deferredGenerator: each waitForDeferred wrapper is
        yielded and its result then retrieved with getResult(); the final
        yield of a plain value is how a deferredGenerator returns its result.

        Args:
            request: twisted.web request; may carry a 'limit' query arg.
            status: build status object (not used in this method).
            debug_info: dict; "source_len" is recorded here for debugging.
        """
        master = request.site.buildbot_service.master
        # Clamp the requested limit into [1, console_max_rev_limit], falling
        # back to console_default_rev_limit when no arg was given.
        max_rev_limit = skia_vars.GetGlobalVariable('console_max_rev_limit')
        default_rev_limit = skia_vars.GetGlobalVariable(
            'console_default_rev_limit')
        limit = min(
            max_rev_limit,
            max(1, int(request.args.get('limit', [default_rev_limit])[0])))
        wfd = defer.waitForDeferred(master.db.changes.getRecentChanges(limit))
        yield wfd
        chdicts = wfd.getResult()

        # convert those to Change instances
        wfd = defer.waitForDeferred(
            defer.gatherResults([
                changes_module.Change.fromChdict(master, chdict)
                for chdict in chdicts
            ]))
        yield wfd
        all_changes = wfd.getResult()

        all_changes.sort(key=self.comparator.getSortingKey())

        # Remove the dups: after sorting, changes with equal revisions are
        # adjacent, so keeping only the first of each run de-duplicates.
        prev_change = None
        new_changes = []
        for change in all_changes:
            rev = change.revision
            if not prev_change or rev != prev_change.revision:
                new_changes.append(change)
            prev_change = change
        all_changes = new_changes

        debug_info["source_len"] = len(all_changes)
        yield all_changes
Beispiel #2
0
    def content(self, request, cxt):
        """Build the main console view display."""

        refresh_seconds = None
        # Honor a "reload" query arg, but never allow reloads faster than
        # once per 15 seconds; a value of 0 requests no reload.
        if "reload" in request.args:
            try:
                refresh_seconds = int(request.args["reload"][0])
            except ValueError:
                pass
            else:
                if refresh_seconds != 0:
                    refresh_seconds = max(refresh_seconds, 15)

        request.setHeader('Cache-Control', 'no-cache')

        # Fall back to the configured default refresh interval when no
        # (usable) value was supplied.
        if not refresh_seconds:
            refresh_seconds = skia_vars.GetGlobalVariable(
                'default_webstatus_refresh')

        # Ask the template to emit the page-refresh tag.
        if refresh_seconds is not None and refresh_seconds != 0:
            cxt['refresh'] = refresh_seconds

        # Categories to load but hide initially, passed as comma-separated
        # lists in one or more "hideCategories" query args.
        hidden_cats = []
        for arg_value in request.args.get("hideCategories", []):
            hidden_cats += arg_value.split(',')
        cxt['hide_categories'] = hidden_cats

        # Same treatment for subcategories.
        hidden_subcats = []
        for arg_value in request.args.get("hideSubcategories", []):
            hidden_subcats += arg_value.split(',')
        cxt['hide_subcategories'] = hidden_subcats

        # Console event-loading limits.
        cxt['default_console_limit'] = skia_vars.GetGlobalVariable(
            'console_default_rev_limit')
        cxt['max_console_limit'] = skia_vars.GetGlobalVariable(
            'console_max_rev_limit')

        template = request.site.buildbot_service.templates.get_template(
            "console.html")
        return template.render(cxt)
    def _UploadDMResults(self):
        """Upload DM results: the dm.json summary plus all <hash>.png images."""
        # self._dm_dir holds a bunch of <hash>.png files and one dm.json.

        # Stage dm.json in its own temp dir so it can go to a different GS
        # destination than the images.
        staging_dir = tempfile.mkdtemp()
        shutil.move(os.path.join(self._dm_dir, 'dm.json'),
                    os.path.join(staging_dir, 'dm.json'))

        # /dm-json-v1/year/month/day/hour/git-hash/builder/build-number
        utc = datetime.datetime.utcnow()
        path_parts = [
            'dm-json-v1',
            str(utc.year).zfill(4),
            str(utc.month).zfill(2),
            str(utc.day).zfill(2),
            str(utc.hour).zfill(2),
            self._got_revision,
            self._builder_name,
            self._build_number,
        ]
        summary_dest_dir = '/'.join(path_parts)
        # Trybot results are further siloed by CL.
        if self._is_try:
            summary_dest_dir = '/'.join(
                ['trybot', summary_dest_dir, self._args['issue_number']])

        gs = gs_utils.GSUtils()
        acl = gs.PLAYBACK_CANNED_ACL  # Private,
        fine_acls = gs.PLAYBACK_FINEGRAINED_ACL_LIST  # but Google-visible.

        # Upload the dm.json summary file (always overwritten).
        gs.upload_dir_contents(
            source_dir=staging_dir,
            dest_bucket=skia_vars.GetGlobalVariable('dm_summaries_bucket'),
            dest_dir=summary_dest_dir,
            upload_if=gs.UploadIf.ALWAYS,
            predefined_acl=acl,
            fine_grained_acl_list=fine_acls)

        # Now we can upload everything that's left, all the .pngs.
        gs.upload_dir_contents(
            source_dir=self._dm_dir,
            dest_bucket=skia_vars.GetGlobalVariable('dm_images_bucket'),
            dest_dir='dm-images-v1',
            upload_if=gs.UploadIf.IF_NEW,
            predefined_acl=acl,
            fine_grained_acl_list=fine_acls)

        # Just for hygiene, put dm.json back.
        shutil.move(os.path.join(staging_dir, 'dm.json'),
                    os.path.join(self._dm_dir, 'dm.json'))
        os.rmdir(staging_dir)
Beispiel #4
0
    def _Run(self):
        # Copy actual-results.json to skimage/actuals
        print '\n\n====Uploading skimage actual-results to Google Storage====\n\n'
        src_dir = os.path.abspath(
            os.path.join(self._skimage_out_dir, self._builder_name))
        bucket_url = gs_utils.GSUtils.with_gs_prefix(
            skia_vars.GetGlobalVariable('googlestorage_bucket'))
        dest_dir = posixpath.join(bucket_url, 'skimage', 'actuals',
                                  self._builder_name)
        http_header_lines = ['Cache-Control:public,max-age=3600']
        old_gs_utils.upload_dir_contents(local_src_dir=src_dir,
                                         remote_dest_dir=dest_dir,
                                         gs_acl='public-read',
                                         http_header_lines=http_header_lines)

        # Copy actual images to Google Storage at skimage/output. This will merge
        # with the existing files.
        print '\n\n========Uploading skimage results to Google Storage=======\n\n'
        src_dir = os.path.abspath(os.path.join(self._skimage_out_dir,
                                               'images'))
        dest_dir = posixpath.join(bucket_url, 'skimage', 'output', 'images')
        if os.path.isdir(src_dir) and os.listdir(src_dir):
            old_gs_utils.upload_dir_contents(
                local_src_dir=src_dir,
                remote_dest_dir=dest_dir,
                gs_acl=gs_utils.GSUtils.PLAYBACK_CANNED_ACL)
Beispiel #5
0
    def displayPage(self, request, status, builder_list, all_builds, revisions,
                    categories, repository, branch, debug_info):
        """Display the console page."""
        # Assemble the template substitution dict from everything we know.
        subs = {
            "branch": branch or 'trunk',
            "repository": repository,
        }
        if categories:
            subs["categories"] = ' '.join(categories)
        subs["time"] = time.strftime("%a %d %b %Y %H:%M:%S",
                                     time.localtime(util.now()))
        subs["debugInfo"] = debug_info
        subs["ANYBRANCH"] = ANYBRANCH

        subs['builders'] = builder_list if builder_list else {}
        subs['revisions'] = []

        # One output row per revision.
        for revision in revisions:
            row = {}

            row['id'] = revision.revision
            row['link'] = revision.revlink
            # Attribute commit-bot-landed changes to the original author.
            if (skia_vars.GetGlobalVariable('commit_bot_username')
                    in revision.who and 'Author: ' in revision.comments):
                author = revision.comments.split('Author: ')[1].split('\n')[0]
                author += ' (commit-bot)'
            else:
                author = revision.who
            row['who'] = utils.FixGitSvnEmail(author)
            row['date'] = revision.date
            row['comments'] = revision.comments
            row['repository'] = revision.repository
            row['project'] = revision.project

            # Display the status for all builders.
            builds, details = self.displayStatusLine(builder_list, all_builds,
                                                     revision, debug_info)
            row['builds'] = builds
            row['details'] = details

            # td colspan for the comment/details cells: one column per
            # builder plus two extra columns.
            row["span"] = 2 + sum(
                len(builder_list[category]) for category in builder_list)

            subs['revisions'].append(row)

        #
        # Display the footer of the page.
        #
        debug_info["load_time"] = time.time() - debug_info["load_time"]
        return subs
    class CompileSkia(Skia):
        """Configuration for the compile-only Skia build master."""
        project_name = 'CompileSkia'
        project_url = skia_vars.GetGlobalVariable('project_url')
        master_host = skia_vars.GetGlobalVariable('compile_master_host')
        master_fqdn = SKIA_COMPILE_MASTER_INTERNAL_FQDN
        # Only the machine whose FQDN matches the master's counts as
        # production.
        is_production_host = socket.getfqdn(
        ) == SKIA_COMPILE_MASTER_INTERNAL_FQDN
        _skip_render_results_upload = False
        _skip_bench_results_upload = False
        master_port = skia_vars.GetGlobalVariable('compile_internal_port')
        slave_port = skia_vars.GetGlobalVariable('compile_slave_port')
        master_port_alt = skia_vars.GetGlobalVariable('compile_external_port')
        tree_closing_notification_recipients = ['*****@*****.**']
        from_address = skia_vars.GetGlobalVariable('gce_smtp_user')
        is_publicly_visible = True
        code_review_site = \
            skia_vars.GetGlobalVariable('code_review_status_listener')

        def create_schedulers_and_builders(self, cfg):
            """Create the Schedulers and Builders.

            Args:
                cfg: dict; configuration dict for the build master.
            """
            # These imports need to happen inside this function because modules
            # imported by master_builders_cfg import this module.
            import master_builders_cfg
            import master_compile_builders_cfg
            master_builders_cfg.create_schedulers_and_builders(
                sys.modules[__name__], self, cfg,
                master_compile_builders_cfg.setup_all_builders)
Beispiel #7
0
 def CompareGMs(self):
     """Compare the actually-generated GM images to the checked-in baselines."""
     preamble = skia_vars.GetGlobalVariable('latest_gm_failures_preamble')
     # Capture the failures URL that compare_gms.py prints right after the
     # preamble line into the 'latest_gm_failures_url' build property.
     stdout_props = {
         'latest_gm_failures_url': '%s([^\n]*)\n' % preamble,
     }
     self.AddSlaveScript(script='compare_gms.py',
                         description='CompareGMs',
                         get_props_from_stdout=stdout_props,
                         is_rebaseline_step=True)
Beispiel #8
0
def rebaseline_server_url(directive, builder_name):
  """Returns the URL for results from this builder on rebaseline_server.

  Args:
    directive: part of the URL, indicating whether to generate a 'prefetch'
        URL, a 'live-loader' URL, or a link to live-view.html
    builder_name: name of this builder
  """
  summaries_bucket = skia_vars.GetGlobalVariable('skp_summaries_bucket')
  # URL-encode the whole gs:// path (safe='' escapes '/' as well).
  summary_dir = urllib.quote(
      'gs://%s/%s' % (summaries_bucket, builder_name), safe='')
  return URL_FORMATTER.format(directive=directive,
                              actuals_summary_dir=summary_dir)
    def _GSUploadAllImages(self, src_dir):
        """Upload all image files from src_dir to Google Storage.
    We know that GM wrote out these image files with a filename pattern we
    can use to generate the checksum-based Google Storage paths."""
        all_files = sorted(os.listdir(src_dir))

        def filematcher(filename):
            return filename.endswith('.png')

        files_to_upload = filter(filematcher, all_files)
        print 'Uploading %d GM-actual files to Google Storage...' % (
            len(files_to_upload))
        if not files_to_upload:
            return
        filename_pattern = re.compile('^([^_]+)_(.+)_([^_]+)\.png$')

        gm_actuals_subdir = 'gm'
        temp_root = tempfile.mkdtemp()
        try:
            # Copy all of the desired files to a staging dir, with new filenames.
            for filename in files_to_upload:
                match = filename_pattern.match(filename)
                if not match:
                    print 'Warning: found no images matching pattern "%s"' % filename
                    continue
                (hashtype, test, hashvalue) = match.groups()
                src_filepath = os.path.join(src_dir, filename)
                temp_dir = os.path.join(temp_root, gm_actuals_subdir, hashtype,
                                        test)
                if not os.path.isdir(temp_dir):
                    os.makedirs(temp_dir)
                shutil.copy(src_filepath,
                            os.path.join(temp_dir, hashvalue + '.png'))

            # Upload the entire staging dir to Google Storage.
            # At present, this will merge the entire contents of [temp_root]/gm
            # into the existing contents of gs://chromium-skia-gm/gm .
            #
            # TODO(epoger): Add a "noclobber" mode to gs_utils.upload_dir_contents()
            # and use it here so we don't re-upload image files we already have
            # in Google Storage.
            bucket_url = gs_utils.GSUtils.with_gs_prefix(
                skia_vars.GetGlobalVariable('googlestorage_bucket'))
            old_gs_utils.upload_dir_contents(
                local_src_dir=os.path.abspath(
                    os.path.join(temp_root, gm_actuals_subdir)),
                remote_dest_dir=posixpath.join(bucket_url, gm_actuals_subdir),
                gs_acl='public-read',
                http_header_lines=['Cache-Control:public,max-age=3600'])
        finally:
            shutil.rmtree(temp_root)
Beispiel #10
0
        def got_changes(all_changes):
            """Callback: filter the changes, gather builds, render the page.

            Closes over branch, dev_name, num_revs, num_builds, categories,
            builders, status, request, repository, debug_info and cxt from
            the enclosing scope.
            """
            debug_info["source_all"] = len(all_changes)

            # Restrict revisions to the requested branch/author, and always
            # to the Skia git repository.
            rev_filter = {}
            if branch != ANYBRANCH:
                rev_filter['branch'] = branch
            if dev_name:
                rev_filter['who'] = dev_name
            rev_filter['repository'] = skia_vars.GetGlobalVariable(
                'skia_git_url')
            revisions = list(
                self.filterRevisions(all_changes,
                                     max_revs=num_revs,
                                     rev_filter=rev_filter))
            debug_info["revision_final"] = len(revisions)

            # Fetch all the builds for all builders until we get the next build
            # after last_revision.
            builder_list = None
            all_builds = None
            if revisions:
                last_revision = revisions[len(revisions) - 1].revision
                debug_info["last_revision"] = last_revision

                (builder_list, all_builds) = self.getAllBuildsForRevision(
                    status, request, last_revision, num_builds, categories,
                    builders, debug_info)

            debug_info["added_blocks"] = 0
            debug_info["from_cache"] = 0

            # Debug hook: dump the global cache instead of rendering.
            if request.args.get("display_cache", None):
                data = ""
                data += "\nGlobal Cache\n"
                data += self.cache.display()
                return data

            cxt.update(
                self.displayPage(request, status, builder_list, all_builds,
                                 revisions, categories, repository, branch,
                                 debug_info))
            # Clean up the cache.
            if debug_info["added_blocks"]:
                self.cache.trim()
            return {'builders': cxt['builders'], 'revisions': cxt['revisions']}
    def _Run(self):
        # Tar up the results.
        result_tarball = '%s_%s.tgz' % (self._builder_name, self._got_revision)
        shell_utils.run([
            'tar', '-cvzf',
            os.path.join(os.pardir, result_tarball),
            self._flavor_utils.result_dir
        ])

        # Upload to Google Storage
        bucket_url = gs_utils.GSUtils.with_gs_prefix(
            skia_vars.GetGlobalVariable('googlestorage_bucket'))
        upload_to_bucket.upload_to_bucket(os.path.join(os.pardir,
                                                       result_tarball),
                                          bucket_url,
                                          subdir=GS_DRT_SUBDIR)

        print 'To download the tarball, run this command:'
        gs_url = posixpath.join(bucket_url, GS_DRT_SUBDIR, result_tarball)
        print 'gsutil cp %s <local_dir>' % gs_url
Beispiel #12
0
    def _Run(self):
        """Check GM results against expectations; raise on any mismatch."""
        json_summary_path = misc.GetAbsPath(
            os.path.join(self._gm_actual_dir, run_gm.JSON_SUMMARY_FILENAME))

        # Temporary list of builders who are allowed to fail this step without
        # the bot turning red.
        may_fail_with_warning = []
        # This import must happen after BuildStep.__init__ because it requires
        # that CWD is in PYTHONPATH, and BuildStep.__init__ may change the CWD.
        from gm import display_json_results
        success = display_json_results.Display(json_summary_path)
        # Print a rebaseline-server link for the latest failures.
        print('%s<a href="%s?resultsToLoad=/results/failures&builder=%s">'
              'link</a>' %
              (skia_vars.GetGlobalVariable('latest_gm_failures_preamble'),
               LIVE_REBASELINE_SERVER_BASEURL, self._builder_name))
        if success:
            return
        if self._builder_name in may_fail_with_warning:
            raise BuildStepWarning('Expectations mismatch in %s!' %
                                   json_summary_path)
        raise Exception('Expectations mismatch in %s!' % json_summary_path)
Beispiel #13
0
def HtmlResourceRender(self, request):
    """ Override of buildbot.status.web.base.HtmlResource.render:
  http://src.chromium.org/viewvc/chrome/trunk/tools/build/third_party/buildbot_8_4p1/buildbot/status/web/base.py?view=markup

  We modify it to pass additional variables on to the web status pages, and
  remove the "if False" section.
  """
    # tell the WebStatus about the HTTPChannel that got opened, so they
    # can close it if we get reconfigured and the WebStatus goes away.
    # They keep a weakref to this, since chances are good that it will be
    # closed by the browser or by us before we get reconfigured. See
    # ticket #102 for details.
    if hasattr(request, "channel"):
        # web.distrib.Request has no .channel
        request.site.buildbot_service.registerChannel(request.channel)

    ctx = self.getContext(request)

    ############################## Added by borenet ##############################
    # Collect the category/subcategory structure of all builders. A builder's
    # category is encoded as 'category|subcategory'; either missing piece
    # falls back to 'default'.
    status = self.getStatus(request)
    all_builders = status.getBuilderNames()
    all_full_category_names = set()
    all_categories = set()
    all_subcategories = set()
    subcategories_by_category = {}
    for builder_name in all_builders:
        category_full = status.getBuilder(builder_name).category or 'default'
        all_full_category_names.add(category_full)
        category_split = category_full.split('|')
        category = category_split[0]
        subcategory = category_split[1] if len(
            category_split) > 1 else 'default'
        all_categories.add(category)
        all_subcategories.add(subcategory)
        if not subcategories_by_category.get(category):
            subcategories_by_category[category] = []
        if not subcategory in subcategories_by_category[category]:
            subcategories_by_category[category].append(subcategory)

    ctx['tree_status_baseurl'] = \
        skia_vars.GetGlobalVariable('tree_status_baseurl')

    ctx['all_full_category_names'] = sorted(list(all_full_category_names))
    ctx['all_categories'] = sorted(list(all_categories))
    ctx['all_subcategories'] = sorted(list(all_subcategories))
    ctx['subcategories_by_category'] = subcategories_by_category
    ctx['default_refresh'] = \
        skia_vars.GetGlobalVariable('default_webstatus_refresh')
    ctx['skia_repo'] = config_private.SKIA_GIT_URL

    # Expose the active master's identity, ports and slave configuration to
    # the templates.
    active_master = config_private.Master.get_active_master()
    ctx['internal_port'] = active_master.master_port
    ctx['external_port'] = active_master.master_port_alt
    ctx['title_url'] = config_private.Master.Skia.project_url
    ctx['slave_hosts_cfg'] = slave_hosts_cfg.SLAVE_HOSTS
    ctx['slaves_cfg'] = slaves_cfg.SLAVES

    ctx['active_master_name'] = active_master.project_name
    ctx['master_revision'] = utils.get_current_revision()
    ctx['master_running_revision'] = active_master.running_revision
    ctx['master_launch_datetime'] = active_master.launch_datetime
    # Requests that arrive on the internal port get the internal view.
    ctx['is_internal_view'] = request.host.port == ctx['internal_port']
    ctx['masters'] = []
    for master in config_private.Master.valid_masters:
        ctx['masters'].append({
            'name': master.project_name,
            'host': master.master_host,
            'internal_port': master.master_port,
            'external_port': master.master_port_alt,
        })
    ##############################################################################

    # Render asynchronously; self.content may return a Deferred or a string.
    d = defer.maybeDeferred(lambda: self.content(request, ctx))

    def handle(data):
        # Encode unicode bodies and set headers; HEAD requests get the
        # content-length but no body.
        if isinstance(data, unicode):
            data = data.encode("utf-8")
        request.setHeader("content-type", self.contentType)
        if request.method == "HEAD":
            request.setHeader("content-length", len(data))
            return ''
        return data

    d.addCallback(handle)

    def ok(data):
        request.write(data)
        request.finish()

    def fail(f):
        request.processingFailed(f)
        return None  # processingFailed will log this for us

    d.addCallbacks(ok, fail)
    return server.NOT_DONE_YET
Beispiel #14
0
This differs from UpdateScripts in that it updates ALL of the buildbot script
checkouts for ALL buildslaves, as opposed to a single buildslave's checkout of
the buildbot scripts on a single host machine.
"""


import re
import skia_vars
import sys

from build_step import BuildStep, BuildStepWarning
from scripts import run_cmd
from utils import force_update_checkout


BUILDBOT_GIT_URL = skia_vars.GetGlobalVariable('buildbot_git_url')


class UpdateAllBuildslaves(BuildStep):
  def _Run(self):
    script_path = run_cmd.ResolvablePath('slave', 'skia_slave_scripts', 'utils',
                                         'force_update_checkout.py')
    sync_cmd = ['python', script_path]
    results = run_cmd.run_on_all_slaves_on_all_hosts(sync_cmd)
    failed = []
    for host in results.iterkeys():
      print host
      # If results[host] is a MultiCommandResults instance, then we have results
      # for buildslaves running on that machine, which implies that we were able
      # to log in to the machine successfully.
      if isinstance(results[host], run_cmd.MultiCommandResults):
# pylint: enable=C0301

import posixpath
import sys

from build_step import BuildStep
from utils import gs_utils
from utils import old_gs_utils
import generate_doxygen
import skia_vars

# It's silly that we include 'doxygen' in the destination path twice, but
# that's to maintain current behavior while fixing http://skbug.com/2658 .
DOXYGEN_GSUTIL_PATH = posixpath.join(
    gs_utils.GSUtils.with_gs_prefix(
        skia_vars.GetGlobalVariable('googlestorage_bucket')), 'doxygen',
    'doxygen')

# Directives for HTTP caching of these files served out by Google Storage.
# See http://www.w3.org/Protocols/rfc2616/rfc2616-sec14.html
GS_CACHE_CONTROL_HEADER = 'Cache-Control:public,max-age=3600'


class UploadDoxygen(BuildStep):
    """Build step which uploads generated doxygen docs to Google Storage."""
    def _Run(self):
        # Push the whole doxygen output dir to the fixed GS path,
        # world-readable, with the standard cache-control header.
        old_gs_utils.upload_dir_contents(
            local_src_dir=generate_doxygen.DOXYGEN_WORKING_DIR,
            remote_dest_dir=DOXYGEN_GSUTIL_PATH,
            gs_acl='public-read',
            http_header_lines=[GS_CACHE_CONTROL_HEADER])
class Master(config_default.Master):
    """Skia build master configuration.

    Defines one nested class per build master instance; the active master
    is selected by name via set_active_master() and retrieved with
    get_active_master().
    """
    googlecode_revlinktmpl = 'http://code.google.com/p/%s/source/browse?r=%s'
    bot_password = '******'
    default_clobber = False

    # SMTP configurations.
    smtp_server = skia_vars.GetGlobalVariable('gce_smtp_server')
    smtp_port = skia_vars.GetGlobalVariable('gce_smtp_port')
    smtp_use_tls = skia_vars.GetGlobalVariable('gce_smtp_use_tls')
    smtp_user = skia_vars.GetGlobalVariable('gce_smtp_user')

    # domains to which we will send blame emails
    permitted_domains = ['google.com', 'chromium.org']

    class Skia(object):
        """Configuration for the public Skia master; the other master
        classes below all derive from this one."""
        project_name = 'Skia'
        project_url = skia_vars.GetGlobalVariable('project_url')
        master_host = skia_vars.GetGlobalVariable('public_master_host')
        master_fqdn = SKIA_PUBLIC_MASTER_INTERNAL_FQDN
        # Only the machine whose FQDN matches the master's counts as
        # production.
        is_production_host = socket.getfqdn(
        ) == SKIA_PUBLIC_MASTER_INTERNAL_FQDN
        _skip_render_results_upload = False
        _skip_bench_results_upload = False
        master_port = skia_vars.GetGlobalVariable('public_internal_port')
        slave_port = skia_vars.GetGlobalVariable('public_slave_port')
        master_port_alt = skia_vars.GetGlobalVariable('public_external_port')
        tree_closing_notification_recipients = ['*****@*****.**']
        from_address = skia_vars.GetGlobalVariable('gce_smtp_user')
        is_publicly_visible = True
        code_review_site = \
            skia_vars.GetGlobalVariable('code_review_status_listener')
        tree_status_url = skia_vars.GetGlobalVariable('tree_status_url')

        @property
        def do_upload_render_results(self):
            # Upload render results only from production, unless skipped.
            return self.is_production_host and not self._skip_render_results_upload

        @property
        def do_upload_bench_results(self):
            # Upload bench results only from production, unless skipped.
            return self.is_production_host and not self._skip_bench_results_upload

        def create_schedulers_and_builders(self, cfg):
            """Create the Schedulers and Builders.

            Args:
                cfg: dict; configuration dict for the build master.
            """
            # This import needs to happen inside this function because modules
            # imported by master_builders_cfg import this module.
            import master_builders_cfg
            master_builders_cfg.create_schedulers_and_builders(
                sys.modules[__name__], self, cfg,
                master_builders_cfg.setup_all_builders)

    class PrivateSkia(Skia):
        """Configuration for the Google-internal (private) Skia master."""
        project_name = 'PrivateSkia'
        project_url = skia_vars.GetGlobalVariable('project_url')
        master_host = skia_vars.GetGlobalVariable('private_master_host')
        master_fqdn = SKIA_PRIVATE_MASTER_INTERNAL_FQDN
        is_production_host = socket.getfqdn(
        ) == SKIA_PRIVATE_MASTER_INTERNAL_FQDN
        _skip_render_results_upload = False
        # Don't upload bench results on the private master, since we don't yet have
        # a private destination for them.
        _skip_bench_results_upload = True
        master_port = skia_vars.GetGlobalVariable('private_internal_port')
        slave_port = skia_vars.GetGlobalVariable('private_slave_port')
        master_port_alt = skia_vars.GetGlobalVariable('private_external_port')
        tree_closing_notification_recipients = []
        from_address = skia_vars.GetGlobalVariable('gce_smtp_user')
        is_publicly_visible = False
        code_review_site = \
            skia_vars.GetGlobalVariable('code_review_status_listener')

        def create_schedulers_and_builders(self, cfg):
            """Create the Schedulers and Builders.

            Args:
                cfg: dict; configuration dict for the build master.
            """
            # These imports need to happen inside this function because modules
            # imported by master_builders_cfg import this module.
            import master_builders_cfg
            import master_private_builders_cfg
            master_builders_cfg.create_schedulers_and_builders(
                sys.modules[__name__], self, cfg,
                master_private_builders_cfg.setup_all_builders)

    class FYISkia(Skia):
        """Configuration for the FYI Skia master."""
        project_name = 'FYISkia'
        project_url = skia_vars.GetGlobalVariable('project_url')
        master_host = skia_vars.GetGlobalVariable('fyi_master_host')
        master_fqdn = SKIA_FYI_MASTER_INTERNAL_FQDN
        is_production_host = socket.getfqdn() == SKIA_FYI_MASTER_INTERNAL_FQDN
        _skip_render_results_upload = False
        _skip_bench_results_upload = False
        master_port = skia_vars.GetGlobalVariable('fyi_internal_port')
        slave_port = skia_vars.GetGlobalVariable('fyi_slave_port')
        master_port_alt = skia_vars.GetGlobalVariable('fyi_external_port')
        tree_closing_notification_recipients = []
        from_address = skia_vars.GetGlobalVariable('gce_smtp_user')
        is_publicly_visible = True
        code_review_site = \
            skia_vars.GetGlobalVariable('code_review_status_listener')

        def create_schedulers_and_builders(self, cfg):
            """Create the Schedulers and Builders.

            Args:
                cfg: dict; configuration dict for the build master.
            """
            # These imports need to happen inside this function because modules
            # imported by master_builders_cfg import this module.
            import master_builders_cfg
            import master_fyi_builders_cfg
            master_builders_cfg.create_schedulers_and_builders(
                sys.modules[__name__], self, cfg,
                master_fyi_builders_cfg.setup_all_builders)

    class AndroidSkia(Skia):
        """Configuration for the Android Skia master."""
        project_name = 'AndroidSkia'
        project_url = skia_vars.GetGlobalVariable('project_url')
        master_host = skia_vars.GetGlobalVariable('android_master_host')
        master_fqdn = SKIA_ANDROID_MASTER_INTERNAL_FQDN
        is_production_host = socket.getfqdn(
        ) == SKIA_ANDROID_MASTER_INTERNAL_FQDN
        _skip_render_results_upload = False
        _skip_bench_results_upload = False
        master_port = skia_vars.GetGlobalVariable('android_internal_port')
        slave_port = skia_vars.GetGlobalVariable('android_slave_port')
        master_port_alt = skia_vars.GetGlobalVariable('android_external_port')
        tree_closing_notification_recipients = []
        from_address = skia_vars.GetGlobalVariable('gce_smtp_user')
        is_publicly_visible = True
        code_review_site = \
            skia_vars.GetGlobalVariable('code_review_status_listener')

        def create_schedulers_and_builders(self, cfg):
            """Create the Schedulers and Builders.

            Args:
                cfg: dict; configuration dict for the build master.
            """
            # These imports need to happen inside this function because modules
            # imported by master_builders_cfg import this module.
            import master_builders_cfg
            import master_android_builders_cfg
            master_builders_cfg.create_schedulers_and_builders(
                sys.modules[__name__], self, cfg,
                master_android_builders_cfg.setup_all_builders)

    class CompileSkia(Skia):
        """Configuration for the compile-only Skia master."""
        project_name = 'CompileSkia'
        project_url = skia_vars.GetGlobalVariable('project_url')
        master_host = skia_vars.GetGlobalVariable('compile_master_host')
        master_fqdn = SKIA_COMPILE_MASTER_INTERNAL_FQDN
        is_production_host = socket.getfqdn(
        ) == SKIA_COMPILE_MASTER_INTERNAL_FQDN
        _skip_render_results_upload = False
        _skip_bench_results_upload = False
        master_port = skia_vars.GetGlobalVariable('compile_internal_port')
        slave_port = skia_vars.GetGlobalVariable('compile_slave_port')
        master_port_alt = skia_vars.GetGlobalVariable('compile_external_port')
        tree_closing_notification_recipients = ['*****@*****.**']
        from_address = skia_vars.GetGlobalVariable('gce_smtp_user')
        is_publicly_visible = True
        code_review_site = \
            skia_vars.GetGlobalVariable('code_review_status_listener')

        def create_schedulers_and_builders(self, cfg):
            """Create the Schedulers and Builders.

            Args:
                cfg: dict; configuration dict for the build master.
            """
            # These imports need to happen inside this function because modules
            # imported by master_builders_cfg import this module.
            import master_builders_cfg
            import master_compile_builders_cfg
            master_builders_cfg.create_schedulers_and_builders(
                sys.modules[__name__], self, cfg,
                master_compile_builders_cfg.setup_all_builders)

    # List of the valid master classes.
    valid_masters = [Skia, PrivateSkia, FYISkia, AndroidSkia, CompileSkia]

    @staticmethod
    def set_active_master(master_name):
        """Sets the master with the given name as active and returns its instance.

        Args:
            master_name: string; name of the desired build master.

        Raises:
            Exception: if no master with the given name exists.
        """
        global _ACTIVE_MASTER
        master = Master.get(master_name)
        if master:
            _ACTIVE_MASTER = master()
            return _ACTIVE_MASTER
        raise Exception('Invalid master: %s' % master_name)

    @staticmethod
    def get(master_name):
        """Return the master with the given name or None if no such master exists.

        Args:
            master_name: string; name of the desired build master.
        """
        for master in Master.valid_masters:
            if master_name == master.__name__:
                return master
        return None

    @staticmethod
    def get_active_master():
        """Returns the instance of the active build master."""
        return _ACTIVE_MASTER
Beispiel #17
0
import datetime
import json
import os
import smtplib
import sys
import urllib2

# Set the PYTHONPATH for this script to include skia site_config.
sys.path.append(os.path.join(os.pardir, 'site_config'))
import skia_vars

# Identifiers for the kinds of notification email this script supports.
SHERIFF_EMAIL_TYPE = 'sheriff'
STATUS_EMAIL_TYPE = 'status'
ALL_EMAIL_TYPES = (SHERIFF_EMAIL_TYPE, STATUS_EMAIL_TYPE)

# URL that serves the upcoming sheriff rotation as JSON.
NEXT_SHERIFF_JSON_URL = skia_vars.GetGlobalVariable('next_sheriff_url')
# NOTE: the address literals below appear redacted in this copy of the file.
DEFAULT_EMAIL_SENDER = '*****@*****.**'
ADDITIONAL_EMAIL_RECIPIENTS = ['*****@*****.**']


def _GetSheriffDetails():
    """Returns the next sheriff and his/her schedule.

    Fetches NEXT_SHERIFF_JSON_URL and parses the response body as JSON.

    Returns:
        The parsed JSON payload describing the next sheriff.
    """
    connection = urllib2.urlopen(NEXT_SHERIFF_JSON_URL)
    # try/finally guarantees the connection is closed even if read() or
    # json.loads() raises; the original leaked it on those error paths.
    try:
        return json.loads(connection.read())
    finally:
        connection.close()


def EmailSheriff():
    """Sends an email to the next sheriff."""
    sheriff_details = _GetSheriffDetails()
# Use of this source code is governed by a BSD-style license that can be
# found in the LICENSE file.

# These buildbot configurations are "private" in the sense that they are
# specific to Skia buildbots (not shared by other Chromium buildbots).
# But this file is stored within a public SVN repository, so don't put any
# secrets in here.

import socket
import skia_vars
import sys

# import base class from third_party/chromium_buildbot/site_config/
import config_default

# Code-review site URL shared by the try-job schedulers.
CODE_REVIEW_SITE = skia_vars.GetGlobalVariable('code_review_site')

# On startup, the build master validates the bot configuration against a known
# expectation.  If this variable is set to true (eg. in a buildbot self-test),
# the master will fail to start up if validation fails.
#
# For more information: https://code.google.com/p/skia/issues/detail?id=1289
die_on_validation_failure = False

# Skia's Google Compute Engine instances.
# The public master which is visible to everyone.
SKIA_PUBLIC_MASTER_INTERNAL_FQDN = skia_vars.GetGlobalVariable(
    'public_master_internal_fqdn')
# The private master which is visible only to Google corp.
SKIA_PRIVATE_MASTER_INTERNAL_FQDN = skia_vars.GetGlobalVariable(
    'private_master_internal_fqdn')
Beispiel #19
0
        "source": ".boto",
        "destination": "buildbot/third_party/chromium_buildbot/site_config",
    },
]
# Credential files copied into a Chrome "build" checkout's site_config.
CHROMEBUILD_COPIES = [
    {
        "source": ".boto",
        "destination": "build/site_config",
    },
    {
        "source": ".bot_password",
        "destination": "build/site_config",
    },
]

# Google Compute Engine settings for the compile bots.
GCE_PROJECT = skia_vars.GetGlobalVariable('gce_project')
GCE_USERNAME = skia_vars.GetGlobalVariable('gce_username')
GCE_ZONE = skia_vars.GetGlobalVariable('gce_compile_bots_zone')

# Zone availability flags derived from GCE_ZONE; zone "c" is always
# considered online.
GCE_COMPILE_A_ONLINE = GCE_ZONE == 'a'
GCE_COMPILE_B_ONLINE = GCE_ZONE == 'b'
GCE_COMPILE_C_ONLINE = True

# Path components of the scripts that start a buildslave at boot.
LAUNCH_SCRIPT_UNIX = ['scripts', 'skiabot-slave-start-on-boot.sh']
LAUNCH_SCRIPT_WIN = ['scripts', 'skiabot-slave-start-on-boot.bat']

# SkiaLab access details.
SKIALAB_ROUTER_IP = skia_vars.GetGlobalVariable('skialab_router_ip')
SKIALAB_USERNAME = skia_vars.GetGlobalVariable('skialab_username')

# Procedures for logging in to the host machines.
def Sync(skia_revision=SKIA_REV_MASTER,
         chrome_revision=CHROME_REV_LKGR,
         fetch_target=DEFAULT_FETCH_TARGET,
         gyp_defines=None,
         gyp_generators=None):
    """ Create and sync a checkout of Skia inside a checkout of Chrome. Returns
  a tuple containing the actually-obtained revision of Skia and the actually-
  obtained revision of Chrome.

  skia_revision: revision of Skia to sync. Should be a commit hash or one of
      (SKIA_REV_DEPS, SKIA_REV_MASTER).
  chrome_revision: revision of Chrome to sync. Should be a commit hash or one
      of (CHROME_REV_LKGR, CHROME_REV_MASTER).
  fetch_target: string; Calls the fetch tool in depot_tools with the specified
      argument. Default is DEFAULT_FETCH_TARGET.
  gyp_defines: optional string; GYP_DEFINES to be passed to Gyp.
  gyp_generators: optional string; which GYP_GENERATORS to use.
  """
    # Figure out what revision of Skia we should use.
    if skia_revision == SKIA_REV_MASTER:
        output = git_utils.GetRemoteMasterHash(
            skia_vars.GetGlobalVariable('skia_git_url'))
        if output:
            skia_revision = shlex.split(output)[0]
        if not skia_revision:
            raise Exception('Could not determine current Skia revision!')
    skia_revision = str(skia_revision)

    # Use Chrome LKGR, since gclient_utils will force a sync to origin/master.
    if chrome_revision == CHROME_REV_LKGR:
        chrome_revision = urllib2.urlopen(CHROME_LKGR_URL).read()
    elif chrome_revision == CHROME_REV_MASTER:
        chrome_revision = shlex.split(
            git_utils.GetRemoteMasterHash(CHROME_GIT_URL))[0]

    # Run "fetch chromium". The initial run is allowed to fail after it does some
    # work. At the least, we expect the .gclient file to be present when it
    # finishes.
    if not os.path.isfile(GCLIENT_FILE):
        try:
            shell_utils.run([FETCH, fetch_target, '--nosvn=True'])
        except shell_utils.CommandFailedException:
            pass
    if not os.path.isfile(GCLIENT_FILE):
        raise Exception('Could not fetch %s!' % fetch_target)

    # Run "gclient sync"
    revisions = [('src', chrome_revision)]
    if skia_revision != SKIA_REV_DEPS:
        revisions.append(('src/third_party/skia', skia_revision))

    try:
        # Hack: We have to set some GYP_DEFINES, or upstream scripts will complain.
        os.environ['GYP_DEFINES'] = os.environ.get('GYP_DEFINES') or ''
        gclient_utils.Sync(revisions=revisions,
                           jobs=1,
                           no_hooks=True,
                           force=True)
    except shell_utils.CommandFailedException as e:
        # We frequently see sync failures because a lock file wasn't deleted. In
        # that case, delete the lock file and try again.
        pattern = r".*fatal: Unable to create '(\S+)': File exists\..*"
        match = re.search(pattern, e.output)
        if not match:
            raise e
        file_to_delete = match.groups()[0]
        try:
            print 'Attempting to remove %s' % file_to_delete
            os.remove(file_to_delete)
        except OSError:
            # If the file no longer exists, just try again.
            pass
        gclient_utils.Sync(revisions=revisions,
                           jobs=1,
                           no_hooks=True,
                           force=True)

    # Find the actually-obtained Chrome revision.
    os.chdir('src')
    actual_chrome_rev = shell_utils.run([GIT, 'rev-parse', 'HEAD'],
                                        log_in_real_time=False).rstrip()

    # Find the actually-obtained Skia revision.
    with misc.ChDir(os.path.join('third_party', 'skia')):
        actual_skia_rev = shell_utils.run([GIT, 'rev-parse', 'HEAD'],
                                          log_in_real_time=False).rstrip()

    # Run gclient hooks
    gclient_utils.RunHooks(gyp_defines=gyp_defines,
                           gyp_generators=gyp_generators)

    # Fix the submodules so that they don't show up in "git status"
    # This fails on Windows...
    if os.name != 'nt':
        submodule_cmd = ('\'git config -f '
                         '$toplevel/.git/config submodule.$name.ignore all\'')
        shell_utils.run(' '.join([GIT, 'submodule', 'foreach', submodule_cmd]),
                        shell=True)

    # Verify that we got the requested revisions of Chrome and Skia.
    if skia_revision != actual_skia_rev and skia_revision != SKIA_REV_DEPS:
        raise Exception('Requested Skia revision %s but got %s!' %
                        (repr(skia_revision), repr(actual_skia_rev)))
    if chrome_revision and chrome_revision != actual_chrome_rev:
        raise Exception('Requested Chrome revision %s but got %s!' %
                        (repr(chrome_revision), repr(actual_chrome_rev)))

    return (actual_skia_rev, actual_chrome_rev)
Beispiel #21
0
 def asDict(self, request):
     """Return this resource as a JSON-serializable dict.

     Args:
         request: the incoming web request (not used by this method).

     Returns:
         dict mapping 'cq_required_steps' to the value of the
         'cq_required_steps' global variable.
     """
     required_steps = skia_vars.GetGlobalVariable('cq_required_steps')
     return {'cq_required_steps': required_steps}
Beispiel #22
0
 def __init__(self, attempts=3, **kwargs):
   """Initialize the upload step.

   Args:
     attempts: int; number of attempts for this build step, forwarded to the
         parent class. Defaults to 3.
     **kwargs: additional keyword arguments forwarded to the parent class.
   """
   super(UploadRenderedSKPs, self).__init__(
       attempts=attempts, **kwargs)
   # Google Storage bucket names for rendered SKP images and summaries.
   self._gs_images_bucket = skia_vars.GetGlobalVariable('skp_images_bucket')
   self._gs_summaries_bucket = skia_vars.GetGlobalVariable(
       'skp_summaries_bucket')
Beispiel #23
0
  def Update(self, c):
    """Populate the buildbot config dict with builders and schedulers.

    Walks self._builders to register each builder with its schedulers and
    append a builder entry to c['builders'], then instantiates each entry of
    self._schedulers according to its 'type' ('Scheduler',
    'PeriodicScheduler', 'TryJobRietveld', 'TryJobSubversion', 'Dependent')
    and appends the instances to c['schedulers'].

    Args:
      c: dict; the buildbot master configuration dict, mutated in place
          (its 'builders' and 'schedulers' lists are extended).
    """
    for builder in self._builders:
      # Update the schedulers with the builder.
      schedulers = builder['schedulers']
      if schedulers:
        for scheduler in schedulers:
          self._schedulers[scheduler]['builders'].append(builder['name'])

      # Construct the category.
      categories = []
      if builder.get('category', None):
        categories.append(builder['category'])
      if builder.get('gatekeeper', None):
        categories.extend(builder['gatekeeper'].split('|'))
      category = '|'.join(categories)

      # Append the builder to the list.
      new_builder = {'name': builder['name'],
                     'factory': self._factories[builder['factory']],
                     'category': category,
                     'auto_reboot': builder['auto_reboot']}
      if builder['builddir']:
        new_builder['builddir'] = builder['builddir']
      c['builders'].append(new_builder)

    # Keep the builder list in a deterministic order.
    c['builders'].sort(key=lambda builder: builder['name'])

    # Process the main schedulers.
    for s_name in self._schedulers:
      scheduler = self._schedulers[s_name]
      if scheduler['type'] == 'Scheduler':
        def filter_fn(change, builders):
          """Filters out if change.comments contains certain keywords.

          The change is filtered out if the commit message contains:
          * SKIP_BUILDBOT_SUBSTRING or
          * RUN_BUILDERS_REGEX when the scheduler does not contain any of the
            specified builders

          Args:
            change: An instance of changes.Change.
            builders: Sequence of strings. The builders that are run by this
              scheduler.

          Returns:
            If the change should be filtered out (i.e. not run by the buildbot
            code) then False is returned else True is returned.
          """
          if SKIP_BUILDBOT_SUBSTRING in change.comments:
            return False
          match_obj = RUN_BUILDERS_RE_COMPILED.search(change.comments)
          if builders and match_obj:
            for builder_to_run in match_obj.group(1).split(','):
              if builder_to_run.strip() in builders:
                break
            # for/else: runs only when no requested builder matched this
            # scheduler's builders, so the change is filtered out.
            else:
              return False
          return True

        skia_change_filter = SkiaChangeFilter(
            builders=scheduler['builders'],
            branch=skia_vars.GetGlobalVariable('master_branch_name'),
            filter_fn=filter_fn)

        instance = Scheduler(name=s_name,
                             treeStableTimer=scheduler['treeStableTimer'],
                             builderNames=scheduler['builders'],
                             change_filter=skia_change_filter)
        c['schedulers'].append(instance)
        self._schedulers[s_name]['instance'] = instance

    # Process the periodic schedulers.
    for s_name in self._schedulers:
      scheduler = self._schedulers[s_name]
      if scheduler['type'] == 'PeriodicScheduler':
        instance = timed.Nightly(
            name=s_name,
            branch=skia_vars.GetGlobalVariable('master_branch_name'),
            builderNames=scheduler['builders'],
            minute=scheduler['minute'],
            hour=scheduler['hour'],
            dayOfMonth=scheduler['dayOfMonth'],
            month=scheduler['month'],
            dayOfWeek=scheduler['dayOfWeek'])
        c['schedulers'].append(instance)
        self._schedulers[s_name]['instance'] = instance

    # Process the Rietveld-based try schedulers.
    for s_name in self._schedulers:
      scheduler = self._schedulers[s_name]
      if scheduler['type'] == 'TryJobRietveld':
        pools = BuildersPools(s_name)
        pools[s_name].extend(scheduler['builders'])
        instance = try_job_rietveld.TryJobRietveld(
            name=s_name,
            pools=pools,
            last_good_urls={'skia': None},
            code_review_sites={'skia': config_private.CODE_REVIEW_SITE},
            project='skia')
        c['schedulers'].append(instance)
        self._schedulers[s_name]['instance'] = instance

    # Process the svn-based try schedulers.
    for s_name in self._schedulers:
      scheduler = self._schedulers[s_name]
      if scheduler['type'] == 'TryJobSubversion':
        pools = BuildersPools(s_name)
        pools[s_name].extend(scheduler['builders'])
        instance = try_job_svn.TryJobSubversion(
            name=s_name,
            svn_url=TRY_SVN_BASEURL,
            last_good_urls={'skia': None},
            code_review_sites={'skia': config_private.CODE_REVIEW_SITE},
            pools=pools)
        c['schedulers'].append(instance)
        self._schedulers[s_name]['instance'] = instance

    # Process the dependent schedulers. Done last so that each parent
    # scheduler's 'instance' has already been created above.
    for s_name in self._schedulers:
      scheduler = self._schedulers[s_name]
      if scheduler['type'] == 'Dependent':
        instance = Dependent(
            s_name,
            self._schedulers[scheduler['parent']]['instance'],
            scheduler['builders'])
        c['schedulers'].append(instance)
        self._schedulers[s_name]['instance'] = instance
# Use of this source code is governed by a BSD-style license that can be
# found in the LICENSE file.
"""Merge Skia into Android."""

import os
import sys

from build_step import BuildStep, BuildStepFailure, BuildStepWarning
import skia_vars
from sync_android import ANDROID_CHECKOUT_PATH, REPO, GitAuthenticate
from py.utils.git_utils import GIT
from py.utils import git_utils
from py.utils import misc
from py.utils import shell_utils

# Skia git repository URL and the template used to build revision links.
SKIA_REPO_URL = skia_vars.GetGlobalVariable('skia_git_url')
SKIA_REV_URL = skia_vars.GetGlobalVariable('revlink_tmpl')

# Target repository and refspec for the merged Skia — presumably where the
# merge result is pushed; verify against the push step.
MASTER_SKIA_URL = ('https://googleplex-android-review.googlesource.com/'
                   'platform/external/skia')
MASTER_SKIA_REFS = 'HEAD:refs/heads/master-skia'

# Name of the git remote pointing at upstream Skia.
UPSTREAM_REMOTE_NAME = 'upstream'

# Paths of SkUserConfig.h in the Android tree vs. upstream Skia.
ANDROID_USER_CONFIG = 'include/core/SkUserConfig.h'
UPSTREAM_USER_CONFIG = 'include/config/SkUserConfig.h'

# Skia checkout inside the Android tree.
EXTERNAL_SKIA = os.path.join(ANDROID_CHECKOUT_PATH, 'external', 'skia')
# Path to gyp_to_android.py.
PLATFORM_TOOLS_BIN = os.path.join(EXTERNAL_SKIA, 'platform_tools', 'android',
                                  'bin')