Code example #1
    def __init__(self):
        super(SpidermonkeyBuild, self).__init__(
            config_options=self.config_options,
            # other stuff
            all_actions=[
                'purge',
                'checkout-tools',

                # First, build an optimized JS shell for running the analysis
                'checkout-source',
                'get-blobs',
                'clobber-shell',
                'configure-shell',
                'build-shell',

                # Next, build a tree with the analysis plugin active. Note that
                # we are using the same checkout for the JS shell build and the
                # build of the source to be analyzed, which is a little
                # unnecessary (no need to rebuild the JS shell all the time).
                # (Different objdir, though.)
                'clobber-analysis',
                'setup-analysis',
                'run-analysis',
                'collect-analysis-output',
                'upload-analysis',
                'check-expectations',
            ],
            default_actions=[
                'purge',
                'checkout-tools',
                'checkout-source',
                'get-blobs',
                'clobber-shell',
                'configure-shell',
                'build-shell',
                'clobber-analysis',
                'setup-analysis',
                'run-analysis',
                'collect-analysis-output',
                # Temporarily disabled, see bug 1211402
                # 'upload-analysis',
                'check-expectations',
            ],
            config={
                'default_vcs': 'hgtool',
                'vcs_share_base': os.environ.get('HG_SHARE_BASE_DIR'),
                'ccache': True,
                'buildbot_json_path': os.environ.get('PROPERTIES_FILE'),
                'tools_repo': 'https://hg.mozilla.org/build/tools',
                'upload_ssh_server': None,
                'upload_remote_basepath': None,
                'enable_try_uploads': True,
                'source': None,
                'stage_product': 'firefox',
            },
        )

        self.buildid = None
        self.create_virtualenv()
        self.analysis = HazardAnalysis()
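
Note (not part of the original snippet): in mozharness, each action name listed in all_actions / default_actions is conventionally dispatched to a method of the same name with hyphens replaced by underscores, e.g. 'checkout-source' runs checkout_source(). A minimal sketch of that mapping, assuming the standard BaseScript behaviour:

def method_name_for_action(action):
    # 'checkout-source' -> 'checkout_source', 'build-shell' -> 'build_shell'
    return action.replace('-', '_')

for action in ('purge', 'checkout-source', 'build-shell', 'check-expectations'):
    print(action, '->', method_name_for_action(action))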
Code example #2
    def __init__(self):
        BaseScript.__init__(self,
                            config_options=self.config_options,
                            # other stuff
                            all_actions=[
                                'purge',
                                'checkout-tools',

                                # First, build an optimized JS shell for running the analysis
                                'checkout-source',
                                'get-blobs',
                                'clobber-shell',
                                'configure-shell',
                                'build-shell',

                                # Next, build a tree with the analysis plugin
                                # active. Note that we are using the same
                                # checkout for the JS shell build and the build
                                # of the source to be analyzed, which is a
                                # little unnecessary (no need to rebuild the JS
                                # shell all the time). (Different objdir,
                                # though.)
                                'clobber-analysis',
                                'setup-analysis',
                                'run-analysis',
                                'collect-analysis-output',
                                'upload-analysis',
                                'check-expectations',
                            ],
                            default_actions=[
                                'purge',
                                'checkout-tools',
                                'checkout-source',
                                'get-blobs',
                                'clobber-shell',
                                'configure-shell',
                                'build-shell',
                                'clobber-analysis',
                                'setup-analysis',
                                'run-analysis',
                                'collect-analysis-output',
                                'upload-analysis',
                                'check-expectations',
                            ],
                            config={
                                'default_vcs': 'hgtool',
                                'vcs_share_base': os.environ.get('HG_SHARE_BASE_DIR'),
                                'ccache': True,
                                'buildbot_json_path': os.environ.get('PROPERTIES_FILE'),
                                'tools_repo': 'https://hg.mozilla.org/build/tools',

                                'upload_ssh_server': None,
                                'upload_remote_basepath': None,
                                'enable_try_uploads': True,
                                'source': None,
                            },
        )

        self.buildid = None
        self.analysis = HazardAnalysis()
Code example #3
    def __init__(self):
        BaseScript.__init__(self,
                            config_options=self.config_options,
                            # other stuff
                            all_actions=[
                                'purge',
                                'checkout-tools',

                                # First, build an optimized JS shell for running the analysis
                                'checkout-source',
                                'get-blobs',
                                'clobber-shell',
                                'configure-shell',
                                'build-shell',

                                # Next, build a tree with the analysis plugin
                                # active. Note that we are using the same
                                # checkout for the JS shell build and the build
                                # of the source to be analyzed, which is a
                                # little unnecessary (no need to rebuild the JS
                                # shell all the time). (Different objdir,
                                # though.)
                                'clobber-analysis',
                                'setup-analysis',
                                'run-analysis',
                                'collect-analysis-output',
                                'upload-analysis',
                                'check-expectations',
                            ],
                            default_actions=[
                                'purge',
                                'checkout-tools',
                                'checkout-source',
                                'get-blobs',
                                'clobber-shell',
                                'configure-shell',
                                'build-shell',
                                'clobber-analysis',
                                'setup-analysis',
                                'run-analysis',
                                'collect-analysis-output',
                                'upload-analysis',
                                'check-expectations',
                            ],
                            config={
                                'default_vcs': 'hgtool',
                                'vcs_share_base': os.environ.get('HG_SHARE_BASE_DIR'),
                                'ccache': True,
                                'buildbot_json_path': os.environ.get('PROPERTIES_FILE'),
                                'tools_repo': 'https://hg.mozilla.org/build/tools',

                                'upload_ssh_server': None,
                                'upload_remote_basepath': None,
                                'enable_try_uploads': True,
                                'source': None,
                            },
        )

        self.buildid = None
        self.analysis = HazardAnalysis()
Code example #4
    def __init__(self):
        BaseScript.__init__(
            self,
            config_options=self.config_options,
            # other stuff
            all_actions=[
                "purge",
                "checkout-tools",
                # First, build an optimized JS shell for running the analysis
                "checkout-source",
                "get-blobs",
                "clobber-shell",
                "configure-shell",
                "build-shell",
                # Next, build a tree with the analysis plugin
                # active. Note that we are using the same
                # checkout for the JS shell build and the build
                # of the source to be analyzed, which is a
                # little unnecessary (no need to rebuild the JS
                # shell all the time). (Different objdir,
                # though.)
                "clobber-analysis",
                "setup-analysis",
                "run-analysis",
                "collect-analysis-output",
                "upload-analysis",
                "check-expectations",
            ],
            default_actions=[
                "purge",
                "checkout-tools",
                "checkout-source",
                "get-blobs",
                "clobber-shell",
                "configure-shell",
                "build-shell",
                "clobber-analysis",
                "setup-analysis",
                "run-analysis",
                "collect-analysis-output",
                # Temporary - see bug 1211402
                #'upload-analysis',
                "check-expectations",
            ],
            config={
                "default_vcs": "hgtool",
                "vcs_share_base": os.environ.get("HG_SHARE_BASE_DIR"),
                "ccache": True,
                "buildbot_json_path": os.environ.get("PROPERTIES_FILE"),
                "tools_repo": "https://hg.mozilla.org/build/tools",
                "upload_ssh_server": None,
                "upload_remote_basepath": None,
                "enable_try_uploads": True,
                "source": None,
                "stage_product": "firefox",
            },
        )

        self.buildid = None
        self.analysis = HazardAnalysis()
Code example #5
    def __init__(self):
        super(B2GHazardBuild, self).__init__(
            config_options=[],
            config={
                'default_vcs': 'hgtool',
                'ccache': False,
                'mozilla_dir': 'build/gecko',
                'upload_ssh_server': None,
                'upload_remote_basepath': None,
                'enable_try_uploads': True,
            },
            all_actions=B2GHazardBuild.all_actions,
            default_actions=B2GHazardBuild.default_actions,
        )

        self.buildid = None
        self.analysis = HazardAnalysis()
Code example #6
File: hazard_build.py  Project: Antonius32/Pale-Moon
    def __init__(self):
        super(B2GHazardBuild, self).__init__(
            config_options=[],
            config={
                "default_vcs": "hgtool",
                "ccache": False,
                "mozilla_dir": "build/goanna",
                "upload_ssh_server": None,
                "upload_remote_basepath": None,
                "enable_try_uploads": True,
            },
            all_actions=B2GHazardBuild.all_actions,
            default_actions=B2GHazardBuild.default_actions,
        )

        self.buildid = None
        self.analysis = HazardAnalysis()
Code example #7
File: hazard_build.py  Project: DINKIN/Waterfox
    def __init__(self):
        super(B2GHazardBuild, self).__init__(
            config_options=[],
            config={
                'default_vcs': 'hgtool',
                'ccache': False,
                'mozilla_dir': 'build/gecko',

                'upload_ssh_server': None,
                'upload_remote_basepath': None,
                'enable_try_uploads': True,
            },
            all_actions=B2GHazardBuild.all_actions,
            default_actions=B2GHazardBuild.default_actions,
        )

        self.buildid = None
        self.analysis = HazardAnalysis()
Code example #8
File: hazard_build.py  Project: DINKIN/Waterfox
class B2GHazardBuild(PurgeMixin, B2GBuildBaseScript):
    all_actions = [
        'clobber',
        'checkout-tools',
        'checkout-sources',
        'get-blobs',
        'update-source-manifest',
        'clobber-shell',
        'configure-shell',
        'build-shell',
        'clobber-analysis',
        'setup-analysis',
        'run-analysis',
        'collect-analysis-output',
        'upload-analysis',
        'check-expectations',
    ]

    default_actions = [
        'clobber',
        'checkout-tools',
        'checkout-sources',
        'get-blobs',
        'clobber-shell',
        'configure-shell',
        'build-shell',
        'clobber-analysis',
        'setup-analysis',
        'run-analysis',
        'collect-analysis-output',
        'upload-analysis',
        'check-expectations',
    ]

    def __init__(self):
        super(B2GHazardBuild, self).__init__(
            config_options=[],
            config={
                'default_vcs': 'hgtool',
                'ccache': False,
                'mozilla_dir': 'build/gecko',

                'upload_ssh_server': None,
                'upload_remote_basepath': None,
                'enable_try_uploads': True,
            },
            all_actions=B2GHazardBuild.all_actions,
            default_actions=B2GHazardBuild.default_actions,
        )

        self.buildid = None
        self.analysis = HazardAnalysis()

    def _pre_config_lock(self, rw_config):
        super(B2GHazardBuild, self)._pre_config_lock(rw_config)

        if self.buildbot_config:
            self.config['is_automation'] = True
        else:
            self.config['is_automation'] = False

        dirs = self.query_abs_dirs()
        replacements = self.config['env_replacements'].copy()
        for k, v in replacements.items():
            replacements[k] = v % dirs

        self.env = self.query_env(replace_dict=replacements,
                                  partial_env=self.config['partial_env'],
                                  purge_env=nuisance_env_vars)

    def query_abs_dirs(self):
        if self.abs_dirs:
            return self.abs_dirs
        abs_dirs = super(B2GHazardBuild, self).query_abs_dirs()

        abs_work_dir = abs_dirs['abs_work_dir']
        dirs = {
            'b2g_src':
                abs_work_dir,
            'target_compiler_base':
                os.path.join(abs_dirs['abs_work_dir'], 'target_compiler'),
            'shell_objdir':
                os.path.join(abs_work_dir, self.config['shell-objdir']),
            'mozharness_scriptdir':
                os.path.abspath(os.path.dirname(__file__)),
            'abs_analysis_dir':
                os.path.join(abs_work_dir, self.config['analysis-dir']),
            'abs_analyzed_objdir':
                os.path.join(abs_work_dir, self.config['srcdir'], self.config['analysis-objdir']),
            'analysis_scriptdir':
                os.path.join(abs_dirs['gecko_src'], self.config['analysis-scriptdir'])
        }

        abs_dirs.update(dirs)
        self.abs_dirs = abs_dirs
        return self.abs_dirs

    def query_branch(self):
        if self.buildbot_config and 'properties' in self.buildbot_config:
            return self.buildbot_config['properties']['branch']
        elif 'branch' in self.config:
            # Used for locally testing try vs non-try
            return self.config['branch']
        else:
            return os.path.basename(self.query_repo())

    def query_compiler_manifest(self):
        dirs = self.query_abs_dirs()
        return os.path.join(dirs['analysis_scriptdir'], self.config['compiler_manifest'])

    def query_b2g_compiler_manifest(self):
        dirs = self.query_abs_dirs()
        return os.path.join(dirs['analysis_scriptdir'], self.config['b2g_compiler_manifest'])

    def query_sixgill_manifest(self):
        dirs = self.query_abs_dirs()
        return os.path.join(dirs['analysis_scriptdir'], self.config['sixgill_manifest'])

    def query_buildid(self):
        if self.buildid:
            return self.buildid
        if self.buildbot_config and 'properties' in self.buildbot_config:
            self.buildid = self.buildbot_config['properties'].get('buildid')
        if not self.buildid:
            self.buildid = datetime.now().strftime("%Y%m%d%H%M%S")
        return self.buildid

    def query_upload_ssh_server(self):
        if self.buildbot_config and 'properties' in self.buildbot_config:
            return self.buildbot_config['properties']['upload_ssh_server']
        else:
            return self.config['upload_ssh_server']

    def query_upload_ssh_key(self):
        if self.buildbot_config and 'properties' in self.buildbot_config:
            key = self.buildbot_config['properties']['upload_ssh_key']
        else:
            key = self.config['upload_ssh_key']
        if self.mock_enabled and not key.startswith("/"):
            key = "/home/mock_mozilla/.ssh/" + key
        return key

    def query_upload_ssh_user(self):
        if self.buildbot_config and 'properties' in self.buildbot_config:
            return self.buildbot_config['properties']['upload_ssh_user']
        else:
            return self.config['upload_ssh_user']

    def query_product(self):
        if self.buildbot_config and 'properties' in self.buildbot_config:
            return self.buildbot_config['properties']['product']
        else:
            return self.config['product']

    def query_upload_remote_basepath(self):
        if self.config.get('upload_remote_basepath'):
            return self.config['upload_remote_basepath']
        else:
            return "/pub/mozilla.org/{product}".format(
                product=self.query_product(),
            )

    def query_upload_remote_baseuri(self):
        baseuri = self.config.get('upload_remote_baseuri')
        if self.buildbot_config and 'properties' in self.buildbot_config:
            buildprops = self.buildbot_config['properties']
            if 'upload_remote_baseuri' in buildprops:
                baseuri = buildprops['upload_remote_baseuri']
        return baseuri.strip("/") if baseuri else None

    def query_upload_label(self):
        if self.buildbot_config and 'properties' in self.buildbot_config:
            return self.buildbot_config['properties']['platform']
        else:
            return self.config.get('upload_label')

    def query_upload_path(self):
        branch = self.query_branch()

        common = {
            'basepath': self.query_upload_remote_basepath(),
            'branch': branch,
            'target': self.query_upload_label(),
        }

        if branch == 'try':
            if not self.config['enable_try_uploads']:
                return None
            try:
                user = self.buildbot_config['sourcestamp']['changes'][0]['who']
            except (KeyError, TypeError):
                user = "******"
            return "{basepath}/try-builds/{user}-{rev}/{branch}-{target}".format(
                user=user,
                rev=self.query_revision(),
                **common
            )
        else:
            return "{basepath}/tinderbox-builds/{branch}-{target}/{buildid}".format(
                buildid=self.query_buildid(),
                **common
            )

    def query_do_upload(self):
        if self.query_branch() == 'try':
            return self.config.get('enable_try_uploads')
        return True

    def make_source_dir(self):
        dirs = self.query_abs_dirs()
        dest = dirs['b2g_src']
        if not os.path.exists(dest):
            self.mkdir_p(dest)

    # The gecko config is required before enabling mock, because it determines
    # what mock target to use.
    def enable_mock(self):
        self.load_gecko_config()
        super(B2GHazardBuild, self).enable_mock()

    # Actions {{{2
    def clobber(self):
        dirs = self.query_abs_dirs()
        PurgeMixin.clobber(
            self,
            always_clobber_dirs=[
                dirs['abs_upload_dir'],
            ],
        )

    def checkout_sources(self):
        self.make_source_dir()
        super(B2GHazardBuild, self).checkout_sources()

    def get_blobs(self):
        dirs = self.query_abs_dirs()
        self.tooltool_fetch(self.query_compiler_manifest(),
                            output_dir=dirs['abs_work_dir'])
        self.tooltool_fetch(self.query_sixgill_manifest(),
                            output_dir=dirs['abs_work_dir'])
        if not os.path.exists(dirs['target_compiler_base']):
            self.mkdir_p(dirs['target_compiler_base'])
        self.tooltool_fetch(self.query_b2g_compiler_manifest(),
                            output_dir=dirs['target_compiler_base'])

    def clobber_shell(self):
        dirs = self.query_abs_dirs()
        self.rmtree(dirs['shell_objdir'])

    def configure_shell(self):
        self.enable_mock()
        dirs = self.query_abs_dirs()

        if not os.path.exists(dirs['shell_objdir']):
            self.mkdir_p(dirs['shell_objdir'])

        js_src_dir = os.path.join(dirs['gecko_src'], 'js', 'src')
        rc = self.run_command(['autoconf-2.13'],
                              cwd=js_src_dir,
                              env=self.env,
                              error_list=MakefileErrorList)
        if rc != 0:
            self.fatal("autoconf failed, can't continue.", exit_code=FAILURE)

        rc = self.run_command([os.path.join(js_src_dir, 'configure'),
                               '--enable-optimize',
                               '--disable-debug',
                               '--enable-ctypes',
                               '--with-system-nspr',
                               '--without-intl-api'],
                              cwd=dirs['shell_objdir'],
                              env=self.env,
                              error_list=MakefileErrorList)
        if rc != 0:
            self.fatal("Configure failed, can't continue.", exit_code=FAILURE)

        self.disable_mock()

    def build_shell(self):
        self.enable_mock()
        dirs = self.query_abs_dirs()

        rc = self.run_command(['make', '-j', str(self.config.get('concurrency', 4)), '-s'],
                              cwd=dirs['shell_objdir'],
                              env=self.env,
                              error_list=MakefileErrorList)
        if rc != 0:
            self.fatal("Build failed, can't continue.", exit_code=FAILURE)

        self.disable_mock()

    def clobber_analysis(self):
        dirs = self.query_abs_dirs()
        self.rmtree(dirs['abs_analysis_dir'])
        self.rmtree(dirs['abs_analyzed_objdir'])

    def setup_analysis(self):
        dirs = self.query_abs_dirs()
        analysis_dir = dirs['abs_analysis_dir']

        if not os.path.exists(analysis_dir):
            self.mkdir_p(analysis_dir)

        values = {
            'js': os.path.join(dirs['shell_objdir'], 'dist', 'bin', 'js'),
            'analysis_scriptdir': dirs['analysis_scriptdir'],
            'source_objdir': dirs['abs_analyzed_objdir'],
            'source': os.path.join(dirs['work_dir'], 'source'),
            'sixgill': os.path.join(dirs['work_dir'], self.config['sixgill']),
            'sixgill_bin': os.path.join(dirs['work_dir'], self.config['sixgill_bin']),
        }
        defaults = """
js = '%(js)s'
analysis_scriptdir = '%(analysis_scriptdir)s'
objdir = '%(source_objdir)s'
source = '%(source)s'
sixgill = '%(sixgill)s'
sixgill_bin = '%(sixgill_bin)s'
jobs = 2
""" % values

        #defaults_path = os.path.join(analysis_dir, 'defaults.py')
        defaults_path = os.path.join(analysis_dir, 'defaults.py')
        file(defaults_path, "w").write(defaults)
        self.log("Wrote analysis config file " + defaults_path)

        build_command = self.config['build_command']
        self.copyfile(os.path.join(dirs['mozharness_scriptdir'],
                                   os.path.join('spidermonkey', build_command)),
                      os.path.join(analysis_dir, build_command),
                      copystat=True)

    def run_analysis(self):
        dirs = self.query_abs_dirs()

        env = self.query_build_env().copy()
        self.enable_mock()

        gonk_misc = os.path.join(dirs['b2g_src'], 'gonk-misc')
        mozconfig = os.path.join(gonk_misc, 'hazard-analysis-config')
        mozconfig_text = ''
        mozconfig_text += 'CXXFLAGS="-Wno-attributes"\n'
        mozconfig_text += '. "%s/default-gecko-config"\n' % gonk_misc
        basecc = os.path.join(dirs['abs_work_dir'], self.config['sixgill'], 'scripts', 'wrap_gcc', 'basecc')
        mozconfig_text += "ac_add_options --with-compiler-wrapper=" + basecc + "\n"
        mozconfig_text += "ac_add_options --without-ccache\n"
        file(mozconfig, "w").write(mozconfig_text)

        # Stuff I set in my .userconfig for manual builds
        env['B2G_SOURCE'] = dirs['b2g_src']
        env['MOZCONFIG_PATH'] = mozconfig
        env['GECKO_PATH'] = dirs['gecko_src']
        env['TARGET_TOOLS_PREFIX'] = os.path.join(dirs['abs_work_dir'], self.config['b2g_target_compiler_prefix'])

        try:
            self.analysis.run(self, env=env, error_list=B2GMakefileErrorList)
        except HazardError as e:
            self.fatal(e, exit_code=FAILURE)

        self.disable_mock()

    def collect_analysis_output(self):
        self.analysis.collect_output(self)

    def upload_analysis(self):
        if not self.query_do_upload():
            self.info("Uploads disabled for this build. Skipping...")
            return

        self.enable_mock()

        try:
            self.analysis.upload_results(self)
        except HazardError as e:
            self.error(e)
            self.return_code = WARNINGS

        self.disable_mock()

    def check_expectations(self):
        try:
            self.analysis.check_expectations(self)
        except HazardError as e:
            self.fatal(e, exit_code=FAILURE)
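
A hedged sketch (not part of the excerpt above) of how a mozharness script like this is usually wired up as a command-line entry point. It assumes BaseScript provides run_and_exit(), which parses options, runs the selected actions, and exits with the accumulated return code; some older scripts call run() instead.

# Hypothetical entry point, not shown in the original file excerpt.
if __name__ == '__main__':
    hazard_build = B2GHazardBuild()
    hazard_build.run_and_exit()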
Code example #9
class SpidermonkeyBuild(MockMixin,
                        PurgeMixin, BaseScript,
                        VCSMixin, BuildbotMixin, TooltoolMixin, TransferMixin):
    config_options = [
        [["--repo"], {
            "dest": "repo",
            "help": "which gecko repo to get spidermonkey from",
        }],
        [["--source"], {
            "dest": "source",
            "help": "directory containing gecko source tree (instead of --repo)",
        }],
        [["--revision"], {
            "dest": "revision",
        }],
        [["--branch"], {
            "dest": "branch",
        }],
        [["--vcs-share-base"], {
            "dest": "vcs_share_base",
            "help": "base directory for shared repositories",
        }],
        [["-j"], {
            "dest": "concurrency",
            "type": int,
            "default": 4,
            "help": "number of simultaneous jobs used while building the shell " +
                    "(currently ignored for the analyzed build",
        }],
    ]

    def __init__(self):
        BaseScript.__init__(self,
                            config_options=self.config_options,
                            # other stuff
                            all_actions=[
                                'purge',
                                'checkout-tools',

                                # First, build an optimized JS shell for running the analysis
                                'checkout-source',
                                'get-blobs',
                                'clobber-shell',
                                'configure-shell',
                                'build-shell',

                                # Next, build a tree with the analysis plugin
                                # active. Note that we are using the same
                                # checkout for the JS shell build and the build
                                # of the source to be analyzed, which is a
                                # little unnecessary (no need to rebuild the JS
                                # shell all the time). (Different objdir,
                                # though.)
                                'clobber-analysis',
                                'setup-analysis',
                                'run-analysis',
                                'collect-analysis-output',
                                'upload-analysis',
                                'check-expectations',
                            ],
                            default_actions=[
                                'purge',
                                'checkout-tools',
                                'checkout-source',
                                'get-blobs',
                                'clobber-shell',
                                'configure-shell',
                                'build-shell',
                                'clobber-analysis',
                                'setup-analysis',
                                'run-analysis',
                                'collect-analysis-output',
                                'upload-analysis',
                                'check-expectations',
                            ],
                            config={
                                'default_vcs': 'hgtool',
                                'vcs_share_base': os.environ.get('HG_SHARE_BASE_DIR'),
                                'ccache': True,
                                'buildbot_json_path': os.environ.get('PROPERTIES_FILE'),
                                'tools_repo': 'https://hg.mozilla.org/build/tools',

                                'upload_ssh_server': None,
                                'upload_remote_basepath': None,
                                'enable_try_uploads': True,
                                'source': None,
                            },
        )

        self.buildid = None
        self.analysis = HazardAnalysis()

    def _pre_config_lock(self, rw_config):
        if self.config['source']:
            self.config['srcdir'] = self.config['source']
        super(SpidermonkeyBuild, self)._pre_config_lock(rw_config)

        if self.buildbot_config is None:
            self.info("Reading buildbot build properties...")
            self.read_buildbot_config()

        if self.buildbot_config:
            bb_props = [('mock_target', 'mock_target', None),
                        ('base_bundle_urls', 'hgtool_base_bundle_urls', None),
                        ('base_mirror_urls', 'hgtool_base_mirror_urls', None),
                        ('hgurl', 'hgurl', None),
                        ('clobberer_url', 'clobberer_url', 'https://api.pub.build.mozilla.org/clobberer/lastclobber'),
                        ('purge_minsize', 'purge_minsize', 15),
                        ('purge_maxage', 'purge_maxage', None),
                        ('purge_skip', 'purge_skip', None),
                        ('force_clobber', 'force_clobber', None),
                        ]
            buildbot_props = self.buildbot_config.get('properties', {})
            for bb_prop, cfg_prop, default in bb_props:
                if not self.config.get(cfg_prop) and buildbot_props.get(bb_prop, default):
                    self.config[cfg_prop] = buildbot_props.get(bb_prop, default)
            self.config['is_automation'] = True
        else:
            self.config['is_automation'] = False

        dirs = self.query_abs_dirs()
        replacements = self.config['env_replacements'].copy()
        for k,v in replacements.items():
            replacements[k] = v % dirs

        self.env = self.query_env(replace_dict=replacements,
                                  partial_env=self.config['partial_env'],
                                  purge_env=nuisance_env_vars)

    def query_abs_dirs(self):
        if self.abs_dirs:
            return self.abs_dirs
        abs_dirs = BaseScript.query_abs_dirs(self)

        abs_work_dir = abs_dirs['abs_work_dir']
        dirs = {
            'shell_objdir':
                os.path.join(abs_work_dir, self.config['shell-objdir']),
            'mozharness_scriptdir':
                os.path.abspath(os.path.dirname(__file__)),
            'abs_analysis_dir':
                os.path.join(abs_work_dir, self.config['analysis-dir']),
            'abs_analyzed_objdir':
                os.path.join(abs_work_dir, self.config['srcdir'], self.config['analysis-objdir']),
            'analysis_scriptdir':
                os.path.join(self.config['srcdir'], self.config['analysis-scriptdir']),
            'abs_tools_dir':
                os.path.join(abs_dirs['base_work_dir'], 'tools'),
            'gecko_src':
                os.path.join(abs_work_dir, self.config['srcdir']),
            'abs_blob_upload_dir':
                os.path.join(abs_work_dir, 'blobber_upload_dir'),
        }

        abs_dirs.update(dirs)
        self.abs_dirs = abs_dirs

        return self.abs_dirs

    def query_repo(self):
        if self.config.get('repo'):
            return self.config['repo']
        elif self.buildbot_config and 'properties' in self.buildbot_config:
            return self.config['hgurl'] + self.buildbot_config['properties']['repo_path']
        else:
            return None

    def query_revision(self):
        if 'revision' in self.buildbot_properties:
            revision = self.buildbot_properties['revision']
        elif self.buildbot_config and 'sourcestamp' in self.buildbot_config:
            revision = self.buildbot_config['sourcestamp']['revision']
        else:
            # Useful for local testing. In actual use, this would always be
            # None.
            revision = self.config.get('revision')

        return revision

    def query_branch(self):
        if self.buildbot_config and 'properties' in self.buildbot_config:
            return self.buildbot_config['properties']['branch']
        elif 'branch' in self.config:
            # Used for locally testing try vs non-try
            return self.config['branch']
        else:
            return os.path.basename(self.query_repo())

    def query_compiler_manifest(self):
        dirs = self.query_abs_dirs()
        manifest = os.path.join(dirs['abs_work_dir'], dirs['analysis_scriptdir'], self.config['compiler_manifest'])
        if os.path.exists(manifest):
            return manifest
        return os.path.join(dirs['abs_work_dir'], self.config['compiler_manifest'])

    def query_sixgill_manifest(self):
        dirs = self.query_abs_dirs()
        manifest = os.path.join(dirs['abs_work_dir'], dirs['analysis_scriptdir'], self.config['sixgill_manifest'])
        if os.path.exists(manifest):
            return manifest
        return os.path.join(dirs['abs_work_dir'], self.config['sixgill_manifest'])

    def query_buildid(self):
        if self.buildid:
            return self.buildid
        if self.buildbot_config and 'properties' in self.buildbot_config:
            self.buildid = self.buildbot_config['properties'].get('buildid')
        if not self.buildid:
            self.buildid = datetime.now().strftime("%Y%m%d%H%M%S")
        return self.buildid

    def query_upload_ssh_server(self):
        if self.buildbot_config and 'properties' in self.buildbot_config:
            return self.buildbot_config['properties']['upload_ssh_server']
        else:
            return self.config['upload_ssh_server']

    def query_upload_ssh_key(self):
        if self.buildbot_config and 'properties' in self.buildbot_config:
            key = self.buildbot_config['properties']['upload_ssh_key']
        else:
            key = self.config['upload_ssh_key']
        if self.mock_enabled and not key.startswith("/"):
            key = "/home/mock_mozilla/.ssh/" + key
        return key

    def query_upload_ssh_user(self):
        if self.buildbot_config and 'properties' in self.buildbot_config:
            return self.buildbot_config['properties']['upload_ssh_user']
        else:
            return self.config['upload_ssh_user']

    def query_product(self):
        if self.buildbot_config and 'properties' in self.buildbot_config:
            return self.buildbot_config['properties']['product']
        else:
            return self.config['product']

    def query_upload_remote_basepath(self):
        if self.config.get('upload_remote_basepath'):
            return self.config['upload_remote_basepath']
        else:
            return "/pub/mozilla.org/{product}".format(
                product=self.query_product(),
            )

    def query_upload_remote_baseuri(self):
        baseuri = self.config.get('upload_remote_baseuri')
        if self.buildbot_config and 'properties' in self.buildbot_config:
            buildprops = self.buildbot_config['properties']
            if 'upload_remote_baseuri' in buildprops:
                baseuri = buildprops['upload_remote_baseuri']
        return baseuri.strip("/") if baseuri else None

    def query_target(self):
        if self.buildbot_config and 'properties' in self.buildbot_config:
            return self.buildbot_config['properties']['platform']
        else:
            return self.config.get('target')

    def query_upload_path(self):
        branch = self.query_branch()

        common = {
            'basepath': self.query_upload_remote_basepath(),
            'branch': branch,
            'target': self.query_target(),
        }

        if branch == 'try':
            if not self.config['enable_try_uploads']:
                return None
            try:
                user = self.buildbot_config['sourcestamp']['changes'][0]['who']
            except (KeyError, TypeError):
                user = "******"
            return "{basepath}/try-builds/{user}-{rev}/{branch}-{target}".format(
                user=user,
                rev=self.query_revision(),
                **common
            )
        else:
            return "{basepath}/tinderbox-builds/{branch}-{target}/{buildid}".format(
                buildid=self.query_buildid(),
                **common
            )

    def query_do_upload(self):
        if self.query_branch() == 'try':
            return self.config.get('enable_try_uploads')
        return True

    # Actions {{{2
    def purge(self):
        dirs = self.query_abs_dirs()
        self.info("purging, abs_upload_dir=" + dirs['abs_upload_dir'])
        PurgeMixin.clobber(
            self,
            always_clobber_dirs=[
                dirs['abs_upload_dir'],
            ],
        )

    def checkout_tools(self):
        dirs = self.query_abs_dirs()
        rev = self.vcs_checkout(
            vcs='hg',  # Don't have hgtool.py yet
            repo=self.config['tools_repo'],
            clean=False,
            dest=dirs['abs_tools_dir'],
        )
        self.set_buildbot_property("tools_revision", rev, write_to_file=True)

    def do_checkout_source(self):
        # --source option means to use an existing source directory instead of checking one out.
        if self.config['source']:
            return

        dirs = self.query_abs_dirs()
        dest = dirs['gecko_src']

        # Pre-create the directory to appease the share extension
        if not os.path.exists(dest):
            self.mkdir_p(dest)

        rev = self.vcs_checkout(
            repo=self.query_repo(),
            dest=dest,
            revision=self.query_revision(),
            branch=self.config.get('branch'),
            clean=True,
        )
        self.set_buildbot_property('source_revision', rev, write_to_file=True)

    def checkout_source(self):
        try:
            self.do_checkout_source()
        except Exception as e:
            self.fatal("checkout failed: " + str(e), exit_code=RETRY)

    def get_blobs(self):
        work_dir = self.query_abs_dirs()['abs_work_dir']
        if not os.path.exists(work_dir):
            self.mkdir_p(work_dir)
        self.tooltool_fetch(self.query_compiler_manifest(), output_dir=work_dir)
        self.tooltool_fetch(self.query_sixgill_manifest(), output_dir=work_dir)

    def clobber_shell(self):
        self.analysis.clobber_shell(self)

    def configure_shell(self):
        self.enable_mock()

        try:
            self.analysis.configure_shell(self)
        except HazardError as e:
            self.fatal(e, exit_code=FAILURE)

        self.disable_mock()

    def build_shell(self):
        self.enable_mock()

        try:
            self.analysis.build_shell(self)
        except HazardError as e:
            self.fatal(e, exit_code=FAILURE)

        self.disable_mock()

    def clobber_analysis(self):
        self.analysis.clobber(self)

    def setup_analysis(self):
        self.analysis.setup(self)

    def run_analysis(self):
        self.enable_mock()

        upload_dir = self.query_abs_dirs()['abs_blob_upload_dir']
        if not os.path.exists(upload_dir):
            self.mkdir_p(upload_dir)

        env = self.env.copy()
        env['MOZ_UPLOAD_DIR'] = upload_dir

        try:
            self.analysis.run(self, env=env, error_list=MakefileErrorList)
        except HazardError as e:
            self.fatal(e, exit_code=FAILURE)

        self.disable_mock()

    def collect_analysis_output(self):
        self.analysis.collect_output(self)

    def upload_analysis(self):
        if not self.config['is_automation']:
            return

        if not self.query_do_upload():
            self.info("Uploads disabled for this build. Skipping...")
            return

        self.enable_mock()

        try:
            self.analysis.upload_results(self)
        except HazardError as e:
            self.error(e)
            self.return_code = WARNINGS

        self.disable_mock()

    def check_expectations(self):
        try:
            self.analysis.check_expectations(self)
        except HazardError as e:
            self.fatal(e, exit_code=FAILURE)
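
The many query_* methods above all apply the same precedence rule: prefer a buildbot property when running under automation, otherwise fall back to the script config. A hypothetical helper (not in the original code) that captures the pattern:

def _buildbot_prop_or_config(script, prop_name, config_key=None, default=None):
    """Return the buildbot property if present, else the config value."""
    config_key = config_key or prop_name
    if script.buildbot_config and 'properties' in script.buildbot_config:
        props = script.buildbot_config['properties']
        if prop_name in props:
            return props[prop_name]
    return script.config.get(config_key, default)

# e.g. _buildbot_prop_or_config(self, 'upload_ssh_server') mirrors
# query_upload_ssh_server() above.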
Code example #10
class SpidermonkeyBuild(MockMixin,
                        PurgeMixin, BaseScript,
                        VCSMixin, BuildbotMixin, TooltoolMixin, TransferMixin):
    config_options = [
        [["--repo"], {
            "dest": "repo",
            "help": "which gecko repo to get spidermonkey from",
        }],
        [["--source"], {
            "dest": "source",
            "help": "directory containing gecko source tree (instead of --repo)",
        }],
        [["--revision"], {
            "dest": "revision",
        }],
        [["--branch"], {
            "dest": "branch",
        }],
        [["--vcs-share-base"], {
            "dest": "vcs_share_base",
            "help": "base directory for shared repositories",
        }],
        [["-j"], {
            "dest": "concurrency",
            "type": int,
            "default": 4,
            "help": "number of simultaneous jobs used while building the shell " +
                    "(currently ignored for the analyzed build",
        }],
    ]

    def __init__(self):
        BaseScript.__init__(self,
                            config_options=self.config_options,
                            # other stuff
                            all_actions=[
                                'purge',
                                'checkout-tools',

                                # First, build an optimized JS shell for running the analysis
                                'checkout-source',
                                'get-blobs',
                                'clobber-shell',
                                'configure-shell',
                                'build-shell',

                                # Next, build a tree with the analysis plugin
                                # active. Note that we are using the same
                                # checkout for the JS shell build and the build
                                # of the source to be analyzed, which is a
                                # little unnecessary (no need to rebuild the JS
                                # shell all the time). (Different objdir,
                                # though.)
                                'clobber-analysis',
                                'setup-analysis',
                                'run-analysis',
                                'collect-analysis-output',
                                'upload-analysis',
                                'check-expectations',
                            ],
                            default_actions=[
                                'purge',
                                'checkout-tools',
                                'checkout-source',
                                'get-blobs',
                                'clobber-shell',
                                'configure-shell',
                                'build-shell',
                                'clobber-analysis',
                                'setup-analysis',
                                'run-analysis',
                                'collect-analysis-output',
                                # Temporary - see bug 1211402
                                #'upload-analysis',
                                'check-expectations',
                            ],
                            config={
                                'default_vcs': 'hgtool',
                                'vcs_share_base': os.environ.get('HG_SHARE_BASE_DIR'),
                                'ccache': True,
                                'buildbot_json_path': os.environ.get('PROPERTIES_FILE'),
                                'tools_repo': 'https://hg.mozilla.org/build/tools',

                                'upload_ssh_server': None,
                                'upload_remote_basepath': None,
                                'enable_try_uploads': True,
                                'source': None,
                                'stage_product': 'firefox',
                            },
        )

        self.buildid = None
        self.analysis = HazardAnalysis()

    def _pre_config_lock(self, rw_config):
        if self.config['source']:
            self.config['srcdir'] = self.config['source']
        super(SpidermonkeyBuild, self)._pre_config_lock(rw_config)

        if self.buildbot_config is None:
            self.info("Reading buildbot build properties...")
            self.read_buildbot_config()

        if self.buildbot_config:
            bb_props = [('mock_target', 'mock_target', None),
                        ('base_bundle_urls', 'hgtool_base_bundle_urls', None),
                        ('base_mirror_urls', 'hgtool_base_mirror_urls', None),
                        ('hgurl', 'hgurl', None),
                        ('clobberer_url', 'clobberer_url', 'https://api.pub.build.mozilla.org/clobberer/lastclobber'),
                        ('purge_minsize', 'purge_minsize', 15),
                        ('purge_maxage', 'purge_maxage', None),
                        ('purge_skip', 'purge_skip', None),
                        ('force_clobber', 'force_clobber', None),
                        ]
            buildbot_props = self.buildbot_config.get('properties', {})
            for bb_prop, cfg_prop, default in bb_props:
                if not self.config.get(cfg_prop) and buildbot_props.get(bb_prop, default):
                    self.config[cfg_prop] = buildbot_props.get(bb_prop, default)
            self.config['is_automation'] = True
        else:
            self.config['is_automation'] = False

        dirs = self.query_abs_dirs()
        replacements = self.config['env_replacements'].copy()
        for k,v in replacements.items():
            replacements[k] = v % dirs

        self.env = self.query_env(replace_dict=replacements,
                                  partial_env=self.config['partial_env'],
                                  purge_env=nuisance_env_vars)

    def query_abs_dirs(self):
        if self.abs_dirs:
            return self.abs_dirs
        abs_dirs = BaseScript.query_abs_dirs(self)

        abs_work_dir = abs_dirs['abs_work_dir']
        dirs = {
            'shell_objdir':
                os.path.join(abs_work_dir, self.config['shell-objdir']),
            'mozharness_scriptdir':
                os.path.abspath(os.path.dirname(__file__)),
            'abs_analysis_dir':
                os.path.join(abs_work_dir, self.config['analysis-dir']),
            'abs_analyzed_objdir':
                os.path.join(abs_work_dir, self.config['srcdir'], self.config['analysis-objdir']),
            'analysis_scriptdir':
                os.path.join(self.config['srcdir'], self.config['analysis-scriptdir']),
            'abs_tools_dir':
                os.path.join(abs_dirs['base_work_dir'], 'tools'),
            'gecko_src':
                os.path.join(abs_work_dir, self.config['srcdir']),
            'abs_blob_upload_dir':
                os.path.join(abs_work_dir, 'blobber_upload_dir'),
        }

        abs_dirs.update(dirs)
        self.abs_dirs = abs_dirs

        return self.abs_dirs

    def query_repo(self):
        if self.config.get('repo'):
            return self.config['repo']
        elif self.buildbot_config and 'properties' in self.buildbot_config:
            return self.config['hgurl'] + self.buildbot_config['properties']['repo_path']
        else:
            return None

    def query_revision(self):
        if 'revision' in self.buildbot_properties:
            revision = self.buildbot_properties['revision']
        elif self.buildbot_config and 'sourcestamp' in self.buildbot_config:
            revision = self.buildbot_config['sourcestamp']['revision']
        else:
            # Useful for local testing. In actual use, this would always be
            # None.
            revision = self.config.get('revision')

        return revision

    def query_branch(self):
        if self.buildbot_config and 'properties' in self.buildbot_config:
            return self.buildbot_config['properties']['branch']
        elif 'branch' in self.config:
            # Used for locally testing try vs non-try
            return self.config['branch']
        else:
            return os.path.basename(self.query_repo())

    def query_compiler_manifest(self):
        dirs = self.query_abs_dirs()
        manifest = os.path.join(dirs['abs_work_dir'], dirs['analysis_scriptdir'], self.config['compiler_manifest'])
        if os.path.exists(manifest):
            return manifest
        return os.path.join(dirs['abs_work_dir'], self.config['compiler_manifest'])

    def query_sixgill_manifest(self):
        dirs = self.query_abs_dirs()
        manifest = os.path.join(dirs['abs_work_dir'], dirs['analysis_scriptdir'], self.config['sixgill_manifest'])
        if os.path.exists(manifest):
            return manifest
        return os.path.join(dirs['abs_work_dir'], self.config['sixgill_manifest'])

    def query_buildid(self):
        if self.buildid:
            return self.buildid
        if self.buildbot_config and 'properties' in self.buildbot_config:
            self.buildid = self.buildbot_config['properties'].get('buildid')
        if not self.buildid:
            self.buildid = datetime.now().strftime("%Y%m%d%H%M%S")
        return self.buildid

    def query_upload_ssh_server(self):
        if self.buildbot_config and 'properties' in self.buildbot_config:
            return self.buildbot_config['properties']['upload_ssh_server']
        else:
            return self.config['upload_ssh_server']

    def query_upload_ssh_key(self):
        if self.buildbot_config and 'properties' in self.buildbot_config:
            key = self.buildbot_config['properties']['upload_ssh_key']
        else:
            key = self.config['upload_ssh_key']
        if self.mock_enabled and not key.startswith("/"):
            key = "/home/mock_mozilla/.ssh/" + key
        return key

    def query_upload_ssh_user(self):
        if self.buildbot_config and 'properties' in self.buildbot_config:
            return self.buildbot_config['properties']['upload_ssh_user']
        else:
            return self.config['upload_ssh_user']

    def query_product(self):
        if self.buildbot_config and 'properties' in self.buildbot_config:
            return self.buildbot_config['properties']['product']
        else:
            return self.config['product']

    def query_upload_remote_basepath(self):
        if self.config.get('upload_remote_basepath'):
            return self.config['upload_remote_basepath']
        else:
            return "/pub/mozilla.org/{product}".format(
                product=self.query_product(),
            )

    def query_upload_remote_baseuri(self):
        baseuri = self.config.get('upload_remote_baseuri')
        if self.buildbot_config and 'properties' in self.buildbot_config:
            buildprops = self.buildbot_config['properties']
            if 'upload_remote_baseuri' in buildprops:
                baseuri = buildprops['upload_remote_baseuri']
        return baseuri.strip("/") if baseuri else None

    def query_target(self):
        if self.buildbot_config and 'properties' in self.buildbot_config:
            return self.buildbot_config['properties']['platform']
        else:
            return self.config.get('target')

    def query_upload_path(self):
        branch = self.query_branch()

        common = {
            'basepath': self.query_upload_remote_basepath(),
            'branch': branch,
            'target': self.query_target(),
        }

        if branch == 'try':
            if not self.config['enable_try_uploads']:
                return None
            try:
                user = self.buildbot_config['sourcestamp']['changes'][0]['who']
            except (KeyError, TypeError):
                user = "******"
            return "{basepath}/try-builds/{user}-{rev}/{branch}-{target}".format(
                user=user,
                rev=self.query_revision(),
                **common
            )
        else:
            return "{basepath}/tinderbox-builds/{branch}-{target}/{buildid}".format(
                buildid=self.query_buildid(),
                **common
            )

    def query_do_upload(self):
        if self.query_branch() == 'try':
            return self.config.get('enable_try_uploads')
        return True

    # Actions {{{2
    def purge(self):
        dirs = self.query_abs_dirs()
        self.info("purging, abs_upload_dir=" + dirs['abs_upload_dir'])
        PurgeMixin.clobber(
            self,
            always_clobber_dirs=[
                dirs['abs_upload_dir'],
            ],
        )

    def checkout_tools(self):
        dirs = self.query_abs_dirs()

        # If running from within a directory also passed as the --source dir,
        # this has the danger of clobbering <source>/tools/
        if self.config['source']:
            srcdir = self.config['source']
            if os.path.samefile(srcdir, os.path.dirname(dirs['abs_tools_dir'])):
                raise Exception("Cannot run from source checkout to avoid overwriting subdirs")

        rev = self.vcs_checkout(
            vcs='hg',  # Don't have hgtool.py yet
            repo=self.config['tools_repo'],
            clean=False,
            dest=dirs['abs_tools_dir'],
        )
        self.set_buildbot_property("tools_revision", rev, write_to_file=True)

    def do_checkout_source(self):
        # --source option means to use an existing source directory instead of checking one out.
        if self.config['source']:
            return

        dirs = self.query_abs_dirs()
        dest = dirs['gecko_src']

        # Pre-create the directory to appease the share extension
        if not os.path.exists(dest):
            self.mkdir_p(dest)

        rev = self.vcs_checkout(
            repo=self.query_repo(),
            dest=dest,
            revision=self.query_revision(),
            branch=self.config.get('branch'),
            clean=True,
        )
        self.set_buildbot_property('source_revision', rev, write_to_file=True)

    def checkout_source(self):
        try:
            self.do_checkout_source()
        except Exception as e:
            self.fatal("checkout failed: " + str(e), exit_code=RETRY)

    def get_blobs(self):
        work_dir = self.query_abs_dirs()['abs_work_dir']
        if not os.path.exists(work_dir):
            self.mkdir_p(work_dir)
        self.tooltool_fetch(self.query_compiler_manifest(), output_dir=work_dir)
        self.tooltool_fetch(self.query_sixgill_manifest(), output_dir=work_dir)

    def clobber_shell(self):
        self.analysis.clobber_shell(self)

    def configure_shell(self):
        self.enable_mock()

        try:
            self.analysis.configure_shell(self)
        except HazardError as e:
            self.fatal(e, exit_code=FAILURE)

        self.disable_mock()

    def build_shell(self):
        self.enable_mock()

        try:
            self.analysis.build_shell(self)
        except HazardError as e:
            self.fatal(e, exit_code=FAILURE)

        self.disable_mock()

    def clobber_analysis(self):
        self.analysis.clobber(self)

    def setup_analysis(self):
        self.analysis.setup(self)

    def run_analysis(self):
        self.enable_mock()

        upload_dir = self.query_abs_dirs()['abs_blob_upload_dir']
        if not os.path.exists(upload_dir):
            self.mkdir_p(upload_dir)

        env = self.env.copy()
        env['MOZ_UPLOAD_DIR'] = upload_dir

        try:
            self.analysis.run(self, env=env, error_list=MakefileErrorList)
        except HazardError as e:
            self.fatal(e, exit_code=FAILURE)

        self.disable_mock()

    def collect_analysis_output(self):
        self.analysis.collect_output(self)

    def upload_analysis(self):
        if not self.config['is_automation']:
            return

        if not self.query_do_upload():
            self.info("Uploads disabled for this build. Skipping...")
            return

        self.enable_mock()

        try:
            self.analysis.upload_results(self)
        except HazardError as e:
            self.error(e)
            self.return_code = WARNINGS

        self.disable_mock()

    def check_expectations(self):
        try:
            self.analysis.check_expectations(self)
        except HazardError as e:
            self.fatal(e, exit_code=FAILURE)
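
The try/tinderbox branching in query_upload_path above reduces to two format strings. A minimal, standalone sketch of the resulting layouts, with every value (basepath, user, revision, buildid, target) invented for illustration rather than taken from a real build:

# Sketch of the upload-path layout produced by query_upload_path; all values
# below are hypothetical.
common = {
    'basepath': '/pub/mozilla.org/firefox',
    'branch': 'try',
    'target': 'linux64-haz',
}
try_path = "{basepath}/try-builds/{user}-{rev}/{branch}-{target}".format(
    user='someuser', rev='abcdef012345', **common)
print(try_path)
# -> /pub/mozilla.org/firefox/try-builds/someuser-abcdef012345/try-linux64-haz

common['branch'] = 'mozilla-central'
tinderbox_path = "{basepath}/tinderbox-builds/{branch}-{target}/{buildid}".format(
    buildid='20240101120000', **common)
print(tinderbox_path)
# -> /pub/mozilla.org/firefox/tinderbox-builds/mozilla-central-linux64-haz/20240101120000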
Code example #11
    def __init__(self):
        super(SpidermonkeyBuild, self).__init__(
            config_options=self.config_options,
            # other stuff
            all_actions=[
                'purge',
                'checkout-tools',

                # First, build an optimized JS shell for running the analysis
                'checkout-source',
                'get-blobs',
                'clobber-shell',
                'configure-shell',
                'build-shell',

                # Next, build a tree with the analysis plugin active. Note that
                # we are using the same checkout for the JS shell build and the
                # build of the source to be analyzed, which is a little
                # unnecessary (no need to rebuild the JS shell all the time).
                # (Different objdir, though.)

                'clobber-analysis',
                'setup-analysis',
                'run-analysis',
                'collect-analysis-output',
                'upload-analysis',
                'check-expectations',
            ],
            default_actions=[
                'purge',
                'checkout-tools',
                'checkout-source',
                'get-blobs',
                'clobber-shell',
                'configure-shell',
                'build-shell',
                'clobber-analysis',
                'setup-analysis',
                'run-analysis',
                'collect-analysis-output',
                # Temporarily disabled, see bug 1211402
                # 'upload-analysis',
                'check-expectations',
            ],
            config={
                'default_vcs': 'hgtool',
                'vcs_share_base': os.environ.get('HG_SHARE_BASE_DIR'),
                'ccache': True,
                'buildbot_json_path': os.environ.get('PROPERTIES_FILE'),
                'tools_repo': 'https://hg.mozilla.org/build/tools',

                'upload_ssh_server': None,
                'upload_remote_basepath': None,
                'enable_try_uploads': True,
                'source': None,
                'stage_product': 'firefox',
            },
        )

        self.buildid = None
        self.create_virtualenv()
        self.analysis = HazardAnalysis()
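
The dashed action names listed above map onto methods defined elsewhere in the class (for example 'checkout-tools' runs checkout_tools). A simplified, self-contained sketch of that dash-to-underscore dispatch, which only illustrates the convention and is not the real mozharness BaseScript.run():

# Simplified dispatch sketch; TinyScript and Demo are invented names.
class TinyScript(object):
    def __init__(self, all_actions, default_actions):
        self.all_actions = all_actions
        self.default_actions = default_actions

    def run(self):
        # Run the enabled actions in order, resolving each dashed name to a method.
        for action in self.default_actions:
            assert action in self.all_actions
            getattr(self, action.replace('-', '_'))()

class Demo(TinyScript):
    def checkout_tools(self):
        print("checkout-tools")

    def build_shell(self):
        print("build-shell")

Demo(all_actions=['checkout-tools', 'build-shell'],
     default_actions=['checkout-tools', 'build-shell']).run()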
Code example #12
File: hazard_build.py  Project: Antonius32/Pale-Moon
class B2GHazardBuild(PurgeMixin, B2GBuildBaseScript):
    all_actions = [
        "clobber",
        "checkout-tools",
        "checkout-sources",
        "get-blobs",
        "update-source-manifest",
        "clobber-shell",
        "configure-shell",
        "build-shell",
        "clobber-analysis",
        "setup-analysis",
        "run-analysis",
        "collect-analysis-output",
        "upload-analysis",
        "check-expectations",
    ]

    default_actions = [
        "clobber",
        "checkout-tools",
        "checkout-sources",
        "get-blobs",
        "clobber-shell",
        "configure-shell",
        "build-shell",
        "clobber-analysis",
        "setup-analysis",
        "run-analysis",
        "collect-analysis-output",
        "upload-analysis",
        "check-expectations",
    ]

    def __init__(self):
        super(B2GHazardBuild, self).__init__(
            config_options=[],
            config={
                "default_vcs": "hgtool",
                "ccache": False,
                "mozilla_dir": "build/goanna",
                "upload_ssh_server": None,
                "upload_remote_basepath": None,
                "enable_try_uploads": True,
            },
            all_actions=B2GHazardBuild.all_actions,
            default_actions=B2GHazardBuild.default_actions,
        )

        self.buildid = None
        self.analysis = HazardAnalysis()

    def _pre_config_lock(self, rw_config):
        super(B2GHazardBuild, self)._pre_config_lock(rw_config)

        if self.buildbot_config:
            self.config["is_automation"] = True
        else:
            self.config["is_automation"] = False

        dirs = self.query_abs_dirs()
        replacements = self.config["env_replacements"].copy()
        for k, v in replacements.items():
            replacements[k] = v % dirs

        self.env = self.query_env(
            replace_dict=replacements, partial_env=self.config["partial_env"], purge_env=nuisance_env_vars
        )

    def query_abs_dirs(self):
        if self.abs_dirs:
            return self.abs_dirs
        abs_dirs = super(B2GHazardBuild, self).query_abs_dirs()

        abs_work_dir = abs_dirs["abs_work_dir"]
        dirs = {
            "b2g_src": abs_work_dir,
            "target_compiler_base": os.path.join(abs_dirs["abs_work_dir"], "target_compiler"),
            "shell_objdir": os.path.join(abs_work_dir, self.config["shell-objdir"]),
            "mozharness_scriptdir": os.path.abspath(os.path.dirname(__file__)),
            "abs_analysis_dir": os.path.join(abs_work_dir, self.config["analysis-dir"]),
            "abs_analyzed_objdir": os.path.join(abs_work_dir, self.config["srcdir"], self.config["analysis-objdir"]),
            "analysis_scriptdir": os.path.join(abs_dirs["goanna_src"], self.config["analysis-scriptdir"]),
        }

        abs_dirs.update(dirs)
        self.abs_dirs = abs_dirs
        return self.abs_dirs

    def query_branch(self):
        if self.buildbot_config and "properties" in self.buildbot_config:
            return self.buildbot_config["properties"]["branch"]
        elif "branch" in self.config:
            # Used for locally testing try vs non-try
            return self.config["branch"]
        else:
            return os.path.basename(self.query_repo())

    def query_compiler_manifest(self):
        dirs = self.query_abs_dirs()
        return os.path.join(dirs["analysis_scriptdir"], self.config["compiler_manifest"])

    def query_b2g_compiler_manifest(self):
        dirs = self.query_abs_dirs()
        return os.path.join(dirs["analysis_scriptdir"], self.config["b2g_compiler_manifest"])

    def query_sixgill_manifest(self):
        dirs = self.query_abs_dirs()
        return os.path.join(dirs["analysis_scriptdir"], self.config["sixgill_manifest"])

    def query_buildid(self):
        if self.buildid:
            return self.buildid
        if self.buildbot_config and "properties" in self.buildbot_config:
            self.buildid = self.buildbot_config["properties"].get("buildid")
        if not self.buildid:
            self.buildid = datetime.now().strftime("%Y%m%d%H%M%S")
        return self.buildid

    def query_upload_ssh_server(self):
        if self.buildbot_config and "properties" in self.buildbot_config:
            return self.buildbot_config["properties"]["upload_ssh_server"]
        else:
            return self.config["upload_ssh_server"]

    def query_upload_ssh_key(self):
        if self.buildbot_config and "properties" in self.buildbot_config:
            key = self.buildbot_config["properties"]["upload_ssh_key"]
        else:
            key = self.config["upload_ssh_key"]
        if self.mock_enabled and not key.startswith("/"):
            key = "/home/mock_mozilla/.ssh/" + key
        return key

    def query_upload_ssh_user(self):
        if self.buildbot_config and "properties" in self.buildbot_config:
            return self.buildbot_config["properties"]["upload_ssh_user"]
        else:
            return self.config["upload_ssh_user"]

    def query_product(self):
        if self.buildbot_config and "properties" in self.buildbot_config:
            return self.buildbot_config["properties"]["product"]
        else:
            return self.config["product"]

    def query_upload_remote_basepath(self):
        if self.config.get("upload_remote_basepath"):
            return self.config["upload_remote_basepath"]
        else:
            return "/pub/mozilla.org/{product}".format(product=self.query_product())

    def query_upload_remote_baseuri(self):
        baseuri = self.config.get("upload_remote_baseuri")
        if self.buildbot_config and "properties" in self.buildbot_config:
            buildprops = self.buildbot_config["properties"]
            if "upload_remote_baseuri" in buildprops:
                baseuri = buildprops["upload_remote_baseuri"]
        return baseuri.strip("/") if baseuri else None

    def query_upload_label(self):
        if self.buildbot_config and "properties" in self.buildbot_config:
            return self.buildbot_config["properties"]["platform"]
        else:
            return self.config.get("upload_label")

    def query_upload_path(self):
        branch = self.query_branch()

        common = {
            "basepath": self.query_upload_remote_basepath(),
            "branch": branch,
            "target": self.query_upload_label(),
        }

        if branch == "try":
            if not self.config["enable_try_uploads"]:
                return None
            try:
                user = self.buildbot_config["sourcestamp"]["changes"][0]["who"]
            except (KeyError, TypeError):
                user = "******"
            return "{basepath}/try-builds/{user}-{rev}/{branch}-{target}".format(
                user=user, rev=self.query_revision(), **common
            )
        else:
            return "{basepath}/tinderbox-builds/{branch}-{target}/{buildid}".format(
                buildid=self.query_buildid(), **common
            )

    def query_do_upload(self):
        if self.query_branch() == "try":
            return self.config.get("enable_try_uploads")
        return True

    def make_source_dir(self):
        dirs = self.query_abs_dirs()
        dest = dirs["b2g_src"]
        if not os.path.exists(dest):
            self.mkdir_p(dest)

    # The goanna config is required before enabling mock, because it determines
    # what mock target to use.
    def enable_mock(self):
        self.load_goanna_config()
        super(B2GHazardBuild, self).enable_mock()

    # Actions {{{2
    def clobber(self):
        dirs = self.query_abs_dirs()
        PurgeMixin.clobber(self, always_clobber_dirs=[dirs["abs_upload_dir"]])

    def checkout_sources(self):
        self.make_source_dir()
        super(B2GHazardBuild, self).checkout_sources()

    def get_blobs(self):
        dirs = self.query_abs_dirs()
        self.tooltool_fetch(self.query_compiler_manifest(), "sh " + self.config["compiler_setup"], dirs["abs_work_dir"])
        self.tooltool_fetch(self.query_sixgill_manifest(), "sh " + self.config["sixgill_setup"], dirs["abs_work_dir"])
        if not os.path.exists(dirs["target_compiler_base"]):
            self.mkdir_p(dirs["target_compiler_base"])
        self.tooltool_fetch(
            self.query_b2g_compiler_manifest(), "sh " + self.config["compiler_setup"], dirs["target_compiler_base"]
        )

    def clobber_shell(self):
        dirs = self.query_abs_dirs()
        self.rmtree(dirs["shell_objdir"])

    def configure_shell(self):
        self.enable_mock()
        dirs = self.query_abs_dirs()

        if not os.path.exists(dirs["shell_objdir"]):
            self.mkdir_p(dirs["shell_objdir"])

        js_src_dir = os.path.join(dirs["goanna_src"], "js", "src")
        rc = self.run_command(["autoconf-2.13"], cwd=js_src_dir, env=self.env, error_list=MakefileErrorList)
        if rc != 0:
            self.fatal("autoconf failed, can't continue.", exit_code=FAILURE)

        rc = self.run_command(
            [
                os.path.join(js_src_dir, "configure"),
                "--enable-optimize",
                "--disable-debug",
                "--enable-ctypes",
                "--with-system-nspr",
                "--without-intl-api",
            ],
            cwd=dirs["shell_objdir"],
            env=self.env,
            error_list=MakefileErrorList,
        )
        if rc != 0:
            self.fatal("Configure failed, can't continue.", exit_code=FAILURE)

        self.disable_mock()

    def build_shell(self):
        self.enable_mock()
        dirs = self.query_abs_dirs()

        rc = self.run_command(
            ["make", "-j", str(self.config.get("concurrency", 4)), "-s"],
            cwd=dirs["shell_objdir"],
            env=self.env,
            error_list=MakefileErrorList,
        )
        if rc != 0:
            self.fatal("Build failed, can't continue.", exit_code=FAILURE)

        self.disable_mock()

    def clobber_analysis(self):
        dirs = self.query_abs_dirs()
        self.rmtree(dirs["abs_analysis_dir"])
        self.rmtree(dirs["abs_analyzed_objdir"])

    def setup_analysis(self):
        dirs = self.query_abs_dirs()
        analysis_dir = dirs["abs_analysis_dir"]

        if not os.path.exists(analysis_dir):
            self.mkdir_p(analysis_dir)

        values = {
            "js": os.path.join(dirs["shell_objdir"], "dist", "bin", "js"),
            "analysis_scriptdir": dirs["analysis_scriptdir"],
            "source_objdir": dirs["abs_analyzed_objdir"],
            "source": os.path.join(dirs["work_dir"], "source"),
            "sixgill": os.path.join(dirs["work_dir"], self.config["sixgill"]),
            "sixgill_bin": os.path.join(dirs["work_dir"], self.config["sixgill_bin"]),
        }
        defaults = (
            """
js = '%(js)s'
analysis_scriptdir = '%(analysis_scriptdir)s'
objdir = '%(source_objdir)s'
source = '%(source)s'
sixgill = '%(sixgill)s'
sixgill_bin = '%(sixgill_bin)s'
jobs = 2
"""
            % values
        )

        defaults_path = os.path.join(analysis_dir, "defaults.py")
        # Write the analysis configuration with a context manager so the file
        # is closed before the analysis reads it back.
        with open(defaults_path, "w") as f:
            f.write(defaults)
        self.log("Wrote analysis config file " + defaults_path)

        build_command = self.config["build_command"]
        self.copyfile(
            os.path.join(dirs["mozharness_scriptdir"], os.path.join("spidermonkey", build_command)),
            os.path.join(analysis_dir, build_command),
            copystat=True,
        )

    def run_analysis(self):
        dirs = self.query_abs_dirs()

        env = self.query_build_env().copy()
        self.enable_mock()

        gonk_misc = os.path.join(dirs["b2g_src"], "gonk-misc")
        mozconfig = os.path.join(gonk_misc, "hazard-analysis-config")
        mozconfig_text = ""
        mozconfig_text += 'CXXFLAGS="-Wno-attributes"\n'
        mozconfig_text += '. "%s/default-goanna-config"\n' % gonk_misc
        basecc = os.path.join(dirs["abs_work_dir"], self.config["sixgill"], "scripts", "wrap_gcc", "basecc")
        mozconfig_text += "ac_add_options --with-compiler-wrapper=" + basecc + "\n"
        mozconfig_text += "ac_add_options --without-ccache\n"
        with open(mozconfig, "w") as f:
            f.write(mozconfig_text)

        # Stuff I set in my .userconfig for manual builds
        env["B2G_SOURCE"] = dirs["b2g_src"]
        env["MOZCONFIG_PATH"] = mozconfig
        env["GECKO_PATH"] = dirs["goanna_src"]
        env["TARGET_TOOLS_PREFIX"] = os.path.join(dirs["abs_work_dir"], self.config["b2g_target_compiler_prefix"])

        try:
            self.analysis.run(self, env=env, error_list=B2GMakefileErrorList)
        except HazardError as e:
            self.fatal(e, exit_code=FAILURE)

        self.disable_mock()

    def collect_analysis_output(self):
        self.analysis.collect_output(self)

    def upload_analysis(self):
        if not self.query_do_upload():
            self.info("Uploads disabled for this build. Skipping...")
            return

        self.enable_mock()

        try:
            self.analysis.upload_results(self)
        except HazardError as e:
            self.error(e)
            self.return_code = WARNINGS

        self.disable_mock()

    def check_expectations(self):
        try:
            self.analysis.check_expectations(self)
        except HazardError as e:
            self.fatal(e, exit_code=FAILURE)
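
The defaults.py written by setup_analysis above is itself a small Python fragment of plain assignments that the analysis consumes later. A sketch of what the generated file ends up containing, with every path invented for illustration; the real values come from query_abs_dirs() and the per-platform config:

# Hypothetical contents of the generated defaults.py.
js = '/work/shell-objdir/dist/bin/js'
analysis_scriptdir = '/work/source/js/src/devtools/rootAnalysis'
objdir = '/work/source/obj-analyzed'
source = '/work/source'
sixgill = '/work/sixgill'
sixgill_bin = '/work/sixgill/usr/bin'
jobs = 2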
Code example #13
class B2GHazardBuild(PurgeMixin, B2GBuildBaseScript):
    all_actions = [
        'clobber',
        'checkout-tools',
        'checkout-sources',
        'get-blobs',
        'update-source-manifest',
        'clobber-shell',
        'configure-shell',
        'build-shell',
        'clobber-analysis',
        'setup-analysis',
        'run-analysis',
        'collect-analysis-output',
        'upload-analysis',
        'check-expectations',
    ]

    default_actions = [
        'clobber',
        'checkout-tools',
        'checkout-sources',
        'get-blobs',
        'clobber-shell',
        'configure-shell',
        'build-shell',
        'clobber-analysis',
        'setup-analysis',
        'run-analysis',
        'collect-analysis-output',
        'upload-analysis',
        'check-expectations',
    ]

    def __init__(self):
        super(B2GHazardBuild, self).__init__(
            config_options=[],
            config={
                'default_vcs': 'hgtool',
                'ccache': False,
                'mozilla_dir': 'build/gecko',
                'upload_ssh_server': None,
                'upload_remote_basepath': None,
                'enable_try_uploads': True,
            },
            all_actions=B2GHazardBuild.all_actions,
            default_actions=B2GHazardBuild.default_actions,
        )

        self.buildid = None
        self.analysis = HazardAnalysis()

    def _pre_config_lock(self, rw_config):
        super(B2GHazardBuild, self)._pre_config_lock(rw_config)

        if self.buildbot_config:
            self.config['is_automation'] = True
        else:
            self.config['is_automation'] = False

        dirs = self.query_abs_dirs()
        replacements = self.config['env_replacements'].copy()
        for k, v in replacements.items():
            replacements[k] = v % dirs

        self.env = self.query_env(replace_dict=replacements,
                                  partial_env=self.config['partial_env'],
                                  purge_env=nuisance_env_vars)

    def query_abs_dirs(self):
        if self.abs_dirs:
            return self.abs_dirs
        abs_dirs = super(B2GHazardBuild, self).query_abs_dirs()

        abs_work_dir = abs_dirs['abs_work_dir']
        dirs = {
            'b2g_src': abs_work_dir,
            'target_compiler_base': os.path.join(abs_dirs['abs_work_dir'],
                                                 'target_compiler'),
            'shell_objdir': os.path.join(abs_work_dir, self.config['shell-objdir']),
            'mozharness_scriptdir': os.path.abspath(os.path.dirname(__file__)),
            'abs_analysis_dir': os.path.join(abs_work_dir, self.config['analysis-dir']),
            'abs_analyzed_objdir': os.path.join(abs_work_dir, self.config['srcdir'],
                                                self.config['analysis-objdir']),
            'analysis_scriptdir': os.path.join(abs_dirs['gecko_src'],
                                               self.config['analysis-scriptdir']),
        }

        abs_dirs.update(dirs)
        self.abs_dirs = abs_dirs
        return self.abs_dirs

    def query_branch(self):
        if self.buildbot_config and 'properties' in self.buildbot_config:
            return self.buildbot_config['properties']['branch']
        elif 'branch' in self.config:
            # Used for locally testing try vs non-try
            return self.config['branch']
        else:
            return os.path.basename(self.query_repo())

    def query_compiler_manifest(self):
        dirs = self.query_abs_dirs()
        return os.path.join(dirs['analysis_scriptdir'],
                            self.config['compiler_manifest'])

    def query_b2g_compiler_manifest(self):
        dirs = self.query_abs_dirs()
        return os.path.join(dirs['analysis_scriptdir'],
                            self.config['b2g_compiler_manifest'])

    def query_sixgill_manifest(self):
        dirs = self.query_abs_dirs()
        return os.path.join(dirs['analysis_scriptdir'],
                            self.config['sixgill_manifest'])

    def query_buildid(self):
        if self.buildid:
            return self.buildid
        if self.buildbot_config and 'properties' in self.buildbot_config:
            self.buildid = self.buildbot_config['properties'].get('buildid')
        if not self.buildid:
            self.buildid = datetime.now().strftime("%Y%m%d%H%M%S")
        return self.buildid

    def query_upload_ssh_server(self):
        if self.buildbot_config and 'properties' in self.buildbot_config:
            return self.buildbot_config['properties']['upload_ssh_server']
        else:
            return self.config['upload_ssh_server']

    def query_upload_ssh_key(self):
        if self.buildbot_config and 'properties' in self.buildbot_config:
            key = self.buildbot_config['properties']['upload_ssh_key']
        else:
            key = self.config['upload_ssh_key']
        if self.mock_enabled and not key.startswith("/"):
            key = "/home/mock_mozilla/.ssh/" + key
        return key

    def query_upload_ssh_user(self):
        if self.buildbot_config and 'properties' in self.buildbot_config:
            return self.buildbot_config['properties']['upload_ssh_user']
        else:
            return self.config['upload_ssh_user']

    def query_product(self):
        if self.buildbot_config and 'properties' in self.buildbot_config:
            return self.buildbot_config['properties']['product']
        else:
            return self.config['product']

    def query_upload_remote_basepath(self):
        if self.config.get('upload_remote_basepath'):
            return self.config['upload_remote_basepath']
        else:
            return "/pub/mozilla.org/{product}".format(
                product=self.query_product(), )

    def query_upload_remote_baseuri(self):
        baseuri = self.config.get('upload_remote_baseuri')
        if self.buildbot_config and 'properties' in self.buildbot_config:
            buildprops = self.buildbot_config['properties']
            if 'upload_remote_baseuri' in buildprops:
                baseuri = buildprops['upload_remote_baseuri']
        return baseuri.strip("/") if baseuri else None

    def query_upload_label(self):
        if self.buildbot_config and 'properties' in self.buildbot_config:
            return self.buildbot_config['properties']['platform']
        else:
            return self.config.get('upload_label')

    def query_upload_path(self):
        branch = self.query_branch()

        common = {
            'basepath': self.query_upload_remote_basepath(),
            'branch': branch,
            'target': self.query_upload_label(),
        }

        if branch == 'try':
            if not self.config['enable_try_uploads']:
                return None
            try:
                user = self.buildbot_config['sourcestamp']['changes'][0]['who']
            except (KeyError, TypeError):
                user = "******"
            return "{basepath}/try-builds/{user}-{rev}/{branch}-{target}".format(
                user=user, rev=self.query_revision(), **common)
        else:
            return "{basepath}/tinderbox-builds/{branch}-{target}/{buildid}".format(
                buildid=self.query_buildid(), **common)

    def query_do_upload(self):
        if self.query_branch() == 'try':
            return self.config.get('enable_try_uploads')
        return True

    def make_source_dir(self):
        dirs = self.query_abs_dirs()
        dest = dirs['b2g_src']
        if not os.path.exists(dest):
            self.mkdir_p(dest)

    # The gecko config is required before enabling mock, because it determines
    # what mock target to use.
    def enable_mock(self):
        self.load_gecko_config()
        super(B2GHazardBuild, self).enable_mock()

    # Actions {{{2
    def clobber(self):
        dirs = self.query_abs_dirs()
        PurgeMixin.clobber(
            self,
            always_clobber_dirs=[
                dirs['abs_upload_dir'],
            ],
        )

    def checkout_sources(self):
        self.make_source_dir()
        super(B2GHazardBuild, self).checkout_sources()

    def get_blobs(self):
        dirs = self.query_abs_dirs()
        self.tooltool_fetch(self.query_compiler_manifest(),
                            output_dir=dirs['abs_work_dir'])
        self.tooltool_fetch(self.query_sixgill_manifest(),
                            output_dir=dirs['abs_work_dir'])
        if not os.path.exists(dirs['target_compiler_base']):
            self.mkdir_p(dirs['target_compiler_base'])
        self.tooltool_fetch(self.query_b2g_compiler_manifest(),
                            output_dir=dirs['target_compiler_base'])

    def clobber_shell(self):
        dirs = self.query_abs_dirs()
        self.rmtree(dirs['shell_objdir'])

    def configure_shell(self):
        self.enable_mock()
        dirs = self.query_abs_dirs()

        if not os.path.exists(dirs['shell_objdir']):
            self.mkdir_p(dirs['shell_objdir'])

        js_src_dir = os.path.join(dirs['gecko_src'], 'js', 'src')
        rc = self.run_command(['autoconf-2.13'],
                              cwd=js_src_dir,
                              env=self.env,
                              error_list=MakefileErrorList)
        if rc != 0:
            self.fatal("autoconf failed, can't continue.", exit_code=FAILURE)

        rc = self.run_command(
            [
                os.path.join(js_src_dir, 'configure'),
                '--enable-optimize',
                '--disable-debug',
                '--enable-ctypes',
                '--with-system-nspr',
                '--without-intl-api',
            ],
            cwd=dirs['shell_objdir'],
            env=self.env,
            error_list=MakefileErrorList)
        if rc != 0:
            self.fatal("Configure failed, can't continue.", exit_code=FAILURE)

        self.disable_mock()

    def build_shell(self):
        self.enable_mock()
        dirs = self.query_abs_dirs()

        rc = self.run_command(
            ['make', '-j', str(self.config.get('concurrency', 4)), '-s'],
            cwd=dirs['shell_objdir'],
            env=self.env,
            error_list=MakefileErrorList)
        if rc != 0:
            self.fatal("Build failed, can't continue.", exit_code=FAILURE)

        self.disable_mock()

    def clobber_analysis(self):
        dirs = self.query_abs_dirs()
        self.rmtree(dirs['abs_analysis_dir'])
        self.rmtree(dirs['abs_analyzed_objdir'])

    def setup_analysis(self):
        dirs = self.query_abs_dirs()
        analysis_dir = dirs['abs_analysis_dir']

        if not os.path.exists(analysis_dir):
            self.mkdir_p(analysis_dir)

        values = {
            'js': os.path.join(dirs['shell_objdir'], 'dist', 'bin', 'js'),
            'analysis_scriptdir': dirs['analysis_scriptdir'],
            'source_objdir': dirs['abs_analyzed_objdir'],
            'source': os.path.join(dirs['work_dir'], 'source'),
            'sixgill': os.path.join(dirs['work_dir'], self.config['sixgill']),
            'sixgill_bin': os.path.join(dirs['work_dir'], self.config['sixgill_bin']),
        }
        defaults = """
js = '%(js)s'
analysis_scriptdir = '%(analysis_scriptdir)s'
objdir = '%(source_objdir)s'
source = '%(source)s'
sixgill = '%(sixgill)s'
sixgill_bin = '%(sixgill_bin)s'
jobs = 2
""" % values

        defaults_path = os.path.join(analysis_dir, 'defaults.py')
        # Write the analysis configuration with a context manager so the file
        # is closed before the analysis reads it back.
        with open(defaults_path, "w") as f:
            f.write(defaults)
        self.log("Wrote analysis config file " + defaults_path)

        build_command = self.config['build_command']
        self.copyfile(os.path.join(dirs['mozharness_scriptdir'],
                                   os.path.join('spidermonkey',
                                                build_command)),
                      os.path.join(analysis_dir, build_command),
                      copystat=True)

    def run_analysis(self):
        dirs = self.query_abs_dirs()

        env = self.query_build_env().copy()
        self.enable_mock()

        gonk_misc = os.path.join(dirs['b2g_src'], 'gonk-misc')
        mozconfig = os.path.join(gonk_misc, 'hazard-analysis-config')
        mozconfig_text = ''
        mozconfig_text += 'CXXFLAGS="-Wno-attributes"\n'
        mozconfig_text += '. "%s/default-gecko-config"\n' % gonk_misc
        basecc = os.path.join(dirs['abs_work_dir'], self.config['sixgill'],
                              'scripts', 'wrap_gcc', 'basecc')
        mozconfig_text += "ac_add_options --with-compiler-wrapper=" + basecc + "\n"
        mozconfig_text += "ac_add_options --without-ccache\n"
        with open(mozconfig, "w") as f:
            f.write(mozconfig_text)

        # Stuff I set in my .userconfig for manual builds
        env['B2G_SOURCE'] = dirs['b2g_src']
        env['MOZCONFIG_PATH'] = mozconfig
        env['GECKO_PATH'] = dirs['gecko_src']
        env['TARGET_TOOLS_PREFIX'] = os.path.join(
            dirs['abs_work_dir'], self.config['b2g_target_compiler_prefix'])

        try:
            self.analysis.run(self, env=env, error_list=B2GMakefileErrorList)
        except HazardError as e:
            self.fatal(e, exit_code=FAILURE)

        self.disable_mock()

    def collect_analysis_output(self):
        self.analysis.collect_output(self)

    def upload_analysis(self):
        if not self.query_do_upload():
            self.info("Uploads disabled for this build. Skipping...")
            return

        self.enable_mock()

        try:
            self.analysis.upload_results(self)
        except HazardError as e:
            self.error(e)
            self.return_code = WARNINGS

        self.disable_mock()

    def check_expectations(self):
        try:
            self.analysis.check_expectations(self)
        except HazardError as e:
            self.fatal(e, exit_code=FAILURE)
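
Several actions above (configure_shell, build_shell, run_analysis, upload_analysis) repeat the same enable_mock / do work / disable_mock bracketing. One way to express that shape, sketched with a stand-in script object rather than the real mozharness mixins, is a small context manager; the HazardError handling (fatal vs. warning) would stay at the call site:

# Sketch only: context-manager bracketing of enable_mock()/disable_mock().
# "script" stands in for the mozharness script object used above. Note that,
# unlike the inline pattern above, the finally block always disables mock,
# even when an exception propagates.
from contextlib import contextmanager

@contextmanager
def mocked(script):
    script.enable_mock()
    try:
        yield
    finally:
        script.disable_mock()

# Usage sketch:
#     with mocked(self):
#         self.analysis.build_shell(self)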