Example #1
 def test_extract_zipfile(self):
     """test extracting a zipfile"""
     _zipfile = self.create_zip()
     self.assertTrue(os.path.exists(_zipfile))
     try:
         dest = tempfile.mkdtemp()
         try:
             mozfile.extract_zip(_zipfile, dest)
             self.ensure_directory_contents(dest)
         finally:
             shutil.rmtree(dest)
     finally:
         os.remove(_zipfile)
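
The pattern in Example #1, create a temporary destination, extract, verify, then clean up, can be written more compactly on Python 3 with tempfile.TemporaryDirectory, which removes the directory automatically. A minimal sketch; the helper name and the verify callback are illustrative, and it assumes only that mozfile.extract_zip(src, dest) unpacks src into dest, as the examples here use it:

import tempfile

import mozfile

def extract_and_verify(zip_path, verify):
    # Sketch: unpack zip_path into a throwaway directory, run a caller-supplied
    # verify(dest) check, and let TemporaryDirectory handle the cleanup.
    with tempfile.TemporaryDirectory() as dest:
        mozfile.extract_zip(zip_path, dest)
        verify(dest)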
Example #2
 def _get_symbols(self):
     # This updates self.symbols_path so we only download once
     if self.symbols_path and mozfile.is_url(self.symbols_path):
         self.remove_symbols = True
         self.logger.info("Downloading symbols from: %s" % self.symbols_path)
         # Get the symbols and write them to a temporary zipfile
         data = urllib2.urlopen(self.symbols_path)
         with tempfile.TemporaryFile() as symbols_file:
             symbols_file.write(data.read())
             # extract symbols to a temporary directory (which we'll delete after
             # processing all crashes)
             self.symbols_path = tempfile.mkdtemp()
             with zipfile.ZipFile(symbols_file, 'r') as zfile:
                 mozfile.extract_zip(zfile, self.symbols_path)
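
The snippet above relies on Python 2's urllib2 module. On Python 3 the same download-then-extract flow would look roughly like the sketch below; the helper name is illustrative, and it keeps the example's assumption that mozfile.extract_zip accepts an already-open zipfile.ZipFile:

import tempfile
import zipfile
from urllib.request import urlopen  # Python 3 replacement for urllib2.urlopen

import mozfile

def download_and_extract_symbols(url):
    # Sketch: fetch a symbols zip over HTTP, spool it to a temporary file,
    # then unpack it into a fresh directory whose path is returned.
    data = urlopen(url)
    with tempfile.TemporaryFile() as symbols_file:
        symbols_file.write(data.read())
        symbols_path = tempfile.mkdtemp()
        with zipfile.ZipFile(symbols_file, 'r') as zfile:
            mozfile.extract_zip(zfile, symbols_path)
    return symbols_path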
Example #3
    def test_extract(self):
        """test the generalized extract function"""

        # test extracting a tarball
        tarball = self.create_tarball()
        self.assertTrue(os.path.exists(tarball))
        try:
            dest = tempfile.mkdtemp()
            try:
                mozfile.extract(tarball, dest)
                self.ensure_directory_contents(dest)
            finally:
                shutil.rmtree(dest)
        finally:
            os.remove(tarball)

        # test extracting a zipfile
        _zipfile = self.create_zip()
        self.assertTrue(os.path.exists(_zipfile))
        try:
            dest = tempfile.mkdtemp()
            try:
                mozfile.extract_zip(_zipfile, dest)
                self.ensure_directory_contents(dest)
            finally:
                shutil.rmtree(dest)
        finally:
            os.remove(_zipfile)

        # test extracting some non-archive; this should fail
        fd, filename = tempfile.mkstemp()
        os.write(fd, b'This is not a zipfile or tarball')
        os.close(fd)
        exception = None

        try:
            dest = tempfile.mkdtemp()
            mozfile.extract(filename, dest)
        except Exception as exc:
            exception = exc
        finally:
            os.remove(filename)
            os.rmdir(dest)

        self.assertTrue(isinstance(exception, Exception))
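
The negative case at the end, extracting something that is not an archive, reads more directly in pytest style. A sketch, assuming only that mozfile.extract raises some exception for unrecognized input; the examples do not pin down the exact exception type:

import mozfile
import pytest

def test_extract_non_archive(tmpdir):
    # Sketch: a plain text file is neither a zip nor a tarball, so
    # mozfile.extract is expected to raise; the exception type is left open.
    bogus = tmpdir.join('not-an-archive.txt')
    bogus.write('This is not a zipfile or tarball')
    with pytest.raises(Exception):
        mozfile.extract(bogus.strpath, tmpdir.mkdir('dest').strpath)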
Example #4
    def test_extract(self):
        """test the generalized extract function"""

        # test extracting a tarball
        tarball = self.create_tarball()
        self.assertTrue(os.path.exists(tarball))
        try:
            dest = tempfile.mkdtemp()
            try:
                mozfile.extract(tarball, dest)
                self.ensure_directory_contents(dest)
            finally:
                shutil.rmtree(dest)
        finally:
            os.remove(tarball)

        # test extracting a zipfile
        _zipfile = self.create_zip()
        self.assertTrue(os.path.exists(_zipfile))
        try:
            dest = tempfile.mkdtemp()
            try:
                mozfile.extract_zip(_zipfile, dest)
                self.ensure_directory_contents(dest)
            finally:
                shutil.rmtree(dest)
        finally:
            os.remove(_zipfile)

        # test extracting some non-archive; this should fail
        fd, filename = tempfile.mkstemp()
        os.write(fd, "This is not a zipfile or tarball")
        os.close(fd)
        exception = None
        try:
            dest = tempfile.mkdtemp()
            mozfile.extract(filename, dest)
        except Exception as exc:
            exception = exc
        finally:
            os.remove(filename)
            os.rmdir(dest)
        self.assertTrue(isinstance(exception, Exception))
Example #5
    def _get_symbols(self):
        # If no symbols path has been set create a temporary folder to let the
        # minidump stackwalk download the symbols.
        if not self.symbols_path:
            self.symbols_path = tempfile.mkdtemp()
            self.remove_symbols = True

        # This updates self.symbols_path so we only download once.
        if mozfile.is_url(self.symbols_path):
            self.remove_symbols = True
            self.logger.info("Downloading symbols from: %s" % self.symbols_path)
            # Get the symbols and write them to a temporary zipfile
            data = urlopen(self.symbols_path)
            with tempfile.TemporaryFile() as symbols_file:
                symbols_file.write(data.read())
                # extract symbols to a temporary directory (which we'll delete after
                # processing all crashes)
                self.symbols_path = tempfile.mkdtemp()
                with zipfile.ZipFile(symbols_file, 'r') as zfile:
                    mozfile.extract_zip(zfile, self.symbols_path)
Example #6
def test_extract_zipfile_missing_file_attributes(tmpdir):
    """if files do not have attributes set the default permissions have to be inherited."""
    _zipfile = os.path.join(os.path.dirname(__file__), 'files', 'missing_file_attributes.zip')
    assert os.path.exists(_zipfile)
    dest = tmpdir.mkdir('dest').strpath

    # Get the default file permissions for the user
    fname = os.path.join(dest, 'foo')
    with open(fname, 'w'):
        pass
    default_stmode = os.stat(fname).st_mode

    files = mozfile.extract_zip(_zipfile, dest)
    for filename in files:
        assert os.stat(os.path.join(dest, filename)).st_mode == default_stmode
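
The fixture used here, missing_file_attributes.zip, is an archive whose entries carry no external attributes, so no POSIX mode bits are stored. One way to produce such an archive with the standard zipfile module is sketched below; a bare ZipInfo defaults external_attr to 0:

import zipfile

# Sketch: entries written through a bare ZipInfo get external_attr == 0,
# i.e. no stored permissions, which is the case the test above exercises.
with zipfile.ZipFile('missing_file_attributes.zip', 'w') as zf:
    zf.writestr(zipfile.ZipInfo('dir/file.txt'), b'contents')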
Example #7
    def test_extract_zipfile_missing_file_attributes(self):
        """if files do not have attributes set the default permissions have to be inherited."""
        _zipfile = os.path.join(os.path.dirname(__file__), "files", "missing_file_attributes.zip")
        self.assertTrue(os.path.exists(_zipfile))
        dest = tempfile.mkdtemp()
        try:
            # Get the default file permissions for the user
            fname = os.path.join(dest, "foo")
            with open(fname, "w"):
                pass
            default_stmode = os.stat(fname).st_mode

            files = mozfile.extract_zip(_zipfile, dest)
            for filename in files:
                self.assertEqual(os.stat(os.path.join(dest, filename)).st_mode, default_stmode)
        finally:
            shutil.rmtree(dest)
Example #8
def check_for_crashes(dump_directory, symbols_path,
                      stackwalk_binary=None,
                      dump_save_path=None,
                      test_name=None,
                      quiet=False):
    """
    Print a stack trace for minidump files left behind by a crashing program.

    `dump_directory` will be searched for minidump files. Any minidump files found will
    have `stackwalk_binary` executed on them, with `symbols_path` passed as an extra
    argument.

    `stackwalk_binary` should be a path to the minidump_stackwalk binary.
    If `stackwalk_binary` is not set, the MINIDUMP_STACKWALK environment variable
    will be checked and its value used if it is not empty.

    `symbols_path` should be a path to a directory containing symbols to use for
    dump processing. This can either be a path to a directory containing Breakpad-format
    symbols, or a URL to a zip file containing a set of symbols.

    If `dump_save_path` is set, it should be a path to a directory in which to copy minidump
    files for safekeeping after a stack trace has been printed. If not set, the environment
    variable MINIDUMP_SAVE_PATH will be checked and its value used if it is not empty.

    If `test_name` is set it will be used as the test name in log output. If not set the
    filename of the calling function will be used.

    If `quiet` is set, no PROCESS-CRASH message will be printed to stdout if a
    crash is detected.

    Returns True if any minidumps were found, False otherwise.
    """
    dumps = glob.glob(os.path.join(dump_directory, '*.dmp'))
    if not dumps:
        return False

    if stackwalk_binary is None:
        stackwalk_binary = os.environ.get('MINIDUMP_STACKWALK', None)

    # try to get the caller's filename if no test name is given
    if test_name is None:
        try:
            test_name = os.path.basename(sys._getframe(1).f_code.co_filename)
        except:
            test_name = "unknown"

    try:
        log = mozlog.getLogger('mozcrash')
        remove_symbols = False
        # If our symbols are at a remote URL, download them now
        # We want to download URLs like http://... but not Windows paths like c:\...
        if symbols_path and mozfile.is_url(symbols_path):
            log.info("Downloading symbols from: %s", symbols_path)
            remove_symbols = True
            # Get the symbols and write them to a temporary zipfile
            data = urllib2.urlopen(symbols_path)
            symbols_file = tempfile.TemporaryFile()
            symbols_file.write(data.read())
            # extract symbols to a temporary directory (which we'll delete after
            # processing all crashes)
            symbols_path = tempfile.mkdtemp()
            zfile = zipfile.ZipFile(symbols_file, 'r')
            mozfile.extract_zip(zfile, symbols_path)
            zfile.close()

        for d in dumps:
            extra = os.path.splitext(d)[0] + '.extra'

            stackwalk_output = []
            stackwalk_output.append("Crash dump filename: " + d)
            top_frame = None
            if symbols_path and stackwalk_binary and os.path.exists(stackwalk_binary):
                # run minidump_stackwalk
                p = subprocess.Popen([stackwalk_binary, d, symbols_path],
                                     stdout=subprocess.PIPE,
                                     stderr=subprocess.PIPE)
                (out, err) = p.communicate()
                if len(out) > 3:
                    # minidump_stackwalk is chatty,
                    # so ignore stderr when it succeeds.
                    stackwalk_output.append(out)
                    # The top frame of the crash is always the line after "Thread N (crashed)"
                    # Examples:
                    #  0  libc.so + 0xa888
                    #  0  libnss3.so!nssCertificate_Destroy [certificate.c : 102 + 0x0]
                    #  0  mozjs.dll!js::GlobalObject::getDebuggers() [GlobalObject.cpp:89df18f9b6da : 580 + 0x0]
                    #  0  libxul.so!void js::gc::MarkInternal<JSObject>(JSTracer*, JSObject**) [Marking.cpp : 92 + 0x28]
                    lines = out.splitlines()
                    for i, line in enumerate(lines):
                        if "(crashed)" in line:
                            match = re.search(r"^ 0  (?:.*!)?(?:void )?([^\[]+)", lines[i+1])
                            if match:
                                top_frame = "@ %s" % match.group(1).strip()
                            break
                else:
                    stackwalk_output.append("stderr from minidump_stackwalk:")
                    stackwalk_output.append(err)
                if p.returncode != 0:
                    stackwalk_output.append("minidump_stackwalk exited with return code %d" % p.returncode)
            else:
                if not symbols_path:
                    stackwalk_output.append("No symbols path given, can't process dump.")
                if not stackwalk_binary:
                    stackwalk_output.append("MINIDUMP_STACKWALK not set, can't process dump.")
                elif stackwalk_binary and not os.path.exists(stackwalk_binary):
                    stackwalk_output.append("MINIDUMP_STACKWALK binary not found: %s" % stackwalk_binary)
            if not top_frame:
                top_frame = "Unknown top frame"
            if not quiet:
                print "PROCESS-CRASH | %s | application crashed [%s]" % (test_name, top_frame)
                print '\n'.join(stackwalk_output)
            if dump_save_path is None:
                dump_save_path = os.environ.get('MINIDUMP_SAVE_PATH', None)
            if dump_save_path:
                # This code did not previously create the directory,
                # so there may be a file hanging out with its name.
                if os.path.isfile(dump_save_path):
                    os.unlink(dump_save_path)
                if not os.path.isdir(dump_save_path):
                    try:
                        os.makedirs(dump_save_path)
                    except OSError:
                        pass

                shutil.move(d, dump_save_path)
                log.info("Saved minidump as %s",
                         os.path.join(dump_save_path, os.path.basename(d)))

                if os.path.isfile(extra):
                    shutil.move(extra, dump_save_path)
                    log.info("Saved app info as %s",
                             os.path.join(dump_save_path, os.path.basename(extra)))
            else:
                mozfile.remove(d)
                mozfile.remove(extra)
    finally:
        if remove_symbols:
            mozfile.remove(symbols_path)

    return True
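
check_for_crashes is typically called after a test run, pointed at the directory where the crashed process left its minidumps. A hypothetical invocation; both paths below are placeholders, not values taken from the code above:

# Sketch: scan a finished run's minidump directory and print stack traces.
found = check_for_crashes('/tmp/profile/minidumps',
                          symbols_path='https://example.com/target.crashreporter-symbols.zip',
                          test_name='test_example.py')
if found:
    print("at least one crash dump was processed")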
Example #9
    def android_geckoview_docs(self, archive, upload, upload_branch,
                               upload_message):

        tasks = (self.substs['GRADLE_ANDROID_GECKOVIEW_DOCS_ARCHIVE_TASKS']
                 if archive or upload else
                 self.substs['GRADLE_ANDROID_GECKOVIEW_DOCS_TASKS'])

        ret = self.gradle(tasks, verbose=True)
        if ret or not upload:
            return ret

        # Upload to Github.
        fmt = {
            'level': os.environ.get('MOZ_SCM_LEVEL', '0'),
            'project': os.environ.get('MH_BRANCH', 'unknown'),
            'revision': os.environ.get('GECKO_HEAD_REV', 'tip'),
        }
        env = {}

        # In order to push to GitHub from TaskCluster, we store a private key
        # in the TaskCluster secrets store in the format {"content": "<KEY>"},
        # and the corresponding public key as a writable deploy key for the
        # destination repo on GitHub.
        secret = os.environ.get('GECKOVIEW_DOCS_UPLOAD_SECRET',
                                '').format(**fmt)
        if secret:
            # Set up a private key from the secrets store if applicable.
            import requests
            req = requests.get('http://taskcluster/secrets/v1/secret/' +
                               secret)
            req.raise_for_status()

            keyfile = mozpath.abspath('gv-docs-upload-key')
            with open(keyfile, 'w') as f:
                os.chmod(keyfile, 0o600)
                f.write(req.json()['secret']['content'])

            # Turn off strict host key checking so ssh does not complain about
            # unknown github.com host. We're not pushing anything sensitive, so
            # it's okay to not check GitHub's host keys.
            env['GIT_SSH_COMMAND'] = 'ssh -i "%s" -o StrictHostKeyChecking=no' % keyfile

        # Clone remote repo.
        branch, _, branch_path = upload_branch.partition('/')
        repo_url = '[email protected]:%s.git' % upload
        repo_path = mozpath.abspath('gv-docs-repo')
        self.run_process([
            'git', 'clone', '--branch', branch, '--depth', '1', repo_url,
            repo_path
        ],
                         append_env=env,
                         pass_thru=True)
        env['GIT_DIR'] = mozpath.join(repo_path, '.git')
        env['GIT_WORK_TREE'] = repo_path
        env['GIT_AUTHOR_NAME'] = env[
            'GIT_COMMITTER_NAME'] = 'GeckoView Docs Bot'
        env['GIT_AUTHOR_EMAIL'] = env[
            'GIT_COMMITTER_EMAIL'] = '*****@*****.**'

        # Extract new javadoc to specified directory inside repo.
        import mozfile
        src_tar = mozpath.join(self.topobjdir, 'gradle', 'build', 'mobile',
                               'android', 'geckoview', 'libs',
                               'geckoview-javadoc.jar')
        dst_path = mozpath.join(repo_path, branch_path.format(**fmt))
        mozfile.remove(dst_path)
        mozfile.extract_zip(src_tar, dst_path)

        # Commit and push.
        self.run_process(['git', 'add', '--all'],
                         append_env=env,
                         pass_thru=True)
        if self.run_process(['git', 'diff', '--cached', '--quiet'],
                            append_env=env,
                            pass_thru=True,
                            ensure_exit_code=False) != 0:
            # We have something to commit.
            self.run_process(
                ['git', 'commit', '--message',
                 upload_message.format(**fmt)],
                append_env=env,
                pass_thru=True)
            self.run_process(['git', 'push', 'origin', 'gh-pages'],
                             append_env=env,
                             pass_thru=True)

        mozfile.remove(repo_path)
        if secret:
            mozfile.remove(keyfile)
        return 0
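
The javadoc archive handled here is a .jar, which is an ordinary zip file, so mozfile.extract_zip can unpack it directly; the core of the docs update is the remove-then-extract pair. A stripped-down sketch with placeholder paths:

import mozfile

# Sketch: clear any stale output first, then unpack the fresh javadoc into
# the docs checkout. Both paths are placeholders; the real jar lives under
# the object directory and the destination inside the cloned docs repo.
src_jar = 'geckoview-javadoc.jar'
dst_path = 'gv-docs-repo/javadoc/geckoview'
mozfile.remove(dst_path)
mozfile.extract_zip(src_jar, dst_path)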
Example #10
    def run_awsy(self, tests, binary=None, **kwargs):
        import json
        from mozlog.structured import commandline

        from marionette_harness.runtests import (MarionetteTestRunner,
                                                 MarionetteHarness)

        parser = setup_awsy_argument_parser()

        if not tests:
            tests = [
                os.path.join(self.topsrcdir,
                             'testing/awsy/awsy/test_memory_usage.py')
            ]

        args = argparse.Namespace(tests=tests)

        args.binary = binary

        if kwargs['quick']:
            kwargs['entities'] = 3
            kwargs['iterations'] = 1
            kwargs['perTabPause'] = 1
            kwargs['settleWaitTime'] = 1

        runtime_testvars = {}
        for arg in ('webRootDir', 'pageManifest', 'resultsDir', 'entities',
                    'iterations', 'perTabPause', 'settleWaitTime', 'maxTabs'):
            if kwargs[arg]:
                runtime_testvars[arg] = kwargs[arg]

        if 'webRootDir' not in runtime_testvars:
            awsy_tests_dir = os.path.join(self.topobjdir, '_tests', 'awsy')
            web_root_dir = os.path.join(awsy_tests_dir, 'html')
            runtime_testvars['webRootDir'] = web_root_dir
        else:
            web_root_dir = runtime_testvars['webRootDir']
            awsy_tests_dir = os.path.dirname(web_root_dir)

        if 'resultsDir' not in runtime_testvars:
            runtime_testvars['resultsDir'] = os.path.join(
                awsy_tests_dir, 'results')
        page_load_test_dir = os.path.join(web_root_dir, 'page_load_test')
        if not os.path.isdir(page_load_test_dir):
            os.makedirs(page_load_test_dir)

        if not os.path.isdir(runtime_testvars["resultsDir"]):
            os.makedirs(runtime_testvars["resultsDir"])

        runtime_testvars_path = os.path.join(awsy_tests_dir,
                                             'runtime-testvars.json')
        if kwargs['testvars']:
            kwargs['testvars'].append(runtime_testvars_path)
        else:
            kwargs['testvars'] = [runtime_testvars_path]

        with open(runtime_testvars_path, 'w') as runtime_testvars_file:
            runtime_testvars_file.write(json.dumps(runtime_testvars, indent=2))

        if not kwargs['webRootDir']:
            # Populate the Awsy webroot if not specified by the user.
            manifest_file = os.path.join(self.topsrcdir, 'testing', 'awsy',
                                         'tp5n-pageset.manifest')
            tooltool_args = {
                "args": [
                    os.path.join(
                        self.topsrcdir,
                        "python/mozbuild/mozbuild/action/tooltool.py"),
                    "--manifest=%s" % manifest_file, "--unpack",
                    "--cache-folder=%s" %
                    os.path.join(self.topsrcdir, "tooltool-cache"), "fetch"
                ]
            }

            self.run_process(cwd=page_load_test_dir, **tooltool_args)
            tp5nzip = os.path.join(page_load_test_dir, 'tp5n.zip')
            tp5nmanifest = os.path.join(page_load_test_dir, 'tp5n',
                                        'tp5n.manifest')
            if not os.path.exists(tp5nmanifest):
                files = mozfile.extract_zip(tp5nzip, page_load_test_dir)

        for k, v in kwargs.iteritems():
            setattr(args, k, v)

        parser.verify_usage(args)

        args.logger = commandline.setup_logging("Are We Slim Yet Tests", args,
                                                {"mach": sys.stdout})
        failed = MarionetteHarness(MarionetteTestRunner, args=vars(args)).run()
        if failed > 0:
            return 1
        else:
            return 0
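
The pageset handling above extracts tp5n.zip only when the tp5n.manifest file is absent, so repeated runs skip the unpack. A small sketch of that guard in isolation; the helper name is illustrative, and it assumes, as Examples #6 and #10 do, that mozfile.extract_zip returns the list of extracted files:

import os

import mozfile

def ensure_pageset_extracted(tp5n_zip, page_load_test_dir):
    # Sketch: use the manifest inside the extracted tree as a sentinel and
    # only unpack the pageset when it is missing.
    manifest = os.path.join(page_load_test_dir, 'tp5n', 'tp5n.manifest')
    if os.path.exists(manifest):
        return []
    return mozfile.extract_zip(tp5n_zip, page_load_test_dir)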
Example #11
    def android_geckoview_docs(
        self,
        command_context,
        archive,
        upload,
        upload_branch,
        javadoc_path,
        upload_message,
    ):

        tasks = (self.substs["GRADLE_ANDROID_GECKOVIEW_DOCS_ARCHIVE_TASKS"]
                 if archive or upload else
                 self.substs["GRADLE_ANDROID_GECKOVIEW_DOCS_TASKS"])

        ret = self.gradle(command_context, tasks, verbose=True)
        if ret or not upload:
            return ret

        # Upload to Github.
        fmt = {
            "level": os.environ.get("MOZ_SCM_LEVEL", "0"),
            "project": os.environ.get("MH_BRANCH", "unknown"),
            "revision": os.environ.get("GECKO_HEAD_REV", "tip"),
        }
        env = {}

        # In order to push to GitHub from TaskCluster, we store a private key
        # in the TaskCluster secrets store in the format {"content": "<KEY>"},
        # and the corresponding public key as a writable deploy key for the
        # destination repo on GitHub.
        secret = os.environ.get("GECKOVIEW_DOCS_UPLOAD_SECRET",
                                "").format(**fmt)
        if secret:
            # Set up a private key from the secrets store if applicable.
            import requests

            req = requests.get("http://taskcluster/secrets/v1/secret/" +
                               secret)
            req.raise_for_status()

            keyfile = mozpath.abspath("gv-docs-upload-key")
            with open(keyfile, "w") as f:
                os.chmod(keyfile, 0o600)
                f.write(req.json()["secret"]["content"])

            # Turn off strict host key checking so ssh does not complain about
            # unknown github.com host. We're not pushing anything sensitive, so
            # it's okay to not check GitHub's host keys.
            env["GIT_SSH_COMMAND"] = 'ssh -i "%s" -o StrictHostKeyChecking=no' % keyfile

        # Clone remote repo.
        branch = upload_branch.format(**fmt)
        repo_url = "[email protected]:%s.git" % upload
        repo_path = mozpath.abspath("gv-docs-repo")
        self.run_process(
            [
                "git",
                "clone",
                "--branch",
                upload_branch,
                "--depth",
                "1",
                repo_url,
                repo_path,
            ],
            append_env=env,
            pass_thru=True,
        )
        env["GIT_DIR"] = mozpath.join(repo_path, ".git")
        env["GIT_WORK_TREE"] = repo_path
        env["GIT_AUTHOR_NAME"] = env[
            "GIT_COMMITTER_NAME"] = "GeckoView Docs Bot"
        env["GIT_AUTHOR_EMAIL"] = env[
            "GIT_COMMITTER_EMAIL"] = "*****@*****.**"

        # Copy over user documentation.
        import mozfile

        # Extract new javadoc to specified directory inside repo.
        src_tar = mozpath.join(
            self.topobjdir,
            "gradle",
            "build",
            "mobile",
            "android",
            "geckoview",
            "libs",
            "geckoview-javadoc.jar",
        )
        dst_path = mozpath.join(repo_path, javadoc_path.format(**fmt))
        mozfile.remove(dst_path)
        mozfile.extract_zip(src_tar, dst_path)

        # Commit and push.
        self.run_process(["git", "add", "--all"],
                         append_env=env,
                         pass_thru=True)
        if (self.run_process(
            ["git", "diff", "--cached", "--quiet"],
                append_env=env,
                pass_thru=True,
                ensure_exit_code=False,
        ) != 0):
            # We have something to commit.
            self.run_process(
                ["git", "commit", "--message",
                 upload_message.format(**fmt)],
                append_env=env,
                pass_thru=True,
            )
            self.run_process(["git", "push", "origin", branch],
                             append_env=env,
                             pass_thru=True)

        mozfile.remove(repo_path)
        if secret:
            mozfile.remove(keyfile)
        return 0
Example #12
    def android_geckoview_docs(self, archive, upload, upload_branch,
                               upload_message, variant):

        def capitalize(s):
            # Can't use str.capitalize because it lower cases trailing letters.
            return (s[0].upper() + s[1:]) if s else ''

        task = 'geckoview:javadoc' + ('Jar' if archive or upload else '') + capitalize(variant)
        ret = self.gradle([task], verbose=True)
        if ret or not upload:
            return ret

        # Upload to Github.
        fmt = {
            'level': os.environ.get('MOZ_SCM_LEVEL', '0'),
            'project': os.environ.get('MH_BRANCH', 'unknown'),
            'revision': os.environ.get('GECKO_HEAD_REV', 'tip'),
        }
        env = {}

        # In order to push to GitHub from TaskCluster, we store a private key
        # in the TaskCluster secrets store in the format {"content": "<KEY>"},
        # and the corresponding public key as a writable deploy key for the
        # destination repo on GitHub.
        secret = os.environ.get('GECKOVIEW_DOCS_UPLOAD_SECRET', '').format(**fmt)
        if secret:
            # Set up a private key from the secrets store if applicable.
            import requests
            req = requests.get('http://taskcluster/secrets/v1/secret/' + secret)
            req.raise_for_status()

            keyfile = mozpath.abspath('gv-docs-upload-key')
            with open(keyfile, 'w') as f:
                os.chmod(keyfile, 0o600)
                f.write(req.json()['secret']['content'])

            # Turn off strict host key checking so ssh does not complain about
            # unknown github.com host. We're not pushing anything sensitive, so
            # it's okay to not check GitHub's host keys.
            env['GIT_SSH_COMMAND'] = 'ssh -i "%s" -o StrictHostKeyChecking=no' % keyfile

        # Clone remote repo.
        branch, _, branch_path = upload_branch.partition('/')
        repo_url = '[email protected]:%s.git' % upload
        repo_path = mozpath.abspath('gv-docs-repo')
        self.run_process(['git', 'clone', '--branch', branch, '--depth', '1',
                          repo_url, repo_path], append_env=env, pass_thru=True)
        env['GIT_DIR'] = mozpath.join(repo_path, '.git')
        env['GIT_WORK_TREE'] = repo_path
        env['GIT_AUTHOR_NAME'] = env['GIT_COMMITTER_NAME'] = 'GeckoView Docs Bot'
        env['GIT_AUTHOR_EMAIL'] = env['GIT_COMMITTER_EMAIL'] = '*****@*****.**'

        # Extract new javadoc to specified directory inside repo.
        import mozfile
        src_tar = mozpath.join(self.topobjdir, 'gradle', 'build', 'mobile', 'android',
                               'geckoview', 'libs', 'geckoview-javadoc.jar')
        dst_path = mozpath.join(repo_path, branch_path.format(**fmt))
        mozfile.remove(dst_path)
        mozfile.extract_zip(src_tar, dst_path)

        # Commit and push.
        self.run_process(['git', 'add', '--all'], append_env=env, pass_thru=True)
        if self.run_process(['git', 'diff', '--cached', '--quiet'],
                            append_env=env, pass_thru=True, ensure_exit_code=False) != 0:
            # We have something to commit.
            self.run_process(['git', 'commit',
                              '--message', upload_message.format(**fmt)],
                             append_env=env, pass_thru=True)
            self.run_process(['git', 'push', 'origin', 'gh-pages'],
                             append_env=env, pass_thru=True)

        mozfile.remove(repo_path)
        if secret:
            mozfile.remove(keyfile)
        return 0