Example #1
    def setup(self, should_clobber=False):
        r'''Install browsertime and visualmetrics.py requirements.'''

        from mozbuild.action.tooltool import unpack_file
        from mozbuild.artifact_cache import ArtifactCache
        sys.path.append(mozpath.join(self.topsrcdir, 'tools', 'lint', 'eslint'))
        import setup_helper

        # Download the visualmetrics.py requirements.
        artifact_cache = ArtifactCache(self.artifact_cache_path,
                                       log=self.log, skip_cache=False)

        fetches = host_fetches[host_platform()]
        for tool, fetch in sorted(fetches.items()):
            archive = artifact_cache.fetch(fetch['url'])
            # TODO: assert type, verify sha256 (and size?).

            if fetch.get('unpack', True):
                cwd = os.getcwd()
                try:
                    os.chdir(self.state_path)
                    self.log(
                        logging.INFO,
                        'browsertime',
                        {'path': archive},
                        'Unpacking temporary location {path}')
                    unpack_file(archive)
                finally:
                    os.chdir(cwd)

        # Install the browsertime Node.js requirements.
        if not setup_helper.check_node_executables_valid():
            return 1

        self.log(
            logging.INFO,
            'browsertime',
            {'package_json': mozpath.join(BROWSERTIME_ROOT, 'package.json')},
            'Installing browsertime node module from {package_json}')
        status = setup_helper.package_setup(
            BROWSERTIME_ROOT,
            'browsertime',
            should_clobber=should_clobber)

        if status:
            return status

        return self.check()
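The core pattern above (fetch an archive through the local artifact cache, then unpack it inside a target directory while restoring the caller's working directory) can be isolated into a small helper. A minimal sketch, assuming only the `ArtifactCache.fetch(url)` and `unpack_file(path)` behavior shown in the example (`fetch_and_unpack` is a hypothetical name):

    import os

    from mozbuild.action.tooltool import unpack_file

    def fetch_and_unpack(artifact_cache, url, dest_dir):
        '''Fetch `url` via the cache and unpack it inside `dest_dir`.'''
        # fetch() downloads (or reuses) the artifact and returns a local path.
        archive = artifact_cache.fetch(url)
        cwd = os.getcwd()
        try:
            # unpack_file() extracts into the current working directory.
            os.chdir(dest_dir)
            unpack_file(archive)
        finally:
            # Always restore the original working directory.
            os.chdir(cwd)
        return archive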
Example #2
    def __init__(self, tree, substs, defines, job=None, log=None,
                 cache_dir='.', hg=None, git=None, skip_cache=False,
                 topsrcdir=None, download_tests=True, download_symbols=False,
                 download_host_bins=False):
        if (hg and git) or (not hg and not git):
            raise ValueError("Must provide path to exactly one of hg and git")

        self._substs = substs
        self._defines = defines
        self._tree = tree
        self._job = job or self._guess_artifact_job()
        self._log = log
        self._hg = hg
        self._git = git
        self._cache_dir = cache_dir
        self._skip_cache = skip_cache
        self._topsrcdir = topsrcdir

        app = self._substs.get('MOZ_BUILD_APP')
        job_details = COMM_JOB_DETAILS if app == 'comm/mail' else MOZ_JOB_DETAILS

        try:
            cls = job_details[self._job]
            self._artifact_job = cls(log=self._log,
                                     download_tests=download_tests,
                                     download_symbols=download_symbols,
                                     download_host_bins=download_host_bins,
                                     substs=self._substs)
        except KeyError:
            self.log(logging.INFO, 'artifact',
                {'job': self._job},
                'Unknown job {job}')
            raise KeyError("Unknown job")

        self._task_cache = TaskCache(self._cache_dir, log=self._log, skip_cache=self._skip_cache)
        self._artifact_cache = ArtifactCache(self._cache_dir, log=self._log, skip_cache=self._skip_cache)
        self._pushhead_cache = PushheadCache(self._cache_dir, log=self._log, skip_cache=self._skip_cache)
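The constructor's first check enforces that exactly one of `hg` and `git` is provided: `(hg and git) or (not hg and not git)` is an exclusive-or over truthiness. A hedged sketch of the same invariant as a reusable check (`require_exactly_one` is a hypothetical helper, not part of mozbuild):

    def require_exactly_one(**kwargs):
        '''Return the name of the single truthy keyword argument.'''
        provided = [name for name, value in kwargs.items() if value]
        if len(provided) != 1:
            raise ValueError('Must provide exactly one of: %s'
                             % ', '.join(sorted(kwargs)))
        return provided[0]

    # Usage inside such a constructor:
    #     vcs = require_exactly_one(hg=hg, git=git)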
Example #3
    def setup_prerequisites(self):
        r"""Install browsertime and visualmetrics.py prerequisites."""

        from mozbuild.action.tooltool import unpack_file
        from mozbuild.artifact_cache import ArtifactCache

        if not AUTOMATION and host_platform().startswith("linux"):
            # On Linux ImageMagick needs to be installed manually, and `mach bootstrap` doesn't
            # do that (yet).  Provide some guidance.
            try:
                from shutil import which
            except ImportError:
                from shutil_which import which

            im_programs = ("compare", "convert", "mogrify")
            for im_program in im_programs:
                prog = which(im_program)
                if not prog:
                    print(
                        "Error: On Linux, ImageMagick must be on the PATH. "
                        "Install ImageMagick manually and try again (or update PATH). "
                        "On Ubuntu and Debian, try `sudo apt-get install imagemagick`. "
                        "On Fedora, try `sudo dnf install imagemagick`. "
                        "On CentOS, try `sudo yum install imagemagick`.")
                    return 1

        # Download the visualmetrics.py requirements.
        artifact_cache = ArtifactCache(self.artifact_cache_path,
                                       log=self.log,
                                       skip_cache=False)

        fetches = host_fetches[host_platform()]
        for tool, fetch in sorted(fetches.items()):
            archive = artifact_cache.fetch(fetch["url"])
            # TODO: assert type, verify sha256 (and size?).

            if fetch.get("unpack", True):
                cwd = os.getcwd()
                try:
                    mkdir(self.state_path)
                    os.chdir(self.state_path)
                    self.log(
                        logging.INFO,
                        "browsertime",
                        {"path": archive},
                        "Unpacking temporary location {path}",
                    )

                    if "win64" in host_platform(
                    ) and "imagemagick" in tool.lower():
                        # Windows archive does not contain a subfolder
                        # so we make one for it here
                        mkdir(fetch.get("path"))
                        os.chdir(
                            os.path.join(self.state_path, fetch.get("path")))
                        unpack_file(archive)
                        os.chdir(self.state_path)
                    else:
                        unpack_file(archive)

                    # Make sure the expected path exists after extraction
                    path = os.path.join(self.state_path, fetch.get("path"))
                    if not os.path.exists(path):
                        raise Exception(
                            "Cannot find an extracted directory: %s" % path)

                    try:
                        # Some archives provide binaries that don't have the
                        # executable bit set so we need to set it here
                        for root, dirs, files in os.walk(path):
                            for edir in dirs:
                                loc_to_change = os.path.join(root, edir)
                                st = os.stat(loc_to_change)
                                os.chmod(loc_to_change,
                                         st.st_mode | stat.S_IEXEC)
                            for efile in files:
                                loc_to_change = os.path.join(root, efile)
                                st = os.stat(loc_to_change)
                                os.chmod(loc_to_change,
                                         st.st_mode | stat.S_IEXEC)
                    except Exception as e:
                        raise Exception(
                            "Could not set executable bit in %s, error: %s" %
                            (path, str(e)))
                finally:
                    os.chdir(cwd)
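The chmod walk at the end duplicates the same three lines for directories and files; the logic can be factored into a standalone helper. A minimal sketch (`make_tree_executable` is a hypothetical name):

    import os
    import stat

    def make_tree_executable(path):
        '''Set the owner-executable bit on every entry under `path`.'''
        for root, dirs, files in os.walk(path):
            for name in dirs + files:
                loc_to_change = os.path.join(root, name)
                st = os.stat(loc_to_change)
                os.chmod(loc_to_change, st.st_mode | stat.S_IEXEC)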
Example #4
    def setup(self, should_clobber=False):
        r'''Install browsertime and visualmetrics.py requirements.'''

        automation = bool(os.environ.get('MOZ_AUTOMATION'))

        from mozbuild.action.tooltool import unpack_file
        from mozbuild.artifact_cache import ArtifactCache
        sys.path.append(mozpath.join(self.topsrcdir, 'tools', 'lint', 'eslint'))
        import setup_helper

        if not os.environ.get('MOZ_AUTOMATION') and host_platform().startswith('linux'):
            # On Linux ImageMagick needs to be installed manually, and `mach bootstrap` doesn't
            # do that (yet).  Provide some guidance.
            try:
                from shutil import which
            except ImportError:
                from shutil_which import which

            im_programs = ('compare', 'convert', 'mogrify')
            for im_program in im_programs:
                prog = which(im_program)
                if not prog:
                    print('Error: On Linux, ImageMagick must be on the PATH. '
                          'Install ImageMagick manually and try again (or update PATH). '
                          'On Ubuntu and Debian, try `sudo apt-get install imagemagick`. '
                          'On Fedora, try `sudo dnf install imagemagick`. '
                          'On CentOS, try `sudo yum install imagemagick`.')
                    return 1

        # Download the visualmetrics.py requirements.
        artifact_cache = ArtifactCache(self.artifact_cache_path,
                                       log=self.log, skip_cache=False)

        fetches = host_fetches[host_platform()]
        for tool, fetch in sorted(fetches.items()):
            archive = artifact_cache.fetch(fetch['url'])
            # TODO: assert type, verify sha256 (and size?).

            if fetch.get('unpack', True):
                cwd = os.getcwd()
                try:
                    mkdir(self.state_path)
                    os.chdir(self.state_path)
                    self.log(
                        logging.INFO,
                        'browsertime',
                        {'path': archive},
                        'Unpacking temporary location {path}')
                    unpack_file(archive)

                    # Make sure the expected path exists after extraction
                    path = os.path.join(self.state_path, fetch.get('path'))
                    if not os.path.exists(path):
                        raise Exception("Cannot find an extracted directory: %s" % path)

                    try:
                        # Some archives provide binaries that don't have the
                        # executable bit set so we need to set it here
                        for root, dirs, files in os.walk(path):
                            for edir in dirs:
                                loc_to_change = os.path.join(root, edir)
                                st = os.stat(loc_to_change)
                                os.chmod(loc_to_change, st.st_mode | stat.S_IEXEC)
                            for efile in files:
                                loc_to_change = os.path.join(root, efile)
                                st = os.stat(loc_to_change)
                                os.chmod(loc_to_change, st.st_mode | stat.S_IEXEC)
                    except Exception as e:
                        raise Exception(
                            "Could not set executable bit in %s, error: %s" % (path, str(e))
                        )
                finally:
                    os.chdir(cwd)

        # Install the browsertime Node.js requirements.
        if not setup_helper.check_node_executables_valid():
            return 1

        if 'GECKODRIVER_BASE_URL' not in os.environ:
            # Use custom `geckodriver` with pre-release Android support.
            url = 'https://github.com/ncalexan/geckodriver/releases/download/v0.24.0-android/'
            os.environ['GECKODRIVER_BASE_URL'] = url

        self.log(
            logging.INFO,
            'browsertime',
            {'package_json': mozpath.join(BROWSERTIME_ROOT, 'package.json')},
            'Installing browsertime node module from {package_json}')
        status = setup_helper.package_setup(
            BROWSERTIME_ROOT,
            'browsertime',
            should_clobber=should_clobber,
            no_optional=automation)

        if status:
            return status

        if automation:
            return 0

        return self.check()
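The `GECKODRIVER_BASE_URL` block is the usual "default an environment variable only when unset" pattern; `os.environ.setdefault` expresses it in a single call. An equivalent hedged rewrite:

    import os

    # setdefault() assigns the URL only if the variable is not already set.
    os.environ.setdefault(
        'GECKODRIVER_BASE_URL',
        'https://github.com/ncalexan/geckodriver/releases/download/'
        'v0.24.0-android/')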
Example #5
    def setup(self, should_clobber=False):
        r'''Install browsertime and visualmetrics.py requirements.'''

        from mozbuild.action.tooltool import unpack_file
        from mozbuild.artifact_cache import ArtifactCache
        sys.path.append(mozpath.join(self.topsrcdir, 'tools', 'lint',
                                     'eslint'))
        import setup_helper

        if host_platform().startswith('linux'):
            # On Linux ImageMagick needs to be installed manually, and `mach bootstrap` doesn't
            # do that (yet).  Provide some guidance.
            import which
            im_programs = ('compare', 'convert', 'mogrify')
            try:
                for im_program in im_programs:
                    which.which(im_program)
            except which.WhichError as e:
                print(
                    'Error: {} On Linux, ImageMagick must be on the PATH. '
                    'Install ImageMagick manually and try again (or update PATH). '
                    'On Ubuntu and Debian, try `sudo apt-get install imagemagick`. '
                    'On Fedora, try `sudo dnf install imagemagick`. '
                    'On CentOS, try `sudo yum install imagemagick`.'.format(e))
                return 1

        # Download the visualmetrics.py requirements.
        artifact_cache = ArtifactCache(self.artifact_cache_path,
                                       log=self.log,
                                       skip_cache=False)

        fetches = host_fetches[host_platform()]
        for tool, fetch in sorted(fetches.items()):
            archive = artifact_cache.fetch(fetch['url'])
            # TODO: assert type, verify sha256 (and size?).

            if fetch.get('unpack', True):
                cwd = os.getcwd()
                try:
                    mkdir(self.state_path)
                    os.chdir(self.state_path)
                    self.log(logging.INFO, 'browsertime', {'path': archive},
                             'Unpacking temporary location {path}')
                    unpack_file(archive)
                finally:
                    os.chdir(cwd)

        # Install the browsertime Node.js requirements.
        if not setup_helper.check_node_executables_valid():
            return 1

        if 'GECKODRIVER_BASE_URL' not in os.environ:
            # Use custom `geckodriver` with pre-release Android support.
            url = 'https://github.com/ncalexan/geckodriver/releases/download/v0.24.0-android/'
            os.environ['GECKODRIVER_BASE_URL'] = url

        self.log(
            logging.INFO, 'browsertime',
            {'package_json': mozpath.join(BROWSERTIME_ROOT, 'package.json')},
            'Installing browsertime node module from {package_json}')
        status = setup_helper.package_setup(BROWSERTIME_ROOT,
                                            'browsertime',
                                            should_clobber=should_clobber)

        if status:
            return status

        return self.check()
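Unlike Examples #3 and #4, this variant uses the third-party `which` module, which signals a missing program by raising `which.WhichError` rather than returning `None`. On Python 3 the same PATH check needs only the standard library; a minimal sketch:

    from shutil import which  # standard library on Python 3.3+

    im_programs = ('compare', 'convert', 'mogrify')
    missing = [p for p in im_programs if which(p) is None]
    if missing:
        print('Error: ImageMagick programs missing from PATH: '
              + ', '.join(missing))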
Example #6
    def test_artifact_cache_persistence(self):
        cache = ArtifactCache(self.tmpdir)
        cache._download_manager.session = FakeSession()

        path = cache.fetch('http://server/foo')
        expected = [os.path.basename(path)]
        self.assertEqual(self.listtmpdir(), expected)

        path = cache.fetch('http://server/bar')
        expected.append(os.path.basename(path))
        self.assertEqual(sorted(self.listtmpdir()), sorted(expected))

        # We're downloading more than the cache allows, but since it's all
        # in the same session, no purge happens.
        path = cache.fetch('http://server/qux')
        expected.append(os.path.basename(path))
        self.assertEqual(sorted(self.listtmpdir()), sorted(expected))

        path = cache.fetch('http://server/fuga')
        expected.append(os.path.basename(path))
        self.assertEqual(sorted(self.listtmpdir()), sorted(expected))

        cache = ArtifactCache(self.tmpdir)
        cache._download_manager.session = FakeSession()

        # Downloading a new file in a new session purges the oldest files in
        # the cache.
        path = cache.fetch('http://server/hoge')
        expected.append(os.path.basename(path))
        expected = expected[2:]
        self.assertEqual(sorted(self.listtmpdir()), sorted(expected))

        # Downloading a file already in the cache leaves the cache untouched
        cache = ArtifactCache(self.tmpdir)
        cache._download_manager.session = FakeSession()

        path = cache.fetch('http://server/qux')
        self.assertEqual(sorted(self.listtmpdir()), sorted(expected))

        # bar was purged earlier; re-downloading it should purge the oldest
        # downloaded file, which at this point would be qux, but we also
        # re-downloaded qux in the meantime, so the next one (fuga) should be
        # the purged one.
        cache = ArtifactCache(self.tmpdir)
        cache._download_manager.session = FakeSession()

        path = cache.fetch('http://server/bar')
        expected.append(os.path.basename(path))
        expected = [p for p in expected if 'fuga' not in p]
        self.assertEqual(sorted(self.listtmpdir()), sorted(expected))

        # Downloading one file larger than the cache size should still leave
        # MIN_CACHED_ARTIFACTS files.
        cache = ArtifactCache(self.tmpdir)
        cache._download_manager.session = FakeSession()

        path = cache.fetch('http://server/larger')
        expected.append(os.path.basename(path))
        expected = expected[-2:]
        self.assertEqual(sorted(self.listtmpdir()), sorted(expected))
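The behavior this test pins down is a size-bounded purge: a new cache session evicts the oldest files once the total size exceeds the budget, but never drops below a minimum number of cached artifacts. A hedged illustration of that policy, independent of `ArtifactCache` (the names and the constant's value here are assumptions, not the real implementation):

    import os

    MIN_CACHED_ARTIFACTS = 2  # illustrative floor, not the real constant

    def purge_oldest(paths, max_bytes):
        '''Delete oldest files until the total size fits the budget.'''
        entries = sorted(paths, key=os.path.getmtime)  # oldest first
        total = sum(os.path.getsize(p) for p in entries)
        while total > max_bytes and len(entries) > MIN_CACHED_ARTIFACTS:
            victim = entries.pop(0)
            total -= os.path.getsize(victim)
            os.remove(victim)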
Example #7
class Artifacts(object):
    '''Maintain state to efficiently fetch build artifacts from a Firefox tree.'''

    def __init__(self, tree, substs, defines, job=None, log=None,
                 cache_dir='.', hg=None, git=None, skip_cache=False,
                 topsrcdir=None, download_tests=True, download_symbols=False,
                 download_host_bins=False):
        if (hg and git) or (not hg and not git):
            raise ValueError("Must provide path to exactly one of hg and git")

        self._substs = substs
        self._defines = defines
        self._tree = tree
        self._job = job or self._guess_artifact_job()
        self._log = log
        self._hg = hg
        self._git = git
        self._cache_dir = cache_dir
        self._skip_cache = skip_cache
        self._topsrcdir = topsrcdir

        app = self._substs.get('MOZ_BUILD_APP')
        job_details = COMM_JOB_DETAILS if app == 'comm/mail' else MOZ_JOB_DETAILS

        try:
            cls = job_details[self._job]
            self._artifact_job = cls(log=self._log,
                                     download_tests=download_tests,
                                     download_symbols=download_symbols,
                                     download_host_bins=download_host_bins,
                                     substs=self._substs)
        except KeyError:
            self.log(logging.INFO, 'artifact',
                {'job': self._job},
                'Unknown job {job}')
            raise KeyError("Unknown job")

        self._task_cache = TaskCache(self._cache_dir, log=self._log, skip_cache=self._skip_cache)
        self._artifact_cache = ArtifactCache(self._cache_dir, log=self._log, skip_cache=self._skip_cache)
        self._pushhead_cache = PushheadCache(self._cache_dir, log=self._log, skip_cache=self._skip_cache)

    def log(self, *args, **kwargs):
        if self._log:
            self._log(*args, **kwargs)

    def _guess_artifact_job(self):
        # Add the "-debug" suffix to the guessed artifact job name
        # if MOZ_DEBUG is enabled.
        if self._substs.get('MOZ_DEBUG'):
            target_suffix = '-debug'
        elif self._substs.get('MOZ_PGO'):
            target_suffix = '-pgo'
        else:
            target_suffix = '-opt'

        if self._substs.get('MOZ_BUILD_APP', '') == 'mobile/android':
            if self._substs['ANDROID_CPU_ARCH'] == 'x86_64':
                return 'android-x86_64' + target_suffix
            if self._substs['ANDROID_CPU_ARCH'] == 'x86':
                return 'android-x86' + target_suffix
            if self._substs['ANDROID_CPU_ARCH'] == 'arm64-v8a':
                return 'android-aarch64' + target_suffix
            return 'android-api-16' + target_suffix

        target_64bit = False
        if self._substs['target_cpu'] == 'x86_64':
            target_64bit = True

        if self._defines.get('XP_LINUX', False):
            return ('linux64' if target_64bit else 'linux') + target_suffix
        if self._defines.get('XP_WIN', False):
            if self._substs['target_cpu'] == 'aarch64':
                return 'win64-aarch64' + target_suffix
            return ('win64' if target_64bit else 'win32') + target_suffix
        if self._defines.get('XP_MACOSX', False):
            # We only produce unified builds in automation, so the target_cpu
            # check is not relevant.
            return 'macosx64' + target_suffix
        raise Exception('Cannot determine default job for |mach artifact|!')

    def _pushheads_from_rev(self, rev, count):
        """Queries hg.mozilla.org's json-pushlog for pushheads that are nearby
        ancestors of `rev`. Multiple trees are queried, as the `rev` may
        already have been pushed to multiple repositories. For each repository
        containing `rev`, the pushhead introducing `rev` and the previous
        `count` pushheads from that point are included in the output.
        """

        with self._pushhead_cache as pushhead_cache:
            found_pushids = {}

            search_trees = self._artifact_job.candidate_trees
            for tree in search_trees:
                self.log(logging.INFO, 'artifact',
                         {'tree': tree,
                          'rev': rev},
                         'Attempting to find a pushhead containing {rev} on {tree}.')
                try:
                    pushid = pushhead_cache.parent_pushhead_id(tree, rev)
                    found_pushids[tree] = pushid
                except ValueError:
                    continue

            candidate_pushheads = collections.defaultdict(list)

            for tree, pushid in found_pushids.iteritems():
                end = pushid
                start = pushid - NUM_PUSHHEADS_TO_QUERY_PER_PARENT

                self.log(logging.INFO, 'artifact',
                         {'tree': tree,
                          'pushid': pushid,
                          'num': NUM_PUSHHEADS_TO_QUERY_PER_PARENT},
                         'Retrieving the last {num} pushheads starting with id {pushid} on {tree}')
                for pushhead in pushhead_cache.pushid_range(tree, start, end):
                    candidate_pushheads[pushhead].append(tree)

        return candidate_pushheads

    def _get_hg_revisions_from_git(self):
        rev_list = subprocess.check_output([
            self._git, 'rev-list', '--topo-order',
            '--max-count={num}'.format(num=NUM_REVISIONS_TO_QUERY),
            'HEAD',
        ], cwd=self._topsrcdir)

        hg_hash_list = subprocess.check_output([
            self._git, 'cinnabar', 'git2hg'
        ] + rev_list.splitlines(), cwd=self._topsrcdir)

        zeroes = "0" * 40

        hashes = []
        for hg_hash in hg_hash_list.splitlines():
            hg_hash = hg_hash.strip()
            if not hg_hash or hg_hash == zeroes:
                continue
            hashes.append(hg_hash)
        return hashes

    def _get_recent_public_revisions(self):
        """Returns recent ancestors of the working parent that are likely to
        be known to Mozilla automation.

        If we're using git, retrieves hg revisions from git-cinnabar.
        """
        if self._git:
            return self._get_hg_revisions_from_git()

        # Mercurial updated the ordering of "last" in 4.3. We use revision
        # numbers to order here to accommodate multiple versions of hg.
        last_revs = subprocess.check_output([
            self._hg, 'log',
            '--template', '{rev}:{node}\n',
            '-r', 'last(public() and ::., {num})'.format(
                num=NUM_REVISIONS_TO_QUERY)
        ], cwd=self._topsrcdir).splitlines()

        if len(last_revs) == 0:
            raise Exception("""\
There are no public revisions.
This can happen if the repository is created from a bundle file and never
pulled from a remote.  Please run `hg pull` and build again.
See https://developer.mozilla.org/en-US/docs/Mozilla/Developer_guide/Source_Code/Mercurial/Bundles""")

        self.log(logging.INFO, 'artifact',
            {'len': len(last_revs)},
            'hg suggested {len} candidate revisions')

        def to_pair(line):
            rev, node = line.split(':', 1)
            return (int(rev), node)

        pairs = map(to_pair, last_revs)

        # Python's tuple sort orders by first component: here, the (local)
        # revision number.
        nodes = [pair[1] for pair in sorted(pairs, reverse=True)]

        for node in nodes[:20]:
            self.log(logging.INFO, 'artifact',
                     {'node': node},
                     'hg suggested candidate revision: {node}')
        self.log(logging.INFO, 'artifact',
                 {'remaining': max(0, len(nodes) - 20)},
                 'hg suggested candidate revision: and {remaining} more')

        return nodes

    def _find_pushheads(self):
        """Returns an iterator of recent pushhead revisions, starting with the
        working parent.
        """

        last_revs = self._get_recent_public_revisions()
        candidate_pushheads = self._pushheads_from_rev(last_revs[0].rstrip(),
                                                       NUM_PUSHHEADS_TO_QUERY_PER_PARENT)
        count = 0
        for rev in last_revs:
            rev = rev.rstrip()
            if not rev:
                continue
            if rev not in candidate_pushheads:
                continue
            count += 1
            yield candidate_pushheads[rev], rev

        if not count:
            raise Exception('Could not find any candidate pushheads in the last {num} revisions.\n'
                            'Search started with {rev}, which must be known to Mozilla automation.\n\n'
                            'see https://developer.mozilla.org/en-US/docs/Artifact_builds'.format(
                                rev=last_revs[0], num=NUM_PUSHHEADS_TO_QUERY_PER_PARENT))

    def find_pushhead_artifacts(self, task_cache, job, tree, pushhead):
        try:
            taskId, artifacts = task_cache.artifacts(tree, job, self._artifact_job.__class__, pushhead)
        except ValueError:
            return None

        urls = []
        for artifact_name in self._artifact_job.find_candidate_artifacts(artifacts):
            # We can easily extract the task ID from the URL.  We can't easily
            # extract the build ID; we use the .ini files embedded in the
            # downloaded artifact for this.
            url = get_artifact_url(taskId, artifact_name)
            urls.append(url)
        if urls:
            self.log(logging.INFO, 'artifact',
                     {'pushhead': pushhead,
                      'tree': tree},
                     'Installing from remote pushhead {pushhead} on {tree}')
            return urls
        return None

    def install_from_file(self, filename, distdir):
        self.log(logging.INFO, 'artifact',
            {'filename': filename},
            'Installing from {filename}')

        # Do we need to post-process?
        processed_filename = filename + PROCESSED_SUFFIX

        if self._skip_cache and os.path.exists(processed_filename):
            self.log(logging.INFO, 'artifact',
                {'path': processed_filename},
                'Skipping cache: removing cached processed artifact {path}')
            os.remove(processed_filename)

        if not os.path.exists(processed_filename):
            self.log(logging.INFO, 'artifact',
                {'filename': filename},
                'Processing contents of {filename}')
            self.log(logging.INFO, 'artifact',
                {'processed_filename': processed_filename},
                'Writing processed {processed_filename}')
            self._artifact_job.process_artifact(filename, processed_filename)

        self._artifact_cache._persist_limit.register_file(processed_filename)

        self.log(logging.INFO, 'artifact',
            {'processed_filename': processed_filename},
            'Installing from processed {processed_filename}')

        # Copy the artifact's files (skipping .ini metadata), avoiding
        # modification where possible.
        ensureParentDir(mozpath.join(distdir, '.dummy'))

        with zipfile.ZipFile(processed_filename) as zf:
            for info in zf.infolist():
                if info.filename.endswith('.ini'):
                    continue
                n = mozpath.join(distdir, info.filename)
                fh = FileAvoidWrite(n, mode='rb')
                shutil.copyfileobj(zf.open(info), fh)
                file_existed, file_updated = fh.close()
                self.log(logging.INFO, 'artifact',
                    {'updating': 'Updating' if file_updated else 'Not updating', 'filename': n},
                    '{updating} {filename}')
                if not file_existed or file_updated:
                    # Libraries and binaries may need to be marked executable,
                    # depending on platform.
                    perms = info.external_attr >> 16 # See http://stackoverflow.com/a/434689.
                    perms |= stat.S_IWUSR | stat.S_IRUSR | stat.S_IRGRP | stat.S_IROTH # u+w, a+r.
                    os.chmod(n, perms)
        return 0
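    # A hedged note on the chmod above: for archives created on a Unix host,
    # zipfile stores the original Unix mode bits in the high 16 bits of
    # ZipInfo.external_attr, so `info.external_attr >> 16` recovers the stored
    # mode (e.g. 0o755 for an executable) before u+w and a+r are added.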

    def install_from_url(self, url, distdir):
        self.log(logging.INFO, 'artifact',
            {'url': url},
            'Installing from {url}')
        filename = self._artifact_cache.fetch(url)
        return self.install_from_file(filename, distdir)

    def _install_from_hg_pushheads(self, hg_pushheads, distdir):
        """Iterate pairs (hg_hash, {tree-set}) associating hg revision hashes
        and tree-sets they are known to be in, trying to download and
        install from each.
        """

        urls = None
        count = 0
        # `with` blocks handle persistence.
        with self._task_cache as task_cache:
            for trees, hg_hash in hg_pushheads:
                for tree in trees:
                    count += 1
                    self.log(logging.INFO, 'artifact',
                             {'hg_hash': hg_hash,
                              'tree': tree},
                             'Trying to find artifacts for hg revision {hg_hash} on tree {tree}.')
                    urls = self.find_pushhead_artifacts(task_cache, self._job, tree, hg_hash)
                    if urls:
                        for url in urls:
                            if self.install_from_url(url, distdir):
                                return 1
                        return 0

        self.log(logging.ERROR, 'artifact',
                 {'count': count},
                 'Tried {count} pushheads, no built artifacts found.')
        return 1

    def install_from_recent(self, distdir):
        hg_pushheads = self._find_pushheads()
        return self._install_from_hg_pushheads(hg_pushheads, distdir)

    def install_from_revset(self, revset, distdir):
        revision = None
        try:
            if self._hg:
                revision = subprocess.check_output([self._hg, 'log', '--template', '{node}\n',
                                                  '-r', revset], cwd=self._topsrcdir).strip()
            elif self._git:
                revset = subprocess.check_output([
                    self._git, 'rev-parse', '%s^{commit}' % revset],
                    stderr=open(os.devnull, 'w'), cwd=self._topsrcdir).strip()
            else:
                # Fall back to the exception handling case used for both hg
                # and git (CalledProcessError requires a returncode and cmd).
                raise subprocess.CalledProcessError(1, 'no hg or git')
        except subprocess.CalledProcessError:
            # If the mercurial or git commands above failed, it means the given
            # revset is not known locally to the VCS. But if the revset looks
            # like a complete sha1, assume it is a mercurial sha1 that hasn't
            # been pulled, and use that.
            if re.match(r'^[A-Fa-f0-9]{40}$', revset):
                revision = revset

        if revision is None and self._git:
            revision = subprocess.check_output(
                [self._git, 'cinnabar', 'git2hg', revset], cwd=self._topsrcdir).strip()

        if revision == "0" * 40 or revision is None:
            raise ValueError('revision specification must resolve to a commit known to hg')
        if len(revision.split('\n')) != 1:
            raise ValueError('revision specification must resolve to exactly one commit')

        self.log(logging.INFO, 'artifact',
                 {'revset': revset,
                  'revision': revision},
                 'Will only accept artifacts from a pushhead at {revision} '
                 '(matched revset "{revset}").')
        # Include try in our search to allow pulling from a specific push.
        pushheads = [(
            self._artifact_job.candidate_trees + [self._artifact_job.try_tree],
            revision
        )]
        return self._install_from_hg_pushheads(pushheads, distdir)

    def install_from_task(self, taskId, distdir):
        artifacts = list_artifacts(taskId)

        urls = []
        for artifact_name in self._artifact_job.find_candidate_artifacts(artifacts):
            # We can easily extract the task ID from the URL.  We can't easily
            # extract the build ID; we use the .ini files embedded in the
            # downloaded artifact for this.
            url = get_artifact_url(taskId, artifact_name)
            urls.append(url)
        if not urls:
            raise ValueError('Task {taskId} existed, but no artifacts found!'.format(taskId=taskId))
        for url in urls:
            if self.install_from_url(url, distdir):
                return 1
        return 0

    def install_from(self, source, distdir):
        """Install artifacts from a ``source`` into the given ``distdir``.
        """
        if source and os.path.isfile(source):
            return self.install_from_file(source, distdir)
        elif source and urlparse.urlparse(source).scheme:
            return self.install_from_url(source, distdir)
        else:
            if source is None and 'MOZ_ARTIFACT_REVISION' in os.environ:
                source = os.environ['MOZ_ARTIFACT_REVISION']

            if source:
                return self.install_from_revset(source, distdir)

            for var in (
                'MOZ_ARTIFACT_TASK_%s' % self._job.upper().replace('-', '_'),
                'MOZ_ARTIFACT_TASK',
            ):
                if var in os.environ:
                    return self.install_from_task(os.environ[var], distdir)

            return self.install_from_recent(distdir)


    def clear_cache(self):
        self.log(logging.INFO, 'artifact',
            {},
            'Deleting cached artifacts and caches.')
        self._task_cache.clear_cache()
        self._artifact_cache.clear_cache()
        self._pushhead_cache.clear_cache()
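A hedged end-to-end usage sketch for the class above (every path and configuration value is illustrative; the logger merely matches the `(level, action, params, format_str)` shape the class calls it with):

    def log(level, action, params, format_str):
        print(format_str.format(**params))

    artifacts = Artifacts(
        tree='mozilla-central',
        substs={'MOZ_BUILD_APP': 'browser', 'target_cpu': 'x86_64'},
        defines={'XP_LINUX': True},
        log=log,
        hg='/usr/bin/hg',  # exactly one of hg/git
        cache_dir='/home/user/.mozbuild/package-frontend',
        topsrcdir='/path/to/mozilla-central',
    )

    # install_from() dispatches on the source: an existing file, a URL, a
    # revset, a task id from the environment, or (with None) recent public
    # pushheads.
    status = artifacts.install_from(None, '/path/to/objdir/dist')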