Example No. 1
    def setup(self, should_clobber=False):
        r'''Install browsertime and visualmetrics.py requirements.'''

        from mozbuild.action.tooltool import unpack_file
        from mozbuild.artifact_cache import ArtifactCache
        sys.path.append(mozpath.join(self.topsrcdir, 'tools', 'lint', 'eslint'))
        import setup_helper

        # Download the visualmetrics.py requirements.
        artifact_cache = ArtifactCache(self.artifact_cache_path,
                                       log=self.log, skip_cache=False)

        fetches = host_fetches[host_platform()]
        for tool, fetch in sorted(fetches.items()):
            archive = artifact_cache.fetch(fetch['url'])
            # TODO: assert type, verify sha256 (and size?).

            if fetch.get('unpack', True):
                cwd = os.getcwd()
                try:
                    os.chdir(self.state_path)
                    self.log(
                        logging.INFO,
                        'browsertime',
                        {'path': archive},
                        'Unpacking temporary location {path}')
                    unpack_file(archive)
                finally:
                    os.chdir(cwd)

        # Install the browsertime Node.js requirements.
        if not setup_helper.check_node_executables_valid():
            return 1

        self.log(
            logging.INFO,
            'browsertime',
            {'package_json': mozpath.join(BROWSERTIME_ROOT, 'package.json')},
            'Installing browsertime node module from {package_json}')
        status = setup_helper.package_setup(
            BROWSERTIME_ROOT,
            'browsertime',
            should_clobber=should_clobber)

        if status:
            return status

        return self.check()
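
All of the browsertime setup examples in this listing share the same fetch-and-unpack pattern: download an archive through ArtifactCache.fetch(), change into a state directory, and call unpack_file() on the result. Below is a minimal standalone sketch of that pattern, assuming the mozbuild modules used above are importable; the helper name and its arguments are illustrative, not part of mozbuild.

import os

from mozbuild.action.tooltool import unpack_file
from mozbuild.artifact_cache import ArtifactCache


def fetch_and_unpack(url, cache_path, state_path, log):
    # Download `url` into the artifact cache, then unpack the archive
    # inside `state_path`, restoring the original cwd afterwards.
    cache = ArtifactCache(cache_path, log=log, skip_cache=False)
    archive = cache.fetch(url)
    cwd = os.getcwd()
    try:
        os.chdir(state_path)
        unpack_file(archive)
    finally:
        os.chdir(cwd)
    return archive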
Example No. 2
    def artifact_toolchain(self,
                           verbose=False,
                           cache_dir=None,
                           skip_cache=False,
                           from_build=(),
                           tooltool_manifest=None,
                           authentication_file=None,
                           no_unpack=False,
                           retry=0,
                           bootstrap=False,
                           artifact_manifest=None,
                           files=()):
        '''Download, cache and install pre-built toolchains.
        '''
        from mozbuild.artifacts import ArtifactCache
        from mozbuild.action.tooltool import (
            FileRecord,
            open_manifest,
            unpack_file,
        )
        import redo
        import requests
        import time

        from taskgraph.util.taskcluster import get_artifact_url

        start = time.time()
        self._set_log_level(verbose)
        # Normally, we'd use self.log_manager.enable_unstructured(),
        # but that enables all logging, while we only really want tooltool's
        # and it also duplicates structured log output.
        # So we manually do what it does, and limit that to the tooltool
        # logger.
        if self.log_manager.terminal_handler:
            logging.getLogger('mozbuild.action.tooltool').addHandler(
                self.log_manager.terminal_handler)
            logging.getLogger('redo').addHandler(
                self.log_manager.terminal_handler)
            self.log_manager.terminal_handler.addFilter(
                self.log_manager.structured_filter)
        if not cache_dir:
            cache_dir = os.path.join(self._mach_context.state_dir,
                                     'toolchains')

        tooltool_host = os.environ.get('TOOLTOOL_HOST',
                                       'tooltool.mozilla-releng.net')
        taskcluster_proxy_url = os.environ.get('TASKCLUSTER_PROXY_URL')
        if taskcluster_proxy_url:
            tooltool_url = '{}/{}'.format(taskcluster_proxy_url, tooltool_host)
        else:
            tooltool_url = 'https://{}'.format(tooltool_host)

        cache = ArtifactCache(cache_dir=cache_dir,
                              log=self.log,
                              skip_cache=skip_cache)

        class DownloadRecord(FileRecord):
            def __init__(self, url, *args, **kwargs):
                super(DownloadRecord, self).__init__(*args, **kwargs)
                self.url = url
                self.basename = self.filename

            def fetch_with(self, cache):
                self.filename = cache.fetch(self.url)
                return self.filename

            def validate(self):
                if self.size is None and self.digest is None:
                    return True
                return super(DownloadRecord, self).validate()

        class ArtifactRecord(DownloadRecord):
            def __init__(self, task_id, artifact_name):
                for _ in redo.retrier(attempts=retry + 1, sleeptime=60):
                    cot = cache._download_manager.session.get(
                        get_artifact_url(task_id,
                                         'public/chain-of-trust.json'))
                    if cot.status_code >= 500:
                        continue
                    cot.raise_for_status()
                    break
                else:
                    cot.raise_for_status()

                digest = algorithm = None
                data = json.loads(cot.text)
                for algorithm, digest in (data.get('artifacts',
                                                   {}).get(artifact_name,
                                                           {}).items()):
                    pass

                name = os.path.basename(artifact_name)
                artifact_url = get_artifact_url(
                    task_id,
                    artifact_name,
                    use_proxy=not artifact_name.startswith('public/'))
                super(ArtifactRecord, self).__init__(artifact_url,
                                                     name,
                                                     None,
                                                     digest,
                                                     algorithm,
                                                     unpack=True)

        records = OrderedDict()
        downloaded = []

        if tooltool_manifest:
            manifest = open_manifest(tooltool_manifest)
            for record in manifest.file_records:
                url = '{}/{}/{}'.format(tooltool_url, record.algorithm,
                                        record.digest)
                records[record.filename] = DownloadRecord(
                    url,
                    record.filename,
                    record.size,
                    record.digest,
                    record.algorithm,
                    unpack=record.unpack,
                    version=record.version,
                    visibility=record.visibility)

        if from_build:
            if 'MOZ_AUTOMATION' in os.environ:
                self.log(
                    logging.ERROR, 'artifact', {},
                    'Do not use --from-build in automation; all dependencies '
                    'should be determined in the decision task.')
                return 1
            from taskgraph.optimize.strategies import IndexSearch
            from taskgraph.generator import load_tasks_for_kind
            params = {
                'level': six.ensure_text(os.environ.get('MOZ_SCM_LEVEL', '3'))
            }

            root_dir = mozpath.join(self.topsrcdir, 'taskcluster/ci')
            toolchains = load_tasks_for_kind(params,
                                             'toolchain',
                                             root_dir=root_dir)

            aliases = {}
            for t in toolchains.values():
                alias = t.attributes.get('toolchain-alias')
                if alias:
                    aliases['toolchain-{}'.format(alias)] = \
                        t.task['metadata']['name']

            for b in from_build:
                user_value = b

                if not b.startswith('toolchain-'):
                    b = 'toolchain-{}'.format(b)

                task = toolchains.get(aliases.get(b, b))
                if not task:
                    self.log(
                        logging.ERROR, 'artifact', {'build': user_value},
                        'Could not find a toolchain build named `{build}`')
                    return 1

                # Ensure that toolchains installed by `mach bootstrap` have the
                # `local-toolchain` attribute set. Taskgraph ensures that these
                # are built on trunk projects, so the task will be available to
                # install here.
                if bootstrap and not task.attributes.get('local-toolchain'):
                    self.log(
                        logging.ERROR, 'artifact', {'build': user_value},
                        'Toolchain `{build}` is not annotated as used for local development.'
                    )
                    return 1

                artifact_name = task.attributes.get('toolchain-artifact')
                self.log(
                    logging.DEBUG, 'artifact', {
                        'name': artifact_name,
                        'index': task.optimization.get('index-search')
                    }, 'Searching for {name} in {index}')
                task_id = IndexSearch().should_replace_task(
                    task, {}, task.optimization.get('index-search', []))
                if task_id in (True, False) or not artifact_name:
                    self.log(logging.ERROR, 'artifact', {'build': user_value},
                             _COULD_NOT_FIND_ARTIFACTS_TEMPLATE)
                    return 1

                self.log(logging.DEBUG, 'artifact', {
                    'name': artifact_name,
                    'task_id': task_id
                }, 'Found {name} in {task_id}')

                record = ArtifactRecord(task_id, artifact_name)
                records[record.filename] = record

        # Handle the list of files of the form path@task-id on the command
        # line. Each of those gives a path to an artifact to download.
        for f in files:
            if '@' not in f:
                self.log(logging.ERROR, 'artifact', {},
                         'Expected a list of files of the form path@task-id')
                return 1
            name, task_id = f.rsplit('@', 1)
            record = ArtifactRecord(task_id, name)
            records[record.filename] = record

        for record in six.itervalues(records):
            self.log(logging.INFO, 'artifact', {'name': record.basename},
                     'Setting up artifact {name}')
            valid = False
            # sleeptime is 60 per retry.py, used by tooltool_wrapper.sh
            for attempt, _ in enumerate(
                    redo.retrier(attempts=retry + 1, sleeptime=60)):
                try:
                    record.fetch_with(cache)
                except (requests.exceptions.HTTPError,
                        requests.exceptions.ChunkedEncodingError,
                        requests.exceptions.ConnectionError) as e:

                    if isinstance(e, requests.exceptions.HTTPError):
                        # The relengapi proxy likes to return error 400 bad request
                        # which is unlikely to be due to our (simple) GET
                        # being borked.
                        status = e.response.status_code
                        should_retry = status >= 500 or status == 400
                    else:
                        should_retry = True

                    if should_retry or attempt < retry:
                        level = logging.WARN
                    else:
                        level = logging.ERROR
                    self.log(level, 'artifact', {}, str(e))
                    if not should_retry:
                        break
                    if attempt < retry:
                        self.log(logging.INFO, 'artifact', {},
                                 'Will retry in a moment...')
                    continue
                try:
                    valid = record.validate()
                except Exception:
                    pass
                if not valid:
                    os.unlink(record.filename)
                    if attempt < retry:
                        self.log(
                            logging.INFO, 'artifact', {},
                            'Corrupt download. Will retry in a moment...')
                    continue

                downloaded.append(record)
                break

            if not valid:
                self.log(logging.ERROR, 'artifact', {'name': record.basename},
                         'Failed to download {name}')
                return 1

        artifacts = {} if artifact_manifest else None

        for record in downloaded:
            local = os.path.join(os.getcwd(), record.basename)
            if os.path.exists(local):
                os.unlink(local)
            # unpack_file needs the file with its final name to work
            # (https://github.com/mozilla/build-tooltool/issues/38), so we
            # need to copy it, even though we remove it later. Use hard links
            # when possible.
            try:
                os.link(record.filename, local)
            except Exception:
                shutil.copy(record.filename, local)
            # Keep a sha256 of each downloaded file, for the chain-of-trust
            # validation.
            if artifact_manifest is not None:
                with open(local, 'rb') as fh:
                    h = hashlib.sha256()
                    while True:
                        data = fh.read(1024 * 1024)
                        if not data:
                            break
                        h.update(data)
                artifacts[record.url] = {
                    'sha256': h.hexdigest(),
                }
            if record.unpack and not no_unpack:
                # Try to unpack the file. If we get an exception importing
                # zstandard when calling unpack_file, we can try installing
                # zstandard locally and trying again
                try:
                    unpack_file(local)
                except ImportError as e:
                    # Need to do this branch while this code is still exercised
                    # by Python 2.
                    if six.PY3 and e.name != "zstandard":
                        raise
                    elif six.PY2 and e.message != 'No module named zstandard':
                        raise
                    self._ensure_zstd()
                    unpack_file(local)
                os.unlink(local)

        if not downloaded:
            self.log(logging.ERROR, 'artifact', {}, 'Nothing to download')
            if files:
                return 1

        if artifacts:
            ensureParentDir(artifact_manifest)
            with open(artifact_manifest, 'w') as fh:
                json.dump(artifacts, fh, indent=4, sort_keys=True)

        if 'MOZ_AUTOMATION' in os.environ:
            end = time.time()

            perfherder_data = {
                'framework': {
                    'name': 'build_metrics'
                },
                'suites': [{
                    'name': 'mach_artifact_toolchain',
                    'value': end - start,
                    'lowerIsBetter': True,
                    'shouldAlert': False,
                    'subtests': [],
                }],
            }
            self.log(logging.INFO, 'perfherder',
                     {'data': json.dumps(perfherder_data)},
                     'PERFHERDER_DATA: {data}')

        return 0
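
The download loop above pairs redo.retrier with validation and cleanup. Here is a condensed sketch of that retry-and-validate shape, with generic fetch/validate callables standing in for the record methods and omitting the example's special-casing of non-retryable HTTP status codes; the names are illustrative, not part of mozbuild.

import os

import redo
import requests


def download_with_retries(fetch, validate, filename, retry=0, log=print):
    # `fetch` downloads the file and `validate` checks its size/digest;
    # both stand in for record.fetch_with(cache) and record.validate().
    for attempt, _ in enumerate(redo.retrier(attempts=retry + 1,
                                             sleeptime=60)):
        try:
            fetch()
        except (requests.exceptions.HTTPError,
                requests.exceptions.ChunkedEncodingError,
                requests.exceptions.ConnectionError) as e:
            log(str(e))
            continue
        if not validate():
            # Corrupt download: drop it and let the retrier sleep.
            os.unlink(filename)
            continue
        return True
    return False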
Example No. 3
    def artifact_toolchain(
        self,
        verbose=False,
        cache_dir=None,
        skip_cache=False,
        from_build=(),
        tooltool_manifest=None,
        no_unpack=False,
        retry=0,
        bootstrap=False,
        artifact_manifest=None,
    ):
        """Download, cache and install pre-built toolchains."""
        from mozbuild.artifacts import ArtifactCache
        from mozbuild.action.tooltool import (
            FileRecord,
            open_manifest,
            unpack_file,
        )
        import redo
        import requests
        import time

        from taskgraph.util.taskcluster import get_artifact_url

        start = time.time()
        self._set_log_level(verbose)
        # Normally, we'd use self.log_manager.enable_unstructured(),
        # but that enables all logging, while we only really want tooltool's
        # and it also duplicates structured log output.
        # So we manually do what it does, and limit that to the tooltool
        # logger.
        if self.log_manager.terminal_handler:
            logging.getLogger("mozbuild.action.tooltool").addHandler(
                self.log_manager.terminal_handler)
            logging.getLogger("redo").addHandler(
                self.log_manager.terminal_handler)
            self.log_manager.terminal_handler.addFilter(
                self.log_manager.structured_filter)
        if not cache_dir:
            cache_dir = os.path.join(self._mach_context.state_dir,
                                     "toolchains")

        tooltool_host = os.environ.get("TOOLTOOL_HOST",
                                       "tooltool.mozilla-releng.net")
        taskcluster_proxy_url = os.environ.get("TASKCLUSTER_PROXY_URL")
        if taskcluster_proxy_url:
            tooltool_url = "{}/{}".format(taskcluster_proxy_url, tooltool_host)
        else:
            tooltool_url = "https://{}".format(tooltool_host)

        cache = ArtifactCache(cache_dir=cache_dir,
                              log=self.log,
                              skip_cache=skip_cache)

        class DownloadRecord(FileRecord):
            def __init__(self, url, *args, **kwargs):
                super(DownloadRecord, self).__init__(*args, **kwargs)
                self.url = url
                self.basename = self.filename

            def fetch_with(self, cache):
                self.filename = cache.fetch(self.url)
                return self.filename

            def validate(self):
                if self.size is None and self.digest is None:
                    return True
                return super(DownloadRecord, self).validate()

        class ArtifactRecord(DownloadRecord):
            def __init__(self, task_id, artifact_name):
                for _ in redo.retrier(attempts=retry + 1, sleeptime=60):
                    cot = cache._download_manager.session.get(
                        get_artifact_url(task_id,
                                         "public/chain-of-trust.json"))
                    if cot.status_code >= 500:
                        continue
                    cot.raise_for_status()
                    break
                else:
                    cot.raise_for_status()

                digest = algorithm = None
                data = json.loads(cot.text)
                for algorithm, digest in (data.get("artifacts",
                                                   {}).get(artifact_name,
                                                           {}).items()):
                    pass

                name = os.path.basename(artifact_name)
                artifact_url = get_artifact_url(
                    task_id,
                    artifact_name,
                    use_proxy=not artifact_name.startswith("public/"),
                )
                super(ArtifactRecord, self).__init__(artifact_url,
                                                     name,
                                                     None,
                                                     digest,
                                                     algorithm,
                                                     unpack=True)

        records = OrderedDict()
        downloaded = []

        if tooltool_manifest:
            manifest = open_manifest(tooltool_manifest)
            for record in manifest.file_records:
                url = "{}/{}/{}".format(tooltool_url, record.algorithm,
                                        record.digest)
                records[record.filename] = DownloadRecord(
                    url,
                    record.filename,
                    record.size,
                    record.digest,
                    record.algorithm,
                    unpack=record.unpack,
                    version=record.version,
                    visibility=record.visibility,
                )

        if from_build:
            if "MOZ_AUTOMATION" in os.environ:
                self.log(
                    logging.ERROR,
                    "artifact",
                    {},
                    "Do not use --from-build in automation; all dependencies "
                    "should be determined in the decision task.",
                )
                return 1
            from taskgraph.optimize.strategies import IndexSearch
            from mozbuild.toolchains import toolchain_task_definitions

            tasks = toolchain_task_definitions()

            for b in from_build:
                user_value = b

                if not b.startswith("toolchain-"):
                    b = "toolchain-{}".format(b)

                task = tasks.get(b)
                if not task:
                    self.log(
                        logging.ERROR,
                        "artifact",
                        {"build": user_value},
                        "Could not find a toolchain build named `{build}`",
                    )
                    return 1

                # Ensure that toolchains installed by `mach bootstrap` have the
                # `local-toolchain` attribute set. Taskgraph ensures that these
                # are built on trunk projects, so the task will be available to
                # install here.
                if bootstrap and not task.attributes.get("local-toolchain"):
                    self.log(
                        logging.ERROR,
                        "artifact",
                        {"build": user_value},
                        "Toolchain `{build}` is not annotated as used for local development.",
                    )
                    return 1

                artifact_name = task.attributes.get("toolchain-artifact")
                self.log(
                    logging.DEBUG,
                    "artifact",
                    {
                        "name": artifact_name,
                        "index": task.optimization.get("index-search"),
                    },
                    "Searching for {name} in {index}",
                )
                task_id = IndexSearch().should_replace_task(
                    task, {}, task.optimization.get("index-search", []))
                if task_id in (True, False) or not artifact_name:
                    self.log(
                        logging.ERROR,
                        "artifact",
                        {"build": user_value},
                        _COULD_NOT_FIND_ARTIFACTS_TEMPLATE,
                    )
                    # Get and print some helpful info for diagnosis.
                    repo = mozversioncontrol.get_repository_object(
                        self.topsrcdir)
                    changed_files = set(repo.get_outgoing_files()) | set(
                        repo.get_changed_files())
                    if changed_files:
                        self.log(
                            logging.ERROR,
                            "artifact",
                            {},
                            "Hint: consider reverting your local changes "
                            "to the following files: %s" %
                            sorted(changed_files),
                        )
                    if "TASKCLUSTER_ROOT_URL" in os.environ:
                        self.log(
                            logging.ERROR,
                            "artifact",
                            {"build": user_value},
                            "Due to the environment variable TASKCLUSTER_ROOT_URL "
                            "being set, the artifacts were expected to be found "
                            "on {}. If this was unintended, unset "
                            "TASKCLUSTER_ROOT_URL and try again.".format(
                                os.environ["TASKCLUSTER_ROOT_URL"]),
                        )
                    return 1

                self.log(
                    logging.DEBUG,
                    "artifact",
                    {
                        "name": artifact_name,
                        "task_id": task_id
                    },
                    "Found {name} in {task_id}",
                )

                record = ArtifactRecord(task_id, artifact_name)
                records[record.filename] = record

        for record in six.itervalues(records):
            self.log(
                logging.INFO,
                "artifact",
                {"name": record.basename},
                "Setting up artifact {name}",
            )
            valid = False
            # sleeptime is 60 per retry.py, used by tooltool_wrapper.sh
            for attempt, _ in enumerate(
                    redo.retrier(attempts=retry + 1, sleeptime=60)):
                try:
                    record.fetch_with(cache)
                except (
                        requests.exceptions.HTTPError,
                        requests.exceptions.ChunkedEncodingError,
                        requests.exceptions.ConnectionError,
                ) as e:

                    if isinstance(e, requests.exceptions.HTTPError):
                        # The relengapi proxy likes to return error 400 bad request
                        # which is unlikely to be due to our (simple) GET
                        # being borked.
                        status = e.response.status_code
                        should_retry = status >= 500 or status == 400
                    else:
                        should_retry = True

                    if should_retry or attempt < retry:
                        level = logging.WARN
                    else:
                        level = logging.ERROR
                    self.log(level, "artifact", {}, str(e))
                    if not should_retry:
                        break
                    if attempt < retry:
                        self.log(logging.INFO, "artifact", {},
                                 "Will retry in a moment...")
                    continue
                try:
                    valid = record.validate()
                except Exception:
                    pass
                if not valid:
                    os.unlink(record.filename)
                    if attempt < retry:
                        self.log(
                            logging.INFO,
                            "artifact",
                            {},
                            "Corrupt download. Will retry in a moment...",
                        )
                    continue

                downloaded.append(record)
                break

            if not valid:
                self.log(
                    logging.ERROR,
                    "artifact",
                    {"name": record.basename},
                    "Failed to download {name}",
                )
                return 1

        artifacts = {} if artifact_manifest else None

        for record in downloaded:
            local = os.path.join(os.getcwd(), record.basename)
            if os.path.exists(local):
                os.unlink(local)
            # unpack_file needs the file with its final name to work
            # (https://github.com/mozilla/build-tooltool/issues/38), so we
            # need to copy it, even though we remove it later. Use hard links
            # when possible.
            try:
                os.link(record.filename, local)
            except Exception:
                shutil.copy(record.filename, local)
            # Keep a sha256 of each downloaded file, for the chain-of-trust
            # validation.
            if artifact_manifest is not None:
                with open(local, "rb") as fh:
                    h = hashlib.sha256()
                    while True:
                        data = fh.read(1024 * 1024)
                        if not data:
                            break
                        h.update(data)
                artifacts[record.url] = {
                    "sha256": h.hexdigest(),
                }
            if record.unpack and not no_unpack:
                unpack_file(local)
                os.unlink(local)

        if not downloaded:
            self.log(logging.ERROR, "artifact", {}, "Nothing to download")

        if artifacts:
            ensureParentDir(artifact_manifest)
            with open(artifact_manifest, "w") as fh:
                json.dump(artifacts, fh, indent=4, sort_keys=True)

        if "MOZ_AUTOMATION" in os.environ:
            end = time.time()

            perfherder_data = {
                "framework": {
                    "name": "build_metrics"
                },
                "suites": [{
                    "name": "mach_artifact_toolchain",
                    "value": end - start,
                    "lowerIsBetter": True,
                    "shouldAlert": False,
                    "subtests": [],
                }],
            }
            self.log(
                logging.INFO,
                "perfherder",
                {"data": json.dumps(perfherder_data)},
                "PERFHERDER_DATA: {data}",
            )

        return 0
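
The --artifact-manifest handling above boils down to hashing each downloaded file in 1 MiB chunks and dumping a {url: {"sha256": ...}} mapping as JSON for later chain-of-trust validation. A standard-library-only sketch of that bookkeeping follows; the function name and the paths_by_url argument are illustrative.

import hashlib
import json


def write_artifact_manifest(paths_by_url, manifest_path):
    # `paths_by_url` maps artifact URLs to local file paths.
    artifacts = {}
    for url, local in paths_by_url.items():
        h = hashlib.sha256()
        with open(local, "rb") as fh:
            while True:
                data = fh.read(1024 * 1024)
                if not data:
                    break
                h.update(data)
        artifacts[url] = {"sha256": h.hexdigest()}
    with open(manifest_path, "w") as fh:
        json.dump(artifacts, fh, indent=4, sort_keys=True)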
Example No. 4
    def setup_prerequisites(self):
        r"""Install browsertime and visualmetrics.py prerequisites."""

        from mozbuild.action.tooltool import unpack_file
        from mozbuild.artifact_cache import ArtifactCache

        if not AUTOMATION and host_platform().startswith("linux"):
            # On Linux ImageMagick needs to be installed manually, and `mach bootstrap` doesn't
            # do that (yet).  Provide some guidance.
            try:
                from shutil import which
            except ImportError:
                from shutil_which import which

            im_programs = ("compare", "convert", "mogrify")
            for im_program in im_programs:
                prog = which(im_program)
                if not prog:
                    print(
                        "Error: On Linux, ImageMagick must be on the PATH. "
                        "Install ImageMagick manually and try again (or update PATH). "
                        "On Ubuntu and Debian, try `sudo apt-get install imagemagick`. "
                        "On Fedora, try `sudo dnf install imagemagick`. "
                        "On CentOS, try `sudo yum install imagemagick`.")
                    return 1

        # Download the visualmetrics.py requirements.
        artifact_cache = ArtifactCache(self.artifact_cache_path,
                                       log=self.log,
                                       skip_cache=False)

        fetches = host_fetches[host_platform()]
        for tool, fetch in sorted(fetches.items()):
            archive = artifact_cache.fetch(fetch["url"])
            # TODO: assert type, verify sha256 (and size?).

            if fetch.get("unpack", True):
                cwd = os.getcwd()
                try:
                    mkdir(self.state_path)
                    os.chdir(self.state_path)
                    self.log(
                        logging.INFO,
                        "browsertime",
                        {"path": archive},
                        "Unpacking temporary location {path}",
                    )

                    if ("win64" in host_platform()
                            and "imagemagick" in tool.lower()):
                        # Windows archive does not contain a subfolder
                        # so we make one for it here
                        mkdir(fetch.get("path"))
                        os.chdir(
                            os.path.join(self.state_path, fetch.get("path")))
                        unpack_file(archive)
                        os.chdir(self.state_path)
                    else:
                        unpack_file(archive)

                    # Make sure the expected path exists after extraction
                    path = os.path.join(self.state_path, fetch.get("path"))
                    if not os.path.exists(path):
                        raise Exception(
                            "Cannot find an extracted directory: %s" % path)

                    try:
                        # Some archives provide binaries that don't have the
                        # executable bit set so we need to set it here
                        for root, dirs, files in os.walk(path):
                            for edir in dirs:
                                loc_to_change = os.path.join(root, edir)
                                st = os.stat(loc_to_change)
                                os.chmod(loc_to_change,
                                         st.st_mode | stat.S_IEXEC)
                            for efile in files:
                                loc_to_change = os.path.join(root, efile)
                                st = os.stat(loc_to_change)
                                os.chmod(loc_to_change,
                                         st.st_mode | stat.S_IEXEC)
                    except Exception as e:
                        raise Exception(
                            "Could not set executable bit in %s, error: %s" %
                            (path, str(e)))
                finally:
                    os.chdir(cwd)
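
The permission fix-up in this example walks the extracted tree and adds the executable bit, because some archives ship binaries without it. The same walk, extracted into a small sketch (the helper name is illustrative):

import os
import stat


def set_executable_bits(path):
    # Add the owner-executable bit to every directory and file under path.
    for root, dirs, files in os.walk(path):
        for name in dirs + files:
            target = os.path.join(root, name)
            st = os.stat(target)
            os.chmod(target, st.st_mode | stat.S_IEXEC)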
Example No. 5
    def setup(self, should_clobber=False):
        r'''Install browsertime and visualmetrics.py requirements.'''

        automation = bool(os.environ.get('MOZ_AUTOMATION'))

        from mozbuild.action.tooltool import unpack_file
        from mozbuild.artifact_cache import ArtifactCache
        sys.path.append(mozpath.join(self.topsrcdir, 'tools', 'lint', 'eslint'))
        import setup_helper

        if not os.environ.get('MOZ_AUTOMATION') and host_platform().startswith('linux'):
            # On Linux ImageMagick needs to be installed manually, and `mach bootstrap` doesn't
            # do that (yet).  Provide some guidance.
            try:
                from shutil import which
            except ImportError:
                from shutil_which import which

            im_programs = ('compare', 'convert', 'mogrify')
            for im_program in im_programs:
                prog = which(im_program)
                if not prog:
                    print('Error: On Linux, ImageMagick must be on the PATH. '
                          'Install ImageMagick manually and try again (or update PATH). '
                          'On Ubuntu and Debian, try `sudo apt-get install imagemagick`. '
                          'On Fedora, try `sudo dnf install imagemagick`. '
                          'On CentOS, try `sudo yum install imagemagick`.')
                    return 1

        # Download the visualmetrics.py requirements.
        artifact_cache = ArtifactCache(self.artifact_cache_path,
                                       log=self.log, skip_cache=False)

        fetches = host_fetches[host_platform()]
        for tool, fetch in sorted(fetches.items()):
            archive = artifact_cache.fetch(fetch['url'])
            # TODO: assert type, verify sha256 (and size?).

            if fetch.get('unpack', True):
                cwd = os.getcwd()
                try:
                    mkdir(self.state_path)
                    os.chdir(self.state_path)
                    self.log(
                        logging.INFO,
                        'browsertime',
                        {'path': archive},
                        'Unpacking temporary location {path}')
                    unpack_file(archive)

                    # Make sure the expected path exists after extraction
                    path = os.path.join(self.state_path, fetch.get('path'))
                    if not os.path.exists(path):
                        raise Exception("Cannot find an extracted directory: %s" % path)

                    try:
                        # Some archives provide binaries that don't have the
                        # executable bit set so we need to set it here
                        for root, dirs, files in os.walk(path):
                            for edir in dirs:
                                loc_to_change = os.path.join(root, edir)
                                st = os.stat(loc_to_change)
                                os.chmod(loc_to_change, st.st_mode | stat.S_IEXEC)
                            for efile in files:
                                loc_to_change = os.path.join(root, efile)
                                st = os.stat(loc_to_change)
                                os.chmod(loc_to_change, st.st_mode | stat.S_IEXEC)
                    except Exception as e:
                        raise Exception(
                            "Could not set executable bit in %s, error: %s" % (path, str(e))
                        )
                finally:
                    os.chdir(cwd)

        # Install the browsertime Node.js requirements.
        if not setup_helper.check_node_executables_valid():
            return 1

        if 'GECKODRIVER_BASE_URL' not in os.environ:
            # Use custom `geckodriver` with pre-release Android support.
            url = 'https://github.com/ncalexan/geckodriver/releases/download/v0.24.0-android/'
            os.environ['GECKODRIVER_BASE_URL'] = url

        self.log(
            logging.INFO,
            'browsertime',
            {'package_json': mozpath.join(BROWSERTIME_ROOT, 'package.json')},
            'Installing browsertime node module from {package_json}')
        status = setup_helper.package_setup(
            BROWSERTIME_ROOT,
            'browsertime',
            should_clobber=should_clobber,
            no_optional=automation)

        if status:
            return status

        if automation:
            return 0

        return self.check()
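
The Linux ImageMagick check above only needs to confirm that the required programs resolve on PATH; the shutil_which fallback exists for Python 2. A sketch of the check on Python 3 follows (the function name is illustrative):

from shutil import which


def imagemagick_available(programs=('compare', 'convert', 'mogrify')):
    # Every required ImageMagick program must be found on PATH.
    return all(which(program) for program in programs)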
Example No. 6
    def setup(self, should_clobber=False):
        r'''Install browsertime and visualmetrics.py requirements.'''

        from mozbuild.action.tooltool import unpack_file
        from mozbuild.artifact_cache import ArtifactCache
        sys.path.append(mozpath.join(self.topsrcdir, 'tools', 'lint',
                                     'eslint'))
        import setup_helper

        if host_platform().startswith('linux'):
            # On Linux ImageMagick needs to be installed manually, and `mach bootstrap` doesn't
            # do that (yet).  Provide some guidance.
            import which
            im_programs = ('compare', 'convert', 'mogrify')
            try:
                for im_program in im_programs:
                    which.which(im_program)
            except which.WhichError as e:
                print(
                    'Error: {} On Linux, ImageMagick must be on the PATH. '
                    'Install ImageMagick manually and try again (or update PATH). '
                    'On Ubuntu and Debian, try `sudo apt-get install imagemagick`. '
                    'On Fedora, try `sudo dnf install imagemagick`. '
                    'On CentOS, try `sudo yum install imagemagick`.'.format(e))
                return 1

        # Download the visualmetrics.py requirements.
        artifact_cache = ArtifactCache(self.artifact_cache_path,
                                       log=self.log,
                                       skip_cache=False)

        fetches = host_fetches[host_platform()]
        for tool, fetch in sorted(fetches.items()):
            archive = artifact_cache.fetch(fetch['url'])
            # TODO: assert type, verify sha256 (and size?).

            if fetch.get('unpack', True):
                cwd = os.getcwd()
                try:
                    mkdir(self.state_path)
                    os.chdir(self.state_path)
                    self.log(logging.INFO, 'browsertime', {'path': archive},
                             'Unpacking temporary location {path}')
                    unpack_file(archive)
                finally:
                    os.chdir(cwd)

        # Install the browsertime Node.js requirements.
        if not setup_helper.check_node_executables_valid():
            return 1

        if 'GECKODRIVER_BASE_URL' not in os.environ:
            # Use custom `geckodriver` with pre-release Android support.
            url = 'https://github.com/ncalexan/geckodriver/releases/download/v0.24.0-android/'
            os.environ['GECKODRIVER_BASE_URL'] = url

        self.log(
            logging.INFO, 'browsertime',
            {'package_json': mozpath.join(BROWSERTIME_ROOT, 'package.json')},
            'Installing browsertime node module from {package_json}')
        status = setup_helper.package_setup(BROWSERTIME_ROOT,
                                            'browsertime',
                                            should_clobber=should_clobber)

        if status:
            return status

        return self.check()
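
The GECKODRIVER_BASE_URL handling in Examples No. 5 and No. 6 only sets the variable when the environment does not already provide one; os.environ.setdefault expresses the same default-if-unset behaviour in one line:

import os

# An existing GECKODRIVER_BASE_URL in the environment always wins.
url = 'https://github.com/ncalexan/geckodriver/releases/download/v0.24.0-android/'
os.environ.setdefault('GECKODRIVER_BASE_URL', url)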
Example No. 7
    def artifact_toolchain(self,
                           verbose=False,
                           cache_dir=None,
                           skip_cache=False,
                           from_build=(),
                           tooltool_manifest=None,
                           authentication_file=None,
                           tooltool_url=None,
                           no_unpack=False,
                           retry=0,
                           artifact_manifest=None,
                           files=()):
        '''Download, cache and install pre-built toolchains.
        '''
        from mozbuild.artifacts import ArtifactCache
        from mozbuild.action.tooltool import (
            FileRecord,
            open_manifest,
            unpack_file,
        )
        from requests.adapters import HTTPAdapter
        import redo
        import requests

        from taskgraph.util.taskcluster import get_artifact_url

        self._set_log_level(verbose)
        # Normally, we'd use self.log_manager.enable_unstructured(),
        # but that enables all logging, while we only really want tooltool's
        # and it also duplicates structured log output.
        # So we manually do what it does, and limit that to the tooltool
        # logger.
        if self.log_manager.terminal_handler:
            logging.getLogger('mozbuild.action.tooltool').addHandler(
                self.log_manager.terminal_handler)
            logging.getLogger('redo').addHandler(
                self.log_manager.terminal_handler)
            self.log_manager.terminal_handler.addFilter(
                self.log_manager.structured_filter)
        if not cache_dir:
            cache_dir = os.path.join(self._mach_context.state_dir,
                                     'toolchains')

        tooltool_url = (tooltool_url
                        or 'https://tooltool.mozilla-releng.net').rstrip('/')

        cache = ArtifactCache(cache_dir=cache_dir,
                              log=self.log,
                              skip_cache=skip_cache)

        if authentication_file:
            with open(authentication_file, 'rb') as f:
                token = f.read().strip()

            class TooltoolAuthenticator(HTTPAdapter):
                def send(self, request, *args, **kwargs):
                    request.headers['Authorization'] = \
                        'Bearer {}'.format(token)
                    return super(TooltoolAuthenticator,
                                 self).send(request, *args, **kwargs)

            cache._download_manager.session.mount(tooltool_url,
                                                  TooltoolAuthenticator())

        class DownloadRecord(FileRecord):
            def __init__(self, url, *args, **kwargs):
                super(DownloadRecord, self).__init__(*args, **kwargs)
                self.url = url
                self.basename = self.filename

            def fetch_with(self, cache):
                self.filename = cache.fetch(self.url)
                return self.filename

            def validate(self):
                if self.size is None and self.digest is None:
                    return True
                return super(DownloadRecord, self).validate()

        class ArtifactRecord(DownloadRecord):
            def __init__(self, task_id, artifact_name):
                for _ in redo.retrier(attempts=retry + 1, sleeptime=60):
                    cot = cache._download_manager.session.get(
                        get_artifact_url(task_id,
                                         'public/chain-of-trust.json'))
                    if cot.status_code >= 500:
                        continue
                    cot.raise_for_status()
                    break
                else:
                    cot.raise_for_status()

                digest = algorithm = None
                data = json.loads(cot.content)
                for algorithm, digest in (data.get('artifacts',
                                                   {}).get(artifact_name,
                                                           {}).items()):
                    pass

                name = os.path.basename(artifact_name)
                artifact_url = get_artifact_url(
                    task_id,
                    artifact_name,
                    use_proxy=not artifact_name.startswith('public/'))
                super(ArtifactRecord, self).__init__(artifact_url,
                                                     name,
                                                     None,
                                                     digest,
                                                     algorithm,
                                                     unpack=True)

        records = OrderedDict()
        downloaded = []

        if tooltool_manifest:
            manifest = open_manifest(tooltool_manifest)
            for record in manifest.file_records:
                url = '{}/{}/{}'.format(tooltool_url, record.algorithm,
                                        record.digest)
                records[record.filename] = DownloadRecord(
                    url,
                    record.filename,
                    record.size,
                    record.digest,
                    record.algorithm,
                    unpack=record.unpack,
                    version=record.version,
                    visibility=record.visibility)

        if from_build:
            if 'MOZ_AUTOMATION' in os.environ:
                self.log(
                    logging.ERROR, 'artifact', {},
                    'Do not use --from-build in automation; all dependencies '
                    'should be determined in the decision task.')
                return 1
            from taskgraph.optimize import IndexSearch
            from taskgraph.parameters import Parameters
            from taskgraph.generator import load_tasks_for_kind
            params = Parameters(
                level=os.environ.get('MOZ_SCM_LEVEL', '3'),
                strict=False,
            )

            root_dir = mozpath.join(self.topsrcdir, 'taskcluster/ci')
            toolchains = load_tasks_for_kind(params,
                                             'toolchain',
                                             root_dir=root_dir)

            aliases = {}
            for t in toolchains.values():
                alias = t.attributes.get('toolchain-alias')
                if alias:
                    aliases['toolchain-{}'.format(alias)] = \
                        t.task['metadata']['name']

            for b in from_build:
                user_value = b

                if not b.startswith('toolchain-'):
                    b = 'toolchain-{}'.format(b)

                task = toolchains.get(aliases.get(b, b))
                if not task:
                    self.log(
                        logging.ERROR, 'artifact', {'build': user_value},
                        'Could not find a toolchain build named `{build}`')
                    return 1

                task_id = IndexSearch().should_replace_task(
                    task, {}, task.optimization.get('index-search', []))
                artifact_name = task.attributes.get('toolchain-artifact')
                if task_id in (True, False) or not artifact_name:
                    self.log(
                        logging.ERROR, 'artifact', {'build': user_value},
                        'Could not find artifacts for a toolchain build '
                        'named `{build}`. Local commits and other changes '
                        'in your checkout may cause this error. Try '
                        'updating to a fresh checkout of mozilla-central '
                        'to use artifact builds.')
                    return 1

                record = ArtifactRecord(task_id, artifact_name)
                records[record.filename] = record

        # Handle the list of files of the form path@task-id on the command
        # line. Each of those gives a path to an artifact to download.
        for f in files:
            if '@' not in f:
                self.log(logging.ERROR, 'artifact', {},
                         'Expected a list of files of the form path@task-id')
                return 1
            name, task_id = f.rsplit('@', 1)
            record = ArtifactRecord(task_id, name)
            records[record.filename] = record

        for record in records.itervalues():
            self.log(logging.INFO, 'artifact', {'name': record.basename},
                     'Downloading {name}')
            valid = False
            # sleeptime is 60 per retry.py, used by tooltool_wrapper.sh
            for attempt, _ in enumerate(
                    redo.retrier(attempts=retry + 1, sleeptime=60)):
                try:
                    record.fetch_with(cache)
                except (requests.exceptions.HTTPError,
                        requests.exceptions.ChunkedEncodingError,
                        requests.exceptions.ConnectionError) as e:

                    if isinstance(e, requests.exceptions.HTTPError):
                        # The relengapi proxy likes to return error 400 bad request
                        # which is unlikely to be due to our (simple) GET
                        # being borked.
                        status = e.response.status_code
                        should_retry = status >= 500 or status == 400
                    else:
                        should_retry = True

                    if should_retry or attempt < retry:
                        level = logging.WARN
                    else:
                        level = logging.ERROR
                    # e.message is not always a string, so convert it first.
                    self.log(level, 'artifact', {}, str(e.message))
                    if not should_retry:
                        break
                    if attempt < retry:
                        self.log(logging.INFO, 'artifact', {},
                                 'Will retry in a moment...')
                    continue
                try:
                    valid = record.validate()
                except Exception:
                    pass
                if not valid:
                    os.unlink(record.filename)
                    if attempt < retry:
                        self.log(
                            logging.INFO, 'artifact', {},
                            'Corrupt download. Will retry in a moment...')
                    continue

                downloaded.append(record)
                break

            if not valid:
                self.log(logging.ERROR, 'artifact', {'name': record.basename},
                         'Failed to download {name}')
                return 1

        artifacts = {} if artifact_manifest else None

        for record in downloaded:
            local = os.path.join(os.getcwd(), record.basename)
            if os.path.exists(local):
                os.unlink(local)
            # unpack_file needs the file with its final name to work
            # (https://github.com/mozilla/build-tooltool/issues/38), so we
            # need to copy it, even though we remove it later. Use hard links
            # when possible.
            try:
                os.link(record.filename, local)
            except Exception:
                shutil.copy(record.filename, local)
            # Keep a sha256 of each downloaded file, for the chain-of-trust
            # validation.
            if artifact_manifest is not None:
                with open(local, 'rb') as fh:
                    h = hashlib.sha256()
                    while True:
                        data = fh.read(1024 * 1024)
                        if not data:
                            break
                        h.update(data)
                artifacts[record.url] = {
                    'sha256': h.hexdigest(),
                }
            if record.unpack and not no_unpack:
                unpack_file(local)
                os.unlink(local)

        if not downloaded:
            self.log(logging.ERROR, 'artifact', {}, 'Nothing to download')
            if files:
                return 1

        if artifacts:
            ensureParentDir(artifact_manifest)
            with open(artifact_manifest, 'w') as fh:
                json.dump(artifacts, fh, indent=4, sort_keys=True)

        return 0
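
The TooltoolAuthenticator in this example is a small requests HTTPAdapter that injects a Bearer token into every request sent through the session for the tooltool prefix. A standalone sketch of the same pattern follows; the class name and the placeholder token are illustrative.

import requests
from requests.adapters import HTTPAdapter


class BearerTokenAdapter(HTTPAdapter):
    def __init__(self, token, **kwargs):
        super(BearerTokenAdapter, self).__init__(**kwargs)
        self._token = token

    def send(self, request, *args, **kwargs):
        # Attach the token before delegating to the normal adapter logic.
        request.headers['Authorization'] = 'Bearer {}'.format(self._token)
        return super(BearerTokenAdapter, self).send(request, *args, **kwargs)


session = requests.Session()
session.mount('https://tooltool.mozilla-releng.net',
              BearerTokenAdapter('placeholder-token'))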