Example #1
async def sign_geckodriver(config, sign_config, all_paths):
    """Sign geckodriver.

    Args:
        config (dict): the running config
        sign_config (dict): the signing config
        all_paths (list): list of App objects

    Raises:
        IScriptError: on error.

    """
    identity = sign_config["identity"]
    keychain = sign_config["signing_keychain"]
    sign_command = _get_sign_command(identity, keychain, sign_config)

    for app in all_paths:
        app.check_required_attrs(["orig_path", "parent_dir", "artifact_prefix"])
        app.target_tar_path = "{}/{}{}".format(config["artifact_dir"], app.artifact_prefix, app.orig_path.split(app.artifact_prefix)[1])
        file_ = "geckodriver"
        path = os.path.join(app.parent_dir, file_)
        if not os.path.exists(path):
            raise IScriptError(f"No such file {path}!")
        await retry_async(
            run_command,
            args=[sign_command + [file_]],
            kwargs={"cwd": app.parent_dir, "exception": IScriptError, "output_log_on_exception": True},
            retry_exceptions=(IScriptError,),
        )
        env = deepcopy(os.environ)
        # https://superuser.com/questions/61185/why-do-i-get-files-like-foo-in-my-tarball-on-os-x
        env["COPYFILE_DISABLE"] = "1"
        makedirs(os.path.dirname(app.target_tar_path))
        await run_command(
            ["tar", _get_tar_create_options(app.target_tar_path), app.target_tar_path, file_], cwd=app.parent_dir, env=env, exception=IScriptError
        )
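# ``_get_sign_command`` is defined elsewhere in the repo; a minimal sketch of what
# it plausibly returns, assuming macOS ``codesign`` with a named identity and
# keychain (the exact flags are an assumption, not the repo's actual helper):
def _get_sign_command(identity, keychain, sign_config):
    # Base codesign invocation; ``sign_geckodriver`` appends the file to sign.
    # ``sign_config`` may toggle extra flags (e.g. hardened runtime); omitted here.
    return ["codesign", "-s", identity, "-fv", "--keychain", keychain]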
Example #2
async def log_outgoing(config, task, repo_path):
    """Log current changes that will be pushed (or would have been, if dry-run).

    Args:
        config (dict): the running config
        task (dict): the running task
        repo_path (str): the source repo path

    Returns:
        int: the number of outgoing changesets

    """
    log.info("Outgoing changesets...")

    repo = Repo(repo_path)
    branch = get_branch(task, "master")

    upstream_to_local_branch_interval = "{}..{}".format(
        _get_upstream_branch_name(branch), branch)
    log.debug(
        "Checking the number of changesets between these 2 references: {}".
        format(upstream_to_local_branch_interval))
    num_changesets = len(
        list(repo.iter_commits(upstream_to_local_branch_interval)))
    diff = repo.git.diff(branch)

    if diff:
        path = os.path.join(config["artifact_dir"], "public", "logs",
                            "outgoing.diff")
        makedirs(os.path.dirname(path))
        with open(path, "w") as fh:
            fh.write(diff)

    log.info("Found {} new changesets".format(num_changesets))
    return num_changesets
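# ``_get_upstream_branch_name`` is not shown; a hedged sketch, assuming it maps a
# local branch onto its remote-tracking ref (hypothetical helper body):
def _get_upstream_branch_name(branch):
    # e.g. "master" -> "origin/master", so "origin/master..master" lists outgoing commits
    return "origin/{}".format(branch)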
Example #3
def config(tmpdir):
    config = get_default_config()
    config["work_dir"] = os.path.join(tmpdir, "work")
    config["artifact_dir"] = os.path.join(tmpdir, "artifact")
    makedirs(config["work_dir"])
    makedirs(config["artifact_dir"])
    yield config
Example #4
async def test_sign_app(mocker, tmpdir, sign_with_entitlements, has_clearkey):
    """Render ``sign_app`` noop and verify we have complete code coverage.

    """
    key_config = {
        "identity": "id",
        "signing_keychain": "keychain",
        "sign_with_entitlements": sign_with_entitlements,
    }
    entitlements_path = os.path.join(tmpdir, "entitlements")
    app_path = os.path.join(tmpdir, "foo.app")

    contents_dir = os.path.join(app_path, "Contents")
    dir1 = os.path.join(contents_dir, "MacOS")
    dir2 = os.path.join(dir1, "foo.app", "Contents", "MacOS")
    ignore_dir = os.path.join(contents_dir, "ignoreme")
    for dir_ in (dir1, dir2, ignore_dir):
        makedirs(dir_)
    for dir_ in (dir1, dir2):
        touch(os.path.join(dir_, "other"))
        touch(os.path.join(dir_, "main"))
    touch(os.path.join(contents_dir, "dont_sign"))
    if has_clearkey:
        dir_ = os.path.join(contents_dir, "Resources/gmp-clearkey/0.1")
        file_ = "libclearkey.dylib"
        makedirs(dir_)
        touch(os.path.join(dir_, file_))
    mocker.patch.object(mac, "run_command", new=noop_async)
    mocker.patch.object(mac, "get_bundle_executable", return_value="main")
    await mac.sign_app(key_config, app_path, entitlements_path)
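# ``noop_async`` is the test double patched in above; a minimal sketch (assumed to
# match the repo's test helper):
async def noop_async(*args, **kwargs):
    # Accept any call signature and do nothing, so the patched coroutine stays awaitable.
    pass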
Example #5
async def sign_langpacks(config, key_config, all_paths):
    """Signs langpacks that are specified in all_paths.

    Raises:
        IScriptError if we don't have any valid language packs to sign in any path.

    """
    for app in all_paths:
        app.check_required_attrs(["orig_path", "formats", "artifact_prefix"])
        if not {"autograph_langpack"} & set(app.formats):
            raise IScriptError(
                f"{app.formats} does not contain 'autograph_langpack'")
        app.target_tar_path = "{}/{}{}".format(
            config["artifact_dir"],
            app.artifact_prefix,
            app.orig_path.split(app.artifact_prefix)[1],
        )

        ext_id = langpack_id(app)
        log.info("Identified {} as extension id: {}".format(app.orig_path, ext_id))
        makedirs(os.path.dirname(app.target_tar_path))
        await sign_file_with_autograph(
            key_config,
            app.orig_path,
            "autograph_langpack",
            to=app.target_tar_path,
            extension_id=ext_id,
        )
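# ``langpack_id`` is defined elsewhere; a hedged sketch, assuming the extension id
# lives at ``applications.gecko.id`` in the langpack's manifest.json (the manifest
# layout here is an assumption, not the repo's validated implementation):
import json
import zipfile

def langpack_id(app):
    with zipfile.ZipFile(app.orig_path, mode="r") as zf:
        with zf.open("manifest.json") as fh:
            manifest = json.load(fh)
    return manifest["applications"]["gecko"]["id"]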
Example #6
async def test_copy_pkgs_to_artifact_dir(tmpdir, artifact_prefix):
    """``copy_pkgs_to_artifact_dir`` creates all needed parent directories and
    copies pkg artifacts successfully.

    """
    num_pkgs = 3
    work_dir = os.path.join(str(tmpdir), "work")
    artifact_dir = os.path.join(str(tmpdir), "artifact")
    config = {"artifact_dir": artifact_dir, "work_dir": work_dir}
    all_paths = []
    expected_paths = []
    for i in range(num_pkgs):
        app = mac.App(
            pkg_path=os.path.join(work_dir, str(i), "target.pkg".format(i)),
            artifact_prefix=artifact_prefix,
            orig_path=os.path.join(
                work_dir, f"cot/taskId/{artifact_prefix}build/{i}/target-{i}.tar.gz"
            ),
        )
        expected_path = os.path.join(
            artifact_dir, f"{artifact_prefix}build/{i}/target-{i}.pkg"
        )
        expected_paths.append(expected_path)
        makedirs(os.path.dirname(app.pkg_path))
        with open(app.pkg_path, "w") as fh:
            fh.write(expected_path)
        all_paths.append(app)

    await mac.copy_pkgs_to_artifact_dir(config, all_paths)
    for i in range(num_pkgs):
        expected_path = expected_paths[i]
        assert os.path.exists(expected_path)
        assert expected_path == all_paths[i].target_pkg_path
        with open(expected_path) as fh:
            assert fh.read() == expected_path
Example #7
async def log_outgoing(config, task, repo_path):
    """Run `hg out` against the current revision in the repository.

    This logs current changes that will be pushed (or would have been, if dry-run).

    Args:
        config (dict): the running config
        task (dict): the running task
        repo_path (str): the source repo path

    Raises:
        FailedSubprocess: on failure

    """
    dest_repo = get_source_repo(task)
    log.info("outgoing changesets..")
    output = await run_hg_command(
        config,
        "out",
        "-vp",
        "-r",
        ".",
        dest_repo,
        repo_path=repo_path,
        return_output=True,
    )
    if output:
        path = os.path.join(config["artifact_dir"], "public", "logs", "outgoing.diff")
        makedirs(os.path.dirname(path))
        with open(path, "w") as fh:
            fh.write(output)
Example #8
async def _maybe_bump_l10n(config, task, repo_path):
    """Bump l10n changesets, if the task payload has l10n bump info, and log the diff."""
    if get_l10n_bump_info(task, raise_on_empty=False):
        await l10n_bump(config, task, repo_path)
        output = await run_hg_command(config, "log", "--patch", "--verbose", "-r", ".", repo_path=repo_path, return_output=True, expected_exit_codes=(0, 1))
        path = os.path.join(config["artifact_dir"], "public", "logs", "l10n_bump.diff")
        makedirs(os.path.dirname(path))
        with open(path, "w") as fh:
            fh.write(output)
Example #9
async def notarize_1_behavior(config, task):
    """Sign and submit all mac apps for notarization.

    This task will not wait for the notarization to finish. Instead, it
    will upload all signed apps and a uuid manifest.

    Args:
        config (dict): the running configuration
        task (dict): the running task

    Raises:
        IScriptError: on fatal error.

    """
    work_dir = config["work_dir"]

    sign_config = get_sign_config(config, task, base_key="mac_config")
    entitlements_path = await download_entitlements_file(config, sign_config, task)
    path_attrs = ["app_path"]

    all_paths = get_app_paths(config, task)
    langpack_apps = filter_apps(all_paths, fmt="autograph_langpack")
    if langpack_apps:
        await sign_langpacks(config, sign_config, langpack_apps)
        all_paths = filter_apps(all_paths, fmt="autograph_langpack", inverted=True)

    # app
    await extract_all_apps(config, all_paths)
    await unlock_keychain(sign_config["signing_keychain"], sign_config["keychain_password"])
    await update_keychain_search_path(config, sign_config["signing_keychain"])
    await sign_all_apps(config, sign_config, entitlements_path, all_paths)

    # pkg
    if sign_config["create_pkg"]:
        path_attrs.append("pkg_path")
        # Unlock keychain again in case it's locked since previous unlock
        await unlock_keychain(sign_config["signing_keychain"], sign_config["keychain_password"])
        await update_keychain_search_path(config, sign_config["signing_keychain"])
        await create_pkg_files(config, sign_config, all_paths)

    log.info("Submitting for notarization.")
    if sign_config["notarize_type"] == "multi_account":
        await create_all_notarization_zipfiles(all_paths, path_attrs=path_attrs)
        poll_uuids = await wrap_notarization_with_sudo(config, sign_config, all_paths, path_attr="zip_path")
    else:
        zip_path = await create_one_notarization_zipfile(work_dir, all_paths, sign_config, path_attrs)
        poll_uuids = await notarize_no_sudo(work_dir, sign_config, zip_path)

    # create uuid_manifest.json
    uuids_path = "{}/public/uuid_manifest.json".format(config["artifact_dir"])
    makedirs(os.path.dirname(uuids_path))
    with open(uuids_path, "w") as fh:
        json.dump(sorted(poll_uuids.keys()), fh)

    await tar_apps(config, all_paths)
    await copy_pkgs_to_artifact_dir(config, all_paths)

    log.info("Done signing apps and submitting them for notarization.")
Example #10
async def test_tar_apps(mocker, tmpdir, raises, artifact_prefix):
    """``tar_apps`` runs tar concurrently for each ``App``, creating the
    app ``target_tar_path``s, and raises any exceptions hit along the way.

    """

    async def fake_raise_future_exceptions(futures):
        await asyncio.wait(futures)
        if raises:
            raise IScriptError("foo")

    work_dir = os.path.join(tmpdir, "work")
    config = {"artifact_dir": os.path.join(tmpdir, "artifact")}
    all_paths = []
    expected = []
    for i in range(3):
        parent_dir = os.path.join(work_dir, str(i))
        app_name = "{}.app".format(i)
        makedirs(parent_dir)
        # touch parent_dir/app_name
        with open(os.path.join(parent_dir, app_name), "w") as fh:
            fh.write("foo")
        orig_path = os.path.join(
            work_dir, "cot", "foo", artifact_prefix, "build", str(i), f"{i}.tar.gz"
        )
        # overload pkg_path to track i
        all_paths.append(
            mac.App(
                parent_dir=parent_dir,
                app_name=app_name,
                app_path=os.path.join(parent_dir, app_name),
                artifact_prefix=artifact_prefix,
                orig_path=orig_path,
                pkg_path=str(i),
            )
        )
        expected.append(
            os.path.join(
                config["artifact_dir"],
                artifact_prefix,
                "build",
                "{}/{}.tar.gz".format(i, i),
            )
        )

    mocker.patch.object(mac, "run_command", new=noop_async)
    mocker.patch.object(
        mac, "raise_future_exceptions", new=fake_raise_future_exceptions
    )
    if raises:
        with pytest.raises(IScriptError):
            await mac.tar_apps(config, all_paths)
    else:
        assert await mac.tar_apps(config, all_paths) is None
        assert [x.target_tar_path for x in all_paths] == expected
        for path in expected:
            assert os.path.isdir(os.path.dirname(path))
Example #11
def config(tmpdir):
    _config = deepcopy(dict(DEFAULT_CONFIG))
    with open(os.path.join(os.path.dirname(__file__), "data", "good.json")) as fh:
        _config.update(json.load(fh))
    _config["artifact_dir"] = os.path.join(str(tmpdir), "artifacts")
    _config["log_dir"] = os.path.join(str(tmpdir), "logs")
    _config["work_dir"] = os.path.join(str(tmpdir), "work")
    for name in ("artifact_dir", "log_dir", "work_dir"):
        makedirs(_config[name])
    yield _config
Example #12
def config(tmpdir):
    config = get_default_config()
    config["work_dir"] = os.path.join(tmpdir, "work")
    config["artifact_dir"] = os.path.join(tmpdir, "artifact")
    makedirs(config["work_dir"])
    makedirs(config["artifact_dir"])
    config["git_ssh_config"] = {
        "default": {
            "emailAddress": "*****@*****.**"
        }
    }
    yield config
Example #13
def test_build_platform_dict(contents, mocker, bump_config, expected, tmpdir):
    """build_platform_dict builds a list of platforms per locale, given
    the ignore_config and platform_configs in the l10n_bump_config.

    """
    for pc in bump_config["platform_configs"]:
        path = os.path.join(tmpdir, pc["path"])
        makedirs(os.path.dirname(path))
        with open(path, "w") as fh:
            fh.write(contents.pop(0))

    assert l10n.build_platform_dict(bump_config, tmpdir) == expected
Example #14
async def extract_all_apps(config, all_paths):
    """Extract all the apps into their own directories.

    Args:
        config (dict): the running config
        all_paths (list): a list of ``App`` objects with their ``orig_path`` set

    Raises:
        IScriptError: on failure

    """
    log.info("Extracting all apps")
    futures = []
    work_dir = config["work_dir"]
    unpack_dmg = os.path.join(os.path.dirname(__file__), "data",
                              "unpack-diskimage")
    for counter, app in enumerate(all_paths):
        app.check_required_attrs(["orig_path"])
        app.parent_dir = os.path.join(work_dir, str(counter))
        rm(app.parent_dir)
        makedirs(app.parent_dir)
        if app.orig_path.endswith((".tar.bz2", ".tar.gz", ".tgz")):
            futures.append(
                asyncio.ensure_future(
                    run_command(
                        ["tar", "xf", app.orig_path],
                        cwd=app.parent_dir,
                        exception=IScriptError,
                    )))
        elif app.orig_path.endswith(".dmg"):
            unpack_mountpoint = os.path.join(
                "/tmp", f"{config.get('dmg_prefix', 'dmg')}-{counter}-unpack")
            futures.append(
                asyncio.ensure_future(
                    run_command(
                        [
                            unpack_dmg, app.orig_path, unpack_mountpoint,
                            app.parent_dir
                        ],
                        cwd=app.parent_dir,
                        exception=IScriptError,
                        log_level=logging.DEBUG,
                    )))
        else:
            raise IScriptError(f"unknown file type {app.orig_path}")
    await raise_future_exceptions(futures)
    if app.orig_path.endswith(".dmg"):
        # nuke the softlink to /Applications
        for counter, app in enumerate(all_paths):
            rm(os.path.join(app.parent_dir, " "))
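# ``rm`` above is the scriptworker-style "delete if present" helper; a minimal
# sketch of the assumed implementation:
import os
import shutil

def rm(path):
    # Remove a file or directory tree if it exists; silently no-op otherwise.
    if path and os.path.exists(path):
        if os.path.isdir(path):
            shutil.rmtree(path)
        else:
            os.remove(path)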
Example #15
def test_run_generate_precomplete(tmp_path, num_precomplete, raises, mocker):
    mocker.patch.object(autograph, "generate_precomplete", new=noop_sync)
    work_dir = tmp_path / "work"
    config = {"artifact_dir": tmp_path / "artifacts"}
    for i in range(num_precomplete):
        path = os.path.join(work_dir, "foo", str(i))
        makedirs(path)
        with open(os.path.join(path, "precomplete"), "w") as fh:
            fh.write("blah")
    if raises:
        with pytest.raises(IScriptError):
            autograph._run_generate_precomplete(config, work_dir)
    else:
        autograph._run_generate_precomplete(config, work_dir)
Example #16
def test_makedirs(path, raises, tmpdir):
    """``makedirs`` creates ``path`` and all missing parent directories if it is a
    nonexistent directory. If ``path`` is ``None``, it is noop. And if ``path``
    is an existing file, it raises ``TaskError``.

    """
    if raises:
        with pytest.raises(TaskError):
            utils.makedirs(path)
    else:
        if path and "%s" in path:
            path = path % tmpdir
        utils.makedirs(path)
        if path:
            assert os.path.isdir(path)
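# The behavior asserted above maps onto a small wrapper around ``os.makedirs``;
# a hedged sketch of the assumed ``makedirs`` implementation:
import os

from scriptworker.exceptions import TaskError  # assumed import path

def makedirs(path):
    # No-op on falsy paths; create missing directories; refuse to clobber files.
    if path:
        if not os.path.exists(path):
            os.makedirs(path)
        elif not os.path.isdir(os.path.realpath(path)):
            raise TaskError(f"makedirs: {path} already exists and is not a directory!")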
Example #17
def _run_generate_precomplete(config, app_dir):
    """Regenerate `precomplete` file with widevine sig paths for complete mar."""
    log.info("Generating `precomplete` file...")
    path = _ensure_one_precomplete(app_dir, "before")
    with open(path, "r") as fh:
        before = fh.readlines()
    generate_precomplete(os.path.dirname(path))
    path = _ensure_one_precomplete(app_dir, "after")
    with open(path, "r") as fh:
        after = fh.readlines()
    # Create diff file
    makedirs(os.path.join(config["artifact_dir"], "public", "logs"))
    diff_path = os.path.join(config["artifact_dir"], "public", "logs",
                             "precomplete.diff")
    with open(diff_path, "w") as fh:
        for line in difflib.ndiff(before, after):
            fh.write(line)
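# ``_ensure_one_precomplete`` is assumed to locate exactly one ``precomplete`` file
# under ``app_dir`` and raise otherwise (matching Example #15's expectations); a
# minimal sketch, with the parameter name ``adj`` taken from the call sites above:
import glob
import os

from iscript.exceptions import IScriptError  # assumed import path

def _ensure_one_precomplete(app_dir, adj):
    paths = glob.glob(os.path.join(app_dir, "**", "precomplete"), recursive=True)
    if len(paths) != 1:
        raise IScriptError("Expected 1 precomplete file {} signing; found {}".format(adj, len(paths)))
    return paths[0]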
Example #18
async def sign_widevine_dir(config, key_config, app_dir):
    """Sign the internals of a tarfile with the widevine key.

    Extract the entire tarball, but only sign a handful of files (see
    `_WIDEVINE_BLESSED_FILENAMES` and `_WIDEVINE_UNBLESSED_FILENAMES`).
    The blessed files should be signed with the `widevine_blessed` format.
    Then recreate the tarball.

    Ideally we would be able to append the sigfiles to the original tarball,
    but that's not possible with compressed tarballs.

    Args:
        config (dict): the running config
        key_config (dict): the config for this signing key
        app_dir (str): the .app directory to sign

    Returns:
        str: the path to the signed archive

    """
    log.info(f"Signing widevine in {app_dir}...")
    all_files = []
    for top_dir, dirs, files in os.walk(app_dir):
        for file_ in files:
            all_files.append(os.path.join(top_dir, file_))
    files_to_sign = _get_widevine_signing_files(all_files)
    log.debug("Widevine files to sign: %s", files_to_sign)
    if files_to_sign:
        tasks = []
        for from_, fmt in files_to_sign.items():
            to = _get_mac_sigpath(from_)
            log.debug("Adding %s to the sigfile paths...", to)
            makedirs(os.path.dirname(to))
            tasks.append(
                asyncio.ensure_future(
                    sign_widevine_with_autograph(key_config,
                                                 from_,
                                                 "blessed" in fmt,
                                                 to=to)))
            all_files.append(to)
        await raise_future_exceptions(tasks)
        remove_extra_files(app_dir, all_files)
        # Regenerate the `precomplete` file, which is used for cleanup before
        # applying a complete mar.
        _run_generate_precomplete(config, app_dir)
    return app_dir
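# ``_get_widevine_signing_files`` maps candidate paths onto autograph formats; a
# hedged sketch, with the blessed/unblessed filename sets stubbed as assumptions:
import os

_WIDEVINE_BLESSED_FILENAMES = ("plugin-container",)    # assumed contents
_WIDEVINE_UNBLESSED_FILENAMES = ("clearkey.dylib",)    # assumed contents

def _get_widevine_signing_files(file_list):
    files = {}
    for path in file_list:
        base = os.path.basename(path)
        if base in _WIDEVINE_BLESSED_FILENAMES:
            files[path] = "widevine_blessed"
        elif base in _WIDEVINE_UNBLESSED_FILENAMES:
            files[path] = "widevine"
    return files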
Example #19
async def tar_apps(config, all_paths):
    """Create tar artifacts from the app directories.

    These tar artifacts will live in the ``artifact_dir``

    Args:
        config (dict): the running config
        all_paths (list): the App objects to tar up

    Raises:
        IScriptError: on failure

    """
    log.info("Tarring up artifacts")
    futures = []
    for app in all_paths:
        app.check_required_attrs(
            ["orig_path", "parent_dir", "app_path", "artifact_prefix"])
        # If we downloaded public/build/locale/target.tar.gz, then write to
        # artifact_dir/public/build/locale/target.tar.gz
        app.target_tar_path = "{}/{}{}".format(
            config["artifact_dir"],
            app.artifact_prefix,
            app.orig_path.split(app.artifact_prefix)[1],
        ).replace(".dmg", ".tar.gz")
        makedirs(os.path.dirname(app.target_tar_path))
        cwd = os.path.dirname(app.app_path)
        env = deepcopy(os.environ)
        # https://superuser.com/questions/61185/why-do-i-get-files-like-foo-in-my-tarball-on-os-x
        env["COPYFILE_DISABLE"] = "1"
        futures.append(
            asyncio.ensure_future(
                run_command(
                    [
                        "tar",
                        _get_tar_create_options(app.target_tar_path),
                        app.target_tar_path,
                    ] + [
                        f for f in os.listdir(cwd)
                        if f != "[]" and not f.endswith(".pkg")
                    ],
                    cwd=cwd,
                    env=env,
                    exception=IScriptError,
                )))
    await raise_future_exceptions(futures)
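# ``_get_tar_create_options`` (shared with ``sign_geckodriver`` in Example #1) is
# assumed to pick the tar compression flag from the target suffix; a minimal sketch:
from iscript.exceptions import IScriptError  # assumed import path

def _get_tar_create_options(path):
    if path.endswith(".tar.gz"):
        return "czf"
    if path.endswith(".tar.bz2"):
        return "cjf"
    raise IScriptError(f"Unknown tarball suffix in {path}")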
Example #20
async def copy_pkgs_to_artifact_dir(config, all_paths):
    """Copy the files to the artifact directory.

    Args:
        config (dict): the running config
        all_paths (list): the list of App objects to sign pkg for

    """
    log.info("Copying pkgs to the artifact dir")
    for app in all_paths:
        app.check_required_attrs(["orig_path", "pkg_path", "artifact_prefix"])
        app.target_pkg_path = _get_pkg_name_from_tarball("{}/{}{}".format(
            config["artifact_dir"], app.artifact_prefix,
            app.orig_path.split(app.artifact_prefix)[1]))
        makedirs(os.path.dirname(app.target_pkg_path))
        log.debug("Copying %s to %s", app.pkg_path, app.target_pkg_path)
        copy2(app.pkg_path, app.target_pkg_path)
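# ``_get_pkg_name_from_tarball`` is assumed to swap a tarball suffix for ``.pkg``
# (Example #6's test expects ``target-N.tar.gz`` -> ``target-N.pkg``); a sketch:
from iscript.exceptions import IScriptError  # assumed import path

def _get_pkg_name_from_tarball(path):
    for ext in (".tar.gz", ".tar.bz2"):
        if path.endswith(ext):
            return path.replace(ext, ".pkg")
    raise IScriptError(f"Unknown tarball suffix in {path}")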
Example #21
async def copy_xpis_to_artifact_dir(config, all_paths):
    """Copy the xpi files to the artifact directory.

    This is specifically for ``notarize_3_behavior``, since ``sign_langpacks``
    already puts the signed xpis into the ``artifact_dir``.

    Args:
        config (dict): the running config
        all_paths (list): the list of App objects to copy xpis for

    """
    log.info("Copying xpis to the artifact dir")
    for app in all_paths:
        app.check_required_attrs(["orig_path", "artifact_prefix"])
        target_xpi_path = "{}/{}{}".format(config["artifact_dir"], app.artifact_prefix, app.orig_path.split(app.artifact_prefix)[1])
        makedirs(os.path.dirname(target_xpi_path))
        log.debug("Copying %s to %s", app.orig_path, target_xpi_path)
        copy2(app.orig_path, target_xpi_path)
Example #22
def main(event_loop=None):
    """Notarization poller entry point: get everything set up, then enter the main loop.

    Args:
        event_loop (asyncio.BaseEventLoop, optional): the event loop to use.
            If None, use ``asyncio.get_event_loop()``. Defaults to None.

    """
    event_loop = event_loop or asyncio.get_event_loop()
    config = get_config_from_cmdln(sys.argv[1:])
    update_logging_config(config)

    log.info("Notarization poller starting up at {} UTC".format(
        arrow.utcnow().format()))
    log.info("Worker FQDN: {}".format(socket.getfqdn()))
    rm(config["work_dir"])
    makedirs(config["work_dir"])
    running_tasks = RunTasks(config)

    async def _handle_sigterm():
        log.info("SIGTERM received; shutting down")
        await running_tasks.cancel()

    def _handle_sigusr1():
        """Stop accepting new tasks."""
        log.info("SIGUSR1 received; no more tasks will be taken")
        running_tasks.is_stopped = True

    event_loop.add_signal_handler(
        signal.SIGTERM, lambda: asyncio.ensure_future(_handle_sigterm()))
    event_loop.add_signal_handler(signal.SIGUSR1, _handle_sigusr1)

    try:
        event_loop.run_until_complete(running_tasks.invoke())
    except Exception:
        log.critical("Fatal exception", exc_info=1)
        raise
    finally:
        log.info("Notarization poller stopped at {} UTC".format(
            arrow.utcnow().format()))
        log.info("Worker FQDN: {}".format(socket.getfqdn()))
Example #23
async def download_file(url, abs_filename, log_url=None, chunk_size=128, timeout=300):
    """Download a file, async.

    Args:
        url (str): the url to download
        abs_filename (str): the path to download to
        log_url (str, optional): the url to log, should ``url`` contain sensitive information.
            If ``None``, use ``url``. Defaults to ``None``
        chunk_size (int, optional): the chunk size to read from the response
            at a time. Default is 128.
        timeout (int, optional): seconds to time out the request. Default is 300.

    """
    aiohttp_timeout = aiohttp.ClientTimeout(total=timeout)
    async with aiohttp.ClientSession(timeout=aiohttp_timeout) as session:
        log_url = log_url or url
        log.info("Downloading %s", log_url)
        parent_dir = os.path.dirname(abs_filename)
        async with session.get(url) as resp:
            if resp.status == 404:
                await _log_download_error(
                    resp, log_url, "404 downloading %(url)s: %(status)s; body=%(body)s"
                )
                raise Download404("{} status {}!".format(log_url, resp.status))
            elif resp.status != 200:
                await _log_download_error(
                    resp,
                    log_url,
                    "Failed to download %(url)s: %(status)s; body=%(body)s",
                )
                raise DownloadError(
                    "{} status {} is not 200!".format(log_url, resp.status)
                )
            makedirs(parent_dir)
            with open(abs_filename, "wb") as fd:
                while True:
                    chunk = await resp.content.read(chunk_size)
                    if not chunk:
                        break
                    fd.write(chunk)
        log.info("Done")
Example #24
async def test_sign_geckodriver(exists, mocker, tmpdir):
    """Render ``sign_geckodriver`` noop and verify we have complete code coverage.

    """
    key_config = {"identity": "id", "signing_keychain": "keychain"}
    config = {"artifact_dir": os.path.join(tmpdir, "artifacts")}
    app = mac.App(
        orig_path=os.path.join(tmpdir, "cot/task1/public/build/geckodriver.tar.gz"),
        parent_dir=os.path.join(tmpdir, "0"),
        artifact_prefix=os.path.join("public/build"),
    )

    makedirs(app.parent_dir)
    if exists:
        touch(os.path.join(app.parent_dir, "geckodriver"))
    mocker.patch.object(mac, "run_command", new=noop_async)
    if exists:
        await mac.sign_geckodriver(config, key_config, [app])
    else:
        with pytest.raises(IScriptError):
            await mac.sign_geckodriver(config, key_config, [app])
Example #25
def test_remove_extra_files(tmp_path):
    extra = ["a", "b/c"]
    good = ["d", "e/f"]
    work_dir = tmp_path
    all_files = []
    for f in extra + good:
        path = os.path.join(work_dir, f)
        makedirs(os.path.dirname(path))
        with open(path, "w") as fh:
            fh.write("x")
        if f in good:
            all_files.append(path)
    for f in good:
        assert os.path.exists(os.path.join(work_dir, f))
    output = autograph.remove_extra_files(work_dir, all_files)
    for f in extra:
        path = os.path.realpath(os.path.join(work_dir, f))
        assert path in output
        assert not os.path.exists(path)
    for f in good:
        assert os.path.exists(os.path.join(work_dir, f))
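# ``remove_extra_files`` is the helper under test; a hedged sketch matching the
# asserted behavior (delete files outside ``all_files``, return their realpaths):
import glob
import os

def remove_extra_files(top_dir, all_files):
    good_files = {os.path.realpath(f) for f in all_files}
    extra_files = []
    for path in glob.glob(os.path.join(top_dir, "**", "*"), recursive=True):
        realpath = os.path.realpath(path)
        if os.path.isfile(realpath) and realpath not in good_files:
            extra_files.append(realpath)
            os.remove(realpath)
    return extra_files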
Example #26
async def do_merge(config, task, repo_path):
    """Perform a merge day operation.

    This function takes its inputs from task's payload.

    Args:
        config (dict): the running config
        task (dict): the running task
        repo_path (str): the source directory

    Raises:
        TaskVerificationError: from get_merge_config if the payload is invalid.

    Returns:
        list: A list of the branches that need pushing, and the corresponding revision.
              This is unlike other actions as the list of outgoing changes is
              not related to the number of commands we've performed, but we do need
              to know which branches to push.
    """
    merge_config = get_merge_config(task)

    from_branch = merge_config.get("from_branch")
    to_branch = merge_config.get("to_branch")

    await run_hg_command(config,
                         "pull",
                         "https://hg.mozilla.org/mozilla-unified",
                         repo_path=repo_path)

    # Used if end_tag is set.
    await run_hg_command(config, "up", "-C", to_branch, repo_path=repo_path)
    to_fx_major_version = get_version("browser/config/version.txt",
                                      repo_path).major_number
    base_to_rev = await get_revision(config, repo_path, branch=to_branch)

    if from_branch:
        await run_hg_command(config,
                             "up",
                             "-C",
                             from_branch,
                             repo_path=repo_path)
        base_from_rev = await get_revision(config,
                                           repo_path,
                                           branch=from_branch)

    base_tag = merge_config.get("base_tag")
    if base_tag:
        base_tag = base_tag.format(major_version=get_version(
            "browser/config/version.txt", repo_path).major_number)
        tag_message = f"No bug - tagging {base_from_rev} with {base_tag} a=release DONTBUILD CLOSED TREE"
        await run_hg_command(config,
                             "tag",
                             "-m",
                             tag_message,
                             "-r",
                             base_from_rev,
                             "-f",
                             base_tag,
                             repo_path=repo_path)

    tagged_from_rev = await get_revision(config, repo_path, branch=".")

    # TODO This shouldn't be run on esr, according to old configs.
    # perhaps: hg push -r bookmark("release") esrNN
    # Perform the kludge-merge.
    if merge_config.get("merge_old_head", False):
        await run_hg_command(config,
                             "debugsetparents",
                             tagged_from_rev,
                             base_to_rev,
                             repo_path=repo_path)
        await run_hg_command(
            config,
            "commit",
            "-m",
            "Merge old head via |hg debugsetparents {} {}| CLOSED TREE DONTBUILD a=release"
            .format(tagged_from_rev, base_to_rev),
            repo_path=repo_path,
        )
        await preserve_tags(config, repo_path, to_branch)

    end_tag = merge_config.get("end_tag")  # tag the end of the to repo
    if end_tag:
        end_tag = end_tag.format(major_version=to_fx_major_version)
        tag_message = f"No bug - tagging {base_to_rev} with {end_tag} a=release DONTBUILD CLOSED TREE"
        await run_hg_command(config,
                             "tag",
                             "-m",
                             tag_message,
                             "-r",
                             base_to_rev,
                             "-f",
                             end_tag,
                             repo_path=repo_path)

    await apply_rebranding(config, repo_path, merge_config)

    diff_output = await run_hg_command(config,
                                       "diff",
                                       repo_path=repo_path,
                                       return_output=True)
    path = os.path.join(config["artifact_dir"], "public", "logs",
                        "{}.diff".format(to_branch))
    makedirs(os.path.dirname(path))
    with open(path, "w") as fh:
        fh.write(diff_output)

    await run_hg_command(
        config,
        "commit",
        "-m",
        "Update configs. IGNORE BROKEN CHANGESETS CLOSED TREE NO BUG a=release ba=release",
        repo_path=repo_path)
    push_revision_to = await get_revision(config, repo_path, branch=".")

    # Do we need to perform multiple pushes for the push stage? If so, return
    # what to do.
    desired_pushes = list()
    if merge_config.get("from_repo"):
        desired_pushes.append((merge_config["from_repo"], tagged_from_rev))
    if merge_config.get("to_repo"):
        desired_pushes.append((merge_config["to_repo"], push_revision_to))
    return desired_pushes
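# ``get_merge_config`` is assumed to pull the merge-day instructions out of the
# task payload, raising ``TaskVerificationError`` when absent; a minimal sketch
# (the ``merge_info`` payload key and import path are assumptions):
from scriptworker.exceptions import TaskVerificationError  # assumed import path

def get_merge_config(task):
    try:
        return task["payload"]["merge_info"]
    except KeyError as exc:
        raise TaskVerificationError("Couldn't find merge_info in the task payload!") from exc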
Example #27
async def fake_extract(_, all_paths):
    for app in all_paths:
        assert "autograph_langpack" not in app.formats
        app.parent_dir = f"{work_dir}/0"
        makedirs(app.parent_dir)
        touch(f"{app.parent_dir}/geckodriver")
Example #28
def touch(path):
    parent_dir = os.path.dirname(path)
    makedirs(parent_dir)
    with open(path, "w"):
        pass
Example #29
def start(self):
    """Start the task."""
    rm(self.task_dir)
    makedirs(self.task_dir)
    self._reclaim_task = {}
    self.main_fut = self.event_loop.create_task(self.async_start())