async def test_private_artifacts(context_function):
    task_group_id = task_id = slugid.nice()
    override = {
        'task_script': ('bash', '-c', '>&2 echo'),
    }
    async with context_function(override) as context:
        result = await create_task(context, task_id, task_group_id)
        assert result['status']['state'] == 'pending'
        path = os.path.join(context.config['artifact_dir'],
                            'SampleArtifacts/_/X.txt')
        utils.makedirs(os.path.dirname(path))
        with open(path, "w") as fh:
            fh.write("bar")
        async with remember_cwd():
            os.chdir(os.path.dirname(context.config['work_dir']))
            status = await worker.run_tasks(context)
        assert status == 0
        result = await task_status(context, task_id)
        assert result['status']['state'] == 'completed'
        url = artifacts.get_artifact_url(context, task_id,
                                         'SampleArtifacts/_/X.txt')
        path2 = os.path.join(context.config['work_dir'], 'downloaded_file')
        await utils.download_file(context, url, path2)
        with open(path2, "r") as fh:
            contents = fh.read().strip()
        assert contents == 'bar'
Example #2
async def test_verify_parent_task(chain, action_link, release_action_link,
                                  decision_link, build_link, mocker):
    for parent_link in (action_link, release_action_link, decision_link):
        build_link.decision_task_id = parent_link.decision_task_id
        build_link.parent_task_id = parent_link.task_id

        def task_graph(*args, **kwargs):
            return {
                build_link.task_id: {
                    'task': deepcopy(build_link.task)
                },
                chain.task_id: {
                    'task': deepcopy(chain.task)
                },
            }

        path = os.path.join(parent_link.cot_dir, "public", "task-graph.json")
        makedirs(os.path.dirname(path))
        touch(path)
        chain.links = [parent_link, build_link]
        parent_link.task['workerType'] = chain.context.config[
            'valid_decision_worker_types'][0]
        mocker.patch.object(cotverify, 'load_json', new=task_graph)
        mocker.patch.object(cotverify,
                            'verify_firefox_decision_command',
                            new=noop_sync)
        await cotverify.verify_parent_task(chain, parent_link)
Example #3
def test_get_release_props(context, mocker, taskjson, locale, relprops, expected):
    context.task = get_fake_valid_task(taskjson)
    if locale:
        context.task['payload']['locale'] = 'lang'

    context.task['payload']['releaseProperties'] = relprops
    assert get_release_props(context) == (expected, None)

    # also check that the balrog_props method works with the same data
    # TODO: remove the rest of this function once balrog_props is no longer supported
    del context.task['payload']['releaseProperties']

    context.task['payload']['upstreamArtifacts'] = [{
      "locale": "lang",
      "paths": [
        "public/build/lang/balrog_props.json"
      ],
      "taskId": "buildTaskId",
      "taskType": "build"
    }]

    balrog_props_path = os.path.abspath(os.path.join(context.config['work_dir'], 'cot', 'buildTaskId', 'public/build/lang/balrog_props.json'))
    makedirs(os.path.dirname(balrog_props_path))
    with open(balrog_props_path, 'w') as f:
        json.dump({
            'properties': relprops
        }, f)

    assert get_release_props(context) == (expected, balrog_props_path)
Example #4
@contextmanager
def contextual_log_handler(context,
                           path,
                           log_obj=None,
                           level=logging.DEBUG,
                           formatter=None):
    """Add a short-lived log with a contextmanager for cleanup.

    Args:
        context (scriptworker.context.Context): the scriptworker context
        path (str): the path to the log file to create
        log_obj (logging.Logger): the log object to modify.  If None, use
            ``scriptworker.log.log``.  Defaults to None.
        level (int, optional): the logging level.  Defaults to logging.DEBUG.
        formatter (logging.Formatter, optional): the logging formatter. If None,
            one is built from the config's ``log_fmt`` and ``log_datefmt``.
            Defaults to None.

    Yields:
        None: but cleans up the handler afterwards.
    """
    log_obj = log_obj or log
    formatter = formatter or logging.Formatter(
        fmt=context.config['log_fmt'],
        datefmt=context.config['log_datefmt'],
    )
    parent_path = os.path.dirname(path)
    makedirs(parent_path)
    contextual_handler = logging.FileHandler(path, encoding='utf-8')
    contextual_handler.setLevel(level)
    contextual_handler.setFormatter(formatter)
    log_obj.addHandler(contextual_handler)
    yield
    log_obj.removeHandler(contextual_handler)
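A hedged usage sketch for the contextmanager above (assuming the @contextmanager decoration shown): attach a task-specific log file for the duration of a block, then detach it. The path and config key below are illustrative, not taken from the original.

# Hedged usage sketch; the log path is an assumption:
with contextual_log_handler(context, path=os.path.join(context.config['task_log_dir'], 'extra.log')):
    log.debug("this record also lands in extra.log")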
def _copy_files_to_work_dir(file_name, context):
    original_file_path = os.path.join(TEST_DATA_DIR, file_name)
    copied_file_folder = os.path.join(
        context.config["work_dir"], "cot", "upstream-task-id1"
    )
    makedirs(copied_file_folder)
    shutil.copy(original_file_path, copied_file_folder)
async def test_integration_autograph_focus(context, tmpdir):
    file_name = 'app.apk'
    original_file_path = os.path.join(TEST_DATA_DIR, file_name)
    copied_file_folder = os.path.join(context.config['work_dir'], 'cot',
                                      'upstream-task-id1')
    makedirs(copied_file_folder)
    shutil.copy(original_file_path, copied_file_folder)

    zip_infos_before_signature = _extract_compress_type_per_filename(
        os.path.join(copied_file_folder, file_name))

    context.config['signing_server_config'] = _write_server_config(tmpdir)
    context.task = _craft_task([file_name], signing_format='autograph_focus')

    keystore_path = os.path.join(tmpdir, 'keystore')
    certificate_path = os.path.join(TEST_DATA_DIR, 'autograph_apk.pub')
    certificate_alias = 'autograph_focus'
    _instantiate_keystore(keystore_path, certificate_path, certificate_alias)

    await async_main(context)

    signed_path = os.path.join(tmpdir, 'artifact', file_name)
    assert _verify_apk_signature(keystore_path, signed_path, certificate_alias)

    zip_infos_after_signature = _extract_compress_type_per_filename(
        signed_path)
    for signature_file in ('META-INF/SIGNATURE.RSA', 'META-INF/SIGNATURE.SF',
                           'META-INF/MANIFEST.MF'):
        del zip_infos_after_signature[signature_file]

    # We want to make sure the compression type hasn't changed after signing
    # https://github.com/mozilla-services/autograph/issues/164

    assert zip_infos_before_signature == zip_infos_after_signature
Example #8
def test_verify_cot_cmdln(chain, args, tmpdir, mocker, event_loop):
    context = mock.MagicMock()
    context.queue = mock.MagicMock()
    context.queue.task = noop_async
    path = os.path.join(tmpdir, 'x')
    makedirs(path)

    def eloop():
        return event_loop

    def get_context():
        return context

    def mkdtemp():
        return path

    def cot(*args, **kwargs):
        m = mock.MagicMock()
        m.links = [mock.MagicMock()]
        m.dependent_task_ids = noop_sync
        return m

    mocker.patch.object(tempfile, 'mkdtemp', new=mkdtemp)
    mocker.patch.object(asyncio, 'get_event_loop', new=eloop)
    mocker.patch.object(cotverify, 'read_worker_creds', new=noop_sync)
    mocker.patch.object(cotverify, 'Context', new=get_context)
    mocker.patch.object(cotverify, 'ChainOfTrust', new=cot)
    mocker.patch.object(cotverify, 'verify_chain_of_trust', new=noop_async)

    cotverify.verify_cot_cmdln(args=args)
Example #9
async def build_task_dependencies(chain, task, name, my_task_id):
    """Recursively build the task dependencies of a task.

    Args:
        chain (ChainOfTrust): the chain of trust to add to.
        task (dict): the task definition to operate on.
        name (str): the name of the task to operate on.
        my_task_id (str): the taskId of the task to operate on.

    Raises:
        CoTError: on failure.
    """
    log.info("build_task_dependencies {} {}".format(name, my_task_id))
    if name.count(':') > 5:
        raise CoTError("Too deep recursion!\n{}".format(name))
    sorted_dependencies = find_sorted_task_dependencies(task, name, my_task_id)

    for task_name, task_id in sorted_dependencies:
        if task_id not in chain.dependent_task_ids():
            link = LinkOfTrust(chain.context, task_name, task_id)
            json_path = link.get_artifact_full_path('task.json')
            try:
                task_defn = await chain.context.queue.task(task_id)
                link.task = task_defn
                chain.links.append(link)
                # write task json to disk
                makedirs(os.path.dirname(json_path))
                with open(json_path, 'w') as fh:
                    fh.write(format_json(task_defn))
                await build_task_dependencies(chain, task_defn, task_name,
                                              task_id)
            except TaskclusterFailure as exc:
                raise CoTError(str(exc))
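The colon-count guard above implies that link names are colon-delimited ancestry chains built up by `find_sorted_task_dependencies`, so the recursion is bounded at six generations. A hedged sketch of the initial call; the `ChainOfTrust` constructor arguments are assumptions, not the verified API:

# Hedged usage sketch:
chain = ChainOfTrust(context, 'signing', task_id=current_task_id)
await build_task_dependencies(chain, chain.task, chain.name, chain.task_id)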
Example #10
def _craft_credentials_file(context, channel, temp_dir):
    macaroon_original_location = context.config['macaroons_locations'][channel]

    snapcraft_dir = os.path.join(temp_dir, '.snapcraft')
    makedirs(snapcraft_dir)
    macaroon_target_location = os.path.join(snapcraft_dir, 'snapcraft.cfg')
    shutil.copyfile(macaroon_original_location, macaroon_target_location)
Example #11
    def write_json(self, path, contents, message):
        """Write json to disk."""
        if contents:
            log.debug(message.format(path=path))
            makedirs(os.path.dirname(path))
            with open(path, "w") as fh:
                json.dump(contents, fh, indent=2, sort_keys=True)
Example #12
@contextmanager
def get_log_fhs(context):
    """Helper contextmanager function to open the log and error
    filehandles.
    """
    log_file, error_file = get_log_filenames(context)
    makedirs(context.config['log_dir'])
    with open(log_file, "w") as log_fh:
        with open(error_file, "w") as error_fh:
            yield (log_fh, error_fh)
Example #14
def test_script_can_push_snaps_with_credentials(event_loop, monkeypatch,
                                                channel):
    push_call_counter = (n for n in range(0, 2))

    task = {
        'dependencies': ['some_snap_build_taskId'],
        'scopes': ['project:releng:snapcraft:firefox:{}'.format(channel)],
        'payload': {
            'upstreamArtifacts': [{
                'paths': ['public/build/firefox-59.0.snap'],
                'taskId': 'some_snap_build_taskId',
                'taskType': 'build'
            }],
        },
    }

    with tempfile.NamedTemporaryFile('w+') as macaroon_beta, \
            tempfile.NamedTemporaryFile('w+') as macaroon_candidate:
        config = {
            'macaroons_locations': {
                'candidate': macaroon_candidate.name,
                'beta': macaroon_beta.name,
            },
        }

        with tempfile.TemporaryDirectory() as work_dir:
            config['work_dir'] = work_dir

            with open(os.path.join(work_dir, 'task.json'), 'w') as task_file:
                json.dump(task, task_file)

            snap_artifact_dir = os.path.join(
                work_dir, 'cot/some_snap_build_taskId/public/build/')
            makedirs(snap_artifact_dir)
            snap_artifact_path = os.path.join(snap_artifact_dir,
                                              'firefox-59.0.snap')
            with open(snap_artifact_path, 'w') as snap_file:
                snap_file.write(' ')

            # config_file is not put in the TemporaryDirectory() (like the others), because it usually lives
            # elsewhere on the filesystem
            with tempfile.NamedTemporaryFile('w+') as config_file:
                json.dump(config, config_file)
                config_file.seek(0)

                def snapcraft_store_client_push_fake(snap_filename,
                                                     release_channels):
                    assert snap_filename == snap_artifact_path
                    assert release_channels == [channel]
                    next(push_call_counter)

                monkeypatch.setattr(snapcraft_store_client, 'push',
                                    snapcraft_store_client_push_fake)
                main(config_path=config_file.name)

    assert next(push_call_counter) == 1
Example #15
    def write_json(self, path, contents, message):
        """Write json to disk."""
        if contents:
            log.debug(message.format(path=path))
            parent_dir = os.path.dirname(path)
            if parent_dir:
                makedirs(parent_dir)
            with open(path, "w") as fh:
                json.dump(contents, fh, indent=2, sort_keys=True)
Example #16
def update_logging_config(context: Any,
                          log_name: Optional[str] = None,
                          file_name: str = "worker.log") -> None:
    """Update python logging settings from config.

    By default, this sets the ``scriptworker`` log settings, but this will
    change if some other package calls this function or specifies the ``log_name``.

    * Use formatting from config settings.
    * Log to screen if ``verbose``
    * Add a rotating logfile from config settings.

    Args:
        context (scriptworker.context.Context): the scriptworker context.
        log_name (str, optional): the name of the Logger to modify.
            If None, use the top level module ('scriptworker').
            Defaults to None.
        file_name (str, optional): the name of the log file. Defaults to
            "worker.log".

    """
    log_name = log_name or __name__.split(".")[0]
    top_level_logger = logging.getLogger(log_name)

    datefmt = context.config["log_datefmt"]
    fmt = context.config["log_fmt"]
    formatter = logging.Formatter(fmt=fmt, datefmt=datefmt)

    if context.config.get("verbose"):
        top_level_logger.setLevel(logging.DEBUG)
        if len(top_level_logger.handlers) == 0:
            handler = logging.StreamHandler()
            handler.setFormatter(formatter)
            top_level_logger.addHandler(handler)
    else:
        top_level_logger.setLevel(logging.INFO)

    # Rotating log file
    makedirs(context.config["log_dir"])
    path = os.path.join(context.config["log_dir"], file_name)
    if context.config["watch_log_file"]:
        # If we rotate the log file via logrotate.d, let's watch the file
        # so we can automatically close/reopen on move.
        handler = logging.handlers.WatchedFileHandler(path)  # type: ignore
    elif context.config["log_max_bytes"] and context.config["log_max_backups"]:
        handler = logging.handlers.RotatingFileHandler(  # type: ignore
            filename=path,
            maxBytes=context.config["log_max_bytes"],
            backupCount=context.config["log_max_backups"],
        )
    else:
        # Avoid using WatchedFileHandler during scriptworker unittests
        handler = logging.FileHandler(path)  # type: ignore
    handler.setFormatter(formatter)
    top_level_logger.addHandler(handler)
    top_level_logger.addHandler(logging.NullHandler())
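A usage note on the function above: the stream handler is only added when the logger has no handlers yet, but a file handler (and a NullHandler) is appended on every call, so repeated calls duplicate file output. A hedged sketch:

# Hedged usage sketch:
update_logging_config(context)                       # configure the 'scriptworker' logger
update_logging_config(context, log_name='myscript')  # same settings for another package's logger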
Example #17
    def write_json(self, path, contents, message):
        """Write json to disk.

        Args:
            path (str): the path to write to
            contents (dict): the contents of the json blob
            message (str): the message to log
        """
        log.debug(message.format(path=path))
        makedirs(os.path.dirname(path))
        with open(path, "w") as fh:
            json.dump(contents, fh, indent=2, sort_keys=True)
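A hypothetical call site for the method above; the receiver and values are illustrative only. Note that `message` must contain a `{path}` placeholder for `message.format(path=path)` to fill in.

# Hedged usage sketch; `obj` stands in for whatever object defines write_json:
obj.write_json('artifacts/manifest.json', {'version': 1}, 'Writing manifest to {path}')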
Example #18
async def sign_widevine_tar(context, orig_path, fmt):
    """Sign the internals of a tarfile with the widevine key.

    Extract the entire tarball, but only sign a handful of files (see
    `_WIDEVINE_BLESSED_FILENAMES` and `_WIDEVINE_UNBLESSED_FILENAMES`).
    The blessed files should be signed with the `widevine_blessed` format.
    Then recreate the tarball.

    Ideally we would be able to append the sigfiles to the original tarball,
    but that's not possible with compressed tarballs.

    Args:
        context (Context): the signing context
        orig_path (str): the source file to sign
        fmt (str): the format to sign with

    Returns:
        str: the path to the signed archive

    """
    _, compression = os.path.splitext(orig_path)
    # This will get cleaned up when we nuke `work_dir`. Clean up at that point
    # rather than immediately after `sign_widevine`, to optimize task runtime
    # speed over disk space.
    tmp_dir = tempfile.mkdtemp(prefix="wvtar", dir=context.config["work_dir"])
    # Get file list
    all_files = await _get_tarfile_files(orig_path, compression)
    files_to_sign = _get_widevine_signing_files(all_files)
    log.debug("Widevine files to sign: %s", files_to_sign)
    if files_to_sign:
        # Extract all files so we can create `precomplete` with the full
        # file list
        all_files = await _extract_tarfile(context, orig_path, compression, tmp_dir=tmp_dir)
        tasks = []
        # Sign the appropriate inner files
        for from_, fmt in files_to_sign.items():
            from_ = os.path.join(tmp_dir, from_)
            # Don't try to sign directories
            if not os.path.isfile(from_):
                continue
            # Move the sig location on mac. This should be a no-op on linux.
            to = _get_mac_sigpath(from_)
            log.debug("Adding %s to the sigfile paths...", to)
            makedirs(os.path.dirname(to))
            tasks.append(asyncio.ensure_future(sign_widevine_with_autograph(context, from_, "blessed" in fmt, to=to)))
            all_files.append(to)
        await raise_future_exceptions(tasks)
        remove_extra_files(tmp_dir, all_files)
        # Regenerate the `precomplete` file, which is used for cleanup before
        # applying a complete mar.
        _run_generate_precomplete(context, tmp_dir)
        await _create_tarfile(context, orig_path, all_files, compression, tmp_dir=tmp_dir)
    return orig_path
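A hedged usage sketch of the coroutine above: it rebuilds the archive in place and, per the docstring, returns the path it was given.

# Hedged usage sketch inside some async signing routine; path and format are illustrative:
signed_path = await sign_widevine_tar(context, '/abs/path/to/target.tar.gz', 'widevine')
assert signed_path == '/abs/path/to/target.tar.gz'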
Example #19
def test_verify_cot_signatures_bad_sig(chain, build_link, mocker):
    def die(*args, **kwargs):
        raise ScriptWorkerGPGException("x")

    path = os.path.join(build_link.cot_dir, 'public/chainOfTrust.json.asc')
    makedirs(os.path.dirname(path))
    touch(path)
    chain.links = [build_link]
    mocker.patch.object(cotverify, 'GPG', new=noop_sync)
    mocker.patch.object(cotverify, 'get_body', new=die)
    with pytest.raises(CoTError):
        cotverify.verify_cot_signatures(chain)
Example #20
def test_run_generate_precomplete(context, num_precomplete, raises, mocker):
    mocker.patch.object(sign, "generate_precomplete", new=noop_sync)
    work_dir = context.config['work_dir']
    for i in range(0, num_precomplete):
        path = os.path.join(work_dir, "foo", str(i))
        makedirs(path)
        with open(os.path.join(path, "precomplete"), "w") as fh:
            fh.write("blah")
    if raises:
        with pytest.raises(SigningScriptError):
            sign._run_generate_precomplete(context, work_dir)
    else:
        sign._run_generate_precomplete(context, work_dir)
Example #21
def rw_context():
    with tempfile.TemporaryDirectory() as tmp:
        context = Context()
        context.config = get_unfrozen_copy(DEFAULT_CONFIG)
        context.config['gpg_lockfile'] = os.path.join(tmp, 'gpg_lockfile')
        context.config['cot_job_type'] = "signing"
        for key, value in context.config.items():
            if key.endswith("_dir"):
                context.config[key] = os.path.join(tmp, key)
                makedirs(context.config[key])
            if key.endswith("key_path") or key in ("gpg_home", ):
                context.config[key] = os.path.join(tmp, key)
        yield context
Example #23
@contextmanager
def get_log_filehandle(context):
    """Helper contextmanager function to open the log and error filehandles.

    Args:
        context (scriptworker.context.Context): the scriptworker context.

    Yields:
        log filehandle
    """
    log_file_name = get_log_filename(context)
    makedirs(context.config['task_log_dir'])
    with open(log_file_name, "w", encoding="utf-8") as filehandle:
        yield filehandle
Example #24
@contextmanager
def get_log_filehandle(context: Any) -> Iterator[IO[str]]:
    """Open the log and error filehandles.

    Args:
        context (scriptworker.context.Context): the scriptworker context.

    Yields:
        log filehandle

    """
    log_file_name = get_log_filename(context)
    makedirs(context.config["task_log_dir"])
    with open(log_file_name, "w", encoding="utf-8") as filehandle:
        yield filehandle
Example #25
@contextmanager
def get_log_fhs(context):
    """Helper contextmanager function to open the log and error
    filehandles.

    Args:
        context (scriptworker.context.Context): the scriptworker context.

    Yields:
        tuple: log filehandle, error log filehandle
    """
    log_file, error_file = get_log_filenames(context)
    makedirs(context.config['task_log_dir'])
    with open(log_file, "w") as log_fh:
        with open(error_file, "w") as error_fh:
            yield (log_fh, error_fh)
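A usage sketch for the filehandle helpers above, assuming the @contextmanager decoration shown:

# Hedged usage sketch:
with get_log_fhs(context) as (log_fh, error_fh):
    log_fh.write('a stdout line\n')
    error_fh.write('a stderr line\n')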
Example #26
def test_script_can_push_snaps_with_credentials(monkeypatch, channel, expected_revision):
    task = {
        "dependencies": ["some_snap_build_taskId"],
        "scopes": ["project:releng:snapcraft:firefox:{}".format(channel)],
        "payload": {"upstreamArtifacts": [{"paths": ["public/build/target.snap"], "taskId": "some_snap_build_taskId", "taskType": "build"}]},
    }

    snapcraft_store_client_mock = MagicMock()
    store_mock = MagicMock()
    store_mock.get_snap_revisions.return_value = _ALL_REVISIONS_ABSTRACT

    def cpi_get_side_effect(*args, **kwargs):
        revision = kwargs["params"]["revision"]
        cpi_get_mock = MagicMock()
        cpi_get_mock.json.return_value = {"download_sha3_384": "fake_hash_rev{}".format(revision)}
        return cpi_get_mock

    store_mock.cpi.get.side_effect = cpi_get_side_effect

    monkeypatch.setattr(snap_store, "StoreClient", lambda: store_mock)
    monkeypatch.setattr(snap_store, "get_hash", lambda *args, **kwargs: "fake_hash_rev{}".format(expected_revision))

    with tempfile.NamedTemporaryFile("w+") as macaroon_beta, tempfile.NamedTemporaryFile("w+") as macaroon_candidate:
        config = {"push_to_store": True, "macaroons_locations": {"candidate": macaroon_candidate.name, "beta": macaroon_beta.name}}

        with tempfile.TemporaryDirectory() as work_dir:
            config["work_dir"] = work_dir

            with open(os.path.join(work_dir, "task.json"), "w") as task_file:
                json.dump(task, task_file)

            snap_artifact_dir = os.path.join(work_dir, "cot/some_snap_build_taskId/public/build/")
            makedirs(snap_artifact_dir)
            snap_artifact_path = os.path.join(snap_artifact_dir, "target.snap")
            with open(snap_artifact_path, "w") as snap_file:
                snap_file.write(" ")

            # config_file is not put in the TemporaryDirectory() (like the others), because it usually lives
            # elsewhere on the filesystem
            with tempfile.NamedTemporaryFile("w+") as config_file:
                json.dump(config, config_file)
                config_file.seek(0)

                monkeypatch.setattr(snap_store, "snapcraft_store_client", snapcraft_store_client_mock)
                main(config_path=config_file.name)

    snapcraft_store_client_mock.push.assert_called_once_with(snap_filename=snap_artifact_path)
    store_mock.release.assert_called_once_with(snap_name="firefox", revision=expected_revision, channels=[channel])
Example #27
def _craft_rw_context(tmp, event_loop, cot_product, session):
    config = get_unfrozen_copy(DEFAULT_CONFIG)
    config['cot_product'] = cot_product
    context = Context()
    context.session = session
    context.config = apply_product_config(config)
    context.config['cot_job_type'] = "signing"
    for key, value in context.config.items():
        if key.endswith("_dir"):
            context.config[key] = os.path.join(tmp, key)
            makedirs(context.config[key])
        if key.endswith("key_path"):
            context.config[key] = os.path.join(tmp, key)
    context.config['verbose'] = VERBOSE
    context.event_loop = event_loop
    return context
Example #28
def test_verify_cot_signatures(chain, build_link, mocker):
    def fake_body(*args, **kwargs):
        return '{"taskId": "build_task_id"}'

    build_link._cot = None
    unsigned_path = os.path.join(build_link.cot_dir,
                                 'public/chainOfTrust.json.asc')
    path = os.path.join(build_link.cot_dir, 'chainOfTrust.json')
    makedirs(os.path.dirname(unsigned_path))
    touch(unsigned_path)
    chain.links = [build_link]
    mocker.patch.object(cotverify, 'GPG', new=noop_sync)
    mocker.patch.object(cotverify, 'get_body', new=fake_body)
    cotverify.verify_cot_signatures(chain)
    assert os.path.exists(path)
    with open(path, "r") as fh:
        assert json.load(fh) == {"taskId": "build_task_id"}
Example #29
File: log.py Project: kmoir/scriptworker
def update_logging_config(context, log_name=None, file_name='worker.log'):
    """Update python logging settings from config.

    By default, this sets the ``scriptworker`` log settings, but this will
    change if some other package calls this function or specifies the ``log_name``.

    * Use formatting from config settings.
    * Log to screen if ``verbose``
    * Add a rotating logfile from config settings.

    Args:
        context (scriptworker.context.Context): the scriptworker context.
        log_name (str, optional): the name of the Logger to modify.
            If None, use the top level module ('scriptworker').
            Defaults to None.
        file_name (str, optional): the name of the log file. Defaults to
            'worker.log'.

    """
    log_name = log_name or __name__.split('.')[0]
    top_level_logger = logging.getLogger(log_name)

    datefmt = context.config['log_datefmt']
    fmt = context.config['log_fmt']
    formatter = logging.Formatter(fmt=fmt, datefmt=datefmt)

    if context.config.get("verbose"):
        top_level_logger.setLevel(logging.DEBUG)
        if len(top_level_logger.handlers) == 0:
            handler = logging.StreamHandler()
            handler.setFormatter(formatter)
            top_level_logger.addHandler(handler)
    else:
        top_level_logger.setLevel(logging.INFO)

    # Rotating log file
    makedirs(context.config['log_dir'])
    path = os.path.join(context.config['log_dir'], file_name)
    handler = logging.handlers.RotatingFileHandler(
        path,
        maxBytes=context.config['log_max_bytes'],
        backupCount=context.config['log_num_backups'],
    )
    handler.setFormatter(formatter)
    top_level_logger.addHandler(handler)
    top_level_logger.addHandler(logging.NullHandler())
Example #30
async def test_integration_autograph_apk(context, tmpdir):
    file_name = 'app.apk'
    original_file_path = os.path.join(TEST_DATA_DIR, file_name)
    copied_file_folder = os.path.join(context.config['work_dir'], 'cot', 'upstream-task-id1')
    makedirs(copied_file_folder)
    shutil.copy(original_file_path, copied_file_folder)

    context.config['signing_server_config'] = _write_server_config(tmpdir)
    context.task = _craft_task([file_name], signing_format='autograph_apk')

    keystore_path = os.path.join(tmpdir, 'keystore')
    certificate_path = os.path.join(TEST_DATA_DIR, 'autograph_apk.pub')
    certificate_alias = 'autograph_apk'
    _instanciate_keystore(keystore_path, certificate_path, certificate_alias)

    await async_main(context)

    signed_path = os.path.join(tmpdir, 'artifact', file_name)
    assert _verify_apk_signature(keystore_path, signed_path, certificate_alias)
Example #31
def _craft_rw_context(tmp, event_loop, cot_product, session, private=False):
    config = get_unfrozen_copy(DEFAULT_CONFIG)
    config["cot_product"] = cot_product
    context = Context()
    context.session = session
    context.config = apply_product_config(config)
    context.config["cot_job_type"] = "scriptworker"
    for key, value in context.config.items():
        if key.endswith("_dir"):
            context.config[key] = os.path.join(tmp, key)
            makedirs(context.config[key])
        if key.endswith("key_path"):
            context.config[key] = os.path.join(tmp, key)
    if private:
        for rule in context.config["trusted_vcs_rules"]:
            rule["require_secret"] = True
    context.config["verbose"] = VERBOSE
    context.event_loop = event_loop
    return context
Example #32
async def rw_context(request, event_loop):
    with tempfile.TemporaryDirectory() as tmp:
        config = get_unfrozen_copy(DEFAULT_CONFIG)
        config['cot_product'] = request.param
        context = Context()
        context.config = apply_product_config(config)
        context.config['gpg_lockfile'] = os.path.join(tmp, 'gpg_lockfile')
        context.config['cot_job_type'] = "signing"
        for key, value in context.config.items():
            if key.endswith("_dir"):
                context.config[key] = os.path.join(tmp, key)
                makedirs(context.config[key])
            if key.endswith("key_path") or key in ("gpg_home", ):
                context.config[key] = os.path.join(tmp, key)
        context.config['verbose'] = VERBOSE
        context.event_loop = event_loop
        yield context
        await _close_session(context)
        await _close_session(context.queue)
        await _close_session(context.temp_queue)
Example #33
def test_remove_extra_files(context):
    extra = ["a", "b/c"]
    good = ["d", "e/f"]
    work_dir = context.config['work_dir']
    all_files = []
    for f in extra + good:
        path = os.path.join(work_dir, f)
        makedirs(os.path.dirname(path))
        with open(path, "w") as fh:
            fh.write("x")
        if f in good:
            all_files.append(path)
    for f in good:
        assert os.path.exists(os.path.join(work_dir, f))
    output = sign.remove_extra_files(work_dir, all_files)
    for f in extra:
        path = os.path.realpath(os.path.join(work_dir, f))
        assert path in output
        assert not os.path.exists(path)
    for f in good:
        assert os.path.exists(os.path.join(work_dir, f))
Example #34
async def test_verify_decision_task_worker_type(chain, decision_link, build_link, mocker):

    def task_graph(*args, **kwargs):
        return {
            build_link.task_id: {
                'task': deepcopy(build_link.task)
            },
            chain.task_id: {
                'task': deepcopy(chain.task)
            },
        }

    path = os.path.join(decision_link.cot_dir, "public", "task-graph.json")
    makedirs(os.path.dirname(path))
    touch(path)
    chain.links = [decision_link, build_link]
    decision_link.task['workerType'] = 'bad-worker-type'
    mocker.patch.object(cotverify, 'load_json', new=task_graph)
    mocker.patch.object(cotverify, 'verify_firefox_decision_command', new=noop_sync)
    with pytest.raises(CoTError):
        await cotverify.verify_decision_task(chain, decision_link)
Example #35
def update_logging_config(context, log_name=None):
    """Update python logging settings from config.

    By default, this sets the `scriptworker` log settings, but this will
    change if some other package calls this function or specifies the `log_name`.

    * Use formatting from config settings.
    * Log to screen if `verbose`
    * Add a rotating logfile from config settings.
    """
    log_name = log_name or __name__.split('.')[0]
    top_level_logger = logging.getLogger(log_name)

    datefmt = context.config['log_datefmt']
    fmt = context.config['log_fmt']
    formatter = logging.Formatter(fmt=fmt, datefmt=datefmt)

    if context.config.get("verbose"):
        top_level_logger.setLevel(logging.DEBUG)
        if len(top_level_logger.handlers) == 0:
            handler = logging.StreamHandler()
            handler.setFormatter(formatter)
            top_level_logger.addHandler(handler)
    else:
        top_level_logger.setLevel(logging.INFO)

    # Rotating log file
    makedirs(context.config['log_dir'])
    path = os.path.join(context.config['log_dir'], 'worker.log')
    handler = logging.handlers.RotatingFileHandler(
        path, maxBytes=context.config['log_max_bytes'],
        backupCount=context.config['log_num_backups'],
    )
    handler.setFormatter(formatter)
    top_level_logger.addHandler(handler)
    top_level_logger.addHandler(logging.NullHandler())
Example #36
def test_makedirs_existing_file():
    path = os.path.join(os.path.dirname(__file__), "data", "azure.xml")
    with pytest.raises(ScriptWorkerException):
        utils.makedirs(path)
Example #37
def test_makedirs_existing_dir():
    path = os.path.join(os.path.dirname(__file__))
    utils.makedirs(path)
Example #38
def test_makedirs_empty():
    utils.makedirs(None)
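Taken together, the last three tests pin down the contract of scriptworker's makedirs: a falsy path is a no-op, an existing directory is accepted silently, and an existing non-directory raises ScriptWorkerException. A minimal sketch consistent with those tests, not necessarily the exact implementation:

import os

from scriptworker.exceptions import ScriptWorkerException


def makedirs(path):
    # No-op on None/empty paths; idempotent for directories; error for files.
    if path:
        if not os.path.exists(path):
            os.makedirs(path)
        elif not os.path.isdir(os.path.realpath(path)):
            raise ScriptWorkerException(
                "makedirs: {} already exists and is not a directory!".format(path)
            )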