Example 1
def test_cleanup(rw_context):
    """cleanup() should empty the managed dirs but leave the dirs themselves."""
    for dirname in ("work_dir", "artifact_dir", "task_log_dir"):
        marker = os.path.join(rw_context.config[dirname], "tempfile")
        open(marker, "w").close()
        assert os.path.exists(marker)
    utils.cleanup(rw_context)
    for dirname in ("work_dir", "artifact_dir"):
        base = rw_context.config[dirname]
        assert os.path.exists(base)
        assert not os.path.exists(os.path.join(base, "tempfile"))
    # second pass: cleanup() must cope with an already-missing work_dir
    utils.rm(rw_context.config["work_dir"])
    utils.cleanup(rw_context)
Example 2
def test_cleanup(context):
    """cleanup() should empty the managed dirs but leave the dirs themselves."""
    dirty = ('work_dir', 'artifact_dir', 'task_log_dir')
    for dirname in dirty:
        marker = os.path.join(context.config[dirname], 'tempfile')
        open(marker, "w").close()
        assert os.path.exists(marker)
    utils.cleanup(context)
    for dirname in ('work_dir', 'artifact_dir'):
        base = context.config[dirname]
        assert os.path.exists(base)
        assert not os.path.exists(os.path.join(base, "tempfile"))
    # second pass: cleanup() must cope with an already-missing work_dir
    utils.rm(context.config['work_dir'])
    utils.cleanup(context)
Example 3
def test_cleanup(context):
    """Seed each managed dir with a file, then verify cleanup() wipes them."""
    for dirname in ('work_dir', 'artifact_dir', 'task_log_dir'):
        stray = os.path.join(context.config[dirname], 'tempfile')
        open(stray, "w").close()
        assert os.path.exists(stray)
    utils.cleanup(context)
    for dirname in ('work_dir', 'artifact_dir'):
        kept_dir = context.config[dirname]
        assert os.path.exists(kept_dir)
        assert not os.path.exists(os.path.join(kept_dir, "tempfile"))
    # second pass: a missing work_dir must not break cleanup()
    utils.rm(context.config['work_dir'])
    utils.cleanup(context)
Example 4
async def _extract_tarfile(context, from_, compression, tmp_dir=None):
    """Extract a tarball into a fresh temp dir and return the extracted files.

    Args:
        context: scriptworker-style context providing ``config["work_dir"]``.
        from_ (str): path to the tarball to extract.
        compression (str): compression hint, normalized via
            ``_get_tarfile_compression``.
        tmp_dir (str, optional): extraction target; defaults to
            ``work_dir/untarred``.

    Returns:
        list: paths of the extracted regular files (directories are skipped).

    Raises:
        SigningScriptError: on any failure during extraction.

    """
    work_dir = context.config["work_dir"]
    tmp_dir = tmp_dir or os.path.join(work_dir, "untarred")
    compression = _get_tarfile_compression(compression)
    try:
        files = []
        # Start from an empty target dir so stale files never end up in the result.
        rm(tmp_dir)
        utils.mkdir(tmp_dir)
        with tarfile.open(from_, mode="r:{}".format(compression)) as t:
            # SECURITY: extractall() on an untrusted archive is vulnerable to
            # path traversal ("../" members). Pass filter="data" once the
            # baseline is Python >= 3.12, or validate member names first.
            t.extractall(path=tmp_dir)
            for name in t.getnames():
                path = os.path.join(tmp_dir, name)
                # Explicit if/append instead of the `cond and f()` idiom,
                # which hides a side effect inside a discarded expression.
                if os.path.isfile(path):
                    files.append(path)
        return files
    except Exception as e:
        # Chain the original exception so the real traceback survives.
        raise SigningScriptError(e) from e
Example 5
async def async_main(context):
    """Run one iteration of the main loop, then sleep for ``poll_interval``.

    Follows http://docs.taskcluster.net/queue/worker-interaction/ ; kept as a
    thin loop body so each step stays independently testable.

    Args:
        context (scriptworker.context.Context): the scriptworker context.
    """
    staged_gpg_home = get_tmp_base_gpg_home_dir(context)
    lock_state = is_lockfile_present(context, "scriptworker", logging.DEBUG)
    if os.path.exists(staged_gpg_home) and lock_state == "ready":
        # Swap the freshly built gpg homedir into place; always drop the
        # lockfile, even if the rm/rename fails.
        try:
            rm(context.config['base_gpg_home_dir'])
            os.rename(staged_gpg_home, context.config['base_gpg_home_dir'])
        finally:
            rm_lockfile(context)
    await run_loop(context)
    await asyncio.sleep(context.config['poll_interval'])
Example 6
def remove_extra_files(top_dir, file_list):
    """Find any extra files in `top_dir`, given an expected `file_list`.

    Any extra path that is a regular file is logged and deleted.

    Args:
        top_dir (str): the dir to walk
        file_list (list): the list of expected files

    Returns:
        list: the list of extra files

    """
    found = {os.path.realpath(f) for f in glob.glob(os.path.join(top_dir, "**", "*"), recursive=True)}
    expected = {os.path.realpath(f) for f in file_list}
    extra_files = list(found - expected)
    for extra in extra_files:
        if not os.path.isfile(extra):
            continue
        log.warning("Extra file to clean up: {}".format(extra))
        rm(extra)
    return extra_files
def test_rebuild_gpg_homedirs_exception(context, mocker, nuke_dir):
    """rebuild_gpg_homedirs should SystemExit when the signed rebuild dies."""
    def fake_context(*args):
        return (context, None)

    if nuke_dir:
        rm(context.config['git_key_repo_dir'])

    # Stub out every collaborator; only rebuild_gpg_home_signed raises.
    patches = (
        ("get_context_from_cmdln", fake_context),
        ("update_logging_config", noop_sync),
        ("rebuild_gpg_home_signed", die_sync),
        ("retry_async", noop_async),
        ("update_ownertrust", noop_sync),
        ("verify_signed_tag", noop_async),
        ("overwrite_gpg_home", noop_sync),
        ("update_signed_git_repo", noop_async),
        ("build_gpg_homedirs_from_repo", noop_sync),
    )
    for attr_name, replacement in patches:
        mocker.patch.object(sgpg, attr_name, new=replacement)

    with pytest.raises(SystemExit):
        sgpg.rebuild_gpg_homedirs(event_loop=context.event_loop)
Example 8
async def _extract_zipfile(context, from_, files=None, tmp_dir=None):
    """Extract a zipfile into a fresh temp dir and return the extracted paths.

    Args:
        context: scriptworker-style context providing ``config["work_dir"]``.
        from_ (str): path to the zipfile to extract.
        files (list, optional): specific archive members to extract; extract
            everything when ``None``. Defaults to None.
        tmp_dir (str, optional): extraction target; defaults to
            ``work_dir/unzipped``.

    Returns:
        list: paths of the extracted members.

    Raises:
        SigningScriptError: on any failure during extraction.

    """
    work_dir = context.config["work_dir"]
    tmp_dir = tmp_dir or os.path.join(work_dir, "unzipped")
    log.debug("Extracting {} from {} to {}...".format(files or "all files", from_, tmp_dir))
    try:
        extracted_files = []
        # Start from an empty target dir so stale files never end up in the result.
        rm(tmp_dir)
        utils.mkdir(tmp_dir)
        with zipfile.ZipFile(from_, mode="r") as z:
            if files is not None:
                for name in files:
                    z.extract(name, path=tmp_dir)
                    extracted_files.append(os.path.join(tmp_dir, name))
            else:
                for name in z.namelist():
                    extracted_files.append(os.path.join(tmp_dir, name))
                z.extractall(path=tmp_dir)
        return extracted_files
    except Exception as e:
        # Chain the original exception so the real traceback survives.
        raise SigningScriptError(e) from e
Example 9
async def async_main(context, credentials):
    """Set up and run tasks for this iteration.

    http://docs.taskcluster.net/queue/worker-interaction/

    Args:
        context (scriptworker.context.Context): the scriptworker context.
        credentials: credentials to attach to ``context.credentials`` for this
            iteration (presumably taskcluster creds — confirm with caller).
    """
    conn = aiohttp.TCPConnector(limit=context.config['aiohttp_max_connections'])
    # The session only lives for this iteration; attach it (and the creds)
    # to the context before any task work runs.
    async with aiohttp.ClientSession(connector=conn) as session:
        context.session = session
        context.credentials = credentials
        tmp_gpg_home = get_tmp_base_gpg_home_dir(context)
        state = is_lockfile_present(context, "scriptworker", logging.DEBUG)
        if os.path.exists(tmp_gpg_home) and state == "ready":
            # A rebuilt gpg homedir is staged and ready: swap it into place.
            # The lockfile is removed even if the rm/rename fails.
            try:
                rm(context.config['base_gpg_home_dir'])
                os.rename(tmp_gpg_home, context.config['base_gpg_home_dir'])
            finally:
                rm_lockfile(context)
        await run_tasks(context)
Example 10
def test_rm_dir():
    """utils.rm should remove a directory."""
    target = tempfile.mkdtemp()
    assert os.path.exists(target)
    utils.rm(target)
    assert not os.path.exists(target)
Example 11
def test_rm_file():
    """utils.rm should remove a regular file."""
    # mkstemp() returns an OPEN os-level file descriptor; the original
    # discarded it with `_`, leaking the fd. Close it before removing.
    fd, tmp = tempfile.mkstemp()
    os.close(fd)
    assert os.path.exists(tmp)
    utils.rm(tmp)
    assert not os.path.exists(tmp)
Example 12
def test_rm_empty():
    """utils.rm(None) must be a silent no-op."""
    utils.rm(None)
Example 13
def verify_cot_cmdln(args=None):
    """Test the chain of trust from the commandline, for debugging purposes.

    Builds a throwaway Context pointed at a temp dir, fetches the task
    definition for the given task_id, and runs ``verify_chain_of_trust``
    against it (with artifact signature verification disabled).

    Args:
        args (list, optional): the commandline args to parse.  If None, use
            ``sys.argv[1:]`` .  Defaults to None.
    """
    args = args or sys.argv[1:]
    parser = argparse.ArgumentParser(
        description="""Verify a given task's chain of trust.

Given a task's `task_id`, get its task definition, then trace its chain of
trust back to the tree.  This doesn't verify chain of trust artifact signatures,
but does run the other tests in `scriptworker.cot.verify.verify_chain_of_trust`.

This is helpful in debugging chain of trust changes or issues.

To use, first either set your taskcluster creds in your env http://bit.ly/2eDMa6N
or in the CREDS_FILES http://bit.ly/2fVMu0A""")
    parser.add_argument('task_id', help='the task id to test')
    parser.add_argument('--task-type',
                        help='the task type to test',
                        choices=['signing', 'balrog', 'beetmover', 'pushapk'],
                        required=True)
    parser.add_argument('--cleanup',
                        help='clean up the temp dir afterwards',
                        dest='cleanup',
                        action='store_true',
                        default=False)
    opts = parser.parse_args(args)
    # Everything below works out of a throwaway temp dir; it is kept on exit
    # unless --cleanup was passed (see the finally block).
    tmp = tempfile.mkdtemp()
    log = logging.getLogger('scriptworker')
    log.setLevel(logging.DEBUG)
    logging.basicConfig()
    loop = asyncio.get_event_loop()
    conn = aiohttp.TCPConnector()
    try:
        # NOTE(review): sync `with` on ClientSession is deprecated in newer
        # aiohttp; this code presumably targets an older release — confirm.
        with aiohttp.ClientSession(connector=conn) as session:
            context = Context()
            context.session = session
            context.credentials = read_worker_creds()
            # Fetch the real task definition from the queue.
            context.task = loop.run_until_complete(
                context.queue.task(opts.task_id))
            context.config = dict(deepcopy(DEFAULT_CONFIG))
            # Point all working dirs at the temp dir and skip signature
            # verification (this tool debugs the other cot checks).
            context.config.update({
                'work_dir':
                os.path.join(tmp, 'work'),
                'artifact_dir':
                os.path.join(tmp, 'artifacts'),
                'task_log_dir':
                os.path.join(tmp, 'artifacts', 'public', 'logs'),
                'base_gpg_home_dir':
                os.path.join(tmp, 'gpg'),
                'verify_cot_signature':
                False,
            })
            cot = ChainOfTrust(context, opts.task_type, task_id=opts.task_id)
            loop.run_until_complete(verify_chain_of_trust(cot))
            # Dump the verified chain for inspection.
            log.info(pprint.pformat(cot.dependent_task_ids()))
            log.info("Cot task_id: {}".format(cot.task_id))
            for link in cot.links:
                log.info("task_id: {}".format(link.task_id))
            context.session.close()
        context.queue.session.close()
        loop.close()
    finally:
        if opts.cleanup:
            rm(tmp)
        else:
            log.info("Artifacts are in {}".format(tmp))
Example 14
def test_rm_dir():
    """Removing a freshly created temp dir via utils.rm should succeed."""
    doomed_dir = tempfile.mkdtemp()
    assert os.path.exists(doomed_dir)
    utils.rm(doomed_dir)
    assert not os.path.exists(doomed_dir)
Example 15
def test_rm_file():
    """utils.rm should remove a regular file."""
    # mkstemp() returns an OPEN os-level file descriptor; the original
    # discarded it with `_`, leaking the fd. Close it before removing.
    fd, tmp = tempfile.mkstemp()
    os.close(fd)
    assert os.path.exists(tmp)
    utils.rm(tmp)
    assert not os.path.exists(tmp)
Example 16
def test_rm_empty():
    """Passing None to utils.rm must not raise."""
    utils.rm(None)