Example #1
 def test_unreachable(self):
     with self.assertRaisesRegex(AssertionError, 'error 1'):
         ASSERT.unreachable('error {}', 1)
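This test pins down the contract the later examples rely on: ASSERT.unreachable(fmt, *args) always raises AssertionError with the str.format-style message, so it can sit behind a return at the end of an exhaustive dispatch (compare Examples #6 and #11). A minimal usage sketch; the _parse_scheme helper is hypothetical:

from g1.bases.assertions import ASSERT

def _parse_scheme(url):
    # Dispatch over a known set of URL schemes; anything else is taken
    # to be a programming error rather than bad user input.
    for scheme in ('http', 'https'):
        if url.startswith(scheme + '://'):
            return scheme
    # Raises AssertionError('unknown scheme: ...') and never returns;
    # writing ``return`` in front of it keeps the control flow explicit.
    return ASSERT.unreachable('unknown scheme: {}', url)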
Example #2
def define_pod(
    *,
    name: str,
    apps: typing.List[App] = (),
    images: typing.List[str] = (),
    mounts: typing.List[Mount] = (),
    volumes: typing.List[Volume] = (),
    systemd_unit_groups: typing.List[SystemdUnitGroup] = (),
    token_names: typing.Mapping[str, str] = None,
):
    """Define a pod.

    This defines:
    * Parameter: name/version.
    * Rule: name/build.  NOTE: This rule is generally run in the host
      system, not inside a builder pod.
    """
    ASSERT(
        len(images) <= 1,
        'expect at most one image per pod for now: {}',
        images,
    )
    # Let's require absolute release labels (because it is quite hard to
    # derive label path for images and volumes from pod label).
    ASSERT.all(images, lambda label: label.startswith('//'))
    ASSERT.all(volumes, lambda volume: volume.label.startswith('//'))
    ASSERT.unique(map(_get_label_name, images))
    ASSERT.unique(_get_label_name(volume.label) for volume in volumes)

    name_prefix = shipyard2.rules.canonicalize_name_prefix(name)
    parameter_version = name_prefix + 'version'
    rule_build = name_prefix + 'build'

    (foreman.define_parameter(parameter_version)
     .with_doc('pod version'))

    images = list(map(foreman.Label.parse, images))

    @foreman.rule(rule_build)
    @foreman.rule.depend('//pods/bases:build')
    @foreman.rule.depend('//releases:build')
    def build(parameters):
        version = ASSERT.not_none(parameters[parameter_version])
        pod_dir_path = releases.get_output_dir_path(parameters, name, version)
        if (
            pod_dir_path /
            shipyard2.POD_DIR_RELEASE_METADATA_FILENAME
        ).exists():
            LOG.info('skip: build pod: %s %s', name, version)
            return
        LOG.info('build pod: %s %s', name, version)
        try:
            scripts.mkdir(pod_dir_path)
            releases.generate_release_metadata(
                parameters,
                pod_dir_path / shipyard2.POD_DIR_RELEASE_METADATA_FILENAME,
            )
            _generate_deploy_instruction(
                parameters=parameters,
                pod_dir_path=pod_dir_path,
                name=name,
                version=version,
                apps=apps,
                images=images,
                mounts=mounts,
                volumes=volumes,
                systemd_unit_groups=systemd_unit_groups,
                token_names=token_names,
            )
            _link_images(parameters, pod_dir_path, images)
            _link_volumes(parameters, pod_dir_path, volumes)
        except Exception:
            # Roll back on error.
            scripts.rm(pod_dir_path, recursive=True)
            raise

    for label in images:
        build.depend(str(_images.derive_rule(label)))

    return PodRules(build=build)
Example #3
 def get_exception_nonblocking(self):
     ASSERT.true(self.is_completed())
     self._consumed = True
     return self._exception
Example #4
    def _decode_raw_value(self, value_type, raw_value):
        """Decode a raw value into ``value_type``-typed value.

        This and ``_encode_value`` complement each other.
        """

        if typings.is_recursive_type(value_type):

            if value_type.__origin__ in (list, set, frozenset):
                element_type = value_type.__args__[0]
                return value_type.__origin__(
                    self._decode_raw_value(element_type, raw_element)
                    for raw_element in raw_value)

            elif value_type.__origin__ is tuple:
                ASSERT.equal(len(raw_value), len(value_type.__args__))
                return tuple(
                    self._decode_raw_value(element_type, raw_element)
                    for element_type, raw_element in zip(
                        value_type.__args__,
                        raw_value,
                    ))

            elif typings.is_union_type(value_type):

                # Handle ``None`` special case.
                if raw_value is None:
                    ASSERT.in_(NoneType, value_type.__args__)
                    return None

                # Handle ``Optional[T]`` special case.
                type_ = typings.match_optional_type(value_type)
                if type_:
                    return self._decode_raw_value(type_, raw_value)

                ASSERT.equal(len(raw_value), 1)
                type_name, raw_element = next(iter(raw_value.items()))
                for type_ in value_type.__args__:
                    if typings.is_recursive_type(type_):
                        candidate = str(type_)
                    else:
                        candidate = type_.__name__
                    if type_name == candidate:
                        return self._decode_raw_value(type_, raw_element)

                return ASSERT.unreachable(
                    'raw value is not any union element type: {!r} {!r}',
                    value_type,
                    raw_value,
                )

            else:
                return ASSERT.unreachable('unsupported generic: {!r}',
                                          value_type)

        elif wiredata.is_message_type(value_type):
            return value_type(
                **{
                    f.name: self._decode_raw_value(f.type, raw_value[f.name])
                    for f in dataclasses.fields(value_type)
                    if f.name in raw_value
                })

        elif not isinstance(value_type, type):
            # Non-``type`` instance cannot be passed to ``issubclass``.
            return ASSERT.unreachable('unsupported value type: {!r}',
                                      value_type)

        elif issubclass(value_type, datetime.datetime):
            return value_type.fromisoformat(raw_value)

        elif issubclass(value_type, enum.Enum):
            return value_type[raw_value]

        elif issubclass(value_type, bytes):
            return base64.standard_b64decode(raw_value.encode('ascii'))

        elif issubclass(value_type, Exception):
            ASSERT.equal(len(raw_value), 1)
            return value_type(
                *(ASSERT.isinstance(raw_arg, _DIRECTLY_SERIALIZABLE_TYPES)
                  for raw_arg in raw_value[value_type.__name__]))

        elif issubclass(value_type, _DIRECTLY_SERIALIZABLE_TYPES):
            if value_type in _DIRECTLY_SERIALIZABLE_TYPES:
                return ASSERT.isinstance(raw_value, value_type)
            else:
                # Support sub-type of int, etc.
                return value_type(raw_value)

        else:
            return ASSERT.unreachable('unsupported value type: {!r}',
                                      value_type)
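Read alongside _encode_value (Example #25), the decoder implies a simple wire format: bytes travel as ASCII base64 text (JSON has no binary type), datetime as ISO 8601 strings, enums by member name, and non-Optional union values wrapped in a one-entry dict keyed by the element type's name. A small self-contained sketch of just the bytes convention, without touching the wiredata API:

import base64

# What ``_encode_value`` produces for a ``bytes`` field ...
raw = base64.standard_b64encode(b'\x00\x01payload').decode('ascii')
# ... and how ``_decode_raw_value`` reverses it.
assert base64.standard_b64decode(raw.encode('ascii')) == b'\x00\x01payload'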
Example #5
 def to_upper(self, message_type, wire_message):
     ASSERT.predicate(message_type, wiredata.is_message_type)
     raw_message = json.loads(wire_message)
     return self._decode_raw_value(message_type, raw_message)
Example #6
def _find_root_project(src_path):
    for path in itertools.chain([src_path], src_path.parents):
        if _is_root_project(path):
            return path
    return ASSERT.unreachable('cannot find root project from: {}', src_path)
Example #7
def cleanup(parameters):
    ASSERT.is_(parameters['inside-builder-pod'], True)
    ASSERT.all(parameters['roots'], _is_root_dir)
    with scripts.using_sudo():
        scripts.apt_get_clean()
Example #8
 def rollback_due_to_timeout(self):
     ASSERT.not_equal(self._tx_id, 0)
     self._tx.rollback()
     self._end(self._timeout_tx_ids)
Example #9
 def notify(self, n=1):
     ASSERT.true(self._lock.is_owner())
     for _ in range(min(n, len(self._waiters))):
         self._waiters.pop().unblock()
Example #10
 def append(self, data):
     ASSERT.isinstance(data, bytes)
     errors.check(self._chunk_append(self._get(), data, len(data)))
Example #11
def _get_dialect(db_url):
    for dialect in ('postgresql', 'sqlite'):
        if db_url.startswith(dialect):
            return dialect
    return ASSERT.unreachable('unsupported dialect: {}', db_url)
Example #12
 def _get(self):
     return ASSERT.not_none(self._msg_p)
Example #13
 def test_assert_collection_mapper(self):
     self.assertEqual(ASSERT.all([2, 4, 6], is_even), [2, 4, 6])
     pattern = r'expect all .*is_even.*, not \[2, 4, 6, 7\]'
     with self.assertRaisesRegex(AssertionError, pattern):
         ASSERT.all([2, 4, 6, 7], is_even)
Example #14
    def test_assertion_methods_pass(self):
        checks = [
            ('__call__', (1, ''), 1),
            ('true', (True, ), True),
            ('true', (1, ), 1),
            ('false', (False, ), False),
            ('false', ('', ), ''),
            ('empty', ((), ), ()),
            ('empty', ([], ), []),
            ('empty', ({}, ), {}),
            ('empty', (set(), ), set()),
            ('not_empty', ((1, ), ), (1, )),
            ('not_empty', ([2], ), [2]),
            ('not_empty', (dict(x=1), ), dict(x=1)),
            ('not_empty', (set([42]), ), set([42])),
            ('none', (None, ), None),
            ('not_none', (0, ), 0),
            ('predicate', (0, is_even), 0),
            ('not_predicate', (3, is_even), 3),
            ('xor', (0, 1), 0),
            ('xor', (1, 0), 1),
            ('not_xor', (0, 0), 0),
            ('not_xor', (1, 1), 1),
            ('is_', (0, 0), 0),
            ('is_not', (0, 1), 0),
            ('isinstance', ('hello', (int, str)), 'hello'),
            ('not_isinstance', ('hello', (int, bytes)), 'hello'),
            ('issubclass', (Derived, Base), Derived),
            ('not_issubclass', (Base, Derived), Base),
            ('in_', (1, [1]), 1),
            ('not_in', (0, [1]), 0),
            ('contains', ([1], 1), [1]),
            ('not_contains', ([1], 0), [1]),
            (
                'getitem',
                (
                    {
                        'x': 1,
                    },
                    'x',
                ),
                1,
            ),
            ('equal', (0, 0), 0),
            ('not_equal', (0, 1), 0),
            ('greater', (1, 0), 1),
            ('greater_or_equal', (1, 0), 1),
            ('greater_or_equal', (0, 0), 0),
            ('less', (0, 1), 0),
            ('less_or_equal', (0, 1), 0),
            ('less_or_equal', (1, 1), 1),
            ('in_range', (0, (0, 1)), 0),
            ('not_in_range', (1, (0, 1)), 1),
            ('startswith', ('hello world', 'hello'), 'hello world'),
            ('startswith', (b'hello world', b'hello'), b'hello world'),
            ('not_startswith', ('hello', 'hello world'), 'hello'),
            ('not_startswith', (b'hello', b'hello world'), b'hello'),
            ('isdisjoint', ({1, 2}, {3, 4}), {1, 2}),
            ('not_isdisjoint', ({1, 2}, {2, 3, 4}), {1, 2}),
            ('issubset', ({1, 2}, {1, 2}), {1, 2}),
            ('not_issubset', ({1, 2}, {2, 3}), {1, 2}),
            ('issubset_proper', ({1, 2}, {1, 2, 3}), {1, 2}),
            ('not_issubset_proper', ({1, 2}, {1, 2}), {1, 2}),
            ('issuperset', ({1, 2}, {1, 2}), {1, 2}),
            ('not_issuperset', ({1, 2}, {2, 3}), {1, 2}),
            ('issuperset_proper', ({1, 2, 3}, {1, 2}), {1, 2, 3}),
            ('not_issuperset_proper', ({1, 2}, {1, 2}), {1, 2}),
        ]
        for check_name, args, expect_ret in checks:
            with self.subTest(check=check_name):
                check = getattr(ASSERT, check_name)
                self.assertEqual(check(*args), expect_ret)

        d = {}
        self.assert_.setitem(d, 0, 0)
        self.assertEqual(d, {0: 0})

        for args in [
            (),
            ([], ),
            ([], []),
            ([], [], []),
            ('a', 'b', 'c'),
            ('ad', 'be', 'cf'),
        ]:
            with self.subTest(args):
                self.assertEqual(list(ASSERT.zip(*args)), list(zip(*args)))
Example #15
 def notify_open(self, fd):
     ASSERT.false(self._epoll.closed)
     try:
         self._epoll.register(fd, self._EVENT_MASK)
     except FileExistsError:
         pass
Example #16
 def __init__(self, value=1):
     self._value = ASSERT.greater_or_equal(value, 0)
     self._gate = Gate()
Example #17
_DEFINITION_LIST_COLUMNS = frozenset((
    'token-name',
    'range',
    'values',
))
_DEFINITION_LIST_DEFAULT_COLUMNS = (
    'token-name',
    'range',
    'values',
)
_DEFINITION_LIST_STRINGIFIERS = {
    'range': lambda args: ' '.join(map(str, args)),
    'values': ' '.join,
}
ASSERT.issuperset(_DEFINITION_LIST_COLUMNS, _DEFINITION_LIST_DEFAULT_COLUMNS)
ASSERT.issuperset(_DEFINITION_LIST_COLUMNS, _DEFINITION_LIST_STRINGIFIERS)


@argparses.begin_parser('list-definitions',
                        **argparses.make_help_kwargs('list token definitions'))
@columns_argparses.columnar_arguments(_DEFINITION_LIST_COLUMNS,
                                      _DEFINITION_LIST_DEFAULT_COLUMNS)
@argparses.end
def cmd_list_definitions(args):
    columnar = columns.Columnar(
        **columns_argparses.make_columnar_kwargs(args),
        stringifiers=_DEFINITION_LIST_STRINGIFIERS,
    )
    for token_name, definition in (
            tokens.make_tokens_database().get().definitions.items()):
Example #18
 def release(self, n=1):
     self._value += ASSERT.greater_or_equal(n, 1)
     self._gate.unblock()
Example #19
def chown_app(path):
    """Change owner to root and group to the application group."""
    shutil.chown(path, 'root', ASSERT.true(PARAMS.application_group.get()))
Example #20
 def release(self, n=1):
     ASSERT.less_or_equal(self._value + n, self.__upper_bound)
     return super().release(n)
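Examples #16, #18, and #20 lean on the fact that each assertion method returns its first argument on success (see the expected return values in Example #14), so validation and assignment collapse into one line. A minimal sketch; the Config class here is hypothetical:

from g1.bases.assertions import ASSERT

class Config:

    def __init__(self, num_workers, queue_size):
        # Each check validates and then hands the value back, so there
        # is no separate "assert, then assign" step.
        self.num_workers = ASSERT.greater(num_workers, 0)
        self.queue_size = ASSERT.greater_or_equal(queue_size, num_workers)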
Example #21

import base64
import dataclasses
import datetime
import enum
import json
import sys

from g1.bases import typings
from g1.bases.assertions import ASSERT

from .. import wiredata

# Python 3.7 supports parsing ISO 8601 (bpo-15873), finally!
ASSERT.greater_or_equal(sys.version_info, (3, 7))

NoneType = type(None)

_DIRECTLY_SERIALIZABLE_TYPES = (dict, list, tuple, str, int, float, bool,
                                NoneType)


class JsonWireData(wiredata.WireData):
    """JSON wire data converter.

    This supports ``datetime.datetime``, ``enum.Enum``, ``Exception``,
    ``typing.Tuple``, ``typing.List``, ``typing.Set``,
    ``typing.FrozenSet``, and ``typing.Union``.

    Caveats:
Example #22
 def release(self):
     ASSERT.true(self.is_owner())
     self._locked = False
     contexts.get_kernel().unblock(self)
Example #23
 def to_lower(self, message):
     ASSERT.predicate(message, wiredata.is_message)
     raw_message = self._encode_value(type(message), message)
     return json.dumps(raw_message).encode('ascii')
Example #24
def build_image(
    *,
    parameters,
    builder_id,
    builder_images,
    name,
    version,
    rules,
    output,
):
    # Although it is tempting to mount source repos under the drydock
    # directory rather than /usr/src, this is not possible because the
    # base image does not have /home/plumber/drydock directory yet, and
    # so systemd-nspawn will reject mounting source repos under drydock.
    root_host_paths = parameters['//bases:roots']
    builder_config = _generate_builder_config(
        name=name,
        version=version,
        apps=_get_apps(
            parameters,
            builder_images,
            root_host_paths,
            rules,
        ),
        images=_get_images(
            builder_images,
            ASSERT.not_none(parameters['//images/bases:base/version']),
        ),
        mounts=_get_mounts(
            parameters['//releases:shipyard-data'],
            name,
            rules,
        ),
        overlays=_get_overlays(root_host_paths),
    )
    with contextlib.ExitStack() as stack:
        tempdir_path = Path(
            stack.enter_context(
                tempfile.TemporaryDirectory(dir=output.parent)
            )
        )
        builder_config_path = tempdir_path / 'builder.json'
        builder_config_path.write_text(json.dumps(builder_config))
        if shipyard2.is_debug():
            LOG.debug('builder config: %s', builder_config_path.read_text())
        # The builder pod might not be cleaned up when `ctr pods run`
        # fails; so let's always do `ctr pods remove` on our way out.
        stack.callback(ctr_scripts.ctr_remove_pod, builder_id)
        LOG.info('start builder pod')
        ctr_scripts.ctr_run_pod(builder_id, builder_config_path)
        LOG.info('export intermediate builder image to: %s', output)
        rootfs_path = tempdir_path / 'rootfs'
        stack.callback(scripts.rm, rootfs_path, recursive=True)
        ctr_scripts.ctr([
            'pods',
            'export-overlay',
            builder_id,
            rootfs_path,
        ])
        ctr_scripts.ctr_build_image(
            utils.get_builder_name(name), version, rootfs_path, output
        )
        ctr_scripts.ctr_import_image(output)
Example #25
    def _encode_value(self, value_type, value):
        """Encode a value into a raw value.

        This and ``_decode_raw_value`` complement each other.
        """

        if typings.is_recursive_type(value_type):

            if value_type.__origin__ in (list, set, frozenset):
                element_type = value_type.__args__[0]
                return [
                    self._encode_value(element_type, element)
                    for element in value
                ]

            elif value_type.__origin__ is tuple:
                ASSERT.equal(len(value), len(value_type.__args__))
                return tuple(
                    self._encode_value(element_type, element)
                    for element_type, element in zip(
                        value_type.__args__,
                        value,
                    ))

            elif typings.is_union_type(value_type):

                # Make a special case for ``None``.
                if value is None:
                    ASSERT.in_(NoneType, value_type.__args__)
                    return None

                # Make a special case for ``Optional[T]``.
                type_ = typings.match_optional_type(value_type)
                if type_:
                    return self._encode_value(type_, value)

                for type_ in value_type.__args__:
                    if typings.is_recursive_type(type_):
                        if _match_recursive_type(type_, value):
                            return {
                                str(type_): self._encode_value(type_, value)
                            }
                    elif isinstance(value, type_):
                        return {
                            type_.__name__: self._encode_value(type_, value)
                        }

                return ASSERT.unreachable(
                    'value is not any union element type: {!r} {!r}',
                    value_type,
                    value,
                )

            else:
                return ASSERT.unreachable('unsupported generic: {!r}',
                                          value_type)

        elif wiredata.is_message(value):
            ASSERT.predicate(value_type, wiredata.is_message_type)
            return {
                f.name: self._encode_value(f.type, getattr(value, f.name))
                for f in dataclasses.fields(value)
            }

        elif isinstance(value, datetime.datetime):
            ASSERT.issubclass(value_type, datetime.datetime)
            return value.isoformat()

        elif isinstance(value, enum.Enum):
            ASSERT.issubclass(value_type, enum.Enum)
            return value.name

        # JSON does not support binary type; so it has to be encoded.
        elif isinstance(value, bytes):
            ASSERT.issubclass(value_type, bytes)
            return base64.standard_b64encode(value).decode('ascii')

        elif isinstance(value, Exception):
            ASSERT.issubclass(value_type, Exception)
            return {
                type(value).__name__: [
                    ASSERT.isinstance(arg, _DIRECTLY_SERIALIZABLE_TYPES)
                    for arg in value.args
                ]
            }

        elif isinstance(value, _DIRECTLY_SERIALIZABLE_TYPES):
            ASSERT.issubclass(value_type, _DIRECTLY_SERIALIZABLE_TYPES)
            return value

        else:
            return ASSERT.unreachable('unsupported value type: {!r} {!r}',
                                      value_type, value)
Example #26
 def __init__(self, max_retries, backoff_base):
     self._max_retries = ASSERT.greater(max_retries, 0)
     self._backoff_base = ASSERT.greater(backoff_base, 0)
Example #27
 def get_result_nonblocking(self):
     ASSERT.true(self.is_completed())
     self._consumed = True
     if self._exception:
         raise self._exception
     return self._result
Example #28
 def add(self, t):
     if self._log:
         ASSERT.greater(t, self._log[-1])
     self._log.append(t)
Example #29
 def __setitem__(self, header, value):
     ASSERT.true(self._is_uncommitted())
     ASSERT.isinstance(header, str)
     ASSERT.isinstance(value, str)
     self._headers[header] = value
Example #30
from g1.bases.assertions import ASSERT
from g1.texts import columns
from g1.texts.columns import argparses as columns_argparses

from . import envs
from . import models

_ENV_LIST_COLUMNS = frozenset((
    'name',
    'value',
))
_ENV_LIST_DEFAULT_COLUMNS = (
    'name',
    'value',
)
ASSERT.issuperset(_ENV_LIST_COLUMNS, _ENV_LIST_DEFAULT_COLUMNS)


@argparses.begin_parser(
    'list',
    **argparses.make_help_kwargs('list environment variables'),
)
@columns_argparses.columnar_arguments(_ENV_LIST_COLUMNS,
                                      _ENV_LIST_DEFAULT_COLUMNS)
@argparses.end
def cmd_list(args):
    columnar = columns.Columnar(**columns_argparses.make_columnar_kwargs(args))
    for name, value in envs.load().items():
        columnar.append({'name': name, 'value': value})
    columnar.output(sys.stdout)
    return 0
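Examples #17 and #30 both use module-level ASSERT.issuperset calls as import-time sanity checks: if the default column list (or the stringifier table) names a column outside the declared column set, the module fails to import instead of producing broken output at runtime. The same pattern in isolation, with made-up column names:

from g1.bases.assertions import ASSERT

_USER_LIST_COLUMNS = frozenset(('id', 'email'))
_USER_LIST_DEFAULT_COLUMNS = ('id', 'email')
# Fails at import time if the defaults drift out of sync with the
# declared column set.
ASSERT.issuperset(_USER_LIST_COLUMNS, _USER_LIST_DEFAULT_COLUMNS)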