Example no. 1
0
def test_parallel_execute_with_upstream_errors():
    """A failing object must not run its dependents: they are reported
    as UpstreamError, while independent objects still complete."""
    processed = []

    def process(item):
        # Simulate a hard failure for the shared volume; record the rest.
        if item is data_volume:
            raise APIError(None, None, "Something went wrong")
        processed.append(item)

    parallel_execute(
        objects=objects,
        func=process,
        get_name=lambda obj: obj,
        msg="Processing",
        get_deps=get_deps,
    )

    # Only the independent cache object completes successfully.
    assert processed == [cache]

    events = []
    for obj, result, exception in parallel_execute_iter(
            objects, process, get_deps, None):
        events.append((obj, result, type(exception)))

    assert (cache, None, type(None)) in events
    assert (data_volume, None, APIError) in events
    assert (db, None, UpstreamError) in events
    assert (web, None, UpstreamError) in events
Example no. 2
0
def test_parallel_execute_with_upstream_errors():
    """A failing object marks its transitive dependents as UpstreamError."""
    completed = []

    def process(obj):
        # data_volume fails hard; everything else records completion.
        if obj is data_volume:
            raise APIError(None, None, "Something went wrong")
        completed.append(obj)

    parallel_execute(
        objects=objects,
        func=process,
        get_name=lambda obj: obj,
        msg="Processing",
        get_deps=get_deps,
    )

    # cache has no failing dependency, so it is the only success.
    assert completed == [cache]

    events = [
        (obj, result, type(exc))
        for obj, result, exc in parallel_execute_stream(
            objects, process, get_deps)
    ]

    for expected in [
        (cache, None, type(None)),
        (data_volume, None, APIError),
        (db, None, UpstreamError),
        (web, None, UpstreamError),
    ]:
        assert expected in events
Example no. 3
0
def test_parallel_execute_with_deps():
    """Dependencies must be processed before their dependents."""
    order = []

    def record(item):
        order.append(item)

    parallel_execute(
        objects=objects,
        func=record,
        get_name=lambda obj: obj,
        msg="Processing",
        get_deps=get_deps,
    )

    # Every object ran exactly once.
    assert sorted(order) == sorted(objects)

    # Each dependency finished before anything that depends on it.
    assert order.index(data_volume) < order.index(db)
    assert order.index(db) < order.index(web)
    assert order.index(cache) < order.index(web)
Example no. 4
0
def test_parallel_execute():
    """parallel_execute doubles every input and reports no errors."""
    results, errors = parallel_execute(
        objects=[1, 2, 3, 4, 5],
        func=lambda value: value * 2,
        get_name=six.text_type,
        msg="Doubling",
    )

    assert errors == {}
    assert sorted(results) == [2, 4, 6, 8, 10]
Example no. 5
0
    def run(self):
        """Sync each buildable service's image to the registry named by
        ``--repo``.

        Services that only reference a prebuilt image are logged and
        skipped.  Syncing runs in parallel (capped at 5 workers) unless
        ``--disable-parallel`` is set.

        Raises:
            ProjectError: with the combined error messages if any
                parallel sync fails.
        """
        if not self.options['--repo']:
            log.error(
                '--repo is required and must be a docker repository base url')
            # Bail out: without a registry base url there is nothing to
            # sync against.  (Original fell through with an empty repo.)
            return
        self.docker_registry_base = self.options['--repo']

        # Only locally-built services can be synced; image-only services
        # are reported and skipped.
        services = []
        for service in self.project.get_services_without_duplicate(
                service_names=self.options['SERVICES']):
            if service.can_be_built():
                services.append(service)
            else:
                log.info('%s uses an image, skipping' % service.name)

        def process_service(service):
            self.process_service(service)

        if not self.options['--disable-parallel']:
            _, errors = parallel.parallel_execute(
                services,
                process_service,
                operator.attrgetter('name'),
                'Syncing',
                limit=5,
            )
            if errors:
                # Normalise byte-string errors to text before joining.
                combined_errors = '\n'.join([
                    e.decode('utf-8') if isinstance(e, six.binary_type) else e
                    for e in errors.values()
                ])
                raise ProjectError(combined_errors)

        else:
            # Sequential fallback when parallelism is disabled.
            for service in services:
                process_service(service)
Example no. 6
0
def test_parallel_execute():
    """Doubling five integers yields exactly the five doubled values."""
    doubled = parallel_execute(
        objects=[1, 2, 3, 4, 5],
        func=lambda n: n * 2,
        get_name=six.text_type,
        msg="Doubling",
    )

    # Order is not guaranteed, so compare sorted results.
    assert sorted(doubled) == [2, 4, 6, 8, 10]
Example no. 7
0
def test_parallel_execute_with_deps():
    """Execution order respects the dependency graph from get_deps."""
    seen = []

    def track(obj):
        seen.append(obj)

    parallel_execute(
        objects=objects,
        func=track,
        get_name=lambda obj: obj,
        msg="Processing",
        get_deps=get_deps,
    )

    # All objects processed, none duplicated.
    assert sorted(seen) == sorted(objects)

    # Dependencies always complete before their dependents.
    for dependency, dependent in [(data_volume, db), (db, web), (cache, web)]:
        assert seen.index(dependency) < seen.index(dependent)
Example no. 8
0
    def test_parallel_execute(self):
        """Each of five integers is doubled with no errors reported."""
        results, errors = parallel_execute(
            objects=[1, 2, 3, 4, 5],
            func=lambda value: 2 * value,
            get_name=str,
            msg="Doubling",
        )

        assert errors == {}
        # Completion order is nondeterministic; sort before comparing.
        assert sorted(results) == [2, 4, 6, 8, 10]
Example no. 9
0
def test_parallel_execute_alignment(capsys):
    """The '...' status marker lines up regardless of name length."""
    results, errors = parallel_execute(
        objects=["short", "a very long name"],
        func=lambda name: name,
        get_name=six.text_type,
        msg="Aligning",
    )

    assert errors == {}

    _, err = capsys.readouterr()
    first_line, second_line = err.split('\n')[:2]
    assert first_line.index('...') == second_line.index('...')
Example no. 10
0
def test_parallel_execute_alignment(capsys):
    """Status output pads names so the '...' column matches for short
    and long names alike."""
    results, errors = parallel_execute(
        objects=["short", "a very long name"],
        func=lambda obj: obj,
        get_name=six.text_type,
        msg="Aligning",
    )

    assert errors == {}

    _, err = capsys.readouterr()
    lines = err.split('\n')
    assert lines[0].index('...') == lines[1].index('...')
Example no. 11
0
def test_parallel_execute_noansi(capsys):
    """With ANSI disabled, no escape sequences reach the output."""
    ParallelStreamWriter.set_noansi()
    results, errors = parallel_execute(
        objects=["something", "something more"],
        func=lambda item: item,
        get_name=six.text_type,
        msg="Control characters",
    )

    assert errors == {}

    # ESC (0x1b) starts every ANSI control sequence.
    _, err = capsys.readouterr()
    assert "\x1b" not in err
Example no. 12
0
    def up(self,
           service_names=None,
           start_deps=True,
           strategy=ConvergenceStrategy.changed,
           do_build=BuildAction.none,
           timeout=DEFAULT_TIMEOUT,
           detached=False,
           remove_orphans=False):
        """Bring the project's services up, converging each in parallel
        while respecting inter-service dependencies.

        Args:
            service_names: subset of services to start; None means all.
            start_deps: also include each service's dependencies.
            strategy: ConvergenceStrategy deciding when containers are
                recreated.
            do_build: BuildAction controlling whether images are built.
            timeout: per-service timeout passed to the convergence plan.
            detached: run containers detached.
            remove_orphans: handled by find_orphan_containers below.

        Returns:
            Flat list of containers produced by all services
            (services whose plan yielded no containers are skipped).

        Raises:
            ProjectError: if any service failed to converge.
        """

        warn_for_swarm_mode(self.project.client)

        self.project.initialize()
        self.project.find_orphan_containers(remove_orphans)

        # De-duplicated service list, optionally pulling in dependencies.
        services = self.project.get_services_without_duplicate(
            service_names,
            include_deps=start_deps)

        # Images must exist before convergence plans are computed/applied.
        for svc in services:
            svc.ensure_image_exists(do_build=do_build)
        plans = self.project._get_convergence_plans(services, strategy)

        def do(service):
            # Apply this service's precomputed convergence plan.
            return service.execute_convergence_plan(
                plans[service.name],
                timeout=timeout,
                detached=detached
            )

        def get_deps(service):
            # A service may only start after all of its dependencies.
            return {self.project.get_service(dep) for dep in service.get_dependency_names()}

        results, errors = parallel.parallel_execute(
            services,
            do,
            operator.attrgetter('name'),
            None,
            get_deps
        )
        if errors:
            raise ProjectError(
                'Encountered errors while bringing up the project.', errors
            )

        # Flatten the per-service container lists into one list.
        return [
            container
            for svc_containers in results
            if svc_containers is not None
            for container in svc_containers
        ]
Example no. 13
0
def test_parallel_execute_ansi(capsys):
    """With ANSI explicitly enabled, escape codes appear in the output."""
    # Reset the writer singleton so the noansi flag takes effect.
    ParallelStreamWriter.instance = None
    ParallelStreamWriter.set_noansi(value=False)
    results, errors = parallel_execute(
        objects=["something", "something more"],
        func=lambda text: text,
        get_name=str,
        msg="Control characters",
    )

    assert errors == {}

    # ESC (0x1b) starts every ANSI control sequence.
    _, err = capsys.readouterr()
    assert "\x1b" in err
Example no. 14
0
def test_parallel_execute_with_limit():
    """A concurrency limit of one serializes all task execution."""
    limit = 1
    tasks = 20
    lock = Lock()

    def serialized(obj):
        # Non-blocking acquire succeeds only if no other worker is
        # inside this function concurrently.
        acquired = lock.acquire(False)
        assert acquired
        lock.release()
        return None

    results, errors = parallel_execute(
        objects=list(range(tasks)),
        func=serialized,
        get_name=six.text_type,
        msg="Testing",
        limit=limit,
    )

    assert results == [None] * tasks
    assert errors == {}
Example no. 15
0
def test_parallel_execute_with_limit():
    """With limit=1, tasks never overlap in time."""
    limit = 1
    tasks = 20
    guard = Lock()

    def exclusive(obj):
        # If another task were running, this non-blocking acquire
        # would fail.
        got_it = guard.acquire(False)
        assert got_it
        guard.release()
        return None

    results, errors = parallel_execute(
        objects=list(range(tasks)),
        func=exclusive,
        get_name=six.text_type,
        msg="Testing",
        limit=limit,
    )

    assert results == [None] * tasks
    assert errors == {}
Example no. 16
0
    def test_parallel_execute_with_global_limit(self):
        """A global limit of one forces all tasks to run sequentially."""
        GlobalLimit.set_global_limit(1)
        # Restore unlimited parallelism after the test runs.
        self.addCleanup(GlobalLimit.set_global_limit, None)
        tasks = 20
        mutex = Lock()

        def single_file(obj):
            # Non-blocking acquire proves no concurrent execution.
            held = mutex.acquire(False)
            assert held
            mutex.release()
            return None

        results, errors = parallel_execute(
            objects=list(range(tasks)),
            func=single_file,
            get_name=str,
            msg="Testing",
        )

        assert results == [None] * tasks
        assert errors == {}