Example #1
 def test_log_none_effectful_fields(self):
     """
     When log is not passed, but there are log fields from BoundFields,
     the log passed to treq has those fields.
     """
     log = mock_log()
     # we have to include system='otter' in the expected log here because
     # the code falls back to otter.log.log, which has the system key bound.
     expected_log = matches(IsBoundWith(bound='stuff', system='otter'))
     req = ('GET', 'http://google.com/', None, None, None, {
         'log': expected_log
     })
     response = StubResponse(200, {})
     treq = StubTreq(reqs=[(req, response)],
                     contents=[(response, "content")])
     req = Request(method="get", url="http://google.com/")
     req.treq = treq
     req_eff = Effect(req)
     bound_log_eff = with_log(req_eff, bound='stuff')
     dispatcher = ComposedDispatcher(
         [get_simple_dispatcher(None),
          get_log_dispatcher(log, {})])
     self.assertEqual(
         self.successResultOf(perform(dispatcher, bound_log_eff)),
         (response, "content"))
Example #2
File: _fabric.py Project: ruo91/flocker
def perform_run_remotely(base_dispatcher, intent):
    """
    Run a series of commands on a remote host.
    """

    dispatcher = ComposedDispatcher([
        TypeDispatcher({
            Run: perform_run,
            Sudo: perform_sudo,
            Put: perform_put,
            Comment: perform_comment,
        }),
        base_dispatcher,
    ])

    host_string = "%s@%s" % (intent.username, intent.address)
    with settings(
            connection_attempts=24,
            timeout=5,
            pty=False,
            host_string=host_string):

        sync_perform(dispatcher, intent.commands)

    disconnect_all()
Example #3
def perform_tenant_scope(authenticator,
                         log,
                         service_configs,
                         throttler,
                         dispatcher,
                         tenant_scope,
                         box,
                         _concretize=concretize_service_request):
    """
    Perform a :obj:`TenantScope` by performing its :attr:`TenantScope.effect`,
    with a dispatcher extended with a performer for :obj:`ServiceRequest`
    intents. The performer will use the tenant provided by the
    :obj:`TenantScope`.

    The first arguments before (dispatcher, tenant_scope, box) are intended
    to be partially applied, and the result is a performer that can be put into
    a dispatcher.
    """
    @sync_performer
    def scoped_performer(dispatcher, service_request):
        return _concretize(authenticator, log, service_configs, throttler,
                           tenant_scope.tenant_id, service_request)

    new_disp = ComposedDispatcher(
        [TypeDispatcher({ServiceRequest: scoped_performer}), dispatcher])
    perform(new_disp, tenant_scope.effect.on(box.succeed, box.fail))
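The docstring above describes a partial-application pattern: everything except (dispatcher, tenant_scope, box) is bound ahead of time, and the resulting callable is registered as a raw performer. A minimal sketch of that wiring, assuming the names from this example are importable:

from functools import partial

from effect import TypeDispatcher

# Hypothetical wiring: binding the first four arguments leaves a callable with
# the (dispatcher, intent, box) signature that TypeDispatcher expects, keyed on
# the TenantScope intent type.
tenant_performer = partial(perform_tenant_scope, authenticator, log,
                           service_configs, throttler)
tenant_dispatcher = TypeDispatcher({TenantScope: tenant_performer})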
Example #4
def publish_docs_main(args, base_path, top_level):
    """
    :param list args: The arguments passed to the script.
    :param FilePath base_path: The executable being run.
    :param FilePath top_level: The top-level of the flocker repository.
    """
    options = PublishDocsOptions()

    try:
        options.parseOptions(args)
    except UsageError as e:
        sys.stderr.write("%s: %s\n" % (base_path.basename(), e))
        raise SystemExit(1)

    try:
        sync_perform(
            dispatcher=ComposedDispatcher([boto_dispatcher, base_dispatcher]),
            effect=publish_docs(
                flocker_version=options['flocker-version'],
                doc_version=options['doc-version'],
                environment=options.environment,
            ))
    except NotARelease:
        sys.stderr.write("%s: Can't publish non-release.\n" %
                         (base_path.basename(), ))
        raise SystemExit(1)
    except NotTagged:
        sys.stderr.write(
            "%s: Can't publish non-tagged version to production.\n" %
            (base_path.basename(), ))
        raise SystemExit(1)
Example #5
 def setUp(self):
     self.clock = Clock()
     self.log = mock_log()
     self.disp = ComposedDispatcher([
         get_msg_time_dispatcher(self.clock),
         get_log_dispatcher(self.log, {})
     ])
Example #6
    def perform_retry_without_delay(actual_retry_intent):
        should_retry = actual_retry_intent.should_retry
        if isinstance(should_retry, ShouldDelayAndRetry):

            def should_retry(exc_info):
                exc_type, exc_value, exc_traceback = exc_info
                failure = Failure(exc_value, exc_type, exc_traceback)
                return Effect(
                    Constant(
                        actual_retry_intent.should_retry.can_retry(failure)))

        new_retry_effect = Effect(
            Retry(effect=actual_retry_intent.effect,
                  should_retry=should_retry))

        _dispatchers = [
            TypeDispatcher({Retry: perform_retry}), base_dispatcher
        ]
        if fallback_dispatcher is not None:
            _dispatchers.append(fallback_dispatcher)

        seq = [(expected_retry_intent.effect.intent, performer)
               for performer in performers]

        return perform_sequence(seq, new_retry_effect,
                                ComposedDispatcher(_dispatchers))
Example #7
def perform_run_remotely(reactor, base_dispatcher, intent):
    connection_helper = get_connection_helper(reactor,
                                              username=intent.username,
                                              address=intent.address,
                                              port=intent.port)

    context = Message.new(username=intent.username,
                          address=intent.address,
                          port=intent.port)

    def connect():
        connection = connection_helper.secureConnection()
        connection.addErrback(write_failure)
        timeout(reactor, connection, 30)
        return connection

    connection = yield loop_until(reactor, connect)

    dispatcher = ComposedDispatcher([
        get_ssh_dispatcher(
            connection=connection,
            context=context,
        ),
        base_dispatcher,
    ])

    yield perform(dispatcher, intent.commands)

    yield connection_helper.cleanupConnection(connection, False)
Example #8
File: release.py Project: jhbsz/flocker
def publish_artifacts_main(args, base_path, top_level):
    """
    Publish release artifacts.

    :param list args: The arguments passed to the scripts.
    :param FilePath base_path: The executable being run.
    :param FilePath top_level: The top-level of the flocker repository.
    """
    options = UploadOptions()

    try:
        options.parseOptions(args)
    except UsageError as e:
        sys.stderr.write("%s: %s\n" % (base_path.basename(), e))
        raise SystemExit(1)
    except NotARelease:
        sys.stderr.write("%s: Can't publish artifacts for a non-release.\n"
                         % (base_path.basename(),))
        raise SystemExit(1)
    except DocumentationRelease:
        sys.stderr.write("%s: Can't publish artifacts for a documentation "
                         "release.\n" % (base_path.basename(),))
        raise SystemExit(1)

    dispatcher = ComposedDispatcher([boto_dispatcher, yum_dispatcher,
                                     base_dispatcher])

    scratch_directory = FilePath(tempfile.mkdtemp(
        prefix=b'flocker-upload-'))
    scratch_directory.child('packages').createDirectory()
    scratch_directory.child('python').createDirectory()
    scratch_directory.child('pip').createDirectory()

    try:
        sync_perform(
            dispatcher=dispatcher,
            effect=sequence([
                upload_packages(
                    scratch_directory=scratch_directory.child('packages'),
                    target_bucket=options['target'],
                    version=options['flocker-version'],
                    build_server=options['build-server'],
                    top_level=top_level,
                ),
                upload_python_packages(
                    scratch_directory=scratch_directory.child('python'),
                    target_bucket=options['target'],
                    top_level=top_level,
                    output=sys.stdout,
                    error=sys.stderr,
                ),
                upload_pip_index(
                    scratch_directory=scratch_directory.child('pip'),
                    target_bucket=options['target'],
                ),
            ]),
        )

    finally:
        scratch_directory.remove()
Example #9
    def update_repo(self, aws, yum,
                    rpm_directory, target_bucket, target_key, source_repo,
                    packages, flocker_version, distro_name, distro_version):
        """
        Call :func:`update_repo`, interacting with a fake AWS and yum
        utilities.

        :param FakeAWS aws: Fake AWS to interact with.
        :param FakeYum yum: Fake yum utilities to interact with.

        See :py:func:`update_repo` for other parameter documentation.
        """
        dispatchers = [aws.get_dispatcher(), yum.get_dispatcher(),
                       base_dispatcher]
        sync_perform(
            ComposedDispatcher(dispatchers),
            update_repo(
                rpm_directory=rpm_directory,
                target_bucket=target_bucket,
                target_key=target_key,
                source_repo=source_repo,
                packages=packages,
                flocker_version=flocker_version,
                distro_name=distro_name,
                distro_version=distro_version,
            )
        )
Example #10
 def dispatcher(self):
     return ComposedDispatcher([
         TypeDispatcher({
             PackerConfigure: self.perform_packer_configure,
             PackerBuild: self.perform_packer_build,
             WriteToS3: self.perform_write_to_s3,
         }), base_dispatcher
     ])
Example #11
 def dispatcher(self):
     return ComposedDispatcher([
         TypeDispatcher({
             PackerConfigure: self.perform_packer_configure,
             PackerBuild: self.perform_packer_build,
             StandardOut: self.perform_standard_out,
         }), base_dispatcher
     ])
Example #12
def get_working_cql_dispatcher(reactor, cass_client):
    """
    Get dispatcher with CQLQueryExecute performer along with any other
    dependent performers to make it work
    """
    return ComposedDispatcher(
        [get_simple_dispatcher(reactor),
         get_cql_dispatcher(cass_client)])
Example #13
def test_dispatcher(disp=None):
    disps = [
        base_dispatcher,
        TypeDispatcher({ParallelEffects: perform_parallel_async}),
    ]
    if disp is not None:
        disps.append(disp)
    return ComposedDispatcher(disps)
Example #14
def make_dispatcher(reactor):
    return ComposedDispatcher([
        TypeDispatcher({
            RunRemotely: partial(perform_run_remotely, reactor),
        }),
        make_twisted_dispatcher(reactor),
        base_dispatcher,
    ])
Example #15
def make_dispatcher(reactor):
    patch_twisted_7672()
    return ComposedDispatcher([
        TypeDispatcher({
            RunRemotely: perform_run_remotely,
        }),
        make_twisted_dispatcher(reactor),
        base_dispatcher,
    ])
Example #16
def get_legacy_dispatcher(reactor, authenticator, log, service_configs):
    """
    Return a dispatcher that can perform effects that are needed by the old
    worker code.
    """
    return ComposedDispatcher([
        get_cloud_client_dispatcher(reactor, authenticator, log,
                                    service_configs),
        get_simple_dispatcher(reactor),
    ])
Example #17
def publish_dev_box_main(args, base_path, top_level):
    """
    Publish a development Vagrant box.

    :param list args: The arguments passed to the script.
    :param FilePath base_path: The executable being run.
    :param FilePath top_level: The top-level of the flocker repository.
    """
    options = PublishDevBoxOptions()

    try:
        options.parseOptions(args)
    except UsageError as e:
        sys.stderr.write("%s: %s\n" % (base_path.basename(), e))
        raise SystemExit(1)

    scratch_directory = FilePath(tempfile.mkdtemp(
        prefix=b'flocker-upload-'))
    scratch_directory.child('vagrant').createDirectory()

    box_type = "flocker-dev"
    prefix = 'vagrant/dev/'

    box_name = "{box_type}-{version}.box".format(
        box_type=box_type,
        version=options['flocker-version'],
    )

    box_url = "https://{bucket}.s3.amazonaws.com/{key}".format(
        bucket=options['target'],
        key=prefix + box_name,
    )

    sync_perform(
        dispatcher=ComposedDispatcher([boto_dispatcher, base_dispatcher]),
        effect=sequence([
            Effect(
                CopyS3Keys(
                    source_bucket=DEV_ARCHIVE_BUCKET,
                    source_prefix=prefix,
                    destination_bucket=options['target'],
                    destination_prefix=prefix,
                    keys=[box_name],
                )
            ),
            publish_vagrant_metadata(
                version=options['flocker-version'],
                box_url=box_url,
                scratch_directory=scratch_directory.child('vagrant'),
                box_name=box_type,
                target_bucket=options['target'],
            ),
        ]),
    )
Example #18
    def test_performs_tenant_scope(self, deferred_lock_run):
        """
        :func:`perform_tenant_scope` performs :obj:`TenantScope`, and uses the
        default throttler
        """
        # We want to ensure
        # 1. the TenantScope can be performed
        # 2. the ServiceRequest is run within a lock, since it matches the
        #    default throttling policy

        set_config_data({
            "cloud_client": {
                "throttling": {
                    "create_server_delay": 1,
                    "delete_server_delay": 0.4
                }
            }
        })
        self.addCleanup(set_config_data, {})
        clock = Clock()
        authenticator = object()
        log = object()
        dispatcher = get_cloud_client_dispatcher(clock, authenticator, log,
                                                 make_service_configs())
        svcreq = service_request(ServiceType.CLOUD_SERVERS, 'POST', 'servers')
        tscope = TenantScope(tenant_id='111', effect=svcreq)

        def run(f, *args, **kwargs):
            result = f(*args, **kwargs)
            result.addCallback(lambda x: (x[0], assoc(x[1], 'locked', True)))
            return result

        deferred_lock_run.side_effect = run

        response = stub_pure_response({}, 200)
        seq = SequenceDispatcher([
            (Authenticate(authenticator=authenticator,
                          tenant_id='111',
                          log=log), lambda i: ('token', fake_service_catalog)),
            (Request(method='POST',
                     url='http://dfw.openstack/servers',
                     headers=headers('token'),
                     log=log), lambda i: response),
        ])

        disp = ComposedDispatcher([seq, dispatcher])
        with seq.consume():
            result = perform(disp, Effect(tscope))
            self.assertNoResult(result)
            clock.advance(1)
            self.assertEqual(self.successResultOf(result), (response[0], {
                'locked': True
            }))
Example #19
def dispatcher():
    prefetch_dispatcher = TypeDispatcher({
        TryPrefetch: try_prefetch_performer,
        CalculateSha256Sum: sync_performer(
            lambda _, intent: calculate_sha256_sum(intent)),
        GetListRemote: get_list_remote_performer,
        AbortWithErrorMessage: abort_with_error_message_performer,
    })
    return ComposedDispatcher([base_dispatcher, prefetch_dispatcher])
Example #20
    def check_and_call():
        class DoFunc(object):
            pass

        @deferred_performer
        def func_performer(d, i):
            return maybeDeferred(func, *args, **kwargs)

        comp_dispatcher = ComposedDispatcher(
            [TypeDispatcher({DoFunc: func_performer}), dispatcher])
        return perform(comp_dispatcher,
                       call_if_acquired(lock, Effect(DoFunc())))
Example #21
    def get_dispatcher(self, service_request_mappings):
        """
        Set up an empty dictionary of intents to fake responses, and set up
        the dispatcher.
        """
        eq_dispatcher = EQDispatcher
        if callable(service_request_mappings[0][-1]):
            eq_dispatcher = EQFDispatcher

        return ComposedDispatcher([
            TypeDispatcher({ParallelEffects: perform_parallel_async}),
            eq_dispatcher(service_request_mappings)
        ])
Example #22
    def test_perform_throttle(self):
        """
        The bracket given to :obj:`_Throttle` is used to call the nested
        performer.
        """
        def bracket(f, *args, **kwargs):
            return f(*args, **kwargs).addCallback(lambda r: ('bracketed', r))

        throttle = _Throttle(bracket=bracket, effect=Effect(Constant('foo')))
        dispatcher = ComposedDispatcher(
            [TypeDispatcher({_Throttle: _perform_throttle}), base_dispatcher])
        result = sync_perform(dispatcher, Effect(throttle))
        self.assertEqual(result, ('bracketed', 'foo'))
Example #23
def get_full_dispatcher(reactor, authenticator, log, service_configs,
                        kz_client, store, supervisor, cass_client):
    """
    Return a dispatcher that can perform all of Otter's effects.
    """
    return ComposedDispatcher([
        get_legacy_dispatcher(reactor, authenticator, log, service_configs),
        get_zk_dispatcher(kz_client),
        get_model_dispatcher(log, store),
        get_eviction_dispatcher(supervisor),
        get_msg_time_dispatcher(reactor),
        get_cql_dispatcher(cass_client)
    ])
Example #24
    def publish_docs(self, aws, flocker_version, doc_version, environment):
        """
        Call :func:`publish_docs`, interacting with a fake AWS.

        :param FakeAWS aws: Fake AWS to interact with.
        :param flocker_version: See :py:func:`publish_docs`.
        :param doc_version: See :py:func:`publish_docs`.
        :param environment: See :py:func:`publish_docs`.
        """
        sync_perform(
            ComposedDispatcher([aws.get_dispatcher(), base_dispatcher]),
            publish_docs(flocker_version, doc_version,
                         environment=environment))
Example #25
File: 07.py Project: danmack/reveal.js
    def test_echo(self, line):
        sequence = SequenceDispatcher([
            (Print('What... is your quest?'), lambda _: None),
            (Readline(), lambda _: line),
            (Print(line), lambda _: None),
        ])

        with sequence.consume():
            dispatcher = ComposedDispatcher([
                sequence,
                base_dispatcher,
            ])
            sync_perform(dispatcher, echo())
Example #26
 def perform_with_group(self, eff, expected_lookup, group,
                        fallback_dispatcher=None):
     """Run an effect that will look up group info."""
     def gsg(log, tenant_id, group_id):
         assert (log, tenant_id, group_id) == expected_lookup
         return group
     store = self.get_store()
     store.get_scaling_group.side_effect = gsg
     dispatcher = self.get_dispatcher(store)
     if fallback_dispatcher is not None:
         dispatcher = ComposedDispatcher([dispatcher,
                                          fallback_dispatcher])
     return sync_perform(dispatcher, eff)
Example #27
 def test_challenge(self, line):
     sequence = SequenceDispatcher([
         (Print('What... is your quest?'), lambda _: None),
         (Readline(), lambda _: line),
         (Print('What... is your quest?'), lambda _: None),
         (Readline(), lambda _: 'To seek the Holy Grail.\n'),
         (Print('What... is your favourite colour?'), lambda _: None),
         ])
     with sequence.consume():
         dispatcher = ComposedDispatcher([
             sequence,
             base_dispatcher,
             ])
         sync_perform(dispatcher, challenge())
Example #28
def get_dispatcher(reactor):
    """
    Create a dispatcher that can find performers for :obj:`ReadLine`,
    :obj:`HTTPRequest`, and :obj:`ParallelEffects`.
    :func:`make_twisted_dispatcher` is able to provide the ``ParallelEffects``
    performer, so we compose it with our own custom :obj:`TypeDispatcher`.
    """
    return ComposedDispatcher([
        TypeDispatcher({
            ReadLine: perform_readline_stdin,
            HTTPRequest: perform_request_with_treq,
        }),
        make_twisted_dispatcher(reactor),
    ])
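Because make_twisted_dispatcher supplies the ParallelEffects performer, a dispatcher built this way can also run sub-effects concurrently under the reactor. A hedged usage sketch; the ReadLine/HTTPRequest constructor arguments are assumptions rather than part of this example:

from effect import Effect, parallel, perform

def ask_and_fetch(reactor, url):
    results = []
    # parallel() wraps its sub-effects in a single ParallelEffects intent,
    # which make_twisted_dispatcher performs asynchronously; the combined
    # result (a list, one entry per sub-effect) arrives via the callback.
    eff = parallel([
        Effect(ReadLine()),
        Effect(HTTPRequest(method='GET', url=url)),
    ])
    perform(get_dispatcher(reactor), eff.on(success=results.append))
    return results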
Example #29
File: 05.py Project: danmack/reveal.js
    def test_print(self):
        outputs = []
        @sync_performer
        def perform_print(dispatcher, print_):
            outputs.append(print_.line)

        test_interpreter = ComposedDispatcher([
            TypeDispatcher({
                Print: perform_print,
                }),
            base_dispatcher])

        dispatcher = test_interpreter
        sync_perform(dispatcher, program())
        self.assertEqual(["What... is your quest?"], outputs)
Example #30
def get_dispatcher():
    """
    Create a dispatcher that can find performers for :obj:`ReadLine`,
    :obj:`HTTPRequest`, and :obj:`ParallelEffects`.  There's a built-in
    performer for ParallelEffects that uses a multiprocessing ThreadPool,
    :func:`effect.perform_parallel_with_pool`.
    """
    my_pool = ThreadPool()
    pool_performer = partial(perform_parallel_with_pool, my_pool)
    return ComposedDispatcher([
        TypeDispatcher({
            ReadLine: perform_readline_stdin,
            HTTPRequest: perform_request_requests,
            ParallelEffects: pool_performer,
        })
    ])
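With the ThreadPool-backed ParallelEffects performer, the same kind of composition can be driven synchronously. Another hedged sketch, again assuming the HTTPRequest constructor arguments:

from effect import Effect, parallel, sync_perform

def fetch_both(url_a, url_b):
    eff = parallel([
        Effect(HTTPRequest(method='GET', url=url_a)),
        Effect(HTTPRequest(method='GET', url=url_b)),
    ])
    # sync_perform blocks until both requests (run on pool threads) complete
    # and returns their results as a list, in order.
    return sync_perform(get_dispatcher(), eff)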