def test_log_none_effectful_fields(self):
    """
    When log is not passed, but there are log fields from BoundFields,
    the log passed to treq has those fields.
    """
    mlog = mock_log()
    # we have to include system='otter' in the expected log here because
    # the code falls back to otter.log.log, which has the system key bound.
    log_matcher = matches(IsBoundWith(bound='stuff', system='otter'))
    expected_req = ('GET', 'http://google.com/', None, None, None,
                    {'log': log_matcher})
    response = StubResponse(200, {})
    stub_treq = StubTreq(reqs=[(expected_req, response)],
                         contents=[(response, "content")])
    request = Request(method="get", url="http://google.com/")
    request.treq = stub_treq
    # Bind 'stuff' onto the request effect's log context before performing.
    effect = with_log(Effect(request), bound='stuff')
    dispatcher = ComposedDispatcher(
        [get_simple_dispatcher(None), get_log_dispatcher(mlog, {})])
    self.assertEqual(
        self.successResultOf(perform(dispatcher, effect)),
        (response, "content"))
def perform_run_remotely(base_dispatcher, intent):
    """
    Run a series of commands on a remote host.
    """
    # Extend the base dispatcher with performers for each remote-command
    # intent type.
    remote_dispatcher = ComposedDispatcher([
        TypeDispatcher({
            Run: perform_run,
            Sudo: perform_sudo,
            Put: perform_put,
            Comment: perform_comment,
        }),
        base_dispatcher,
    ])
    target = "%s@%s" % (intent.username, intent.address)
    with settings(connection_attempts=24,
                  timeout=5,
                  pty=False,
                  host_string=target):
        sync_perform(remote_dispatcher, intent.commands)
    # Drop all fabric SSH connections once the commands have run.
    disconnect_all()
def perform_tenant_scope(authenticator, log, service_configs, throttler,
                         dispatcher, tenant_scope, box,
                         _concretize=concretize_service_request):
    """
    Perform a :obj:`TenantScope` by performing its
    :attr:`TenantScope.effect`, with a dispatcher extended with a
    performer for :obj:`ServiceRequest` intents. The performer will use
    the tenant provided by the :obj:`TenantScope`.

    The first arguments before (dispatcher, tenant_scope, box) are
    intended to be partially applied, and the result is a performer that
    can be put into a dispatcher.
    """
    @sync_performer
    def scoped_performer(inner_dispatcher, service_request):
        # Concretize each ServiceRequest with the scope's tenant id.
        return _concretize(authenticator, log, service_configs, throttler,
                           tenant_scope.tenant_id, service_request)

    scoped_dispatcher = ComposedDispatcher([
        TypeDispatcher({ServiceRequest: scoped_performer}),
        dispatcher,
    ])
    effect = tenant_scope.effect.on(box.succeed, box.fail)
    perform(scoped_dispatcher, effect)
def publish_docs_main(args, base_path, top_level):
    """
    :param list args: The arguments passed to the script.
    :param FilePath base_path: The executable being run.
    :param FilePath top_level: The top-level of the flocker repository.
    """
    options = PublishDocsOptions()

    try:
        options.parseOptions(args)
    except UsageError as e:
        sys.stderr.write("%s: %s\n" % (base_path.basename(), e))
        raise SystemExit(1)

    docs_effect = publish_docs(
        flocker_version=options['flocker-version'],
        doc_version=options['doc-version'],
        environment=options.environment,
    )
    try:
        sync_perform(
            dispatcher=ComposedDispatcher([boto_dispatcher,
                                           base_dispatcher]),
            effect=docs_effect)
    except NotARelease:
        sys.stderr.write("%s: Can't publish non-release.\n"
                         % (base_path.basename(),))
        raise SystemExit(1)
    except NotTagged:
        sys.stderr.write(
            "%s: Can't publish non-tagged version to production.\n"
            % (base_path.basename(),))
        raise SystemExit(1)
def setUp(self):
    """Build a dispatcher combining msg-time and logging performers."""
    self.clock = Clock()
    self.log = mock_log()
    self.disp = ComposedDispatcher(
        [get_msg_time_dispatcher(self.clock),
         get_log_dispatcher(self.log, {})])
def perform_retry_without_delay(actual_retry_intent):
    # Perform a Retry intent, substituting any ShouldDelayAndRetry policy
    # with an effect-returning predicate so that no real delay happens.
    # NOTE(review): relies on closure variables from the enclosing scope
    # (fallback_dispatcher, expected_retry_intent, performers,
    # base_dispatcher) — this def appears to be nested in a larger helper.
    should_retry = actual_retry_intent.should_retry
    if isinstance(should_retry, ShouldDelayAndRetry):
        # Shadow should_retry: evaluate the original policy's can_retry
        # against the failure, wrapped in a Constant effect, without
        # sleeping.
        def should_retry(exc_info):
            exc_type, exc_value, exc_traceback = exc_info
            failure = Failure(exc_value, exc_type, exc_traceback)
            return Effect(
                Constant(
                    actual_retry_intent.should_retry.can_retry(failure)))
    new_retry_effect = Effect(
        Retry(effect=actual_retry_intent.effect,
              should_retry=should_retry))
    _dispatchers = [
        TypeDispatcher({Retry: perform_retry}),
        base_dispatcher
    ]
    if fallback_dispatcher is not None:
        _dispatchers.append(fallback_dispatcher)
    # Expect each performer, in order, against the retried effect's intent.
    seq = [(expected_retry_intent.effect.intent, performer)
           for performer in performers]
    return perform_sequence(seq, new_retry_effect,
                            ComposedDispatcher(_dispatchers))
def perform_run_remotely(reactor, base_dispatcher, intent):
    # Generator-based performer (presumably wrapped with an
    # inlineCallbacks-style decorator in the enclosing file — the
    # decorator is not visible here): establish an SSH connection,
    # retrying until it succeeds, perform the remote commands, then
    # clean up the connection.
    connection_helper = get_connection_helper(reactor,
                                              username=intent.username,
                                              address=intent.address,
                                              port=intent.port)
    context = Message.new(username=intent.username,
                          address=intent.address, port=intent.port)

    def connect():
        # One connection attempt: log failures and give each attempt at
        # most 30 seconds before timing out.
        connection = connection_helper.secureConnection()
        connection.addErrback(write_failure)
        timeout(reactor, connection, 30)
        return connection

    # Keep calling connect() until a connection is established.
    connection = yield loop_until(reactor, connect)
    dispatcher = ComposedDispatcher([
        get_ssh_dispatcher(
            connection=connection,
            context=context,
        ),
        base_dispatcher,
    ])
    yield perform(dispatcher, intent.commands)
    # Tear down the connection once all commands have been performed.
    yield connection_helper.cleanupConnection(connection, False)
def publish_artifacts_main(args, base_path, top_level):
    """
    Publish release artifacts.

    :param list args: The arguments passed to the scripts.
    :param FilePath base_path: The executable being run.
    :param FilePath top_level: The top-level of the flocker repository.
    """
    options = UploadOptions()

    try:
        options.parseOptions(args)
    except UsageError as e:
        sys.stderr.write("%s: %s\n" % (base_path.basename(), e))
        raise SystemExit(1)
    except NotARelease:
        sys.stderr.write("%s: Can't publish artifacts for a non-release.\n"
                         % (base_path.basename(),))
        raise SystemExit(1)
    except DocumentationRelease:
        sys.stderr.write("%s: Can't publish artifacts for a documentation "
                         "release.\n" % (base_path.basename(),))
        raise SystemExit(1)

    # Uploads need AWS (boto), yum, and the generic base performers.
    dispatcher = ComposedDispatcher([boto_dispatcher, yum_dispatcher,
                                     base_dispatcher])

    # Scratch space for each upload stage; removed in the finally below.
    scratch_directory = FilePath(tempfile.mkdtemp(
        prefix=b'flocker-upload-'))
    scratch_directory.child('packages').createDirectory()
    scratch_directory.child('python').createDirectory()
    scratch_directory.child('pip').createDirectory()

    try:
        sync_perform(
            dispatcher=dispatcher,
            effect=sequence([
                # OS packages, Python packages, then the pip index — in
                # that order, since the index references the packages.
                upload_packages(
                    scratch_directory=scratch_directory.child('packages'),
                    target_bucket=options['target'],
                    version=options['flocker-version'],
                    build_server=options['build-server'],
                    top_level=top_level,
                ),
                upload_python_packages(
                    scratch_directory=scratch_directory.child('python'),
                    target_bucket=options['target'],
                    top_level=top_level,
                    output=sys.stdout,
                    error=sys.stderr,
                ),
                upload_pip_index(
                    scratch_directory=scratch_directory.child('pip'),
                    target_bucket=options['target'],
                ),
            ]),
        )
    finally:
        scratch_directory.remove()
def update_repo(self, aws, yum, rpm_directory, target_bucket, target_key,
                source_repo, packages, flocker_version, distro_name,
                distro_version):
    """
    Call :func:``update_repo``, interacting with a fake AWS and yum
    utilities.

    :param FakeAWS aws: Fake AWS to interact with.
    :param FakeYum yum: Fake yum utilities to interact with.

    See :py:func:`update_repo` for other parameter documentation.
    """
    dispatcher = ComposedDispatcher(
        [aws.get_dispatcher(), yum.get_dispatcher(), base_dispatcher])
    effect = update_repo(
        rpm_directory=rpm_directory,
        target_bucket=target_bucket,
        target_key=target_key,
        source_repo=source_repo,
        packages=packages,
        flocker_version=flocker_version,
        distro_name=distro_name,
        distro_version=distro_version,
    )
    sync_perform(dispatcher, effect)
def dispatcher(self):
    """Dispatcher for packer and S3 intents plus the base performers."""
    packer_dispatcher = TypeDispatcher({
        PackerConfigure: self.perform_packer_configure,
        PackerBuild: self.perform_packer_build,
        WriteToS3: self.perform_write_to_s3,
    })
    return ComposedDispatcher([packer_dispatcher, base_dispatcher])
def dispatcher(self):
    """Dispatcher for packer and stdout intents plus base performers."""
    packer_dispatcher = TypeDispatcher({
        PackerConfigure: self.perform_packer_configure,
        PackerBuild: self.perform_packer_build,
        StandardOut: self.perform_standard_out,
    })
    return ComposedDispatcher([packer_dispatcher, base_dispatcher])
def get_working_cql_dispatcher(reactor, cass_client):
    """
    Get dispatcher with CQLQueryExecute performer along with any other
    dependent performers to make it work
    """
    return ComposedDispatcher([
        get_simple_dispatcher(reactor),
        get_cql_dispatcher(cass_client),
    ])
def test_dispatcher(disp=None):
    """Build a test dispatcher, optionally extended with ``disp``."""
    extra = [] if disp is None else [disp]
    return ComposedDispatcher(
        [base_dispatcher,
         TypeDispatcher({ParallelEffects: perform_parallel_async})]
        + extra)
def make_dispatcher(reactor):
    """Dispatcher that can run commands remotely via ``reactor``."""
    # Bind the reactor into the RunRemotely performer up front.
    run_remotely_performer = partial(perform_run_remotely, reactor)
    return ComposedDispatcher([
        TypeDispatcher({RunRemotely: run_remotely_performer}),
        make_twisted_dispatcher(reactor),
        base_dispatcher,
    ])
def make_dispatcher(reactor):
    """Dispatcher for RunRemotely plus twisted and base performers."""
    # Apply the workaround for Twisted bug #7672 before any SSH work.
    patch_twisted_7672()
    return ComposedDispatcher([
        TypeDispatcher({RunRemotely: perform_run_remotely}),
        make_twisted_dispatcher(reactor),
        base_dispatcher,
    ])
def get_legacy_dispatcher(reactor, authenticator, log, service_configs):
    """
    Return a dispatcher that can perform effects that are needed by the
    old worker code.
    """
    cloud = get_cloud_client_dispatcher(reactor, authenticator, log,
                                        service_configs)
    return ComposedDispatcher([cloud, get_simple_dispatcher(reactor)])
def publish_dev_box_main(args, base_path, top_level):
    """
    Publish a development Vagrant box.

    :param list args: The arguments passed to the script.
    :param FilePath base_path: The executable being run.
    :param FilePath top_level: The top-level of the flocker repository.
    """
    options = PublishDevBoxOptions()

    try:
        options.parseOptions(args)
    except UsageError as e:
        sys.stderr.write("%s: %s\n" % (base_path.basename(), e))
        raise SystemExit(1)

    # Scratch space for the generated vagrant metadata.
    scratch_directory = FilePath(tempfile.mkdtemp(
        prefix=b'flocker-upload-'))
    scratch_directory.child('vagrant').createDirectory()

    box_type = "flocker-dev"
    prefix = 'vagrant/dev/'
    box_name = "{box_type}-{version}.box".format(
        box_type=box_type,
        version=options['flocker-version'],
    )
    # Public URL the metadata will point clients at.
    box_url = "https://{bucket}.s3.amazonaws.com/{key}".format(
        bucket=options['target'],
        key=prefix + box_name,
    )

    sync_perform(
        dispatcher=ComposedDispatcher([boto_dispatcher, base_dispatcher]),
        effect=sequence([
            # First copy the box from the dev archive to the target
            # bucket...
            Effect(
                CopyS3Keys(
                    source_bucket=DEV_ARCHIVE_BUCKET,
                    source_prefix=prefix,
                    destination_bucket=options['target'],
                    destination_prefix=prefix,
                    keys=[box_name],
                )
            ),
            # ...then publish metadata referencing the copied box.
            publish_vagrant_metadata(
                version=options['flocker-version'],
                box_url=box_url,
                scratch_directory=scratch_directory.child('vagrant'),
                box_name=box_type,
                target_bucket=options['target'],
            ),
        ]),
    )
def test_performs_tenant_scope(self, deferred_lock_run): """ :func:`perform_tenant_scope` performs :obj:`TenantScope`, and uses the default throttler """ # We want to ensure # 1. the TenantScope can be performed # 2. the ServiceRequest is run within a lock, since it matches the # default throttling policy set_config_data({ "cloud_client": { "throttling": { "create_server_delay": 1, "delete_server_delay": 0.4 } } }) self.addCleanup(set_config_data, {}) clock = Clock() authenticator = object() log = object() dispatcher = get_cloud_client_dispatcher(clock, authenticator, log, make_service_configs()) svcreq = service_request(ServiceType.CLOUD_SERVERS, 'POST', 'servers') tscope = TenantScope(tenant_id='111', effect=svcreq) def run(f, *args, **kwargs): result = f(*args, **kwargs) result.addCallback(lambda x: (x[0], assoc(x[1], 'locked', True))) return result deferred_lock_run.side_effect = run response = stub_pure_response({}, 200) seq = SequenceDispatcher([ (Authenticate(authenticator=authenticator, tenant_id='111', log=log), lambda i: ('token', fake_service_catalog)), (Request(method='POST', url='http://dfw.openstack/servers', headers=headers('token'), log=log), lambda i: response), ]) disp = ComposedDispatcher([seq, dispatcher]) with seq.consume(): result = perform(disp, Effect(tscope)) self.assertNoResult(result) clock.advance(1) self.assertEqual(self.successResultOf(result), (response[0], { 'locked': True }))
def dispatcher():
    """Compose the base dispatcher with prefetch-intent performers."""
    handlers = {
        TryPrefetch: try_prefetch_performer,
        CalculateSha256Sum:
            sync_performer(lambda _, intent: calculate_sha256_sum(intent)),
        GetListRemote: get_list_remote_performer,
        AbortWithErrorMessage: abort_with_error_message_performer,
    }
    return ComposedDispatcher([base_dispatcher, TypeDispatcher(handlers)])
def check_and_call():
    # NOTE(review): relies on closure variables from the enclosing scope
    # (func, args, kwargs, dispatcher, lock).
    class DoFunc(object):
        # Marker intent type, used only to dispatch func as an Effect.
        pass

    @deferred_performer
    def func_performer(d, i):
        # Run func, converting a synchronous result or exception into a
        # Deferred.
        return maybeDeferred(func, *args, **kwargs)

    comp_dispatcher = ComposedDispatcher(
        [TypeDispatcher({DoFunc: func_performer}), dispatcher])
    # Only invoke the DoFunc effect if the lock can be acquired.
    return perform(comp_dispatcher,
                   call_if_acquired(lock, Effect(DoFunc())))
def get_dispatcher(self, service_request_mappings):
    """
    Set up an empty dictionary of intents to fake responses, and set up
    the dispatcher.
    """
    # Use the callable-aware dispatcher when responses are functions.
    if callable(service_request_mappings[0][-1]):
        mapping_dispatcher = EQFDispatcher
    else:
        mapping_dispatcher = EQDispatcher
    return ComposedDispatcher([
        TypeDispatcher({ParallelEffects: perform_parallel_async}),
        mapping_dispatcher(service_request_mappings),
    ])
def test_perform_throttle(self):
    """
    The bracket given to :obj:`_Throttle` is used to call the nested
    performer.
    """
    def bracket(f, *args, **kwargs):
        d = f(*args, **kwargs)
        return d.addCallback(lambda r: ('bracketed', r))

    throttle = _Throttle(bracket=bracket, effect=Effect(Constant('foo')))
    dispatcher = ComposedDispatcher([
        TypeDispatcher({_Throttle: _perform_throttle}),
        base_dispatcher,
    ])
    self.assertEqual(sync_perform(dispatcher, Effect(throttle)),
                     ('bracketed', 'foo'))
def get_full_dispatcher(reactor, authenticator, log, service_configs,
                        kz_client, store, supervisor, cass_client):
    """
    Return a dispatcher that can perform all of Otter's effects.
    """
    dispatchers = [
        get_legacy_dispatcher(reactor, authenticator, log,
                              service_configs),
        get_zk_dispatcher(kz_client),
        get_model_dispatcher(log, store),
        get_eviction_dispatcher(supervisor),
        get_msg_time_dispatcher(reactor),
        get_cql_dispatcher(cass_client),
    ]
    return ComposedDispatcher(dispatchers)
def publish_docs(self, aws, flocker_version, doc_version, environment):
    """
    Call :func:``publish_docs``, interacting with a fake AWS.

    :param FakeAWS aws: Fake AWS to interact with.
    :param flocker_version: See :py:func:`publish_docs`.
    :param doc_version: See :py:func:`publish_docs`.
    :param environment: See :py:func:`environment`.
    """
    dispatcher = ComposedDispatcher(
        [aws.get_dispatcher(), base_dispatcher])
    effect = publish_docs(flocker_version, doc_version,
                          environment=environment)
    sync_perform(dispatcher, effect)
def test_echo(self, line):
    """Echo prompts, reads one line, and prints it back verbatim."""
    script = SequenceDispatcher([
        (Print('What... is your quest?'), lambda _: None),
        (Readline(), lambda _: line),
        (Print(line), lambda _: None),
    ])
    dispatcher = ComposedDispatcher([script, base_dispatcher])
    with script.consume():
        sync_perform(dispatcher, echo())
def perform_with_group(self, eff, expected_lookup, group,
                       fallback_dispatcher=None):
    """Run an effect that will look up group info."""
    def fake_get_scaling_group(log, tenant_id, group_id):
        # The lookup must be made with exactly the expected arguments.
        assert (log, tenant_id, group_id) == expected_lookup
        return group

    store = self.get_store()
    store.get_scaling_group.side_effect = fake_get_scaling_group
    dispatcher = self.get_dispatcher(store)
    if fallback_dispatcher is None:
        return sync_perform(dispatcher, eff)
    return sync_perform(
        ComposedDispatcher([dispatcher, fallback_dispatcher]), eff)
def test_challenge(self, line):
    """The challenge asks, reads, re-asks, then moves on as scripted."""
    script = SequenceDispatcher([
        (Print('What... is your quest?'), lambda _: None),
        (Readline(), lambda _: line),
        (Print('What... is your quest?'), lambda _: None),
        (Readline(), lambda _: 'To seek the Holy Grail.\n'),
        (Print('What... is your favourite colour?'), lambda _: None),
    ])
    dispatcher = ComposedDispatcher([script, base_dispatcher])
    with script.consume():
        sync_perform(dispatcher, challenge())
def get_dispatcher(reactor):
    """
    Create a dispatcher that can find performers for :obj:`ReadLine`,
    :obj:`HTTPRequest`, and :obj:`ParallelEffects`.

    :func:`make_twisted_dispatcher` is able to provide the
    ``ParallelEffects`` performer, so we compose it with our own custom
    :obj:`TypeDispatcher`.
    """
    custom = TypeDispatcher({
        ReadLine: perform_readline_stdin,
        HTTPRequest: perform_request_with_treq,
    })
    return ComposedDispatcher([custom, make_twisted_dispatcher(reactor)])
def test_print(self):
    """Performing the program routes its Print lines to our performer."""
    outputs = []

    @sync_performer
    def perform_print(dispatcher, print_):
        outputs.append(print_.line)

    dispatcher = ComposedDispatcher([
        TypeDispatcher({Print: perform_print}),
        base_dispatcher,
    ])
    sync_perform(dispatcher, program())
    self.assertEqual(["What... is your quest?"], outputs)
def get_dispatcher():
    """
    Create a dispatcher that can find performers for :obj:`ReadLine`,
    :obj:`HTTPRequest`, and :obj:`ParallelEffects`. There's a built-in
    performer for ParallelEffects that uses a multiprocessing ThreadPool,
    :func:`effect.perform_parallel_with_pool`.
    """
    # The pool stays alive for as long as the dispatcher is in use.
    pool = ThreadPool()
    return ComposedDispatcher([
        TypeDispatcher({
            ReadLine: perform_readline_stdin,
            HTTPRequest: perform_request_requests,
            ParallelEffects: partial(perform_parallel_with_pool, pool),
        }),
    ])