def test_unsupported_arg(self):
    """Unknown keyword arguments to `required` are ignored without error."""
    self.create_fixture(value=required(int, mock_value=0, spaghetti="foo"))
    config_loader = load_from_dict()
    test_metadata = Metadata("test", testing=True)
    result = configure(self.registry.defaults, test_metadata, config_loader)
    assert_that(result, has_entries(foo=has_entries(value=0)))
def setup(self):
    """Build an object graph with a single-context key registry and bind every
    store variant under test, then recreate the schema."""
    loaders = load_each(
        load_from_dict(
            multi_tenant_key_registry=dict(
                context_keys=["private"],
                key_ids=["key_id"],
            ),
        ),
        load_from_environ,
    )
    self.graph = create_object_graph(
        name="example",
        testing=True,
        import_name="microcosm_postgres",
        loader=loaders,
    )
    # Bind each store flavor for convenient access from tests.
    self.encryptable_store = self.graph.encryptable_store
    self.encrypted_store = self.graph.encrypted_store
    self.json_encryptable_store = self.graph.json_encryptable_store
    self.json_encrypted_store = self.graph.json_encrypted_store
    self.nullable_encryptable_store = self.graph.nullable_encryptable_store
    self.nullable_encrypted_store = self.graph.nullable_encrypted_store
    self.encryptor = self.graph.multi_tenant_encryptor
    with SessionContext(self.graph) as context:
        context.recreate_all()
def test_mock_value(self):
    """In testing mode, a string mock value is converted to the declared type."""
    self.create_fixture(required(boolean, mock_value="true"))
    config_loader = load_from_dict()
    test_metadata = Metadata("test", testing=True)
    result = configure(self.registry.defaults, test_metadata, config_loader)
    assert_that(result, has_entries(foo=has_entries(value=True)))
def test_comma_separated_list_empty(self):
    """An empty string mock value yields an empty list."""
    self.create_fixture(typed(comma_separated_list, mock_value=""))
    config_loader = load_from_dict()
    test_metadata = Metadata("test", testing=True)
    result = configure(self.registry.defaults, test_metadata, config_loader)
    assert_that(result, has_entries(foo=has_entries(value=[])))
def test_cycle_multi_tenant():
    """Key cycling works independently for each registered context key."""
    config_loader = load_from_dict(
        multi_tenant_key_registry=dict(
            context_keys=["foo", "bar"],
            key_ids=[
                ["foo1", "foo2"],
                ["bar1", "bar2"],
            ],
        ),
    )
    object_graph = create_object_graph(
        name="example",
        testing=True,
        import_name="microcosm_postgres",
        loader=config_loader,
    )
    # Cycle each tenant's keys in turn.
    for context_key, tenant_key_ids in (
        ("foo", ["foo1", "foo2"]),
        ("bar", ["bar1", "bar2"]),
    ):
        cycle(
            encryptor=object_graph.multi_tenant_encryptor,
            encryption_context_key=context_key,
            key_ids=tenant_key_ids,
        )
def test_invalid_missing(self):
    """A required value with no configured source raises a validation error."""
    self.create_fixture(required(int))
    config_loader = load_from_dict()
    assert_that(
        calling(configure).with_args(self.registry.defaults, self.metadata, config_loader),
        raises(ValidationError),
    )
def test_configure_batch_metrics_disable():
    """
    Disable metrics explicitly.
    """
    config_loader = load_from_dict(
        pubsub_send_batch_metrics=dict(enabled=False),
    )
    object_graph = create_object_graph("example", testing=True, loader=config_loader)
    assert_that(object_graph.pubsub_send_batch_metrics.enabled, is_(equal_to(False)))
def setup(self):
    """Wire a controller with caching/invalidation decorators for the tests.

    Fix: the original assigned ``self.cached_retrieve_for`` twice with the
    identical decorated function; the redundant trailing assignment is removed.
    """
    self.build_version = "asdf1234"
    self.graph = create_object_graph(
        "test",
        testing=True,
        loader=load_from_dict(
            dict(
                resource_cache=dict(enabled=True),
                build_info=dict(sha1=self.build_version, build_num="5"),
            )),
    )
    self.graph.use("controller")
    self.cache_prefix = "test"
    controller = self.graph.controller

    # Cached read paths.
    self.cached_retrieve = cached(controller, TestSchema)(controller.retrieve)
    self.cached_extended_retrieve = cached(controller, TestExtendedSchema)(
        controller.extended_retrieve)
    self.cached_retrieve_for = cached(controller, TestForSchema)(
        controller.retrieve_for)

    # Write paths invalidate every cached schema keyed by the resource id.
    invalidations = [
        Invalidation(
            schema=TestForSchema,
            arguments=["key_id"],
        ),
        Invalidation(
            schema=TestSchema,
            arguments=["key_id"],
        ),
        Invalidation(
            schema=TestExtendedSchema,
            arguments=["extended_key_id"],
            # extended schema caches under a different argument name
            kwarg_mappings=dict(extended_key_id="key_id"),
        ),
    ]
    self.cached_create = invalidates(
        controller,
        invalidations=invalidations,
    )(controller.create)
    self.cached_create_batch = invalidate_batch(
        controller,
        batch_attribute="items",
        invalidations=invalidations,
    )(controller.create_batch)
def test_invalid_malformed(self):
    """A configured value that cannot be converted raises a validation error."""
    self.create_fixture(value=required(int))
    config_loader = load_from_dict(foo=dict(value="bar"))
    assert_that(
        calling(configure).with_args(self.registry.defaults, self.metadata, config_loader),
        raises(ValidationError),
    )
def test_comma_separated_list_unconverted(self):
    """A mock value supplied as a list is passed through unchanged."""
    self.create_fixture(value=required(comma_separated_list, mock_value=["abc", "def", "ghi"]))
    config_loader = load_from_dict()
    test_metadata = Metadata("test", testing=True)
    result = configure(self.registry.defaults, test_metadata, config_loader)
    assert_that(
        result,
        has_entries(foo=has_entries(value=["abc", "def", "ghi"])),
    )
def test_cached(self):
    """Build info is exposed on the graph once the component is used."""
    object_graph = create_object_graph(
        "test",
        testing=True,
        loader=load_from_dict(
            dict(build_info=dict(sha1="asdf1234", build_num="5"))),
    )
    object_graph.use("build_info")
    assert_that(object_graph.build_info.sha1, is_("asdf1234"))
def setup(self):
    """Create a graph where the SNS producer itself is the mock."""
    config_loader = load_from_dict(
        sns_producer=dict(
            # NB: mock the SNS producer itself (non-default, except for daemons)
            mock_sns=False,
        ),
        sns_topic_arns=dict(default="topic"),
    )
    self.graph = create_object_graph("example", testing=True, loader=config_loader)
    self.graph.sns_producer.sns_client.reset_mocks()
def test_typed_optional(self):
    """An optional typed value resolves to None when unconfigured."""
    self.create_fixture(typed(int))
    config_loader = load_from_dict()
    result = configure(self.registry.defaults, self.metadata, config_loader)
    assert_that(result, has_entries(foo=has_entries(value=None)))
def test_valid_default(self):
    """A string default is converted to the declared int type."""
    self.create_fixture(required(int, default_value="1"))
    config_loader = load_from_dict()
    result = configure(self.registry.defaults, self.metadata, config_loader)
    assert_that(result, has_entries(foo=has_entries(value=1)))
def test_nullable_null_default(self):
    """A nullable typed value may default to None."""
    self.create_fixture(value=typed(
        int,
        default_value=None,
        nullable=True,
    ))
    config_loader = load_from_dict()
    result = configure(self.registry.defaults, self.metadata, config_loader)
    assert_that(result, has_entries(foo=has_entries(value=None)))
def setup(self):
    """Build a graph configured with the model engine routing strategy."""
    config_loader = load_from_dict(
        secret=dict(postgres=dict(host="127.0.0.1")),
        sessionmaker=dict(
            engine_routing_strategy="model_engine_routing_strategy",
        ),
    )
    self.graph = create_object_graph(name="example", testing=True, loader=config_loader)
    self.graph.use("sessionmaker")
def setup(self):
    """Create a graph with the default boto-level SNS mocking."""
    config_loader = load_from_dict(
        sns_producer=dict(
            # NB: mock the boto SNS client (default)
            mock_sns=True,
        ),
        sns_topic_arns=dict(default="topic"),
    )
    self.graph = create_object_graph("example", testing=True, loader=config_loader)
    self.graph.sns_producer.sns_client.reset_mocks()
def test_scoped():
    """The scoped decorator resolves the factory under the requested scope."""
    config_loader = load_from_dict(bar=dict(adder=dict(first=3)))
    object_graph = create_object_graph("example", testing=True, loader=config_loader)

    @object_graph.adder.scoped
    def check(expected_total, **kwargs):
        assert_that(object_graph.adder(), is_(equal_to(expected_total)))

    check(3)                   # default scope
    check(5, scope="bar")      # configured scope overrides `first`
    check(3, scope="baz")      # unknown scope falls back to defaults
def setup(self):
    """Create a graph where the SNS producer itself is the mock."""
    config_loader = load_from_dict(
        sns_producer=dict(
            # NB: mock the SNS producer itself (non-default, except for daemons)
            mock_sns=False,
        ),
        sns_topic_arns=dict(
            default="topic",
        ),
    )
    self.graph = create_object_graph("example", testing=True, loader=config_loader)
    self.graph.sns_producer.sns_client.reset_mocks()
def setup(self):
    """Create a graph with the default boto-level SNS mocking."""
    config_loader = load_from_dict(
        sns_producer=dict(
            # NB: mock the boto SNS client (default)
            mock_sns=True,
        ),
        sns_topic_arns=dict(
            default="topic",
        ),
    )
    self.graph = create_object_graph("example", testing=True, loader=config_loader)
    self.graph.sns_producer.sns_client.reset_mocks()
def test_configure_metrics_disable():
    """
    Disable metrics explicitly.
    """
    config_loader = load_from_dict(
        pubsub_send_metrics=dict(
            enabled=False,
        ),
    )
    object_graph = create_object_graph("example", testing=True, loader=config_loader)
    assert_that(object_graph.pubsub_send_metrics.enabled, is_(equal_to(False)))
def test_nullable(self):
    """A nullable value mocked as None overrides a non-null default."""
    self.create_fixture(value=typed(
        int,
        default_value=0,
        nullable=True,
        mock_value=None,
    ))
    config_loader = load_from_dict()
    test_metadata = Metadata("test", testing=True)
    result = configure(self.registry.defaults, test_metadata, config_loader)
    assert_that(result, has_entries(foo=has_entries(value=None)))
def setup(self):
    """Build a graph configured with the model engine routing strategy."""
    config_loader = load_from_dict(
        secret=dict(
            postgres=dict(
                host="127.0.0.1",
            ),
        ),
        sessionmaker=dict(
            engine_routing_strategy="model_engine_routing_strategy",
        ),
    )
    self.graph = create_object_graph(name="example", testing=True, loader=config_loader)
    self.graph.use("sessionmaker")
def test_scoped_to():
    """
    Factory can be scoped to a specific value.
    """
    config_loader = load_from_dict(bar=dict(adder=dict(first=3)))
    object_graph = create_object_graph("example", testing=True, loader=config_loader)
    # "bar" has configuration, so the scoped factory sees first=3.
    with object_graph.adder.scoped_to("bar"):
        assert_that(object_graph.adder(), is_(equal_to(5)))
    # "baz" has no configuration, so defaults apply.
    with object_graph.adder.scoped_to("baz"):
        assert_that(object_graph.adder(), is_(equal_to(3)))
def test_typed_converted(self):
    """A configured string value is converted to the declared int type."""
    self.create_fixture(typed(int))
    config_loader = load_from_dict(foo=dict(value="1"))
    result = configure(self.registry.defaults, self.metadata, config_loader)
    assert_that(result, has_entries(foo=has_entries(value=1)))
def setup(self):
    """Enable the memory profiler and backdate its last sampling time."""
    self.loader = load_from_dict(memory_profiler=dict(enabled="true"))
    self.graph = create_object_graph("example", testing=True, debug=True, loader=self.loader)
    self.graph.use(
        "flask",
        "memory_profiler",
    )
    self.now = datetime.now()
    # Pretend the previous sample happened five minutes ago.
    self.last_sampling_time = self.now - timedelta(minutes=5)
    self.graph.memory_profiler.last_sampling_time = self.last_sampling_time
def test_collaboration():
    """
    All microcosm collaborators should have access to the same opaque context.
    """
    # set up a parent collaborator that uses a child collaborator
    @binding("parent_collaborator")
    class Parent:
        def __init__(self, graph):
            self.child_collaborator = graph.child_collaborator

        def __call__(self):
            return self.child_collaborator()

    @binding("child_collaborator")
    class Child:
        def __init__(self, graph):
            self.opaque = graph.opaque

        def __call__(self):
            return self.opaque.as_dict()

    # create the object graph with both collaborators and opaque data
    graph = create_object_graph(
        "test",
        testing=True,
        loader=load_from_dict(opaque={THIS: VALUE}),
    )
    graph.use(
        "child_collaborator",
        "opaque",
        "parent_collaborator",
    )
    graph.lock()

    # we should be able to initialize the opaque data and observe it from the collaborators
    decorated_func = graph.opaque.initialize(
        example_func,
        OTHER,
        OTHER,
    )(graph.parent_collaborator.__call__)
    assert_that(graph.opaque.as_dict(), is_(equal_to({THIS: VALUE})))
    assert_that(decorated_func(), is_(equal_to(example_func(OTHER, OTHER))))
    assert_that(graph.opaque.as_dict(), is_(equal_to({THIS: VALUE})))
def test_cycle_single_tenant():
    """With only a default context registered, any context key can cycle keys."""
    config_loader = load_from_dict(
        multi_tenant_key_registry=dict(
            context_keys=["default"],
            key_ids=[
                ["key1", "key2"],
            ],
        ),
    )
    object_graph = create_object_graph(
        name="example",
        testing=True,
        import_name="microcosm_postgres",
        loader=config_loader,
    )
    cycle(
        encryptor=object_graph.multi_tenant_encryptor,
        encryption_context_key="whatever",
        key_ids=["key1", "key2"],
    )
def test_boolean_typed_converted(self):
    """Boolean strings are converted regardless of capitalization."""
    self.create_fixture(
        bar=typed(bool, default_value=None),
        baz=typed(bool, default_value=None),
        qux=typed(bool, default_value=None),
        kog=typed(bool, default_value=None),
    )
    config_loader = load_from_dict(foo=dict(
        bar="False",
        baz="True",
        qux="false",
        kog="true",
    ))
    result = configure(self.registry.defaults, self.metadata, config_loader)
    assert_that(
        result,
        has_entries(foo=has_entries(
            bar=False,
            baz=True,
            qux=False,
            kog=True,
        )),
    )
def setup(self):
    """Skip unless microcosm_metrics is installed; wire metrics, flask, and routes."""
    try:
        import microcosm_metrics  # noqa: F401
    except ImportError:
        raise SkipTest
    self.loader = load_from_dict(
        metrics=dict(
            host="statsd",
        ),
    )
    self.graph = create_object_graph("example", testing=True, loader=self.loader)
    self.graph.use(
        "metrics",
        "flask",
        "route",
    )
    self.client = self.graph.flask.test_client()
    self.ns = Namespace(
        subject="foo",
        version="v1",
    )
def test_cycle_cache():
    """With the materials cache enabled, repeated cycles decrypt the data key only once."""
    config_loader = load_from_dict(
        materials_manager=dict(
            enable_cache=True,
        ),
        multi_tenant_key_registry=dict(
            context_keys=["default"],
            key_ids=[
                ["key1", "key2"],
            ],
        ),
    )
    object_graph = create_object_graph(
        name="example",
        testing=True,
        import_name="microcosm_postgres",
        loader=config_loader,
    )
    default_encryptor = object_graph.multi_tenant_encryptor.encryptors["default"]
    master_key_provider = default_encryptor.materials_manager.master_key_provider
    real_decrypt_data_key = master_key_provider.decrypt_data_key
    # Wrap the real implementation so we can count invocations.
    with patch.object(master_key_provider, "decrypt_data_key") as mocked_decrypt_data_key:
        mocked_decrypt_data_key.side_effect = real_decrypt_data_key
        for _ in range(5):
            cycle(
                encryptor=object_graph.multi_tenant_encryptor,
                encryption_context_key="whatever",
                key_ids=["key1", "key2"],
            )
        assert_that(
            mocked_decrypt_data_key.call_count,
            is_(equal_to(1)),
        )
def test_cached_with_no_build_version(self):
    """Caching still works when no build version is configured (version=None)."""
    name = "test"
    object_graph = create_object_graph(
        name,
        testing=True,
        loader=load_from_dict(dict(resource_cache=dict(enabled=True))),
    )
    object_graph.use("controller")
    controller = object_graph.controller
    cached_retrieve = cached(controller, TestSchema)(controller.retrieve)
    first_call = cached_retrieve(key_id=1)
    key = cache_key(name, TestSchema, (), dict(key_id=1), version=None)
    # Check that we pushed the resource into the cache
    assert_that(
        object_graph.resource_cache.get(key),
        is_({"value": 1}),
    )
    # And that a subsequent call hits the cache
    assert_that(
        cached_retrieve(key_id=1)["value"],
        is_(first_call["value"]),
    )
def test_false_default(self):
    """A falsy (False) default is honored rather than treated as missing."""
    self.create_fixture(value=typed(bool, default_value=False))
    config_loader = load_from_dict()
    result = configure(self.registry.defaults, self.metadata, config_loader)
    assert_that(result, has_entries(foo=has_entries(value=False)))
def test_valid_default_factory(self):
    """A default_factory produces a fresh (empty) default value."""
    self.create_fixture(value=typed(list, default_factory=list))
    config_loader = load_from_dict()
    result = configure(self.registry.defaults, self.metadata, config_loader)
    assert_that(result, has_entries(foo=has_entries(value=empty())))