def __init__(self, config, producer):
    """Initialize the Dapr publisher: validate the configuration, publish the
    current value of every configured topic once, then subscribe for updates.

    config   -- dict-like configuration; must contain a 'dapr' section with a
                comma-separated 'topics' entry.
    producer -- object exposing .client.getValue(topic) and
                .subscribe(topic, callback).

    Exits the process (sys.exit(-1)) when required configuration is missing.
    """
    print("Init dapr publisher...")
    if "dapr" not in config:
        print("dapr section missing from configuration, exiting")
        sys.exit(-1)
    self.producer = producer
    dapr_config = config['dapr']
    self.daprClient = DaprClient()
    if "topics" not in dapr_config:
        # Fixed typo in user-facing message: "sepcified" -> "specified".
        print("no topics specified, exiting")
        sys.exit(-1)
    # Comma-separated topic list; strip any embedded whitespace first.
    self.topics = dapr_config.get('topics').replace(" ", "").split(',')
    for topic in self.topics:
        kuksa_message = self.producer.client.getValue(topic)
        jsonMsg = json.loads(kuksa_message)
        req_data = {
            'id': 0,
            'timestamp': jsonMsg["timestamp"],
            'value': jsonMsg["value"],
            'topic': topic
        }
        # Publish the initial value once, then subscribe for future updates.
        self.publisherTopic(topic, req_data)
        self.producer.subscribe(topic, self.publisher)
def test_global_timeout_setting_is_honored(self):
    """A client built after DAPR_HTTP_TIMEOUT_SECONDS is lowered must time
    out accordingly.

    Fix: the original restored the global setting only on the success path;
    a failing assertion would leak the 1-second timeout into other tests.
    The restore now runs in a finally block.
    """
    previous_timeout = settings.DAPR_HTTP_TIMEOUT_SECONDS
    settings.DAPR_HTTP_TIMEOUT_SECONDS = 1
    try:
        new_client = DaprClient()
        self.server.set_server_delay(1.5)
        with self.assertRaises(TimeoutError):
            new_client.invoke_method(self.app_id, self.method_name, "")
    finally:
        # Always restore the global so this test cannot pollute others.
        settings.DAPR_HTTP_TIMEOUT_SECONDS = previous_timeout
def setUp(self):
    """Spin up a fake HTTP sidecar and configure a fresh DaprClient against it."""
    fake_server = FakeHttpServer()
    self.server = fake_server
    self.server_port = fake_server.get_port()
    fake_server.start()
    # Route the SDK's HTTP method invocation at the fake server.
    settings.DAPR_HTTP_PORT = self.server_port
    settings.DAPR_API_METHOD_INVOCATION_PROTOCOL = 'http'
    self.client = DaprClient()
    app_id = 'fakeapp'
    method_name = 'fakemethod'
    self.app_id = app_id
    self.method_name = method_name
    self.invoke_url = f'/v1.0/invoke/{app_id}/method/{method_name}'
async def executeConfiguration():
    """Fetch one configuration value, then subscribe and poll for updates.

    Demonstrates get_configuration / subscribe_configuration against the
    'configurationstore' component for the key 'orderId'.
    """
    with DaprClient() as d:
        storeName = 'configurationstore'
        key = 'orderId'

        # Wait for sidecar to be up within 20 seconds.
        d.wait(20)

        # Get one configuration item by key.
        configuration = d.get_configuration(store_name=storeName, keys=[key], config_metadata={})
        print(
            f"Got key={configuration.items[0].key} value={configuration.items[0].value} version={configuration.items[0].version}",
            flush=True)

        # Subscribe to configuration changes for the same key.
        configuration = await d.subscribe_configuration(store_name=storeName, keys=[key], config_metadata={})
        for x in range(10):
            # Fix: identity check (`is not None`) instead of `!= None`.
            if configuration is not None:
                items = configuration.get_items()
                for item in items:
                    print(
                        f"Subscribe key={item.key} value={item.value} version={item.version}",
                        flush=True)
            else:
                print("Nothing yet")
            # NOTE(review): a blocking sleep inside an async def stalls the
            # event loop; tolerable in this standalone sample, but
            # `await asyncio.sleep(5)` would be preferable — confirm intent.
            sleep(5)
def __init__(self, config, producer):
    """Initialize the Dapr storage bridge: validate the configuration, store
    the current value of every configured topic, then subscribe for updates.

    config   -- dict-like configuration; must contain a 'dapr' section with a
                comma-separated 'topics' entry.
    producer -- object exposing .client.getValue(topic) and
                .subscribe(topic, callback).

    Exits the process (sys.exit(-1)) when required configuration is missing.
    """
    print("Init dapr Storage...")
    if "dapr" not in config:
        print("dapr section missing from configuration, exiting")
        sys.exit(-1)
    self.producer = producer
    dapr_config = config['dapr']
    if "topics" not in dapr_config:
        # Fixed typo in user-facing message: "sepcified" -> "specified".
        print("no topics specified, exiting")
        sys.exit(-1)
    # Comma-separated topic list; strip any embedded whitespace first.
    self.topics = dapr_config.get('topics').replace(" ", "").split(',')
    self.daprClient = DaprClient()
    for topic in self.topics:
        # Persist the current value once, then subscribe for future updates.
        self.storeValue(topic, self.producer.client.getValue(topic))
        self.producer.subscribe(topic, self.store)
class Dapr_Publisher():
    """Bridges KUKSA VSS signals to Dapr pub/sub.

    Publishes the current value of each configured topic at start-up, then
    re-publishes every subsequent update delivered via the producer's
    subscription callback.
    """

    def __init__(self, config, producer):
        """Validate the 'dapr' config section, publish initial values and
        subscribe to updates for every configured topic.

        Exits the process (sys.exit(-1)) when required config is missing.
        """
        print("Init dapr publisher...")
        if "dapr" not in config:
            print("dapr section missing from configuration, exiting")
            sys.exit(-1)
        self.producer = producer
        dapr_config = config['dapr']
        self.daprClient = DaprClient()
        if "topics" not in dapr_config:
            # Fixed typo in user-facing message: "sepcified" -> "specified".
            print("no topics specified, exiting")
            sys.exit(-1)
        # Comma-separated topic list; strip any embedded whitespace first.
        self.topics = dapr_config.get('topics').replace(" ", "").split(',')
        for topic in self.topics:
            kuksa_message = self.producer.client.getValue(topic)
            jsonMsg = json.loads(kuksa_message)
            req_data = {
                'id': 0,
                'timestamp': jsonMsg["timestamp"],
                'value': jsonMsg["value"],
                'topic': topic
            }
            # Publish the initial value once, then subscribe for updates.
            self.publisherTopic(topic, req_data)
            self.producer.subscribe(topic, self.publisher)

    def publisher(self, kuksa_message):
        """Subscription callback: resolve the subscription id back to its
        topic and publish the updated value."""
        jsonMsg = json.loads(kuksa_message)
        topic = self.producer.subscriptionMap[jsonMsg["subscriptionId"]]
        print("KUKSA: " + topic)
        req_data = {
            'id': jsonMsg["subscriptionId"],
            'timestamp': jsonMsg["timestamp"],
            'value': jsonMsg["value"],
            'topic': topic
        }
        self.publisherTopic(topic, req_data)

    def publisherTopic(self, topic, data):
        """Publish `data` as a JSON event on `topic` via the 'pubsub' component."""
        # Create a typed message with content type and body
        resp = self.daprClient.publish_event(
            pubsub_name='pubsub',
            topic_name=topic,
            data=json.dumps(data),
            data_content_type='application/json',
        )
        # Print the request
        print(data, flush=True)

    def shutdown(self):
        """Stop the underlying producer."""
        self.producer.shutdown()
def main():
    """Resolve analytics credentials and start the app.

    Each credential comes from its environment-derived constant when set,
    otherwise from the Dapr secret store. The duplicated fetch logic from
    the original is factored into _fetch_secret.
    """
    global analytics_key, analytics_endpoint
    if COGNITIVE_SERVICE_API_KEY == '':
        analytics_key = _fetch_secret(SECRET_STORE_KEY)
    else:
        analytics_key = COGNITIVE_SERVICE_API_KEY
    if COGNITIVE_SERVICE_API_ENDPOINT == '':
        analytics_endpoint = _fetch_secret(SECRET_STORE_ENDPOINT)
    else:
        analytics_endpoint = COGNITIVE_SERVICE_API_ENDPOINT
    app.run(APP_PORT)


def _fetch_secret(secret_key):
    """Read a single secret value from the configured Dapr secret store."""
    with DaprClient() as d:
        resp = d.get_secret(SECRET_STORE_NAME, secret_key)
        return resp.secret[secret_key]
def test_generic_client_unknown_protocol(self):
    """DaprClient must reject an unrecognized invocation protocol and must
    accept both supported ones."""
    settings.DAPR_API_METHOD_INVOCATION_PROTOCOL = 'unknown'
    expected_msg = "Unknown value for DAPR_API_METHOD_INVOCATION_PROTOCOL: UNKNOWN"
    with self.assertRaises(DaprInternalError) as ctx:
        client = DaprClient()
    self.assertEqual(expected_msg, str(ctx.exception))

    # Both supported protocols construct a client without raising.
    for protocol in ('grpc', 'http'):
        settings.DAPR_API_METHOD_INVOCATION_PROTOCOL = protocol
        client = DaprClient()
        self.assertIsNotNone(client)
def test_invoke_method_with_tracer(self):
    """Trace-context headers produced by the callback must reach the server."""
    tracer = Tracer(sampler=samplers.AlwaysOnSampler(),
                    exporter=print_exporter.PrintExporter())

    def trace_headers():
        return tracer.propagator.to_headers(tracer.span_context)

    self.client = DaprClient(headers_callback=trace_headers)
    self.server.set_response(b"FOO")

    with tracer.span(name="test"):
        request_item = common_v1.StateItem(key='test')
        resp = self.client.invoke_method(
            self.app_id,
            self.method_name,
            http_verb='PUT',
            data=request_item,
        )

    sent_headers = self.server.get_request_headers()
    self.assertIn('Traceparent', sent_headers)
    self.assertEqual(b'FOO', resp.data)
class Dapr_Storage():
    """Persists KUKSA VSS signal values into a Dapr state store.

    Stores the current value of each configured topic at start-up, then
    stores every subsequent update delivered via the producer's
    subscription callback.
    """

    def __init__(self, config, producer):
        """Validate the 'dapr' config section, store initial values and
        subscribe to updates for every configured topic.

        Exits the process (sys.exit(-1)) when required config is missing.
        """
        print("Init dapr Storage...")
        if "dapr" not in config:
            print("dapr section missing from configuration, exiting")
            sys.exit(-1)
        self.producer = producer
        dapr_config = config['dapr']
        if "topics" not in dapr_config:
            # Fixed typo in user-facing message: "sepcified" -> "specified".
            print("no topics specified, exiting")
            sys.exit(-1)
        # Comma-separated topic list; strip any embedded whitespace first.
        self.topics = dapr_config.get('topics').replace(" ", "").split(',')
        self.daprClient = DaprClient()
        for topic in self.topics:
            self.storeValue(topic, self.producer.client.getValue(topic))
            self.producer.subscribe(topic, self.store)

    def storeValue(self, path, message):
        """Save the datapoint value from `message` under key `path` in the
        'statestore' component, then read it back for verification."""
        storeName = 'statestore'
        jsonMsg = json.loads(message)
        key = path
        value = str(jsonMsg["data"]["dp"]["value"])
        self.daprClient.save_state(store_name=storeName, key=key, value=value)
        print(f"State store has successfully saved {key}: {value}")
        # Read-back check of what was just written.
        state = self.daprClient.get_state(
            store_name=storeName,
            key=key,
            state_metadata={"metakey": "metavalue"})
        print(f"Got value={state.data} eTag={state.etag}")

    def store(self, message):
        """Subscription callback: resolve the subscription id back to its
        topic/path and persist the update."""
        jsonMsg = json.loads(message)
        key = self.producer.subscriptionMap[jsonMsg["subscriptionId"]]
        self.storeValue(key, message)

    def shutdown(self):
        """Stop the underlying producer."""
        self.producer.shutdown()
def send_email():
    """Invoke the 'create' operation on the email output binding.

    Relies on module-level emailTo, subject, data and binding_name.
    """
    with DaprClient() as d:
        payload = {
            'metadata': {
                'emailTo': emailTo,
                'subject': subject
            },
            'data': data
        }
        print(payload, flush=True)
        # Send the request as a JSON-encoded binding invocation.
        resp = d.invoke_binding(binding_name, 'create', json.dumps(payload))
        print(resp, flush=True)
def binding(request: BindingRequest):
    """Handle an incoming tweet: persist it, score its sentiment via the
    processor service, and re-publish the enriched record."""
    raw = request.text()
    tweet = extract_tweets(json.loads(raw))
    logging.info(tweet)
    with DaprClient() as d:
        tweet_data = json.dumps(tweet)
        # Persist the raw tweet keyed by its id.
        d.save_state(STORE_NAME, tweet['id'], tweet_data)
        # Ask the processor service for a sentiment score.
        resp = d.invoke_service(id='tweet-processor',
                                method='sentiment-score',
                                data=tweet_data)
        tweet['sentiment'] = json.loads(resp.data)
        # Publish the enriched tweet for downstream consumers.
        d.publish_event(PUBSUB_NAME, TOPIC_NAME, json.dumps(tweet))
from dapr.clients import DaprClient
from opencensus.trace.tracer import Tracer
from opencensus.trace import time_event as time_event_module
from opencensus.ext.zipkin.trace_exporter import ZipkinExporter
from opencensus.trace.samplers import AlwaysOnSampler

# Export spans to a local Zipkin collector.
ze = ZipkinExporter(service_name="python-example",
                    host_name='localhost',
                    port=9411,
                    endpoint='/api/v2/spans')
tracer = Tracer(exporter=ze, sampler=AlwaysOnSampler())

with tracer.span(name="main") as span:
    with DaprClient(tracer=tracer) as d:
        num_messages = 2
        for i in range(num_messages):
            payload = {'id': i, 'message': 'hello world'}
            # Create a typed message with content type and body
            resp = d.invoke_method(
                'invoke-receiver',
                'say',
                data=json.dumps(payload),
            )
            # Print the response
            print(resp.content_type, flush=True)
from dapr.clients import DaprClient
from uuid import uuid4
import os

from context import WorkflowContext

step_name = "step_1"

with WorkflowContext(step_name) as context:
    with DaprClient(context["dapr_address"]) as d:
        state_store = os.environ.get('STATE_STORE_NAME')
        sample_key = os.environ.get('SAMPLE_KEY_NAME')
        new_value = uuid4().hex
        print(f"Storing key locally:\n\tkey: {sample_key}\n\tvalue: {new_value}")
        # Persist the freshly generated value under the sample key.
        resp = d.save_state(store_name=state_store, key=sample_key, value=new_value)
        context.set_value(f"{step_name}: Stored key/value", f"{sample_key}-{new_value}")
def test_timeout_exception_thrown_when_timeout_reached(self):
    """A per-client timeout shorter than the server delay raises TimeoutError."""
    impatient_client = DaprClient(http_timeout_seconds=1)
    # Make the fake server respond slower than the client is willing to wait.
    self.server.set_server_delay(1.5)
    with self.assertRaises(TimeoutError):
        impatient_client.invoke_method(self.app_id, self.method_name, "")
class DaprInvocationHttpClientTests(unittest.TestCase):
    """Unit tests for HTTP-based service invocation through DaprClient,
    run against a local FakeHttpServer standing in for the Dapr sidecar.

    Fix applied: test_global_timeout_setting_is_honored now restores the
    global timeout in a finally block so a failure cannot leak a 1-second
    timeout into other tests.
    """

    def setUp(self):
        # Fresh fake sidecar per test; route the SDK at it over HTTP.
        self.server = FakeHttpServer()
        self.server_port = self.server.get_port()
        self.server.start()
        settings.DAPR_HTTP_PORT = self.server_port
        settings.DAPR_API_METHOD_INVOCATION_PROTOCOL = 'http'
        self.client = DaprClient()
        self.app_id = 'fakeapp'
        self.method_name = 'fakemethod'
        self.invoke_url = f'/v1.0/invoke/{self.app_id}/method/{self.method_name}'

    def tearDown(self):
        self.server.shutdown_server()
        # Reset globals mutated by individual tests.
        settings.DAPR_API_TOKEN = None
        settings.DAPR_API_METHOD_INVOCATION_PROTOCOL = 'http'

    def test_basic_invoke(self):
        self.server.set_response(b"STRING_BODY")
        response = self.client.invoke_method(self.app_id, self.method_name, "")
        self.assertEqual(b"STRING_BODY", response.data)
        self.assertEqual(self.invoke_url, self.server.request_path())

    def test_invoke_PUT_with_body(self):
        self.server.set_response(b"STRING_BODY")
        response = self.client.invoke_method(self.app_id, self.method_name, b"FOO",
                                             http_verb='PUT')
        self.assertEqual(b"STRING_BODY", response.data)
        self.assertEqual(self.invoke_url, self.server.request_path())
        self.assertEqual(b"FOO", self.server.get_request_body())

    def test_invoke_PUT_with_bytes_body(self):
        # NOTE(review): byte-identical to test_invoke_PUT_with_body; one of
        # the two was presumably meant to pass a str body — confirm intent.
        self.server.set_response(b"STRING_BODY")
        response = self.client.invoke_method(self.app_id, self.method_name, b"FOO",
                                             http_verb='PUT')
        self.assertEqual(b"STRING_BODY", response.data)
        self.assertEqual(self.invoke_url, self.server.request_path())
        self.assertEqual(b"FOO", self.server.get_request_body())

    def test_invoke_GET_with_query_params(self):
        self.server.set_response(b"STRING_BODY")
        query_params = (('key1', 'value1'), ('key2', 'value2'))
        response = self.client.invoke_method(self.app_id, self.method_name, '',
                                             http_querystring=query_params)
        self.assertEqual(b"STRING_BODY", response.data)
        self.assertEqual(f"{self.invoke_url}?key1=value1&key2=value2",
                         self.server.request_path())

    def test_invoke_GET_with_duplicate_query_params(self):
        self.server.set_response(b"STRING_BODY")
        query_params = (('key1', 'value1'), ('key1', 'value2'))
        response = self.client.invoke_method(self.app_id, self.method_name, '',
                                             http_querystring=query_params)
        self.assertEqual(b"STRING_BODY", response.data)
        # Repeated keys must be preserved in order, not collapsed.
        self.assertEqual(f"{self.invoke_url}?key1=value1&key1=value2",
                         self.server.request_path())

    def test_invoke_PUT_with_content_type(self):
        self.server.set_response(b"STRING_BODY")
        sample_object = {'foo': ['val1', 'val2']}
        response = self.client.invoke_method(self.app_id, self.method_name,
                                             json.dumps(sample_object),
                                             content_type='application/json')
        self.assertEqual(b"STRING_BODY", response.data)
        self.assertEqual(b'{"foo": ["val1", "val2"]}', self.server.get_request_body())

    def test_invoke_method_proto_data(self):
        self.server.set_response(b"\x0a\x04resp")
        self.server.reply_header('Content-Type', 'application/x-protobuf')
        req = common_v1.StateItem(key='test')
        resp = self.client.invoke_method(self.app_id, self.method_name,
                                         http_verb='PUT', data=req)
        self.assertEqual(b"\x0a\x04test", self.server.get_request_body())
        # unpack to new protobuf object
        new_resp = common_v1.StateItem()
        resp.unpack(new_resp)
        self.assertEqual('resp', new_resp.key)

    def test_invoke_method_metadata(self):
        self.server.set_response(b"FOO")
        req = common_v1.StateItem(key='test')
        resp = self.client.invoke_method(self.app_id, self.method_name,
                                         http_verb='PUT', data=req,
                                         metadata=(('header1', 'value1'),
                                                   ('header2', 'value2')))
        request_headers = self.server.get_request_headers()
        self.assertEqual(b'FOO', resp.data)
        self.assertEqual('value1', request_headers['header1'])
        self.assertEqual('value2', request_headers['header2'])

    def test_invoke_method_protobuf_response_with_suffix(self):
        self.server.set_response(b"\x0a\x04resp")
        # Content type with a parameter suffix must still parse as protobuf.
        self.server.reply_header('Content-Type', 'application/x-protobuf; gzip')
        req = common_v1.StateItem(key='test')
        resp = self.client.invoke_method(self.app_id, self.method_name,
                                         http_verb='PUT', data=req,
                                         metadata=(('header1', 'value1'),
                                                   ('header2', 'value2')))
        self.assertEqual(b"\x0a\x04test", self.server.get_request_body())
        # unpack to new protobuf object
        new_resp = common_v1.StateItem()
        resp.unpack(new_resp)
        self.assertEqual('resp', new_resp.key)

    def test_invoke_method_protobuf_response_case_insensitive(self):
        self.server.set_response(b"\x0a\x04resp")
        # Content-type matching must be case-insensitive.
        self.server.reply_header('Content-Type', 'apPlicaTion/x-protobuf; gzip')
        req = common_v1.StateItem(key='test')
        resp = self.client.invoke_method(self.app_id, self.method_name,
                                         http_verb='PUT', data=req,
                                         metadata=(('header1', 'value1'),
                                                   ('header2', 'value2')))
        self.assertEqual(b"\x0a\x04test", self.server.get_request_body())
        # unpack to new protobuf object
        new_resp = common_v1.StateItem()
        resp.unpack(new_resp)
        self.assertEqual('resp', new_resp.key)

    def test_invoke_method_error_returned(self):
        error_response = b'{"errorCode":"ERR_DIRECT_INVOKE","message":"Something bad happend"}'
        self.server.set_response(error_response, 500)
        expected_msg = "('Something bad happend', 'ERR_DIRECT_INVOKE')"
        with self.assertRaises(DaprInternalError) as ctx:
            self.client.invoke_method(
                self.app_id,
                self.method_name,
                http_verb='PUT',
                data='FOO',
            )
        self.assertEqual(expected_msg, str(ctx.exception))

    def test_invoke_method_non_dapr_error(self):
        # A 500 body that is not Dapr's error JSON becomes a generic error.
        error_response = b'UNPARSABLE_ERROR'
        self.server.set_response(error_response, 500)
        expected_msg = "Unknown Dapr Error. HTTP status code: 500"
        with self.assertRaises(DaprInternalError) as ctx:
            self.client.invoke_method(
                self.app_id,
                self.method_name,
                http_verb='PUT',
                data='FOO',
            )
        self.assertEqual(expected_msg, str(ctx.exception))

    def test_generic_client_unknown_protocol(self):
        settings.DAPR_API_METHOD_INVOCATION_PROTOCOL = 'unknown'
        expected_msg = "Unknown value for DAPR_API_METHOD_INVOCATION_PROTOCOL: UNKNOWN"
        with self.assertRaises(DaprInternalError) as ctx:
            client = DaprClient()
        self.assertEqual(expected_msg, str(ctx.exception))
        settings.DAPR_API_METHOD_INVOCATION_PROTOCOL = 'grpc'
        client = DaprClient()
        self.assertIsNotNone(client)
        settings.DAPR_API_METHOD_INVOCATION_PROTOCOL = 'http'
        client = DaprClient()
        self.assertIsNotNone(client)

    def test_invoke_method_with_api_token(self):
        self.server.set_response(b"FOO")
        settings.DAPR_API_TOKEN = 'c29saSBkZW8gZ2xvcmlhCg=='
        req = common_v1.StateItem(key='test')
        resp = self.client.invoke_method(
            self.app_id,
            self.method_name,
            http_verb='PUT',
            data=req,
        )
        # The configured API token must be forwarded as a request header.
        request_headers = self.server.get_request_headers()
        self.assertEqual('c29saSBkZW8gZ2xvcmlhCg==', request_headers['dapr-api-token'])
        self.assertEqual(b'FOO', resp.data)

    def test_invoke_method_with_tracer(self):
        tracer = Tracer(sampler=samplers.AlwaysOnSampler(),
                        exporter=print_exporter.PrintExporter())
        self.client = DaprClient(
            headers_callback=lambda: tracer.propagator.to_headers(tracer.span_context))
        self.server.set_response(b"FOO")
        with tracer.span(name="test"):
            req = common_v1.StateItem(key='test')
            resp = self.client.invoke_method(
                self.app_id,
                self.method_name,
                http_verb='PUT',
                data=req,
            )
        request_headers = self.server.get_request_headers()
        self.assertIn('Traceparent', request_headers)
        self.assertEqual(b'FOO', resp.data)

    def test_timeout_exception_thrown_when_timeout_reached(self):
        new_client = DaprClient(http_timeout_seconds=1)
        self.server.set_server_delay(1.5)
        with self.assertRaises(TimeoutError):
            new_client.invoke_method(self.app_id, self.method_name, "")

    def test_global_timeout_setting_is_honored(self):
        previous_timeout = settings.DAPR_HTTP_TIMEOUT_SECONDS
        settings.DAPR_HTTP_TIMEOUT_SECONDS = 1
        try:
            new_client = DaprClient()
            self.server.set_server_delay(1.5)
            with self.assertRaises(TimeoutError):
                new_client.invoke_method(self.app_id, self.method_name, "")
        finally:
            # Fix: restore the global timeout even if the assertion fails.
            settings.DAPR_HTTP_TIMEOUT_SECONDS = previous_timeout
# ------------------------------------------------------------
# Copyright (c) Microsoft Corporation.
# Licensed under the MIT License.
# ------------------------------------------------------------

import json
import time

from dapr.clients import DaprClient

# Publish a numbered hello-world message to TOPIC_A every two seconds.
with DaprClient() as client:
    message_id = 0
    while True:
        message_id += 1
        payload = {'id': message_id, 'message': 'hello world'}
        # Publish the JSON-encoded message on the 'pubsub' component.
        resp = client.publish_event(
            pubsub_name='pubsub',
            topic_name='TOPIC_A',
            data=json.dumps(payload),
        )
        # Echo what was just published.
        print(payload, flush=True)
        time.sleep(2)
from dapr.clients import DaprClient
from opencensus.trace.tracer import Tracer
from opencensus.ext.zipkin.trace_exporter import ZipkinExporter
from opencensus.trace.samplers import AlwaysOnSampler

# Ship spans to a local Zipkin collector.
ze = ZipkinExporter(service_name="python-example",
                    host_name='localhost',
                    port=9411,
                    endpoint='/api/v2/spans')
tracer = Tracer(exporter=ze, sampler=AlwaysOnSampler())


def _trace_headers():
    # Propagate the current span context on every outgoing request.
    return tracer.propagator.to_headers(tracer.span_context)


with tracer.span(name="main") as span:
    with DaprClient(headers_callback=_trace_headers) as d:
        num_messages = 2
        for i in range(num_messages):
            # Create a typed message with content type and body
            resp = d.invoke_method(
                'invoke-receiver',
                'say',
                data=json.dumps({'id': i, 'message': 'hello world'}),
            )
            # Print the response
            print(resp.content_type, flush=True)
from dapr.clients import DaprClient
import json
import time
import logging

logging.basicConfig(level=logging.INFO)

# Fix: open the client once instead of creating (and tearing down) a brand
# new DaprClient connection on every loop iteration, as the original did.
with DaprClient() as client:
    for i in range(1, 10):
        order = {'orderId': i}
        # Publish an event/message using Dapr PubSub
        result = client.publish_event(
            pubsub_name='order_pub_sub',
            topic_name='orders',
            data=json.dumps(order),
            data_content_type='application/json',
        )
        logging.info('Published data: ' + json.dumps(order))
        time.sleep(1)
# remote (cloud) components, to be discovered remote_components = [] # secret store components; value indicates if installed secret_component_map = {'aws-secrets-manager': False, 'gcp-secret-manager': False, 'azure-keyvault': False} # local components; value indicates if installed local_component_map = {'local-mqtt': False} # topic names for local MQTT component; blank if not defined local_mqtt_topic_map = {'out': os.getenv('RELAY_OUT_TOPIC', ''), 'in': os.getenv('RELAY_IN_TOPIC', '')} dapr_port = os.getenv("DAPR_HTTP_PORT", 3500) local_publish_url = f'http://localhost:{dapr_port}/v1.0/publish/local-mqtt/{local_mqtt_topic_map["in"]}?metadata.rawPayload=true' app = App() dc = DaprClient() def find_components(): """Build list of remote components from all components configured with dapr. :return: List of components found """ components = [] try: response = requests.get(f"http://localhost:{dapr_port}/v1.0/metadata") # print("components found " + str(response.content), flush=True) response_json = json.loads(response.content) for component in response_json["components"]: name = component['name'] if name in local_component_map: