From 331f5805c60be3af8cb4e70719f82f3f3f8ecf3c Mon Sep 17 00:00:00 2001 From: Cassie Coyle Date: Fri, 31 Oct 2025 07:11:21 +0000 Subject: [PATCH 01/16] Merge release-1.16 branch changes to Main (#851) * 1.16.0-rc1 Signed-off-by: Elena Kolevska * [Conversation API - Alpha2] Add new tool calling capability (#822) (#832) * initial * fixes after proto change upstream * minor name changes and cleanup unused function * refactors, updates to readme, linting * feedback * feedback, updates * fix import in examples * cleanup, import, lint, more conversation helpers * clarify README, minor test import changes, copyright * feedback DRY test_conversation file * lint * move conversation classes in _response module to conversation module. Some example README refactor/lint * minor readme change * Update daprdocs/content/en/python-sdk-docs/python-client.md * lint * updates to fix issue with tool calling helper when dealing with classes instead of dataclasses, and also with serializatin output of the tool back to the LLM * coalesce conv helper tests, fix typing lint * make indent line method doc more dev friendly * tackle some feedback, still missing unit tests * add unit test to convert_value_to_struct * more unit tests per feedback * make async version of unit test conversation * add some information how to run markdown tests with a different runtime * ran tox -e ruff, even though tox -e flake8 was fine * add tests to increase coverage in conversation and conversation_helpers that codecov pointed out * add more information on execute registered tools, also added more tests for them to validate * fix test failing on py 1.13. 
Merge two unit test files per feedback * Linter * fix typing issue with UnionType in py3.9 --------- Signed-off-by: Filinto Duran <1373693+filintod@users.noreply.github.com> Signed-off-by: Elena Kolevska Co-authored-by: Albert Callarisa Co-authored-by: Elena Kolevska Co-authored-by: Elena Kolevska * update docs with tool calling helpers info (#838) Signed-off-by: Filinto Duran <1373693+filintod@users.noreply.github.com> * 1.16.0rc2 Signed-off-by: Elena Kolevska * use latest durabletask (#840) Signed-off-by: Cassandra Coyle * 1.16.0 Signed-off-by: Elena Kolevska * Adds support for interceptors and concurrency_options arguments in the workflow engine (#841) Signed-off-by: Albert Callarisa * Implement multi-app workflows (#844) * feat: Adds support for cross-app calls. Signed-off-by: Albert Callarisa * Use durabletask alpha.9 Signed-off-by: Albert Callarisa * Added examples for error scenarios in multi-app workflow Signed-off-by: Albert Callarisa * Remove unnecessary hardcoded ports Signed-off-by: Albert Callarisa --------- Signed-off-by: Albert Callarisa * chore: Rename wait_until_ready to wait_for_sidecar (#843) Signed-off-by: Albert Callarisa Co-authored-by: Elena Kolevska * 1.16.1rc1 (#846) Signed-off-by: Albert Callarisa --------- Signed-off-by: Elena Kolevska Signed-off-by: Filinto Duran <1373693+filintod@users.noreply.github.com> Signed-off-by: Cassandra Coyle Signed-off-by: Albert Callarisa Co-authored-by: Elena Kolevska Co-authored-by: Filinto Duran <1373693+filintod@users.noreply.github.com> Co-authored-by: Albert Callarisa Co-authored-by: Elena Kolevska Co-authored-by: Albert Callarisa --- README.md | 2 +- dapr/aio/clients/grpc/client.py | 2 +- dapr/aio/clients/grpc/subscription.py | 2 +- dapr/clients/grpc/client.py | 2 +- dapr/clients/grpc/subscription.py | 2 +- dapr/clients/health.py | 10 ++ dapr/clients/http/client.py | 2 +- dapr/version/version.py | 2 +- dev-requirements.txt | 2 +- .../demo_actor/demo_actor/requirements.txt | 2 +- 
.../demo_workflow/requirements.txt | 2 +- examples/invoke-simple/requirements.txt | 4 +- examples/w3c-tracing/requirements.txt | 4 +- examples/workflow/README.md | 141 +++++++++++++++++- examples/workflow/cross-app1.py | 58 +++++++ examples/workflow/cross-app2.py | 50 +++++++ examples/workflow/cross-app3.py | 32 ++++ examples/workflow/requirements.txt | 4 +- .../dapr/ext/fastapi/version.py | 2 +- ext/dapr-ext-fastapi/setup.cfg | 4 +- ext/dapr-ext-grpc/dapr/ext/grpc/version.py | 2 +- ext/dapr-ext-grpc/setup.cfg | 4 +- .../ext/workflow/dapr_workflow_context.py | 53 ++++++- .../dapr/ext/workflow/version.py | 2 +- .../dapr/ext/workflow/workflow_context.py | 23 ++- .../dapr/ext/workflow/workflow_runtime.py | 20 ++- ext/dapr-ext-workflow/setup.cfg | 6 +- .../tests/test_dapr_workflow_context.py | 4 +- ext/flask_dapr/flask_dapr/version.py | 2 +- ext/flask_dapr/setup.cfg | 2 +- tests/clients/test_heatlhcheck.py | 16 +- 31 files changed, 402 insertions(+), 61 deletions(-) create mode 100644 examples/workflow/cross-app1.py create mode 100644 examples/workflow/cross-app2.py create mode 100644 examples/workflow/cross-app3.py diff --git a/README.md b/README.md index df576494e..17e434808 100644 --- a/README.md +++ b/README.md @@ -53,7 +53,7 @@ pip3 install dapr-ext-fastapi ```sh # Install Dapr client sdk -pip3 install dapr-dev +pip3 install dapr # Install Dapr gRPC AppCallback service extension pip3 install dapr-ext-grpc-dev diff --git a/dapr/aio/clients/grpc/client.py b/dapr/aio/clients/grpc/client.py index 995b82680..1b76dcb0f 100644 --- a/dapr/aio/clients/grpc/client.py +++ b/dapr/aio/clients/grpc/client.py @@ -153,7 +153,7 @@ def __init__( max_grpc_message_length (int, optional): The maximum grpc send and receive message length in bytes. 
""" - DaprHealth.wait_until_ready() + DaprHealth.wait_for_sidecar() self.retry_policy = retry_policy or RetryPolicy() useragent = f'dapr-sdk-python/{__version__}' diff --git a/dapr/aio/clients/grpc/subscription.py b/dapr/aio/clients/grpc/subscription.py index 9aabf8b28..e0e380ca6 100644 --- a/dapr/aio/clients/grpc/subscription.py +++ b/dapr/aio/clients/grpc/subscription.py @@ -51,7 +51,7 @@ async def outgoing_request_iterator(): async def reconnect_stream(self): await self.close() - DaprHealth.wait_until_ready() + DaprHealth.wait_for_sidecar() print('Attempting to reconnect...') await self.start() diff --git a/dapr/clients/grpc/client.py b/dapr/clients/grpc/client.py index e4ffb2646..6c276dd3f 100644 --- a/dapr/clients/grpc/client.py +++ b/dapr/clients/grpc/client.py @@ -145,7 +145,7 @@ def __init__( message length in bytes. retry_policy (RetryPolicy optional): Specifies retry behaviour """ - DaprHealth.wait_until_ready() + DaprHealth.wait_for_sidecar() self.retry_policy = retry_policy or RetryPolicy() useragent = f'dapr-sdk-python/{__version__}' diff --git a/dapr/clients/grpc/subscription.py b/dapr/clients/grpc/subscription.py index 111946b1b..6dcfcb4dd 100644 --- a/dapr/clients/grpc/subscription.py +++ b/dapr/clients/grpc/subscription.py @@ -65,7 +65,7 @@ def outgoing_request_iterator(): def reconnect_stream(self): self.close() - DaprHealth.wait_until_ready() + DaprHealth.wait_for_sidecar() print('Attempting to reconnect...') self.start() diff --git a/dapr/clients/health.py b/dapr/clients/health.py index e3daec79d..37c42a875 100644 --- a/dapr/clients/health.py +++ b/dapr/clients/health.py @@ -15,6 +15,7 @@ import urllib.request import urllib.error import time +from warnings import warn from dapr.clients.http.conf import DAPR_API_TOKEN_HEADER, USER_AGENT_HEADER, DAPR_USER_AGENT from dapr.clients.http.helpers import get_api_url @@ -24,6 +25,15 @@ class DaprHealth: @staticmethod def wait_until_ready(): + warn( + 'This method is deprecated. 
Use DaprHealth.wait_for_sidecar instead.', + DeprecationWarning, + stacklevel=2, + ) + DaprHealth.wait_for_sidecar() + + @staticmethod + def wait_for_sidecar(): health_url = f'{get_api_url()}/healthz/outbound' headers = {USER_AGENT_HEADER: DAPR_USER_AGENT} if settings.DAPR_API_TOKEN is not None: diff --git a/dapr/clients/http/client.py b/dapr/clients/http/client.py index 86e9ab6f0..f6f95aa74 100644 --- a/dapr/clients/http/client.py +++ b/dapr/clients/http/client.py @@ -51,7 +51,7 @@ def __init__( timeout (int, optional): Timeout in seconds, defaults to 60. headers_callback (lambda: Dict[str, str]], optional): Generates header for each request. """ - DaprHealth.wait_until_ready() + DaprHealth.wait_for_sidecar() self._timeout = aiohttp.ClientTimeout(total=timeout) self._serializer = message_serializer diff --git a/dapr/version/version.py b/dapr/version/version.py index 112a2520f..8c6c12960 100644 --- a/dapr/version/version.py +++ b/dapr/version/version.py @@ -13,4 +13,4 @@ limitations under the License. 
""" -__version__ = '1.15.0.dev' +__version__ = '1.16.1rc1' diff --git a/dev-requirements.txt b/dev-requirements.txt index cbd719859..461d92391 100644 --- a/dev-requirements.txt +++ b/dev-requirements.txt @@ -15,7 +15,7 @@ Flask>=1.1 # needed for auto fix ruff===0.2.2 # needed for dapr-ext-workflow -durabletask-dapr >= 0.2.0a7 +durabletask-dapr >= 0.2.0a9 # needed for .env file loading in examples python-dotenv>=1.0.0 # needed for enhanced schema generation from function features diff --git a/examples/demo_actor/demo_actor/requirements.txt b/examples/demo_actor/demo_actor/requirements.txt index 4c2215b51..9496602ed 100644 --- a/examples/demo_actor/demo_actor/requirements.txt +++ b/examples/demo_actor/demo_actor/requirements.txt @@ -1 +1 @@ -dapr-ext-fastapi-dev>=1.15.0.dev +dapr-ext-fastapi>=1.16.1rc1 diff --git a/examples/demo_workflow/demo_workflow/requirements.txt b/examples/demo_workflow/demo_workflow/requirements.txt index 7f7a666d2..a70b02692 100644 --- a/examples/demo_workflow/demo_workflow/requirements.txt +++ b/examples/demo_workflow/demo_workflow/requirements.txt @@ -1 +1 @@ -dapr-ext-workflow-dev>=1.15.0.dev \ No newline at end of file +dapr-ext-workflow>=1.16.1rc1 diff --git a/examples/invoke-simple/requirements.txt b/examples/invoke-simple/requirements.txt index ee0ce7078..e77f5d6e2 100644 --- a/examples/invoke-simple/requirements.txt +++ b/examples/invoke-simple/requirements.txt @@ -1,2 +1,2 @@ -dapr-ext-grpc-dev >= 1.15.0.dev -dapr-dev >= 1.15.0.dev +dapr-ext-grpc >= 1.16.1rc1 +dapr >= 1.16.1rc1 diff --git a/examples/w3c-tracing/requirements.txt b/examples/w3c-tracing/requirements.txt index cd15885bd..514e2606a 100644 --- a/examples/w3c-tracing/requirements.txt +++ b/examples/w3c-tracing/requirements.txt @@ -1,5 +1,5 @@ -dapr-ext-grpc-dev >= 1.15.0.dev -dapr-dev >= 1.15.0.dev +dapr-ext-grpc >= 1.16.1rc1 +dapr >= 1.16.1rc1 opentelemetry-sdk opentelemetry-instrumentation-grpc opentelemetry-exporter-zipkin diff --git a/examples/workflow/README.md 
b/examples/workflow/README.md index f5b901d1c..2e09eeef8 100644 --- a/examples/workflow/README.md +++ b/examples/workflow/README.md @@ -20,7 +20,7 @@ pip3 install -r requirements.txt Each of the examples in this directory can be run directly from the command line. ### Simple Workflow -This example represents a workflow that manages counters through a series of activities and child workflows. +This example represents a workflow that manages counters through a series of activities and child workflows. It shows several Dapr Workflow features including: - Basic activity execution with counter increments - Retryable activities with configurable retry policies @@ -57,7 +57,7 @@ timeout_seconds: 30 --> ```sh -dapr run --app-id wf-simple-example --dapr-grpc-port 50001 -- python3 simple.py +dapr run --app-id wf-simple-example -- python3 simple.py ``` @@ -99,7 +99,7 @@ timeout_seconds: 30 --> ```sh -dapr run --app-id wfexample --dapr-grpc-port 50001 -- python3 task_chaining.py +dapr run --app-id wfexample -- python3 task_chaining.py ``` @@ -146,7 +146,7 @@ timeout_seconds: 30 --> ```sh -dapr run --app-id wfexample --dapr-grpc-port 50001 -- python3 fan_out_fan_in.py +dapr run --app-id wfexample -- python3 fan_out_fan_in.py ``` @@ -186,7 +186,7 @@ This example demonstrates how to use a workflow to interact with a human user. 
T The Dapr CLI can be started using the following command: ```sh -dapr run --app-id wfexample --dapr-grpc-port 50001 +dapr run --app-id wfexample ``` In a separate terminal window, run the following command to start the Python workflow app: @@ -222,7 +222,7 @@ This example demonstrates how to eternally running workflow that polls an endpoi The Dapr CLI can be started using the following command: ```sh -dapr run --app-id wfexample --dapr-grpc-port 50001 +dapr run --app-id wfexample ``` In a separate terminal window, run the following command to start the Python workflow app: @@ -254,7 +254,7 @@ This workflow runs forever or until you press `ENTER` to stop it. Starting the a This example demonstrates how to call a child workflow. The Dapr CLI can be started using the following command: ```sh -dapr run --app-id wfexample --dapr-grpc-port 50001 +dapr run --app-id wfexample ``` In a separate terminal window, run the following command to start the Python workflow app: @@ -269,4 +269,129 @@ When you run the example, you will see output like this: *** Calling child workflow 29a7592a1e874b07aad2bb58de309a51-child *** Child workflow 6feadc5370184b4998e50875b20084f6 called ... -``` \ No newline at end of file +``` + + +### Cross-app Workflow + +This example demonstrates how to call child workflows and activities in different apps. The multiple Dapr CLI instances can be started using the following commands: + + + +```sh +dapr run --app-id wfexample3 python3 cross-app3.py & +dapr run --app-id wfexample2 python3 cross-app2.py & +dapr run --app-id wfexample1 python3 cross-app1.py +``` + + +When you run the apps, you will see output like this: +``` +... +app1 - triggering app2 workflow +app2 - triggering app3 activity +... +``` +among others. This shows that the workflow calls are working as expected. + + +#### Error handling on activity calls + +This example demonstrates how the error handling works on activity calls across apps. 
+ +Error handling on activity calls across apps works as normal workflow activity calls. + +In this example we run `app3` in failing mode, which makes the activity call return error constantly. The activity call from `app2` will fail after the retry policy is exhausted. + + + +```sh +export ERROR_ACTIVITY_MODE=true +dapr run --app-id wfexample3 python3 cross-app3.py & +dapr run --app-id wfexample2 python3 cross-app2.py & +dapr run --app-id wfexample1 python3 cross-app1.py +``` + + + +When you run the apps with the `ERROR_ACTIVITY_MODE` environment variable set, you will see output like this: +``` +... +app3 - received activity call +app3 - raising error in activity due to error mode being enabled +app2 - received activity error from app3 +... +``` +among others. This shows that the activity calls are failing as expected, and they are being handled as expected too. + + +#### Error handling on workflow calls + +This example demonstrates how the error handling works on workflow calls across apps. + +Error handling on workflow calls across apps works as normal workflow calls. + +In this example we run `app2` in failing mode, which makes the workflow call return error constantly. The workflow call from `app1` will fail after the retry policy is exhausted. + + + +```sh +export ERROR_WORKFLOW_MODE=true +dapr run --app-id wfexample3 python3 cross-app3.py & +dapr run --app-id wfexample2 python3 cross-app2.py & +dapr run --app-id wfexample1 python3 cross-app1.py +``` + + +When you run the apps with the `ERROR_WORKFLOW_MODE` environment variable set, you will see output like this: +``` +... +app2 - received workflow call +app2 - raising error in workflow due to error mode being enabled +app1 - received workflow error from app2 +... +``` +among others. This shows that the workflow calls are failing as expected, and they are being handled as expected too. 
+ diff --git a/examples/workflow/cross-app1.py b/examples/workflow/cross-app1.py new file mode 100644 index 000000000..f84de662a --- /dev/null +++ b/examples/workflow/cross-app1.py @@ -0,0 +1,58 @@ +# -*- coding: utf-8 -*- +# Copyright 2025 The Dapr Authors +# Licensed under the Apache License, Version 2.0 (the "License"); +# you may not use this file except in compliance with the License. +# You may obtain a copy of the License at +# http://www.apache.org/licenses/LICENSE-2.0 +# Unless required by applicable law or agreed to in writing, software +# distributed under the License is distributed on an "AS IS" BASIS, +# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. +# See the License for the specific language governing permissions and +# limitations under the License. + +from datetime import timedelta + +from durabletask.task import TaskFailedError +import dapr.ext.workflow as wf +import time + +wfr = wf.WorkflowRuntime() + + +@wfr.workflow +def app1_workflow(ctx: wf.DaprWorkflowContext): + print(f'app1 - received workflow call', flush=True) + print(f'app1 - triggering app2 workflow', flush=True) + + try: + retry_policy = wf.RetryPolicy( + max_number_of_attempts=2, + first_retry_interval=timedelta(milliseconds=100), + max_retry_interval=timedelta(seconds=3), + ) + yield ctx.call_child_workflow( + workflow='app2_workflow', + input=None, + app_id='wfexample2', + retry_policy=retry_policy, + ) + print(f'app1 - received workflow result', flush=True) + except TaskFailedError as e: + print(f'app1 - received workflow error from app2', flush=True) + + print(f'app1 - returning workflow result', flush=True) + return 1 + + +if __name__ == '__main__': + wfr.start() + time.sleep(10) # wait for workflow runtime to start + + wf_client = wf.DaprWorkflowClient() + print(f'app1 - triggering app1 workflow', flush=True) + instance_id = wf_client.schedule_new_workflow(workflow=app1_workflow) + + # Wait for the workflow to complete + time.sleep(7) + + 
wfr.shutdown() diff --git a/examples/workflow/cross-app2.py b/examples/workflow/cross-app2.py new file mode 100644 index 000000000..4cb30874c --- /dev/null +++ b/examples/workflow/cross-app2.py @@ -0,0 +1,50 @@ +# -*- coding: utf-8 -*- +# Copyright 2025 The Dapr Authors +# Licensed under the Apache License, Version 2.0 (the "License"); +# you may not use this file except in compliance with the License. +# You may obtain a copy of the License at +# http://www.apache.org/licenses/LICENSE-2.0 +# Unless required by applicable law or agreed to in writing, software +# distributed under the License is distributed on an "AS IS" BASIS, +# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. +# See the License for the specific language governing permissions and +# limitations under the License. + +from datetime import timedelta +import os + +from durabletask.task import TaskFailedError +import dapr.ext.workflow as wf +import time + +wfr = wf.WorkflowRuntime() + + +@wfr.workflow +def app2_workflow(ctx: wf.DaprWorkflowContext): + print(f'app2 - received workflow call', flush=True) + if os.getenv('ERROR_WORKFLOW_MODE', 'false') == 'true': + print(f'app2 - raising error in workflow due to error mode being enabled', flush=True) + raise ValueError('Error in workflow due to error mode being enabled') + print(f'app2 - triggering app3 activity', flush=True) + try: + retry_policy = wf.RetryPolicy( + max_number_of_attempts=2, + first_retry_interval=timedelta(milliseconds=100), + max_retry_interval=timedelta(seconds=3), + ) + result = yield ctx.call_activity( + 'app3_activity', input=None, app_id='wfexample3', retry_policy=retry_policy + ) + print(f'app2 - received activity result', flush=True) + except TaskFailedError as e: + print(f'app2 - received activity error from app3', flush=True) + + print(f'app2 - returning workflow result', flush=True) + return 2 + + +if __name__ == '__main__': + wfr.start() + time.sleep(15) # wait for workflow runtime to start + 
wfr.shutdown() diff --git a/examples/workflow/cross-app3.py b/examples/workflow/cross-app3.py new file mode 100644 index 000000000..ecc945ca3 --- /dev/null +++ b/examples/workflow/cross-app3.py @@ -0,0 +1,32 @@ +# -*- coding: utf-8 -*- +# Copyright 2025 The Dapr Authors +# Licensed under the Apache License, Version 2.0 (the "License"); +# you may not use this file except in compliance with the License. +# You may obtain a copy of the License at +# http://www.apache.org/licenses/LICENSE-2.0 +# Unless required by applicable law or agreed to in writing, software +# distributed under the License is distributed on an "AS IS" BASIS, +# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. +# See the License for the specific language governing permissions and +# limitations under the License. +import os +import dapr.ext.workflow as wf +import time + +wfr = wf.WorkflowRuntime() + + +@wfr.activity +def app3_activity(ctx: wf.DaprWorkflowContext) -> int: + print(f'app3 - received activity call', flush=True) + if os.getenv('ERROR_ACTIVITY_MODE', 'false') == 'true': + print(f'app3 - raising error in activity due to error mode being enabled', flush=True) + raise ValueError('Error in activity due to error mode being enabled') + print(f'app3 - returning activity result', flush=True) + return 3 + + +if __name__ == '__main__': + wfr.start() + time.sleep(15) # wait for workflow runtime to start + wfr.shutdown() diff --git a/examples/workflow/requirements.txt b/examples/workflow/requirements.txt index e220036d6..fab86e728 100644 --- a/examples/workflow/requirements.txt +++ b/examples/workflow/requirements.txt @@ -1,2 +1,2 @@ -dapr-ext-workflow-dev>=1.15.0.dev -dapr-dev>=1.15.0.dev +dapr-ext-workflow>=1.16.1rc1 +dapr>=1.16.1rc1 diff --git a/ext/dapr-ext-fastapi/dapr/ext/fastapi/version.py b/ext/dapr-ext-fastapi/dapr/ext/fastapi/version.py index 112a2520f..8c6c12960 100644 --- a/ext/dapr-ext-fastapi/dapr/ext/fastapi/version.py +++ 
b/ext/dapr-ext-fastapi/dapr/ext/fastapi/version.py @@ -13,4 +13,4 @@ limitations under the License. """ -__version__ = '1.15.0.dev' +__version__ = '1.16.1rc1' diff --git a/ext/dapr-ext-fastapi/setup.cfg b/ext/dapr-ext-fastapi/setup.cfg index 560a795f8..8b6080ebf 100644 --- a/ext/dapr-ext-fastapi/setup.cfg +++ b/ext/dapr-ext-fastapi/setup.cfg @@ -24,7 +24,7 @@ python_requires = >=3.9 packages = find_namespace: include_package_data = True install_requires = - dapr-dev >= 1.15.0.dev + dapr >= 1.16.1rc1 uvicorn >= 0.11.6 fastapi >= 0.60.1 @@ -32,5 +32,5 @@ install_requires = include = dapr.* -exclude = +exclude = tests diff --git a/ext/dapr-ext-grpc/dapr/ext/grpc/version.py b/ext/dapr-ext-grpc/dapr/ext/grpc/version.py index 112a2520f..8c6c12960 100644 --- a/ext/dapr-ext-grpc/dapr/ext/grpc/version.py +++ b/ext/dapr-ext-grpc/dapr/ext/grpc/version.py @@ -13,4 +13,4 @@ limitations under the License. """ -__version__ = '1.15.0.dev' +__version__ = '1.16.1rc1' diff --git a/ext/dapr-ext-grpc/setup.cfg b/ext/dapr-ext-grpc/setup.cfg index caf84a2ec..d08757c78 100644 --- a/ext/dapr-ext-grpc/setup.cfg +++ b/ext/dapr-ext-grpc/setup.cfg @@ -24,12 +24,12 @@ python_requires = >=3.9 packages = find_namespace: include_package_data = True install_requires = - dapr-dev >= 1.15.0.dev + dapr >= 1.16.1rc1 cloudevents >= 1.0.0 [options.packages.find] include = dapr.* -exclude = +exclude = tests diff --git a/ext/dapr-ext-workflow/dapr/ext/workflow/dapr_workflow_context.py b/ext/dapr-ext-workflow/dapr/ext/workflow/dapr_workflow_context.py index 2dee46fe2..476ab765f 100644 --- a/ext/dapr-ext-workflow/dapr/ext/workflow/dapr_workflow_context.py +++ b/ext/dapr-ext-workflow/dapr/ext/workflow/dapr_workflow_context.py @@ -63,11 +63,29 @@ def create_timer(self, fire_at: Union[datetime, timedelta]) -> task.Task: def call_activity( self, - activity: Callable[[WorkflowActivityContext, TInput], TOutput], + activity: Union[Callable[[WorkflowActivityContext, TInput], TOutput], str], *, input: TInput = None, 
retry_policy: Optional[RetryPolicy] = None, + app_id: Optional[str] = None, ) -> task.Task[TOutput]: + # Handle string activity names for cross-app scenarios + if isinstance(activity, str): + activity_name = activity + if app_id is not None: + self._logger.debug( + f'{self.instance_id}: Creating cross-app activity {activity_name} for app {app_id}' + ) + else: + self._logger.debug(f'{self.instance_id}: Creating activity {activity_name}') + + if retry_policy is None: + return self.__obj.call_activity(activity=activity_name, input=input, app_id=app_id) + return self.__obj.call_activity( + activity=activity_name, input=input, retry_policy=retry_policy.obj, app_id=app_id + ) + + # Handle function activity objects (original behavior) self._logger.debug(f'{self.instance_id}: Creating activity {activity.__name__}') if hasattr(activity, '_dapr_alternate_name'): act = activity.__dict__['_dapr_alternate_name'] @@ -75,17 +93,38 @@ def call_activity( # this case should ideally never happen act = activity.__name__ if retry_policy is None: - return self.__obj.call_activity(activity=act, input=input) - return self.__obj.call_activity(activity=act, input=input, retry_policy=retry_policy.obj) + return self.__obj.call_activity(activity=act, input=input, app_id=app_id) + return self.__obj.call_activity( + activity=act, input=input, retry_policy=retry_policy.obj, app_id=app_id + ) def call_child_workflow( self, - workflow: Workflow, + workflow: Union[Workflow, str], *, input: Optional[TInput] = None, instance_id: Optional[str] = None, retry_policy: Optional[RetryPolicy] = None, + app_id: Optional[str] = None, ) -> task.Task[TOutput]: + # Handle string workflow names for cross-app scenarios + if isinstance(workflow, str): + workflow_name = workflow + self._logger.debug(f'{self.instance_id}: Creating child workflow {workflow_name}') + + if retry_policy is None: + return self.__obj.call_sub_orchestrator( + workflow_name, input=input, instance_id=instance_id, app_id=app_id + ) + return 
self.__obj.call_sub_orchestrator( + workflow_name, + input=input, + instance_id=instance_id, + retry_policy=retry_policy.obj, + app_id=app_id, + ) + + # Handle function workflow objects (original behavior) self._logger.debug(f'{self.instance_id}: Creating child workflow {workflow.__name__}') def wf(ctx: task.OrchestrationContext, inp: TInput): @@ -100,9 +139,11 @@ def wf(ctx: task.OrchestrationContext, inp: TInput): # this case should ideally never happen wf.__name__ = workflow.__name__ if retry_policy is None: - return self.__obj.call_sub_orchestrator(wf, input=input, instance_id=instance_id) + return self.__obj.call_sub_orchestrator( + wf, input=input, instance_id=instance_id, app_id=app_id + ) return self.__obj.call_sub_orchestrator( - wf, input=input, instance_id=instance_id, retry_policy=retry_policy.obj + wf, input=input, instance_id=instance_id, retry_policy=retry_policy.obj, app_id=app_id ) def wait_for_external_event(self, name: str) -> task.Task: diff --git a/ext/dapr-ext-workflow/dapr/ext/workflow/version.py b/ext/dapr-ext-workflow/dapr/ext/workflow/version.py index 112a2520f..8c6c12960 100644 --- a/ext/dapr-ext-workflow/dapr/ext/workflow/version.py +++ b/ext/dapr-ext-workflow/dapr/ext/workflow/version.py @@ -13,4 +13,4 @@ limitations under the License. 
""" -__version__ = '1.15.0.dev' +__version__ = '1.16.1rc1' diff --git a/ext/dapr-ext-workflow/dapr/ext/workflow/workflow_context.py b/ext/dapr-ext-workflow/dapr/ext/workflow/workflow_context.py index b4c85f6a6..d6e6ba072 100644 --- a/ext/dapr-ext-workflow/dapr/ext/workflow/workflow_context.py +++ b/ext/dapr-ext-workflow/dapr/ext/workflow/workflow_context.py @@ -107,18 +107,22 @@ def create_timer(self, fire_at: Union[datetime, timedelta]) -> task.Task: @abstractmethod def call_activity( - self, activity: Activity[TOutput], *, input: Optional[TInput] = None + self, + activity: Union[Activity[TOutput], str], + *, + input: Optional[TInput] = None, + app_id: Optional[str] = None, ) -> task.Task[TOutput]: """Schedule an activity for execution. Parameters ---------- - activity: Activity[TInput, TOutput] - A reference to the activity function to call. + activity: Activity[TInput, TOutput] | str + A reference to the activity function to call, or a string name for cross-app activities. input: TInput | None The JSON-serializable input (or None) to pass to the activity. - return_type: task.Task[TOutput] - The JSON-serializable output type to expect from the activity result. + app_id: str | None + The AppID that will execute the activity. Returns ------- @@ -130,22 +134,25 @@ def call_activity( @abstractmethod def call_child_workflow( self, - orchestrator: Workflow[TOutput], + orchestrator: Union[Workflow[TOutput], str], *, input: Optional[TInput] = None, instance_id: Optional[str] = None, + app_id: Optional[str] = None, ) -> task.Task[TOutput]: """Schedule child-workflow function for execution. Parameters ---------- - orchestrator: Orchestrator[TInput, TOutput] - A reference to the orchestrator function to call. + orchestrator: Orchestrator[TInput, TOutput] | str + A reference to the orchestrator function to call, or a string name for cross-app workflows. input: TInput The optional JSON-serializable input to pass to the orchestrator function. 
instance_id: str A unique ID to use for the sub-orchestration instance. If not specified, a random UUID will be used. + app_id: str + The AppID that will execute the workflow. Returns ------- diff --git a/ext/dapr-ext-workflow/dapr/ext/workflow/workflow_runtime.py b/ext/dapr-ext-workflow/dapr/ext/workflow/workflow_runtime.py index d1f02b354..9f4be6222 100644 --- a/ext/dapr-ext-workflow/dapr/ext/workflow/workflow_runtime.py +++ b/ext/dapr-ext-workflow/dapr/ext/workflow/workflow_runtime.py @@ -15,7 +15,8 @@ import inspect from functools import wraps -from typing import Optional, TypeVar +from typing import Optional, TypeVar, Union, Sequence +import grpc from durabletask import worker, task @@ -34,6 +35,13 @@ TInput = TypeVar('TInput') TOutput = TypeVar('TOutput') +ClientInterceptor = Union[ + grpc.UnaryUnaryClientInterceptor, + grpc.UnaryStreamClientInterceptor, + grpc.StreamUnaryClientInterceptor, + grpc.StreamStreamClientInterceptor, +] + class WorkflowRuntime: """WorkflowRuntime is the entry point for registering workflows and activities.""" @@ -43,6 +51,10 @@ def __init__( host: Optional[str] = None, port: Optional[str] = None, logger_options: Optional[LoggerOptions] = None, + interceptors: Optional[Sequence[ClientInterceptor]] = None, + maximum_concurrent_activity_work_items: Optional[int] = None, + maximum_concurrent_orchestration_work_items: Optional[int] = None, + maximum_thread_pool_workers: Optional[int] = None, ): self._logger = Logger('WorkflowRuntime', logger_options) metadata = tuple() @@ -62,6 +74,12 @@ def __init__( secure_channel=uri.tls, log_handler=options.log_handler, log_formatter=options.log_formatter, + interceptors=interceptors, + concurrency_options=worker.ConcurrencyOptions( + maximum_concurrent_activity_work_items=maximum_concurrent_activity_work_items, + maximum_concurrent_orchestration_work_items=maximum_concurrent_orchestration_work_items, + maximum_thread_pool_workers=maximum_thread_pool_workers, + ), ) def register_workflow(self, fn: 
Workflow, *, name: Optional[str] = None): diff --git a/ext/dapr-ext-workflow/setup.cfg b/ext/dapr-ext-workflow/setup.cfg index 3776ec899..83869566c 100644 --- a/ext/dapr-ext-workflow/setup.cfg +++ b/ext/dapr-ext-workflow/setup.cfg @@ -24,12 +24,12 @@ python_requires = >=3.9 packages = find_namespace: include_package_data = True install_requires = - dapr-dev >= 1.15.0.dev - durabletask-dapr >= 0.2.0a7 + dapr >= 1.16.1rc1 + durabletask-dapr >= 0.2.0a9 [options.packages.find] include = dapr.* -exclude = +exclude = tests diff --git a/ext/dapr-ext-workflow/tests/test_dapr_workflow_context.py b/ext/dapr-ext-workflow/tests/test_dapr_workflow_context.py index 9fdfe0440..3ae5fdaf5 100644 --- a/ext/dapr-ext-workflow/tests/test_dapr_workflow_context.py +++ b/ext/dapr-ext-workflow/tests/test_dapr_workflow_context.py @@ -36,10 +36,10 @@ def __init__(self): def create_timer(self, fire_at): return mock_create_timer - def call_activity(self, activity, input): + def call_activity(self, activity, input, app_id): return mock_call_activity - def call_sub_orchestrator(self, orchestrator, input, instance_id): + def call_sub_orchestrator(self, orchestrator, input, instance_id, app_id): return mock_call_sub_orchestrator def set_custom_status(self, custom_status): diff --git a/ext/flask_dapr/flask_dapr/version.py b/ext/flask_dapr/flask_dapr/version.py index 112a2520f..8c6c12960 100644 --- a/ext/flask_dapr/flask_dapr/version.py +++ b/ext/flask_dapr/flask_dapr/version.py @@ -13,4 +13,4 @@ limitations under the License. 
""" -__version__ = '1.15.0.dev' +__version__ = '1.16.1rc1' diff --git a/ext/flask_dapr/setup.cfg b/ext/flask_dapr/setup.cfg index 64d15941d..531a9aea5 100644 --- a/ext/flask_dapr/setup.cfg +++ b/ext/flask_dapr/setup.cfg @@ -26,4 +26,4 @@ include_package_data = true zip_safe = false install_requires = Flask >= 1.1 - dapr-dev >= 1.15.0.dev + dapr >= 1.16.1rc1 diff --git a/tests/clients/test_heatlhcheck.py b/tests/clients/test_heatlhcheck.py index f3be8a475..d447e072c 100644 --- a/tests/clients/test_heatlhcheck.py +++ b/tests/clients/test_heatlhcheck.py @@ -24,13 +24,13 @@ class DaprHealthCheckTests(unittest.TestCase): @patch.object(settings, 'DAPR_HTTP_ENDPOINT', 'http://domain.com:3500') @patch('urllib.request.urlopen') - def test_wait_until_ready_success(self, mock_urlopen): + def test_wait_for_sidecar_success(self, mock_urlopen): mock_urlopen.return_value.__enter__.return_value = MagicMock(status=200) try: - DaprHealth.wait_until_ready() + DaprHealth.wait_for_sidecar() except Exception as e: - self.fail(f'wait_until_ready() raised an exception unexpectedly: {e}') + self.fail(f'wait_for_sidecar() raised an exception unexpectedly: {e}') mock_urlopen.assert_called_once() @@ -45,13 +45,13 @@ def test_wait_until_ready_success(self, mock_urlopen): @patch.object(settings, 'DAPR_HTTP_ENDPOINT', 'http://domain.com:3500') @patch.object(settings, 'DAPR_API_TOKEN', 'mytoken') @patch('urllib.request.urlopen') - def test_wait_until_ready_success_with_api_token(self, mock_urlopen): + def test_wait_for_sidecar_success_with_api_token(self, mock_urlopen): mock_urlopen.return_value.__enter__.return_value = MagicMock(status=200) try: - DaprHealth.wait_until_ready() + DaprHealth.wait_for_sidecar() except Exception as e: - self.fail(f'wait_until_ready() raised an exception unexpectedly: {e}') + self.fail(f'wait_for_sidecar() raised an exception unexpectedly: {e}') mock_urlopen.assert_called_once() @@ -64,13 +64,13 @@ def test_wait_until_ready_success_with_api_token(self, mock_urlopen): 
@patch.object(settings, 'DAPR_HEALTH_TIMEOUT', '2.5') @patch('urllib.request.urlopen') - def test_wait_until_ready_timeout(self, mock_urlopen): + def test_wait_for_sidecar_timeout(self, mock_urlopen): mock_urlopen.return_value.__enter__.return_value = MagicMock(status=500) start = time.time() with self.assertRaises(TimeoutError): - DaprHealth.wait_until_ready() + DaprHealth.wait_for_sidecar() self.assertGreaterEqual(time.time() - start, 2.5) self.assertGreater(mock_urlopen.call_count, 1) From c44c28d83f0648052094af6e7dfaa5a362fcf118 Mon Sep 17 00:00:00 2001 From: Albert Callarisa Date: Tue, 4 Nov 2025 17:43:16 +0100 Subject: [PATCH 02/16] Lint update (#854) * Linting and tooling updates - Update ruff and set it to fix imports - Remove flake8 and replace it with ruff Signed-off-by: Albert Callarisa * run `tox -e ruff` Signed-off-by: Albert Callarisa --------- Signed-off-by: Albert Callarisa --- .devcontainer/devcontainer.json | 1 - .github/workflows/build-push-to-main.yaml | 3 - .github/workflows/build-tag.yaml | 3 - .github/workflows/build.yaml | 5 +- README.md | 32 +++--- dapr/actor/__init__.py | 1 - dapr/actor/client/proxy.py | 5 +- dapr/actor/runtime/_reminder_data.py | 1 - dapr/actor/runtime/_state_provider.py | 7 +- dapr/actor/runtime/_type_information.py | 6 +- dapr/actor/runtime/actor.py | 5 +- dapr/actor/runtime/context.py | 6 +- dapr/actor/runtime/manager.py | 7 +- dapr/actor/runtime/method_dispatcher.py | 3 +- dapr/actor/runtime/reentrancy_context.py | 2 +- dapr/actor/runtime/runtime.py | 10 +- dapr/actor/runtime/state_change.py | 2 +- dapr/aio/clients/__init__.py | 15 +-- dapr/aio/clients/grpc/_request.py | 2 +- dapr/aio/clients/grpc/_response.py | 8 +- dapr/aio/clients/grpc/client.py | 104 +++++++++--------- dapr/aio/clients/grpc/interceptors.py | 6 +- dapr/aio/clients/grpc/subscription.py | 3 +- dapr/clients/__init__.py | 16 +-- dapr/clients/base.py | 25 ++--- dapr/clients/exceptions.py | 3 +- dapr/clients/grpc/_conversation_helpers.py | 37 +++---- 
dapr/clients/grpc/_helpers.py | 14 +-- dapr/clients/grpc/_jobs.py | 3 +- dapr/clients/grpc/_response.py | 30 +++-- dapr/clients/grpc/_state.py | 3 +- dapr/clients/grpc/client.py | 100 ++++++++--------- dapr/clients/grpc/conversation.py | 7 +- dapr/clients/grpc/interceptors.py | 6 +- dapr/clients/grpc/subscription.py | 11 +- dapr/clients/health.py | 7 +- dapr/clients/http/client.py | 10 +- dapr/clients/http/dapr_actor_http_client.py | 4 +- .../http/dapr_invocation_http_client.py | 6 +- dapr/clients/retry.py | 5 +- dapr/common/pubsub/subscription.py | 4 +- dapr/conf/helpers.py | 4 +- dapr/serializers/base.py | 6 +- dapr/serializers/json.py | 6 +- dev-requirements.txt | 3 +- examples/configuration/configuration.py | 3 +- .../real_llm_providers_example.py | 42 +++---- examples/crypto/crypto-async.py | 2 +- examples/crypto/crypto.py | 2 +- examples/demo_actor/demo_actor/demo_actor.py | 5 +- .../demo_actor/demo_actor_client.py | 5 +- .../demo_actor/demo_actor/demo_actor_flask.py | 6 +- .../demo_actor/demo_actor_interface.py | 18 +-- .../demo_actor/demo_actor_service.py | 6 +- examples/demo_workflow/app.py | 11 +- examples/distributed_lock/lock.py | 3 +- examples/error_handling/error_handling.py | 1 - .../grpc_proxying/helloworld_service_pb2.py | 2 + .../helloworld_service_pb2_grpc.py | 2 +- examples/grpc_proxying/invoke-caller.py | 3 +- examples/grpc_proxying/invoke-receiver.py | 3 +- examples/invoke-custom-data/invoke-caller.py | 4 +- .../invoke-custom-data/invoke-receiver.py | 3 +- .../invoke-custom-data/proto/response_pb2.py | 2 + .../proto/response_pb2_grpc.py | 1 - examples/invoke-http/invoke-caller.py | 1 - examples/invoke-http/invoke-receiver.py | 3 +- examples/jobs/job_management.py | 17 +-- examples/jobs/job_processing.py | 5 +- examples/pubsub-simple/subscriber.py | 5 +- .../subscriber-handler.py | 1 + examples/state_store/state_store.py | 2 - .../state_store_query/state_store_query.py | 3 +- examples/w3c-tracing/invoke-receiver.py | 4 +- 
examples/workflow/child_workflow.py | 3 +- examples/workflow/cross-app1.py | 18 +-- examples/workflow/cross-app2.py | 22 ++-- examples/workflow/cross-app3.py | 9 +- examples/workflow/fan_out_fan_in.py | 1 + examples/workflow/human_approval.py | 5 +- examples/workflow/monitor.py | 3 +- examples/workflow/simple.py | 10 +- examples/workflow/task_chaining.py | 1 - .../dapr/ext/fastapi/__init__.py | 1 - .../dapr/ext/fastapi/actor.py | 4 +- ext/dapr-ext-fastapi/dapr/ext/fastapi/app.py | 1 + ext/dapr-ext-fastapi/tests/test_app.py | 3 +- ext/dapr-ext-fastapi/tests/test_dapractor.py | 3 +- ext/dapr-ext-grpc/dapr/ext/grpc/__init__.py | 7 +- .../dapr/ext/grpc/_health_servicer.py | 2 +- ext/dapr-ext-grpc/dapr/ext/grpc/_servicer.py | 16 +-- ext/dapr-ext-grpc/dapr/ext/grpc/app.py | 8 +- ext/dapr-ext-grpc/tests/test_app.py | 2 +- ext/dapr-ext-grpc/tests/test_servicier.py | 9 +- .../dapr/ext/workflow/__init__.py | 4 +- .../dapr/ext/workflow/dapr_workflow_client.py | 11 +- .../ext/workflow/dapr_workflow_context.py | 11 +- .../dapr/ext/workflow/logger/__init__.py | 2 +- .../dapr/ext/workflow/logger/logger.py | 1 + .../dapr/ext/workflow/logger/options.py | 2 +- .../dapr/ext/workflow/retry_policy.py | 2 +- .../dapr/ext/workflow/util.py | 2 +- .../ext/workflow/workflow_activity_context.py | 1 + .../dapr/ext/workflow/workflow_context.py | 4 +- .../dapr/ext/workflow/workflow_runtime.py | 13 +-- .../dapr/ext/workflow/workflow_state.py | 2 +- .../tests/test_dapr_workflow_context.py | 3 +- .../tests/test_workflow_activity_context.py | 3 +- .../tests/test_workflow_client.py | 7 +- .../tests/test_workflow_runtime.py | 7 +- .../tests/test_workflow_util.py | 3 +- ext/flask_dapr/flask_dapr/app.py | 1 + pyproject.toml | 23 ++-- setup.cfg | 14 --- tests/actor/fake_actor_classes.py | 36 +++--- tests/actor/fake_client.py | 28 +++-- tests/actor/test_actor.py | 9 +- tests/actor/test_actor_factory.py | 5 +- tests/actor/test_actor_manager.py | 5 +- tests/actor/test_actor_reentrancy.py | 14 +-- 
tests/actor/test_actor_runtime.py | 7 +- tests/actor/test_actor_runtime_config.py | 4 +- tests/actor/test_client_proxy.py | 10 +- tests/actor/test_method_dispatcher.py | 3 +- tests/actor/test_mock_actor.py | 33 ++---- tests/actor/test_state_manager.py | 5 +- tests/actor/test_timer_data.py | 2 +- tests/actor/test_type_information.py | 4 +- tests/actor/test_type_utils.py | 9 +- tests/clients/certs.py | 2 +- tests/clients/fake_dapr_server.py | 53 +++++---- tests/clients/fake_http_server.py | 3 +- tests/clients/test_conversation.py | 21 ++-- tests/clients/test_conversation_helpers.py | 31 +++--- tests/clients/test_dapr_grpc_client.py | 31 +++--- tests/clients/test_dapr_grpc_client_async.py | 25 +++-- .../test_dapr_grpc_client_async_secure.py | 5 +- tests/clients/test_dapr_grpc_client_secure.py | 3 +- tests/clients/test_dapr_grpc_helpers.py | 12 +- tests/clients/test_dapr_grpc_request.py | 6 +- tests/clients/test_dapr_grpc_request_async.py | 4 +- tests/clients/test_dapr_grpc_response.py | 9 +- .../clients/test_dapr_grpc_response_async.py | 2 +- tests/clients/test_exceptions.py | 2 +- tests/clients/test_heatlhcheck.py | 3 +- tests/clients/test_http_helpers.py | 2 +- .../test_http_service_invocation_client.py | 3 +- tests/clients/test_jobs.py | 3 +- tests/clients/test_retries_policy.py | 5 +- tests/clients/test_retries_policy_async.py | 5 +- ...t_secure_http_service_invocation_client.py | 4 +- tests/clients/test_subscription.py | 7 +- tests/clients/test_timeout_interceptor.py | 1 + .../clients/test_timeout_interceptor_async.py | 1 + .../test_default_json_serializer.py | 2 +- tests/serializers/test_util.py | 4 +- tox.ini | 10 +- 157 files changed, 651 insertions(+), 738 deletions(-) diff --git a/.devcontainer/devcontainer.json b/.devcontainer/devcontainer.json index f03d84153..6042b7429 100644 --- a/.devcontainer/devcontainer.json +++ b/.devcontainer/devcontainer.json @@ -9,7 +9,6 @@ "terminal.integrated.shell.linux": "/bin/bash", "python.pythonPath": 
"/usr/local/bin/python", "python.linting.enabled": true, - "python.linting.flake8Enabled": true, "python.linting.mypyEnabled": true, }, diff --git a/.github/workflows/build-push-to-main.yaml b/.github/workflows/build-push-to-main.yaml index 4a3a63053..47273aa0a 100644 --- a/.github/workflows/build-push-to-main.yaml +++ b/.github/workflows/build-push-to-main.yaml @@ -30,9 +30,6 @@ jobs: echo "Source files are not formatted correctly. Run 'tox -e ruff' to autoformat." exit 1 fi - - name: Run Linter - run: | - tox -e flake8 build: needs: lint diff --git a/.github/workflows/build-tag.yaml b/.github/workflows/build-tag.yaml index 46593dd96..176fbaf04 100644 --- a/.github/workflows/build-tag.yaml +++ b/.github/workflows/build-tag.yaml @@ -34,9 +34,6 @@ jobs: echo "Source files are not formatted correctly. Run 'tox -e ruff' to autoformat." exit 1 fi - - name: Run Linter - run: | - tox -e flake8 build: needs: lint diff --git a/.github/workflows/build.yaml b/.github/workflows/build.yaml index 67f052fae..e2a254237 100644 --- a/.github/workflows/build.yaml +++ b/.github/workflows/build.yaml @@ -36,9 +36,6 @@ jobs: echo "Source files are not formatted correctly. Run 'tox -e ruff' to autoformat." 
exit 1 fi - - name: Run Linter - run: | - tox -e flake8 build: needs: lint @@ -64,4 +61,4 @@ jobs: run: | tox -e py`echo "${{ matrix.python_ver }}" | sed 's/\.//g'` - name: Upload test coverage - uses: codecov/codecov-action@v5 \ No newline at end of file + uses: codecov/codecov-action@v5 diff --git a/README.md b/README.md index 17e434808..a65b9ec51 100644 --- a/README.md +++ b/README.md @@ -1,13 +1,13 @@ # Dapr SDK for Python -[![PyPI - Version](https://img.shields.io/pypi/v/dapr?style=flat&logo=pypi&logoColor=white&label=Latest%20version)](https://pypi.org/project/dapr/) -[![PyPI - Downloads](https://img.shields.io/pypi/dm/dapr?style=flat&logo=pypi&logoColor=white&label=Downloads)](https://pypi.org/project/dapr/) -[![GitHub Actions Workflow Status](https://img.shields.io/github/actions/workflow/status/dapr/python-sdk/.github%2Fworkflows%2Fbuild.yaml?branch=main&label=Build&logo=github)](https://github.com/dapr/python-sdk/actions/workflows/build.yaml) -[![codecov](https://codecov.io/gh/dapr/python-sdk/branch/main/graph/badge.svg)](https://codecov.io/gh/dapr/python-sdk) -[![GitHub License](https://img.shields.io/github/license/dapr/python-sdk?style=flat&label=License&logo=github)](https://github.com/dapr/python-sdk/blob/main/LICENSE) -[![GitHub issue custom search in repo](https://img.shields.io/github/issues-search/dapr/python-sdk?query=type%3Aissue%20is%3Aopen%20label%3A%22good%20first%20issue%22&label=Good%20first%20issues&style=flat&logo=github)](https://github.com/dapr/python-sdk/issues?q=is%3Aissue+is%3Aopen+label%3A%22good+first+issue%22) -[![Discord](https://img.shields.io/discord/778680217417809931?label=Discord&style=flat&logo=discord)](http://bit.ly/dapr-discord) -[![YouTube Channel Views](https://img.shields.io/youtube/channel/views/UCtpSQ9BLB_3EXdWAUQYwnRA?style=flat&label=YouTube%20views&logo=youtube)](https://youtube.com/@daprdev) +[![PyPI - 
Version](https://img.shields.io/pypi/v/dapr?style=flat&logo=pypi&logoColor=white&label=Latest%20version)](https://pypi.org/project/dapr/) +[![PyPI - Downloads](https://img.shields.io/pypi/dm/dapr?style=flat&logo=pypi&logoColor=white&label=Downloads)](https://pypi.org/project/dapr/) +[![GitHub Actions Workflow Status](https://img.shields.io/github/actions/workflow/status/dapr/python-sdk/.github%2Fworkflows%2Fbuild.yaml?branch=main&label=Build&logo=github)](https://github.com/dapr/python-sdk/actions/workflows/build.yaml) +[![codecov](https://codecov.io/gh/dapr/python-sdk/branch/main/graph/badge.svg)](https://codecov.io/gh/dapr/python-sdk) +[![GitHub License](https://img.shields.io/github/license/dapr/python-sdk?style=flat&label=License&logo=github)](https://github.com/dapr/python-sdk/blob/main/LICENSE) +[![GitHub issue custom search in repo](https://img.shields.io/github/issues-search/dapr/python-sdk?query=type%3Aissue%20is%3Aopen%20label%3A%22good%20first%20issue%22&label=Good%20first%20issues&style=flat&logo=github)](https://github.com/dapr/python-sdk/issues?q=is%3Aissue+is%3Aopen+label%3A%22good+first+issue%22) +[![Discord](https://img.shields.io/discord/778680217417809931?label=Discord&style=flat&logo=discord)](http://bit.ly/dapr-discord) +[![YouTube Channel Views](https://img.shields.io/youtube/channel/views/UCtpSQ9BLB_3EXdWAUQYwnRA?style=flat&label=YouTube%20views&logo=youtube)](https://youtube.com/@daprdev) [![X (formerly Twitter) Follow](https://img.shields.io/twitter/follow/daprdev?logo=x&style=flat)](https://twitter.com/daprdev) @@ -94,31 +94,25 @@ pip3 install -e ./ext/dapr-ext-workflow/ pip3 install -r dev-requirements.txt ``` -4. Run linter - -```bash -tox -e flake8 -``` - -5. Run autofix +4. Run linter and autofix ```bash tox -e ruff ``` -6. Run unit-test +5. Run unit-test ```bash tox -e py311 ``` -7. Run type check +6. Run type check ```bash tox -e type ``` -8. Run examples +7. 
Run examples ```bash tox -e examples @@ -153,7 +147,7 @@ export DAPR_BRANCH=release-1.16 # Optional, defaults to master ./tools/regen_grpcclient.sh ``` -> Note: The `grpcio-tools` version we're using doesn't support Python 3.13. +> Note: The `grpcio-tools` version we're using doesn't support Python 3.13. ## Help & Feedback diff --git a/dapr/actor/__init__.py b/dapr/actor/__init__.py index 4323caae2..bf21f488c 100644 --- a/dapr/actor/__init__.py +++ b/dapr/actor/__init__.py @@ -20,7 +20,6 @@ from dapr.actor.runtime.remindable import Remindable from dapr.actor.runtime.runtime import ActorRuntime - __all__ = [ 'ActorInterface', 'ActorProxy', diff --git a/dapr/actor/client/proxy.py b/dapr/actor/client/proxy.py index a7648bf97..dcf1ca436 100644 --- a/dapr/actor/client/proxy.py +++ b/dapr/actor/client/proxy.py @@ -21,8 +21,8 @@ from dapr.actor.runtime._type_utils import get_dispatchable_attrs_from_interface from dapr.clients import DaprActorClientBase, DaprActorHttpClient from dapr.clients.retry import RetryPolicy -from dapr.serializers import Serializer, DefaultJSONSerializer from dapr.conf import settings +from dapr.serializers import DefaultJSONSerializer, Serializer # Actor factory Callable type hint. ACTOR_FACTORY_CALLBACK = Callable[[ActorInterface, str, str], 'ActorProxy'] @@ -35,8 +35,7 @@ def create( actor_type: str, actor_id: ActorId, actor_interface: Optional[Type[ActorInterface]] = None, - ) -> 'ActorProxy': - ... + ) -> 'ActorProxy': ... 
class ActorProxyFactory(ActorFactoryBase): diff --git a/dapr/actor/runtime/_reminder_data.py b/dapr/actor/runtime/_reminder_data.py index 8821c94bc..5453b8162 100644 --- a/dapr/actor/runtime/_reminder_data.py +++ b/dapr/actor/runtime/_reminder_data.py @@ -14,7 +14,6 @@ """ import base64 - from datetime import timedelta from typing import Any, Dict, Optional diff --git a/dapr/actor/runtime/_state_provider.py b/dapr/actor/runtime/_state_provider.py index 54f6b5837..eeb1e4995 100644 --- a/dapr/actor/runtime/_state_provider.py +++ b/dapr/actor/runtime/_state_provider.py @@ -14,12 +14,11 @@ """ import io +from typing import Any, List, Tuple, Type -from typing import Any, List, Type, Tuple -from dapr.actor.runtime.state_change import StateChangeKind, ActorStateChange +from dapr.actor.runtime.state_change import ActorStateChange, StateChangeKind from dapr.clients.base import DaprActorClientBase -from dapr.serializers import Serializer, DefaultJSONSerializer - +from dapr.serializers import DefaultJSONSerializer, Serializer # Mapping StateChangeKind to Dapr State Operation _MAP_CHANGE_KIND_TO_OPERATION = { diff --git a/dapr/actor/runtime/_type_information.py b/dapr/actor/runtime/_type_information.py index 72566eb17..f9171aea8 100644 --- a/dapr/actor/runtime/_type_information.py +++ b/dapr/actor/runtime/_type_information.py @@ -13,10 +13,10 @@ limitations under the License. 
""" -from dapr.actor.runtime.remindable import Remindable -from dapr.actor.runtime._type_utils import is_dapr_actor, get_actor_interfaces +from typing import TYPE_CHECKING, List, Type -from typing import List, Type, TYPE_CHECKING +from dapr.actor.runtime._type_utils import get_actor_interfaces, is_dapr_actor +from dapr.actor.runtime.remindable import Remindable if TYPE_CHECKING: from dapr.actor.actor_interface import ActorInterface # noqa: F401 diff --git a/dapr/actor/runtime/actor.py b/dapr/actor/runtime/actor.py index 79b1e6ab1..fab02fc70 100644 --- a/dapr/actor/runtime/actor.py +++ b/dapr/actor/runtime/actor.py @@ -14,16 +14,15 @@ """ import uuid - from datetime import timedelta from typing import Any, Optional from dapr.actor.id import ActorId from dapr.actor.runtime._method_context import ActorMethodContext -from dapr.actor.runtime.context import ActorRuntimeContext -from dapr.actor.runtime.state_manager import ActorStateManager from dapr.actor.runtime._reminder_data import ActorReminderData from dapr.actor.runtime._timer_data import TIMER_CALLBACK, ActorTimerData +from dapr.actor.runtime.context import ActorRuntimeContext +from dapr.actor.runtime.state_manager import ActorStateManager class Actor: diff --git a/dapr/actor/runtime/context.py b/dapr/actor/runtime/context.py index ec66ba366..b2610bed4 100644 --- a/dapr/actor/runtime/context.py +++ b/dapr/actor/runtime/context.py @@ -13,16 +13,16 @@ limitations under the License. 
""" +from typing import TYPE_CHECKING, Callable, Optional + from dapr.actor.id import ActorId from dapr.actor.runtime._state_provider import StateProvider from dapr.clients.base import DaprActorClientBase from dapr.serializers import Serializer -from typing import Callable, Optional, TYPE_CHECKING - if TYPE_CHECKING: - from dapr.actor.runtime.actor import Actor from dapr.actor.runtime._type_information import ActorTypeInformation + from dapr.actor.runtime.actor import Actor class ActorRuntimeContext: diff --git a/dapr/actor/runtime/manager.py b/dapr/actor/runtime/manager.py index a6d1a792a..969e48e2a 100644 --- a/dapr/actor/runtime/manager.py +++ b/dapr/actor/runtime/manager.py @@ -15,17 +15,16 @@ import asyncio import uuid - from typing import Any, Callable, Coroutine, Dict, Optional from dapr.actor.id import ActorId -from dapr.clients.exceptions import DaprInternalError +from dapr.actor.runtime._method_context import ActorMethodContext +from dapr.actor.runtime._reminder_data import ActorReminderData from dapr.actor.runtime.actor import Actor from dapr.actor.runtime.context import ActorRuntimeContext -from dapr.actor.runtime._method_context import ActorMethodContext from dapr.actor.runtime.method_dispatcher import ActorMethodDispatcher -from dapr.actor.runtime._reminder_data import ActorReminderData from dapr.actor.runtime.reentrancy_context import reentrancy_ctx +from dapr.clients.exceptions import DaprInternalError TIMER_METHOD_NAME = 'fire_timer' REMINDER_METHOD_NAME = 'receive_reminder' diff --git a/dapr/actor/runtime/method_dispatcher.py b/dapr/actor/runtime/method_dispatcher.py index 8d9b65114..ffe66d991 100644 --- a/dapr/actor/runtime/method_dispatcher.py +++ b/dapr/actor/runtime/method_dispatcher.py @@ -14,9 +14,10 @@ """ from typing import Any, Dict, List -from dapr.actor.runtime.actor import Actor + from dapr.actor.runtime._type_information import ActorTypeInformation from dapr.actor.runtime._type_utils import get_dispatchable_attrs +from 
dapr.actor.runtime.actor import Actor class ActorMethodDispatcher: diff --git a/dapr/actor/runtime/reentrancy_context.py b/dapr/actor/runtime/reentrancy_context.py index 0fc9927df..b295b57d7 100644 --- a/dapr/actor/runtime/reentrancy_context.py +++ b/dapr/actor/runtime/reentrancy_context.py @@ -13,7 +13,7 @@ limitations under the License. """ -from typing import Optional from contextvars import ContextVar +from typing import Optional reentrancy_ctx: ContextVar[Optional[str]] = ContextVar('reentrancy_ctx', default=None) diff --git a/dapr/actor/runtime/runtime.py b/dapr/actor/runtime/runtime.py index 3659f1479..b03f0bc75 100644 --- a/dapr/actor/runtime/runtime.py +++ b/dapr/actor/runtime/runtime.py @@ -14,20 +14,18 @@ """ import asyncio - -from typing import Dict, List, Optional, Type, Callable +from typing import Callable, Dict, List, Optional, Type from dapr.actor.id import ActorId +from dapr.actor.runtime._type_information import ActorTypeInformation from dapr.actor.runtime.actor import Actor from dapr.actor.runtime.config import ActorRuntimeConfig from dapr.actor.runtime.context import ActorRuntimeContext -from dapr.actor.runtime._type_information import ActorTypeInformation from dapr.actor.runtime.manager import ActorManager +from dapr.actor.runtime.reentrancy_context import reentrancy_ctx from dapr.clients.http.dapr_actor_http_client import DaprActorHttpClient -from dapr.serializers import Serializer, DefaultJSONSerializer from dapr.conf import settings - -from dapr.actor.runtime.reentrancy_context import reentrancy_ctx +from dapr.serializers import DefaultJSONSerializer, Serializer class ActorRuntime: diff --git a/dapr/actor/runtime/state_change.py b/dapr/actor/runtime/state_change.py index dba21e2c1..4937fcb53 100644 --- a/dapr/actor/runtime/state_change.py +++ b/dapr/actor/runtime/state_change.py @@ -14,7 +14,7 @@ """ from enum import Enum -from typing import TypeVar, Generic, Optional +from typing import Generic, Optional, TypeVar T = TypeVar('T') diff 
--git a/dapr/aio/clients/__init__.py b/dapr/aio/clients/__init__.py index e945b1307..3f7ce6363 100644 --- a/dapr/aio/clients/__init__.py +++ b/dapr/aio/clients/__init__.py @@ -15,14 +15,15 @@ from typing import Callable, Dict, List, Optional, Union +from google.protobuf.message import Message as GrpcMessage + +from dapr.aio.clients.grpc.client import DaprGrpcClientAsync, InvokeMethodResponse, MetadataTuple from dapr.clients.base import DaprActorClientBase -from dapr.clients.exceptions import DaprInternalError, ERROR_CODE_UNKNOWN -from dapr.aio.clients.grpc.client import DaprGrpcClientAsync, MetadataTuple, InvokeMethodResponse -from dapr.clients.grpc._jobs import Job, FailurePolicy, DropFailurePolicy, ConstantFailurePolicy +from dapr.clients.exceptions import ERROR_CODE_UNKNOWN, DaprInternalError +from dapr.clients.grpc._jobs import ConstantFailurePolicy, DropFailurePolicy, FailurePolicy, Job from dapr.clients.http.dapr_actor_http_client import DaprActorHttpClient from dapr.clients.http.dapr_invocation_http_client import DaprInvocationHttpClient from dapr.conf import settings -from google.protobuf.message import Message as GrpcMessage __all__ = [ 'DaprClient', @@ -37,10 +38,10 @@ ] from grpc.aio import ( # type: ignore - UnaryUnaryClientInterceptor, - UnaryStreamClientInterceptor, - StreamUnaryClientInterceptor, StreamStreamClientInterceptor, + StreamUnaryClientInterceptor, + UnaryStreamClientInterceptor, + UnaryUnaryClientInterceptor, ) diff --git a/dapr/aio/clients/grpc/_request.py b/dapr/aio/clients/grpc/_request.py index b3c3ce2d4..129c556f3 100644 --- a/dapr/aio/clients/grpc/_request.py +++ b/dapr/aio/clients/grpc/_request.py @@ -16,7 +16,7 @@ import io from typing import Union -from dapr.clients.grpc._crypto import EncryptOptions, DecryptOptions +from dapr.clients.grpc._crypto import DecryptOptions, EncryptOptions from dapr.clients.grpc._helpers import to_bytes from dapr.clients.grpc._request import DaprRequest from dapr.proto import api_v1, common_v1 diff 
--git a/dapr/aio/clients/grpc/_response.py b/dapr/aio/clients/grpc/_response.py index 480eb7769..5380ede6d 100644 --- a/dapr/aio/clients/grpc/_response.py +++ b/dapr/aio/clients/grpc/_response.py @@ -15,8 +15,8 @@ from typing import AsyncGenerator, Generic -from dapr.proto import api_v1 from dapr.clients.grpc._response import DaprResponse, TCryptoResponse +from dapr.proto import api_v1 class CryptoResponse(DaprResponse, Generic[TCryptoResponse]): @@ -83,9 +83,7 @@ async def read(self, size: int = -1) -> bytes: return data[:size] -class EncryptResponse(CryptoResponse[api_v1.EncryptResponse]): - ... +class EncryptResponse(CryptoResponse[api_v1.EncryptResponse]): ... -class DecryptResponse(CryptoResponse[api_v1.DecryptResponse]): - ... +class DecryptResponse(CryptoResponse[api_v1.DecryptResponse]): ... diff --git a/dapr/aio/clients/grpc/client.py b/dapr/aio/clients/grpc/client.py index 1b76dcb0f..028eaef51 100644 --- a/dapr/aio/clients/grpc/client.py +++ b/dapr/aio/clients/grpc/client.py @@ -14,96 +14,90 @@ """ import asyncio -import time -import socket import json +import socket +import time import uuid - from datetime import datetime +from typing import Any, Awaitable, Callable, Dict, List, Optional, Sequence, Text, Union from urllib.parse import urlencode - from warnings import warn -from typing import Callable, Dict, Optional, Text, Union, Sequence, List, Any, Awaitable -from typing_extensions import Self - -from google.protobuf.message import Message as GrpcMessage -from google.protobuf.empty_pb2 import Empty as GrpcEmpty -from google.protobuf.any_pb2 import Any as GrpcAny - import grpc.aio # type: ignore +from google.protobuf.any_pb2 import Any as GrpcAny +from google.protobuf.empty_pb2 import Empty as GrpcEmpty +from google.protobuf.message import Message as GrpcMessage from grpc.aio import ( # type: ignore - UnaryUnaryClientInterceptor, - UnaryStreamClientInterceptor, - StreamUnaryClientInterceptor, - StreamStreamClientInterceptor, AioRpcError, + 
StreamStreamClientInterceptor, + StreamUnaryClientInterceptor, + UnaryStreamClientInterceptor, + UnaryUnaryClientInterceptor, ) +from typing_extensions import Self -from dapr.aio.clients.grpc.subscription import Subscription -from dapr.clients.exceptions import DaprInternalError, DaprGrpcError -from dapr.clients.grpc._crypto import EncryptOptions, DecryptOptions -from dapr.clients.grpc._state import StateOptions, StateItem -from dapr.clients.grpc._helpers import getWorkflowRuntimeStatus -from dapr.clients.health import DaprHealth -from dapr.clients.retry import RetryPolicy -from dapr.common.pubsub.subscription import StreamInactiveError -from dapr.conf.helpers import GrpcEndpoint -from dapr.conf import settings -from dapr.proto import api_v1, api_service_v1, common_v1 -from dapr.proto.runtime.v1.dapr_pb2 import UnsubscribeConfigurationResponse -from dapr.version import __version__ - +from dapr.aio.clients.grpc._request import ( + DecryptRequestIterator, + EncryptRequestIterator, +) +from dapr.aio.clients.grpc._response import ( + DecryptResponse, + EncryptResponse, +) from dapr.aio.clients.grpc.interceptors import ( DaprClientInterceptorAsync, DaprClientTimeoutInterceptorAsync, ) +from dapr.aio.clients.grpc.subscription import Subscription +from dapr.clients.exceptions import DaprGrpcError, DaprInternalError +from dapr.clients.grpc import conversation +from dapr.clients.grpc._crypto import DecryptOptions, EncryptOptions from dapr.clients.grpc._helpers import ( MetadataTuple, - to_bytes, - validateNotNone, - validateNotBlankString, convert_dict_to_grpc_dict_of_any, convert_value_to_struct, + getWorkflowRuntimeStatus, + to_bytes, + validateNotBlankString, + validateNotNone, ) -from dapr.aio.clients.grpc._request import ( - EncryptRequestIterator, - DecryptRequestIterator, -) -from dapr.aio.clients.grpc._response import ( - EncryptResponse, - DecryptResponse, -) +from dapr.clients.grpc._jobs import Job from dapr.clients.grpc._request import ( - InvokeMethodRequest, 
BindingRequest, + InvokeMethodRequest, TransactionalStateOperation, ) -from dapr.clients.grpc import conversation - -from dapr.clients.grpc._jobs import Job from dapr.clients.grpc._response import ( BindingResponse, + BulkStateItem, + BulkStatesResponse, + ConfigurationResponse, + ConfigurationWatcher, DaprResponse, - GetSecretResponse, GetBulkSecretResponse, GetMetadataResponse, + GetSecretResponse, + GetWorkflowResponse, InvokeMethodResponse, - UnlockResponseStatus, - StateResponse, - BulkStatesResponse, - BulkStateItem, - ConfigurationResponse, QueryResponse, QueryResponseItem, RegisteredComponents, - ConfigurationWatcher, - TryLockResponse, - UnlockResponse, - GetWorkflowResponse, StartWorkflowResponse, + StateResponse, TopicEventResponse, + TryLockResponse, + UnlockResponse, + UnlockResponseStatus, ) +from dapr.clients.grpc._state import StateItem, StateOptions +from dapr.clients.health import DaprHealth +from dapr.clients.retry import RetryPolicy +from dapr.common.pubsub.subscription import StreamInactiveError +from dapr.conf import settings +from dapr.conf.helpers import GrpcEndpoint +from dapr.proto import api_service_v1, api_v1, common_v1 +from dapr.proto.runtime.v1.dapr_pb2 import UnsubscribeConfigurationResponse +from dapr.version import __version__ class DaprGrpcClientAsync: @@ -170,7 +164,7 @@ def __init__( if not address: address = settings.DAPR_GRPC_ENDPOINT or ( - f'{settings.DAPR_RUNTIME_HOST}:' f'{settings.DAPR_GRPC_PORT}' + f'{settings.DAPR_RUNTIME_HOST}:{settings.DAPR_GRPC_PORT}' ) try: diff --git a/dapr/aio/clients/grpc/interceptors.py b/dapr/aio/clients/grpc/interceptors.py index bf83cf56a..0444d5acb 100644 --- a/dapr/aio/clients/grpc/interceptors.py +++ b/dapr/aio/clients/grpc/interceptors.py @@ -16,7 +16,11 @@ from collections import namedtuple from typing import List, Tuple -from grpc.aio import UnaryUnaryClientInterceptor, StreamStreamClientInterceptor, ClientCallDetails # type: ignore +from grpc.aio import ( # type: ignore + 
ClientCallDetails, + StreamStreamClientInterceptor, + UnaryUnaryClientInterceptor, +) from dapr.conf import settings diff --git a/dapr/aio/clients/grpc/subscription.py b/dapr/aio/clients/grpc/subscription.py index e0e380ca6..fff74f166 100644 --- a/dapr/aio/clients/grpc/subscription.py +++ b/dapr/aio/clients/grpc/subscription.py @@ -1,13 +1,14 @@ import asyncio + from grpc import StatusCode from grpc.aio import AioRpcError from dapr.clients.grpc._response import TopicEventResponse from dapr.clients.health import DaprHealth from dapr.common.pubsub.subscription import ( + StreamCancelledError, StreamInactiveError, SubscriptionMessage, - StreamCancelledError, ) from dapr.proto import api_v1, appcallback_v1 diff --git a/dapr/clients/__init__.py b/dapr/clients/__init__.py index 78ad99eb4..5d92b56c7 100644 --- a/dapr/clients/__init__.py +++ b/dapr/clients/__init__.py @@ -16,16 +16,16 @@ from typing import Callable, Dict, List, Optional, Union from warnings import warn +from google.protobuf.message import Message as GrpcMessage + from dapr.clients.base import DaprActorClientBase -from dapr.clients.exceptions import DaprInternalError, ERROR_CODE_UNKNOWN -from dapr.clients.grpc.client import DaprGrpcClient, MetadataTuple, InvokeMethodResponse -from dapr.clients.grpc._jobs import Job, FailurePolicy, DropFailurePolicy, ConstantFailurePolicy +from dapr.clients.exceptions import ERROR_CODE_UNKNOWN, DaprInternalError +from dapr.clients.grpc._jobs import ConstantFailurePolicy, DropFailurePolicy, FailurePolicy, Job +from dapr.clients.grpc.client import DaprGrpcClient, InvokeMethodResponse, MetadataTuple from dapr.clients.http.dapr_actor_http_client import DaprActorHttpClient from dapr.clients.http.dapr_invocation_http_client import DaprInvocationHttpClient from dapr.clients.retry import RetryPolicy from dapr.conf import settings -from google.protobuf.message import Message as GrpcMessage - __all__ = [ 'DaprClient', @@ -41,10 +41,10 @@ from grpc import ( # type: ignore - 
UnaryUnaryClientInterceptor, - UnaryStreamClientInterceptor, - StreamUnaryClientInterceptor, StreamStreamClientInterceptor, + StreamUnaryClientInterceptor, + UnaryStreamClientInterceptor, + UnaryUnaryClientInterceptor, ) diff --git a/dapr/clients/base.py b/dapr/clients/base.py index d2b972245..da2bb5257 100644 --- a/dapr/clients/base.py +++ b/dapr/clients/base.py @@ -23,31 +23,28 @@ class DaprActorClientBase(ABC): @abstractmethod async def invoke_method( self, actor_type: str, actor_id: str, method: str, data: Optional[bytes] = None - ) -> bytes: - ... + ) -> bytes: ... @abstractmethod - async def save_state_transactionally(self, actor_type: str, actor_id: str, data: bytes) -> None: - ... + async def save_state_transactionally( + self, actor_type: str, actor_id: str, data: bytes + ) -> None: ... @abstractmethod - async def get_state(self, actor_type: str, actor_id: str, name: str) -> bytes: - ... + async def get_state(self, actor_type: str, actor_id: str, name: str) -> bytes: ... @abstractmethod async def register_reminder( self, actor_type: str, actor_id: str, name: str, data: bytes - ) -> None: - ... + ) -> None: ... @abstractmethod - async def unregister_reminder(self, actor_type: str, actor_id: str, name: str) -> None: - ... + async def unregister_reminder(self, actor_type: str, actor_id: str, name: str) -> None: ... @abstractmethod - async def register_timer(self, actor_type: str, actor_id: str, name: str, data: bytes) -> None: - ... + async def register_timer( + self, actor_type: str, actor_id: str, name: str, data: bytes + ) -> None: ... @abstractmethod - async def unregister_timer(self, actor_type: str, actor_id: str, name: str) -> None: - ... + async def unregister_timer(self, actor_type: str, actor_id: str, name: str) -> None: ... 
diff --git a/dapr/clients/exceptions.py b/dapr/clients/exceptions.py index 61ae0d8b6..f6358cb85 100644 --- a/dapr/clients/exceptions.py +++ b/dapr/clients/exceptions.py @@ -12,6 +12,7 @@ See the License for the specific language governing permissions and limitations under the License. """ + import base64 import json from typing import TYPE_CHECKING, Optional @@ -20,9 +21,9 @@ from dapr.serializers import Serializer from google.protobuf.json_format import MessageToDict +from google.rpc import error_details_pb2 # type: ignore from grpc import RpcError # type: ignore from grpc_status import rpc_status # type: ignore -from google.rpc import error_details_pb2 # type: ignore ERROR_CODE_UNKNOWN = 'UNKNOWN' ERROR_CODE_DOES_NOT_EXIST = 'ERR_DOES_NOT_EXIST' diff --git a/dapr/clients/grpc/_conversation_helpers.py b/dapr/clients/grpc/_conversation_helpers.py index 37bb81c18..9f57c9cc7 100644 --- a/dapr/clients/grpc/_conversation_helpers.py +++ b/dapr/clients/grpc/_conversation_helpers.py @@ -16,6 +16,7 @@ import inspect import random import string +import types from dataclasses import fields, is_dataclass from enum import Enum from typing import ( @@ -23,21 +24,19 @@ Callable, Dict, List, + Literal, Mapping, Optional, Sequence, Union, - Literal, + cast, get_args, get_origin, get_type_hints, - cast, ) from dapr.conf import settings -import types - # Make mypy happy. Runtime handle: real class on 3.10+, else None. # TODO: Python 3.9 is about to be end-of-life, so we can drop this at some point next year (2026) UnionType: Any = getattr(types, 'UnionType', None) @@ -190,14 +189,14 @@ def _json_primitive_type(v: Any) -> str: if settings.DAPR_CONVERSATION_TOOLS_LARGE_ENUM_BEHAVIOR == 'error': raise ValueError( f"Enum '{getattr(python_type, '__name__', str(python_type))}' has {count} members, " - f"exceeding DAPR_CONVERSATION_MAX_ENUM_ITEMS={settings.DAPR_CONVERSATION_TOOLS_MAX_ENUM_ITEMS}. 
" - f"Either reduce the enum size or set DAPR_CONVERSATION_LARGE_ENUM_BEHAVIOR=string to allow compact schema." + f'exceeding DAPR_CONVERSATION_MAX_ENUM_ITEMS={settings.DAPR_CONVERSATION_TOOLS_MAX_ENUM_ITEMS}. ' + f'Either reduce the enum size or set DAPR_CONVERSATION_LARGE_ENUM_BEHAVIOR=string to allow compact schema.' ) # Default behavior: compact schema as a string with helpful context and a few examples example_values = [item.value for item in members[:5]] if members else [] desc = ( - f"{getattr(python_type, '__name__', 'Enum')} (enum with {count} values). " - f"Provide a valid value. Schema compacted to avoid oversized enum listing." + f'{getattr(python_type, "__name__", "Enum")} (enum with {count} values). ' + f'Provide a valid value. Schema compacted to avoid oversized enum listing.' ) schema = {'type': 'string', 'description': desc} if example_values: @@ -696,8 +695,8 @@ def stringify_tool_output(value: Any) -> str: * dataclass -> asdict If JSON serialization still fails, fallback to str(value). If that fails, return ''. """ - import json as _json import base64 as _b64 + import json as _json from dataclasses import asdict as _asdict if isinstance(value, str): @@ -760,20 +759,16 @@ def _default(o: Any): # --- Errors ---- -class ToolError(RuntimeError): - ... +class ToolError(RuntimeError): ... -class ToolNotFoundError(ToolError): - ... +class ToolNotFoundError(ToolError): ... -class ToolExecutionError(ToolError): - ... +class ToolExecutionError(ToolError): ... -class ToolArgumentError(ToolError): - ... +class ToolArgumentError(ToolError): ... 
def _coerce_bool(value: Any) -> bool: @@ -962,7 +957,7 @@ def _coerce_and_validate(value: Any, expected_type: Any) -> Any: missing.append(pname) if missing: raise ValueError( - f"Missing required constructor arg(s) for {expected_type.__name__}: {', '.join(missing)}" + f'Missing required constructor arg(s) for {expected_type.__name__}: {", ".join(missing)}' ) try: return expected_type(**kwargs) @@ -978,7 +973,7 @@ def _coerce_and_validate(value: Any, expected_type: Any) -> Any: if expected_type is Any or isinstance(value, expected_type): return value raise ValueError( - f"Expected {getattr(expected_type, '__name__', str(expected_type))}, got {type(value).__name__}" + f'Expected {getattr(expected_type, "__name__", str(expected_type))}, got {type(value).__name__}' ) @@ -1014,12 +1009,12 @@ def bind_params_to_func(fn: Callable[..., Any], params: Params): and p.name not in bound.arguments ] if missing: - raise ToolArgumentError(f"Missing required parameter(s): {', '.join(missing)}") + raise ToolArgumentError(f'Missing required parameter(s): {", ".join(missing)}') # unexpected kwargs unless **kwargs present if not any(p.kind is inspect.Parameter.VAR_KEYWORD for p in sig.parameters.values()): extra = set(params) - set(sig.parameters) if extra: - raise ToolArgumentError(f"Unexpected parameter(s): {', '.join(sorted(extra))}") + raise ToolArgumentError(f'Unexpected parameter(s): {", ".join(sorted(extra))}') elif isinstance(params, Sequence): bound = sig.bind(*params) else: diff --git a/dapr/clients/grpc/_helpers.py b/dapr/clients/grpc/_helpers.py index c68b0f56a..8eb9a1e97 100644 --- a/dapr/clients/grpc/_helpers.py +++ b/dapr/clients/grpc/_helpers.py @@ -12,22 +12,22 @@ See the License for the specific language governing permissions and limitations under the License. 
""" -from enum import Enum -from typing import Any, Dict, List, Optional, Union, Tuple +from enum import Enum +from typing import Any, Dict, List, Optional, Tuple, Union +from google.protobuf import json_format from google.protobuf.any_pb2 import Any as GrpcAny from google.protobuf.message import Message as GrpcMessage +from google.protobuf.struct_pb2 import Struct from google.protobuf.wrappers_pb2 import ( BoolValue, - StringValue, + BytesValue, + DoubleValue, Int32Value, Int64Value, - DoubleValue, - BytesValue, + StringValue, ) -from google.protobuf.struct_pb2 import Struct -from google.protobuf import json_format MetadataDict = Dict[str, List[Union[bytes, str]]] MetadataTuple = Tuple[Tuple[str, Union[bytes, str]], ...] diff --git a/dapr/clients/grpc/_jobs.py b/dapr/clients/grpc/_jobs.py index 896c8db3c..5df9975f0 100644 --- a/dapr/clients/grpc/_jobs.py +++ b/dapr/clients/grpc/_jobs.py @@ -117,9 +117,10 @@ def _get_proto(self): Returns: api_v1.Job: The proto representation of this job. 
""" - from dapr.proto.runtime.v1 import dapr_pb2 as api_v1 from google.protobuf.any_pb2 import Any as GrpcAny + from dapr.proto.runtime.v1 import dapr_pb2 as api_v1 + # Build the job proto job_proto = api_v1.Job(name=self.name) diff --git a/dapr/clients/grpc/_response.py b/dapr/clients/grpc/_response.py index fff511ff7..6898bc42d 100644 --- a/dapr/clients/grpc/_response.py +++ b/dapr/clients/grpc/_response.py @@ -21,19 +21,19 @@ from datetime import datetime from enum import Enum from typing import ( + TYPE_CHECKING, Callable, Dict, + Generator, + Generic, List, + Mapping, + NamedTuple, Optional, - Text, - Union, Sequence, - TYPE_CHECKING, - NamedTuple, - Generator, + Text, TypeVar, - Generic, - Mapping, + Union, ) from google.protobuf.any_pb2 import Any as GrpcAny @@ -43,11 +43,11 @@ from dapr.clients.grpc._helpers import ( MetadataDict, MetadataTuple, + WorkflowRuntimeStatus, to_bytes, to_str, tuple_to_dict, unpack, - WorkflowRuntimeStatus, ) from dapr.proto import api_service_v1, api_v1, appcallback_v1, common_v1 @@ -707,9 +707,9 @@ def _read_subscribe_config( handler: Callable[[Text, ConfigurationResponse], None], ): try: - responses: List[ - api_v1.SubscribeConfigurationResponse - ] = stub.SubscribeConfigurationAlpha1(req) + responses: List[api_v1.SubscribeConfigurationResponse] = ( + stub.SubscribeConfigurationAlpha1(req) + ) isFirst = True for response in responses: if isFirst: @@ -719,7 +719,7 @@ def _read_subscribe_config( if len(response.items) > 0: handler(response.id, ConfigurationResponse(response.items)) except Exception: - print(f'{self.store_name} configuration watcher for keys ' f'{self.keys} stopped.') + print(f'{self.store_name} configuration watcher for keys {self.keys} stopped.') pass @@ -1065,9 +1065,7 @@ def read(self, size: int = -1) -> bytes: return data[:size] -class EncryptResponse(CryptoResponse[TCryptoResponse]): - ... +class EncryptResponse(CryptoResponse[TCryptoResponse]): ... 
-class DecryptResponse(CryptoResponse[TCryptoResponse]): - ... +class DecryptResponse(CryptoResponse[TCryptoResponse]): ... diff --git a/dapr/clients/grpc/_state.py b/dapr/clients/grpc/_state.py index 3dc266b22..e20df4293 100644 --- a/dapr/clients/grpc/_state.py +++ b/dapr/clients/grpc/_state.py @@ -1,7 +1,8 @@ from enum import Enum -from dapr.proto import common_v1 from typing import Dict, Optional, Union +from dapr.proto import common_v1 + class Consistency(Enum): """Represents the consistency mode for a Dapr State Api Call""" diff --git a/dapr/clients/grpc/client.py b/dapr/clients/grpc/client.py index 6c276dd3f..a0a886d06 100644 --- a/dapr/clients/grpc/client.py +++ b/dapr/clients/grpc/client.py @@ -12,89 +12,85 @@ See the License for the specific language governing permissions and limitations under the License. """ + +import json +import socket import threading import time -import socket -import json import uuid - +from datetime import datetime +from typing import Any, Callable, Dict, List, Optional, Sequence, Text, Union from urllib.parse import urlencode - from warnings import warn -from typing import Callable, Dict, Optional, Text, Union, Sequence, List, Any - -from typing_extensions import Self -from datetime import datetime -from google.protobuf.message import Message as GrpcMessage -from google.protobuf.empty_pb2 import Empty as GrpcEmpty -from google.protobuf.any_pb2 import Any as GrpcAny - import grpc # type: ignore +from google.protobuf.any_pb2 import Any as GrpcAny +from google.protobuf.empty_pb2 import Empty as GrpcEmpty +from google.protobuf.message import Message as GrpcMessage from grpc import ( # type: ignore - UnaryUnaryClientInterceptor, - UnaryStreamClientInterceptor, - StreamUnaryClientInterceptor, - StreamStreamClientInterceptor, RpcError, + StreamStreamClientInterceptor, + StreamUnaryClientInterceptor, + UnaryStreamClientInterceptor, + UnaryUnaryClientInterceptor, ) +from typing_extensions import Self -from dapr.clients.exceptions import 
DaprInternalError, DaprGrpcError -from dapr.clients.grpc._state import StateOptions, StateItem -from dapr.clients.grpc._crypto import EncryptOptions, DecryptOptions -from dapr.clients.grpc.subscription import Subscription, StreamInactiveError -from dapr.clients.grpc.interceptors import DaprClientInterceptor, DaprClientTimeoutInterceptor -from dapr.clients.health import DaprHealth -from dapr.clients.retry import RetryPolicy -from dapr.common.pubsub.subscription import StreamCancelledError -from dapr.conf import settings -from dapr.proto import api_v1, api_service_v1, common_v1 -from dapr.proto.runtime.v1.dapr_pb2 import UnsubscribeConfigurationResponse -from dapr.version import __version__ - +from dapr.clients.exceptions import DaprGrpcError, DaprInternalError +from dapr.clients.grpc import conversation +from dapr.clients.grpc._crypto import DecryptOptions, EncryptOptions from dapr.clients.grpc._helpers import ( - getWorkflowRuntimeStatus, MetadataTuple, - to_bytes, - validateNotNone, - validateNotBlankString, convert_dict_to_grpc_dict_of_any, convert_value_to_struct, + getWorkflowRuntimeStatus, + to_bytes, + validateNotBlankString, + validateNotNone, ) -from dapr.conf.helpers import GrpcEndpoint +from dapr.clients.grpc._jobs import Job from dapr.clients.grpc._request import ( - InvokeMethodRequest, BindingRequest, - TransactionalStateOperation, - EncryptRequestIterator, DecryptRequestIterator, + EncryptRequestIterator, + InvokeMethodRequest, + TransactionalStateOperation, ) -from dapr.clients.grpc import conversation -from dapr.clients.grpc._jobs import Job from dapr.clients.grpc._response import ( BindingResponse, + BulkStateItem, + BulkStatesResponse, + ConfigurationResponse, + ConfigurationWatcher, DaprResponse, - GetSecretResponse, + DecryptResponse, + EncryptResponse, GetBulkSecretResponse, GetMetadataResponse, + GetSecretResponse, + GetWorkflowResponse, InvokeMethodResponse, - UnlockResponseStatus, - StateResponse, - BulkStatesResponse, - BulkStateItem, - 
ConfigurationResponse, QueryResponse, QueryResponseItem, RegisteredComponents, - ConfigurationWatcher, - TryLockResponse, - UnlockResponse, - GetWorkflowResponse, StartWorkflowResponse, - EncryptResponse, - DecryptResponse, + StateResponse, TopicEventResponse, + TryLockResponse, + UnlockResponse, + UnlockResponseStatus, ) +from dapr.clients.grpc._state import StateItem, StateOptions +from dapr.clients.grpc.interceptors import DaprClientInterceptor, DaprClientTimeoutInterceptor +from dapr.clients.grpc.subscription import StreamInactiveError, Subscription +from dapr.clients.health import DaprHealth +from dapr.clients.retry import RetryPolicy +from dapr.common.pubsub.subscription import StreamCancelledError +from dapr.conf import settings +from dapr.conf.helpers import GrpcEndpoint +from dapr.proto import api_service_v1, api_v1, common_v1 +from dapr.proto.runtime.v1.dapr_pb2 import UnsubscribeConfigurationResponse +from dapr.version import __version__ class DaprGrpcClient: @@ -162,7 +158,7 @@ def __init__( if not address: address = settings.DAPR_GRPC_ENDPOINT or ( - f'{settings.DAPR_RUNTIME_HOST}:' f'{settings.DAPR_GRPC_PORT}' + f'{settings.DAPR_RUNTIME_HOST}:{settings.DAPR_GRPC_PORT}' ) try: diff --git a/dapr/clients/grpc/conversation.py b/dapr/clients/grpc/conversation.py index 1da02dac2..d11c41979 100644 --- a/dapr/clients/grpc/conversation.py +++ b/dapr/clients/grpc/conversation.py @@ -12,6 +12,7 @@ See the License for the specific language governing permissions and limitations under the License. """ + from __future__ import annotations import asyncio @@ -400,13 +401,11 @@ def convert_llm_response_to_conversation_input( class ToolBackend(Protocol): """Interface for executors that knows how to execute a tool call.""" - def invoke(self, spec: ConversationToolsFunction, params: Params) -> Any: - ... + def invoke(self, spec: ConversationToolsFunction, params: Params) -> Any: ... 
async def ainvoke( self, spec: ConversationToolsFunction, params: Params, *, timeout: Union[float, None] = None - ) -> Any: - ... + ) -> Any: ... @dataclass diff --git a/dapr/clients/grpc/interceptors.py b/dapr/clients/grpc/interceptors.py index 15bde1857..a574fb8c6 100644 --- a/dapr/clients/grpc/interceptors.py +++ b/dapr/clients/grpc/interceptors.py @@ -1,7 +1,11 @@ from collections import namedtuple from typing import List, Tuple -from grpc import UnaryUnaryClientInterceptor, ClientCallDetails, StreamStreamClientInterceptor # type: ignore +from grpc import ( # type: ignore + ClientCallDetails, + StreamStreamClientInterceptor, + UnaryUnaryClientInterceptor, +) from dapr.conf import settings diff --git a/dapr/clients/grpc/subscription.py b/dapr/clients/grpc/subscription.py index 6dcfcb4dd..73cc047ac 100644 --- a/dapr/clients/grpc/subscription.py +++ b/dapr/clients/grpc/subscription.py @@ -1,16 +1,17 @@ -from grpc import RpcError, StatusCode, Call # type: ignore +import queue +import threading +from typing import Optional + +from grpc import Call, RpcError, StatusCode # type: ignore from dapr.clients.grpc._response import TopicEventResponse from dapr.clients.health import DaprHealth from dapr.common.pubsub.subscription import ( + StreamCancelledError, StreamInactiveError, SubscriptionMessage, - StreamCancelledError, ) from dapr.proto import api_v1, appcallback_v1 -import queue -import threading -from typing import Optional class Subscription: diff --git a/dapr/clients/health.py b/dapr/clients/health.py index 37c42a875..8e1002292 100644 --- a/dapr/clients/health.py +++ b/dapr/clients/health.py @@ -12,12 +12,13 @@ See the License for the specific language governing permissions and limitations under the License. 
""" -import urllib.request -import urllib.error + import time +import urllib.error +import urllib.request from warnings import warn -from dapr.clients.http.conf import DAPR_API_TOKEN_HEADER, USER_AGENT_HEADER, DAPR_USER_AGENT +from dapr.clients.http.conf import DAPR_API_TOKEN_HEADER, DAPR_USER_AGENT, USER_AGENT_HEADER from dapr.clients.http.helpers import get_api_url from dapr.conf import settings diff --git a/dapr/clients/http/client.py b/dapr/clients/http/client.py index f6f95aa74..387a0d939 100644 --- a/dapr/clients/http/client.py +++ b/dapr/clients/http/client.py @@ -13,25 +13,25 @@ limitations under the License. """ -import aiohttp +from typing import TYPE_CHECKING, Callable, Dict, Mapping, Optional, Tuple, Union -from typing import Callable, Mapping, Dict, Optional, Union, Tuple, TYPE_CHECKING +import aiohttp from dapr.clients.health import DaprHealth from dapr.clients.http.conf import ( + CONTENT_TYPE_HEADER, DAPR_API_TOKEN_HEADER, - USER_AGENT_HEADER, DAPR_USER_AGENT, - CONTENT_TYPE_HEADER, + USER_AGENT_HEADER, ) from dapr.clients.retry import RetryPolicy if TYPE_CHECKING: from dapr.serializers import Serializer -from dapr.conf import settings from dapr.clients._constants import DEFAULT_JSON_CONTENT_TYPE from dapr.clients.exceptions import DaprHttpError, DaprInternalError +from dapr.conf import settings class DaprHttpClient: diff --git a/dapr/clients/http/dapr_actor_http_client.py b/dapr/clients/http/dapr_actor_http_client.py index 186fdbc1c..711153659 100644 --- a/dapr/clients/http/dapr_actor_http_client.py +++ b/dapr/clients/http/dapr_actor_http_client.py @@ -13,15 +13,15 @@ limitations under the License. 
""" -from typing import Callable, Dict, Optional, Union, TYPE_CHECKING +from typing import TYPE_CHECKING, Callable, Dict, Optional, Union from dapr.clients.http.helpers import get_api_url if TYPE_CHECKING: from dapr.serializers import Serializer -from dapr.clients.http.client import DaprHttpClient from dapr.clients.base import DaprActorClientBase +from dapr.clients.http.client import DaprHttpClient from dapr.clients.retry import RetryPolicy DAPR_REENTRANCY_ID_HEADER = 'Dapr-Reentrancy-Id' diff --git a/dapr/clients/http/dapr_invocation_http_client.py b/dapr/clients/http/dapr_invocation_http_client.py index df4e6d222..604c483c0 100644 --- a/dapr/clients/http/dapr_invocation_http_client.py +++ b/dapr/clients/http/dapr_invocation_http_client.py @@ -14,13 +14,13 @@ """ import asyncio - from typing import Callable, Dict, Optional, Union + from multidict import MultiDict -from dapr.clients.http.client import DaprHttpClient -from dapr.clients.grpc._helpers import MetadataTuple, GrpcMessage +from dapr.clients.grpc._helpers import GrpcMessage, MetadataTuple from dapr.clients.grpc._response import InvokeMethodResponse +from dapr.clients.http.client import DaprHttpClient from dapr.clients.http.conf import CONTENT_TYPE_HEADER from dapr.clients.http.helpers import get_api_url from dapr.clients.retry import RetryPolicy diff --git a/dapr/clients/retry.py b/dapr/clients/retry.py index 171c96fbd..e895e46f3 100644 --- a/dapr/clients/retry.py +++ b/dapr/clients/retry.py @@ -12,11 +12,12 @@ See the License for the specific language governing permissions and limitations under the License. 
""" + import asyncio -from typing import Optional, List, Callable +import time +from typing import Callable, List, Optional from grpc import RpcError, StatusCode # type: ignore -import time from dapr.conf import settings diff --git a/dapr/common/pubsub/subscription.py b/dapr/common/pubsub/subscription.py index 6f68e180d..eb22a48da 100644 --- a/dapr/common/pubsub/subscription.py +++ b/dapr/common/pubsub/subscription.py @@ -1,7 +1,9 @@ import json +from typing import Optional, Union + from google.protobuf.json_format import MessageToDict + from dapr.proto.runtime.v1.appcallback_pb2 import TopicEventRequest -from typing import Optional, Union class SubscriptionMessage: diff --git a/dapr/conf/helpers.py b/dapr/conf/helpers.py index ab1e494b2..d2d187628 100644 --- a/dapr/conf/helpers.py +++ b/dapr/conf/helpers.py @@ -1,5 +1,5 @@ +from urllib.parse import ParseResult, parse_qs, urlparse from warnings import warn -from urllib.parse import urlparse, parse_qs, ParseResult class URIParseConfig: @@ -174,7 +174,7 @@ def tls(self) -> bool: def _validate_path_and_query(self) -> None: if self._parsed_url.path: raise ValueError( - f'paths are not supported for gRPC endpoints:' f" '{self._parsed_url.path}'" + f"paths are not supported for gRPC endpoints: '{self._parsed_url.path}'" ) if self._parsed_url.query: query_dict = parse_qs(self._parsed_url.query) diff --git a/dapr/serializers/base.py b/dapr/serializers/base.py index 5ff1d9e8b..1aa7dfc89 100644 --- a/dapr/serializers/base.py +++ b/dapr/serializers/base.py @@ -23,8 +23,7 @@ class Serializer(ABC): @abstractmethod def serialize( self, obj: object, custom_hook: Optional[Callable[[object], bytes]] = None - ) -> bytes: - ... + ) -> bytes: ... @abstractmethod def deserialize( @@ -32,5 +31,4 @@ def deserialize( data: bytes, data_type: Optional[Type] = object, custom_hook: Optional[Callable[[bytes], object]] = None, - ) -> Any: - ... + ) -> Any: ... 
diff --git a/dapr/serializers/json.py b/dapr/serializers/json.py index 4e9665187..59e1c194b 100644 --- a/dapr/serializers/json.py +++ b/dapr/serializers/json.py @@ -14,18 +14,18 @@ """ import base64 -import re import datetime import json - +import re from typing import Any, Callable, Optional, Type + from dateutil import parser from dapr.serializers.base import Serializer from dapr.serializers.util import ( + DAPR_DURATION_PARSER, convert_from_dapr_duration, convert_to_dapr_duration, - DAPR_DURATION_PARSER, ) diff --git a/dev-requirements.txt b/dev-requirements.txt index 461d92391..828ef8aa4 100644 --- a/dev-requirements.txt +++ b/dev-requirements.txt @@ -1,7 +1,6 @@ mypy>=1.2.0 mypy-extensions>=0.4.3 mypy-protobuf>=2.9 -flake8>=3.7.9 tox>=4.3.0 coverage>=5.3 wheel @@ -13,7 +12,7 @@ pyOpenSSL>=23.2.0 # needed for type checking Flask>=1.1 # needed for auto fix -ruff===0.2.2 +ruff===0.14.1 # needed for dapr-ext-workflow durabletask-dapr >= 0.2.0a9 # needed for .env file loading in examples diff --git a/examples/configuration/configuration.py b/examples/configuration/configuration.py index caf676e6b..d579df7fa 100644 --- a/examples/configuration/configuration.py +++ b/examples/configuration/configuration.py @@ -4,8 +4,9 @@ import asyncio from time import sleep + from dapr.clients import DaprClient -from dapr.clients.grpc._response import ConfigurationWatcher, ConfigurationResponse +from dapr.clients.grpc._response import ConfigurationResponse, ConfigurationWatcher configuration: ConfigurationWatcher = ConfigurationWatcher() diff --git a/examples/conversation/real_llm_providers_example.py b/examples/conversation/real_llm_providers_example.py index c103007e0..2347f4b50 100644 --- a/examples/conversation/real_llm_providers_example.py +++ b/examples/conversation/real_llm_providers_example.py @@ -48,7 +48,7 @@ import sys import tempfile from pathlib import Path -from typing import TYPE_CHECKING, Any, Dict, List, Optional +from typing import Any, Dict, List, Optional import 
yaml @@ -163,8 +163,8 @@ def create_tool_from_typed_function_example() -> conversation.ConversationTools: This shows the most advanced approach: define a typed function and automatically generate the complete tool schema from type hints and docstrings. """ - from typing import Optional, List from enum import Enum + from typing import List, Optional conversation.unregister_tool('find_restaurants') @@ -205,8 +205,8 @@ def create_tool_from_tool_decorator_example() -> conversation.ConversationTools: This shows the most advanced approach: define a typed function and automatically generate the complete tool schema from type hints and docstrings. """ - from typing import Optional, List from enum import Enum + from typing import List, Optional conversation.unregister_tool('find_restaurants') @@ -417,7 +417,7 @@ def create_component_configs(self, selected_providers: Optional[List[str]] = Non def test_basic_conversation_alpha2(self, provider_id: str) -> None: """Test basic Alpha2 conversation with a provider.""" print( - f"\n💬 Testing Alpha2 basic conversation with {self.available_providers[provider_id]['display_name']}" + f'\n💬 Testing Alpha2 basic conversation with {self.available_providers[provider_id]["display_name"]}' ) try: @@ -453,7 +453,7 @@ def test_basic_conversation_alpha2(self, provider_id: str) -> None: def test_multi_turn_conversation_alpha2(self, provider_id: str) -> None: """Test multi-turn Alpha2 conversation with different message types.""" print( - f"\n🔄 Testing Alpha2 multi-turn conversation with {self.available_providers[provider_id]['display_name']}" + f'\n🔄 Testing Alpha2 multi-turn conversation with {self.available_providers[provider_id]["display_name"]}' ) try: @@ -484,7 +484,7 @@ def test_multi_turn_conversation_alpha2(self, provider_id: str) -> None: f'✅ Multi-turn conversation processed {len(response.outputs[0].choices)} message(s)' ) for i, choice in enumerate(response.outputs[0].choices): - print(f' Response {i+1}: 
{choice.message.content[:100]}...') + print(f' Response {i + 1}: {choice.message.content[:100]}...') else: print('❌ No multi-turn response received') @@ -494,7 +494,7 @@ def test_multi_turn_conversation_alpha2(self, provider_id: str) -> None: def test_tool_calling_alpha2(self, provider_id: str) -> None: """Test Alpha2 tool calling with a provider.""" print( - f"\n🔧 Testing Alpha2 tool calling with {self.available_providers[provider_id]['display_name']}" + f'\n🔧 Testing Alpha2 tool calling with {self.available_providers[provider_id]["display_name"]}' ) try: @@ -560,7 +560,7 @@ def test_tool_calling_alpha2(self, provider_id: str) -> None: def test_parameter_conversion(self, provider_id: str) -> None: """Test the new parameter conversion feature.""" print( - f"\n🔄 Testing parameter conversion with {self.available_providers[provider_id]['display_name']}" + f'\n🔄 Testing parameter conversion with {self.available_providers[provider_id]["display_name"]}' ) try: @@ -600,8 +600,8 @@ def test_parameter_conversion(self, provider_id: str) -> None: if response.outputs and response.outputs[0].choices: choice = response.outputs[0].choices[0] - print(f'✅ Parameter conversion successful!') - print(f'✅ Tool creation helpers working perfectly!') + print('✅ Parameter conversion successful!') + print('✅ Tool creation helpers working perfectly!') print(f' Response: {choice.message.content[:100]}...') else: print('❌ Parameter conversion test failed') @@ -612,7 +612,7 @@ def test_parameter_conversion(self, provider_id: str) -> None: def test_multi_turn_tool_calling_alpha2(self, provider_id: str) -> None: """Test multi-turn Alpha2 tool calling with proper context accumulation.""" print( - f"\n🔄🔧 Testing multi-turn tool calling with {self.available_providers[provider_id]['display_name']}" + f'\n🔄🔧 Testing multi-turn tool calling with {self.available_providers[provider_id]["display_name"]}' ) try: @@ -802,7 +802,7 @@ def test_multi_turn_tool_calling_alpha2(self, provider_id: str) -> None: 
def test_multi_turn_tool_calling_alpha2_tool_helpers(self, provider_id: str) -> None: """Test multi-turn Alpha2 tool calling with proper context accumulation using higher level abstractions.""" print( - f"\n🔄🔧 Testing multi-turn tool calling with {self.available_providers[provider_id]['display_name']}" + f'\n🔄🔧 Testing multi-turn tool calling with {self.available_providers[provider_id]["display_name"]}' ) # using decorator @@ -954,7 +954,7 @@ def append_response_to_history( def test_function_to_schema_approach(self, provider_id: str) -> None: """Test the best DevEx for most cases: function-to-JSON-schema automatic tool creation.""" print( - f"\n🎯 Testing function-to-schema approach with {self.available_providers[provider_id]['display_name']}" + f'\n🎯 Testing function-to-schema approach with {self.available_providers[provider_id]["display_name"]}' ) try: @@ -1002,7 +1002,7 @@ def test_function_to_schema_approach(self, provider_id: str) -> None: def test_tool_decorated_function_to_schema_approach(self, provider_id: str) -> None: """Test the best DevEx for most cases: function-to-JSON-schema automatic tool creation.""" print( - f"\n🎯 Testing decorator tool function-to-schema approach with {self.available_providers[provider_id]['display_name']}" + f'\n🎯 Testing decorator tool function-to-schema approach with {self.available_providers[provider_id]["display_name"]}' ) try: @@ -1052,7 +1052,7 @@ def test_tool_decorated_function_to_schema_approach(self, provider_id: str) -> N async def test_async_conversation_alpha2(self, provider_id: str) -> None: """Test async Alpha2 conversation with a provider.""" print( - f"\n⚡ Testing async Alpha2 conversation with {self.available_providers[provider_id]['display_name']}" + f'\n⚡ Testing async Alpha2 conversation with {self.available_providers[provider_id]["display_name"]}' ) try: @@ -1083,7 +1083,7 @@ async def test_async_conversation_alpha2(self, provider_id: str) -> None: async def test_async_tool_calling_alpha2(self, provider_id: 
str) -> None: """Test async Alpha2 tool calling with a provider.""" print( - f"\n🔧⚡ Testing async Alpha2 tool calling with {self.available_providers[provider_id]['display_name']}" + f'\n🔧⚡ Testing async Alpha2 tool calling with {self.available_providers[provider_id]["display_name"]}' ) try: @@ -1125,9 +1125,9 @@ async def test_async_tool_calling_alpha2(self, provider_id: str) -> None: def run_comprehensive_test(self, provider_id: str) -> None: """Run comprehensive Alpha2 tests for a provider.""" provider_name = self.available_providers[provider_id]['display_name'] - print(f"\n{'='*60}") + print(f'\n{"=" * 60}') print(f'🧪 Testing {provider_name} with Alpha2 API') - print(f"{'='*60}") + print(f'{"=" * 60}') # Alpha2 Sync tests self.test_basic_conversation_alpha2(provider_id) @@ -1149,7 +1149,7 @@ def run_comprehensive_test(self, provider_id: str) -> None: def test_basic_conversation_alpha1_legacy(self, provider_id: str) -> None: """Test legacy Alpha1 conversation for comparison.""" print( - f"\n📚 Testing legacy Alpha1 for comparison with {self.available_providers[provider_id]['display_name']}" + f'\n📚 Testing legacy Alpha1 for comparison with {self.available_providers[provider_id]["display_name"]}' ) try: @@ -1235,7 +1235,7 @@ def main(): if provider_id in tester.available_providers: tester.run_comprehensive_test(provider_id) - print(f"\n{'='*60}") + print(f'\n{"=" * 60}') print('🎉 All Alpha2 tests completed!') print('✅ Real LLM provider integration with Alpha2 API is working correctly') print('🔧 Features demonstrated:') @@ -1248,7 +1248,7 @@ def main(): print(' • Function-to-schema using @tool decorator for automatic tool generation') print(' • Both sync and async implementations') print(' • Backward compatibility with Alpha1') - print(f"{'='*60}") + print(f'{"=" * 60}') except KeyboardInterrupt: print('\n\n⏹️ Tests interrupted by user') diff --git a/examples/crypto/crypto-async.py b/examples/crypto/crypto-async.py index 0946e9bbb..2e49a8282 100644 --- 
a/examples/crypto/crypto-async.py +++ b/examples/crypto/crypto-async.py @@ -14,7 +14,7 @@ import asyncio from dapr.aio.clients import DaprClient -from dapr.clients.grpc._crypto import EncryptOptions, DecryptOptions +from dapr.clients.grpc._crypto import DecryptOptions, EncryptOptions # Name of the crypto component to use CRYPTO_COMPONENT_NAME = 'crypto-localstorage' diff --git a/examples/crypto/crypto.py b/examples/crypto/crypto.py index a282ba453..afe00f343 100644 --- a/examples/crypto/crypto.py +++ b/examples/crypto/crypto.py @@ -12,7 +12,7 @@ # ------------------------------------------------------------ from dapr.clients import DaprClient -from dapr.clients.grpc._crypto import EncryptOptions, DecryptOptions +from dapr.clients.grpc._crypto import DecryptOptions, EncryptOptions # Name of the crypto component to use CRYPTO_COMPONENT_NAME = 'crypto-localstorage' diff --git a/examples/demo_actor/demo_actor/demo_actor.py b/examples/demo_actor/demo_actor/demo_actor.py index 0d65d57d2..f9306d47c 100644 --- a/examples/demo_actor/demo_actor/demo_actor.py +++ b/examples/demo_actor/demo_actor/demo_actor.py @@ -11,10 +11,11 @@ # limitations under the License. 
import datetime +from typing import Optional -from dapr.actor import Actor, Remindable from demo_actor_interface import DemoActorInterface -from typing import Optional + +from dapr.actor import Actor, Remindable class DemoActor(Actor, DemoActorInterface, Remindable): diff --git a/examples/demo_actor/demo_actor/demo_actor_client.py b/examples/demo_actor/demo_actor/demo_actor_client.py index df0e9f737..ad0dfccb6 100644 --- a/examples/demo_actor/demo_actor/demo_actor_client.py +++ b/examples/demo_actor/demo_actor/demo_actor_client.py @@ -12,10 +12,11 @@ import asyncio -from dapr.actor import ActorProxy, ActorId, ActorProxyFactory -from dapr.clients.retry import RetryPolicy from demo_actor_interface import DemoActorInterface +from dapr.actor import ActorId, ActorProxy, ActorProxyFactory +from dapr.clients.retry import RetryPolicy + async def main(): # Create proxy client diff --git a/examples/demo_actor/demo_actor/demo_actor_flask.py b/examples/demo_actor/demo_actor/demo_actor_flask.py index 5715d23d8..de1245ad0 100644 --- a/examples/demo_actor/demo_actor/demo_actor_flask.py +++ b/examples/demo_actor/demo_actor/demo_actor_flask.py @@ -10,13 +10,13 @@ # See the License for the specific language governing permissions and # limitations under the License. 
+from demo_actor import DemoActor from flask import Flask, jsonify from flask_dapr.actor import DaprActor -from dapr.conf import settings -from dapr.actor.runtime.config import ActorRuntimeConfig, ActorTypeConfig, ActorReentrancyConfig +from dapr.actor.runtime.config import ActorReentrancyConfig, ActorRuntimeConfig, ActorTypeConfig from dapr.actor.runtime.runtime import ActorRuntime -from demo_actor import DemoActor +from dapr.conf import settings app = Flask(f'{DemoActor.__name__}Service') diff --git a/examples/demo_actor/demo_actor/demo_actor_interface.py b/examples/demo_actor/demo_actor/demo_actor_interface.py index be43c2ed6..51c3880c5 100644 --- a/examples/demo_actor/demo_actor/demo_actor_interface.py +++ b/examples/demo_actor/demo_actor/demo_actor_interface.py @@ -18,30 +18,24 @@ class DemoActorInterface(ActorInterface): @abstractmethod @actormethod(name='GetMyData') - async def get_my_data(self) -> object: - ... + async def get_my_data(self) -> object: ... @abstractmethod @actormethod(name='SetMyData') - async def set_my_data(self, data: object) -> None: - ... + async def set_my_data(self, data: object) -> None: ... @abstractmethod @actormethod(name='ClearMyData') - async def clear_my_data(self) -> None: - ... + async def clear_my_data(self) -> None: ... @abstractmethod @actormethod(name='SetReminder') - async def set_reminder(self, enabled: bool) -> None: - ... + async def set_reminder(self, enabled: bool) -> None: ... @abstractmethod @actormethod(name='SetTimer') - async def set_timer(self, enabled: bool) -> None: - ... + async def set_timer(self, enabled: bool) -> None: ... @abstractmethod @actormethod(name='GetReentrancyStatus') - async def get_reentrancy_status(self) -> bool: - ... + async def get_reentrancy_status(self) -> bool: ... 
diff --git a/examples/demo_actor/demo_actor/demo_actor_service.py b/examples/demo_actor/demo_actor/demo_actor_service.py index c53d06e25..c67b28c87 100644 --- a/examples/demo_actor/demo_actor/demo_actor_service.py +++ b/examples/demo_actor/demo_actor/demo_actor_service.py @@ -10,12 +10,12 @@ # See the License for the specific language governing permissions and # limitations under the License. -from fastapi import FastAPI # type: ignore -from dapr.actor.runtime.config import ActorRuntimeConfig, ActorTypeConfig, ActorReentrancyConfig -from dapr.actor.runtime.runtime import ActorRuntime from dapr.ext.fastapi import DaprActor # type: ignore from demo_actor import DemoActor +from fastapi import FastAPI # type: ignore +from dapr.actor.runtime.config import ActorReentrancyConfig, ActorRuntimeConfig, ActorTypeConfig +from dapr.actor.runtime.runtime import ActorRuntime app = FastAPI(title=f'{DemoActor.__name__}Service') diff --git a/examples/demo_workflow/app.py b/examples/demo_workflow/app.py index c89dcae6e..36ad5e843 100644 --- a/examples/demo_workflow/app.py +++ b/examples/demo_workflow/app.py @@ -12,15 +12,17 @@ from datetime import timedelta from time import sleep + from dapr.ext.workflow import ( - WorkflowRuntime, DaprWorkflowContext, - WorkflowActivityContext, RetryPolicy, + WorkflowActivityContext, + WorkflowRuntime, ) -from dapr.conf import Settings + from dapr.clients import DaprClient from dapr.clients.exceptions import DaprInternalError +from dapr.conf import Settings settings = Settings() @@ -192,8 +194,7 @@ def main(): instance_id=instance_id, workflow_component=workflow_component ) print( - f'Get response from {workflow_name} ' - f'after terminate call: {get_response.runtime_status}' + f'Get response from {workflow_name} after terminate call: {get_response.runtime_status}' ) child_get_response = d.get_workflow( instance_id=child_instance_id, workflow_component=workflow_component diff --git a/examples/distributed_lock/lock.py 
b/examples/distributed_lock/lock.py index d18d955f6..2f6364065 100644 --- a/examples/distributed_lock/lock.py +++ b/examples/distributed_lock/lock.py @@ -11,9 +11,10 @@ # limitations under the License. # ------------------------------------------------------------ -from dapr.clients import DaprClient import warnings +from dapr.clients import DaprClient + def main(): # Lock parameters diff --git a/examples/error_handling/error_handling.py b/examples/error_handling/error_handling.py index b75ebed97..ae42a88cd 100644 --- a/examples/error_handling/error_handling.py +++ b/examples/error_handling/error_handling.py @@ -1,7 +1,6 @@ from dapr.clients import DaprClient from dapr.clients.exceptions import DaprGrpcError - with DaprClient() as d: storeName = 'statestore' diff --git a/examples/grpc_proxying/helloworld_service_pb2.py b/examples/grpc_proxying/helloworld_service_pb2.py index e05049653..09e0a62ff 100644 --- a/examples/grpc_proxying/helloworld_service_pb2.py +++ b/examples/grpc_proxying/helloworld_service_pb2.py @@ -2,11 +2,13 @@ # Generated by the protocol buffer compiler. DO NOT EDIT! # source: helloworld_service.proto """Generated protocol buffer code.""" + from google.protobuf import descriptor as _descriptor from google.protobuf import descriptor_pool as _descriptor_pool from google.protobuf import message as _message from google.protobuf import reflection as _reflection from google.protobuf import symbol_database as _symbol_database + # @@protoc_insertion_point(imports) _sym_db = _symbol_database.Default() diff --git a/examples/grpc_proxying/helloworld_service_pb2_grpc.py b/examples/grpc_proxying/helloworld_service_pb2_grpc.py index b5403111b..14b2d40ee 100644 --- a/examples/grpc_proxying/helloworld_service_pb2_grpc.py +++ b/examples/grpc_proxying/helloworld_service_pb2_grpc.py @@ -1,7 +1,7 @@ # Generated by the gRPC Python protocol compiler plugin. DO NOT EDIT! 
"""Client and server classes corresponding to protobuf-defined services.""" -import grpc +import grpc import helloworld_service_pb2 as helloworld__service__pb2 diff --git a/examples/grpc_proxying/invoke-caller.py b/examples/grpc_proxying/invoke-caller.py index a07298448..eead69da3 100644 --- a/examples/grpc_proxying/invoke-caller.py +++ b/examples/grpc_proxying/invoke-caller.py @@ -3,8 +3,7 @@ import grpc import helloworld_service_pb2_grpc -from helloworld_service_pb2 import HelloRequest, HelloReply -import json, time +from helloworld_service_pb2 import HelloRequest async def run() -> None: diff --git a/examples/grpc_proxying/invoke-receiver.py b/examples/grpc_proxying/invoke-receiver.py index ec36b036c..0a140ff79 100644 --- a/examples/grpc_proxying/invoke-receiver.py +++ b/examples/grpc_proxying/invoke-receiver.py @@ -2,9 +2,8 @@ import grpc import helloworld_service_pb2_grpc -from helloworld_service_pb2 import HelloRequest, HelloReply from dapr.ext.grpc import App -import json +from helloworld_service_pb2 import HelloReply, HelloRequest class HelloWorldService(helloworld_service_pb2_grpc.HelloWorldService): diff --git a/examples/invoke-custom-data/invoke-caller.py b/examples/invoke-custom-data/invoke-caller.py index 27dabd4de..caeb84313 100644 --- a/examples/invoke-custom-data/invoke-caller.py +++ b/examples/invoke-custom-data/invoke-caller.py @@ -1,7 +1,7 @@ -from dapr.clients import DaprClient - import proto.response_pb2 as response_messages +from dapr.clients import DaprClient + with DaprClient() as d: # Create a typed message with content type and body resp = d.invoke_method( diff --git a/examples/invoke-custom-data/invoke-receiver.py b/examples/invoke-custom-data/invoke-receiver.py index e2ad83ce5..543d4fceb 100644 --- a/examples/invoke-custom-data/invoke-receiver.py +++ b/examples/invoke-custom-data/invoke-receiver.py @@ -1,6 +1,5 @@ -from dapr.ext.grpc import App, InvokeMethodRequest - import proto.response_pb2 as response_messages +from dapr.ext.grpc 
import App, InvokeMethodRequest app = App() diff --git a/examples/invoke-custom-data/proto/response_pb2.py b/examples/invoke-custom-data/proto/response_pb2.py index 373ce113f..ae019cbcb 100644 --- a/examples/invoke-custom-data/proto/response_pb2.py +++ b/examples/invoke-custom-data/proto/response_pb2.py @@ -2,11 +2,13 @@ # Generated by the protocol buffer compiler. DO NOT EDIT! # source: response.proto """Generated protocol buffer code.""" + from google.protobuf import descriptor as _descriptor from google.protobuf import descriptor_pool as _descriptor_pool from google.protobuf import message as _message from google.protobuf import reflection as _reflection from google.protobuf import symbol_database as _symbol_database + # @@protoc_insertion_point(imports) _sym_db = _symbol_database.Default() diff --git a/examples/invoke-custom-data/proto/response_pb2_grpc.py b/examples/invoke-custom-data/proto/response_pb2_grpc.py index 8a9393943..2dd5fd3f1 100644 --- a/examples/invoke-custom-data/proto/response_pb2_grpc.py +++ b/examples/invoke-custom-data/proto/response_pb2_grpc.py @@ -1,3 +1,2 @@ # Generated by the gRPC Python protocol compiler plugin. DO NOT EDIT! 
"""Client and server classes corresponding to protobuf-defined services.""" -import grpc diff --git a/examples/invoke-http/invoke-caller.py b/examples/invoke-http/invoke-caller.py index 380001592..115f20ec0 100644 --- a/examples/invoke-http/invoke-caller.py +++ b/examples/invoke-http/invoke-caller.py @@ -1,5 +1,4 @@ import json -import time from dapr.clients import DaprClient from dapr.clients.exceptions import DaprHttpError diff --git a/examples/invoke-http/invoke-receiver.py b/examples/invoke-http/invoke-receiver.py index 8609464af..928f86987 100644 --- a/examples/invoke-http/invoke-receiver.py +++ b/examples/invoke-http/invoke-receiver.py @@ -1,7 +1,8 @@ # from dapr.ext.grpc import App, InvokeMethodRequest, InvokeMethodResponse -from flask import Flask, request import json +from flask import Flask, request + app = Flask(__name__) diff --git a/examples/jobs/job_management.py b/examples/jobs/job_management.py index fd8c7af88..fb088d3b1 100644 --- a/examples/jobs/job_management.py +++ b/examples/jobs/job_management.py @@ -1,9 +1,10 @@ import json from datetime import datetime, timedelta -from dapr.clients import DaprClient, Job, DropFailurePolicy, ConstantFailurePolicy from google.protobuf.any_pb2 import Any as GrpcAny +from dapr.clients import ConstantFailurePolicy, DaprClient, DropFailurePolicy, Job + def create_job_data(message: str): """Helper function to create job payload data.""" @@ -20,7 +21,7 @@ def main(): try: client.schedule_job_alpha1(job=simple_job, overwrite=True) - print(f'✓ Simple job scheduled successfully', flush=True) + print('✓ Simple job scheduled successfully', flush=True) except Exception as e: print(f'✗ Failed to schedule simple job: {e}', flush=True) return @@ -37,7 +38,7 @@ def main(): try: client.schedule_job_alpha1(job=recurring_job, overwrite=True) - print(f'✓ Recurring job scheduled successfully', flush=True) + print('✓ Recurring job scheduled successfully', flush=True) except Exception as e: print(f'✗ Failed to schedule recurring 
job: {e}', flush=True) return @@ -53,7 +54,7 @@ def main(): try: client.schedule_job_alpha1(one_time_job) - print(f'✓ One-time job scheduled successfully', flush=True) + print('✓ One-time job scheduled successfully', flush=True) except Exception as e: print(f'✗ Failed to schedule one-time job: {e}', flush=True) return @@ -71,7 +72,7 @@ def main(): try: client.schedule_job_alpha1(job=drop_policy_job, overwrite=True) - print(f'✓ Job with drop failure policy scheduled successfully', flush=True) + print('✓ Job with drop failure policy scheduled successfully', flush=True) except Exception as e: print(f'✗ Failed to schedule job with drop policy: {e}', flush=True) @@ -85,7 +86,7 @@ def main(): try: client.schedule_job_alpha1(job=constant_policy_job, overwrite=True) - print(f'✓ Job with constant retry policy scheduled successfully', flush=True) + print('✓ Job with constant retry policy scheduled successfully', flush=True) except Exception as e: print(f'✗ Failed to schedule job with retry policy: {e}', flush=True) @@ -93,7 +94,7 @@ def main(): print('\n4. 
Getting job details...', flush=True) try: job = client.get_job_alpha1('recurring-hello-job') - print(f'✓ Retrieved job details:', flush=True) + print('✓ Retrieved job details:', flush=True) print(f' - Name: {job.name}', flush=True) print(f' - Schedule: {job.schedule}', flush=True) print(f' - TTL: {job.ttl}', flush=True) @@ -104,7 +105,7 @@ def main(): except Exception: print(f' - Data: ', flush=True) else: - print(f' - Data: None', flush=True) + print(' - Data: None', flush=True) except Exception as e: print(f'✗ Failed to get job details: {e}', flush=True) diff --git a/examples/jobs/job_processing.py b/examples/jobs/job_processing.py index 9f5733b79..6d384cbb0 100644 --- a/examples/jobs/job_processing.py +++ b/examples/jobs/job_processing.py @@ -14,9 +14,10 @@ import json import threading import time -from datetime import datetime, timedelta + from dapr.ext.grpc import App, JobEvent -from dapr.clients import DaprClient, Job, ConstantFailurePolicy + +from dapr.clients import ConstantFailurePolicy, DaprClient, Job try: from google.protobuf.any_pb2 import Any as GrpcAny diff --git a/examples/pubsub-simple/subscriber.py b/examples/pubsub-simple/subscriber.py index daa11bc89..4d36f2807 100644 --- a/examples/pubsub-simple/subscriber.py +++ b/examples/pubsub-simple/subscriber.py @@ -11,14 +11,15 @@ # limitations under the License. 
# ------------------------------------------------------------ +import json from time import sleep + from cloudevents.sdk.event import v1 from dapr.ext.grpc import App + from dapr.clients.grpc._response import TopicEventResponse from dapr.proto import appcallback_v1 -import json - app = App() should_retry = True # To control whether dapr should retry sending a message diff --git a/examples/pubsub-streaming-async/subscriber-handler.py b/examples/pubsub-streaming-async/subscriber-handler.py index 06a492af5..c9c8203c2 100644 --- a/examples/pubsub-streaming-async/subscriber-handler.py +++ b/examples/pubsub-streaming-async/subscriber-handler.py @@ -1,5 +1,6 @@ import argparse import asyncio + from dapr.aio.clients import DaprClient from dapr.clients.grpc._response import TopicEventResponse diff --git a/examples/state_store/state_store.py b/examples/state_store/state_store.py index 301c675bc..b783fcdc9 100644 --- a/examples/state_store/state_store.py +++ b/examples/state_store/state_store.py @@ -5,11 +5,9 @@ import grpc from dapr.clients import DaprClient - from dapr.clients.grpc._request import TransactionalStateOperation, TransactionOperationType from dapr.clients.grpc._state import StateItem - with DaprClient() as d: storeName = 'statestore' diff --git a/examples/state_store_query/state_store_query.py b/examples/state_store_query/state_store_query.py index f532f0eb0..26c64da3e 100644 --- a/examples/state_store_query/state_store_query.py +++ b/examples/state_store_query/state_store_query.py @@ -2,10 +2,9 @@ dapr run python3 state_store_query.py """ -from dapr.clients import DaprClient - import json +from dapr.clients import DaprClient with DaprClient() as d: store_name = 'statestore' diff --git a/examples/w3c-tracing/invoke-receiver.py b/examples/w3c-tracing/invoke-receiver.py index 92300aebe..bb49236a8 100644 --- a/examples/w3c-tracing/invoke-receiver.py +++ b/examples/w3c-tracing/invoke-receiver.py @@ -3,16 +3,16 @@ import typing from concurrent import futures +from 
dapr.ext.grpc import App, InvokeMethodRequest, InvokeMethodResponse from opentelemetry import trace from opentelemetry.exporter.zipkin.json import ZipkinExporter -from opentelemetry.instrumentation.grpc import GrpcInstrumentorServer, filters +from opentelemetry.instrumentation.grpc import GrpcInstrumentorServer from opentelemetry.sdk.trace import TracerProvider from opentelemetry.sdk.trace.export import BatchSpanProcessor from opentelemetry.sdk.trace.sampling import ALWAYS_ON from opentelemetry.trace.propagation.tracecontext import TraceContextTextMapPropagator from dapr.clients import DaprClient -from dapr.ext.grpc import App, InvokeMethodRequest, InvokeMethodResponse # Create a tracer provider tracer_provider = TracerProvider(sampler=ALWAYS_ON) diff --git a/examples/workflow/child_workflow.py b/examples/workflow/child_workflow.py index dccaa631b..57ab2fc3e 100644 --- a/examples/workflow/child_workflow.py +++ b/examples/workflow/child_workflow.py @@ -10,9 +10,10 @@ # See the License for the specific language governing permissions and # limitations under the License. -import dapr.ext.workflow as wf import time +import dapr.ext.workflow as wf + wfr = wf.WorkflowRuntime() diff --git a/examples/workflow/cross-app1.py b/examples/workflow/cross-app1.py index f84de662a..1ef7b48da 100644 --- a/examples/workflow/cross-app1.py +++ b/examples/workflow/cross-app1.py @@ -10,19 +10,19 @@ # See the License for the specific language governing permissions and # limitations under the License. 
+import time from datetime import timedelta -from durabletask.task import TaskFailedError import dapr.ext.workflow as wf -import time +from durabletask.task import TaskFailedError wfr = wf.WorkflowRuntime() @wfr.workflow def app1_workflow(ctx: wf.DaprWorkflowContext): - print(f'app1 - received workflow call', flush=True) - print(f'app1 - triggering app2 workflow', flush=True) + print('app1 - received workflow call', flush=True) + print('app1 - triggering app2 workflow', flush=True) try: retry_policy = wf.RetryPolicy( @@ -36,11 +36,11 @@ def app1_workflow(ctx: wf.DaprWorkflowContext): app_id='wfexample2', retry_policy=retry_policy, ) - print(f'app1 - received workflow result', flush=True) - except TaskFailedError as e: - print(f'app1 - received workflow error from app2', flush=True) + print('app1 - received workflow result', flush=True) + except TaskFailedError: + print('app1 - received workflow error from app2', flush=True) - print(f'app1 - returning workflow result', flush=True) + print('app1 - returning workflow result', flush=True) return 1 @@ -49,7 +49,7 @@ def app1_workflow(ctx: wf.DaprWorkflowContext): time.sleep(10) # wait for workflow runtime to start wf_client = wf.DaprWorkflowClient() - print(f'app1 - triggering app1 workflow', flush=True) + print('app1 - triggering app1 workflow', flush=True) instance_id = wf_client.schedule_new_workflow(workflow=app1_workflow) # Wait for the workflow to complete diff --git a/examples/workflow/cross-app2.py b/examples/workflow/cross-app2.py index 4cb30874c..2af65912c 100644 --- a/examples/workflow/cross-app2.py +++ b/examples/workflow/cross-app2.py @@ -10,37 +10,37 @@ # See the License for the specific language governing permissions and # limitations under the License. 
-from datetime import timedelta import os +import time +from datetime import timedelta -from durabletask.task import TaskFailedError import dapr.ext.workflow as wf -import time +from durabletask.task import TaskFailedError wfr = wf.WorkflowRuntime() @wfr.workflow def app2_workflow(ctx: wf.DaprWorkflowContext): - print(f'app2 - received workflow call', flush=True) + print('app2 - received workflow call', flush=True) if os.getenv('ERROR_WORKFLOW_MODE', 'false') == 'true': - print(f'app2 - raising error in workflow due to error mode being enabled', flush=True) + print('app2 - raising error in workflow due to error mode being enabled', flush=True) raise ValueError('Error in workflow due to error mode being enabled') - print(f'app2 - triggering app3 activity', flush=True) + print('app2 - triggering app3 activity', flush=True) try: retry_policy = wf.RetryPolicy( max_number_of_attempts=2, first_retry_interval=timedelta(milliseconds=100), max_retry_interval=timedelta(seconds=3), ) - result = yield ctx.call_activity( + yield ctx.call_activity( 'app3_activity', input=None, app_id='wfexample3', retry_policy=retry_policy ) - print(f'app2 - received activity result', flush=True) - except TaskFailedError as e: - print(f'app2 - received activity error from app3', flush=True) + print('app2 - received activity result', flush=True) + except TaskFailedError: + print('app2 - received activity error from app3', flush=True) - print(f'app2 - returning workflow result', flush=True) + print('app2 - returning workflow result', flush=True) return 2 diff --git a/examples/workflow/cross-app3.py b/examples/workflow/cross-app3.py index ecc945ca3..4bcc158a0 100644 --- a/examples/workflow/cross-app3.py +++ b/examples/workflow/cross-app3.py @@ -10,19 +10,20 @@ # See the License for the specific language governing permissions and # limitations under the License. 
import os -import dapr.ext.workflow as wf import time +import dapr.ext.workflow as wf + wfr = wf.WorkflowRuntime() @wfr.activity def app3_activity(ctx: wf.DaprWorkflowContext) -> int: - print(f'app3 - received activity call', flush=True) + print('app3 - received activity call', flush=True) if os.getenv('ERROR_ACTIVITY_MODE', 'false') == 'true': - print(f'app3 - raising error in activity due to error mode being enabled', flush=True) + print('app3 - raising error in activity due to error mode being enabled', flush=True) raise ValueError('Error in activity due to error mode being enabled') - print(f'app3 - returning activity result', flush=True) + print('app3 - returning activity result', flush=True) return 3 diff --git a/examples/workflow/fan_out_fan_in.py b/examples/workflow/fan_out_fan_in.py index e5799862f..f625ea287 100644 --- a/examples/workflow/fan_out_fan_in.py +++ b/examples/workflow/fan_out_fan_in.py @@ -12,6 +12,7 @@ import time from typing import List + import dapr.ext.workflow as wf wfr = wf.WorkflowRuntime() diff --git a/examples/workflow/human_approval.py b/examples/workflow/human_approval.py index 6a8a725d7..e12bf5b5e 100644 --- a/examples/workflow/human_approval.py +++ b/examples/workflow/human_approval.py @@ -11,13 +11,14 @@ # limitations under the License. import threading +import time from dataclasses import asdict, dataclass from datetime import timedelta -import time -from dapr.clients import DaprClient import dapr.ext.workflow as wf +from dapr.clients import DaprClient + wfr = wf.WorkflowRuntime() diff --git a/examples/workflow/monitor.py b/examples/workflow/monitor.py index 6cf575cfe..d4f534df5 100644 --- a/examples/workflow/monitor.py +++ b/examples/workflow/monitor.py @@ -10,10 +10,11 @@ # See the License for the specific language governing permissions and # limitations under the License. 
+import random from dataclasses import dataclass from datetime import timedelta -import random from time import sleep + import dapr.ext.workflow as wf wfr = wf.WorkflowRuntime() diff --git a/examples/workflow/simple.py b/examples/workflow/simple.py index 76f21eba4..dc0ea0b6a 100644 --- a/examples/workflow/simple.py +++ b/examples/workflow/simple.py @@ -11,16 +11,18 @@ # limitations under the License. from datetime import timedelta from time import sleep + from dapr.ext.workflow import ( - WorkflowRuntime, + DaprWorkflowClient, DaprWorkflowContext, - WorkflowActivityContext, RetryPolicy, - DaprWorkflowClient, + WorkflowActivityContext, + WorkflowRuntime, when_any, ) -from dapr.conf import Settings + from dapr.clients.exceptions import DaprInternalError +from dapr.conf import Settings settings = Settings() diff --git a/examples/workflow/task_chaining.py b/examples/workflow/task_chaining.py index 074cadcd2..8a2058e1c 100644 --- a/examples/workflow/task_chaining.py +++ b/examples/workflow/task_chaining.py @@ -14,7 +14,6 @@ import dapr.ext.workflow as wf - wfr = wf.WorkflowRuntime() diff --git a/ext/dapr-ext-fastapi/dapr/ext/fastapi/__init__.py b/ext/dapr-ext-fastapi/dapr/ext/fastapi/__init__.py index 942603078..e43df65c9 100644 --- a/ext/dapr-ext-fastapi/dapr/ext/fastapi/__init__.py +++ b/ext/dapr-ext-fastapi/dapr/ext/fastapi/__init__.py @@ -16,5 +16,4 @@ from .actor import DaprActor from .app import DaprApp - __all__ = ['DaprActor', 'DaprApp'] diff --git a/ext/dapr-ext-fastapi/dapr/ext/fastapi/actor.py b/ext/dapr-ext-fastapi/dapr/ext/fastapi/actor.py index 93b7860e1..4b3990da4 100644 --- a/ext/dapr-ext-fastapi/dapr/ext/fastapi/actor.py +++ b/ext/dapr-ext-fastapi/dapr/ext/fastapi/actor.py @@ -13,12 +13,12 @@ limitations under the License. 
""" -from typing import Any, Optional, Type, List +from typing import Any, List, Optional, Type from dapr.actor import Actor, ActorRuntime from dapr.clients.exceptions import ERROR_CODE_UNKNOWN, DaprInternalError from dapr.serializers import DefaultJSONSerializer -from fastapi import FastAPI, APIRouter, Request, Response, status # type: ignore +from fastapi import APIRouter, FastAPI, Request, Response, status # type: ignore from fastapi.logger import logger from fastapi.responses import JSONResponse diff --git a/ext/dapr-ext-fastapi/dapr/ext/fastapi/app.py b/ext/dapr-ext-fastapi/dapr/ext/fastapi/app.py index d926fac5c..6bede5234 100644 --- a/ext/dapr-ext-fastapi/dapr/ext/fastapi/app.py +++ b/ext/dapr-ext-fastapi/dapr/ext/fastapi/app.py @@ -13,6 +13,7 @@ """ from typing import Dict, List, Optional + from fastapi import FastAPI # type: ignore diff --git a/ext/dapr-ext-fastapi/tests/test_app.py b/ext/dapr-ext-fastapi/tests/test_app.py index 831d55ebb..619697994 100644 --- a/ext/dapr-ext-fastapi/tests/test_app.py +++ b/ext/dapr-ext-fastapi/tests/test_app.py @@ -1,11 +1,10 @@ import unittest +from dapr.ext.fastapi import DaprApp from fastapi import FastAPI from fastapi.testclient import TestClient from pydantic import BaseModel -from dapr.ext.fastapi import DaprApp - class Message(BaseModel): body: str diff --git a/ext/dapr-ext-fastapi/tests/test_dapractor.py b/ext/dapr-ext-fastapi/tests/test_dapractor.py index ee863d726..71408c88c 100644 --- a/ext/dapr-ext-fastapi/tests/test_dapractor.py +++ b/ext/dapr-ext-fastapi/tests/test_dapractor.py @@ -16,9 +16,8 @@ import json import unittest -from fastapi import FastAPI - from dapr.ext.fastapi.actor import DaprActor, _wrap_response +from fastapi import FastAPI class DaprActorTest(unittest.TestCase): diff --git a/ext/dapr-ext-grpc/dapr/ext/grpc/__init__.py b/ext/dapr-ext-grpc/dapr/ext/grpc/__init__.py index 7d73b4a48..5324c6175 100644 --- a/ext/dapr-ext-grpc/dapr/ext/grpc/__init__.py +++ 
b/ext/dapr-ext-grpc/dapr/ext/grpc/__init__.py @@ -13,12 +13,11 @@ limitations under the License. """ -from dapr.clients.grpc._request import InvokeMethodRequest, BindingRequest, JobEvent -from dapr.clients.grpc._response import InvokeMethodResponse, TopicEventResponse -from dapr.clients.grpc._jobs import Job, FailurePolicy, DropFailurePolicy, ConstantFailurePolicy - from dapr.ext.grpc.app import App, Rule # type:ignore +from dapr.clients.grpc._jobs import ConstantFailurePolicy, DropFailurePolicy, FailurePolicy, Job +from dapr.clients.grpc._request import BindingRequest, InvokeMethodRequest, JobEvent +from dapr.clients.grpc._response import InvokeMethodResponse, TopicEventResponse __all__ = [ 'App', diff --git a/ext/dapr-ext-grpc/dapr/ext/grpc/_health_servicer.py b/ext/dapr-ext-grpc/dapr/ext/grpc/_health_servicer.py index 029dff745..f6d782da1 100644 --- a/ext/dapr-ext-grpc/dapr/ext/grpc/_health_servicer.py +++ b/ext/dapr-ext-grpc/dapr/ext/grpc/_health_servicer.py @@ -1,6 +1,6 @@ -import grpc from typing import Callable, Optional +import grpc from dapr.proto import appcallback_service_v1 from dapr.proto.runtime.v1.appcallback_pb2 import HealthCheckResponse diff --git a/ext/dapr-ext-grpc/dapr/ext/grpc/_servicer.py b/ext/dapr-ext-grpc/dapr/ext/grpc/_servicer.py index 996267fdd..8de632f97 100644 --- a/ext/dapr-ext-grpc/dapr/ext/grpc/_servicer.py +++ b/ext/dapr-ext-grpc/dapr/ext/grpc/_servicer.py @@ -12,25 +12,25 @@ See the License for the specific language governing permissions and limitations under the License. 
""" -import grpc -from cloudevents.sdk.event import v1 # type: ignore from typing import Callable, Dict, List, Optional, Tuple, Union +from cloudevents.sdk.event import v1 # type: ignore from google.protobuf import empty_pb2 from google.protobuf.message import Message as GrpcMessage from google.protobuf.struct_pb2 import Struct -from dapr.proto import appcallback_service_v1, common_v1, appcallback_v1 +import grpc +from dapr.clients._constants import DEFAULT_JSON_CONTENT_TYPE +from dapr.clients.grpc._request import BindingRequest, InvokeMethodRequest, JobEvent +from dapr.clients.grpc._response import InvokeMethodResponse, TopicEventResponse +from dapr.proto import appcallback_service_v1, appcallback_v1, common_v1 +from dapr.proto.common.v1.common_pb2 import InvokeRequest from dapr.proto.runtime.v1.appcallback_pb2 import ( - TopicEventRequest, BindingEventRequest, JobEventRequest, + TopicEventRequest, ) -from dapr.proto.common.v1.common_pb2 import InvokeRequest -from dapr.clients._constants import DEFAULT_JSON_CONTENT_TYPE -from dapr.clients.grpc._request import InvokeMethodRequest, BindingRequest, JobEvent -from dapr.clients.grpc._response import InvokeMethodResponse, TopicEventResponse InvokeMethodCallable = Callable[[InvokeMethodRequest], Union[str, bytes, InvokeMethodResponse]] TopicSubscribeCallable = Callable[[v1.Event], Optional[TopicEventResponse]] diff --git a/ext/dapr-ext-grpc/dapr/ext/grpc/app.py b/ext/dapr-ext-grpc/dapr/ext/grpc/app.py index 9f9ac8472..58e0cdf29 100644 --- a/ext/dapr-ext-grpc/dapr/ext/grpc/app.py +++ b/ext/dapr-ext-grpc/dapr/ext/grpc/app.py @@ -13,14 +13,14 @@ limitations under the License. 
""" -import grpc - from concurrent import futures from typing import Dict, Optional -from dapr.conf import settings -from dapr.ext.grpc._servicer import _CallbackServicer, Rule # type: ignore from dapr.ext.grpc._health_servicer import _HealthCheckServicer # type: ignore +from dapr.ext.grpc._servicer import Rule, _CallbackServicer # type: ignore + +import grpc +from dapr.conf import settings from dapr.proto import appcallback_service_v1 diff --git a/ext/dapr-ext-grpc/tests/test_app.py b/ext/dapr-ext-grpc/tests/test_app.py index 2a33dd668..315d9e18b 100644 --- a/ext/dapr-ext-grpc/tests/test_app.py +++ b/ext/dapr-ext-grpc/tests/test_app.py @@ -16,7 +16,7 @@ import unittest from cloudevents.sdk.event import v1 -from dapr.ext.grpc import App, Rule, InvokeMethodRequest, BindingRequest +from dapr.ext.grpc import App, BindingRequest, InvokeMethodRequest, Rule class AppTests(unittest.TestCase): diff --git a/ext/dapr-ext-grpc/tests/test_servicier.py b/ext/dapr-ext-grpc/tests/test_servicier.py index 2447eea3c..325d9b6d6 100644 --- a/ext/dapr-ext-grpc/tests/test_servicier.py +++ b/ext/dapr-ext-grpc/tests/test_servicier.py @@ -14,16 +14,15 @@ """ import unittest - from unittest.mock import MagicMock, Mock -from dapr.clients.grpc._request import InvokeMethodRequest -from dapr.clients.grpc._response import InvokeMethodResponse, TopicEventResponse from dapr.ext.grpc._servicer import _CallbackServicer -from dapr.proto import common_v1, appcallback_v1 - from google.protobuf.any_pb2 import Any as GrpcAny +from dapr.clients.grpc._request import InvokeMethodRequest +from dapr.clients.grpc._response import InvokeMethodResponse, TopicEventResponse +from dapr.proto import appcallback_v1, common_v1 + class OnInvokeTests(unittest.TestCase): def setUp(self): diff --git a/ext/dapr-ext-workflow/dapr/ext/workflow/__init__.py b/ext/dapr-ext-workflow/dapr/ext/workflow/__init__.py index f78615112..dd2d45b75 100644 --- a/ext/dapr-ext-workflow/dapr/ext/workflow/__init__.py +++ 
b/ext/dapr-ext-workflow/dapr/ext/workflow/__init__.py @@ -14,12 +14,12 @@ """ # Import your main classes here -from dapr.ext.workflow.workflow_runtime import WorkflowRuntime, alternate_name from dapr.ext.workflow.dapr_workflow_client import DaprWorkflowClient from dapr.ext.workflow.dapr_workflow_context import DaprWorkflowContext, when_all, when_any +from dapr.ext.workflow.retry_policy import RetryPolicy from dapr.ext.workflow.workflow_activity_context import WorkflowActivityContext +from dapr.ext.workflow.workflow_runtime import WorkflowRuntime, alternate_name from dapr.ext.workflow.workflow_state import WorkflowState, WorkflowStatus -from dapr.ext.workflow.retry_policy import RetryPolicy __all__ = [ 'WorkflowRuntime', diff --git a/ext/dapr-ext-workflow/dapr/ext/workflow/dapr_workflow_client.py b/ext/dapr-ext-workflow/dapr/ext/workflow/dapr_workflow_client.py index cc384503a..461bfd43a 100644 --- a/ext/dapr-ext-workflow/dapr/ext/workflow/dapr_workflow_client.py +++ b/ext/dapr-ext-workflow/dapr/ext/workflow/dapr_workflow_client.py @@ -14,23 +14,22 @@ """ from __future__ import annotations + from datetime import datetime from typing import Any, Optional, TypeVar - -from durabletask import client import durabletask.internal.orchestrator_service_pb2 as pb - -from dapr.ext.workflow.workflow_state import WorkflowState -from dapr.ext.workflow.workflow_context import Workflow +from dapr.ext.workflow.logger import Logger, LoggerOptions from dapr.ext.workflow.util import getAddress +from dapr.ext.workflow.workflow_context import Workflow +from dapr.ext.workflow.workflow_state import WorkflowState +from durabletask import client from grpc import RpcError from dapr.clients import DaprInternalError from dapr.clients.http.client import DAPR_API_TOKEN_HEADER from dapr.conf import settings from dapr.conf.helpers import GrpcEndpoint -from dapr.ext.workflow.logger import LoggerOptions, Logger T = TypeVar('T') TInput = TypeVar('TInput') diff --git 
a/ext/dapr-ext-workflow/dapr/ext/workflow/dapr_workflow_context.py b/ext/dapr-ext-workflow/dapr/ext/workflow/dapr_workflow_context.py index 476ab765f..714def3f2 100644 --- a/ext/dapr-ext-workflow/dapr/ext/workflow/dapr_workflow_context.py +++ b/ext/dapr-ext-workflow/dapr/ext/workflow/dapr_workflow_context.py @@ -13,15 +13,14 @@ limitations under the License. """ -from typing import Any, Callable, List, Optional, TypeVar, Union from datetime import datetime, timedelta +from typing import Any, Callable, List, Optional, TypeVar, Union -from durabletask import task - -from dapr.ext.workflow.workflow_context import WorkflowContext, Workflow -from dapr.ext.workflow.workflow_activity_context import WorkflowActivityContext -from dapr.ext.workflow.logger import LoggerOptions, Logger +from dapr.ext.workflow.logger import Logger, LoggerOptions from dapr.ext.workflow.retry_policy import RetryPolicy +from dapr.ext.workflow.workflow_activity_context import WorkflowActivityContext +from dapr.ext.workflow.workflow_context import Workflow, WorkflowContext +from durabletask import task T = TypeVar('T') TInput = TypeVar('TInput') diff --git a/ext/dapr-ext-workflow/dapr/ext/workflow/logger/__init__.py b/ext/dapr-ext-workflow/dapr/ext/workflow/logger/__init__.py index 5583bde7e..b63a763bd 100644 --- a/ext/dapr-ext-workflow/dapr/ext/workflow/logger/__init__.py +++ b/ext/dapr-ext-workflow/dapr/ext/workflow/logger/__init__.py @@ -1,4 +1,4 @@ -from dapr.ext.workflow.logger.options import LoggerOptions from dapr.ext.workflow.logger.logger import Logger +from dapr.ext.workflow.logger.options import LoggerOptions __all__ = ['LoggerOptions', 'Logger'] diff --git a/ext/dapr-ext-workflow/dapr/ext/workflow/logger/logger.py b/ext/dapr-ext-workflow/dapr/ext/workflow/logger/logger.py index 6b0f3fec4..b93e7074f 100644 --- a/ext/dapr-ext-workflow/dapr/ext/workflow/logger/logger.py +++ b/ext/dapr-ext-workflow/dapr/ext/workflow/logger/logger.py @@ -1,5 +1,6 @@ import logging from typing import Union + 
from dapr.ext.workflow.logger.options import LoggerOptions diff --git a/ext/dapr-ext-workflow/dapr/ext/workflow/logger/options.py b/ext/dapr-ext-workflow/dapr/ext/workflow/logger/options.py index 0be44c52b..15cee8cc3 100644 --- a/ext/dapr-ext-workflow/dapr/ext/workflow/logger/options.py +++ b/ext/dapr-ext-workflow/dapr/ext/workflow/logger/options.py @@ -13,8 +13,8 @@ limitations under the License. """ -from typing import Union import logging +from typing import Union class LoggerOptions: diff --git a/ext/dapr-ext-workflow/dapr/ext/workflow/retry_policy.py b/ext/dapr-ext-workflow/dapr/ext/workflow/retry_policy.py index af1f5ea9e..aa12f479d 100644 --- a/ext/dapr-ext-workflow/dapr/ext/workflow/retry_policy.py +++ b/ext/dapr-ext-workflow/dapr/ext/workflow/retry_policy.py @@ -13,8 +13,8 @@ limitations under the License. """ -from typing import Optional, TypeVar from datetime import timedelta +from typing import Optional, TypeVar from durabletask import task diff --git a/ext/dapr-ext-workflow/dapr/ext/workflow/util.py b/ext/dapr-ext-workflow/dapr/ext/workflow/util.py index 648bc973d..3199e2558 100644 --- a/ext/dapr-ext-workflow/dapr/ext/workflow/util.py +++ b/ext/dapr-ext-workflow/dapr/ext/workflow/util.py @@ -21,7 +21,7 @@ def getAddress(host: Optional[str] = None, port: Optional[str] = None) -> str: if not host and not port: address = settings.DAPR_GRPC_ENDPOINT or ( - f'{settings.DAPR_RUNTIME_HOST}:' f'{settings.DAPR_GRPC_PORT}' + f'{settings.DAPR_RUNTIME_HOST}:{settings.DAPR_GRPC_PORT}' ) else: host = host or settings.DAPR_RUNTIME_HOST diff --git a/ext/dapr-ext-workflow/dapr/ext/workflow/workflow_activity_context.py b/ext/dapr-ext-workflow/dapr/ext/workflow/workflow_activity_context.py index f460e8013..331ad6c2c 100644 --- a/ext/dapr-ext-workflow/dapr/ext/workflow/workflow_activity_context.py +++ b/ext/dapr-ext-workflow/dapr/ext/workflow/workflow_activity_context.py @@ -14,6 +14,7 @@ """ from __future__ import annotations + from typing import Callable, TypeVar from 
durabletask import task diff --git a/ext/dapr-ext-workflow/dapr/ext/workflow/workflow_context.py b/ext/dapr-ext-workflow/dapr/ext/workflow/workflow_context.py index d6e6ba072..8453e16ef 100644 --- a/ext/dapr-ext-workflow/dapr/ext/workflow/workflow_context.py +++ b/ext/dapr-ext-workflow/dapr/ext/workflow/workflow_context.py @@ -14,13 +14,13 @@ """ from __future__ import annotations + from abc import ABC, abstractmethod from datetime import datetime, timedelta from typing import Any, Callable, Generator, Optional, TypeVar, Union -from durabletask import task - from dapr.ext.workflow.workflow_activity_context import Activity +from durabletask import task T = TypeVar('T') TInput = TypeVar('TInput') diff --git a/ext/dapr-ext-workflow/dapr/ext/workflow/workflow_runtime.py b/ext/dapr-ext-workflow/dapr/ext/workflow/workflow_runtime.py index 9f4be6222..593e55c68 100644 --- a/ext/dapr-ext-workflow/dapr/ext/workflow/workflow_runtime.py +++ b/ext/dapr-ext-workflow/dapr/ext/workflow/workflow_runtime.py @@ -15,21 +15,20 @@ import inspect from functools import wraps -from typing import Optional, TypeVar, Union, Sequence -import grpc - -from durabletask import worker, task +from typing import Optional, Sequence, TypeVar, Union -from dapr.ext.workflow.workflow_context import Workflow +import grpc from dapr.ext.workflow.dapr_workflow_context import DaprWorkflowContext -from dapr.ext.workflow.workflow_activity_context import Activity, WorkflowActivityContext +from dapr.ext.workflow.logger import Logger, LoggerOptions from dapr.ext.workflow.util import getAddress +from dapr.ext.workflow.workflow_activity_context import Activity, WorkflowActivityContext +from dapr.ext.workflow.workflow_context import Workflow +from durabletask import task, worker from dapr.clients import DaprInternalError from dapr.clients.http.client import DAPR_API_TOKEN_HEADER from dapr.conf import settings from dapr.conf.helpers import GrpcEndpoint -from dapr.ext.workflow.logger import LoggerOptions, Logger T = 
TypeVar('T') TInput = TypeVar('TInput') diff --git a/ext/dapr-ext-workflow/dapr/ext/workflow/workflow_state.py b/ext/dapr-ext-workflow/dapr/ext/workflow/workflow_state.py index 10847fc54..af1d7e735 100644 --- a/ext/dapr-ext-workflow/dapr/ext/workflow/workflow_state.py +++ b/ext/dapr-ext-workflow/dapr/ext/workflow/workflow_state.py @@ -13,8 +13,8 @@ limitations under the License. """ -from enum import Enum import json +from enum import Enum from durabletask import client diff --git a/ext/dapr-ext-workflow/tests/test_dapr_workflow_context.py b/ext/dapr-ext-workflow/tests/test_dapr_workflow_context.py index 3ae5fdaf5..32b148224 100644 --- a/ext/dapr-ext-workflow/tests/test_dapr_workflow_context.py +++ b/ext/dapr-ext-workflow/tests/test_dapr_workflow_context.py @@ -13,9 +13,10 @@ limitations under the License. """ +import unittest from datetime import datetime from unittest import mock -import unittest + from dapr.ext.workflow.dapr_workflow_context import DaprWorkflowContext from dapr.ext.workflow.workflow_activity_context import WorkflowActivityContext from durabletask import worker diff --git a/ext/dapr-ext-workflow/tests/test_workflow_activity_context.py b/ext/dapr-ext-workflow/tests/test_workflow_activity_context.py index a45b8b7cd..9a7d6fcc8 100644 --- a/ext/dapr-ext-workflow/tests/test_workflow_activity_context.py +++ b/ext/dapr-ext-workflow/tests/test_workflow_activity_context.py @@ -15,8 +15,9 @@ import unittest from unittest import mock -from durabletask import task + from dapr.ext.workflow.workflow_activity_context import WorkflowActivityContext +from durabletask import task mock_orchestration_id = 'orchestration001' mock_task = 10 diff --git a/ext/dapr-ext-workflow/tests/test_workflow_client.py b/ext/dapr-ext-workflow/tests/test_workflow_client.py index 540c0e801..a12a8844b 100644 --- a/ext/dapr-ext-workflow/tests/test_workflow_client.py +++ b/ext/dapr-ext-workflow/tests/test_workflow_client.py @@ -13,14 +13,15 @@ limitations under the License. 
""" +import unittest from datetime import datetime from typing import Any, Union -import unittest -from dapr.ext.workflow.dapr_workflow_context import DaprWorkflowContext from unittest import mock + +import durabletask.internal.orchestrator_service_pb2 as pb from dapr.ext.workflow.dapr_workflow_client import DaprWorkflowClient +from dapr.ext.workflow.dapr_workflow_context import DaprWorkflowContext from durabletask import client -import durabletask.internal.orchestrator_service_pb2 as pb from grpc import RpcError mock_schedule_result = 'workflow001' diff --git a/ext/dapr-ext-workflow/tests/test_workflow_runtime.py b/ext/dapr-ext-workflow/tests/test_workflow_runtime.py index 02d6c6f3b..bf18cd689 100644 --- a/ext/dapr-ext-workflow/tests/test_workflow_runtime.py +++ b/ext/dapr-ext-workflow/tests/test_workflow_runtime.py @@ -13,12 +13,13 @@ limitations under the License. """ -from typing import List import unittest -from dapr.ext.workflow.dapr_workflow_context import DaprWorkflowContext +from typing import List from unittest import mock -from dapr.ext.workflow.workflow_runtime import WorkflowRuntime, alternate_name + +from dapr.ext.workflow.dapr_workflow_context import DaprWorkflowContext from dapr.ext.workflow.workflow_activity_context import WorkflowActivityContext +from dapr.ext.workflow.workflow_runtime import WorkflowRuntime, alternate_name listOrchestrators: List[str] = [] listActivities: List[str] = [] diff --git a/ext/dapr-ext-workflow/tests/test_workflow_util.py b/ext/dapr-ext-workflow/tests/test_workflow_util.py index 878ee7374..28e92e6c5 100644 --- a/ext/dapr-ext-workflow/tests/test_workflow_util.py +++ b/ext/dapr-ext-workflow/tests/test_workflow_util.py @@ -1,7 +1,8 @@ import unittest -from dapr.ext.workflow.util import getAddress from unittest.mock import patch +from dapr.ext.workflow.util import getAddress + from dapr.conf import settings diff --git a/ext/flask_dapr/flask_dapr/app.py b/ext/flask_dapr/flask_dapr/app.py index c8d5def92..80e42220f 100644 --- 
a/ext/flask_dapr/flask_dapr/app.py +++ b/ext/flask_dapr/flask_dapr/app.py @@ -14,6 +14,7 @@ """ from typing import Dict, List, Optional + from flask import Flask, jsonify diff --git a/pyproject.toml b/pyproject.toml index 2b8ddf72e..0378a8c8f 100644 --- a/pyproject.toml +++ b/pyproject.toml @@ -1,21 +1,24 @@ [tool.ruff] -target-version = "py38" +target-version = "py310" line-length = 100 fix = true extend-exclude = [".github", "dapr/proto"] + [tool.ruff.lint] select = [ - "E", # pycodestyle errors + "I", # isort "W", # pycodestyle warnings "F", # pyflakes - "I", # isort - "C", # flake8-comprehensions - "B", # flake8-bugbear - "UP", # pyupgrade -] -ignore = [ - # Undefined name {name} - "F821", + "E", # pycodestyle errors + + # TODO: Add those back progressively as we fix the issues + # "C", # flake8-comprehensions + # "B", # flake8-bugbear + # "UP", # pyupgrade ] + +# TODO: Add those back progressively as we fix the issues +ignore = ["E501","E203", "E712", "E722", "E713"] + [tool.ruff.format] quote-style = 'single' diff --git a/setup.cfg b/setup.cfg index de5d53f4f..de9ecc33b 100644 --- a/setup.cfg +++ b/setup.cfg @@ -52,17 +52,3 @@ dapr.proto = py.typed dapr.serializers = py.typed - -[flake8] -exclude = - .venv, - venv, - .env, - build, - dist, - .git, - .tox, - dapr/proto, - examples -ignore = F821, E501, W503, E203 -max-line-length = 100 diff --git a/tests/actor/fake_actor_classes.py b/tests/actor/fake_actor_classes.py index 50fe63fcf..2de821779 100644 --- a/tests/actor/fake_actor_classes.py +++ b/tests/actor/fake_actor_classes.py @@ -12,24 +12,22 @@ See the License for the specific language governing permissions and limitations under the License. 
""" -from dapr.serializers.json import DefaultJSONSerializer -import asyncio +import asyncio from datetime import timedelta from typing import Optional -from dapr.actor.runtime.actor import Actor -from dapr.actor.runtime.remindable import Remindable from dapr.actor.actor_interface import ActorInterface, actormethod - +from dapr.actor.runtime.actor import Actor from dapr.actor.runtime.reentrancy_context import reentrancy_ctx +from dapr.actor.runtime.remindable import Remindable +from dapr.serializers.json import DefaultJSONSerializer # Fake Simple Actor Class for testing class FakeSimpleActorInterface(ActorInterface): @actormethod(name='ActorMethod') - async def actor_method(self, arg: int) -> dict: - ... + async def actor_method(self, arg: int) -> dict: ... class FakeSimpleActor(Actor, FakeSimpleActorInterface): @@ -89,40 +87,32 @@ async def receive_reminder( class FakeActorCls1Interface(ActorInterface): # Fake Actor Class deriving multiple ActorInterfaces @actormethod(name='ActorCls1Method') - async def actor_cls1_method(self, arg): - ... + async def actor_cls1_method(self, arg): ... @actormethod(name='ActorCls1Method1') - async def actor_cls1_method1(self, arg): - ... + async def actor_cls1_method1(self, arg): ... @actormethod(name='ActorCls1Method2') - async def actor_cls1_method2(self, arg): - ... + async def actor_cls1_method2(self, arg): ... class FakeActorCls2Interface(ActorInterface): @actormethod(name='ActorCls2Method') - async def actor_cls2_method(self, arg): - ... + async def actor_cls2_method(self, arg): ... @actormethod(name='ActionMethod') - async def action(self, data: object) -> str: - ... + async def action(self, data: object) -> str: ... @actormethod(name='ActionMethodWithoutArg') - async def action_no_arg(self) -> str: - ... + async def action_no_arg(self) -> str: ... class ReentrantActorInterface(ActorInterface): @actormethod(name='ReentrantMethod') - async def reentrant_method(self, data: object) -> str: - ... 
+ async def reentrant_method(self, data: object) -> str: ... @actormethod(name='ReentrantMethodWithPassthrough') - async def reentrant_pass_through_method(self, arg): - ... + async def reentrant_pass_through_method(self, arg): ... class FakeMultiInterfacesActor( diff --git a/tests/actor/fake_client.py b/tests/actor/fake_client.py index fa5fe1577..311a76e0e 100644 --- a/tests/actor/fake_client.py +++ b/tests/actor/fake_client.py @@ -13,36 +13,34 @@ limitations under the License. """ -from dapr.clients import DaprActorClientBase from typing import Optional +from dapr.clients import DaprActorClientBase + # Fake Dapr Actor Client Base Class for testing class FakeDaprActorClientBase(DaprActorClientBase): async def invoke_method( self, actor_type: str, actor_id: str, method: str, data: Optional[bytes] = None - ) -> bytes: - ... + ) -> bytes: ... - async def save_state_transactionally(self, actor_type: str, actor_id: str, data: bytes) -> None: - ... + async def save_state_transactionally( + self, actor_type: str, actor_id: str, data: bytes + ) -> None: ... - async def get_state(self, actor_type: str, actor_id: str, name: str) -> bytes: - ... + async def get_state(self, actor_type: str, actor_id: str, name: str) -> bytes: ... async def register_reminder( self, actor_type: str, actor_id: str, name: str, data: bytes - ) -> None: - ... + ) -> None: ... - async def unregister_reminder(self, actor_type: str, actor_id: str, name: str) -> None: - ... + async def unregister_reminder(self, actor_type: str, actor_id: str, name: str) -> None: ... - async def register_timer(self, actor_type: str, actor_id: str, name: str, data: bytes) -> None: - ... + async def register_timer( + self, actor_type: str, actor_id: str, name: str, data: bytes + ) -> None: ... - async def unregister_timer(self, actor_type: str, actor_id: str, name: str) -> None: - ... + async def unregister_timer(self, actor_type: str, actor_id: str, name: str) -> None: ... 
class FakeDaprActorClient(FakeDaprActorClientBase): diff --git a/tests/actor/test_actor.py b/tests/actor/test_actor.py index d9b602c9d..7a7bee2d2 100644 --- a/tests/actor/test_actor.py +++ b/tests/actor/test_actor.py @@ -14,25 +14,22 @@ """ import unittest - -from unittest import mock from datetime import timedelta +from unittest import mock from dapr.actor.id import ActorId +from dapr.actor.runtime._type_information import ActorTypeInformation from dapr.actor.runtime.config import ActorRuntimeConfig from dapr.actor.runtime.context import ActorRuntimeContext from dapr.actor.runtime.runtime import ActorRuntime -from dapr.actor.runtime._type_information import ActorTypeInformation from dapr.conf import settings from dapr.serializers import DefaultJSONSerializer - from tests.actor.fake_actor_classes import ( + FakeMultiInterfacesActor, FakeSimpleActor, FakeSimpleReminderActor, FakeSimpleTimerActor, - FakeMultiInterfacesActor, ) - from tests.actor.fake_client import FakeDaprActorClient from tests.actor.utils import _async_mock, _run from tests.clients.fake_http_server import FakeHttpServer diff --git a/tests/actor/test_actor_factory.py b/tests/actor/test_actor_factory.py index 0715c33f4..4f629bb25 100644 --- a/tests/actor/test_actor_factory.py +++ b/tests/actor/test_actor_factory.py @@ -18,16 +18,13 @@ from dapr.actor import Actor from dapr.actor.id import ActorId from dapr.actor.runtime._type_information import ActorTypeInformation -from dapr.actor.runtime.manager import ActorManager from dapr.actor.runtime.context import ActorRuntimeContext +from dapr.actor.runtime.manager import ActorManager from dapr.serializers import DefaultJSONSerializer - from tests.actor.fake_actor_classes import ( FakeSimpleActorInterface, ) - from tests.actor.fake_client import FakeDaprActorClient - from tests.actor.utils import _run diff --git a/tests/actor/test_actor_manager.py b/tests/actor/test_actor_manager.py index 6c21abfb7..af0e2e410 100644 --- a/tests/actor/test_actor_manager.py +++ 
b/tests/actor/test_actor_manager.py @@ -19,19 +19,16 @@ from dapr.actor.id import ActorId from dapr.actor.runtime._type_information import ActorTypeInformation -from dapr.actor.runtime.manager import ActorManager from dapr.actor.runtime.context import ActorRuntimeContext +from dapr.actor.runtime.manager import ActorManager from dapr.serializers import DefaultJSONSerializer - from tests.actor.fake_actor_classes import ( FakeMultiInterfacesActor, FakeSimpleActor, FakeSimpleReminderActor, FakeSimpleTimerActor, ) - from tests.actor.fake_client import FakeDaprActorClient - from tests.actor.utils import ( _async_mock, _run, diff --git a/tests/actor/test_actor_reentrancy.py b/tests/actor/test_actor_reentrancy.py index 834273f41..263070f65 100644 --- a/tests/actor/test_actor_reentrancy.py +++ b/tests/actor/test_actor_reentrancy.py @@ -13,22 +13,19 @@ limitations under the License. """ -import unittest import asyncio - +import unittest from unittest import mock +from dapr.actor.runtime.config import ActorReentrancyConfig, ActorRuntimeConfig from dapr.actor.runtime.runtime import ActorRuntime -from dapr.actor.runtime.config import ActorRuntimeConfig, ActorReentrancyConfig from dapr.conf import settings from dapr.serializers import DefaultJSONSerializer - from tests.actor.fake_actor_classes import ( - FakeReentrantActor, FakeMultiInterfacesActor, + FakeReentrantActor, FakeSlowReentrantActor, ) - from tests.actor.utils import _run from tests.clients.fake_http_server import FakeHttpServer @@ -212,9 +209,10 @@ async def expected_return_value(*args, **kwargs): _run(ActorRuntime.deactivate(FakeReentrantActor.__name__, 'test-id')) def test_parse_incoming_reentrancy_header_flask(self): - from ext.flask_dapr import flask_dapr from flask import Flask + from ext.flask_dapr import flask_dapr + app = Flask(f'{FakeReentrantActor.__name__}Service') flask_dapr.DaprActor(app) @@ -244,9 +242,9 @@ def test_parse_incoming_reentrancy_header_flask(self): ) def 
test_parse_incoming_reentrancy_header_fastapi(self): + from dapr.ext import fastapi from fastapi import FastAPI from fastapi.testclient import TestClient - from dapr.ext import fastapi app = FastAPI(title=f'{FakeReentrantActor.__name__}Service') fastapi.DaprActor(app) diff --git a/tests/actor/test_actor_runtime.py b/tests/actor/test_actor_runtime.py index f17f96cc8..7725c3728 100644 --- a/tests/actor/test_actor_runtime.py +++ b/tests/actor/test_actor_runtime.py @@ -14,20 +14,17 @@ """ import unittest - from datetime import timedelta -from dapr.actor.runtime.runtime import ActorRuntime from dapr.actor.runtime.config import ActorRuntimeConfig +from dapr.actor.runtime.runtime import ActorRuntime from dapr.conf import settings from dapr.serializers import DefaultJSONSerializer - from tests.actor.fake_actor_classes import ( - FakeSimpleActor, FakeMultiInterfacesActor, + FakeSimpleActor, FakeSimpleTimerActor, ) - from tests.actor.utils import _run from tests.clients.fake_http_server import FakeHttpServer diff --git a/tests/actor/test_actor_runtime_config.py b/tests/actor/test_actor_runtime_config.py index 7bbd8cefc..e39894c77 100644 --- a/tests/actor/test_actor_runtime_config.py +++ b/tests/actor/test_actor_runtime_config.py @@ -14,9 +14,9 @@ """ import unittest - from datetime import timedelta -from dapr.actor.runtime.config import ActorRuntimeConfig, ActorReentrancyConfig, ActorTypeConfig + +from dapr.actor.runtime.config import ActorReentrancyConfig, ActorRuntimeConfig, ActorTypeConfig class ActorTypeConfigTests(unittest.TestCase): diff --git a/tests/actor/test_client_proxy.py b/tests/actor/test_client_proxy.py index fe667d629..172e5d283 100644 --- a/tests/actor/test_client_proxy.py +++ b/tests/actor/test_client_proxy.py @@ -12,22 +12,18 @@ See the License for the specific language governing permissions and limitations under the License. 
""" -import unittest +import unittest from unittest import mock - -from dapr.actor.id import ActorId from dapr.actor.client.proxy import ActorProxy +from dapr.actor.id import ActorId from dapr.serializers import DefaultJSONSerializer from tests.actor.fake_actor_classes import ( - FakeMultiInterfacesActor, FakeActorCls2Interface, + FakeMultiInterfacesActor, ) - - from tests.actor.fake_client import FakeDaprActorClient - from tests.actor.utils import _async_mock, _run diff --git a/tests/actor/test_method_dispatcher.py b/tests/actor/test_method_dispatcher.py index 94f48a7b6..a32fba455 100644 --- a/tests/actor/test_method_dispatcher.py +++ b/tests/actor/test_method_dispatcher.py @@ -15,11 +15,10 @@ import unittest +from dapr.actor.runtime._type_information import ActorTypeInformation from dapr.actor.runtime.context import ActorRuntimeContext from dapr.actor.runtime.method_dispatcher import ActorMethodDispatcher -from dapr.actor.runtime._type_information import ActorTypeInformation from dapr.serializers import DefaultJSONSerializer - from tests.actor.fake_actor_classes import FakeSimpleActor from tests.actor.fake_client import FakeDaprActorClient from tests.actor.utils import _run diff --git a/tests/actor/test_mock_actor.py b/tests/actor/test_mock_actor.py index c37cdf4f8..8a958c425 100644 --- a/tests/actor/test_mock_actor.py +++ b/tests/actor/test_mock_actor.py @@ -9,48 +9,37 @@ class MockTestActorInterface(ActorInterface): @actormethod(name='GetData') - async def get_data(self) -> object: - ... + async def get_data(self) -> object: ... @actormethod(name='SetData') - async def set_data(self, data: object) -> None: - ... + async def set_data(self, data: object) -> None: ... @actormethod(name='ClearData') - async def clear_data(self) -> None: - ... + async def clear_data(self) -> None: ... @actormethod(name='TestData') - async def test_data(self) -> int: - ... + async def test_data(self) -> int: ... 
@actormethod(name='AddState') - async def add_state(self, name: str, data: object) -> None: - ... + async def add_state(self, name: str, data: object) -> None: ... @actormethod(name='UpdateState') - async def update_state(self, name: str, data: object) -> None: - ... + async def update_state(self, name: str, data: object) -> None: ... @actormethod(name='AddDataNoSave') - async def add_data_no_save(self, data: object) -> None: - ... + async def add_data_no_save(self, data: object) -> None: ... @actormethod(name='RemoveDataNoSave') - async def remove_data_no_save(self) -> None: - ... + async def remove_data_no_save(self) -> None: ... @actormethod(name='SaveState') - async def save_state(self) -> None: - ... + async def save_state(self) -> None: ... @actormethod(name='ToggleReminder') - async def toggle_reminder(self, name: str, enabled: bool) -> None: - ... + async def toggle_reminder(self, name: str, enabled: bool) -> None: ... @actormethod(name='ToggleTimer') - async def toggle_timer(self, name: str, enabled: bool) -> None: - ... + async def toggle_timer(self, name: str, enabled: bool) -> None: ... 
class MockTestActor(Actor, MockTestActorInterface, Remindable): diff --git a/tests/actor/test_state_manager.py b/tests/actor/test_state_manager.py index c9406dbd2..11a7c4f08 100644 --- a/tests/actor/test_state_manager.py +++ b/tests/actor/test_state_manager.py @@ -15,19 +15,16 @@ import base64 import unittest - from unittest import mock from dapr.actor.id import ActorId +from dapr.actor.runtime._type_information import ActorTypeInformation from dapr.actor.runtime.context import ActorRuntimeContext from dapr.actor.runtime.state_change import StateChangeKind from dapr.actor.runtime.state_manager import ActorStateManager -from dapr.actor.runtime._type_information import ActorTypeInformation from dapr.serializers import DefaultJSONSerializer - from tests.actor.fake_actor_classes import FakeSimpleActor from tests.actor.fake_client import FakeDaprActorClient - from tests.actor.utils import _async_mock, _run diff --git a/tests/actor/test_timer_data.py b/tests/actor/test_timer_data.py index ba410cecd..8a193f416 100644 --- a/tests/actor/test_timer_data.py +++ b/tests/actor/test_timer_data.py @@ -13,9 +13,9 @@ limitations under the License. 
""" -from typing import Any import unittest from datetime import timedelta +from typing import Any from dapr.actor.runtime._timer_data import ActorTimerData diff --git a/tests/actor/test_type_information.py b/tests/actor/test_type_information.py index 1532e3956..201eb87fb 100644 --- a/tests/actor/test_type_information.py +++ b/tests/actor/test_type_information.py @@ -17,10 +17,10 @@ from dapr.actor.runtime._type_information import ActorTypeInformation from tests.actor.fake_actor_classes import ( - FakeSimpleActor, - FakeMultiInterfacesActor, FakeActorCls1Interface, FakeActorCls2Interface, + FakeMultiInterfacesActor, + FakeSimpleActor, ReentrantActorInterface, ) diff --git a/tests/actor/test_type_utils.py b/tests/actor/test_type_utils.py index f8b2eee2a..6b2a9319b 100644 --- a/tests/actor/test_type_utils.py +++ b/tests/actor/test_type_utils.py @@ -17,19 +17,18 @@ from dapr.actor.actor_interface import ActorInterface from dapr.actor.runtime._type_utils import ( + get_actor_interfaces, get_class_method_args, + get_dispatchable_attrs, get_method_arg_types, get_method_return_types, is_dapr_actor, - get_actor_interfaces, - get_dispatchable_attrs, ) - from tests.actor.fake_actor_classes import ( - FakeSimpleActor, - FakeMultiInterfacesActor, FakeActorCls1Interface, FakeActorCls2Interface, + FakeMultiInterfacesActor, + FakeSimpleActor, ) diff --git a/tests/clients/certs.py b/tests/clients/certs.py index a30b25312..9d851ca46 100644 --- a/tests/clients/certs.py +++ b/tests/clients/certs.py @@ -1,7 +1,7 @@ import os import ssl -import grpc +import grpc from OpenSSL import crypto diff --git a/tests/clients/fake_dapr_server.py b/tests/clients/fake_dapr_server.py index a1cbeb4b7..a1ee695eb 100644 --- a/tests/clients/fake_dapr_server.py +++ b/tests/clients/fake_dapr_server.py @@ -1,48 +1,47 @@ -import grpc import json - from concurrent import futures -from google.protobuf.any_pb2 import Any as GrpcAny +from typing import Dict + +import grpc from google.protobuf import empty_pb2, 
struct_pb2 -from google.rpc import status_pb2, code_pb2 +from google.protobuf.any_pb2 import Any as GrpcAny +from google.rpc import code_pb2, status_pb2 from grpc_status import rpc_status from dapr.clients.grpc._helpers import to_bytes -from dapr.proto import api_service_v1, common_v1, api_v1, appcallback_v1 -from dapr.proto.common.v1.common_pb2 import ConfigurationItem from dapr.clients.grpc._response import WorkflowRuntimeStatus +from dapr.proto import api_service_v1, api_v1, appcallback_v1, common_v1 +from dapr.proto.common.v1.common_pb2 import ConfigurationItem from dapr.proto.runtime.v1.dapr_pb2 import ( ActiveActorsCount, + ConversationResponseAlpha2, + ConversationResultAlpha2, + ConversationResultChoices, + ConversationResultMessage, + ConversationToolCalls, + ConversationToolCallsOfFunction, + DecryptRequest, + DecryptResponse, + EncryptRequest, + EncryptResponse, GetMetadataResponse, + GetWorkflowRequest, + GetWorkflowResponse, + PauseWorkflowRequest, + PurgeWorkflowRequest, QueryStateItem, + RaiseEventWorkflowRequest, RegisteredComponents, + ResumeWorkflowRequest, SetMetadataRequest, + StartWorkflowRequest, + StartWorkflowResponse, + TerminateWorkflowRequest, TryLockRequest, TryLockResponse, UnlockRequest, UnlockResponse, - StartWorkflowRequest, - StartWorkflowResponse, - GetWorkflowRequest, - GetWorkflowResponse, - PauseWorkflowRequest, - ResumeWorkflowRequest, - TerminateWorkflowRequest, - PurgeWorkflowRequest, - RaiseEventWorkflowRequest, - EncryptRequest, - EncryptResponse, - DecryptRequest, - DecryptResponse, - ConversationResultAlpha2, - ConversationResultChoices, - ConversationResultMessage, - ConversationResponseAlpha2, - ConversationToolCalls, - ConversationToolCallsOfFunction, ) -from typing import Dict - from tests.clients.certs import GrpcCerts from tests.clients.fake_http_server import FakeHttpServer diff --git a/tests/clients/fake_http_server.py b/tests/clients/fake_http_server.py index e08e82d29..8476b18ba 100644 --- 
a/tests/clients/fake_http_server.py +++ b/tests/clients/fake_http_server.py @@ -1,8 +1,7 @@ import time +from http.server import BaseHTTPRequestHandler, HTTPServer from ssl import PROTOCOL_TLS_SERVER, SSLContext - from threading import Thread -from http.server import BaseHTTPRequestHandler, HTTPServer from tests.clients.certs import HttpCerts diff --git a/tests/clients/test_conversation.py b/tests/clients/test_conversation.py index 8a6cc697e..50daebc64 100644 --- a/tests/clients/test_conversation.py +++ b/tests/clients/test_conversation.py @@ -13,7 +13,6 @@ limitations under the License. """ - import asyncio import json import unittest @@ -33,7 +32,14 @@ from dapr.clients.grpc.conversation import ( ConversationInput, ConversationInputAlpha2, + ConversationMessage, + ConversationMessageOfAssistant, ConversationResponseAlpha2, + ConversationResultAlpha2, + ConversationResultAlpha2Choices, + ConversationResultAlpha2Message, + ConversationToolCalls, + ConversationToolCallsOfFunction, ConversationTools, ConversationToolsFunction, FunctionBackend, @@ -41,18 +47,11 @@ create_system_message, create_tool_message, create_user_message, + execute_registered_tool, execute_registered_tool_async, get_registered_tools, register_tool, unregister_tool, - ConversationResultAlpha2Message, - ConversationResultAlpha2Choices, - ConversationResultAlpha2, - ConversationMessage, - ConversationMessageOfAssistant, - ConversationToolCalls, - ConversationToolCallsOfFunction, - execute_registered_tool, ) from dapr.clients.grpc.conversation import ( tool as tool_decorator, @@ -1010,7 +1009,7 @@ def test_multiline_example(self): def test_zero_indent(self): result = conversation._indent_lines('Title', 'Line one\nLine two', 0) - expected = 'Title: Line one\n' ' Line two' + expected = 'Title: Line one\n Line two' self.assertEqual(result, expected) def test_empty_string(self): @@ -1026,7 +1025,7 @@ def test_title_length_affects_indent(self): # Title length is 1, indent_after_first_line should be 
indent + len(title) + 2 # indent=2, len(title)=1 => 2 + 1 + 2 = 5 spaces on continuation lines result = conversation._indent_lines('T', 'a\nb', 2) - expected = ' T: a\n' ' b' + expected = ' T: a\n b' self.assertEqual(result, expected) diff --git a/tests/clients/test_conversation_helpers.py b/tests/clients/test_conversation_helpers.py index 62f2f69ae..e7c69b30e 100644 --- a/tests/clients/test_conversation_helpers.py +++ b/tests/clients/test_conversation_helpers.py @@ -12,37 +12,39 @@ See the License for the specific language governing permissions and limitations under the License. """ + +import base64 import io import json -import base64 import unittest import warnings from contextlib import redirect_stdout from dataclasses import dataclass from enum import Enum -from typing import Any, Dict, List, Literal, Optional, Union, Set -from dapr.conf import settings +from typing import Any, Dict, List, Literal, Optional, Set, Union + from dapr.clients.grpc._conversation_helpers import ( - stringify_tool_output, - bind_params_to_func, - function_to_json_schema, + ToolArgumentError, _extract_docstring_args, _python_type_to_json_schema, + bind_params_to_func, extract_docstring_summary, - ToolArgumentError, + function_to_json_schema, + stringify_tool_output, ) from dapr.clients.grpc.conversation import ( - ConversationToolsFunction, - ConversationMessageOfUser, + ConversationMessage, ConversationMessageContent, - ConversationToolCalls, - ConversationToolCallsOfFunction, ConversationMessageOfAssistant, - ConversationMessageOfTool, - ConversationMessage, ConversationMessageOfDeveloper, ConversationMessageOfSystem, + ConversationMessageOfTool, + ConversationMessageOfUser, + ConversationToolCalls, + ConversationToolCallsOfFunction, + ConversationToolsFunction, ) +from dapr.conf import settings def test_string_passthrough(): @@ -2089,8 +2091,7 @@ def f(p: Plain) -> int: bind_params_to_func(f, {'p': {}}) def test_any_and_isinstance_fallback(self): - class C: - ... + class C: ... 
def f(a: Any, c: C) -> tuple: return a, c diff --git a/tests/clients/test_dapr_grpc_client.py b/tests/clients/test_dapr_grpc_client.py index e0713f703..a52bbeb0d 100644 --- a/tests/clients/test_dapr_grpc_client.py +++ b/tests/clients/test_dapr_grpc_client.py @@ -13,43 +13,43 @@ limitations under the License. """ +import asyncio import json import socket import tempfile import time import unittest import uuid -import asyncio - from unittest.mock import patch -from google.rpc import status_pb2, code_pb2 +from google.rpc import code_pb2, status_pb2 -from dapr.clients.exceptions import DaprGrpcError -from dapr.clients.grpc.client import DaprGrpcClient from dapr.clients import DaprClient -from dapr.clients.grpc.subscription import StreamInactiveError -from dapr.proto import common_v1 -from .fake_dapr_server import FakeDaprSidecar -from dapr.conf import settings +from dapr.clients.exceptions import DaprGrpcError +from dapr.clients.grpc import conversation +from dapr.clients.grpc._crypto import DecryptOptions, EncryptOptions from dapr.clients.grpc._helpers import to_bytes +from dapr.clients.grpc._jobs import Job from dapr.clients.grpc._request import ( TransactionalStateOperation, TransactionOperationType, ) -from dapr.clients.grpc._jobs import Job -from dapr.clients.grpc._state import StateOptions, Consistency, Concurrency, StateItem -from dapr.clients.grpc._crypto import EncryptOptions, DecryptOptions from dapr.clients.grpc._response import ( ConfigurationItem, ConfigurationResponse, ConfigurationWatcher, DaprResponse, + TopicEventResponse, UnlockResponseStatus, WorkflowRuntimeStatus, - TopicEventResponse, ) -from dapr.clients.grpc import conversation +from dapr.clients.grpc._state import Concurrency, Consistency, StateItem, StateOptions +from dapr.clients.grpc.client import DaprGrpcClient +from dapr.clients.grpc.subscription import StreamInactiveError +from dapr.conf import settings +from dapr.proto import common_v1 + +from .fake_dapr_server import FakeDaprSidecar 
class DaprGrpcClientTests(unittest.TestCase): @@ -1000,7 +1000,6 @@ def test_set_metadata(self): self.assertEqual(response.extended_metadata[metadata_key], metadata_value) def test_set_metadata_input_validation(self): - dapr = DaprGrpcClient(f'{self.scheme}localhost:{self.grpc_port}') valid_attr_name = 'attribute name' valid_attr_value = 'attribute value' # Invalid inputs for string arguments @@ -1694,7 +1693,7 @@ def test_delete_job_alpha1_validation_error(self): def test_jobs_error_handling(self): """Test error handling for Jobs API using fake server's exception mechanism.""" - from google.rpc import status_pb2, code_pb2 + from google.rpc import code_pb2, status_pb2 dapr = DaprGrpcClient(f'{self.scheme}localhost:{self.grpc_port}') diff --git a/tests/clients/test_dapr_grpc_client_async.py b/tests/clients/test_dapr_grpc_client_async.py index 50043912d..245c384dd 100644 --- a/tests/clients/test_dapr_grpc_client_async.py +++ b/tests/clients/test_dapr_grpc_client_async.py @@ -12,6 +12,7 @@ See the License for the specific language governing permissions and limitations under the License. 
""" + import json import socket import tempfile @@ -19,28 +20,29 @@ import uuid from unittest.mock import patch -from google.rpc import status_pb2, code_pb2 +from google.rpc import code_pb2, status_pb2 -from dapr.aio.clients.grpc.client import DaprGrpcClientAsync from dapr.aio.clients import DaprClient +from dapr.aio.clients.grpc.client import DaprGrpcClientAsync from dapr.clients.exceptions import DaprGrpcError -from dapr.common.pubsub.subscription import StreamInactiveError -from dapr.proto import common_v1 -from .fake_dapr_server import FakeDaprSidecar -from dapr.conf import settings -from dapr.clients.grpc._helpers import to_bytes -from dapr.clients.grpc._request import TransactionalStateOperation from dapr.clients.grpc import conversation +from dapr.clients.grpc._crypto import DecryptOptions, EncryptOptions +from dapr.clients.grpc._helpers import to_bytes from dapr.clients.grpc._jobs import Job -from dapr.clients.grpc._state import StateOptions, Consistency, Concurrency, StateItem -from dapr.clients.grpc._crypto import EncryptOptions, DecryptOptions +from dapr.clients.grpc._request import TransactionalStateOperation from dapr.clients.grpc._response import ( ConfigurationItem, - ConfigurationWatcher, ConfigurationResponse, + ConfigurationWatcher, DaprResponse, UnlockResponseStatus, ) +from dapr.clients.grpc._state import Concurrency, Consistency, StateItem, StateOptions +from dapr.common.pubsub.subscription import StreamInactiveError +from dapr.conf import settings +from dapr.proto import common_v1 + +from .fake_dapr_server import FakeDaprSidecar class DaprGrpcClientAsyncTests(unittest.IsolatedAsyncioTestCase): @@ -929,7 +931,6 @@ async def test_set_metadata(self): self.assertEqual(response.extended_metadata[metadata_key], metadata_value) async def test_set_metadata_input_validation(self): - dapr = DaprGrpcClientAsync(f'{self.scheme}localhost:{self.grpc_port}') valid_attr_name = 'attribute name' valid_attr_value = 'attribute value' # Invalid inputs for string 
arguments diff --git a/tests/clients/test_dapr_grpc_client_async_secure.py b/tests/clients/test_dapr_grpc_client_async_secure.py index 652feac20..a49fe5fc0 100644 --- a/tests/clients/test_dapr_grpc_client_async_secure.py +++ b/tests/clients/test_dapr_grpc_client_async_secure.py @@ -14,16 +14,15 @@ """ import unittest - from unittest.mock import patch from dapr.aio.clients.grpc.client import DaprGrpcClientAsync from dapr.clients.health import DaprHealth +from dapr.conf import settings from tests.clients.certs import replacement_get_credentials_func, replacement_get_health_context from tests.clients.test_dapr_grpc_client_async import DaprGrpcClientAsyncTests -from .fake_dapr_server import FakeDaprSidecar -from dapr.conf import settings +from .fake_dapr_server import FakeDaprSidecar DaprGrpcClientAsync.get_credentials = replacement_get_credentials_func DaprHealth.get_ssl_context = replacement_get_health_context diff --git a/tests/clients/test_dapr_grpc_client_secure.py b/tests/clients/test_dapr_grpc_client_secure.py index 41dedca1a..2a6710403 100644 --- a/tests/clients/test_dapr_grpc_client_secure.py +++ b/tests/clients/test_dapr_grpc_client_secure.py @@ -12,6 +12,7 @@ See the License for the specific language governing permissions and limitations under the License. 
""" + import unittest from unittest.mock import patch @@ -19,8 +20,8 @@ from dapr.clients.health import DaprHealth from dapr.conf import settings from tests.clients.certs import replacement_get_credentials_func, replacement_get_health_context - from tests.clients.test_dapr_grpc_client import DaprGrpcClientTests + from .fake_dapr_server import FakeDaprSidecar diff --git a/tests/clients/test_dapr_grpc_helpers.py b/tests/clients/test_dapr_grpc_helpers.py index 9e794aab7..6c7c27be9 100644 --- a/tests/clients/test_dapr_grpc_helpers.py +++ b/tests/clients/test_dapr_grpc_helpers.py @@ -1,22 +1,22 @@ import base64 import unittest -from google.protobuf.struct_pb2 import Struct from google.protobuf import json_format -from google.protobuf.json_format import ParseError from google.protobuf.any_pb2 import Any as GrpcAny +from google.protobuf.json_format import ParseError +from google.protobuf.struct_pb2 import Struct from google.protobuf.wrappers_pb2 import ( BoolValue, - StringValue, + BytesValue, + DoubleValue, Int32Value, Int64Value, - DoubleValue, - BytesValue, + StringValue, ) from dapr.clients.grpc._helpers import ( - convert_value_to_struct, convert_dict_to_grpc_dict_of_any, + convert_value_to_struct, ) diff --git a/tests/clients/test_dapr_grpc_request.py b/tests/clients/test_dapr_grpc_request.py index 98d8e2005..396a8ec95 100644 --- a/tests/clients/test_dapr_grpc_request.py +++ b/tests/clients/test_dapr_grpc_request.py @@ -16,13 +16,13 @@ import io import unittest +from dapr.clients.grpc._crypto import DecryptOptions, EncryptOptions from dapr.clients.grpc._request import ( - InvokeMethodRequest, BindingRequest, - EncryptRequestIterator, DecryptRequestIterator, + EncryptRequestIterator, + InvokeMethodRequest, ) -from dapr.clients.grpc._crypto import EncryptOptions, DecryptOptions from dapr.proto import api_v1, common_v1 diff --git a/tests/clients/test_dapr_grpc_request_async.py b/tests/clients/test_dapr_grpc_request_async.py index 75fe74fce..7782fecdf 100644 --- 
a/tests/clients/test_dapr_grpc_request_async.py +++ b/tests/clients/test_dapr_grpc_request_async.py @@ -16,8 +16,8 @@ import io import unittest -from dapr.clients.grpc._crypto import EncryptOptions, DecryptOptions -from dapr.aio.clients.grpc._request import EncryptRequestIterator, DecryptRequestIterator +from dapr.aio.clients.grpc._request import DecryptRequestIterator, EncryptRequestIterator +from dapr.clients.grpc._crypto import DecryptOptions, EncryptOptions from dapr.proto import api_v1 diff --git a/tests/clients/test_dapr_grpc_response.py b/tests/clients/test_dapr_grpc_response.py index 1c91805eb..c2fe237f9 100644 --- a/tests/clients/test_dapr_grpc_response.py +++ b/tests/clients/test_dapr_grpc_response.py @@ -18,15 +18,14 @@ from google.protobuf.any_pb2 import Any as GrpcAny from dapr.clients.grpc._response import ( - DaprResponse, - InvokeMethodResponse, BindingResponse, - StateResponse, BulkStateItem, - EncryptResponse, + DaprResponse, DecryptResponse, + EncryptResponse, + InvokeMethodResponse, + StateResponse, ) - from dapr.proto import api_v1, common_v1 diff --git a/tests/clients/test_dapr_grpc_response_async.py b/tests/clients/test_dapr_grpc_response_async.py index 2626cbf41..02b09716f 100644 --- a/tests/clients/test_dapr_grpc_response_async.py +++ b/tests/clients/test_dapr_grpc_response_async.py @@ -15,7 +15,7 @@ import unittest -from dapr.aio.clients.grpc._response import EncryptResponse, DecryptResponse +from dapr.aio.clients.grpc._response import DecryptResponse, EncryptResponse from dapr.proto import api_v1, common_v1 diff --git a/tests/clients/test_exceptions.py b/tests/clients/test_exceptions.py index 08eea4d53..e8b4c6d9f 100644 --- a/tests/clients/test_exceptions.py +++ b/tests/clients/test_exceptions.py @@ -3,9 +3,9 @@ import unittest import grpc -from google.rpc import error_details_pb2, status_pb2, code_pb2 from google.protobuf.any_pb2 import Any from google.protobuf.duration_pb2 import Duration +from google.rpc import code_pb2, 
error_details_pb2, status_pb2 from dapr.clients import DaprGrpcClient from dapr.clients.exceptions import DaprGrpcError, DaprInternalError diff --git a/tests/clients/test_heatlhcheck.py b/tests/clients/test_heatlhcheck.py index d447e072c..c5b49aee9 100644 --- a/tests/clients/test_heatlhcheck.py +++ b/tests/clients/test_heatlhcheck.py @@ -12,9 +12,10 @@ See the License for the specific language governing permissions and limitations under the License. """ + import time import unittest -from unittest.mock import patch, MagicMock +from unittest.mock import MagicMock, patch from dapr.clients.health import DaprHealth from dapr.conf import settings diff --git a/tests/clients/test_http_helpers.py b/tests/clients/test_http_helpers.py index ab173cd73..abf284dbe 100644 --- a/tests/clients/test_http_helpers.py +++ b/tests/clients/test_http_helpers.py @@ -1,8 +1,8 @@ import unittest from unittest.mock import patch -from dapr.conf import settings from dapr.clients.http.helpers import get_api_url +from dapr.conf import settings class DaprHttpClientHelpersTests(unittest.TestCase): diff --git a/tests/clients/test_http_service_invocation_client.py b/tests/clients/test_http_service_invocation_client.py index c0b43a863..a0a7aadd6 100644 --- a/tests/clients/test_http_service_invocation_client.py +++ b/tests/clients/test_http_service_invocation_client.py @@ -24,13 +24,12 @@ from opentelemetry.sdk.trace.sampling import ALWAYS_ON from opentelemetry.trace.propagation.tracecontext import TraceContextTextMapPropagator - +from dapr.clients import DaprClient from dapr.clients.exceptions import DaprInternalError from dapr.conf import settings from dapr.proto import common_v1 from .fake_http_server import FakeHttpServer -from dapr.clients import DaprClient class DaprInvocationHttpClientTests(unittest.TestCase): diff --git a/tests/clients/test_jobs.py b/tests/clients/test_jobs.py index fe3d70b53..645d43256 100644 --- a/tests/clients/test_jobs.py +++ b/tests/clients/test_jobs.py @@ -5,9 +5,10 @@ 
""" import unittest + from google.protobuf.any_pb2 import Any as GrpcAny -from dapr.clients.grpc._jobs import Job, DropFailurePolicy, ConstantFailurePolicy +from dapr.clients.grpc._jobs import ConstantFailurePolicy, DropFailurePolicy, Job from dapr.proto.runtime.v1 import dapr_pb2 as api_v1 diff --git a/tests/clients/test_retries_policy.py b/tests/clients/test_retries_policy.py index b5137e643..d4a383fc1 100644 --- a/tests/clients/test_retries_policy.py +++ b/tests/clients/test_retries_policy.py @@ -12,11 +12,12 @@ See the License for the specific language governing permissions and limitations under the License. """ + import unittest from unittest import mock -from unittest.mock import Mock, MagicMock, patch, AsyncMock +from unittest.mock import AsyncMock, MagicMock, Mock, patch -from grpc import StatusCode, RpcError +from grpc import RpcError, StatusCode from dapr.clients.retry import RetryPolicy from dapr.serializers import DefaultJSONSerializer diff --git a/tests/clients/test_retries_policy_async.py b/tests/clients/test_retries_policy_async.py index ebe6865db..2b35c35c4 100644 --- a/tests/clients/test_retries_policy_async.py +++ b/tests/clients/test_retries_policy_async.py @@ -12,11 +12,12 @@ See the License for the specific language governing permissions and limitations under the License. 
""" + import unittest from unittest import mock -from unittest.mock import MagicMock, patch, AsyncMock +from unittest.mock import AsyncMock, MagicMock, patch -from grpc import StatusCode, RpcError +from grpc import RpcError, StatusCode from dapr.clients.retry import RetryPolicy diff --git a/tests/clients/test_secure_http_service_invocation_client.py b/tests/clients/test_secure_http_service_invocation_client.py index 4d1bdda1f..df13d8197 100644 --- a/tests/clients/test_secure_http_service_invocation_client.py +++ b/tests/clients/test_secure_http_service_invocation_client.py @@ -12,6 +12,7 @@ See the License for the specific language governing permissions and limitations under the License. """ + import ssl import typing from asyncio import TimeoutError @@ -29,8 +30,7 @@ from dapr.conf import settings from dapr.proto import common_v1 - -from .certs import replacement_get_health_context, replacement_get_credentials_func, GrpcCerts +from .certs import GrpcCerts, replacement_get_credentials_func, replacement_get_health_context from .fake_http_server import FakeHttpServer from .test_http_service_invocation_client import DaprInvocationHttpClientTests diff --git a/tests/clients/test_subscription.py b/tests/clients/test_subscription.py index ed2eae3fa..21018aaac 100644 --- a/tests/clients/test_subscription.py +++ b/tests/clients/test_subscription.py @@ -1,8 +1,9 @@ -from dapr.clients.grpc.subscription import SubscriptionMessage -from dapr.proto.runtime.v1.appcallback_pb2 import TopicEventRequest +import unittest + from google.protobuf.struct_pb2 import Struct -import unittest +from dapr.clients.grpc.subscription import SubscriptionMessage +from dapr.proto.runtime.v1.appcallback_pb2 import TopicEventRequest class SubscriptionMessageTests(unittest.TestCase): diff --git a/tests/clients/test_timeout_interceptor.py b/tests/clients/test_timeout_interceptor.py index 79859b2e5..c60331bed 100644 --- a/tests/clients/test_timeout_interceptor.py +++ 
b/tests/clients/test_timeout_interceptor.py @@ -15,6 +15,7 @@ import unittest from unittest.mock import Mock, patch + from dapr.clients.grpc.interceptors import DaprClientTimeoutInterceptor from dapr.conf import settings diff --git a/tests/clients/test_timeout_interceptor_async.py b/tests/clients/test_timeout_interceptor_async.py index d057df9fc..88b5831dc 100644 --- a/tests/clients/test_timeout_interceptor_async.py +++ b/tests/clients/test_timeout_interceptor_async.py @@ -15,6 +15,7 @@ import unittest from unittest.mock import Mock, patch + from dapr.aio.clients.grpc.interceptors import DaprClientTimeoutInterceptorAsync from dapr.conf import settings diff --git a/tests/serializers/test_default_json_serializer.py b/tests/serializers/test_default_json_serializer.py index 86e727ad0..8f65595c0 100644 --- a/tests/serializers/test_default_json_serializer.py +++ b/tests/serializers/test_default_json_serializer.py @@ -13,8 +13,8 @@ limitations under the License. """ -import unittest import datetime +import unittest from dapr.serializers.json import DefaultJSONSerializer diff --git a/tests/serializers/test_util.py b/tests/serializers/test_util.py index 9f3b9e026..25124fdf6 100644 --- a/tests/serializers/test_util.py +++ b/tests/serializers/test_util.py @@ -13,12 +13,12 @@ limitations under the License. 
""" -import unittest import json +import unittest from datetime import timedelta -from dapr.serializers.util import convert_from_dapr_duration, convert_to_dapr_duration from dapr.serializers.json import DaprJSONDecoder +from dapr.serializers.util import convert_from_dapr_duration, convert_to_dapr_duration class UtilTests(unittest.TestCase): diff --git a/tox.ini b/tox.ini index ebd403c3f..45c54da59 100644 --- a/tox.ini +++ b/tox.ini @@ -3,7 +3,6 @@ skipsdist = True minversion = 3.9.0 envlist = py{39,310,311,312,313} - flake8, ruff, mypy, @@ -25,18 +24,11 @@ commands_pre = pip3 install -e {toxinidir}/ext/dapr-ext-fastapi/ pip3 install -e {toxinidir}/ext/flask_dapr/ -[testenv:flake8] -basepython = python3 -usedevelop = False -deps = flake8 -commands = - flake8 . - [testenv:ruff] basepython = python3 usedevelop = False -deps = ruff==0.2.2 commands = + ruff check --fix ruff format [testenv:examples] From f42be572cf86f623d3a3b505a21f4f216dcebbb1 Mon Sep 17 00:00:00 2001 From: Albert Callarisa Date: Fri, 14 Nov 2025 15:29:11 +0100 Subject: [PATCH 03/16] fix: Fetch latest build instead of page1-limit1 to figure out cli version (#863) Signed-off-by: Albert Callarisa --- .github/workflows/validate_examples.yaml | 2 +- 1 file changed, 1 insertion(+), 1 deletion(-) diff --git a/.github/workflows/validate_examples.yaml b/.github/workflows/validate_examples.yaml index e55a22913..f87cd246f 100644 --- a/.github/workflows/validate_examples.yaml +++ b/.github/workflows/validate_examples.yaml @@ -70,7 +70,7 @@ jobs: echo "Found $RUNTIME_VERSION" - name: Determine latest Dapr Cli version run: | - export CLI_VERSION=$(curl "https://api.github.com/repos/dapr/cli/releases?per_page=1&page=1" --header 'authorization: Bearer ${{ secrets.GITHUB_TOKEN }}' | jq '.[0].tag_name'| tr -d '",v') + export CLI_VERSION=$(curl "https://api.github.com/repos/dapr/cli/releases/latest" --header 'authorization: Bearer ${{ secrets.GITHUB_TOKEN }}' | jq '.tag_name'| tr -d '",v') echo 
"DAPR_CLI_VER=$CLI_VERSION" >> $GITHUB_ENV echo "Found $CLI_VERSION" - name: Set up Python ${{ matrix.python_ver }} From 896c76119df39a37975ba4f70035f93debb992c5 Mon Sep 17 00:00:00 2001 From: Albert Callarisa Date: Fri, 14 Nov 2025 16:48:15 +0100 Subject: [PATCH 04/16] fix: Put dev to 1.16.0.dev (#859) Signed-off-by: Albert Callarisa Co-authored-by: Yaron Schneider --- dapr/version/version.py | 2 +- examples/demo_actor/demo_actor/requirements.txt | 2 +- examples/demo_workflow/demo_workflow/requirements.txt | 2 +- examples/invoke-simple/requirements.txt | 4 ++-- examples/w3c-tracing/requirements.txt | 4 ++-- examples/workflow/requirements.txt | 4 ++-- ext/dapr-ext-fastapi/dapr/ext/fastapi/version.py | 2 +- ext/dapr-ext-fastapi/setup.cfg | 2 +- ext/dapr-ext-grpc/dapr/ext/grpc/version.py | 2 +- ext/dapr-ext-grpc/setup.cfg | 2 +- ext/dapr-ext-workflow/dapr/ext/workflow/version.py | 2 +- ext/dapr-ext-workflow/setup.cfg | 2 +- ext/flask_dapr/flask_dapr/version.py | 2 +- ext/flask_dapr/setup.cfg | 2 +- 14 files changed, 17 insertions(+), 17 deletions(-) diff --git a/dapr/version/version.py b/dapr/version/version.py index 8c6c12960..c8ee50c56 100644 --- a/dapr/version/version.py +++ b/dapr/version/version.py @@ -13,4 +13,4 @@ limitations under the License. 
""" -__version__ = '1.16.1rc1' +__version__ = '1.16.0.dev' diff --git a/examples/demo_actor/demo_actor/requirements.txt b/examples/demo_actor/demo_actor/requirements.txt index 9496602ed..c4ec19d73 100644 --- a/examples/demo_actor/demo_actor/requirements.txt +++ b/examples/demo_actor/demo_actor/requirements.txt @@ -1 +1 @@ -dapr-ext-fastapi>=1.16.1rc1 +dapr-ext-fastapi>=1.16.0.dev diff --git a/examples/demo_workflow/demo_workflow/requirements.txt b/examples/demo_workflow/demo_workflow/requirements.txt index a70b02692..1e5f89620 100644 --- a/examples/demo_workflow/demo_workflow/requirements.txt +++ b/examples/demo_workflow/demo_workflow/requirements.txt @@ -1 +1 @@ -dapr-ext-workflow>=1.16.1rc1 +dapr-ext-workflow>=1.16.0.dev diff --git a/examples/invoke-simple/requirements.txt b/examples/invoke-simple/requirements.txt index e77f5d6e2..056db33d1 100644 --- a/examples/invoke-simple/requirements.txt +++ b/examples/invoke-simple/requirements.txt @@ -1,2 +1,2 @@ -dapr-ext-grpc >= 1.16.1rc1 -dapr >= 1.16.1rc1 +dapr-ext-grpc >= 1.16.0.dev +dapr >= 1.16.0.dev diff --git a/examples/w3c-tracing/requirements.txt b/examples/w3c-tracing/requirements.txt index 514e2606a..8b1f04b01 100644 --- a/examples/w3c-tracing/requirements.txt +++ b/examples/w3c-tracing/requirements.txt @@ -1,5 +1,5 @@ -dapr-ext-grpc >= 1.16.1rc1 -dapr >= 1.16.1rc1 +dapr-ext-grpc >= 1.16.0.dev +dapr >= 1.16.0.dev opentelemetry-sdk opentelemetry-instrumentation-grpc opentelemetry-exporter-zipkin diff --git a/examples/workflow/requirements.txt b/examples/workflow/requirements.txt index fab86e728..c5af70b9d 100644 --- a/examples/workflow/requirements.txt +++ b/examples/workflow/requirements.txt @@ -1,2 +1,2 @@ -dapr-ext-workflow>=1.16.1rc1 -dapr>=1.16.1rc1 +dapr-ext-workflow>=1.16.0.dev +dapr>=1.16.0.dev diff --git a/ext/dapr-ext-fastapi/dapr/ext/fastapi/version.py b/ext/dapr-ext-fastapi/dapr/ext/fastapi/version.py index 8c6c12960..c8ee50c56 100644 --- a/ext/dapr-ext-fastapi/dapr/ext/fastapi/version.py +++ 
b/ext/dapr-ext-fastapi/dapr/ext/fastapi/version.py @@ -13,4 +13,4 @@ limitations under the License. """ -__version__ = '1.16.1rc1' +__version__ = '1.16.0.dev' diff --git a/ext/dapr-ext-fastapi/setup.cfg b/ext/dapr-ext-fastapi/setup.cfg index 8b6080ebf..f0de1d350 100644 --- a/ext/dapr-ext-fastapi/setup.cfg +++ b/ext/dapr-ext-fastapi/setup.cfg @@ -24,7 +24,7 @@ python_requires = >=3.9 packages = find_namespace: include_package_data = True install_requires = - dapr >= 1.16.1rc1 + dapr >= 1.16.0.dev uvicorn >= 0.11.6 fastapi >= 0.60.1 diff --git a/ext/dapr-ext-grpc/dapr/ext/grpc/version.py b/ext/dapr-ext-grpc/dapr/ext/grpc/version.py index 8c6c12960..c8ee50c56 100644 --- a/ext/dapr-ext-grpc/dapr/ext/grpc/version.py +++ b/ext/dapr-ext-grpc/dapr/ext/grpc/version.py @@ -13,4 +13,4 @@ limitations under the License. """ -__version__ = '1.16.1rc1' +__version__ = '1.16.0.dev' diff --git a/ext/dapr-ext-grpc/setup.cfg b/ext/dapr-ext-grpc/setup.cfg index d08757c78..e2eaaf887 100644 --- a/ext/dapr-ext-grpc/setup.cfg +++ b/ext/dapr-ext-grpc/setup.cfg @@ -24,7 +24,7 @@ python_requires = >=3.9 packages = find_namespace: include_package_data = True install_requires = - dapr >= 1.16.1rc1 + dapr >= 1.16.0.dev cloudevents >= 1.0.0 [options.packages.find] diff --git a/ext/dapr-ext-workflow/dapr/ext/workflow/version.py b/ext/dapr-ext-workflow/dapr/ext/workflow/version.py index 8c6c12960..c8ee50c56 100644 --- a/ext/dapr-ext-workflow/dapr/ext/workflow/version.py +++ b/ext/dapr-ext-workflow/dapr/ext/workflow/version.py @@ -13,4 +13,4 @@ limitations under the License. 
""" -__version__ = '1.16.1rc1' +__version__ = '1.16.0.dev' diff --git a/ext/dapr-ext-workflow/setup.cfg b/ext/dapr-ext-workflow/setup.cfg index 83869566c..6efe6668c 100644 --- a/ext/dapr-ext-workflow/setup.cfg +++ b/ext/dapr-ext-workflow/setup.cfg @@ -24,7 +24,7 @@ python_requires = >=3.9 packages = find_namespace: include_package_data = True install_requires = - dapr >= 1.16.1rc1 + dapr >= 1.16.0.dev durabletask-dapr >= 0.2.0a9 [options.packages.find] diff --git a/ext/flask_dapr/flask_dapr/version.py b/ext/flask_dapr/flask_dapr/version.py index 8c6c12960..c8ee50c56 100644 --- a/ext/flask_dapr/flask_dapr/version.py +++ b/ext/flask_dapr/flask_dapr/version.py @@ -13,4 +13,4 @@ limitations under the License. """ -__version__ = '1.16.1rc1' +__version__ = '1.16.0.dev' diff --git a/ext/flask_dapr/setup.cfg b/ext/flask_dapr/setup.cfg index 531a9aea5..4bc95bc6f 100644 --- a/ext/flask_dapr/setup.cfg +++ b/ext/flask_dapr/setup.cfg @@ -26,4 +26,4 @@ include_package_data = true zip_safe = false install_requires = Flask >= 1.1 - dapr >= 1.16.1rc1 + dapr >= 1.16.0.dev From e67276a093762b5b146debe1adfbf35164f88bcf Mon Sep 17 00:00:00 2001 From: Filinto Duran <1373693+filintod@users.noreply.github.com> Date: Mon, 17 Nov 2025 10:42:18 -0600 Subject: [PATCH 05/16] remove python 3.9 (#860) * remove py39 Signed-off-by: Filinto Duran <1373693+filintod@users.noreply.github.com> * Apply suggestions from code review Fix yaml strings Signed-off-by: Albert Callarisa --------- Signed-off-by: Filinto Duran <1373693+filintod@users.noreply.github.com> Signed-off-by: Albert Callarisa Co-authored-by: Albert Callarisa Co-authored-by: Albert Callarisa --- .github/workflows/build-push-to-main.yaml | 10 +++++----- .github/workflows/build-tag.yaml | 10 +++++----- .github/workflows/build.yaml | 6 +++--- .github/workflows/validate_examples.yaml | 2 +- tox.ini | 4 ++-- 5 files changed, 16 insertions(+), 16 deletions(-) diff --git a/.github/workflows/build-push-to-main.yaml 
b/.github/workflows/build-push-to-main.yaml index 47273aa0a..b3c791785 100644 --- a/.github/workflows/build-push-to-main.yaml +++ b/.github/workflows/build-push-to-main.yaml @@ -11,10 +11,10 @@ jobs: runs-on: ubuntu-latest steps: - uses: actions/checkout@v5 - - name: Set up Python 3.9 + - name: Set up Python 3.10 uses: actions/setup-python@v6 with: - python-version: 3.9 + python-version: '3.10' - name: Install dependencies run: | python -m pip install --upgrade pip @@ -37,7 +37,7 @@ jobs: strategy: fail-fast: false matrix: - python_ver: ["3.9", "3.10", "3.11", "3.12", "3.13"] + python_ver: ["3.10", "3.11", "3.12", "3.13"] steps: - uses: actions/checkout@v5 - name: Set up Python ${{ matrix.python_ver }} @@ -64,10 +64,10 @@ jobs: TWINE_USERNAME: "__token__" steps: - uses: actions/checkout@v5 - - name: Set up Python 3.9 + - name: Set up Python 3.10 uses: actions/setup-python@v6 with: - python-version: 3.9 + python-version: '3.10' - name: Install dependencies run: | python -m pip install --upgrade pip diff --git a/.github/workflows/build-tag.yaml b/.github/workflows/build-tag.yaml index 176fbaf04..4033f9012 100644 --- a/.github/workflows/build-tag.yaml +++ b/.github/workflows/build-tag.yaml @@ -15,10 +15,10 @@ jobs: runs-on: ubuntu-latest steps: - uses: actions/checkout@v5 - - name: Set up Python 3.9 + - name: Set up Python 3.10 uses: actions/setup-python@v6 with: - python-version: 3.9 + python-version: '3.10' - name: Install dependencies run: | python -m pip install --upgrade pip @@ -41,7 +41,7 @@ jobs: strategy: fail-fast: false matrix: - python_ver: ["3.9", "3.10", "3.11", "3.12", "3.13"] + python_ver: ["3.10", "3.11", "3.12", "3.13"] steps: - uses: actions/checkout@v5 - name: Set up Python ${{ matrix.python_ver }} @@ -68,10 +68,10 @@ jobs: TWINE_USERNAME: "__token__" steps: - uses: actions/checkout@v5 - - name: Set up Python 3.9 + - name: Set up Python 3.10 uses: actions/setup-python@v6 with: - python-version: 3.9 + python-version: '3.10' - name: Install 
dependencies run: | python -m pip install --upgrade pip diff --git a/.github/workflows/build.yaml b/.github/workflows/build.yaml index e2a254237..fa89c39e9 100644 --- a/.github/workflows/build.yaml +++ b/.github/workflows/build.yaml @@ -17,10 +17,10 @@ jobs: runs-on: ubuntu-latest steps: - uses: actions/checkout@v5 - - name: Set up Python 3.9 + - name: Set up Python 3.10 uses: actions/setup-python@v6 with: - python-version: 3.9 + python-version: '3.10' - name: Install dependencies run: | python -m pip install --upgrade pip @@ -43,7 +43,7 @@ jobs: strategy: fail-fast: false matrix: - python_ver: ["3.9", "3.10", "3.11", "3.12", "3.13"] + python_ver: ["3.10", "3.11", "3.12", "3.13"] steps: - uses: actions/checkout@v5 - name: Set up Python ${{ matrix.python_ver }} diff --git a/.github/workflows/validate_examples.yaml b/.github/workflows/validate_examples.yaml index f87cd246f..25b686284 100644 --- a/.github/workflows/validate_examples.yaml +++ b/.github/workflows/validate_examples.yaml @@ -46,7 +46,7 @@ jobs: strategy: fail-fast: false matrix: - python_ver: ["3.9", "3.10", "3.11", "3.12", "3.13"] + python_ver: ["3.10", "3.11", "3.12", "3.13"] steps: - name: Parse repository_dispatch payload if: github.event_name == 'repository_dispatch' diff --git a/tox.ini b/tox.ini index 45c54da59..17041083b 100644 --- a/tox.ini +++ b/tox.ini @@ -1,8 +1,8 @@ [tox] skipsdist = True -minversion = 3.9.0 +minversion = 3.10.0 envlist = - py{39,310,311,312,313} + py{310,311,312,313} ruff, mypy, From cf1e5d807fb810a9c23cd964ccb521d5488fe6cb Mon Sep 17 00:00:00 2001 From: Yaron Schneider Date: Fri, 21 Nov 2025 00:52:22 -0800 Subject: [PATCH 06/16] Add dapr checkpointer for LangGraph (#862) * add dapr checkpointer for langgraph Signed-off-by: yaron2 * linter Signed-off-by: yaron2 * Update dapr-ext-langgraph/dapr/ext/langgraph/__init__.py Co-authored-by: Cassie Coyle Signed-off-by: Yaron Schneider * Update dapr-ext-langgraph/dapr/ext/langgraph/version.py Co-authored-by: Albert Callarisa 
Signed-off-by: Yaron Schneider * Update dapr-ext-langgraph/setup.cfg Co-authored-by: Albert Callarisa Signed-off-by: Yaron Schneider * Update dapr-ext-langgraph/setup.cfg Co-authored-by: Albert Callarisa Signed-off-by: Yaron Schneider * add CI steps Signed-off-by: yaron2 * use single client, minor improvements Signed-off-by: yaron2 * linter Signed-off-by: yaron2 * fix dir structure Signed-off-by: yaron2 * add tests Signed-off-by: yaron2 * linter Signed-off-by: yaron2 * Run langgraph unit tests with tox Signed-off-by: Albert Callarisa --------- Signed-off-by: yaron2 Signed-off-by: Yaron Schneider Signed-off-by: Albert Callarisa Co-authored-by: Cassie Coyle Co-authored-by: Albert Callarisa Co-authored-by: Albert Callarisa --- .github/workflows/build-push-to-main.yaml | 7 + .github/workflows/build-tag.yaml | 8 + README.md | 1 + ext/dapr-ext-langgraph/LICENSE | 203 ++++++++++++++++++ ext/dapr-ext-langgraph/README.rst | 22 ++ .../dapr/ext/langgraph/__init__.py | 21 ++ .../dapr/ext/langgraph/dapr_checkpointer.py | 164 ++++++++++++++ .../dapr/ext/langgraph/version.py | 16 ++ ext/dapr-ext-langgraph/setup.cfg | 36 ++++ ext/dapr-ext-langgraph/setup.py | 66 ++++++ ext/dapr-ext-langgraph/tests/__init__.py | 14 ++ .../tests/test_checkpointer.py | 150 +++++++++++++ tox.ini | 6 +- 13 files changed, 713 insertions(+), 1 deletion(-) create mode 100644 ext/dapr-ext-langgraph/LICENSE create mode 100644 ext/dapr-ext-langgraph/README.rst create mode 100644 ext/dapr-ext-langgraph/dapr/ext/langgraph/__init__.py create mode 100644 ext/dapr-ext-langgraph/dapr/ext/langgraph/dapr_checkpointer.py create mode 100644 ext/dapr-ext-langgraph/dapr/ext/langgraph/version.py create mode 100644 ext/dapr-ext-langgraph/setup.cfg create mode 100644 ext/dapr-ext-langgraph/setup.py create mode 100644 ext/dapr-ext-langgraph/tests/__init__.py create mode 100644 ext/dapr-ext-langgraph/tests/test_checkpointer.py diff --git a/.github/workflows/build-push-to-main.yaml b/.github/workflows/build-push-to-main.yaml 
index b3c791785..9d76d2694 100644 --- a/.github/workflows/build-push-to-main.yaml +++ b/.github/workflows/build-push-to-main.yaml @@ -106,3 +106,10 @@ jobs: cd ext/dapr-ext-fastapi python setup.py sdist bdist_wheel twine upload dist/* + - name: Build and publish dapr-ext-langgraph + env: + TWINE_PASSWORD: ${{ secrets.PYPI_UPLOAD_PASS }} + run: | + cd ext/dapr-ext-langgraph + python setup.py sdist bdist_wheel + twine upload dist/* diff --git a/.github/workflows/build-tag.yaml b/.github/workflows/build-tag.yaml index 4033f9012..322bb06d8 100644 --- a/.github/workflows/build-tag.yaml +++ b/.github/workflows/build-tag.yaml @@ -115,3 +115,11 @@ jobs: cd ext/dapr-ext-fastapi python setup.py sdist bdist_wheel twine upload dist/* + - name: Build and publish dapr-ext-langgraph + if: startsWith(github.ref_name, 'langgraph-v') + env: + TWINE_PASSWORD: ${{ secrets.PYPI_UPLOAD_PASS }} + run: | + cd ext/dapr-ext-langgraph + python setup.py sdist bdist_wheel + twine upload dist/* diff --git a/README.md b/README.md index a65b9ec51..30f65e216 100644 --- a/README.md +++ b/README.md @@ -86,6 +86,7 @@ pip3 install -e . pip3 install -e ./ext/dapr-ext-grpc/ pip3 install -e ./ext/dapr-ext-fastapi/ pip3 install -e ./ext/dapr-ext-workflow/ +pip3 install -e ./ext/dapr-ext-langgraph/ ``` 3. Install required packages diff --git a/ext/dapr-ext-langgraph/LICENSE b/ext/dapr-ext-langgraph/LICENSE new file mode 100644 index 000000000..be033a7fd --- /dev/null +++ b/ext/dapr-ext-langgraph/LICENSE @@ -0,0 +1,203 @@ +Apache License + Version 2.0, January 2004 + http://www.apache.org/licenses/ + + TERMS AND CONDITIONS FOR USE, REPRODUCTION, AND DISTRIBUTION + + 1. Definitions. + + "License" shall mean the terms and conditions for use, reproduction, + and distribution as defined by Sections 1 through 9 of this document. + + "Licensor" shall mean the copyright owner or entity authorized by + the copyright owner that is granting the License. 
+ + "Legal Entity" shall mean the union of the acting entity and all + other entities that control, are controlled by, or are under common + control with that entity. For the purposes of this definition, + "control" means (i) the power, direct or indirect, to cause the + direction or management of such entity, whether by contract or + otherwise, or (ii) ownership of fifty percent (50%) or more of the + outstanding shares, or (iii) beneficial ownership of such entity. + + "You" (or "Your") shall mean an individual or Legal Entity + exercising permissions granted by this License. + + "Source" form shall mean the preferred form for making modifications, + including but not limited to software source code, documentation + source, and configuration files. + + "Object" form shall mean any form resulting from mechanical + transformation or translation of a Source form, including but + not limited to compiled object code, generated documentation, + and conversions to other media types. + + "Work" shall mean the work of authorship, whether in Source or + Object form, made available under the License, as indicated by a + copyright notice that is included in or attached to the work + (an example is provided in the Appendix below). + + "Derivative Works" shall mean any work, whether in Source or Object + form, that is based on (or derived from) the Work and for which the + editorial revisions, annotations, elaborations, or other modifications + represent, as a whole, an original work of authorship. For the purposes + of this License, Derivative Works shall not include works that remain + separable from, or merely link (or bind by name) to the interfaces of, + the Work and Derivative Works thereof. 
+ + "Contribution" shall mean any work of authorship, including + the original version of the Work and any modifications or additions + to that Work or Derivative Works thereof, that is intentionally + submitted to Licensor for inclusion in the Work by the copyright owner + or by an individual or Legal Entity authorized to submit on behalf of + the copyright owner. For the purposes of this definition, "submitted" + means any form of electronic, verbal, or written communication sent + to the Licensor or its representatives, including but not limited to + communication on electronic mailing lists, source code control systems, + and issue tracking systems that are managed by, or on behalf of, the + Licensor for the purpose of discussing and improving the Work, but + excluding communication that is conspicuously marked or otherwise + designated in writing by the copyright owner as "Not a Contribution." + + "Contributor" shall mean Licensor and any individual or Legal Entity + on behalf of whom a Contribution has been received by Licensor and + subsequently incorporated within the Work. + + 2. Grant of Copyright License. Subject to the terms and conditions of + this License, each Contributor hereby grants to You a perpetual, + worldwide, non-exclusive, no-charge, royalty-free, irrevocable + copyright license to reproduce, prepare Derivative Works of, + publicly display, publicly perform, sublicense, and distribute the + Work and such Derivative Works in Source or Object form. + + 3. Grant of Patent License. 
Subject to the terms and conditions of + this License, each Contributor hereby grants to You a perpetual, + worldwide, non-exclusive, no-charge, royalty-free, irrevocable + (except as stated in this section) patent license to make, have made, + use, offer to sell, sell, import, and otherwise transfer the Work, + where such license applies only to those patent claims licensable + by such Contributor that are necessarily infringed by their + Contribution(s) alone or by combination of their Contribution(s) + with the Work to which such Contribution(s) was submitted. If You + institute patent litigation against any entity (including a + cross-claim or counterclaim in a lawsuit) alleging that the Work + or a Contribution incorporated within the Work constitutes direct + or contributory patent infringement, then any patent licenses + granted to You under this License for that Work shall terminate + as of the date such litigation is filed. + + 4. Redistribution. You may reproduce and distribute copies of the + Work or Derivative Works thereof in any medium, with or without + modifications, and in Source or Object form, provided that You + meet the following conditions: + + (a) You must give any other recipients of the Work or + Derivative Works a copy of this License; and + + (b) You must cause any modified files to carry prominent notices + stating that You changed the files; and + + (c) You must retain, in the Source form of any Derivative Works + that You distribute, all copyright, patent, trademark, and + attribution notices from the Source form of the Work, + excluding those notices that do not pertain to any part of + the Derivative Works; and + + (d) If the Work includes a "NOTICE" text file as part of its + distribution, then any Derivative Works that You distribute must + include a readable copy of the attribution notices contained + within such NOTICE file, excluding those notices that do not + pertain to any part of the Derivative Works, in at least one + of 
the following places: within a NOTICE text file distributed + as part of the Derivative Works; within the Source form or + documentation, if provided along with the Derivative Works; or, + within a display generated by the Derivative Works, if and + wherever such third-party notices normally appear. The contents + of the NOTICE file are for informational purposes only and + do not modify the License. You may add Your own attribution + notices within Derivative Works that You distribute, alongside + or as an addendum to the NOTICE text from the Work, provided + that such additional attribution notices cannot be construed + as modifying the License. + + You may add Your own copyright statement to Your modifications and + may provide additional or different license terms and conditions + for use, reproduction, or distribution of Your modifications, or + for any such Derivative Works as a whole, provided Your use, + reproduction, and distribution of the Work otherwise complies with + the conditions stated in this License. + + 5. Submission of Contributions. Unless You explicitly state otherwise, + any Contribution intentionally submitted for inclusion in the Work + by You to the Licensor shall be under the terms and conditions of + this License, without any additional terms or conditions. + Notwithstanding the above, nothing herein shall supersede or modify + the terms of any separate license agreement you may have executed + with Licensor regarding such Contributions. + + 6. Trademarks. This License does not grant permission to use the trade + names, trademarks, service marks, or product names of the Licensor, + except as required for reasonable and customary use in describing the + origin of the Work and reproducing the content of the NOTICE file. + + 7. Disclaimer of Warranty. 
Unless required by applicable law or + agreed to in writing, Licensor provides the Work (and each + Contributor provides its Contributions) on an "AS IS" BASIS, + WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or + implied, including, without limitation, any warranties or conditions + of TITLE, NON-INFRINGEMENT, MERCHANTABILITY, or FITNESS FOR A + PARTICULAR PURPOSE. You are solely responsible for determining the + appropriateness of using or redistributing the Work and assume any + risks associated with Your exercise of permissions under this License. + + 8. Limitation of Liability. In no event and under no legal theory, + whether in tort (including negligence), contract, or otherwise, + unless required by applicable law (such as deliberate and grossly + negligent acts) or agreed to in writing, shall any Contributor be + liable to You for damages, including any direct, indirect, special, + incidental, or consequential damages of any character arising as a + result of this License or out of the use or inability to use the + Work (including but not limited to damages for loss of goodwill, + work stoppage, computer failure or malfunction, or any and all + other commercial damages or losses), even if such Contributor + has been advised of the possibility of such damages. + + 9. Accepting Warranty or Additional Liability. While redistributing + the Work or Derivative Works thereof, You may choose to offer, + and charge a fee for, acceptance of support, warranty, indemnity, + or other liability obligations and/or rights consistent with this + License. However, in accepting such obligations, You may act only + on Your own behalf and on Your sole responsibility, not on behalf + of any other Contributor, and only if You agree to indemnify, + defend, and hold each Contributor harmless for any liability + incurred by, or claims asserted against, such Contributor by reason + of your accepting any such warranty or additional liability. 
+ + END OF TERMS AND CONDITIONS + + APPENDIX: How to apply the Apache License to your work. + + To apply the Apache License to your work, attach the following + boilerplate notice, with the fields enclosed by brackets "[]" + replaced with your own identifying information. (Don't include + the brackets!) The text should be enclosed in the appropriate + comment syntax for the file format. We also recommend that a + file or class name and description of purpose be included on the + same "printed page" as the copyright notice for easier + identification within third-party archives. + + Copyright 2021 The Dapr Authors. + + and others that have contributed code to the public domain. + + Licensed under the Apache License, Version 2.0 (the "License"); + you may not use this file except in compliance with the License. + You may obtain a copy of the License at + + http://www.apache.org/licenses/LICENSE-2.0 + + Unless required by applicable law or agreed to in writing, software + distributed under the License is distributed on an "AS IS" BASIS, + WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. + See the License for the specific language governing permissions and + limitations under the License. \ No newline at end of file diff --git a/ext/dapr-ext-langgraph/README.rst b/ext/dapr-ext-langgraph/README.rst new file mode 100644 index 000000000..85c101a65 --- /dev/null +++ b/ext/dapr-ext-langgraph/README.rst @@ -0,0 +1,22 @@ +dapr-ext-langgraph extension +============================ + +|pypi| + +.. 
|pypi| image:: https://badge.fury.io/py/dapr-ext-langgraph.svg + :target: https://pypi.org/project/dapr-ext-langgraph/ + +This is the Dapr Checkpointer extension for LangGraph + +Installation +------------ + +:: + + pip install dapr-ext-langgraph + +References +---------- + +* `Dapr <https://dapr.io/>`_ +* `Dapr Python-SDK <https://github.com/dapr/python-sdk>`_ diff --git a/ext/dapr-ext-langgraph/dapr/ext/langgraph/__init__.py b/ext/dapr-ext-langgraph/dapr/ext/langgraph/__init__.py new file mode 100644 index 000000000..4f2d3cf8b --- /dev/null +++ b/ext/dapr-ext-langgraph/dapr/ext/langgraph/__init__.py @@ -0,0 +1,21 @@ +# -*- coding: utf-8 -*- + +""" +Copyright 2025 The Dapr Authors +Licensed under the Apache License, Version 2.0 (the "License"); +you may not use this file except in compliance with the License. +You may obtain a copy of the License at + http://www.apache.org/licenses/LICENSE-2.0 +Unless required by applicable law or agreed to in writing, software +distributed under the License is distributed on an "AS IS" BASIS, +WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. +See the License for the specific language governing permissions and +limitations under the License. 
+""" + +# Import your main classes here +from dapr.ext.langgraph.dapr_checkpointer import DaprCheckpointer + +__all__ = [ + 'DaprCheckpointer', +] diff --git a/ext/dapr-ext-langgraph/dapr/ext/langgraph/dapr_checkpointer.py b/ext/dapr-ext-langgraph/dapr/ext/langgraph/dapr_checkpointer.py new file mode 100644 index 000000000..123b313d1 --- /dev/null +++ b/ext/dapr-ext-langgraph/dapr/ext/langgraph/dapr_checkpointer.py @@ -0,0 +1,164 @@ +import json +from typing import Any, Sequence, Tuple + +from langchain_core.load import dumps +from langchain_core.runnables import RunnableConfig + +from dapr.clients import DaprClient +from langgraph.checkpoint.base import BaseCheckpointSaver, Checkpoint, CheckpointTuple + + +class DaprCheckpointer(BaseCheckpointSaver[Checkpoint]): + """ + Dapr-backed LangGraph Checkpointer that persists checkpoints to a Dapr state store. + Compatible with LangGraph >= 0.3.6 and LangChain Core >= 1.0.0. + """ + + REGISTRY_KEY = 'dapr_checkpoint_registry' + + def __init__(self, store_name: str, key_prefix: str): + self.store_name = store_name + self.key_prefix = key_prefix + self.client = DaprClient() + + # helper: construct Dapr key for a thread + def _get_key(self, config: RunnableConfig) -> str: + thread_id = None + + if isinstance(config, dict): + thread_id = config.get('configurable', {}).get('thread_id') + + if not thread_id: + thread_id = config.get('thread_id') + + if not thread_id: + thread_id = 'default' + + return f'{self.key_prefix}:{thread_id}' + + # restore a checkpoint + def get_tuple(self, config: RunnableConfig) -> CheckpointTuple | None: + key = self._get_key(config) + + resp = self.client.get_state(store_name=self.store_name, key=key) + if not resp.data: + return None + + wrapper = json.loads(resp.data) + cp_data = wrapper.get('checkpoint', wrapper) + metadata = wrapper.get('metadata', {'step': 0}) + if 'step' not in metadata: + metadata['step'] = 0 + + cp = Checkpoint(**cp_data) + return CheckpointTuple( + config=config, + 
checkpoint=cp, + parent_config=None, + metadata=metadata, + ) + + # save a full checkpoint snapshot + def put( + self, + config: RunnableConfig, + checkpoint: Checkpoint, + parent_config: RunnableConfig | None, + metadata: dict[str, Any], + ) -> None: + key = self._get_key(config) + + checkpoint_serializable = { + 'v': checkpoint['v'], + 'id': checkpoint['id'], + 'ts': checkpoint['ts'], + 'channel_values': checkpoint['channel_values'], + 'channel_versions': checkpoint['channel_versions'], + 'versions_seen': checkpoint['versions_seen'], + } + + wrapper = {'checkpoint': checkpoint_serializable, 'metadata': metadata} + + self.client.save_state(self.store_name, key, dumps(wrapper)) + + reg_resp = self.client.get_state(store_name=self.store_name, key=self.REGISTRY_KEY) + registry = json.loads(reg_resp.data) if reg_resp.data else [] + + if key not in registry: + registry.append(key) + self.client.save_state(self.store_name, self.REGISTRY_KEY, json.dumps(registry)) + + # incremental persistence (for streamed runs) + def put_writes( + self, + config: RunnableConfig, + writes: Sequence[Tuple[str, Any]], + task_id: str, + task_path: str = '', + ) -> None: + _ = task_id, task_path + + key = self._get_key(config) + + resp = self.client.get_state(store_name=self.store_name, key=key) + if not resp.data: + return + + wrapper = json.loads(resp.data) + cp = wrapper.get('checkpoint', {}) + + for field, value in writes: + cp['channel_values'][field] = value + + wrapper['checkpoint'] = cp + self.client.save_state(self.store_name, key, json.dumps(wrapper)) + + # enumerate all saved checkpoints + def list(self, config: RunnableConfig) -> list[CheckpointTuple]: + reg_resp = self.client.get_state(store_name=self.store_name, key=self.REGISTRY_KEY) + if not reg_resp.data: + return [] + + keys = json.loads(reg_resp.data) + checkpoints: list[CheckpointTuple] = [] + + for key in keys: + cp_resp = self.client.get_state(store_name=self.store_name, key=key) + if not cp_resp.data: + continue + + 
wrapper = json.loads(cp_resp.data) + cp_data = wrapper.get('checkpoint', {}) + metadata = wrapper.get('metadata', {}) + cp = Checkpoint(**cp_data) + + checkpoints.append( + CheckpointTuple( + config=config, + checkpoint=cp, + parent_config=None, + metadata=metadata, + ) + ) + + return checkpoints + + # remove a checkpoint and update the registry + def delete_thread(self, config: RunnableConfig) -> None: + key = self._get_key(config) + + self.client.delete_state(store_name=self.store_name, key=key) + + reg_resp = self.client.get_state(store_name=self.store_name, key=self.REGISTRY_KEY) + if not reg_resp.data: + return + + registry = json.loads(reg_resp.data) + + if key in registry: + registry.remove(key) + self.client.save_state( + store_name=self.store_name, + key=self.REGISTRY_KEY, + value=json.dumps(registry), + ) diff --git a/ext/dapr-ext-langgraph/dapr/ext/langgraph/version.py b/ext/dapr-ext-langgraph/dapr/ext/langgraph/version.py new file mode 100644 index 000000000..dae1485d2 --- /dev/null +++ b/ext/dapr-ext-langgraph/dapr/ext/langgraph/version.py @@ -0,0 +1,16 @@ +# -*- coding: utf-8 -*- + +""" +Copyright 2025 The Dapr Authors +Licensed under the Apache License, Version 2.0 (the "License"); +you may not use this file except in compliance with the License. +You may obtain a copy of the License at + http://www.apache.org/licenses/LICENSE-2.0 +Unless required by applicable law or agreed to in writing, software +distributed under the License is distributed on an "AS IS" BASIS, +WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. +See the License for the specific language governing permissions and +limitations under the License. 
+""" + +__version__ = '1.16.0.dev' diff --git a/ext/dapr-ext-langgraph/setup.cfg b/ext/dapr-ext-langgraph/setup.cfg new file mode 100644 index 000000000..bb32e782c --- /dev/null +++ b/ext/dapr-ext-langgraph/setup.cfg @@ -0,0 +1,36 @@ +[metadata] +url = https://dapr.io/ +author = Dapr Authors +author_email = daprweb@microsoft.com +license = Apache +license_file = LICENSE +classifiers = + Development Status :: 5 - Production/Stable + Intended Audience :: Developers + License :: OSI Approved :: Apache Software License + Operating System :: OS Independent + Programming Language :: Python + Programming Language :: Python :: 3.10 + Programming Language :: Python :: 3.11 + Programming Language :: Python :: 3.12 + Programming Language :: Python :: 3.13 + Programming Language :: Python :: 3.14 +project_urls = + Documentation = https://github.com/dapr/docs + Source = https://github.com/dapr/python-sdk + +[options] +python_requires = >=3.10 +packages = find_namespace: +include_package_data = True +install_requires = + dapr >= 1.16.1rc1 + langgraph >= 0.3.6 + langchain >= 0.1.17 + +[options.packages.find] +include = + dapr.* + +exclude = + tests diff --git a/ext/dapr-ext-langgraph/setup.py b/ext/dapr-ext-langgraph/setup.py new file mode 100644 index 000000000..78c0daace --- /dev/null +++ b/ext/dapr-ext-langgraph/setup.py @@ -0,0 +1,66 @@ +# -*- coding: utf-8 -*- + +""" +Copyright 2025 The Dapr Authors +Licensed under the Apache License, Version 2.0 (the "License"); +you may not use this file except in compliance with the License. +You may obtain a copy of the License at + http://www.apache.org/licenses/LICENSE-2.0 +Unless required by applicable law or agreed to in writing, software +distributed under the License is distributed on an "AS IS" BASIS, +WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. +See the License for the specific language governing permissions and +limitations under the License. 
+""" + +import os + +from setuptools import setup + +# Load version in dapr package. +version_info = {} +with open('dapr/ext/langgraph/version.py') as fp: + exec(fp.read(), version_info) +__version__ = version_info['__version__'] + + +def is_release(): + return '.dev' not in __version__ + + +name = 'dapr-ext-langgraph' +version = __version__ +description = 'The official release of Dapr Python SDK LangGraph Extension.' +long_description = """ +This is the Dapr Checkpointer extension for LangGraph. + +Dapr is a portable, serverless, event-driven runtime that makes it easy for developers to +build resilient, stateless and stateful microservices that run on the cloud and edge and +embraces the diversity of languages and developer frameworks. + +Dapr codifies the best practices for building microservice applications into open, +independent, building blocks that enable you to build portable applications with the language +and framework of your choice. Each building block is independent and you can use one, some, +or all of them in your application. 
+""".lstrip() + +# Get build number from GITHUB_RUN_NUMBER environment variable +build_number = os.environ.get('GITHUB_RUN_NUMBER', '0') + +if not is_release(): + name += '-dev' + version = f'{__version__}{build_number}' + description = 'The developmental release for the Dapr Checkpointer extension for LangGraph' + long_description = ( + 'This is the developmental release for the Dapr Checkpointer extension for LangGraph' + ) + +print(f'package name: {name}, version: {version}', flush=True) + + +setup( + name=name, + version=version, + description=description, + long_description=long_description, +) diff --git a/ext/dapr-ext-langgraph/tests/__init__.py b/ext/dapr-ext-langgraph/tests/__init__.py new file mode 100644 index 000000000..ad87aedb7 --- /dev/null +++ b/ext/dapr-ext-langgraph/tests/__init__.py @@ -0,0 +1,14 @@ +# -*- coding: utf-8 -*- + +""" +Copyright 2025 The Dapr Authors +Licensed under the Apache License, Version 2.0 (the "License"); +you may not use this file except in compliance with the License. +You may obtain a copy of the License at + http://www.apache.org/licenses/LICENSE-2.0 +Unless required by applicable law or agreed to in writing, software +distributed under the License is distributed on an "AS IS" BASIS, +WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. +See the License for the specific language governing permissions and +limitations under the License. 
+""" diff --git a/ext/dapr-ext-langgraph/tests/test_checkpointer.py b/ext/dapr-ext-langgraph/tests/test_checkpointer.py new file mode 100644 index 000000000..05184f8aa --- /dev/null +++ b/ext/dapr-ext-langgraph/tests/test_checkpointer.py @@ -0,0 +1,150 @@ +# -*- coding: utf-8 -*- + +import json +import unittest +from datetime import datetime +from unittest import mock + +from dapr.ext.langgraph.dapr_checkpointer import DaprCheckpointer +from langgraph.checkpoint.base import Checkpoint + + +@mock.patch('dapr.ext.langgraph.dapr_checkpointer.DaprClient') +class DaprCheckpointerTest(unittest.TestCase): + def setUp(self): + self.store = 'statestore' + self.prefix = 'lg' + self.config = {'configurable': {'thread_id': 't1'}} + + self.checkpoint = Checkpoint( + v=1, + id='cp1', + ts=datetime.now().timestamp(), + channel_values={'a': 1}, + channel_versions={}, + versions_seen={}, + ) + + def test_get_tuple_returns_checkpoint(self, mock_client_cls): + mock_client = mock_client_cls.return_value + + wrapper = { + 'checkpoint': { + 'v': self.checkpoint['v'], + 'id': self.checkpoint['id'], + 'ts': self.checkpoint['ts'], + 'channel_values': self.checkpoint['channel_values'], + 'channel_versions': self.checkpoint['channel_versions'], + 'versions_seen': self.checkpoint['versions_seen'], + }, + 'metadata': {'step': 3}, + } + mock_client.get_state.return_value.data = json.dumps(wrapper) + + cp = DaprCheckpointer(self.store, self.prefix) + tup = cp.get_tuple(self.config) + + assert tup is not None + assert tup.checkpoint['id'] == 'cp1' + assert tup.metadata['step'] == 3 + + def test_get_tuple_none_when_missing(self, mock_client_cls): + mock_client = mock_client_cls.return_value + mock_client.get_state.return_value.data = None + + cp = DaprCheckpointer(self.store, self.prefix) + assert cp.get_tuple(self.config) is None + + def test_put_saves_checkpoint_and_registry(self, mock_client_cls): + mock_client = mock_client_cls.return_value + + mock_client.get_state.return_value.data = 
json.dumps([]) + + cp = DaprCheckpointer(self.store, self.prefix) + cp.put(self.config, self.checkpoint, None, {'step': 10}) + + first_call = mock_client.save_state.call_args_list[0][0] + assert first_call[0] == 'statestore' + assert first_call[1] == 'lg:t1' + saved_payload = json.loads(first_call[2]) + assert saved_payload['metadata']['step'] == 10 + + second_call = mock_client.save_state.call_args_list[1][0] + assert second_call[0] == 'statestore' + assert second_call[1] == DaprCheckpointer.REGISTRY_KEY + + def test_put_writes_updates_channel_values(self, mock_client_cls): + mock_client = mock_client_cls.return_value + + wrapper = { + 'checkpoint': { + 'v': 1, + 'id': 'cp1', + 'ts': 1000, + 'channel_values': {'a': 10}, + 'channel_versions': {}, + 'versions_seen': {}, + }, + 'metadata': {}, + } + mock_client.get_state.return_value.data = json.dumps(wrapper) + + cp = DaprCheckpointer(self.store, self.prefix) + cp.put_writes(self.config, writes=[('a', 99)], task_id='task1') + + # save_state is called with updated checkpoint + call = mock_client.save_state.call_args[0] + saved = json.loads(call[2]) + assert saved['checkpoint']['channel_values']['a'] == 99 + + def test_list_returns_all_checkpoints(self, mock_client_cls): + mock_client = mock_client_cls.return_value + + registry = ['lg:t1'] + cp_wrapper = { + 'checkpoint': { + 'v': 1, + 'id': 'cp1', + 'ts': 1000, + 'channel_values': {'x': 1}, + 'channel_versions': {}, + 'versions_seen': {}, + }, + 'metadata': {'step': 5}, + } + + mock_client.get_state.side_effect = [ + mock.Mock(data=json.dumps(registry)), + mock.Mock(data=json.dumps(cp_wrapper)), + ] + + cp = DaprCheckpointer(self.store, self.prefix) + lst = cp.list(self.config) + + assert len(lst) == 1 + assert lst[0].checkpoint['id'] == 'cp1' + assert lst[0].metadata['step'] == 5 + + def test_delete_thread_removes_key_and_updates_registry(self, mock_client_cls): + mock_client = mock_client_cls.return_value + + registry = ['lg:t1'] + 
mock_client.get_state.return_value.data = json.dumps(registry) + + cp = DaprCheckpointer(self.store, self.prefix) + cp.delete_thread(self.config) + + mock_client.delete_state.assert_called_once_with( + store_name='statestore', + key='lg:t1', + ) + + mock_client.save_state.assert_called_with( + store_name='statestore', + key=DaprCheckpointer.REGISTRY_KEY, + value=json.dumps([]), + ) + + +if __name__ == '__main__': + unittest.main() diff --git a/tox.ini b/tox.ini index 17041083b..7c31dd8a3 100644 --- a/tox.ini +++ b/tox.ini @@ -15,6 +15,7 @@ commands = coverage run -a -m unittest discover -v ./ext/dapr-ext-workflow/tests coverage run -a -m unittest discover -v ./ext/dapr-ext-grpc/tests coverage run -a -m unittest discover -v ./ext/dapr-ext-fastapi/tests + coverage run -a -m unittest discover -v ./ext/dapr-ext-langgraph/tests coverage run -a -m unittest discover -v ./ext/flask_dapr/tests coverage xml commands_pre = @@ -22,6 +23,7 @@ commands_pre = pip3 install -e {toxinidir}/ext/dapr-ext-workflow/ pip3 install -e {toxinidir}/ext/dapr-ext-grpc/ pip3 install -e {toxinidir}/ext/dapr-ext-fastapi/ + pip3 install -e {toxinidir}/ext/dapr-ext-langgraph/ pip3 install -e {toxinidir}/ext/flask_dapr/ [testenv:ruff] @@ -65,6 +67,7 @@ commands_pre = pip3 install -e {toxinidir}/ext/dapr-ext-workflow/ pip3 install -e {toxinidir}/ext/dapr-ext-grpc/ pip3 install -e {toxinidir}/ext/dapr-ext-fastapi/ + pip3 install -e {toxinidir}/ext/dapr-ext-langgraph/ allowlist_externals=* [testenv:example-component] @@ -84,6 +87,7 @@ commands_pre = pip3 install -e {toxinidir}/ext/dapr-ext-workflow/ pip3 install -e {toxinidir}/ext/dapr-ext-grpc/ pip3 install -e {toxinidir}/ext/dapr-ext-fastapi/ + pip3 install -e {toxinidir}/ext/dapr-ext-langgraph/ allowlist_externals=* [testenv:type] @@ -97,7 +101,7 @@ commands_pre = pip3 install -e {toxinidir}/ext/dapr-ext-workflow/ pip3 install -e {toxinidir}/ext/dapr-ext-grpc/ pip3 install -e {toxinidir}/ext/dapr-ext-fastapi/ - + pip3 install -e 
{toxinidir}/ext/dapr-ext-langgraph/ [testenv:doc] basepython = python3 usedevelop = False From e675288c7adf38afd2719088bd08462dc1c2c1d0 Mon Sep 17 00:00:00 2001 From: "dependabot[bot]" <49699333+dependabot[bot]@users.noreply.github.com> Date: Fri, 21 Nov 2025 15:02:30 +0100 Subject: [PATCH 07/16] Bump actions/checkout from 5 to 6 (#867) Bumps [actions/checkout](https://github.com/actions/checkout) from 5 to 6. - [Release notes](https://github.com/actions/checkout/releases) - [Changelog](https://github.com/actions/checkout/blob/main/CHANGELOG.md) - [Commits](https://github.com/actions/checkout/compare/v5...v6) --- updated-dependencies: - dependency-name: actions/checkout dependency-version: '6' dependency-type: direct:production update-type: version-update:semver-major ... Signed-off-by: dependabot[bot] Co-authored-by: dependabot[bot] <49699333+dependabot[bot]@users.noreply.github.com> --- .github/workflows/build-push-to-main.yaml | 6 +++--- .github/workflows/build-tag.yaml | 6 +++--- .github/workflows/build.yaml | 4 ++-- .github/workflows/dapr-bot-schedule.yml | 2 +- .github/workflows/fossa.yaml | 2 +- .github/workflows/validate_examples.yaml | 6 +++--- 6 files changed, 13 insertions(+), 13 deletions(-) diff --git a/.github/workflows/build-push-to-main.yaml b/.github/workflows/build-push-to-main.yaml index 9d76d2694..93bfbff2a 100644 --- a/.github/workflows/build-push-to-main.yaml +++ b/.github/workflows/build-push-to-main.yaml @@ -10,7 +10,7 @@ jobs: lint: runs-on: ubuntu-latest steps: - - uses: actions/checkout@v5 + - uses: actions/checkout@v6 - name: Set up Python 3.10 uses: actions/setup-python@v6 with: @@ -39,7 +39,7 @@ jobs: matrix: python_ver: ["3.10", "3.11", "3.12", "3.13"] steps: - - uses: actions/checkout@v5 + - uses: actions/checkout@v6 - name: Set up Python ${{ matrix.python_ver }} uses: actions/setup-python@v6 with: @@ -63,7 +63,7 @@ jobs: env: TWINE_USERNAME: "__token__" steps: - - uses: actions/checkout@v5 + - uses: actions/checkout@v6 - name: Set 
up Python 3.10 uses: actions/setup-python@v6 with: diff --git a/.github/workflows/build-tag.yaml b/.github/workflows/build-tag.yaml index 322bb06d8..ebc4b129f 100644 --- a/.github/workflows/build-tag.yaml +++ b/.github/workflows/build-tag.yaml @@ -14,7 +14,7 @@ jobs: lint: runs-on: ubuntu-latest steps: - - uses: actions/checkout@v5 + - uses: actions/checkout@v6 - name: Set up Python 3.10 uses: actions/setup-python@v6 with: @@ -43,7 +43,7 @@ jobs: matrix: python_ver: ["3.10", "3.11", "3.12", "3.13"] steps: - - uses: actions/checkout@v5 + - uses: actions/checkout@v6 - name: Set up Python ${{ matrix.python_ver }} uses: actions/setup-python@v6 with: @@ -67,7 +67,7 @@ jobs: env: TWINE_USERNAME: "__token__" steps: - - uses: actions/checkout@v5 + - uses: actions/checkout@v6 - name: Set up Python 3.10 uses: actions/setup-python@v6 with: diff --git a/.github/workflows/build.yaml b/.github/workflows/build.yaml index fa89c39e9..7e03d1b80 100644 --- a/.github/workflows/build.yaml +++ b/.github/workflows/build.yaml @@ -16,7 +16,7 @@ jobs: lint: runs-on: ubuntu-latest steps: - - uses: actions/checkout@v5 + - uses: actions/checkout@v6 - name: Set up Python 3.10 uses: actions/setup-python@v6 with: @@ -45,7 +45,7 @@ jobs: matrix: python_ver: ["3.10", "3.11", "3.12", "3.13"] steps: - - uses: actions/checkout@v5 + - uses: actions/checkout@v6 - name: Set up Python ${{ matrix.python_ver }} uses: actions/setup-python@v6 with: diff --git a/.github/workflows/dapr-bot-schedule.yml b/.github/workflows/dapr-bot-schedule.yml index eb9579bdd..918c163ec 100644 --- a/.github/workflows/dapr-bot-schedule.yml +++ b/.github/workflows/dapr-bot-schedule.yml @@ -24,7 +24,7 @@ jobs: runs-on: ubuntu-latest steps: - name: Checkout repo - uses: actions/checkout@v5 + uses: actions/checkout@v6 - name: Install dependencies run: pip install PyGithub - name: Automerge and update diff --git a/.github/workflows/fossa.yaml b/.github/workflows/fossa.yaml index f9eb56f5a..18b45526c 100644 --- 
a/.github/workflows/fossa.yaml +++ b/.github/workflows/fossa.yaml @@ -40,7 +40,7 @@ jobs: FOSSA_API_KEY: b88e1f4287c3108c8751bf106fb46db6 # This is a push-only token that is safe to be exposed. steps: - name: "Checkout code" - uses: actions/checkout@v5 + uses: actions/checkout@v6 - name: "Run FOSSA Scan" uses: fossas/fossa-action@v1.7.0 # Use a specific version if locking is preferred diff --git a/.github/workflows/validate_examples.yaml b/.github/workflows/validate_examples.yaml index 25b686284..b4fcc7929 100644 --- a/.github/workflows/validate_examples.yaml +++ b/.github/workflows/validate_examples.yaml @@ -58,7 +58,7 @@ jobs: fi - name: Check out code onto GOPATH - uses: actions/checkout@v5 + uses: actions/checkout@v6 with: repository: ${{ env.CHECKOUT_REPO }} ref: ${{ env.CHECKOUT_REF }} @@ -89,14 +89,14 @@ jobs: with: go-version: ${{ env.GOVER }} - name: Checkout Dapr CLI repo to override dapr command. - uses: actions/checkout@v5 + uses: actions/checkout@v6 if: env.DAPR_CLI_REF != '' with: repository: dapr/cli ref: ${{ env.DAPR_CLI_REF }} path: cli - name: Checkout Dapr repo to override daprd. - uses: actions/checkout@v5 + uses: actions/checkout@v6 if: env.DAPR_REF != '' with: repository: dapr/dapr From 19f935752cf89d7b2a9b89e74d6c13c6729bef55 Mon Sep 17 00:00:00 2001 From: "dependabot[bot]" <49699333+dependabot[bot]@users.noreply.github.com> Date: Fri, 21 Nov 2025 15:03:50 +0100 Subject: [PATCH 08/16] Bump mypy-protobuf from 3.6.0 to 3.7.0 (#864) Bumps [mypy-protobuf](https://github.com/nipunn1313/mypy-protobuf) from 3.6.0 to 3.7.0. - [Changelog](https://github.com/nipunn1313/mypy-protobuf/blob/main/CHANGELOG.md) - [Commits](https://github.com/nipunn1313/mypy-protobuf/compare/v3.6.0...v3.7.0) --- updated-dependencies: - dependency-name: mypy-protobuf dependency-version: 3.7.0 dependency-type: direct:production update-type: version-update:semver-minor ... 
Signed-off-by: dependabot[bot] Co-authored-by: dependabot[bot] <49699333+dependabot[bot]@users.noreply.github.com> Co-authored-by: Albert Callarisa --- tools/requirements.txt | 2 +- 1 file changed, 1 insertion(+), 1 deletion(-) diff --git a/tools/requirements.txt b/tools/requirements.txt index f005610f8..e92160336 100644 --- a/tools/requirements.txt +++ b/tools/requirements.txt @@ -1,2 +1,2 @@ grpcio-tools==1.62.3 -mypy-protobuf==3.6.0 +mypy-protobuf==3.7.0 From 9e73bf59dd50fb0fd1f7e55e2dd36c90bff84c05 Mon Sep 17 00:00:00 2001 From: Marc Duiker Date: Mon, 24 Nov 2025 13:58:31 +0100 Subject: [PATCH 09/16] Remove SDK docs since they're migrated to main docs (#866) Signed-off-by: Marc Duiker --- daprdocs/README.md | 25 - .../python-contributing.md | 27 - daprdocs/content/en/python-sdk-docs/_index.md | 157 ----- .../en/python-sdk-docs/conversation.md | 295 --------- .../en/python-sdk-docs/python-actor.md | 130 ---- .../en/python-sdk-docs/python-client.md | 601 ------------------ .../python-sdk-extensions/_index.md | 7 - .../python-sdk-extensions/python-fastapi.md | 115 ---- .../python-sdk-extensions/python-flask.md | 60 -- .../python-sdk-extensions/python-grpc.md | 118 ---- .../python-workflow-ext/_index.md | 105 --- .../python-workflow-ext/python-workflow.md | 166 ----- 12 files changed, 1806 deletions(-) delete mode 100644 daprdocs/README.md delete mode 100644 daprdocs/content/en/python-sdk-contributing/python-contributing.md delete mode 100644 daprdocs/content/en/python-sdk-docs/_index.md delete mode 100644 daprdocs/content/en/python-sdk-docs/conversation.md delete mode 100644 daprdocs/content/en/python-sdk-docs/python-actor.md delete mode 100644 daprdocs/content/en/python-sdk-docs/python-client.md delete mode 100644 daprdocs/content/en/python-sdk-docs/python-sdk-extensions/_index.md delete mode 100644 daprdocs/content/en/python-sdk-docs/python-sdk-extensions/python-fastapi.md delete mode 100644 
daprdocs/content/en/python-sdk-docs/python-sdk-extensions/python-flask.md delete mode 100644 daprdocs/content/en/python-sdk-docs/python-sdk-extensions/python-grpc.md delete mode 100644 daprdocs/content/en/python-sdk-docs/python-sdk-extensions/python-workflow-ext/_index.md delete mode 100644 daprdocs/content/en/python-sdk-docs/python-sdk-extensions/python-workflow-ext/python-workflow.md diff --git a/daprdocs/README.md b/daprdocs/README.md deleted file mode 100644 index 5213ae214..000000000 --- a/daprdocs/README.md +++ /dev/null @@ -1,25 +0,0 @@ -# Dapr Python SDK documentation - -This page covers how the documentation is structured for the Dapr Python SDK. - -## Dapr Docs - -All Dapr documentation is hosted at [docs.dapr.io](https://docs.dapr.io), including the docs for the [Python SDK](https://docs.dapr.io/developing-applications/sdks/python/). Head over there if you want to read the docs. - -### Python SDK docs source - -Although the docs site code and content is in the [docs repo](https://github.com/dapr/docs), the Python SDK content and images are within the `content` and `static` directories, respectively. - -This allows separation of roles and expertise between maintainers, and makes it easy to find the docs files you are looking for. - -## Writing Python SDK docs - -To get up and running to write Python SDK docs, visit the [docs repo](https://github.com/dapr/docs) to initialize your environment. It will clone both the docs repo and this repo, so you can make changes and see it rendered within the site instantly, as well as commit and PR into this repo. - -Make sure to read the [docs contributing guide](https://docs.dapr.io/contributing/contributing-docs/) for information on style/semantics/etc. - -## Docs architecture - -The docs site is built on [Hugo](https://gohugo.io), which lives in the docs repo. This repo is setup as a git submodule so that when the repo is cloned and initialized, the python repo, along with the docs, are cloned as well. 
- -Then, in the Hugo configuration file, the `daprdocs/content` and `daprdocs/static` directories are redirected to the `daprdocs/developing-applications/sdks/python` and `static/python` directories, respectively. Thus, all the content within this repo is folded into the main docs site. \ No newline at end of file diff --git a/daprdocs/content/en/python-sdk-contributing/python-contributing.md b/daprdocs/content/en/python-sdk-contributing/python-contributing.md deleted file mode 100644 index fe22c10bd..000000000 --- a/daprdocs/content/en/python-sdk-contributing/python-contributing.md +++ /dev/null @@ -1,27 +0,0 @@ ---- -type: docs -title: "Contributing to the Python SDK" -linkTitle: "Python SDK" -weight: 3000 -description: Guidelines for contributing to the Dapr Python SDK ---- - -When contributing to the [Python SDK](https://github.com/dapr/python-sdk) the following rules and best-practices should be followed. - -## Examples - -The `examples` directory contains code samples for users to run to try out specific functionality of the various Python SDK packages and extensions. When writing new and updated samples keep in mind: - -- All examples should be runnable on Windows, Linux, and MacOS. While Python code is consistent among operating systems, any pre/post example commands should provide options through [tabpane]({{% ref "contributing-docs.md#tabbed-content" %}}) -- Contain steps to download/install any required pre-requisites. Someone coming in with a fresh OS install should be able to start on the example and complete it without an error. Links to external download pages are fine. - -## Docs - -The `daprdocs` directory contains the markdown files that are rendered into the [Dapr Docs](https://docs.dapr.io) website. When the documentation website is built this repo is cloned and configured so that its contents are rendered with the docs content. 
When writing docs keep in mind: - - - All rules in the [docs guide]({{% ref contributing-docs.md %}}) should be followed in addition to these. - - All files and directories should be prefixed with `python-` to ensure all file/directory names are globally unique across all Dapr documentation. - -## Github Dapr Bot Commands - -Checkout the [daprbot documentation](https://docs.dapr.io/contributing/daprbot/) for Github commands you can run in this repo for common tasks. For example, you can run the `/assign` (as a comment on an issue) to assign issues to a user or group of users. \ No newline at end of file diff --git a/daprdocs/content/en/python-sdk-docs/_index.md b/daprdocs/content/en/python-sdk-docs/_index.md deleted file mode 100644 index b8689eb99..000000000 --- a/daprdocs/content/en/python-sdk-docs/_index.md +++ /dev/null @@ -1,157 +0,0 @@ ---- -type: docs -title: "Dapr Python SDK" -linkTitle: "Python" -weight: 1000 -description: Python SDK packages for developing Dapr applications -no_list: true -cascade: - github_repo: https://github.com/dapr/python-sdk - github_subdir: daprdocs/content/en/python-sdk-docs - path_base_for_github_subdir: content/en/developing-applications/sdks/python/ - github_branch: master ---- - -Dapr offers a variety of subpackages to help with the development of Python applications. Using them you can create Python clients, servers, and virtual actors with Dapr. - -## Prerequisites - -- [Dapr CLI]({{% ref install-dapr-cli.md %}}) installed -- Initialized [Dapr environment]({{% ref install-dapr-selfhost.md %}}) -- [Python 3.9+](https://www.python.org/downloads/) installed - -## Installation - -To get started with the Python SDK, install the main Dapr Python SDK package. 
- -{{< tabpane text=true >}} - -{{% tab header="Stable" %}} - -```bash -pip install dapr -``` -{{% /tab %}} - -{{% tab header="Development" %}} - -> **Note:** The development package will contain features and behavior that will be compatible with the pre-release version of the Dapr runtime. Make sure to uninstall any stable versions of the Python SDK before installing the dapr-dev package. - -```bash -pip install dapr-dev -``` - -{{% /tab %}} - -{{< /tabpane >}} - - -## Available subpackages - -### SDK imports - -Python SDK imports are subpackages included with the main SDK install, but need to be imported when used. The most common imports provided by the Dapr Python SDK are: - -
-
-
-
Client
-

Write Python applications to interact with a Dapr sidecar and other Dapr applications, including stateful virtual actors in Python

- -
-
-
-
-
Actors
-

Create and interact with Dapr's Actor framework.

- -
-
-
-
-
Conversation
-

Use the Dapr Conversation API (Alpha) for LLM interactions, tools, and multi-turn flows.

- -
-
-
- -Learn more about _all_ of the [available Dapr Python SDK imports](https://github.com/dapr/python-sdk/tree/master/dapr). - -### SDK extensions - -SDK extensions mainly work as utilities for receiving pub/sub events, programatically creating pub/sub subscriptions, and handling input binding events. While you can acheive all of these tasks without an extension, using a Python SDK extension proves convenient. - -
-
-
-
gRPC
-

Create Dapr services with the gRPC server extension.

- -
-
-
-
-
FastAPI
-

Integrate with Dapr Python virtual actors and pub/sub using the Dapr FastAPI extension.

- -
-
-
-
-
Flask
-

Integrate with Dapr Python virtual actors using the Dapr Flask extension.

- -
-
-
-
-
Workflow
-

Author workflows that work with other Dapr APIs in Python.

- -
-
-
- -Learn more about [the Dapr Python SDK extensions](https://github.com/dapr/python-sdk/tree/master/ext). - -## Try it out - -Clone the Python SDK repo. - -```bash -git clone https://github.com/dapr/python-sdk.git -``` - -Walk through the Python quickstarts, tutorials, and examples to see Dapr in action: - -| SDK samples | Description | -| ----------- | ----------- | -| [Quickstarts]({{% ref quickstarts %}}) | Experience Dapr's API building blocks in just a few minutes using the Python SDK. | -| [SDK samples](https://github.com/dapr/python-sdk/tree/master/examples) | Clone the SDK repo to try out some examples and get started. | -| [Bindings tutorial](https://github.com/dapr/quickstarts/tree/master/tutorials/bindings) | See how Dapr Python SDK works alongside other Dapr SDKs to enable bindings. | -| [Distributed Calculator tutorial](https://github.com/dapr/quickstarts/tree/master/tutorials/distributed-calculator/python) | Use the Dapr Python SDK to handle method invocation and state persistent capabilities. | -| [Hello World tutorial](https://github.com/dapr/quickstarts/tree/master/tutorials/hello-world) | Learn how to get Dapr up and running locally on your machine with the Python SDK. | -| [Hello Kubernetes tutorial](https://github.com/dapr/quickstarts/tree/master/tutorials/hello-kubernetes) | Get up and running with the Dapr Python SDK in a Kubernetes cluster. | -| [Observability tutorial](https://github.com/dapr/quickstarts/tree/master/tutorials/observability) | Explore Dapr's metric collection, tracing, logging and health check capabilities using the Python SDK. | -| [Pub/sub tutorial](https://github.com/dapr/quickstarts/tree/master/tutorials/pub-sub) | See how Dapr Python SDK works alongside other Dapr SDKs to enable pub/sub applications. | - - -## More information - -
-
-
-
Serialization
-

Learn more about serialization in Dapr SDKs.

- -
-
-
-
-
PyPI
-

Python Package Index

- -
-
-
diff --git a/daprdocs/content/en/python-sdk-docs/conversation.md b/daprdocs/content/en/python-sdk-docs/conversation.md deleted file mode 100644 index db67a6c49..000000000 --- a/daprdocs/content/en/python-sdk-docs/conversation.md +++ /dev/null @@ -1,295 +0,0 @@ -title: "Conversation API (Python) – Recommended Usage" -linkTitle: "Conversation" -weight: 11000 -type: docs -description: Recommended patterns for using Dapr Conversation API in Python with and without tools, including multi‑turn flows and safety guidance. ---- - -The Dapr Conversation API is currently in alpha. This page presents the recommended, minimal patterns to use it effectively with the Python SDK: -- Plain requests (no tools) -- Requests with tools (functions as tools) -- Multi‑turn flows with tool execution -- Async variants -- Important safety notes for executing tool calls - -## Prerequisites - -- [Dapr CLI]({{% ref install-dapr-cli.md %}}) installed -- Initialized [Dapr environment]({{% ref install-dapr-selfhost.md %}}) -- [Python 3.9+](https://www.python.org/downloads/) installed -- [Dapr Python package]({{% ref "python#installation" %}}) installed -- A configured LLM component (for example, OpenAI or Azure OpenAI) in your Dapr environment - -For full, end‑to‑end flows and provider setup, see: -- The SDK examples under Conversation: - - [TOOL-CALL-QUICKSTART.md](https://github.com/dapr/python-sdk/blob/main/examples/conversation/TOOL-CALL-QUICKSTART.md) - - [real_llm_providers_example.py](https://github.com/dapr/python-sdk/blob/main/examples/conversation/real_llm_providers_example.py) - -## Plain conversation (no tools) - -```python -from dapr.clients import DaprClient -from dapr.clients.grpc import conversation - -# Build a single‑turn Alpha2 input -user_msg = conversation.create_user_message("What's Dapr?") -alpha2_input = conversation.ConversationInputAlpha2(messages=[user_msg]) - -with DaprClient() as client: - resp = client.converse_alpha2( - name="echo", # replace with your LLM component 
name - inputs=[alpha2_input], - temperature=1, - ) - - for msg in resp.to_assistant_messages(): - if msg.of_assistant.content: - print(msg.of_assistant.content[0].text) -``` - -Key points: -- Use `conversation.create_user_message` to build messages. -- Wrap into `ConversationInputAlpha2(messages=[...])` and pass to `converse_alpha2`. -- Use `response.to_assistant_messages()` to iterate assistant outputs. - -## Tools: decorator‑based (recommended) - -Decorator-based tools offer a clean, ergonomic approach. Define a function with clear type hints and detail docstring, this is important for the LLM to understand how or when to invoke the tool; -decorate it with `@conversation.tool`. Registered tools can be passed to the LLM and invoked via tool calls. - -```python -from dapr.clients import DaprClient -from dapr.clients.grpc import conversation - -@conversation.tool -def get_weather(location: str, unit: str = 'fahrenheit') -> str: - """Get current weather for a location.""" - # Replace with a real implementation - return f"Weather in {location} (unit={unit})" - -user_msg = conversation.create_user_message("What's the weather in Paris?") -alpha2_input = conversation.ConversationInputAlpha2(messages=[user_msg]) - -with DaprClient() as client: - response = client.converse_alpha2( - name="openai", # your LLM component - inputs=[alpha2_input], - tools=conversation.get_registered_tools(), # tools registered by @conversation.tool - tool_choice='auto', - temperature=1, - ) - - # Inspect assistant messages, including any tool calls - for msg in response.to_assistant_messages(): - if msg.of_assistant.tool_calls: - for tc in msg.of_assistant.tool_calls: - print(f"Tool call: {tc.function.name} args={tc.function.arguments}") - elif msg.of_assistant.content: - print(msg.of_assistant.content[0].text) -``` - -Notes: -- Use `conversation.get_registered_tools()` to collect all `@conversation.tool` decorated functions. -- The binder validates/coerces params using your function signature. 
Keep annotations accurate. - -## Minimal multi‑turn with tools - -This is the go‑to loop for tool‑using conversations: - -{{% alert title="Warning" color="warning" %}} -Do not blindly auto‑execute tool calls returned by the LLM unless you trust all tools registered. Treat tool names and arguments as untrusted input. -- Validate inputs and enforce guardrails (allow‑listed tools, argument schemas, side‑effect constraints). -- For async or I/O‑bound tools, prefer `conversation.execute_registered_tool_async(..., timeout=...)` and set conservative timeouts. -- Consider adding a policy layer or a user confirmation step before execution in sensitive contexts. -- Log and monitor tool usage; fail closed when validation fails. -{{% /alert %}} - -```python -from dapr.clients import DaprClient -from dapr.clients.grpc import conversation - -@conversation.tool -def get_weather(location: str, unit: str = 'fahrenheit') -> str: - return f"Weather in {location} (unit={unit})" - -history: list[conversation.ConversationMessage] = [ - conversation.create_user_message("What's the weather in San Francisco?")] - -with DaprClient() as client: - # Turn 1 - resp1 = client.converse_alpha2( - name="openai", - inputs=[conversation.ConversationInputAlpha2(messages=history)], - tools=conversation.get_registered_tools(), - tool_choice='auto', - temperature=1, - ) - - # Append assistant messages; execute tool calls; append tool results - for msg in resp1.to_assistant_messages(): - history.append(msg) - for tc in msg.of_assistant.tool_calls: - # IMPORTANT: validate inputs and enforce guardrails in production - tool_output = conversation.execute_registered_tool( - tc.function.name, tc.function.arguments - ) - history.append( - conversation.create_tool_message( - tool_id=tc.id, name=tc.function.name, content=str(tool_output) - ) - ) - - # Turn 2 (LLM sees tool result) - history.append(conversation.create_user_message("Should I bring an umbrella?")) - resp2 = client.converse_alpha2( - name="openai", - 
inputs=[conversation.ConversationInputAlpha2(messages=history)], - tools=conversation.get_registered_tools(), - temperature=1, - ) - - for msg in resp2.to_assistant_messages(): - history.append(msg) - if not msg.of_assistant.tool_calls and msg.of_assistant.content: - print(msg.of_assistant.content[0].text) -``` - -Tips: -- Always append assistant messages to history. -- Execute each tool call (with validation) and append a tool message with the tool output. -- The next turn includes these tool results so the LLM can reason with them. - -## Functions as tools: alternatives - -When decorators aren’t practical, two options exist. - -A) Automatic schema from a typed function: - -```python -from enum import Enum -from dapr.clients.grpc import conversation - -class Units(Enum): - CELSIUS = 'celsius' - FAHRENHEIT = 'fahrenheit' - -def get_weather(location: str, unit: Units = Units.FAHRENHEIT) -> str: - return f"Weather in {location}" - -fn = conversation.ConversationToolsFunction.from_function(get_weather) -weather_tool = conversation.ConversationTools(function=fn) -``` - -B) Manual JSON Schema (fallback): - -```python -from dapr.clients.grpc import conversation - -fn = conversation.ConversationToolsFunction( - name='get_weather', - description='Get current weather', - parameters={ - 'type': 'object', - 'properties': { - 'location': {'type': 'string'}, - 'unit': {'type': 'string', 'enum': ['celsius', 'fahrenheit']}, - }, - 'required': ['location'], - }, -) -weather_tool = conversation.ConversationTools(function=fn) -``` - -## Async variant - -Use the asynchronous client and async tool execution helpers as needed. 
- -```python -import asyncio -from dapr.aio.clients import DaprClient as AsyncDaprClient -from dapr.clients.grpc import conversation - -@conversation.tool -def get_time() -> str: - return '2025-01-01T12:00:00Z' - -async def main(): - async with AsyncDaprClient() as client: - msg = conversation.create_user_message('What time is it?') - inp = conversation.ConversationInputAlpha2(messages=[msg]) - resp = await client.converse_alpha2( - name='openai', inputs=[inp], tools=conversation.get_registered_tools() - ) - for m in resp.to_assistant_messages(): - if m.of_assistant.content: - print(m.of_assistant.content[0].text) - -asyncio.run(main()) -``` - -If you need to execute tools asynchronously (e.g., network I/O), implement async functions and use `conversation.execute_registered_tool_async` with timeouts. - -## Safety and validation (must‑read) - -An LLM may suggest tool calls. Treat all model‑provided parameters as untrusted input. - -Recommendations: -- Register only trusted functions as tools. Prefer the `@conversation.tool` decorator for clarity and automatic schema generation. -- Use precise type annotations and docstrings. The SDK converts function signatures to JSON schema and binds parameters with type coercion and rejection of unexpected/invalid fields. -- Add guardrails for tools that can cause side effects (filesystem, network, subprocess). Consider allow‑lists, sandboxing, and limits. -- Validate arguments before execution. For example, sanitize file paths or restrict URLs/domains. -- Consider timeouts and concurrency controls. For async tools, pass a timeout to `execute_registered_tool_async(..., timeout=...)`. -- Log and monitor tool usage. Fail closed: if validation fails, avoid executing the tool and inform the user safely. - -See also inline notes in `dapr/clients/grpc/conversation.py` (e.g., `tool()`, `ConversationTools`, `execute_registered_tool`) for parameter binding and error handling details. 
- - -## Key helper methods (quick reference) - -This section summarizes helper utilities available in dapr.clients.grpc.conversation used throughout the examples. - -- create_user_message(text: str) -> ConversationMessage - - Builds a user role message for Alpha2. Use in history lists. - - Example: `history.append(conversation.create_user_message("Hello"))` - -- create_system_message(text: str) -> ConversationMessage - - Builds a system message to steer the assistant’s behavior. - - Example: `history = [conversation.create_system_message("You are a concise assistant.")]` - -- create_assistant_message(text: str) -> ConversationMessage - - Useful for injecting assistant text in tests or controlled flows. - -- create_tool_message(tool_id: str, name: str, content: Any) -> ConversationMessage - - Converts a tool’s output into a tool message the LLM can read next turn. - - content can be any object; it is stringified safely by the SDK. - - Example: `history.append(conversation.create_tool_message(tool_id=tc.id, name=tc.function.name, content=conversation.execute_registered_tool(tc.function.name, tc.function.arguments)))` - -- get_registered_tools() -> list[ConversationTools] - - Returns all tools currently registered in the in-process registry. - - Includes tools created via: - - @conversation.tool decorator (auto-registered by default), and - - ConversationToolsFunction.from_function with register=True (default). - - Pass this list in converse_alpha2(..., tools=...). - -- register_tool(name: str, t: ConversationTools) / unregister_tool(name: str) - - Manually manage the tool registry (e.g., advanced scenarios, tests, cleanup). - - Names must be unique; unregister to avoid collisions in long-lived processes. - -- execute_registered_tool(name: str, params: Mapping|Sequence|str|None) -> Any - - Synchronously executes a registered tool by name. - - params accepts kwargs (mapping), args (sequence), JSON string, or None. 
If a JSON string is provided (as commonly returned by LLMs), it is parsed for you. - - Parameters are validated and coerced against the function signature/schema; unexpected or invalid fields raise errors. - - Security: treat params as untrusted; add guardrails for side effects. - -- execute_registered_tool_async(name: str, params: Mapping|Sequence|str|None, *, timeout: float|None=None) -> Any - - Async counterpart. Supports timeouts, which are recommended for I/O-bound tools. - - Prefer this for async tools or when using the aio client. - -- ConversationToolsFunction.from_function(func: Callable, register: bool = True) -> ConversationToolsFunction - - Derives a JSON schema from a typed Python function (annotations + optional docstring) and optionally registers a tool. - - Typical usage: `spec = conversation.ConversationToolsFunction.from_function(my_func)`; then either rely on auto-registration or wrap with `ConversationTools(function=spec)` and call `register_tool(spec.name, tool)` or pass `[tool]` directly to `tools=`. - -- ConversationResponseAlpha2.to_assistant_messages() -> list[ConversationMessage] - - Convenience to transform the response outputs into assistant ConversationMessage objects you can append to history directly (including tool_calls when present). - -Tip: The @conversation.tool decorator is the easiest way to create a tool. It auto-generates the schema from your function, allows an optional namespace/name override, and auto-registers the tool (you can set register=False to defer registration). 
diff --git a/daprdocs/content/en/python-sdk-docs/python-actor.md b/daprdocs/content/en/python-sdk-docs/python-actor.md deleted file mode 100644 index 22360afd0..000000000 --- a/daprdocs/content/en/python-sdk-docs/python-actor.md +++ /dev/null @@ -1,130 +0,0 @@ ---- -type: docs -title: "Getting started with the Dapr actor Python SDK" -linkTitle: "Actor" -weight: 20000 -description: How to get up and running with the Dapr Python SDK ---- - -The Dapr actor package allows you to interact with Dapr virtual actors from a Python application. - -## Pre-requisites - -- [Dapr CLI]({{% ref install-dapr-cli.md %}}) installed -- Initialized [Dapr environment]({{% ref install-dapr-selfhost.md %}}) -- [Python 3.9+](https://www.python.org/downloads/) installed -- [Dapr Python package]({{% ref "python#installation" %}}) installed - -## Actor interface - -The interface defines the actor contract that is shared between the actor implementation and the clients calling the actor. Because a client may depend on it, it typically makes sense to define it in an assembly that is separate from the actor implementation. - -```python -from dapr.actor import ActorInterface, actormethod - -class DemoActorInterface(ActorInterface): - @actormethod(name="GetMyData") - async def get_my_data(self) -> object: - ... -``` - -## Actor services - -An actor service hosts the virtual actor. It is implemented a class that derives from the base type `Actor` and implements the interfaces defined in the actor interface. - -Actors can be created using one of the Dapr actor extensions: - - [FastAPI actor extension]({{% ref python-fastapi.md %}}) - - [Flask actor extension]({{% ref python-flask.md %}}) - -## Actor client - -An actor client contains the implementation of the actor client which calls the actor methods defined in the actor interface. 
- -```python -import asyncio - -from dapr.actor import ActorProxy, ActorId -from demo_actor_interface import DemoActorInterface - -async def main(): - # Create proxy client - proxy = ActorProxy.create('DemoActor', ActorId('1'), DemoActorInterface) - - # Call method on client - resp = await proxy.GetMyData() -``` - -## Sample - -Visit [this page](https://github.com/dapr/python-sdk/tree/main/examples/demo_actor) for a runnable actor sample. - - -## Mock Actor Testing - -The Dapr Python SDK provides the ability to create mock actors to unit test your actor methods and see how they interact with the actor state. - -### Sample Usage - - -``` -from dapr.actor.runtime.mock_actor import create_mock_actor - -class MyActor(Actor, MyActorInterface): - async def save_state(self, data) -> None: - await self._state_manager.set_state('mystate', data) - await self._state_manager.save_state() - -mock_actor = create_mock_actor(MyActor, "id") - -await mock_actor.save_state(5) -assert mockactor._state_manager._mock_state['mystate'] == 5 #True -``` -Mock actors are created by passing your actor class and an actor ID (a string) to the create_mock_actor function. This function returns an instance of the actor with many internal methods overridden. Instead of interacting with Dapr for tasks like saving state or managing timers, the mock actor uses in-memory state to simulate these behaviors. - -This state can be accessed through the following variables: - -**IMPORTANT NOTE: Due to type hinting issues as discussed further down, these variables will not be visible to type hinters/linters/etc, who will think they are invalid variables. You will need to use them with #type: ignore in order to satisfy any such systems.** - -- **_state_manager._mock_state()** -A `[str, object]` dict where all the actor state is stored. Any variable saved via `_state_manager.save_state(key, value)`, or any other statemanager method is stored in the dict as that key, value pair. 
Any value loaded via `try_get_state` or any other statemanager method is taken from this dict. - -- **_state_manager._mock_timers()** -A `[str, ActorTimerData]` dict which holds the active actor timers. Any actor method which would add or remove a timer adds or pops the appropriate `ActorTimerData` object from this dict. - -- **_state_manager._mock_reminders()** -A [str, ActorReminderData] dict which holds the active actor reminders. Any actor method which would add or remove a timer adds or pops the appropriate ActorReminderData object from this dict. - -**Note: The timers and reminders will never actually trigger. The dictionaries exist only so methods that should add or remove timers/reminders can be tested. If you need to test the callbacks they should activate, you should call them directly with the appropriate values:** -``` -result = await mock_actor.recieve_reminder(name, state, due_time, period, _ttl) -# Test the result directly or test for side effects (like changing state) by querying `_state_manager._mock_state` -``` - -### Usage and Limitations - -**To allow for more fine-grained control, the `_on_activate` method will not be called automatically the way it is when Dapr initializes a new Actor instance. You should call it manually as needed as part of your tests.** - -**A current limitation of the mock actor system is that it does not call the `_on_pre_actor_method` and `_on_post_actor_method` methods. You can always call these methods manually as part of a test.** - -The `__init__`, `register_timer`, `unregister_timer`, `register_reminder`, `unregister_reminder` methods are all overwritten by the MockActor class that gets applied as a mixin via `create_mock_actor`. If your actor itself overwrites these methods, those modifications will themselves be overwritten and the actor will likely not behave as you expect. 
- -*note: `__init__` is a special case where you are expected to define it as* -``` - def __init__(self, ctx, actor_id): - super().__init__(ctx, actor_id) -``` -*Mock actors work fine with this, but if you have added any extra logic into `__init__`, it will be overwritten. It is worth noting that the correct way to apply logic on initialization is via `_on_activate` (which can also be safely used with mock actors) instead of `__init__`.* - -*If you have an actor which does override default Dapr actor methods, you can create a custom subclass of the MockActor class (from MockActor.py) which implements whatever custom logic you have along with interacting with `_mock_state`, `_mock_timers`, and `_mock_reminders` as normal, and then applying that custom class as a mixin via a `create_mock_actor` function you define yourself.* - -The actor `_runtime_ctx` variable is set to None. All the normal actor methods have been overwritten such as to not call it, but if your code itself interacts directly with `_runtime_ctx`, tests may fail. - -The actor _state_manager is overwritten with an instance of `MockStateManager`. This has all the same methods and functionality of the base `ActorStateManager`, except for using the various `_mock` variables for storing data instead of the `_runtime_ctx`. If your code implements its own custom state manager it will be overwritten and tests will likely fail. - -### Type Hinting - -Because of Python's lack of a unified method for type hinting type intersections (see: [python/typing #213](https://github.com/python/typing/issues/213)), type hinting unfortunately doesn't work with Mock Actors. The return type is type hinted as "instance of Actor subclass T" when it should really be type hinted as "instance of MockActor subclass T" or "instance of type intersection `[Actor subclass T, MockActor]`" (where, it is worth noting, `MockActor` is itself a subclass of `Actor`). 
- -This means that, for example, if you hover over `mockactor._state_manager` in a code editor, it will come up as an instance of ActorStateManager (instead of MockStateManager), and various IDE helper functions (like VSCode's `Go to Definition`, which will bring you to the definition of ActorStateManager instead of MockStateManager) won't work properly. - -For now, this issue is unfixable, so it's merely something to be noted because of the confusion it might cause. If in the future it becomes possible to accurately type hint cases like this feel free to open an issue about implementing it. \ No newline at end of file diff --git a/daprdocs/content/en/python-sdk-docs/python-client.md b/daprdocs/content/en/python-sdk-docs/python-client.md deleted file mode 100644 index f03a6a74c..000000000 --- a/daprdocs/content/en/python-sdk-docs/python-client.md +++ /dev/null @@ -1,601 +0,0 @@ ---- -type: docs -title: "Getting started with the Dapr client Python SDK" -linkTitle: "Client" -weight: 10000 -description: How to get up and running with the Dapr Python SDK ---- - -The Dapr client package allows you to interact with other Dapr applications from a Python application. - -{{% alert title="Note" color="primary" %}} - If you haven't already, [try out one of the quickstarts]({{% ref quickstarts %}}) for a quick walk-through on how to use the Dapr Python SDK with an API building block. - -{{% /alert %}} - -## Prerequisites - -[Install the Dapr Python package]({{% ref "python#installation" %}}) before getting started. - -## Import the client package - -The `dapr` package contains the `DaprClient`, which is used to create and use a client. - -```python -from dapr.clients import DaprClient -``` - -## Initialising the client -You can initialise a Dapr client in multiple ways: - -#### Default values: -When you initialise the client without any parameters it will use the default values for a Dapr -sidecar instance (`127.0.0.1:50001`). 
-```python -from dapr.clients import DaprClient - -with DaprClient() as d: - # use the client -``` - -#### Specifying an endpoint on initialisation: -When passed as an argument in the constructor, the gRPC endpoint takes precedence over any -configuration or environment variable. - -```python -from dapr.clients import DaprClient - -with DaprClient("mydomain:50051?tls=true") as d: - # use the client -``` - -#### Configuration options: - -##### Dapr Sidecar Endpoints -You can use the standardised `DAPR_GRPC_ENDPOINT` environment variable to -specify the gRPC endpoint. When this variable is set, the client can be initialised -without any arguments: - -```bash -export DAPR_GRPC_ENDPOINT="mydomain:50051?tls=true" -``` -```python -from dapr.clients import DaprClient - -with DaprClient() as d: - # the client will use the endpoint specified in the environment variables -``` - -The legacy environment variables `DAPR_RUNTIME_HOST`, `DAPR_HTTP_PORT` and `DAPR_GRPC_PORT` are -also supported, but `DAPR_GRPC_ENDPOINT` takes precedence. - -##### Dapr API Token -If your Dapr instance is configured to require the `DAPR_API_TOKEN` environment variable, you can -set it in the environment and the client will use it automatically. -You can read more about Dapr API token authentication [here](https://docs.dapr.io/operations/security/api-token/). - -##### Health timeout -On client initialisation, a health check is performed against the Dapr sidecar (`/healthz/outbound`). -The client will wait for the sidecar to be up and running before proceeding. - -The default healthcheck timeout is 60 seconds, but it can be overridden by setting the `DAPR_HEALTH_TIMEOUT` -environment variable. - -##### Retries and timeout - -The Dapr client can retry a request if a specific error code is received from the sidecar. This is -configurable through the `DAPR_API_MAX_RETRIES` environment variable and is picked up automatically, -not requiring any code changes. 
-The default value for `DAPR_API_MAX_RETRIES` is `0`, which means no retries will be made. - -You can fine-tune more retry parameters by creating a `dapr.clients.retry.RetryPolicy` object and -passing it to the DaprClient constructor: - -```python -from dapr.clients.retry import RetryPolicy - -retry = RetryPolicy( - max_attempts=5, - initial_backoff=1, - max_backoff=20, - backoff_multiplier=1.5, - retryable_http_status_codes=[408, 429, 500, 502, 503, 504], - retryable_grpc_status_codes=[StatusCode.UNAVAILABLE, StatusCode.DEADLINE_EXCEEDED, ] -) - -with DaprClient(retry_policy=retry) as d: - ... -``` - -or for actors: -```python -factory = ActorProxyFactory(retry_policy=RetryPolicy(max_attempts=3)) -proxy = ActorProxy.create('DemoActor', ActorId('1'), DemoActorInterface, factory) -``` - -**Timeout** can be set for all calls through the environment variable `DAPR_API_TIMEOUT_SECONDS`. The default value is 60 seconds. - -> Note: You can control timeouts on service invocation separately, by passing a `timeout` parameter to the `invoke_method` method. - -## Error handling -Initially, errors in Dapr followed the [Standard gRPC error model](https://grpc.io/docs/guides/error/#standard-error-model). However, to provide more detailed and informative error messages, in version 1.13 an enhanced error model has been introduced which aligns with the gRPC [Richer error model](https://grpc.io/docs/guides/error/#richer-error-model). In response, the Python SDK implemented `DaprGrpcError`, a custom exception class designed to improve the developer experience. -It's important to note that the transition to using `DaprGrpcError` for all gRPC status exceptions is a work in progress. As of now, not every API call in the SDK has been updated to leverage this custom exception. We are actively working on this enhancement and welcome contributions from the community. 
- -Example of handling `DaprGrpcError` exceptions when using the Dapr Python SDK: - -```python -try: - d.save_state(store_name=storeName, key=key, value=value) -except DaprGrpcError as err: - print(f'Status code: {err.code()}') - print(f"Message: {err.message()}") - print(f"Error code: {err.error_code()}") - print(f"Error info(reason): {err.error_info.reason}") - print(f"Resource info (resource type): {err.resource_info.resource_type}") - print(f"Resource info (resource name): {err.resource_info.resource_name}") - print(f"Bad request (field): {err.bad_request.field_violations[0].field}") - print(f"Bad request (description): {err.bad_request.field_violations[0].description}") -``` - - -## Building blocks - -The Python SDK allows you to interface with all of the [Dapr building blocks]({{% ref building-blocks %}}). - -### Invoke a service - -The Dapr Python SDK provides a simple API for invoking services via either HTTP or gRPC (deprecated). The protocol can be selected by setting the `DAPR_API_METHOD_INVOCATION_PROTOCOL` environment variable, defaulting to HTTP when unset. GRPC service invocation in Dapr is deprecated and GRPC proxying is recommended as an alternative. - -```python -from dapr.clients import DaprClient - -with DaprClient() as d: - # invoke a method (gRPC or HTTP GET) - resp = d.invoke_method('service-to-invoke', 'method-to-invoke', data='{"message":"Hello World"}') - - # for other HTTP verbs the verb must be specified - # invoke a 'POST' method (HTTP only) - resp = d.invoke_method('service-to-invoke', 'method-to-invoke', data='{"id":"100", "FirstName":"Value", "LastName":"Value"}', http_verb='post') -``` - -The base endpoint for HTTP API calls is specified in the `DAPR_HTTP_ENDPOINT` environment variable. -If this variable is not set, the endpoint value is derived from the `DAPR_RUNTIME_HOST` and `DAPR_HTTP_PORT` variables, whose default values are `127.0.0.1` and `3500` respectively. 
- -The base endpoint for gRPC calls is the one used for the client initialisation ([explained above](#initialising-the-client)). - - -- For a full guide on service invocation visit [How-To: Invoke a service]({{% ref howto-invoke-discover-services.md %}}). -- Visit [Python SDK examples](https://github.com/dapr/python-sdk/tree/master/examples/invoke-simple) for code samples and instructions to try out service invocation. - -### Save & get application state - -```python -from dapr.clients import DaprClient - -with DaprClient() as d: - # Save state - d.save_state(store_name="statestore", key="key1", value="value1") - - # Get state - data = d.get_state(store_name="statestore", key="key1").data - - # Delete state - d.delete_state(store_name="statestore", key="key1") -``` - -- For a full list of state operations visit [How-To: Get & save state]({{% ref howto-get-save-state.md %}}). -- Visit [Python SDK examples](https://github.com/dapr/python-sdk/tree/master/examples/state_store) for code samples and instructions to try out state management. - -### Query application state (Alpha) - -```python - from dapr import DaprClient - - query = ''' - { - "filter": { - "EQ": { "state": "CA" } - }, - "sort": [ - { - "key": "person.id", - "order": "DESC" - } - ] - } - ''' - - with DaprClient() as d: - resp = d.query_state( - store_name='state_store', - query=query, - states_metadata={"metakey": "metavalue"}, # optional - ) -``` - -- For a full list of state store query options visit [How-To: Query state]({{% ref howto-state-query-api.md %}}). -- Visit [Python SDK examples](https://github.com/dapr/python-sdk/tree/master/examples/state_store_query) for code samples and instructions to try out state store querying. 
- -### Publish & subscribe - -#### Publish messages - -```python -from dapr.clients import DaprClient - -with DaprClient() as d: - resp = d.publish_event(pubsub_name='pubsub', topic_name='TOPIC_A', data='{"message":"Hello World"}') -``` - - -Send [CloudEvents](https://cloudevents.io/) messages with a json payload: -```python -from dapr.clients import DaprClient -import json - -with DaprClient() as d: - cloud_event = { - 'specversion': '1.0', - 'type': 'com.example.event', - 'source': 'my-service', - 'id': 'myid', - 'data': {'id': 1, 'message': 'hello world'}, - 'datacontenttype': 'application/json', - } - - # Set the data content type to 'application/cloudevents+json' - resp = d.publish_event( - pubsub_name='pubsub', - topic_name='TOPIC_CE', - data=json.dumps(cloud_event), - data_content_type='application/cloudevents+json', - ) -``` - -Publish [CloudEvents](https://cloudevents.io/) messages with plain text payload: -```python -from dapr.clients import DaprClient -import json - -with DaprClient() as d: - cloud_event = { - 'specversion': '1.0', - 'type': 'com.example.event', - 'source': 'my-service', - 'id': "myid", - 'data': 'hello world', - 'datacontenttype': 'text/plain', - } - - # Set the data content type to 'application/cloudevents+json' - resp = d.publish_event( - pubsub_name='pubsub', - topic_name='TOPIC_CE', - data=json.dumps(cloud_event), - data_content_type='application/cloudevents+json', - ) -``` - - -#### Subscribe to messages - -```python -from cloudevents.sdk.event import v1 -from dapr.ext.grpc import App -import json - -app = App() - -# Default subscription for a topic -@app.subscribe(pubsub_name='pubsub', topic='TOPIC_A') -def mytopic(event: v1.Event) -> None: - data = json.loads(event.Data()) - print(f'Received: id={data["id"]}, message="{data ["message"]}"' - ' content_type="{event.content_type}"',flush=True) - -# Specific handler using Pub/Sub routing -@app.subscribe(pubsub_name='pubsub', topic='TOPIC_A', - rule=Rule("event.type == \"important\"", 
1)) -def mytopic_important(event: v1.Event) -> None: - data = json.loads(event.Data()) - print(f'Received: id={data["id"]}, message="{data ["message"]}"' - ' content_type="{event.content_type}"',flush=True) -``` - -- For more information about pub/sub, visit [How-To: Publish & subscribe]({{% ref howto-publish-subscribe.md %}}). -- Visit [Python SDK examples](https://github.com/dapr/python-sdk/tree/master/examples/pubsub-simple) for code samples and instructions to try out pub/sub. - -#### Streaming message subscription - -You can create a streaming subscription to a PubSub topic using either the `subscribe` -or `subscribe_with_handler` methods. - -The `subscribe` method returns an iterable `Subscription` object, which allows you to pull messages from the -stream by using a `for` loop (ex. `for message in subscription`) or by -calling the `next_message` method. This will block on the main thread while waiting for messages. -When done, you should call the close method to terminate the -subscription and stop receiving messages. - -The `subscribe_with_handler` method accepts a callback function that is executed for each message -received from the stream. -It runs in a separate thread, so it doesn't block the main thread. The callback should return a -`TopicEventResponse` (ex. `TopicEventResponse('success')`), indicating whether the message was -processed successfully, should be retried, or should be discarded. The method will automatically -manage message acknowledgements based on the returned status. The call to `subscribe_with_handler` -method returns a close function, which should be called to terminate the subscription when you're -done. 
- -Here's an example of using the `subscribe` method: - -```python -import time - -from dapr.clients import DaprClient -from dapr.clients.grpc.subscription import StreamInactiveError, StreamCancelledError - -counter = 0 - - -def process_message(message): - global counter - counter += 1 - # Process the message here - print(f'Processing message: {message.data()} from {message.topic()}...') - return 'success' - - -def main(): - with DaprClient() as client: - global counter - - subscription = client.subscribe( - pubsub_name='pubsub', topic='TOPIC_A', dead_letter_topic='TOPIC_A_DEAD' - ) - - try: - for message in subscription: - if message is None: - print('No message received. The stream might have been cancelled.') - continue - - try: - response_status = process_message(message) - - if response_status == 'success': - subscription.respond_success(message) - elif response_status == 'retry': - subscription.respond_retry(message) - elif response_status == 'drop': - subscription.respond_drop(message) - - if counter >= 5: - break - except StreamInactiveError: - print('Stream is inactive. 
Retrying...') - time.sleep(1) - continue - except StreamCancelledError: - print('Stream was cancelled') - break - except Exception as e: - print(f'Error occurred during message processing: {e}') - - finally: - print('Closing subscription...') - subscription.close() - - -if __name__ == '__main__': - main() -``` - -And here's an example of using the `subscribe_with_handler` method: - -```python -import time - -from dapr.clients import DaprClient -from dapr.clients.grpc._response import TopicEventResponse - -counter = 0 - - -def process_message(message): - # Process the message here - global counter - counter += 1 - print(f'Processing message: {message.data()} from {message.topic()}...') - return TopicEventResponse('success') - - -def main(): - with (DaprClient() as client): - # This will start a new thread that will listen for messages - # and process them in the `process_message` function - close_fn = client.subscribe_with_handler( - pubsub_name='pubsub', topic='TOPIC_A', handler_fn=process_message, - dead_letter_topic='TOPIC_A_DEAD' - ) - - while counter < 5: - time.sleep(1) - - print("Closing subscription...") - close_fn() - - -if __name__ == '__main__': - main() -``` - -- For more information about pub/sub, visit [How-To: Publish & subscribe]({{% ref howto-publish-subscribe.md %}}). -- Visit [Python SDK examples](https://github.com/dapr/python-sdk/tree/main/examples/pubsub-simple) for code samples and instructions to try out streaming pub/sub. - -### Conversation (Alpha) - -{{% alert title="Note" color="primary" %}} -The Dapr Conversation API is currently in alpha. -{{% /alert %}} - -Since version 1.15 Dapr offers developers the capability to securely and reliably interact with Large Language Models (LLM) through the [Conversation API]({{% ref conversation-overview.md %}}). 
- -```python -from dapr.clients import DaprClient -from dapr.clients.grpc.conversation import ConversationInput - -with DaprClient() as d: - inputs = [ - ConversationInput(content="What's Dapr?", role='user', scrub_pii=True), - ConversationInput(content='Give a brief overview.', role='user', scrub_pii=True), - ] - - metadata = { - 'model': 'foo', - 'key': 'authKey', - 'cacheTTL': '10m', - } - - response = d.converse_alpha1( - name='echo', inputs=inputs, temperature=0.7, context_id='chat-123', metadata=metadata - ) - - for output in response.outputs: - print(f'Result: {output.result}') -``` - -### Interact with output bindings - -```python -from dapr.clients import DaprClient - -with DaprClient() as d: - resp = d.invoke_binding(binding_name='kafkaBinding', operation='create', data='{"message":"Hello World"}') -``` - -- For a full guide on output bindings visit [How-To: Use bindings]({{% ref howto-bindings.md %}}). -- Visit [Python SDK examples](https://github.com/dapr/python-sdk/tree/main/examples/invoke-binding) for code samples and instructions to try out output bindings. - -### Retrieve secrets - -```python -from dapr.clients import DaprClient - -with DaprClient() as d: - resp = d.get_secret(store_name='localsecretstore', key='secretKey') -``` - -- For a full guide on secrets visit [How-To: Retrieve secrets]({{% ref howto-secrets.md %}}). 
-- Visit [Python SDK examples](https://github.com/dapr/python-sdk/tree/master/examples/secret_store) for code samples and instructions to try out retrieving secrets - -### Configuration - -#### Get configuration - -```python -from dapr.clients import DaprClient - -with DaprClient() as d: - # Get Configuration - configuration = d.get_configuration(store_name='configurationstore', keys=['orderId'], config_metadata={}) -``` - -#### Subscribe to configuration - -```python -import asyncio -from time import sleep -from dapr.clients import DaprClient - -async def executeConfiguration(): - with DaprClient() as d: - storeName = 'configurationstore' - - key = 'orderId' - - # Wait for sidecar to be up within 20 seconds. - d.wait(20) - - # Subscribe to configuration by key. - configuration = await d.subscribe_configuration(store_name=storeName, keys=[key], config_metadata={}) - while True: - if configuration != None: - items = configuration.get_items() - for key, item in items: - print(f"Subscribe key={key} value={item.value} version={item.version}", flush=True) - else: - print("Nothing yet") - sleep(5) - -asyncio.run(executeConfiguration()) -``` - -- Learn more about managing configurations via the [How-To: Manage configuration]({{% ref howto-manage-configuration.md %}}) guide. -- Visit [Python SDK examples](https://github.com/dapr/python-sdk/tree/master/examples/configuration) for code samples and instructions to try out configuration. 
- -### Distributed Lock - -```python -from dapr.clients import DaprClient - -def main(): - # Lock parameters - store_name = 'lockstore' # as defined in components/lockstore.yaml - resource_id = 'example-lock-resource' - client_id = 'example-client-id' - expiry_in_seconds = 60 - - with DaprClient() as dapr: - print('Will try to acquire a lock from lock store named [%s]' % store_name) - print('The lock is for a resource named [%s]' % resource_id) - print('The client identifier is [%s]' % client_id) - print('The lock will will expire in %s seconds.' % expiry_in_seconds) - - with dapr.try_lock(store_name, resource_id, client_id, expiry_in_seconds) as lock_result: - assert lock_result.success, 'Failed to acquire the lock. Aborting.' - print('Lock acquired successfully!!!') - - # At this point the lock was released - by magic of the `with` clause ;) - unlock_result = dapr.unlock(store_name, resource_id, client_id) - print('We already released the lock so unlocking will not work.') - print('We tried to unlock it anyway and got back [%s]' % unlock_result.status) -``` - -- Learn more about using a distributed lock: [How-To: Use a lock]({{% ref howto-use-distributed-lock.md %}}). -- Visit [Python SDK examples](https://github.com/dapr/python-sdk/blob/master/examples/distributed_lock) for code samples and instructions to try out distributed lock. 
- -### Cryptography - -```python -from dapr.clients import DaprClient - -message = 'The secret is "passw0rd"' - -def main(): - with DaprClient() as d: - resp = d.encrypt( - data=message.encode(), - options=EncryptOptions( - component_name='crypto-localstorage', - key_name='rsa-private-key.pem', - key_wrap_algorithm='RSA', - ), - ) - encrypt_bytes = resp.read() - - resp = d.decrypt( - data=encrypt_bytes, - options=DecryptOptions( - component_name='crypto-localstorage', - key_name='rsa-private-key.pem', - ), - ) - decrypt_bytes = resp.read() - - print(decrypt_bytes.decode()) # The secret is "passw0rd" -``` - -- For a full list of state operations visit [How-To: Use the cryptography APIs]({{% ref howto-cryptography.md %}}). -- Visit [Python SDK examples](https://github.com/dapr/python-sdk/tree/master/examples/crypto) for code samples and instructions to try out cryptography - -## Related links -[Python SDK examples](https://github.com/dapr/python-sdk/tree/master/examples) diff --git a/daprdocs/content/en/python-sdk-docs/python-sdk-extensions/_index.md b/daprdocs/content/en/python-sdk-docs/python-sdk-extensions/_index.md deleted file mode 100644 index 8b7bc9c50..000000000 --- a/daprdocs/content/en/python-sdk-docs/python-sdk-extensions/_index.md +++ /dev/null @@ -1,7 +0,0 @@ ---- -type: docs -title: "Dapr Python SDK extensions" -linkTitle: "Extensions" -weight: 30000 -description: Python SDK for developing Dapr applications ---- diff --git a/daprdocs/content/en/python-sdk-docs/python-sdk-extensions/python-fastapi.md b/daprdocs/content/en/python-sdk-docs/python-sdk-extensions/python-fastapi.md deleted file mode 100644 index 13b6499b9..000000000 --- a/daprdocs/content/en/python-sdk-docs/python-sdk-extensions/python-fastapi.md +++ /dev/null @@ -1,115 +0,0 @@ ---- -type: docs -title: "Dapr Python SDK integration with FastAPI" -linkTitle: "FastAPI" -weight: 200000 -description: How to create Dapr Python virtual actors and pubsub with the FastAPI extension ---- - -The Dapr 
Python SDK provides integration with FastAPI using the `dapr-ext-fastapi` extension. - -## Installation - -You can download and install the Dapr FastAPI extension with: - -{{< tabpane text=true >}} - -{{% tab header="Stable" %}} -```bash -pip install dapr-ext-fastapi -``` -{{% /tab %}} - -{{% tab header="Development" %}} -{{% alert title="Note" color="warning" %}} -The development package will contain features and behavior that will be compatible with the pre-release version of the Dapr runtime. Make sure to uninstall any stable versions of the Python SDK extension before installing the `dapr-dev` package. -{{% /alert %}} - -```bash -pip install dapr-ext-fastapi-dev -``` -{{% /tab %}} - -{{< /tabpane >}} - -## Example - -### Subscribing to events of different types - -```python -import uvicorn -from fastapi import Body, FastAPI -from dapr.ext.fastapi import DaprApp -from pydantic import BaseModel - -class RawEventModel(BaseModel): - body: str - -class User(BaseModel): - id: int - name: str - -class CloudEventModel(BaseModel): - data: User - datacontenttype: str - id: str - pubsubname: str - source: str - specversion: str - topic: str - traceid: str - traceparent: str - tracestate: str - type: str - - -app = FastAPI() -dapr_app = DaprApp(app) - -# Allow handling event with any structure (Easiest, but least robust) -# dapr publish --publish-app-id sample --topic any_topic --pubsub pubsub --data '{"id":"7", "desc": "good", "size":"small"}' -@dapr_app.subscribe(pubsub='pubsub', topic='any_topic') -def any_event_handler(event_data = Body()): - print(event_data) - -# For robustness choose one of the below based on if publisher is using CloudEvents - -# Handle events sent with CloudEvents -# dapr publish --publish-app-id sample --topic cloud_topic --pubsub pubsub --data '{"id":"7", "name":"Bob Jones"}' -@dapr_app.subscribe(pubsub='pubsub', topic='cloud_topic') -def cloud_event_handler(event_data: CloudEventModel): - print(event_data) - -# Handle raw events sent without 
CloudEvents -# curl -X "POST" http://localhost:3500/v1.0/publish/pubsub/raw_topic?metadata.rawPayload=true -H "Content-Type: application/json" -d '{"body": "345"}' -@dapr_app.subscribe(pubsub='pubsub', topic='raw_topic') -def raw_event_handler(event_data: RawEventModel): - print(event_data) - - - -if __name__ == "__main__": - uvicorn.run(app, host="0.0.0.0", port=30212) -``` - -### Creating an actor - -```python -from fastapi import FastAPI -from dapr.ext.fastapi import DaprActor -from demo_actor import DemoActor - -app = FastAPI(title=f'{DemoActor.__name__}Service') - -# Add Dapr Actor Extension -actor = DaprActor(app) - -@app.on_event("startup") -async def startup_event(): - # Register DemoActor - await actor.register_actor(DemoActor) - -@app.get("/GetMyData") -def get_my_data(): - return "{'message': 'myData'}" -``` diff --git a/daprdocs/content/en/python-sdk-docs/python-sdk-extensions/python-flask.md b/daprdocs/content/en/python-sdk-docs/python-sdk-extensions/python-flask.md deleted file mode 100644 index b4ec58f9c..000000000 --- a/daprdocs/content/en/python-sdk-docs/python-sdk-extensions/python-flask.md +++ /dev/null @@ -1,60 +0,0 @@ ---- -type: docs -title: "Dapr Python SDK integration with Flask" -linkTitle: "Flask" -weight: 300000 -description: How to create Dapr Python virtual actors with the Flask extension ---- - -The Dapr Python SDK provides integration with Flask using the `flask-dapr` extension. - -## Installation - -You can download and install the Dapr Flask extension with: - -{{< tabpane text=true >}} - -{{% tab header="Stable" %}} -```bash -pip install flask-dapr -``` -{{% /tab %}} - -{{% tab header="Development" %}} -{{% alert title="Note" color="warning" %}} -The development package will contain features and behavior that will be compatible with the pre-release version of the Dapr runtime. Make sure to uninstall any stable versions of the Python SDK extension before installing the `dapr-dev` package. 
-{{% /alert %}} - -```bash -pip install flask-dapr-dev -``` -{{% /tab %}} - -{{< /tabpane >}} - -## Example - -```python -from flask import Flask -from flask_dapr.actor import DaprActor - -from dapr.conf import settings -from demo_actor import DemoActor - -app = Flask(f'{DemoActor.__name__}Service') - -# Enable DaprActor Flask extension -actor = DaprActor(app) - -# Register DemoActor -actor.register_actor(DemoActor) - -# Setup method route -@app.route('/GetMyData', methods=['GET']) -def get_my_data(): - return {'message': 'myData'}, 200 - -# Run application -if __name__ == '__main__': - app.run(port=settings.HTTP_APP_PORT) -``` diff --git a/daprdocs/content/en/python-sdk-docs/python-sdk-extensions/python-grpc.md b/daprdocs/content/en/python-sdk-docs/python-sdk-extensions/python-grpc.md deleted file mode 100644 index e34c213b5..000000000 --- a/daprdocs/content/en/python-sdk-docs/python-sdk-extensions/python-grpc.md +++ /dev/null @@ -1,118 +0,0 @@ ---- -type: docs -title: "Getting started with the Dapr Python gRPC service extension" -linkTitle: "gRPC" -weight: 100000 -description: How to get up and running with the Dapr Python gRPC extension ---- - -The Dapr Python SDK provides a built in gRPC server extension, `dapr.ext.grpc`, for creating Dapr services. - -## Installation - -You can download and install the Dapr gRPC server extension with: - -{{< tabpane text=true >}} - -{{% tab header="Stable" %}} -```bash -pip install dapr-ext-grpc -``` -{{% /tab %}} - -{{% tab header="Development" %}} -{{% alert title="Note" color="warning" %}} -The development package will contain features and behavior that will be compatible with the pre-release version of the Dapr runtime. Make sure to uninstall any stable versions of the Python SDK extension before installing the `dapr-dev` package. -{{% /alert %}} - -```bash -pip3 install dapr-ext-grpc-dev -``` -{{% /tab %}} - -{{< /tabpane >}} - -## Examples - -The `App` object can be used to create a server. 
- -### Listen for service invocation requests - -The `InvokeMethodRequest` and `InvokeMethodResponse` objects can be used to handle incoming requests. - -A simple service that will listen and respond to requests will look like: - -```python -from dapr.ext.grpc import App, InvokeMethodRequest, InvokeMethodResponse - -app = App() - -@app.method(name='my-method') -def mymethod(request: InvokeMethodRequest) -> InvokeMethodResponse: - print(request.metadata, flush=True) - print(request.text(), flush=True) - - return InvokeMethodResponse(b'INVOKE_RECEIVED', "text/plain; charset=UTF-8") - -app.run(50051) -``` - -A full sample can be found [here](https://github.com/dapr/python-sdk/tree/v1.0.0rc2/examples/invoke-simple). - -### Subscribe to a topic - -When subscribing to a topic, you can instruct dapr whether the event delivered has been accepted, or whether it should be dropped, or retried later. - -```python -from typing import Optional -from cloudevents.sdk.event import v1 -from dapr.ext.grpc import App -from dapr.clients.grpc._response import TopicEventResponse - -app = App() - -# Default subscription for a topic -@app.subscribe(pubsub_name='pubsub', topic='TOPIC_A') -def mytopic(event: v1.Event) -> Optional[TopicEventResponse]: - print(event.Data(),flush=True) - # Returning None (or not doing a return explicitly) is equivalent - # to returning a TopicEventResponse("success"). 
- # You can also return TopicEventResponse("retry") for dapr to log - # the message and retry delivery later, or TopicEventResponse("drop") - # for it to drop the message - return TopicEventResponse("success") - -# Specific handler using Pub/Sub routing -@app.subscribe(pubsub_name='pubsub', topic='TOPIC_A', - rule=Rule("event.type == \"important\"", 1)) -def mytopic_important(event: v1.Event) -> None: - print(event.Data(),flush=True) - -# Handler with disabled topic validation -@app.subscribe(pubsub_name='pubsub-mqtt', topic='topic/#', disable_topic_validation=True,) -def mytopic_wildcard(event: v1.Event) -> None: - print(event.Data(),flush=True) - -app.run(50051) -``` - -A full sample can be found [here](https://github.com/dapr/python-sdk/blob/v1.0.0rc2/examples/pubsub-simple/subscriber.py). - -### Setup input binding trigger - -```python -from dapr.ext.grpc import App, BindingRequest - -app = App() - -@app.binding('kafkaBinding') -def binding(request: BindingRequest): - print(request.text(), flush=True) - -app.run(50051) -``` - -A full sample can be found [here](https://github.com/dapr/python-sdk/tree/v1.0.0rc2/examples/invoke-binding). - -## Related links -- [PyPi](https://pypi.org/project/dapr-ext-grpc/) diff --git a/daprdocs/content/en/python-sdk-docs/python-sdk-extensions/python-workflow-ext/_index.md b/daprdocs/content/en/python-sdk-docs/python-sdk-extensions/python-workflow-ext/_index.md deleted file mode 100644 index 12a63f8f6..000000000 --- a/daprdocs/content/en/python-sdk-docs/python-sdk-extensions/python-workflow-ext/_index.md +++ /dev/null @@ -1,105 +0,0 @@ ---- -type: docs -title: "Dapr Python SDK integration with Dapr Workflow extension" -linkTitle: "Dapr Workflow" -weight: 400000 -description: How to get up and running with the Dapr Workflow extension -no_list: true ---- - -The Dapr Python SDK provides a built-in Dapr Workflow extension, `dapr.ext.workflow`, for creating Dapr services. 
- -## Installation - -You can download and install the Dapr Workflow extension with: - -{{< tabpane text=true >}} - -{{% tab header="Stable" %}} -```bash -pip install dapr-ext-workflow -``` -{{% /tab %}} - -{{% tab header="Development" %}} -{{% alert title="Note" color="warning" %}} -The development package will contain features and behavior that will be compatible with the pre-release version of the Dapr runtime. Make sure to uninstall any stable versions of the Python SDK extension before installing the `dapr-dev` package. -{{% /alert %}} - -```bash -pip install dapr-ext-workflow-dev -``` -{{% /tab %}} - -{{< /tabpane >}} - -## Example - -```python -from time import sleep - -import dapr.ext.workflow as wf - - -wfr = wf.WorkflowRuntime() - - -@wfr.workflow(name='random_workflow') -def task_chain_workflow(ctx: wf.DaprWorkflowContext, wf_input: int): - try: - result1 = yield ctx.call_activity(step1, input=wf_input) - result2 = yield ctx.call_activity(step2, input=result1) - except Exception as e: - yield ctx.call_activity(error_handler, input=str(e)) - raise - return [result1, result2] - - -@wfr.activity(name='step1') -def step1(ctx, activity_input): - print(f'Step 1: Received input: {activity_input}.') - # Do some work - return activity_input + 1 - - -@wfr.activity -def step2(ctx, activity_input): - print(f'Step 2: Received input: {activity_input}.') - # Do some work - return activity_input * 2 - -@wfr.activity -def error_handler(ctx, error): - print(f'Executing error handler: {error}.') - # Do some compensating work - - -if __name__ == '__main__': - wfr.start() - sleep(10) # wait for workflow runtime to start - - wf_client = wf.DaprWorkflowClient() - instance_id = wf_client.schedule_new_workflow(workflow=task_chain_workflow, input=42) - print(f'Workflow started. Instance ID: {instance_id}') - state = wf_client.wait_for_workflow_completion(instance_id) - print(f'Workflow completed! 
Status: {state.runtime_status}') - - wfr.shutdown() -``` - -- Learn more about authoring and managing workflows: - - [How-To: Author a workflow]({{% ref howto-author-workflow.md %}}). - - [How-To: Manage a workflow]({{% ref howto-manage-workflow.md %}}). - - -- Visit [Python SDK examples](https://github.com/dapr/python-sdk/tree/main/examples/workflow) for code samples and instructions to try out Dapr Workflow: - - [Simple workflow example]({{% ref python-workflow.md %}}) - - [Task chaining example](https://github.com/dapr/python-sdk/blob/main/examples/workflow/task_chaining.py) - - [Fan-out/Fan-in example](https://github.com/dapr/python-sdk/blob/main/examples/workflow/fan_out_fan_in.py) - - [Child workflow example](https://github.com/dapr/python-sdk/blob/main/examples/workflow/child_workflow.py) - - [Human approval example](https://github.com/dapr/python-sdk/blob/main/examples/workflow/human_approval.py) - - [Monitor example](https://github.com/dapr/python-sdk/blob/main/examples/workflow/monitor.py) - - -## Next steps - -{{< button text="Getting started with the Dapr Workflow Python SDK" page="python-workflow.md" >}} diff --git a/daprdocs/content/en/python-sdk-docs/python-sdk-extensions/python-workflow-ext/python-workflow.md b/daprdocs/content/en/python-sdk-docs/python-sdk-extensions/python-workflow-ext/python-workflow.md deleted file mode 100644 index 8c882ac72..000000000 --- a/daprdocs/content/en/python-sdk-docs/python-sdk-extensions/python-workflow-ext/python-workflow.md +++ /dev/null @@ -1,166 +0,0 @@ ---- -type: docs -title: "Getting started with the Dapr Workflow Python SDK" -linkTitle: "Workflow" -weight: 30000 -description: How to get up and running with workflows using the Dapr Python SDK ---- - -Let’s create a Dapr workflow and invoke it using the console. 
With the [provided workflow example](https://github.com/dapr/python-sdk/tree/main/examples/workflow/simple.py), you will: - -- Run a [Python console application](https://github.com/dapr/python-sdk/blob/main/examples/workflow/simple.py) that demonstrates workflow orchestration with activities, child workflows, and external events -- Learn how to handle retries, timeouts, and workflow state management -- Use the Python workflow SDK to start, pause, resume, and purge workflow instances - -This example uses the default configuration from `dapr init` in [self-hosted mode](https://github.com/dapr/cli#install-dapr-on-your-local-machine-self-hosted). - -In the Python example project, the `simple.py` file contains the setup of the app, including: -- The workflow definition -- The workflow activity definitions -- The registration of the workflow and workflow activities - -## Prerequisites -- [Dapr CLI]({{% ref install-dapr-cli.md %}}) installed -- Initialized [Dapr environment]({{% ref install-dapr-selfhost.md %}}) -- [Python 3.9+](https://www.python.org/downloads/) installed -- [Dapr Python package]({{% ref "python#installation" %}}) and the [workflow extension]({{% ref "python-workflow/_index.md" %}}) installed -- Verify you're using the latest proto bindings - -## Set up the environment - -Start by cloning the [Python SDK repo]. - -```bash -git clone https://github.com/dapr/python-sdk.git -``` - -From the Python SDK root directory, navigate to the Dapr Workflow example. - -```bash -cd examples/workflow -``` - -Run the following command to install the requirements for running this workflow sample with the Dapr Python SDK. - -```bash -pip3 install -r workflow/requirements.txt -``` - -## Run the application locally - -To run the Dapr application, you need to start the Python program and a Dapr sidecar. 
In the terminal, run: - -```bash -dapr run --app-id wf-simple-example --dapr-grpc-port 50001 --resources-path components -- python3 simple.py -``` - -> **Note:** Since Python3.exe is not defined in Windows, you may need to use `python simple.py` instead of `python3 simple.py`. - - -**Expected output** - -``` -- "== APP == Hi Counter!" -- "== APP == New counter value is: 1!" -- "== APP == New counter value is: 11!" -- "== APP == Retry count value is: 0!" -- "== APP == Retry count value is: 1! This print statement verifies retry" -- "== APP == Appending 1 to child_orchestrator_string!" -- "== APP == Appending a to child_orchestrator_string!" -- "== APP == Appending a to child_orchestrator_string!" -- "== APP == Appending 2 to child_orchestrator_string!" -- "== APP == Appending b to child_orchestrator_string!" -- "== APP == Appending b to child_orchestrator_string!" -- "== APP == Appending 3 to child_orchestrator_string!" -- "== APP == Appending c to child_orchestrator_string!" -- "== APP == Appending c to child_orchestrator_string!" -- "== APP == Get response from hello_world_wf after pause call: Suspended" -- "== APP == Get response from hello_world_wf after resume call: Running" -- "== APP == New counter value is: 111!" -- "== APP == New counter value is: 1111!" -- "== APP == Workflow completed! Result: "Completed" -``` - -## What happened? - -When you run the application, several key workflow features are shown: - -1. **Workflow and Activity Registration**: The application uses Python decorators to automatically register workflows and activities with the runtime. This decorator-based approach provides a clean, declarative way to define your workflow components: - ```python - @wfr.workflow(name='hello_world_wf') - def hello_world_wf(ctx: DaprWorkflowContext, wf_input): - # Workflow definition... - - @wfr.activity(name='hello_act') - def hello_act(ctx: WorkflowActivityContext, wf_input): - # Activity definition... - ``` - -2. 
**Runtime Setup**: The application initializes the workflow runtime and client: - ```python - wfr = WorkflowRuntime() - wfr.start() - wf_client = DaprWorkflowClient() - ``` - -2. **Activity Execution**: The workflow executes a series of activities that increment a counter: - ```python - @wfr.workflow(name='hello_world_wf') - def hello_world_wf(ctx: DaprWorkflowContext, wf_input): - yield ctx.call_activity(hello_act, input=1) - yield ctx.call_activity(hello_act, input=10) - ``` - -3. **Retry Logic**: The workflow demonstrates error handling with a retry policy: - ```python - retry_policy = RetryPolicy( - first_retry_interval=timedelta(seconds=1), - max_number_of_attempts=3, - backoff_coefficient=2, - max_retry_interval=timedelta(seconds=10), - retry_timeout=timedelta(seconds=100), - ) - yield ctx.call_activity(hello_retryable_act, retry_policy=retry_policy) - ``` - -4. **Child Workflow**: A child workflow is executed with its own retry policy: - ```python - yield ctx.call_child_workflow(child_retryable_wf, retry_policy=retry_policy) - ``` - -5. **External Event Handling**: The workflow waits for an external event with a timeout: - ```python - event = ctx.wait_for_external_event(event_name) - timeout = ctx.create_timer(timedelta(seconds=30)) - winner = yield when_any([event, timeout]) - ``` - -6. **Workflow Lifecycle Management**: The example demonstrates how to pause and resume the workflow: - ```python - wf_client.pause_workflow(instance_id=instance_id) - metadata = wf_client.get_workflow_state(instance_id=instance_id) - # ... check status ... - wf_client.resume_workflow(instance_id=instance_id) - ``` - -7. **Event Raising**: After resuming, the workflow raises an event: - ```python - wf_client.raise_workflow_event( - instance_id=instance_id, - event_name=event_name, - data=event_data - ) - ``` - -8. 
**Completion and Cleanup**: Finally, the workflow waits for completion and cleans up: - ```python - state = wf_client.wait_for_workflow_completion( - instance_id, - timeout_in_seconds=30 - ) - wf_client.purge_workflow(instance_id=instance_id) - ``` -## Next steps -- [Learn more about Dapr workflow]({{% ref workflow-overview.md %}}) -- [Workflow API reference]({{% ref workflow_api.md %}}) -- [Try implementing more complex workflow patterns](https://github.com/dapr/python-sdk/tree/main/examples/workflow) From 853d60f500ba93485b7df0f605e02d1b7911a829 Mon Sep 17 00:00:00 2001 From: Patrick Assuied Date: Tue, 2 Dec 2025 06:01:13 -0800 Subject: [PATCH 10/16] Added async workflow client implementation, leveraging new durabletask.aio.client implementation (#861) * Added async workflow client implementation, leveraging new durabletask.aio.client implementation Signed-off-by: Patrick Assuied * lint Signed-off-by: Patrick Assuied * Refactor DaprWorkflowClientAsync to DaprWorkflowClient for consistency Signed-off-by: Patrick Assuied * DRY Signed-off-by: Patrick Assuied * Added example for using the async workflow client adapted from the simple example. 
Signed-off-by: Patrick Assuied * ruff'ed Signed-off-by: Patrick Assuied * PR feedback Signed-off-by: Patrick Assuied * added header Signed-off-by: Patrick Assuied * lint Signed-off-by: Patrick Assuied * Remove 'STEP' directives installing server version of package instead of local version (therefore overriding local changes) Signed-off-by: Patrick Assuied --------- Signed-off-by: Patrick Assuied --- examples/demo_actor/README.md | 6 - examples/demo_workflow/README.md | 6 - examples/workflow/README.md | 66 +++++ examples/workflow/simple_aio_client.py | 177 +++++++++++ .../dapr/ext/workflow/aio/__init__.py | 20 ++ .../ext/workflow/aio/dapr_workflow_client.py | 277 ++++++++++++++++++ ext/dapr-ext-workflow/setup.cfg | 2 +- .../tests/test_workflow_client_aio.py | 176 +++++++++++ 8 files changed, 717 insertions(+), 13 deletions(-) create mode 100644 examples/workflow/simple_aio_client.py create mode 100644 ext/dapr-ext-workflow/dapr/ext/workflow/aio/__init__.py create mode 100644 ext/dapr-ext-workflow/dapr/ext/workflow/aio/dapr_workflow_client.py create mode 100644 ext/dapr-ext-workflow/tests/test_workflow_client_aio.py diff --git a/examples/demo_actor/README.md b/examples/demo_actor/README.md index f1b1bbe2b..6353a6e01 100644 --- a/examples/demo_actor/README.md +++ b/examples/demo_actor/README.md @@ -17,16 +17,10 @@ This document describes how to create an Actor(DemoActor) and invoke its methods You can install dapr SDK package using pip command: - - ```sh pip3 install -r demo_actor/requirements.txt ``` - - ## Run in self-hosted mode - ```sh pip3 install -r demo_workflow/requirements.txt ``` - - + +```sh +dapr run --app-id wf-simple-aio-example -- python3 simple_aio_client.py +``` + + +The output of this example should look like this: + +``` + - "== APP == Hi Counter!" + - "== APP == New counter value is: 1!" + - "== APP == New counter value is: 11!" + - "== APP == Retry count value is: 0!" + - "== APP == Retry count value is: 1! 
This print statement verifies retry" + - "== APP == Appending 1 to child_orchestrator_string!" + - "== APP == Appending a to child_orchestrator_string!" + - "== APP == Appending a to child_orchestrator_string!" + - "== APP == Appending 2 to child_orchestrator_string!" + - "== APP == Appending b to child_orchestrator_string!" + - "== APP == Appending b to child_orchestrator_string!" + - "== APP == Appending 3 to child_orchestrator_string!" + - "== APP == Appending c to child_orchestrator_string!" + - "== APP == Appending c to child_orchestrator_string!" + - "== APP == Get response from hello_world_wf after pause call: SUSPENDED" + - "== APP == Get response from hello_world_wf after resume call: RUNNING" + - "== APP == New counter value is: 111!" + - "== APP == New counter value is: 1111!" + - "== APP == Workflow completed! Result: Completed" +``` + ### Task Chaining This example demonstrates how to chain "activity" tasks together in a workflow. You can run this sample using the following command: diff --git a/examples/workflow/simple_aio_client.py b/examples/workflow/simple_aio_client.py new file mode 100644 index 000000000..fd93a5016 --- /dev/null +++ b/examples/workflow/simple_aio_client.py @@ -0,0 +1,177 @@ +# -*- coding: utf-8 -*- +# Copyright 2025 The Dapr Authors +# Licensed under the Apache License, Version 2.0 (the "License"); +# you may not use this file except in compliance with the License. +# You may obtain a copy of the License at +# http://www.apache.org/licenses/LICENSE-2.0 +# Unless required by applicable law or agreed to in writing, software +# distributed under the License is distributed on an "AS IS" BASIS, +# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. +# See the License for the specific language governing permissions and +# limitations under the License. 
+import asyncio +from datetime import timedelta + +from dapr.ext.workflow import ( + DaprWorkflowContext, + RetryPolicy, + WorkflowActivityContext, + WorkflowRuntime, + when_any, +) +from dapr.ext.workflow.aio import DaprWorkflowClient + +from dapr.clients.exceptions import DaprInternalError +from dapr.conf import Settings + +settings = Settings() + +counter = 0 +retry_count = 0 +child_orchestrator_count = 0 +child_orchestrator_string = '' +child_act_retry_count = 0 +instance_id = 'exampleInstanceID' +child_instance_id = 'childInstanceID' +workflow_name = 'hello_world_wf' +child_workflow_name = 'child_wf' +input_data = 'Hi Counter!' +event_name = 'event1' +event_data = 'eventData' +non_existent_id_error = 'no such instance exists' + +retry_policy = RetryPolicy( + first_retry_interval=timedelta(seconds=1), + max_number_of_attempts=3, + backoff_coefficient=2, + max_retry_interval=timedelta(seconds=10), + retry_timeout=timedelta(seconds=100), +) + +wfr = WorkflowRuntime() + + +@wfr.workflow(name='hello_world_wf') +def hello_world_wf(ctx: DaprWorkflowContext, wf_input): + print(f'{wf_input}') + yield ctx.call_activity(hello_act, input=1) + yield ctx.call_activity(hello_act, input=10) + yield ctx.call_activity(hello_retryable_act, retry_policy=retry_policy) + yield ctx.call_child_workflow(child_retryable_wf, retry_policy=retry_policy) + + # Change in event handling: Use when_any to handle both event and timeout + event = ctx.wait_for_external_event(event_name) + timeout = ctx.create_timer(timedelta(seconds=30)) + winner = yield when_any([event, timeout]) + + if winner == timeout: + print('Workflow timed out waiting for event') + return 'Timeout' + + yield ctx.call_activity(hello_act, input=100) + yield ctx.call_activity(hello_act, input=1000) + return 'Completed' + + +@wfr.activity(name='hello_act') +def hello_act(ctx: WorkflowActivityContext, wf_input): + global counter + counter += wf_input + print(f'New counter value is: {counter}!', flush=True) + + 
+@wfr.activity(name='hello_retryable_act') +def hello_retryable_act(ctx: WorkflowActivityContext): + global retry_count + if (retry_count % 2) == 0: + print(f'Retry count value is: {retry_count}!', flush=True) + retry_count += 1 + raise ValueError('Retryable Error') + print(f'Retry count value is: {retry_count}! This print statement verifies retry', flush=True) + retry_count += 1 + + +@wfr.workflow(name='child_retryable_wf') +def child_retryable_wf(ctx: DaprWorkflowContext): + global child_orchestrator_string, child_orchestrator_count + if not ctx.is_replaying: + child_orchestrator_count += 1 + print(f'Appending {child_orchestrator_count} to child_orchestrator_string!', flush=True) + child_orchestrator_string += str(child_orchestrator_count) + yield ctx.call_activity( + act_for_child_wf, input=child_orchestrator_count, retry_policy=retry_policy + ) + if child_orchestrator_count < 3: + raise ValueError('Retryable Error') + + +@wfr.activity(name='act_for_child_wf') +def act_for_child_wf(ctx: WorkflowActivityContext, inp): + global child_orchestrator_string, child_act_retry_count + inp_char = chr(96 + inp) + print(f'Appending {inp_char} to child_orchestrator_string!', flush=True) + child_orchestrator_string += inp_char + if child_act_retry_count % 2 == 0: + child_act_retry_count += 1 + raise ValueError('Retryable Error') + child_act_retry_count += 1 + + +async def main(): + wfr.start() + wf_client = DaprWorkflowClient() + + try: + print('==========Start Counter Increase as per Input:==========') + await wf_client.schedule_new_workflow( + workflow=hello_world_wf, input=input_data, instance_id=instance_id + ) + + await wf_client.wait_for_workflow_start(instance_id) + + # Sleep to let the workflow run initial activities + await asyncio.sleep(12) + + assert counter == 11 + assert retry_count == 2 + assert child_orchestrator_string == '1aa2bb3cc' + + # Pause Test + await wf_client.pause_workflow(instance_id=instance_id) + metadata = await 
wf_client.get_workflow_state(instance_id=instance_id) + print(f'Get response from {workflow_name} after pause call: {metadata.runtime_status.name}') + + # Resume Test + await wf_client.resume_workflow(instance_id=instance_id) + metadata = await wf_client.get_workflow_state(instance_id=instance_id) + print( + f'Get response from {workflow_name} after resume call: {metadata.runtime_status.name}' + ) + + await asyncio.sleep(2) # Give the workflow time to reach the event wait state + await wf_client.raise_workflow_event( + instance_id=instance_id, event_name=event_name, data=event_data + ) + + print('========= Waiting for Workflow completion', flush=True) + try: + state = await wf_client.wait_for_workflow_completion(instance_id, timeout_in_seconds=30) + if state.runtime_status.name == 'COMPLETED': + print('Workflow completed! Result: {}'.format(state.serialized_output.strip('"'))) + else: + print(f'Workflow failed! Status: {state.runtime_status.name}') + except TimeoutError: + print('*** Workflow timed out!') + + await wf_client.purge_workflow(instance_id=instance_id) + try: + await wf_client.get_workflow_state(instance_id=instance_id) + except DaprInternalError as err: + if non_existent_id_error in err._message: + print('Instance Successfully Purged') + finally: + wfr.shutdown() + + +if __name__ == '__main__': + asyncio.run(main()) diff --git a/ext/dapr-ext-workflow/dapr/ext/workflow/aio/__init__.py b/ext/dapr-ext-workflow/dapr/ext/workflow/aio/__init__.py new file mode 100644 index 000000000..ceb8672be --- /dev/null +++ b/ext/dapr-ext-workflow/dapr/ext/workflow/aio/__init__.py @@ -0,0 +1,20 @@ +# -*- coding: utf-8 -*- + +""" +Copyright 2025 The Dapr Authors +Licensed under the Apache License, Version 2.0 (the "License"); +you may not use this file except in compliance with the License. 
+You may obtain a copy of the License at + http://www.apache.org/licenses/LICENSE-2.0 +Unless required by applicable law or agreed to in writing, software +distributed under the License is distributed on an "AS IS" BASIS, +WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. +See the License for the specific language governing permissions and +limitations under the License. +""" + +from .dapr_workflow_client import DaprWorkflowClient + +__all__ = [ + 'DaprWorkflowClient', +] diff --git a/ext/dapr-ext-workflow/dapr/ext/workflow/aio/dapr_workflow_client.py b/ext/dapr-ext-workflow/dapr/ext/workflow/aio/dapr_workflow_client.py new file mode 100644 index 000000000..cd5e632f1 --- /dev/null +++ b/ext/dapr-ext-workflow/dapr/ext/workflow/aio/dapr_workflow_client.py @@ -0,0 +1,277 @@ +# -*- coding: utf-8 -*- + +""" +Copyright 2025 The Dapr Authors +Licensed under the Apache License, Version 2.0 (the "License"); +you may not use this file except in compliance with the License. +You may obtain a copy of the License at + http://www.apache.org/licenses/LICENSE-2.0 +Unless required by applicable law or agreed to in writing, software +distributed under the License is distributed on an "AS IS" BASIS, +WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. +See the License for the specific language governing permissions and +limitations under the License. 
+""" + +from __future__ import annotations + +from datetime import datetime +from typing import Any, Optional, TypeVar + +import durabletask.internal.orchestrator_service_pb2 as pb +from dapr.ext.workflow.logger import Logger, LoggerOptions +from dapr.ext.workflow.util import getAddress +from dapr.ext.workflow.workflow_context import Workflow +from dapr.ext.workflow.workflow_state import WorkflowState +from durabletask.aio import client as aioclient +from grpc.aio import AioRpcError + +from dapr.clients import DaprInternalError +from dapr.clients.http.client import DAPR_API_TOKEN_HEADER +from dapr.conf import settings +from dapr.conf.helpers import GrpcEndpoint + +T = TypeVar('T') +TInput = TypeVar('TInput') +TOutput = TypeVar('TOutput') + + +class DaprWorkflowClient: + """Async client for managing Dapr Workflow instances. + + This uses a gRPC async connection to send commands directly to the workflow engine, + bypassing the Dapr API layer. Intended to be used by workflow applications. + """ + + def __init__( + self, + host: Optional[str] = None, + port: Optional[str] = None, + logger_options: Optional[LoggerOptions] = None, + ): + address = getAddress(host, port) + + try: + uri = GrpcEndpoint(address) + except ValueError as error: + raise DaprInternalError(f'{error}') from error + + self._logger = Logger('DaprWorkflowClient', logger_options) + + metadata = tuple() + if settings.DAPR_API_TOKEN: + metadata = ((DAPR_API_TOKEN_HEADER, settings.DAPR_API_TOKEN),) + options = self._logger.get_options() + self.__obj = aioclient.AsyncTaskHubGrpcClient( + host_address=uri.endpoint, + metadata=metadata, + secure_channel=uri.tls, + log_handler=options.log_handler, + log_formatter=options.log_formatter, + ) + + async def schedule_new_workflow( + self, + workflow: Workflow, + *, + input: Optional[TInput] = None, + instance_id: Optional[str] = None, + start_at: Optional[datetime] = None, + reuse_id_policy: Optional[pb.OrchestrationIdReusePolicy] = None, + ) -> str: + 
"""Schedules a new workflow instance for execution. + + Args: + workflow: The workflow to schedule. + input: The optional input to pass to the scheduled workflow instance. This must be a + serializable value. + instance_id: The unique ID of the workflow instance to schedule. If not specified, a + new GUID value is used. + start_at: The time when the workflow instance should start executing. + If not specified or if a date-time in the past is specified, the workflow instance will + be scheduled immediately. + reuse_id_policy: Optional policy to reuse the workflow id when there is a conflict with + an existing workflow instance. + + Returns: + The ID of the scheduled workflow instance. + """ + workflow_name = ( + workflow.__dict__['_dapr_alternate_name'] + if hasattr(workflow, '_dapr_alternate_name') + else workflow.__name__ + ) + return await self.__obj.schedule_new_orchestration( + workflow_name, + input=input, + instance_id=instance_id, + start_at=start_at, + reuse_id_policy=reuse_id_policy, + ) + + async def get_workflow_state( + self, instance_id: str, *, fetch_payloads: bool = True + ) -> Optional[WorkflowState]: + """Fetches runtime state for the specified workflow instance. + + Args: + instance_id: The unique ID of the workflow instance to fetch. + fetch_payloads: If true, fetches the input, output payloads and custom status + for the workflow instance. Defaults to true. + + Returns: + The current state of the workflow instance, or None if the workflow instance does not + exist. 
+ + """ + try: + state = await self.__obj.get_orchestration_state( + instance_id, fetch_payloads=fetch_payloads + ) + return WorkflowState(state) if state else None + except AioRpcError as error: + if error.details() and 'no such instance exists' in error.details(): + self._logger.warning(f'Workflow instance not found: {instance_id}') + return None + self._logger.error( + f'Unhandled RPC error while fetching workflow state: {error.code()} - {error.details()}' + ) + raise + + async def wait_for_workflow_start( + self, instance_id: str, *, fetch_payloads: bool = False, timeout_in_seconds: int = 0 + ) -> Optional[WorkflowState]: + """Waits for a workflow to start running and returns a WorkflowState object that contains + metadata about the started workflow. + + A "started" workflow instance is any instance not in the WorkflowRuntimeStatus.Pending + state. This method will return a completed task if the workflow has already started + running or has already completed. + + Args: + instance_id: The unique ID of the workflow instance to wait for. + fetch_payloads: If true, fetches the input, output payloads and custom status for + the workflow instance. Defaults to false. + timeout_in_seconds: The maximum time to wait for the workflow instance to start running. + Defaults to meaning no timeout. + + Returns: + WorkflowState record that describes the workflow instance and its execution status. + If the specified workflow isn't found, the WorkflowState.Exists value will be false. + """ + state = await self.__obj.wait_for_orchestration_start( + instance_id, fetch_payloads=fetch_payloads, timeout=timeout_in_seconds + ) + return WorkflowState(state) if state else None + + async def wait_for_workflow_completion( + self, instance_id: str, *, fetch_payloads: bool = True, timeout_in_seconds: int = 0 + ) -> Optional[WorkflowState]: + """Waits for a workflow to complete and returns a WorkflowState object that contains + metadata about the started instance. 
+ + A "completed" workflow instance is any instance in one of the terminal states. For + example, the WorkflowRuntimeStatus.Completed, WorkflowRuntimeStatus.Failed or + WorkflowRuntimeStatus.Terminated states. + + Workflows are long-running and could take hours, days, or months before completing. + Workflows can also be eternal, in which case they'll never complete unless terminated. + In such cases, this call may block indefinitely, so care must be taken to ensure + appropriate timeouts are enforced using timeout parameter. + + If a workflow instance is already complete when this method is called, the method + will return immediately. + + Args: + instance_id: The unique ID of the workflow instance to wait for. + fetch_payloads: If true, fetches the input, output payloads and custom status + for the workflow instance. Defaults to true. + timeout_in_seconds: The maximum time in seconds to wait for the workflow instance to + complete. Defaults to 0 seconds, meaning no timeout. + + Returns: + WorkflowState record that describes the workflow instance and its execution status. + """ + state = await self.__obj.wait_for_orchestration_completion( + instance_id, fetch_payloads=fetch_payloads, timeout=timeout_in_seconds + ) + return WorkflowState(state) if state else None + + async def raise_workflow_event( + self, instance_id: str, event_name: str, *, data: Optional[Any] = None + ) -> None: + """Sends an event notification message to a waiting workflow instance. + In order to handle the event, the target workflow instance must be waiting for an + event named value of "eventName" param using the wait_for_external_event API. + If the target workflow instance is not yet waiting for an event named param "eventName" + value, then the event will be saved in the workflow instance state and dispatched + immediately when the workflow calls wait_for_external_event. + This event saving occurs even if the workflow has canceled its wait operation before + the event was received. 
+ + Workflows can wait for the same event name multiple times, so sending multiple events + with the same name is allowed. Each external event received by a workflow will complete + just one task returned by the wait_for_external_event method. + + Raised events for a completed or non-existent workflow instance will be silently + discarded. + + Args: + instance_id: The ID of the workflow instance that will handle the event. + event_name: The name of the event. Event names are case-insensitive. + data: The serializable data payload to include with the event. + """ + return await self.__obj.raise_orchestration_event(instance_id, event_name, data=data) + + async def terminate_workflow( + self, instance_id: str, *, output: Optional[Any] = None, recursive: bool = True + ) -> None: + """Terminates a running workflow instance and updates its runtime status to + WorkflowRuntimeStatus.Terminated This method internally enqueues a "terminate" message in + the task hub. When the task hub worker processes this message, it will update the runtime + status of the target instance to WorkflowRuntimeStatus.Terminated. You can use + wait_for_workflow_completion to wait for the instance to reach the terminated state. + + Terminating a workflow will terminate all child workflows that were started by + the workflow instance. + + However, terminating a workflow has no effect on any in-flight activity function + executions that were started by the terminated workflow instance. + + At the time of writing, there is no way to terminate an in-flight activity execution. + + Args: + instance_id: The ID of the workflow instance to terminate. + output: The optional output to set for the terminated workflow instance. + recursive: The optional flag to terminate all child workflows. 
+ + """ + return await self.__obj.terminate_orchestration( + instance_id, output=output, recursive=recursive + ) + + async def pause_workflow(self, instance_id: str) -> None: + """Suspends a workflow instance, halting processing of it until resume_workflow is used to + resume the workflow. + + Args: + instance_id: The instance ID of the workflow to suspend. + """ + return await self.__obj.suspend_orchestration(instance_id) + + async def resume_workflow(self, instance_id: str) -> None: + """Resumes a workflow instance that was suspended via pause_workflow. + + Args: + instance_id: The instance ID of the workflow to resume. + """ + return await self.__obj.resume_orchestration(instance_id) + + async def purge_workflow(self, instance_id: str, recursive: bool = True) -> None: + """Purge data from a workflow instance. + + Args: + instance_id: The instance ID of the workflow to purge. + recursive: The optional flag to also purge data from all child workflows. + """ + return await self.__obj.purge_orchestration(instance_id, recursive) diff --git a/ext/dapr-ext-workflow/setup.cfg b/ext/dapr-ext-workflow/setup.cfg index 6efe6668c..fdf8bd4dc 100644 --- a/ext/dapr-ext-workflow/setup.cfg +++ b/ext/dapr-ext-workflow/setup.cfg @@ -25,7 +25,7 @@ packages = find_namespace: include_package_data = True install_requires = dapr >= 1.16.0.dev - durabletask-dapr >= 0.2.0a9 + durabletask-dapr >= 0.2.0a12 [options.packages.find] include = diff --git a/ext/dapr-ext-workflow/tests/test_workflow_client_aio.py b/ext/dapr-ext-workflow/tests/test_workflow_client_aio.py new file mode 100644 index 000000000..c84fcbfe6 --- /dev/null +++ b/ext/dapr-ext-workflow/tests/test_workflow_client_aio.py @@ -0,0 +1,176 @@ +# -*- coding: utf-8 -*- + +""" +Copyright 2025 The Dapr Authors +Licensed under the Apache License, Version 2.0 (the "License"); +you may not use this file except in compliance with the License. 
+You may obtain a copy of the License at + http://www.apache.org/licenses/LICENSE-2.0 +Unless required by applicable law or agreed to in writing, software +distributed under the License is distributed on an "AS IS" BASIS, +WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. +See the License for the specific language governing permissions and +limitations under the License. +""" + +import unittest +from datetime import datetime +from typing import Any, Union +from unittest import mock + +import durabletask.internal.orchestrator_service_pb2 as pb +from dapr.ext.workflow.aio import DaprWorkflowClient +from dapr.ext.workflow.dapr_workflow_context import DaprWorkflowContext +from durabletask import client +from grpc.aio import AioRpcError + +mock_schedule_result = 'workflow001' +mock_raise_event_result = 'event001' +mock_terminate_result = 'terminate001' +mock_suspend_result = 'suspend001' +mock_resume_result = 'resume001' +mock_purge_result = 'purge001' +mock_instance_id = 'instance001' +wf_status = 'not-found' + + +class SimulatedAioRpcError(AioRpcError): + def __init__(self, code, details): + self._code = code + self._details = details + + def code(self): + return self._code + + def details(self): + return self._details + + +class FakeAsyncTaskHubGrpcClient: + async def schedule_new_orchestration( + self, + workflow, + *, + input, + instance_id, + start_at, + reuse_id_policy: Union[pb.OrchestrationIdReusePolicy, None] = None, + ): + return mock_schedule_result + + async def get_orchestration_state(self, instance_id, *, fetch_payloads): + if wf_status == 'not-found': + raise SimulatedAioRpcError(code='UNKNOWN', details='no such instance exists') + elif wf_status == 'found': + return self._inner_get_orchestration_state( + instance_id, client.OrchestrationStatus.PENDING + ) + else: + raise SimulatedAioRpcError(code='UNKNOWN', details='unknown error') + + async def wait_for_orchestration_start(self, instance_id, *, fetch_payloads, timeout): + return 
self._inner_get_orchestration_state(instance_id, client.OrchestrationStatus.RUNNING) + + async def wait_for_orchestration_completion(self, instance_id, *, fetch_payloads, timeout): + return self._inner_get_orchestration_state( + instance_id, client.OrchestrationStatus.COMPLETED + ) + + async def raise_orchestration_event( + self, instance_id: str, event_name: str, *, data: Union[Any, None] = None + ): + return mock_raise_event_result + + async def terminate_orchestration( + self, instance_id: str, *, output: Union[Any, None] = None, recursive: bool = True + ): + return mock_terminate_result + + async def suspend_orchestration(self, instance_id: str): + return mock_suspend_result + + async def resume_orchestration(self, instance_id: str): + return mock_resume_result + + async def purge_orchestration(self, instance_id: str, recursive: bool = True): + return mock_purge_result + + def _inner_get_orchestration_state(self, instance_id, state: client.OrchestrationStatus): + return client.OrchestrationState( + instance_id=instance_id, + name='', + runtime_status=state, + created_at=datetime.now(), + last_updated_at=datetime.now(), + serialized_input=None, + serialized_output=None, + serialized_custom_status=None, + failure_details=None, + ) + + +class WorkflowClientAioTest(unittest.IsolatedAsyncioTestCase): + def mock_client_wf(ctx: DaprWorkflowContext, input): + print(f'{input}') + + async def test_client_functions(self): + with mock.patch( + 'durabletask.aio.client.AsyncTaskHubGrpcClient', + return_value=FakeAsyncTaskHubGrpcClient(), + ): + wfClient = DaprWorkflowClient() + actual_schedule_result = await wfClient.schedule_new_workflow( + workflow=self.mock_client_wf, input='Hi Chef!' 
+ ) + assert actual_schedule_result == mock_schedule_result + + global wf_status + wf_status = 'not-found' + actual_get_result = await wfClient.get_workflow_state( + instance_id=mock_instance_id, fetch_payloads=True + ) + assert actual_get_result is None + + wf_status = 'error' + with self.assertRaises(AioRpcError): + await wfClient.get_workflow_state(instance_id=mock_instance_id, fetch_payloads=True) + + assert actual_get_result is None + + wf_status = 'found' + actual_get_result = await wfClient.get_workflow_state( + instance_id=mock_instance_id, fetch_payloads=True + ) + assert actual_get_result.runtime_status.name == 'PENDING' + assert actual_get_result.instance_id == mock_instance_id + + actual_wait_start_result = await wfClient.wait_for_workflow_start( + instance_id=mock_instance_id, timeout_in_seconds=30 + ) + assert actual_wait_start_result.runtime_status.name == 'RUNNING' + assert actual_wait_start_result.instance_id == mock_instance_id + + actual_wait_completion_result = await wfClient.wait_for_workflow_completion( + instance_id=mock_instance_id, timeout_in_seconds=30 + ) + assert actual_wait_completion_result.runtime_status.name == 'COMPLETED' + assert actual_wait_completion_result.instance_id == mock_instance_id + + actual_raise_event_result = await wfClient.raise_workflow_event( + instance_id=mock_instance_id, event_name='test_event', data='test_data' + ) + assert actual_raise_event_result == mock_raise_event_result + + actual_terminate_result = await wfClient.terminate_workflow( + instance_id=mock_instance_id, output='test_output' + ) + assert actual_terminate_result == mock_terminate_result + + actual_suspend_result = await wfClient.pause_workflow(instance_id=mock_instance_id) + assert actual_suspend_result == mock_suspend_result + + actual_resume_result = await wfClient.resume_workflow(instance_id=mock_instance_id) + assert actual_resume_result == mock_resume_result + + actual_purge_result = await 
wfClient.purge_workflow(instance_id=mock_instance_id) + assert actual_purge_result == mock_purge_result From 7657688cbdfdcdcbe005d79d560fd763cd8e74a2 Mon Sep 17 00:00:00 2001 From: Casper Nielsen Date: Tue, 9 Dec 2025 10:14:00 +0100 Subject: [PATCH 11/16] Fix/ext pkgs typed (#869) * chore: remove repo root py.typed. Only necessary in the pkg root Signed-off-by: Casper Nielsen * chore: add py.typed & ref in setup.cfg Signed-off-by: Casper Nielsen * fix: correct path for ext workflows py.typed file Signed-off-by: Casper Nielsen * fix: correct path for other ext packes py.typed file Signed-off-by: Casper Nielsen * fix: move back to correct location Signed-off-by: Casper Nielsen * feat: simplify main dapr pkg typed setup Signed-off-by: Casper Nielsen * feat: add type info for flask_dapr Signed-off-by: Casper Nielsen --------- Signed-off-by: Casper Nielsen --- dapr/{actor => }/py.typed | 0 .../dapr-ext-fastapi/dapr/ext/fastapi}/py.typed | 0 ext/dapr-ext-fastapi/setup.cfg | 4 ++++ .../dapr-ext-grpc/dapr/ext/grpc}/py.typed | 0 ext/dapr-ext-grpc/setup.cfg | 4 ++++ .../dapr-ext-langgraph/dapr/ext/langgraph}/py.typed | 0 ext/dapr-ext-langgraph/setup.cfg | 4 ++++ .../dapr-ext-workflow/dapr/ext/workflow}/py.typed | 0 ext/dapr-ext-workflow/setup.cfg | 4 ++++ .../serializers => ext/flask_dapr/flask_dapr}/py.typed | 0 ext/flask_dapr/setup.cfg | 4 ++++ py.typed | 0 setup.cfg | 10 +--------- 13 files changed, 21 insertions(+), 9 deletions(-) rename dapr/{actor => }/py.typed (100%) rename {dapr/aio/clients => ext/dapr-ext-fastapi/dapr/ext/fastapi}/py.typed (100%) rename {dapr/clients => ext/dapr-ext-grpc/dapr/ext/grpc}/py.typed (100%) rename {dapr/conf => ext/dapr-ext-langgraph/dapr/ext/langgraph}/py.typed (100%) rename {dapr/proto => ext/dapr-ext-workflow/dapr/ext/workflow}/py.typed (100%) rename {dapr/serializers => ext/flask_dapr/flask_dapr}/py.typed (100%) delete mode 100644 py.typed diff --git a/dapr/actor/py.typed b/dapr/py.typed similarity index 100% rename from 
dapr/actor/py.typed rename to dapr/py.typed diff --git a/dapr/aio/clients/py.typed b/ext/dapr-ext-fastapi/dapr/ext/fastapi/py.typed similarity index 100% rename from dapr/aio/clients/py.typed rename to ext/dapr-ext-fastapi/dapr/ext/fastapi/py.typed diff --git a/ext/dapr-ext-fastapi/setup.cfg b/ext/dapr-ext-fastapi/setup.cfg index f0de1d350..8b37613fb 100644 --- a/ext/dapr-ext-fastapi/setup.cfg +++ b/ext/dapr-ext-fastapi/setup.cfg @@ -34,3 +34,7 @@ include = exclude = tests + +[options.package_data] +dapr.ext.fastapi = + py.typed diff --git a/dapr/clients/py.typed b/ext/dapr-ext-grpc/dapr/ext/grpc/py.typed similarity index 100% rename from dapr/clients/py.typed rename to ext/dapr-ext-grpc/dapr/ext/grpc/py.typed diff --git a/ext/dapr-ext-grpc/setup.cfg b/ext/dapr-ext-grpc/setup.cfg index e2eaaf887..7d4d2e898 100644 --- a/ext/dapr-ext-grpc/setup.cfg +++ b/ext/dapr-ext-grpc/setup.cfg @@ -33,3 +33,7 @@ include = exclude = tests + +[options.package_data] +dapr.ext.grpc = + py.typed \ No newline at end of file diff --git a/dapr/conf/py.typed b/ext/dapr-ext-langgraph/dapr/ext/langgraph/py.typed similarity index 100% rename from dapr/conf/py.typed rename to ext/dapr-ext-langgraph/dapr/ext/langgraph/py.typed diff --git a/ext/dapr-ext-langgraph/setup.cfg b/ext/dapr-ext-langgraph/setup.cfg index bb32e782c..a2a851a6f 100644 --- a/ext/dapr-ext-langgraph/setup.cfg +++ b/ext/dapr-ext-langgraph/setup.cfg @@ -34,3 +34,7 @@ include = exclude = tests + +[options.package_data] +dapr.ext.langgraph = + py.typed \ No newline at end of file diff --git a/dapr/proto/py.typed b/ext/dapr-ext-workflow/dapr/ext/workflow/py.typed similarity index 100% rename from dapr/proto/py.typed rename to ext/dapr-ext-workflow/dapr/ext/workflow/py.typed diff --git a/ext/dapr-ext-workflow/setup.cfg b/ext/dapr-ext-workflow/setup.cfg index fdf8bd4dc..a621308fc 100644 --- a/ext/dapr-ext-workflow/setup.cfg +++ b/ext/dapr-ext-workflow/setup.cfg @@ -33,3 +33,7 @@ include = exclude = tests + +[options.package_data] 
+dapr.ext.workflow = + py.typed diff --git a/dapr/serializers/py.typed b/ext/flask_dapr/flask_dapr/py.typed similarity index 100% rename from dapr/serializers/py.typed rename to ext/flask_dapr/flask_dapr/py.typed diff --git a/ext/flask_dapr/setup.cfg b/ext/flask_dapr/setup.cfg index 4bc95bc6f..826136c54 100644 --- a/ext/flask_dapr/setup.cfg +++ b/ext/flask_dapr/setup.cfg @@ -27,3 +27,7 @@ zip_safe = false install_requires = Flask >= 1.1 dapr >= 1.16.0.dev + +[options.package_data] +flask_dapr = + py.typed diff --git a/py.typed b/py.typed deleted file mode 100644 index e69de29bb..000000000 diff --git a/setup.cfg b/setup.cfg index de9ecc33b..e68961557 100644 --- a/setup.cfg +++ b/setup.cfg @@ -42,13 +42,5 @@ exclude = tests [options.package_data] -dapr.actor = - py.typed -dapr.clients = - py.typed -dapr.conf = - py.typed -dapr.proto = - py.typed -dapr.serializers = +dapr = py.typed From 126825d70ff294342e62ef3e4ce5516de2d8c8ce Mon Sep 17 00:00:00 2001 From: Mingsing <37703938+StarJourneyMingsing@users.noreply.github.com> Date: Wed, 10 Dec 2025 22:41:12 +0800 Subject: [PATCH 12/16] Wait for the Dapr health check asynchronously in aio/clients/grpc/subscription.py to avoid blocking, ensuring the asyncio gRPC stream can close properly. 
(#839) * wait for Dapr health check asynchronously Switch Dapr health check from blocking call to async call to avoid blocking the event loop in async environments Signed-off-by: mingsing <107218803@qq.com> * add StatusCode.UNKNOWN branch Signed-off-by: mingsing <107218803@qq.com> * aio dapr health Signed-off-by: mingsing <107218803@qq.com> * add healthcheck test Signed-off-by: mingsing <107218803@qq.com> * ruff pass Signed-off-by: mingsing <107218803@qq.com> * fix async health check Signed-off-by: mingsing <107218803@qq.com> * use aiohttp Signed-off-by: mingsing <107218803@qq.com> * use aiohttp for asynchronous health check Signed-off-by: mingsing <107218803@qq.com> * remove deprecated wait_until_ready in async DaprHealth Signed-off-by: mingsing <107218803@qq.com> * rm DaprHealth.get_ssl_context in test_dapr_grpc_client_async_secure Signed-off-by: mingsing <107218803@qq.com> * format Signed-off-by: mingsing <107218803@qq.com> * Revert "rm DaprHealth.get_ssl_context in test_dapr_grpc_client_async_secure" Signed-off-by: mingsing <107218803@qq.com> * ruff check Signed-off-by: mingsing <107218803@qq.com> --------- Signed-off-by: mingsing <107218803@qq.com> Co-authored-by: mingsing <107218803@qq.com> Co-authored-by: Albert Callarisa --- dapr/aio/clients/grpc/client.py | 2 +- dapr/aio/clients/grpc/subscription.py | 6 +- dapr/aio/clients/health.py | 59 ++++++ .../test_dapr_grpc_client_async_secure.py | 2 + ...est_heatlhcheck.py => test_healthcheck.py} | 0 tests/clients/test_healthcheck_async.py | 197 ++++++++++++++++++ 6 files changed, 262 insertions(+), 4 deletions(-) create mode 100644 dapr/aio/clients/health.py rename tests/clients/{test_heatlhcheck.py => test_healthcheck.py} (100%) create mode 100644 tests/clients/test_healthcheck_async.py diff --git a/dapr/aio/clients/grpc/client.py b/dapr/aio/clients/grpc/client.py index 028eaef51..d363775fb 100644 --- a/dapr/aio/clients/grpc/client.py +++ b/dapr/aio/clients/grpc/client.py @@ -1906,7 +1906,7 @@ async def wait(self, 
timeout_s: float): remaining = (start + timeout_s) - time.time() if remaining < 0: raise e - asyncio.sleep(min(1, remaining)) + await asyncio.sleep(min(1, remaining)) async def get_metadata(self) -> GetMetadataResponse: """Returns information about the sidecar allowing for runtime diff --git a/dapr/aio/clients/grpc/subscription.py b/dapr/aio/clients/grpc/subscription.py index fff74f166..5ba1f6a33 100644 --- a/dapr/aio/clients/grpc/subscription.py +++ b/dapr/aio/clients/grpc/subscription.py @@ -3,8 +3,8 @@ from grpc import StatusCode from grpc.aio import AioRpcError +from dapr.aio.clients.health import DaprHealth from dapr.clients.grpc._response import TopicEventResponse -from dapr.clients.health import DaprHealth from dapr.common.pubsub.subscription import ( StreamCancelledError, StreamInactiveError, @@ -52,7 +52,7 @@ async def outgoing_request_iterator(): async def reconnect_stream(self): await self.close() - DaprHealth.wait_for_sidecar() + await DaprHealth.wait_for_sidecar() print('Attempting to reconnect...') await self.start() @@ -67,7 +67,7 @@ async def next_message(self): return None return SubscriptionMessage(message.event_message) except AioRpcError as e: - if e.code() == StatusCode.UNAVAILABLE: + if e.code() == StatusCode.UNAVAILABLE or e.code() == StatusCode.UNKNOWN: print( f'gRPC error while reading from stream: {e.details()}, ' f'Status Code: {e.code()}. ' diff --git a/dapr/aio/clients/health.py b/dapr/aio/clients/health.py new file mode 100644 index 000000000..9ab66ebba --- /dev/null +++ b/dapr/aio/clients/health.py @@ -0,0 +1,59 @@ +# -*- coding: utf-8 -*- + +""" +Copyright 2024 The Dapr Authors +Licensed under the Apache License, Version 2.0 (the "License"); +you may not use this file except in compliance with the License. 
+You may obtain a copy of the License at + http://www.apache.org/licenses/LICENSE-2.0 +Unless required by applicable law or agreed to in writing, software +distributed under the License is distributed on an "AS IS" BASIS, +WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. +See the License for the specific language governing permissions and +limitations under the License. +""" + +import asyncio +import time + +import aiohttp + +from dapr.clients.http.conf import DAPR_API_TOKEN_HEADER, DAPR_USER_AGENT, USER_AGENT_HEADER +from dapr.clients.http.helpers import get_api_url +from dapr.conf import settings + + +class DaprHealth: + @staticmethod + async def wait_for_sidecar(): + health_url = f'{get_api_url()}/healthz/outbound' + headers = {USER_AGENT_HEADER: DAPR_USER_AGENT} + if settings.DAPR_API_TOKEN is not None: + headers[DAPR_API_TOKEN_HEADER] = settings.DAPR_API_TOKEN + timeout = float(settings.DAPR_HEALTH_TIMEOUT) + + start = time.time() + ssl_context = DaprHealth.get_ssl_context() + + connector = aiohttp.TCPConnector(ssl=ssl_context) + async with aiohttp.ClientSession(connector=connector) as session: + while True: + try: + async with session.get(health_url, headers=headers) as response: + if 200 <= response.status < 300: + break + except aiohttp.ClientError as e: + print(f'Health check on {health_url} failed: {e}') + except Exception as e: + print(f'Unexpected error during health check: {e}') + + remaining = (start + timeout) - time.time() + if remaining <= 0: + raise TimeoutError(f'Dapr health check timed out, after {timeout}.') + await asyncio.sleep(min(1, remaining)) + + @staticmethod + def get_ssl_context(): + # This method is used (overwritten) from tests + # to return context for self-signed certificates + return None diff --git a/tests/clients/test_dapr_grpc_client_async_secure.py b/tests/clients/test_dapr_grpc_client_async_secure.py index a49fe5fc0..a76a0449f 100644 --- a/tests/clients/test_dapr_grpc_client_async_secure.py +++ 
b/tests/clients/test_dapr_grpc_client_async_secure.py @@ -17,6 +17,7 @@ from unittest.mock import patch from dapr.aio.clients.grpc.client import DaprGrpcClientAsync +from dapr.aio.clients.health import DaprHealth as DaprHealthAsync from dapr.clients.health import DaprHealth from dapr.conf import settings from tests.clients.certs import replacement_get_credentials_func, replacement_get_health_context @@ -25,6 +26,7 @@ from .fake_dapr_server import FakeDaprSidecar DaprGrpcClientAsync.get_credentials = replacement_get_credentials_func +DaprHealthAsync.get_ssl_context = replacement_get_health_context DaprHealth.get_ssl_context = replacement_get_health_context diff --git a/tests/clients/test_heatlhcheck.py b/tests/clients/test_healthcheck.py similarity index 100% rename from tests/clients/test_heatlhcheck.py rename to tests/clients/test_healthcheck.py diff --git a/tests/clients/test_healthcheck_async.py b/tests/clients/test_healthcheck_async.py new file mode 100644 index 000000000..668768732 --- /dev/null +++ b/tests/clients/test_healthcheck_async.py @@ -0,0 +1,197 @@ +# -*- coding: utf-8 -*- + +""" +Copyright 2025 The Dapr Authors +Licensed under the Apache License, Version 2.0 (the "License"); +you may not use this file except in compliance with the License. +You may obtain a copy of the License at + http://www.apache.org/licenses/LICENSE-2.0 +Unless required by applicable law or agreed to in writing, software +distributed under the License is distributed on an "AS IS" BASIS, +WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. +See the License for the specific language governing permissions and +limitations under the License. 
+""" + +import asyncio +import time +import unittest +from unittest.mock import AsyncMock, MagicMock, patch + +from dapr.aio.clients.health import DaprHealth +from dapr.conf import settings +from dapr.version import __version__ + + +class DaprHealthCheckAsyncTests(unittest.IsolatedAsyncioTestCase): + @patch.object(settings, 'DAPR_HTTP_ENDPOINT', 'http://domain.com:3500') + @patch('aiohttp.ClientSession.get') + async def test_wait_for_sidecar_success(self, mock_get): + # Create mock response + mock_response = MagicMock() + mock_response.status = 200 + mock_response.__aenter__ = AsyncMock(return_value=mock_response) + mock_response.__aexit__ = AsyncMock(return_value=None) + mock_get.return_value = mock_response + + try: + await DaprHealth.wait_for_sidecar() + except Exception as e: + self.fail(f'wait_for_sidecar() raised an exception unexpectedly: {e}') + + mock_get.assert_called_once() + + # Check URL + called_url = mock_get.call_args[0][0] + self.assertEqual(called_url, 'http://domain.com:3500/v1.0/healthz/outbound') + + # Check headers are properly set + headers = mock_get.call_args[1]['headers'] + self.assertIn('User-Agent', headers) + self.assertEqual(headers['User-Agent'], f'dapr-sdk-python/{__version__}') + + @patch.object(settings, 'DAPR_HTTP_ENDPOINT', 'http://domain.com:3500') + @patch.object(settings, 'DAPR_API_TOKEN', 'mytoken') + @patch('aiohttp.ClientSession.get') + async def test_wait_for_sidecar_success_with_api_token(self, mock_get): + # Create mock response + mock_response = MagicMock() + mock_response.status = 200 + mock_response.__aenter__ = AsyncMock(return_value=mock_response) + mock_response.__aexit__ = AsyncMock(return_value=None) + mock_get.return_value = mock_response + + try: + await DaprHealth.wait_for_sidecar() + except Exception as e: + self.fail(f'wait_for_sidecar() raised an exception unexpectedly: {e}') + + mock_get.assert_called_once() + + # Check headers are properly set + headers = mock_get.call_args[1]['headers'] + 
self.assertIn('User-Agent', headers) + self.assertEqual(headers['User-Agent'], f'dapr-sdk-python/{__version__}') + self.assertIn('dapr-api-token', headers) + self.assertEqual(headers['dapr-api-token'], 'mytoken') + + @patch.object(settings, 'DAPR_HEALTH_TIMEOUT', '2.5') + @patch('aiohttp.ClientSession.get') + async def test_wait_for_sidecar_timeout(self, mock_get): + # Create mock response that always returns 500 + mock_response = MagicMock() + mock_response.status = 500 + mock_response.__aenter__ = AsyncMock(return_value=mock_response) + mock_response.__aexit__ = AsyncMock(return_value=None) + mock_get.return_value = mock_response + + start = time.time() + + with self.assertRaises(TimeoutError): + await DaprHealth.wait_for_sidecar() + + self.assertGreaterEqual(time.time() - start, 2.5) + self.assertGreater(mock_get.call_count, 1) + + @patch.object(settings, 'DAPR_HTTP_ENDPOINT', 'http://domain.com:3500') + @patch.object(settings, 'DAPR_HEALTH_TIMEOUT', '5.0') + @patch('aiohttp.ClientSession.get') + async def test_health_check_does_not_block(self, mock_get): + """Test that health check doesn't block other async tasks from running""" + # Mock health check to retry several times before succeeding + call_count = [0] # Use list to allow modification in nested function + + def side_effect(*args, **kwargs): + call_count[0] += 1 + # First 2 calls fail with ClientError, then succeed + # This will cause ~2 seconds of retries (1 second sleep after each failure) + if call_count[0] <= 2: + import aiohttp + + raise aiohttp.ClientError('Connection refused') + else: + mock_response = MagicMock() + mock_response.status = 200 + mock_response.__aenter__ = AsyncMock(return_value=mock_response) + mock_response.__aexit__ = AsyncMock(return_value=None) + return mock_response + + mock_get.side_effect = side_effect + + # Counter that will be incremented by background task + counter = [0] # Use list to allow modification in nested function + is_running = [True] + + async def 
increment_counter(): + """Background task that increments counter every 0.5 seconds""" + while is_running[0]: + await asyncio.sleep(0.5) + counter[0] += 1 + + # Start the background task + counter_task = asyncio.create_task(increment_counter()) + + try: + # Run health check (will take ~2 seconds with retries) + await DaprHealth.wait_for_sidecar() + + # Stop the background task + is_running[0] = False + await asyncio.sleep(0.1) # Give it time to finish current iteration + + # Verify the counter was incremented during health check + # In 2 seconds with 0.5s intervals, we expect at least 3 increments + self.assertGreaterEqual( + counter[0], + 3, + f'Expected counter to increment at least 3 times during health check, ' + f'but got {counter[0]}. This indicates health check may be blocking.', + ) + + # Verify health check made multiple attempts + self.assertGreaterEqual(call_count[0], 2) + + finally: + # Clean up + is_running[0] = False + counter_task.cancel() + try: + await counter_task + except asyncio.CancelledError: + pass + + @patch.object(settings, 'DAPR_HTTP_ENDPOINT', 'http://domain.com:3500') + @patch('aiohttp.ClientSession.get') + async def test_multiple_health_checks_concurrent(self, mock_get): + """Test that multiple health check calls can run concurrently""" + # Create mock response + mock_response = MagicMock() + mock_response.status = 200 + mock_response.__aenter__ = AsyncMock(return_value=mock_response) + mock_response.__aexit__ = AsyncMock(return_value=None) + mock_get.return_value = mock_response + + # Run multiple health checks concurrently + start_time = time.time() + results = await asyncio.gather( + DaprHealth.wait_for_sidecar(), + DaprHealth.wait_for_sidecar(), + DaprHealth.wait_for_sidecar(), + ) + elapsed = time.time() - start_time + + # All should complete successfully + self.assertEqual(len(results), 3) + self.assertIsNone(results[0]) + self.assertIsNone(results[1]) + self.assertIsNone(results[2]) + + # Should complete quickly since they run 
concurrently + self.assertLess(elapsed, 1.0) + + # Verify multiple calls were made + self.assertGreaterEqual(mock_get.call_count, 3) + + +if __name__ == '__main__': + unittest.main() From 1645e1c412747d205e04eb0b09008630e5eb8539 Mon Sep 17 00:00:00 2001 From: Casper Nielsen Date: Tue, 16 Dec 2025 10:52:41 +0100 Subject: [PATCH 13/16] Feat/ext langgraph checkpoint (#868) * feat: align put, put_writes and get_tuple to redis checkpoint saver for consistincy Signed-off-by: Casper Nielsen * chore: include new import of python-ulid for langgraph checkpoint support Signed-off-by: Casper Nielsen * chore: ruff formatting Signed-off-by: Casper Nielsen * fix: include missing import of msgpack Signed-off-by: Casper Nielsen * fix: ensure handing of empty checkpoint_id passed in config to put_writes Signed-off-by: Casper Nielsen * chore: align extract of config between put and put_writes Signed-off-by: Casper Nielsen * fix: handle str retrieval for get_tuple with extra type checking Signed-off-by: Casper Nielsen * chore: add example for ext-langgraph Signed-off-by: Casper Nielsen * fix: rename to redis-memory Signed-off-by: Casper Nielsen * feat: add sqlite component Signed-off-by: Casper Nielsen * fix: remove refs to redis Signed-off-by: Casper Nielsen * chore: update readme with details on dual state component setup Signed-off-by: Casper Nielsen * fix: add guard for None metadata Signed-off-by: Casper Nielsen * test: update test case to handle new output format Signed-off-by: Casper Nielsen * chore: remove redundant graph .compile() call Signed-off-by: Casper Nielsen * fix: remove statestores and rely on existing metadata created Signed-off-by: Casper Nielsen * fix: delete files not needed to align with ci testing Signed-off-by: Casper Nielsen * fix: rename to agent.py & simplify Signed-off-by: Casper Nielsen * chore: align deps Signed-off-by: Casper Nielsen * fix: correct readme to align with other examples Signed-off-by: Casper Nielsen * chore: add extra validation lines 
Signed-off-by: Casper Nielsen * feat: add langgraph-checkpointer to test Signed-off-by: Casper Nielsen * chore: ruff formatting Signed-off-by: Casper Nielsen * fix: add instruction for the OPENAI_API_KEY export Signed-off-by: Casper Nielsen * feat: attempt using smaller llama model for examples testing Signed-off-by: Casper Nielsen * fix: correct deps for ci install Signed-off-by: Casper Nielsen * feat: ensure ci installs required dependencies Signed-off-by: Casper Nielsen * chore: formatting Signed-off-by: Casper Nielsen * fix: ensure ollama runs as a bg process Signed-off-by: Casper Nielsen * fix: be more concise in the multiply ask Signed-off-by: Casper Nielsen * fix: the small model isn't consistent enough for CI to depend on it. Add sleep to ensure ollama us serving before pulling model Signed-off-by: Casper Nielsen * fix: simplify verification strings to handle non determinism in response Signed-off-by: Casper Nielsen --------- Signed-off-by: Casper Nielsen --- .github/workflows/validate_examples.yaml | 6 + examples/langgraph-checkpointer/README.md | 66 ++++ examples/langgraph-checkpointer/agent.py | 66 ++++ .../components/statestore.yaml | 14 + .../langgraph-checkpointer/requirements.txt | 3 + .../dapr/ext/langgraph/dapr_checkpointer.py | 363 +++++++++++++++--- ext/dapr-ext-langgraph/setup.cfg | 2 + .../tests/test_checkpointer.py | 49 ++- tox.ini | 1 + 9 files changed, 499 insertions(+), 71 deletions(-) create mode 100644 examples/langgraph-checkpointer/README.md create mode 100644 examples/langgraph-checkpointer/agent.py create mode 100644 examples/langgraph-checkpointer/components/statestore.yaml create mode 100644 examples/langgraph-checkpointer/requirements.txt diff --git a/.github/workflows/validate_examples.yaml b/.github/workflows/validate_examples.yaml index b4fcc7929..c89147dc3 100644 --- a/.github/workflows/validate_examples.yaml +++ b/.github/workflows/validate_examples.yaml @@ -88,6 +88,12 @@ jobs: uses: actions/setup-go@v5 with: go-version: ${{ 
env.GOVER }} + - name: Set up Llama + run: | + curl -fsSL https://ollama.com/install.sh | sh + nohup ollama serve & + sleep 10 + ollama pull llama3.2:latest - name: Checkout Dapr CLI repo to override dapr command. uses: actions/checkout@v6 if: env.DAPR_CLI_REF != '' diff --git a/examples/langgraph-checkpointer/README.md b/examples/langgraph-checkpointer/README.md new file mode 100644 index 000000000..0fce355d7 --- /dev/null +++ b/examples/langgraph-checkpointer/README.md @@ -0,0 +1,66 @@ +# Dapr For Agents - LangGraph Checkpointer + +Supporting Dapr backed Checkpointer for LangGraph based Agents. + +## Pre-requisites + +- [Dapr CLI and initialized environment](https://docs.dapr.io/getting-started) +- [Install Python 3.10+](https://www.python.org/downloads/) + +## Install Dapr python-SDK + + + + + +```sh +pip3 install -r requirements.txt +``` + + + +## Run the example + +Export your `OPENAI_API_KEY`: + +```bash +export OPENAI_API_KEY="SK-..." +``` + +Run the following command in a terminal/command prompt: + + + +```bash +# 1. Run the LangGraph agent +dapr run --app-id langgraph-checkpointer --app-port 5001 --resources-path ./components -- python3 agent.py +``` + + + +## Cleanup + +Either press CTRL + C to quit the app or run the following command in a new terminal to stop the app: + +```bash +dapr stop --app-id langgraph-checkpointer +``` + diff --git a/examples/langgraph-checkpointer/agent.py b/examples/langgraph-checkpointer/agent.py new file mode 100644 index 000000000..8ea98ae82 --- /dev/null +++ b/examples/langgraph-checkpointer/agent.py @@ -0,0 +1,66 @@ +from dapr.ext.langgraph import DaprCheckpointer +from langchain_core.messages import HumanMessage, SystemMessage +from langchain_ollama import ChatOllama +from langgraph.graph import START, MessagesState, StateGraph +from langgraph.prebuilt import ToolNode, tools_condition + + +def add(a: int, b: int) -> int: + """Adds a and b. 
+ + Args: + a: first int + b: second int + """ + return a + b + + +def multiply(a: int, b: int) -> int: + """Multiply a and b. + + Args: + a: first int + b: second int + """ + return a * b + + +tools = [add, multiply] +llm = ChatOllama(model='llama3.2:latest') +llm_with_tools = llm.bind_tools(tools) + +sys_msg = SystemMessage( + content='You are a helpful assistant tasked with performing arithmetic on a set of inputs.' +) + + +def assistant(state: MessagesState): + return {'messages': [llm_with_tools.invoke([sys_msg] + state['messages'])]} + + +builder = StateGraph(MessagesState) + +builder.add_node('assistant', assistant) +builder.add_node('tools', ToolNode(tools)) + +builder.add_edge(START, 'assistant') +builder.add_conditional_edges( + 'assistant', + tools_condition, +) +builder.add_edge('tools', 'assistant') + +memory = DaprCheckpointer(store_name='statestore', key_prefix='dapr') +react_graph_memory = builder.compile(checkpointer=memory) + +config = {'configurable': {'thread_id': '1'}} + +messages = [HumanMessage(content='Add 3 and 4.')] +messages = react_graph_memory.invoke({'messages': messages}, config) +for m in messages['messages']: + m.pretty_print() + +messages = [HumanMessage(content='Multiply the result by 2.')] +messages = react_graph_memory.invoke({'messages': messages}, config) +for m in messages['messages']: + m.pretty_print() diff --git a/examples/langgraph-checkpointer/components/statestore.yaml b/examples/langgraph-checkpointer/components/statestore.yaml new file mode 100644 index 000000000..2f676bff8 --- /dev/null +++ b/examples/langgraph-checkpointer/components/statestore.yaml @@ -0,0 +1,14 @@ +apiVersion: dapr.io/v1alpha1 +kind: Component +metadata: + name: statestore +spec: + type: state.redis + version: v1 + metadata: + - name: redisHost + value: localhost:6379 + - name: redisPassword + value: "" + - name: actorStateStore + value: "true" diff --git a/examples/langgraph-checkpointer/requirements.txt 
b/examples/langgraph-checkpointer/requirements.txt new file mode 100644 index 000000000..788a4b213 --- /dev/null +++ b/examples/langgraph-checkpointer/requirements.txt @@ -0,0 +1,3 @@ +langchain-core>=1.0.7 +langgraph>=1.0.3 +langchain-ollama>=1.0.0 \ No newline at end of file diff --git a/ext/dapr-ext-langgraph/dapr/ext/langgraph/dapr_checkpointer.py b/ext/dapr-ext-langgraph/dapr/ext/langgraph/dapr_checkpointer.py index 123b313d1..a18de1c32 100644 --- a/ext/dapr-ext-langgraph/dapr/ext/langgraph/dapr_checkpointer.py +++ b/ext/dapr-ext-langgraph/dapr/ext/langgraph/dapr_checkpointer.py @@ -1,11 +1,23 @@ +import base64 import json -from typing import Any, Sequence, Tuple +import time +from typing import Any, Dict, List, Optional, Sequence, Tuple, cast -from langchain_core.load import dumps +import msgpack +from langchain_core.messages import AIMessage, HumanMessage, ToolMessage from langchain_core.runnables import RunnableConfig +from ulid import ULID from dapr.clients import DaprClient -from langgraph.checkpoint.base import BaseCheckpointSaver, Checkpoint, CheckpointTuple +from langgraph.checkpoint.base import ( + WRITES_IDX_MAP, + BaseCheckpointSaver, + ChannelVersions, + Checkpoint, + CheckpointMetadata, + CheckpointTuple, +) +from langgraph.checkpoint.serde.jsonplus import JsonPlusSerializer class DaprCheckpointer(BaseCheckpointSaver[Checkpoint]): @@ -19,7 +31,9 @@ class DaprCheckpointer(BaseCheckpointSaver[Checkpoint]): def __init__(self, store_name: str, key_prefix: str): self.store_name = store_name self.key_prefix = key_prefix + self.serde = JsonPlusSerializer() self.client = DaprClient() + self._key_cache: Dict[str, str] = {} # helper: construct Dapr key for a thread def _get_key(self, config: RunnableConfig) -> str: @@ -36,59 +50,89 @@ def _get_key(self, config: RunnableConfig) -> str: return f'{self.key_prefix}:{thread_id}' - # restore a checkpoint - def get_tuple(self, config: RunnableConfig) -> CheckpointTuple | None: - key = self._get_key(config) - - 
resp = self.client.get_state(store_name=self.store_name, key=key) - if not resp.data: - return None - - wrapper = json.loads(resp.data) - cp_data = wrapper.get('checkpoint', wrapper) - metadata = wrapper.get('metadata', {'step': 0}) - if 'step' not in metadata: - metadata['step'] = 0 - - cp = Checkpoint(**cp_data) - return CheckpointTuple( - config=config, - checkpoint=cp, - parent_config=None, - metadata=metadata, - ) - - # save a full checkpoint snapshot def put( self, config: RunnableConfig, checkpoint: Checkpoint, - parent_config: RunnableConfig | None, - metadata: dict[str, Any], - ) -> None: - key = self._get_key(config) + metadata: CheckpointMetadata, + new_versions: ChannelVersions, + ) -> RunnableConfig: + thread_id = config['configurable']['thread_id'] + checkpoint_ns = config['configurable'].get('checkpoint_ns', '') + config_checkpoint_id = config['configurable'].get('checkpoint_id', '') + thread_ts = config['configurable'].get('thread_ts', '') + + checkpoint_id = config_checkpoint_id or thread_ts or checkpoint.get('id', '') + + parent_checkpoint_id = None + if ( + checkpoint.get('id') + and config_checkpoint_id + and checkpoint.get('id') != config_checkpoint_id + ): + parent_checkpoint_id = config_checkpoint_id + checkpoint_id = checkpoint['id'] + + storage_safe_thread_id = self._safe_id(thread_id) + storage_safe_checkpoint_ns = self._safe_ns(checkpoint_ns) + storage_safe_checkpoint_id = self._safe_id(checkpoint_id) + + copy = checkpoint.copy() + next_config = { + 'configurable': { + 'thread_id': thread_id, + 'checkpoint_ns': checkpoint_ns, + 'checkpoint_id': checkpoint_id, + } + } - checkpoint_serializable = { - 'v': checkpoint['v'], - 'id': checkpoint['id'], - 'ts': checkpoint['ts'], - 'channel_values': checkpoint['channel_values'], - 'channel_versions': checkpoint['channel_versions'], - 'versions_seen': checkpoint['versions_seen'], + checkpoint_ts = None + if checkpoint_id: + try: + ulid_obj = ULID.from_str(checkpoint_id) + checkpoint_ts = 
ulid_obj.timestamp + except Exception: + checkpoint_ts = time.time() * 1000 + + checkpoint_data = { + 'thread_id': storage_safe_thread_id, + 'checkpoint_ns': storage_safe_checkpoint_ns, + 'checkpoint_id': storage_safe_checkpoint_id, + 'parent_checkpoint_id': ( + '00000000-0000-0000-0000-000000000000' + if (parent_checkpoint_id if parent_checkpoint_id else '') == '' + else parent_checkpoint_id + ), + 'checkpoint_ts': checkpoint_ts, + 'checkpoint': self._dump_checkpoint(copy), + 'metadata': self._dump_metadata(metadata), + 'has_writes': False, } - wrapper = {'checkpoint': checkpoint_serializable, 'metadata': metadata} + # Guard case where metadata is None + metadata = metadata or {} - self.client.save_state(self.store_name, key, dumps(wrapper)) + if all(key in metadata for key in ['source', 'step']): + checkpoint_data['source'] = metadata['source'] + checkpoint_data['step'] = metadata['step'] - reg_resp = self.client.get_state(store_name=self.store_name, key=self.REGISTRY_KEY) - registry = json.loads(reg_resp.data) if reg_resp.data else [] + checkpoint_key = self._make_safe_checkpoint_key( + thread_id=thread_id, checkpoint_ns=checkpoint_ns, checkpoint_id=checkpoint_id + ) + + _, data = self.serde.dumps_typed(checkpoint_data) + self.client.save_state(store_name=self.store_name, key=checkpoint_key, value=data) + + latest_pointer_key = ( + f'checkpoint_latest:{storage_safe_thread_id}:{storage_safe_checkpoint_ns}' + ) + + self.client.save_state( + store_name=self.store_name, key=latest_pointer_key, value=checkpoint_key + ) - if key not in registry: - registry.append(key) - self.client.save_state(self.store_name, self.REGISTRY_KEY, json.dumps(registry)) + return next_config - # incremental persistence (for streamed runs) def put_writes( self, config: RunnableConfig, @@ -96,24 +140,50 @@ def put_writes( task_id: str, task_path: str = '', ) -> None: - _ = task_id, task_path - - key = self._get_key(config) + """Store intermediate writes linked to a checkpoint with integrated 
key registry.""" + thread_id = config['configurable']['thread_id'] + checkpoint_ns = config['configurable'].get('checkpoint_ns', '') + checkpoint_id = config['configurable'].get('checkpoint_id', '') + storage_safe_thread_id = (self._safe_id(thread_id),) + storage_safe_checkpoint_ns = self._safe_ns(checkpoint_ns) + + writes_objects: List[Dict[str, Any]] = [] + for idx, (channel, value) in enumerate(writes): + type_, blob = self.serde.dumps_typed(value) + write_obj: Dict[str, Any] = { + 'thread_id': storage_safe_thread_id, + 'checkpoint_ns': storage_safe_checkpoint_ns, + 'checkpoint_id': self._safe_id(checkpoint_id), + 'task_id': task_id, + 'task_path': task_path, + 'idx': WRITES_IDX_MAP.get(channel, idx), + 'channel': channel, + 'type': type_, + 'blob': self._encode_blob(blob), + } + writes_objects.append(write_obj) + + for write_obj in writes_objects: + idx_value = write_obj['idx'] + assert isinstance(idx_value, int) + key = self._make_safe_checkpoint_key( + thread_id=thread_id, checkpoint_ns=checkpoint_ns, checkpoint_id=checkpoint_id + ) - resp = self.client.get_state(store_name=self.store_name, key=key) - if not resp.data: - return + self.client.save_state(store_name=self.store_name, key=key, value=json.dumps(write_obj)) - wrapper = json.loads(resp.data) - cp = wrapper.get('checkpoint', {}) + checkpoint_key = self._make_safe_checkpoint_key( + thread_id=thread_id, checkpoint_ns=checkpoint_ns, checkpoint_id=checkpoint_id + ) - for field, value in writes: - cp['channel_values'][field] = value + latest_pointer_key = ( + f'checkpoint_latest:{storage_safe_thread_id}:{storage_safe_checkpoint_ns}' + ) - wrapper['checkpoint'] = cp - self.client.save_state(self.store_name, key, json.dumps(wrapper)) + self.client.save_state( + store_name=self.store_name, key=latest_pointer_key, value=checkpoint_key + ) - # enumerate all saved checkpoints def list(self, config: RunnableConfig) -> list[CheckpointTuple]: reg_resp = self.client.get_state(store_name=self.store_name, 
key=self.REGISTRY_KEY) if not reg_resp.data: @@ -143,7 +213,6 @@ def list(self, config: RunnableConfig) -> list[CheckpointTuple]: return checkpoints - # remove a checkpoint and update the registry def delete_thread(self, config: RunnableConfig) -> None: key = self._get_key(config) @@ -162,3 +231,179 @@ def delete_thread(self, config: RunnableConfig) -> None: key=self.REGISTRY_KEY, value=json.dumps(registry), ) + + def get_tuple(self, config: RunnableConfig) -> Optional[CheckpointTuple]: + thread_id = config['configurable']['thread_id'] + checkpoint_ns = config['configurable'].get('checkpoint_ns', '') + + storage_safe_thread_id = self._safe_id(thread_id) + storage_safe_checkpoint_ns = self._safe_ns(checkpoint_ns) + + key = ':'.join( + [ + 'checkpoint_latest', + storage_safe_thread_id, + storage_safe_checkpoint_ns, + ] + ) + + # First we extract the latest checkpoint key + checkpoint_key = self.client.get_state(store_name=self.store_name, key=key) + if not checkpoint_key.data: + return None + + # To then derive the checkpoint data + checkpoint_data = self.client.get_state( + store_name=self.store_name, + # checkpoint_key.data can either be str or bytes + key=checkpoint_key.data.decode() + if isinstance(checkpoint_key.data, bytes) + else checkpoint_key.data, + ) + + if not checkpoint_data.data: + return None + + if isinstance(checkpoint_data.data, bytes): + unpacked = msgpack.unpackb(checkpoint_data.data) + + checkpoint_values = unpacked[b'checkpoint'] + channel_values = checkpoint_values[b'channel_values'] + + decoded_messages = [] + for item in channel_values[b'messages']: + if isinstance(item, msgpack.ExtType): + decoded_messages.append( + self._convert_checkpoint_message( + self._load_metadata(msgpack.unpackb(item.data)) + ) + ) + else: + decoded_messages.append(item) + + checkpoint_values[b'channel_values'][b'messages'] = decoded_messages + + mdata = unpacked.get(b'metadata') + if isinstance(mdata, bytes): + mdata = self._load_metadata(msgpack.unpackb(mdata)) + + 
metadata = { + k.decode() if isinstance(k, bytes) else k: v.decode() if isinstance(v, bytes) else v + for k, v in mdata.items() + } + + checkpoint_obj = Checkpoint( + **{ + key.decode() if isinstance(key, bytes) else key: value + for key, value in checkpoint_values.items() + } + ) + + checkpoint = self._decode_bytes(checkpoint_obj) + elif isinstance(checkpoint_data.data, str): + unpacked = json.loads(checkpoint_data.data) + checkpoint = unpacked.get('checkpoint', None) + metadata = unpacked.get('metadata', None) + + if not metadata or not checkpoint: + return None + else: + return None + + return CheckpointTuple( + config=config, + checkpoint=checkpoint, + metadata=metadata, + parent_config=None, + pending_writes=[], + ) + + def _safe_id(self, id) -> str: + return '00000000-0000-0000-0000-000000000000' if id == '' else id + + def _safe_ns(self, ns) -> str: + return '__empty__' if ns == '' else ns + + def _convert_checkpoint_message(self, msg_item): + _, _, data_dict, _ = msg_item + data_dict = self._decode_bytes(data_dict) + + msg_type = data_dict.get('type') + + if msg_type == 'human': + return HumanMessage(**data_dict) + elif msg_type == 'ai': + return AIMessage(**data_dict) + elif msg_type == 'tool': + return ToolMessage(**data_dict) + else: + raise ValueError(f'Unknown message type: {msg_type}') + + def _decode_bytes(self, obj): + if isinstance(obj, bytes): + try: + s = obj.decode() + # Convert to int if it's a number, the unpacked channel_version holds \xa1 which unpacks as strings + # LangGraph needs Ints for '>' comparison + if s.isdigit(): + return int(s) + return s + except Exception: + return obj + if isinstance(obj, dict): + return {self._decode_bytes(k): self._decode_bytes(v) for k, v in obj.items()} + if isinstance(obj, list): + return [self._decode_bytes(v) for v in obj] + if isinstance(obj, tuple): + return tuple(self._decode_bytes(v) for v in obj) + return obj + + def _encode_blob(self, blob: Any) -> str: + if isinstance(blob, bytes): + return 
base64.b64encode(blob).decode() + return blob + + def _dump_checkpoint(self, checkpoint: Checkpoint) -> dict[str, Any]: + type_, data = self.serde.dumps_typed(checkpoint) + + if type_ == 'json': + checkpoint_data = cast(dict, json.loads(data)) + else: + checkpoint_data = cast(dict, self.serde.loads_typed((type_, data))) + + if 'channel_values' in checkpoint_data: + for key, value in checkpoint_data['channel_values'].items(): + if isinstance(value, bytes): + checkpoint_data['channel_values'][key] = { + '__bytes__': self._encode_blob(value) + } + + if 'channel_versions' in checkpoint_data: + checkpoint_data['channel_versions'] = { + k: str(v) for k, v in checkpoint_data['channel_versions'].items() + } + + return {'type': type_, **checkpoint_data, 'pending_sends': []} + + def _load_metadata(self, metadata: dict[str, Any]) -> CheckpointMetadata: + type_str, data_bytes = self.serde.dumps_typed(metadata) + return self.serde.loads_typed((type_str, data_bytes)) + + def _dump_metadata(self, metadata: CheckpointMetadata) -> str: + _, serialized_bytes = self.serde.dumps_typed(metadata) + return serialized_bytes + + def _make_safe_checkpoint_key( + self, + thread_id: str, + checkpoint_ns: str, + checkpoint_id: str, + ) -> str: + return ':'.join( + [ + 'checkpoint', + thread_id, + checkpoint_ns, + checkpoint_id, + ] + ) diff --git a/ext/dapr-ext-langgraph/setup.cfg b/ext/dapr-ext-langgraph/setup.cfg index a2a851a6f..f08a79548 100644 --- a/ext/dapr-ext-langgraph/setup.cfg +++ b/ext/dapr-ext-langgraph/setup.cfg @@ -27,6 +27,8 @@ install_requires = dapr >= 1.16.1rc1 langgraph >= 0.3.6 langchain >= 0.1.17 + python-ulid >= 3.0.0 + msgpack-python >= 0.4.5 [options.packages.find] include = diff --git a/ext/dapr-ext-langgraph/tests/test_checkpointer.py b/ext/dapr-ext-langgraph/tests/test_checkpointer.py index 05184f8aa..fc51d9182 100644 --- a/ext/dapr-ext-langgraph/tests/test_checkpointer.py +++ b/ext/dapr-ext-langgraph/tests/test_checkpointer.py @@ -1,10 +1,12 @@ # -*- coding: utf-8 
-*- +import base64 import json import unittest from datetime import datetime from unittest import mock +import msgpack from dapr.ext.langgraph.dapr_checkpointer import DaprCheckpointer from langgraph.checkpoint.base import Checkpoint @@ -61,17 +63,37 @@ def test_put_saves_checkpoint_and_registry(self, mock_client_cls): mock_client.get_state.return_value.data = json.dumps([]) cp = DaprCheckpointer(self.store, self.prefix) - cp.put(self.config, self.checkpoint, None, {'step': 10}) - - first_call = mock_client.save_state.call_args_list[0][0] - assert first_call[0] == 'statestore' - assert first_call[1] == 'lg:t1' - saved_payload = json.loads(first_call[2]) + cp.put(self.config, self.checkpoint, {'step': 10}, None) + + first_call = mock_client.save_state.call_args_list[0] + first_call_kwargs = first_call.kwargs + assert first_call_kwargs['store_name'] == 'statestore' + assert first_call_kwargs['key'] == 'checkpoint:t1::cp1' + unpacked = msgpack.unpackb(first_call_kwargs['value']) # We're packing bytes + saved_payload = {} + for k, v in unpacked.items(): + k = k.decode() if isinstance(k, bytes) else k + if ( + k == 'checkpoint' or k == 'metadata' + ): # Need to convert b'' on checkpoint/metadata dict key/values + if k == 'metadata': + v = msgpack.unpackb(v) # Metadata value is packed + val = {} + for sk, sv in v.items(): + sk = sk.decode() if isinstance(sk, bytes) else sk + sv = sv.decode() if isinstance(sv, bytes) else sv + val[sk] = sv + else: + val = v.decode() if isinstance(v, bytes) else v + saved_payload[k] = val assert saved_payload['metadata']['step'] == 10 - second_call = mock_client.save_state.call_args_list[1][0] - assert second_call[0] == 'statestore' - assert second_call[1] == DaprCheckpointer.REGISTRY_KEY + second_call = mock_client.save_state.call_args_list[1] + second_call_kwargs = second_call.kwargs + assert second_call_kwargs['store_name'] == 'statestore' + assert ( + second_call_kwargs['value'] == 'checkpoint:t1::cp1' + ) # Here we're testing if the 
last checkpoint is the first_call above def test_put_writes_updates_channel_values(self, mock_client_cls): mock_client = mock_client_cls.return_value @@ -93,9 +115,12 @@ def test_put_writes_updates_channel_values(self, mock_client_cls): cp.put_writes(self.config, writes=[('a', 99)], task_id='task1') # save_state is called with updated checkpoint - call = mock_client.save_state.call_args[0] - saved = json.loads(call[2]) - assert saved['checkpoint']['channel_values']['a'] == 99 + call = mock_client.save_state.call_args_list[0] + # As we're using named input params we've got to fetch through kwargs + kwargs = call.kwargs + saved = json.loads(kwargs['value']) + # As the value obj is base64 encoded in 'blob' we got to unpack it + assert msgpack.unpackb(base64.b64decode(saved['blob'])) == 99 def test_list_returns_all_checkpoints(self, mock_client_cls): mock_client = mock_client_cls.return_value diff --git a/tox.ini b/tox.ini index 7c31dd8a3..0697a4082 100644 --- a/tox.ini +++ b/tox.ini @@ -61,6 +61,7 @@ commands = ./validate.sh demo_workflow ./validate.sh workflow ./validate.sh jobs + ./validate.sh langgraph-checkpointer ./validate.sh ../ commands_pre = pip3 install -e {toxinidir}/ From 8bd46aa3b5969730eadef9b0ac68e1f9f9600ef2 Mon Sep 17 00:00:00 2001 From: Casper Nielsen Date: Mon, 12 Jan 2026 13:26:43 +0100 Subject: [PATCH 14/16] fix(test): correct metadata testcase for release 1.17 (#878) Signed-off-by: Casper Nielsen --- examples/metadata/README.md | 4 ++-- 1 file changed, 2 insertions(+), 2 deletions(-) diff --git a/examples/metadata/README.md b/examples/metadata/README.md index eafe3723b..409df01b8 100644 --- a/examples/metadata/README.md +++ b/examples/metadata/README.md @@ -41,7 +41,7 @@ expected_stdout_lines: - "== APP == registered_components:" - "== APP == name=lockstore type=lock.redis version= capabilities=[]" - "== APP == name=pubsub type=pubsub.redis version=v1 capabilities=[]" - - "== APP == name=statestore type=state.redis version=v1 
capabilities=['ACTOR', 'ETAG', 'TRANSACTIONAL'" + - "== APP == name=statestore type=state.redis version=v1 capabilities=['ACTOR', 'ETAG', 'KEYS_LIKE', 'TRANSACTIONAL', 'TTL'" - "== APP == We will update our custom label value and check it was persisted" - "== APP == We added a custom label named [is-this-our-metadata-example]" - "== APP == Its old value was [yes] but now it is [You bet it is!]" @@ -64,7 +64,7 @@ The output should be as follows: == APP == registered_components: == APP == name=lockstore type=lock.redis version= capabilities=[] == APP == name=pubsub type=pubsub.redis version=v1 capabilities=[] -== APP == name=statestore type=state.redis version=v1 capabilities=['ACTOR', 'ETAG', 'TRANSACTIONAL', 'TTL'] +== APP == name=statestore type=state.redis version=v1 capabilities=['ACTOR', 'ETAG', 'KEYS_LIKE', 'TRANSACTIONAL', 'TTL'] == APP == We will update our custom label value and check it was persisted == APP == We added a custom label named [is-this-our-metadata-example] == APP == Its old value was [yes] but now it is [You bet it is!] 
From 1cc5a31442d01af8d1b1991d255bcaa74e188a2c Mon Sep 17 00:00:00 2001 From: Yaron Schneider Date: Mon, 12 Jan 2026 05:03:06 -0800 Subject: [PATCH 15/16] Add Strands Agent Session Manager (#872) * Add Strands Agent Session Manager Signed-off-by: yaron2 * linter Signed-off-by: yaron2 * address feedback Signed-off-by: yaron2 * fix linter and add missing copyright header to langgraph Signed-off-by: yaron2 --------- Signed-off-by: yaron2 Co-authored-by: Albert Callarisa --- .github/workflows/build-push-to-main.yaml | 7 + .github/workflows/build-tag.yaml | 8 + README.md | 1 + .../dapr/ext/langgraph/dapr_checkpointer.py | 13 + ext/dapr-ext-strands/LICENSE | 203 +++++++ ext/dapr-ext-strands/README.rst | 22 + .../dapr/ext/strands/__init__.py | 21 + .../dapr/ext/strands/dapr_session_manager.py | 551 ++++++++++++++++++ .../dapr/ext/strands/version.py | 16 + ext/dapr-ext-strands/setup.cfg | 42 ++ ext/dapr-ext-strands/setup.py | 66 +++ ext/dapr-ext-strands/tests/__init__.py | 14 + .../tests/test_session_manager.py | 168 ++++++ tox.ini | 5 + 14 files changed, 1137 insertions(+) create mode 100644 ext/dapr-ext-strands/LICENSE create mode 100644 ext/dapr-ext-strands/README.rst create mode 100644 ext/dapr-ext-strands/dapr/ext/strands/__init__.py create mode 100644 ext/dapr-ext-strands/dapr/ext/strands/dapr_session_manager.py create mode 100644 ext/dapr-ext-strands/dapr/ext/strands/version.py create mode 100644 ext/dapr-ext-strands/setup.cfg create mode 100644 ext/dapr-ext-strands/setup.py create mode 100644 ext/dapr-ext-strands/tests/__init__.py create mode 100644 ext/dapr-ext-strands/tests/test_session_manager.py diff --git a/.github/workflows/build-push-to-main.yaml b/.github/workflows/build-push-to-main.yaml index 93bfbff2a..499daebde 100644 --- a/.github/workflows/build-push-to-main.yaml +++ b/.github/workflows/build-push-to-main.yaml @@ -113,3 +113,10 @@ jobs: cd ext/dapr-ext-langgraph python setup.py sdist bdist_wheel twine upload dist/* + - name: Build and publish 
dapr-ext-strands + env: + TWINE_PASSWORD: ${{ secrets.PYPI_UPLOAD_PASS }} + run: | + cd ext/dapr-ext-strands + python setup.py sdist bdist_wheel + twine upload dist/* diff --git a/.github/workflows/build-tag.yaml b/.github/workflows/build-tag.yaml index ebc4b129f..42e95268c 100644 --- a/.github/workflows/build-tag.yaml +++ b/.github/workflows/build-tag.yaml @@ -123,3 +123,11 @@ jobs: cd ext/dapr-ext-langgraph python setup.py sdist bdist_wheel twine upload dist/* + - name: Build and publish dapr-ext-strands + if: startsWith(github.ref_name, 'strands-v') + env: + TWINE_PASSWORD: ${{ secrets.PYPI_UPLOAD_PASS }} + run: | + cd ext/dapr-ext-strands + python setup.py sdist bdist_wheel + twine upload dist/* diff --git a/README.md b/README.md index 30f65e216..f205a1b6a 100644 --- a/README.md +++ b/README.md @@ -87,6 +87,7 @@ pip3 install -e ./ext/dapr-ext-grpc/ pip3 install -e ./ext/dapr-ext-fastapi/ pip3 install -e ./ext/dapr-ext-workflow/ pip3 install -e ./ext/dapr-ext-langgraph/ +pip3 install -e ./ext/dapr-ext-strands/ ``` 3. Install required packages diff --git a/ext/dapr-ext-langgraph/dapr/ext/langgraph/dapr_checkpointer.py b/ext/dapr-ext-langgraph/dapr/ext/langgraph/dapr_checkpointer.py index a18de1c32..6d2614d90 100644 --- a/ext/dapr-ext-langgraph/dapr/ext/langgraph/dapr_checkpointer.py +++ b/ext/dapr-ext-langgraph/dapr/ext/langgraph/dapr_checkpointer.py @@ -1,3 +1,16 @@ +""" +Copyright 2026 The Dapr Authors +Licensed under the Apache License, Version 2.0 (the "License"); +you may not use this file except in compliance with the License. +You may obtain a copy of the License at + http://www.apache.org/licenses/LICENSE-2.0 +Unless required by applicable law or agreed to in writing, software +distributed under the License is distributed on an "AS IS" BASIS, +WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. +See the License for the specific language governing permissions and +limitations under the License. 
+""" + import base64 import json import time diff --git a/ext/dapr-ext-strands/LICENSE b/ext/dapr-ext-strands/LICENSE new file mode 100644 index 000000000..be033a7fd --- /dev/null +++ b/ext/dapr-ext-strands/LICENSE @@ -0,0 +1,203 @@ +Apache License + Version 2.0, January 2004 + http://www.apache.org/licenses/ + + TERMS AND CONDITIONS FOR USE, REPRODUCTION, AND DISTRIBUTION + + 1. Definitions. + + "License" shall mean the terms and conditions for use, reproduction, + and distribution as defined by Sections 1 through 9 of this document. + + "Licensor" shall mean the copyright owner or entity authorized by + the copyright owner that is granting the License. + + "Legal Entity" shall mean the union of the acting entity and all + other entities that control, are controlled by, or are under common + control with that entity. For the purposes of this definition, + "control" means (i) the power, direct or indirect, to cause the + direction or management of such entity, whether by contract or + otherwise, or (ii) ownership of fifty percent (50%) or more of the + outstanding shares, or (iii) beneficial ownership of such entity. + + "You" (or "Your") shall mean an individual or Legal Entity + exercising permissions granted by this License. + + "Source" form shall mean the preferred form for making modifications, + including but not limited to software source code, documentation + source, and configuration files. + + "Object" form shall mean any form resulting from mechanical + transformation or translation of a Source form, including but + not limited to compiled object code, generated documentation, + and conversions to other media types. + + "Work" shall mean the work of authorship, whether in Source or + Object form, made available under the License, as indicated by a + copyright notice that is included in or attached to the work + (an example is provided in the Appendix below). 
+ + "Derivative Works" shall mean any work, whether in Source or Object + form, that is based on (or derived from) the Work and for which the + editorial revisions, annotations, elaborations, or other modifications + represent, as a whole, an original work of authorship. For the purposes + of this License, Derivative Works shall not include works that remain + separable from, or merely link (or bind by name) to the interfaces of, + the Work and Derivative Works thereof. + + "Contribution" shall mean any work of authorship, including + the original version of the Work and any modifications or additions + to that Work or Derivative Works thereof, that is intentionally + submitted to Licensor for inclusion in the Work by the copyright owner + or by an individual or Legal Entity authorized to submit on behalf of + the copyright owner. For the purposes of this definition, "submitted" + means any form of electronic, verbal, or written communication sent + to the Licensor or its representatives, including but not limited to + communication on electronic mailing lists, source code control systems, + and issue tracking systems that are managed by, or on behalf of, the + Licensor for the purpose of discussing and improving the Work, but + excluding communication that is conspicuously marked or otherwise + designated in writing by the copyright owner as "Not a Contribution." + + "Contributor" shall mean Licensor and any individual or Legal Entity + on behalf of whom a Contribution has been received by Licensor and + subsequently incorporated within the Work. + + 2. Grant of Copyright License. Subject to the terms and conditions of + this License, each Contributor hereby grants to You a perpetual, + worldwide, non-exclusive, no-charge, royalty-free, irrevocable + copyright license to reproduce, prepare Derivative Works of, + publicly display, publicly perform, sublicense, and distribute the + Work and such Derivative Works in Source or Object form. + + 3. 
Grant of Patent License. Subject to the terms and conditions of + this License, each Contributor hereby grants to You a perpetual, + worldwide, non-exclusive, no-charge, royalty-free, irrevocable + (except as stated in this section) patent license to make, have made, + use, offer to sell, sell, import, and otherwise transfer the Work, + where such license applies only to those patent claims licensable + by such Contributor that are necessarily infringed by their + Contribution(s) alone or by combination of their Contribution(s) + with the Work to which such Contribution(s) was submitted. If You + institute patent litigation against any entity (including a + cross-claim or counterclaim in a lawsuit) alleging that the Work + or a Contribution incorporated within the Work constitutes direct + or contributory patent infringement, then any patent licenses + granted to You under this License for that Work shall terminate + as of the date such litigation is filed. + + 4. Redistribution. You may reproduce and distribute copies of the + Work or Derivative Works thereof in any medium, with or without + modifications, and in Source or Object form, provided that You + meet the following conditions: + + (a) You must give any other recipients of the Work or + Derivative Works a copy of this License; and + + (b) You must cause any modified files to carry prominent notices + stating that You changed the files; and + + (c) You must retain, in the Source form of any Derivative Works + that You distribute, all copyright, patent, trademark, and + attribution notices from the Source form of the Work, + excluding those notices that do not pertain to any part of + the Derivative Works; and + + (d) If the Work includes a "NOTICE" text file as part of its + distribution, then any Derivative Works that You distribute must + include a readable copy of the attribution notices contained + within such NOTICE file, excluding those notices that do not + pertain to any part of the Derivative 
Works, in at least one + of the following places: within a NOTICE text file distributed + as part of the Derivative Works; within the Source form or + documentation, if provided along with the Derivative Works; or, + within a display generated by the Derivative Works, if and + wherever such third-party notices normally appear. The contents + of the NOTICE file are for informational purposes only and + do not modify the License. You may add Your own attribution + notices within Derivative Works that You distribute, alongside + or as an addendum to the NOTICE text from the Work, provided + that such additional attribution notices cannot be construed + as modifying the License. + + You may add Your own copyright statement to Your modifications and + may provide additional or different license terms and conditions + for use, reproduction, or distribution of Your modifications, or + for any such Derivative Works as a whole, provided Your use, + reproduction, and distribution of the Work otherwise complies with + the conditions stated in this License. + + 5. Submission of Contributions. Unless You explicitly state otherwise, + any Contribution intentionally submitted for inclusion in the Work + by You to the Licensor shall be under the terms and conditions of + this License, without any additional terms or conditions. + Notwithstanding the above, nothing herein shall supersede or modify + the terms of any separate license agreement you may have executed + with Licensor regarding such Contributions. + + 6. Trademarks. This License does not grant permission to use the trade + names, trademarks, service marks, or product names of the Licensor, + except as required for reasonable and customary use in describing the + origin of the Work and reproducing the content of the NOTICE file. + + 7. Disclaimer of Warranty. 
Unless required by applicable law or + agreed to in writing, Licensor provides the Work (and each + Contributor provides its Contributions) on an "AS IS" BASIS, + WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or + implied, including, without limitation, any warranties or conditions + of TITLE, NON-INFRINGEMENT, MERCHANTABILITY, or FITNESS FOR A + PARTICULAR PURPOSE. You are solely responsible for determining the + appropriateness of using or redistributing the Work and assume any + risks associated with Your exercise of permissions under this License. + + 8. Limitation of Liability. In no event and under no legal theory, + whether in tort (including negligence), contract, or otherwise, + unless required by applicable law (such as deliberate and grossly + negligent acts) or agreed to in writing, shall any Contributor be + liable to You for damages, including any direct, indirect, special, + incidental, or consequential damages of any character arising as a + result of this License or out of the use or inability to use the + Work (including but not limited to damages for loss of goodwill, + work stoppage, computer failure or malfunction, or any and all + other commercial damages or losses), even if such Contributor + has been advised of the possibility of such damages. + + 9. Accepting Warranty or Additional Liability. While redistributing + the Work or Derivative Works thereof, You may choose to offer, + and charge a fee for, acceptance of support, warranty, indemnity, + or other liability obligations and/or rights consistent with this + License. However, in accepting such obligations, You may act only + on Your own behalf and on Your sole responsibility, not on behalf + of any other Contributor, and only if You agree to indemnify, + defend, and hold each Contributor harmless for any liability + incurred by, or claims asserted against, such Contributor by reason + of your accepting any such warranty or additional liability. 
+ + END OF TERMS AND CONDITIONS + + APPENDIX: How to apply the Apache License to your work. + + To apply the Apache License to your work, attach the following + boilerplate notice, with the fields enclosed by brackets "[]" + replaced with your own identifying information. (Don't include + the brackets!) The text should be enclosed in the appropriate + comment syntax for the file format. We also recommend that a + file or class name and description of purpose be included on the + same "printed page" as the copyright notice for easier + identification within third-party archives. + + Copyright 2021 The Dapr Authors. + + and others that have contributed code to the public domain. + + Licensed under the Apache License, Version 2.0 (the "License"); + you may not use this file except in compliance with the License. + You may obtain a copy of the License at + + http://www.apache.org/licenses/LICENSE-2.0 + + Unless required by applicable law or agreed to in writing, software + distributed under the License is distributed on an "AS IS" BASIS, + WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. + See the License for the specific language governing permissions and + limitations under the License. \ No newline at end of file diff --git a/ext/dapr-ext-strands/README.rst b/ext/dapr-ext-strands/README.rst new file mode 100644 index 000000000..882ae13b6 --- /dev/null +++ b/ext/dapr-ext-strands/README.rst @@ -0,0 +1,22 @@ +dapr-ext-strands extension +======================= + +|pypi| + +.. 
|pypi| image:: https://badge.fury.io/py/dapr-ext-strands.svg + :target: https://pypi.org/project/dapr-ext-strands/ + +This is the Dapr Session Manager for Strands Agents + +Installation +------------ + +:: + + pip install dapr-ext-strands + +References +---------- + +* `Dapr `_ +* `Dapr Python-SDK `_ diff --git a/ext/dapr-ext-strands/dapr/ext/strands/__init__.py b/ext/dapr-ext-strands/dapr/ext/strands/__init__.py new file mode 100644 index 000000000..52ab2ee86 --- /dev/null +++ b/ext/dapr-ext-strands/dapr/ext/strands/__init__.py @@ -0,0 +1,21 @@ +# -*- coding: utf-8 -*- + +""" +Copyright 2025 The Dapr Authors +Licensed under the Apache License, Version 2.0 (the "License"); +you may not use this file except in compliance with the License. +You may obtain a copy of the License at + http://www.apache.org/licenses/LICENSE-2.0 +Unless required by applicable law or agreed to in writing, software +distributed under the License is distributed on an "AS IS" BASIS, +WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. +See the License for the specific language governing permissions and +limitations under the License. +""" + +# Import your main classes here +from dapr.ext.strands.dapr_session_manager import DaprSessionManager + +__all__ = [ + 'DaprSessionManager', +] diff --git a/ext/dapr-ext-strands/dapr/ext/strands/dapr_session_manager.py b/ext/dapr-ext-strands/dapr/ext/strands/dapr_session_manager.py new file mode 100644 index 000000000..c9a98ebdf --- /dev/null +++ b/ext/dapr-ext-strands/dapr/ext/strands/dapr_session_manager.py @@ -0,0 +1,551 @@ +""" +Copyright 2026 The Dapr Authors +Licensed under the Apache License, Version 2.0 (the "License"); +you may not use this file except in compliance with the License. 
import json
import logging
from typing import Any, Dict, List, Literal, Optional, cast

from dapr.clients import DaprClient
from dapr.clients.grpc._state import Consistency, StateOptions
from strands import _identifier
from strands.session.repository_session_manager import RepositorySessionManager
from strands.session.session_repository import SessionRepository
from strands.types.exceptions import SessionException
from strands.types.session import Session, SessionAgent, SessionMessage

logger = logging.getLogger(__name__)

# Type-safe consistency constants
ConsistencyLevel = Literal['eventual', 'strong']
DAPR_CONSISTENCY_EVENTUAL: ConsistencyLevel = 'eventual'
DAPR_CONSISTENCY_STRONG: ConsistencyLevel = 'strong'


class DaprSessionManager(RepositorySessionManager, SessionRepository):
    """Dapr state store session manager for distributed storage.

    Stores session data in Dapr state stores (Redis, PostgreSQL, MongoDB, Cosmos DB, etc.)
    with support for TTL and consistency levels.

    Key structure:
        - ``{session_id}:session`` - Session metadata
        - ``{session_id}:agents:{agent_id}`` - Agent metadata
        - ``{session_id}:messages:{agent_id}`` - Message list (JSON array)
        - ``{session_id}:manifest`` - Tracks agent ids so delete_session can clean up
    """

    def __init__(
        self,
        session_id: str,
        state_store_name: str,
        dapr_client: DaprClient,
        ttl: Optional[int] = None,
        consistency: ConsistencyLevel = DAPR_CONSISTENCY_EVENTUAL,
    ):
        """Initialize DaprSessionManager.

        Args:
            session_id: ID for the session.
                ID is not allowed to contain path separators (e.g., a/b).
            state_store_name: Name of the Dapr state store component.
            dapr_client: DaprClient instance for state operations.
            ttl: Optional time-to-live in seconds for state items.
            consistency: Consistency level for state operations ("eventual" or "strong").
        """
        self._state_store_name = state_store_name
        self._dapr_client = dapr_client
        self._ttl = ttl
        self._consistency = consistency
        # True only when this manager created the client itself (see from_address);
        # close() will then close the client.
        self._owns_client = False

        super().__init__(session_id=session_id, session_repository=self)

    @classmethod
    def from_address(
        cls,
        session_id: str,
        state_store_name: str,
        dapr_address: str = 'localhost:50001',
    ) -> 'DaprSessionManager':
        """Create DaprSessionManager from Dapr address.

        Args:
            session_id: ID for the session.
            state_store_name: Name of the Dapr state store component.
            dapr_address: Dapr gRPC endpoint (default: localhost:50001).

        Returns:
            DaprSessionManager instance with owned client.
        """
        dapr_client = DaprClient(address=dapr_address)
        manager = cls(session_id, state_store_name=state_store_name, dapr_client=dapr_client)
        manager._owns_client = True
        return manager

    def _get_session_key(self, session_id: str) -> str:
        """Get session state key.

        Args:
            session_id: ID for the session.

        Returns:
            State store key for the session.

        Raises:
            ValueError: If session id contains a path separator.
        """
        session_id = _identifier.validate(session_id, _identifier.Identifier.SESSION)
        return f'{session_id}:session'

    def _get_agent_key(self, session_id: str, agent_id: str) -> str:
        """Get agent state key.

        Args:
            session_id: ID for the session.
            agent_id: ID for the agent.

        Returns:
            State store key for the agent.

        Raises:
            ValueError: If session id or agent id contains a path separator.
        """
        session_id = _identifier.validate(session_id, _identifier.Identifier.SESSION)
        agent_id = _identifier.validate(agent_id, _identifier.Identifier.AGENT)
        return f'{session_id}:agents:{agent_id}'

    def _get_messages_key(self, session_id: str, agent_id: str) -> str:
        """Get messages list state key.

        Args:
            session_id: ID for the session.
            agent_id: ID for the agent.

        Returns:
            State store key for the messages list.

        Raises:
            ValueError: If session id or agent id contains a path separator.
        """
        session_id = _identifier.validate(session_id, _identifier.Identifier.SESSION)
        agent_id = _identifier.validate(agent_id, _identifier.Identifier.AGENT)
        return f'{session_id}:messages:{agent_id}'

    def _get_manifest_key(self, session_id: str) -> str:
        """Get session manifest key (tracks agent_ids for deletion)."""
        session_id = _identifier.validate(session_id, _identifier.Identifier.SESSION)
        return f'{session_id}:manifest'

    def _get_read_metadata(self) -> Dict[str, str]:
        """Get metadata for read operations (consistency).

        Returns:
            Metadata dictionary for state reads.
        """
        metadata: Dict[str, str] = {}
        if self._consistency:
            metadata['consistency'] = self._consistency
        return metadata

    def _get_write_metadata(self) -> Dict[str, str]:
        """Get metadata for write operations (TTL).

        Returns:
            Metadata dictionary for state writes.
        """
        metadata: Dict[str, str] = {}
        if self._ttl is not None:
            metadata['ttlInSeconds'] = str(self._ttl)
        return metadata

    def _get_state_options(self) -> Optional[StateOptions]:
        """Get state options for write/delete operations (consistency).

        Returns:
            StateOptions for consistency or None.
        """
        if self._consistency == DAPR_CONSISTENCY_STRONG:
            return StateOptions(consistency=Consistency.strong)
        elif self._consistency == DAPR_CONSISTENCY_EVENTUAL:
            return StateOptions(consistency=Consistency.eventual)
        return None

    def _read_state(self, key: str) -> Optional[Dict[str, Any]]:
        """Read and parse JSON state from Dapr.

        Args:
            key: State store key.

        Returns:
            Parsed JSON dictionary or None if not found.

        Raises:
            SessionException: If state is corrupted or read fails.
        """
        try:
            response = self._dapr_client.get_state(
                store_name=self._state_store_name,
                key=key,
                state_metadata=self._get_read_metadata(),
            )

            # An empty payload means the key does not exist in the store.
            if not response.data:
                return None

            content = response.data.decode('utf-8')
            return cast(Dict[str, Any], json.loads(content))

        except json.JSONDecodeError as e:
            raise SessionException(f'Invalid JSON in state key {key}: {e}') from e
        except Exception as e:
            raise SessionException(f'Failed to read state key {key}: {e}') from e

    def _write_state(self, key: str, data: Dict[str, Any]) -> None:
        """Write JSON state to Dapr.

        Args:
            key: State store key.
            data: Dictionary to serialize and store.

        Raises:
            SessionException: If write fails.
        """
        try:
            content = json.dumps(data, ensure_ascii=False)
            self._dapr_client.save_state(
                store_name=self._state_store_name,
                key=key,
                value=content,
                state_metadata=self._get_write_metadata(),
                options=self._get_state_options(),
            )
        except Exception as e:
            raise SessionException(f'Failed to write state key {key}: {e}') from e

    def _delete_state(self, key: str) -> None:
        """Delete state from Dapr.

        Args:
            key: State store key.

        Raises:
            SessionException: If delete fails.
        """
        try:
            self._dapr_client.delete_state(
                store_name=self._state_store_name,
                key=key,
                options=self._get_state_options(),
            )
        except Exception as e:
            raise SessionException(f'Failed to delete state key {key}: {e}') from e

    def _load_message_dicts(self, session_id: str, agent_id: str) -> Optional[List[Dict[str, Any]]]:
        """Load the raw message dictionaries for an agent.

        Centralizes the read + sanitize logic shared by all message operations.

        Args:
            session_id: ID of the session.
            agent_id: ID of the agent.

        Returns:
            The list of message dictionaries, or None if the messages key
            does not exist in the state store. A present-but-malformed
            'messages' entry is coerced to an empty list.

        Raises:
            SessionException: If read fails.
        """
        messages_key = self._get_messages_key(session_id, agent_id)
        messages_data = self._read_state(messages_key)
        if messages_data is None:
            return None

        messages_list = messages_data.get('messages', [])
        if not isinstance(messages_list, list):
            # Corrupted/unexpected shape: treat as empty rather than crash.
            messages_list = []
        return messages_list

    def create_session(self, session: Session) -> Session:
        """Create a new session.

        Args:
            session: Session to create.

        Returns:
            Created session.

        Raises:
            SessionException: If session already exists or creation fails.
        """
        session_key = self._get_session_key(session.session_id)

        # Check if session already exists
        existing = self.read_session(session.session_id)
        if existing is not None:
            raise SessionException(f'Session {session.session_id} already exists')

        # Write session data
        session_dict = session.to_dict()
        self._write_state(session_key, session_dict)
        return session

    def read_session(self, session_id: str) -> Optional[Session]:
        """Read session data.

        Args:
            session_id: ID of the session to read.

        Returns:
            Session if found, None otherwise.

        Raises:
            SessionException: If read fails.
        """
        session_key = self._get_session_key(session_id)

        session_data = self._read_state(session_key)
        if session_data is None:
            return None

        return Session.from_dict(session_data)

    def delete_session(self, session_id: str) -> None:
        """Delete session and all associated data.

        Uses a session manifest to discover agent IDs for cleanup.

        Args:
            session_id: ID of the session to delete.

        Raises:
            SessionException: If a delete operation fails.
        """
        session_key = self._get_session_key(session_id)
        manifest_key = self._get_manifest_key(session_id)

        # Read manifest (may be missing if no agents created)
        manifest = self._read_state(manifest_key)
        agent_ids: List[str] = manifest.get('agents', []) if manifest else []

        # Delete agent and message keys
        for agent_id in agent_ids:
            agent_key = self._get_agent_key(session_id, agent_id)
            messages_key = self._get_messages_key(session_id, agent_id)
            self._delete_state(agent_key)
            self._delete_state(messages_key)

        # Delete manifest and session
        self._delete_state(manifest_key)
        self._delete_state(session_key)

    def create_agent(self, session_id: str, session_agent: SessionAgent) -> None:
        """Create a new agent in the session.

        Args:
            session_id: ID of the session.
            session_agent: Agent to create.

        Raises:
            SessionException: If creation fails.
        """
        agent_key = self._get_agent_key(session_id, session_agent.agent_id)
        agent_dict = session_agent.to_dict()

        self._write_state(agent_key, agent_dict)

        # Initialize empty messages list
        messages_key = self._get_messages_key(session_id, session_agent.agent_id)
        self._write_state(messages_key, {'messages': []})

        # Update manifest with this agent so delete_session can find it later
        manifest_key = self._get_manifest_key(session_id)
        manifest = self._read_state(manifest_key) or {'agents': []}
        if session_agent.agent_id not in manifest['agents']:
            manifest['agents'].append(session_agent.agent_id)
            self._write_state(manifest_key, manifest)

    def read_agent(self, session_id: str, agent_id: str) -> Optional[SessionAgent]:
        """Read agent data.

        Args:
            session_id: ID of the session.
            agent_id: ID of the agent.

        Returns:
            SessionAgent if found, None otherwise.

        Raises:
            SessionException: If read fails.
        """
        agent_key = self._get_agent_key(session_id, agent_id)

        agent_data = self._read_state(agent_key)
        if agent_data is None:
            return None

        return SessionAgent.from_dict(agent_data)

    def update_agent(self, session_id: str, session_agent: SessionAgent) -> None:
        """Update agent data.

        Args:
            session_id: ID of the session.
            session_agent: Agent to update.

        Raises:
            SessionException: If agent doesn't exist or update fails.
        """
        previous_agent = self.read_agent(session_id=session_id, agent_id=session_agent.agent_id)
        if previous_agent is None:
            raise SessionException(
                f'Agent {session_agent.agent_id} in session {session_id} does not exist'
            )

        # Preserve creation timestamp
        session_agent.created_at = previous_agent.created_at

        agent_key = self._get_agent_key(session_id, session_agent.agent_id)

        self._write_state(agent_key, session_agent.to_dict())

    def create_message(
        self,
        session_id: str,
        agent_id: str,
        session_message: SessionMessage,
    ) -> None:
        """Create a new message for the agent.

        Args:
            session_id: ID of the session.
            agent_id: ID of the agent.
            session_message: Message to create.

        Raises:
            SessionException: If creation fails.
        """
        # Missing key and malformed payload both start from an empty list.
        messages_list = self._load_message_dicts(session_id, agent_id) or []

        # Append new message
        messages_list.append(session_message.to_dict())

        # Write back
        messages_key = self._get_messages_key(session_id, agent_id)
        self._write_state(messages_key, {'messages': messages_list})

    def read_message(
        self, session_id: str, agent_id: str, message_id: int
    ) -> Optional[SessionMessage]:
        """Read message data.

        Args:
            session_id: ID of the session.
            agent_id: ID of the agent.
            message_id: Index of the message.

        Returns:
            SessionMessage if found, None otherwise.

        Raises:
            ValueError: If message_id is not an integer.
            SessionException: If read fails.
        """
        if not isinstance(message_id, int):
            raise ValueError(f'message_id=<{message_id}> | message id must be an integer')

        messages_list = self._load_message_dicts(session_id, agent_id)
        if messages_list is None:
            return None

        # Find message by ID
        for msg_dict in messages_list:
            if msg_dict.get('message_id') == message_id:
                return SessionMessage.from_dict(msg_dict)

        return None

    def update_message(
        self, session_id: str, agent_id: str, session_message: SessionMessage
    ) -> None:
        """Update message data.

        Args:
            session_id: ID of the session.
            agent_id: ID of the agent.
            session_message: Message to update.

        Raises:
            SessionException: If message doesn't exist or update fails.
        """
        previous_message = self.read_message(
            session_id=session_id, agent_id=agent_id, message_id=session_message.message_id
        )
        if previous_message is None:
            raise SessionException(f'Message {session_message.message_id} does not exist')

        # Preserve creation timestamp
        session_message.created_at = previous_message.created_at

        messages_list = self._load_message_dicts(session_id, agent_id)
        if messages_list is None:
            raise SessionException(
                f'Messages not found for agent {agent_id} in session {session_id}'
            )

        # Find and update message
        updated = False
        for i, msg_dict in enumerate(messages_list):
            if msg_dict.get('message_id') == session_message.message_id:
                messages_list[i] = session_message.to_dict()
                updated = True
                break

        if not updated:
            raise SessionException(f'Message {session_message.message_id} not found in list')

        # Write back
        messages_key = self._get_messages_key(session_id, agent_id)
        self._write_state(messages_key, {'messages': messages_list})

    def list_messages(
        self,
        session_id: str,
        agent_id: str,
        limit: Optional[int] = None,
        offset: int = 0,
    ) -> List[SessionMessage]:
        """List messages for an agent with pagination.

        Args:
            session_id: ID of the session.
            agent_id: ID of the agent.
            limit: Maximum number of messages to return.
            offset: Number of messages to skip.

        Returns:
            List of SessionMessage objects.

        Raises:
            SessionException: If read fails.
        """
        messages_list = self._load_message_dicts(session_id, agent_id)
        if messages_list is None:
            return []

        # Apply pagination
        if limit is not None:
            messages_list = messages_list[offset : offset + limit]
        else:
            messages_list = messages_list[offset:]

        # Convert to SessionMessage objects
        return [SessionMessage.from_dict(msg_dict) for msg_dict in messages_list]

    def close(self) -> None:
        """Close the Dapr client if owned by this manager."""
        if self._owns_client:
            self._dapr_client.close()
+See the License for the specific language governing permissions and +limitations under the License. +""" + +__version__ = '1.16.0.dev' diff --git a/ext/dapr-ext-strands/setup.cfg b/ext/dapr-ext-strands/setup.cfg new file mode 100644 index 000000000..3149e8d2d --- /dev/null +++ b/ext/dapr-ext-strands/setup.cfg @@ -0,0 +1,42 @@ +[metadata] +url = https://dapr.io/ +author = Dapr Authors +author_email = daprweb@microsoft.com +license = Apache +license_file = LICENSE +classifiers = + Development Status :: 5 - Production/Stable + Intended Audience :: Developers + License :: OSI Approved :: Apache Software License + Operating System :: OS Independent + Programming Language :: Python + Programming Language :: Python :: 3.10 + Programming Language :: Python :: 3.11 + Programming Language :: Python :: 3.12 + Programming Language :: Python :: 3.13 + Programming Language :: Python :: 3.14 +project_urls = + Documentation = https://github.com/dapr/docs + Source = https://github.com/dapr/python-sdk + +[options] +python_requires = >=3.10 +packages = find_namespace: +include_package_data = True +install_requires = + dapr >= 1.16.1rc1 + strands-agents + strands-agents-tools + python-ulid >= 3.0.0 + msgpack-python >= 0.4.5 + +[options.packages.find] +include = + dapr.* + +exclude = + tests + +[options.package_data] +dapr.ext.strands = + py.typed \ No newline at end of file diff --git a/ext/dapr-ext-strands/setup.py b/ext/dapr-ext-strands/setup.py new file mode 100644 index 000000000..1d8c6732d --- /dev/null +++ b/ext/dapr-ext-strands/setup.py @@ -0,0 +1,66 @@ +# -*- coding: utf-8 -*- + +""" +Copyright 2025 The Dapr Authors +Licensed under the Apache License, Version 2.0 (the "License"); +you may not use this file except in compliance with the License. 
+You may obtain a copy of the License at + http://www.apache.org/licenses/LICENSE-2.0 +Unless required by applicable law or agreed to in writing, software +distributed under the License is distributed on an "AS IS" BASIS, +WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. +See the License for the specific language governing permissions and +limitations under the License. +""" + +import os + +from setuptools import setup + +# Load version in dapr package. +version_info = {} +with open('dapr/ext/strands/version.py') as fp: + exec(fp.read(), version_info) +__version__ = version_info['__version__'] + + +def is_release(): + return '.dev' not in __version__ + + +name = 'dapr-ext-strands' +version = __version__ +description = 'The official release of Dapr Python SDK Strands Agents Extension.' +long_description = """ +This is the Dapr Session Manager extension for Strands Agents. + +Dapr is a portable, serverless, event-driven runtime that makes it easy for developers to +build resilient, stateless and stateful microservices that run on the cloud and edge and +embraces the diversity of languages and developer frameworks. + +Dapr codifies the best practices for building microservice applications into open, +independent, building blocks that enable you to build portable applications with the language +and framework of your choice. Each building block is independent and you can use one, some, +or all of them in your application. 
+""".lstrip() + +# Get build number from GITHUB_RUN_NUMBER environment variable +build_number = os.environ.get('GITHUB_RUN_NUMBER', '0') + +if not is_release(): + name += '-dev' + version = f'{__version__}{build_number}' + description = ( + 'The developmental release for the Dapr Session Manager extension for Strands Agents' + ) + long_description = 'This is the developmental release for the Dapr Session Manager extension for Strands Agents' + +print(f'package name: {name}, version: {version}', flush=True) + + +setup( + name=name, + version=version, + description=description, + long_description=long_description, +) diff --git a/ext/dapr-ext-strands/tests/__init__.py b/ext/dapr-ext-strands/tests/__init__.py new file mode 100644 index 000000000..ad87aedb7 --- /dev/null +++ b/ext/dapr-ext-strands/tests/__init__.py @@ -0,0 +1,14 @@ +# -*- coding: utf-8 -*- + +""" +Copyright 2025 The Dapr Authors +Licensed under the Apache License, Version 2.0 (the "License"); +you may not use this file except in compliance with the License. +You may obtain a copy of the License at + http://www.apache.org/licenses/LICENSE-2.0 +Unless required by applicable law or agreed to in writing, software +distributed under the License is distributed on an "AS IS" BASIS, +WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. +See the License for the specific language governing permissions and +limitations under the License. +""" diff --git a/ext/dapr-ext-strands/tests/test_session_manager.py b/ext/dapr-ext-strands/tests/test_session_manager.py new file mode 100644 index 000000000..6f25edc55 --- /dev/null +++ b/ext/dapr-ext-strands/tests/test_session_manager.py @@ -0,0 +1,168 @@ +""" +Copyright 2026 The Dapr Authors +Licensed under the Apache License, Version 2.0 (the "License"); +you may not use this file except in compliance with the License. 
+You may obtain a copy of the License at + http://www.apache.org/licenses/LICENSE-2.0 +Unless required by applicable law or agreed to in writing, software +distributed under the License is distributed on an "AS IS" BASIS, +WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. +See the License for the specific language governing permissions and +limitations under the License. +""" + +# -*- coding: utf-8 -*- + +import json +import time +import unittest +from unittest import mock + +from dapr.ext.strands.dapr_session_manager import DaprSessionManager +from strands.types.exceptions import SessionException +from strands.types.session import Session, SessionAgent, SessionMessage + + +def dapr_state(data): + """Simulate a real Dapr get_state() response.""" + resp = mock.Mock() + resp.data = None if data is None else json.dumps(data).encode('utf-8') + return resp + + +def make_session(session_id='s1'): + return Session.from_dict( + { + 'session_id': session_id, + 'session_type': 'chat', + 'created_at': time.time(), + 'metadata': {}, + } + ) + + +def make_agent(agent_id='a1'): + return SessionAgent.from_dict( + { + 'agent_id': agent_id, + 'state': {}, + 'conversation_manager_state': {}, + 'created_at': time.time(), + } + ) + + +def make_message(message_id=1, text='hello'): + return SessionMessage.from_dict( + { + 'message_id': message_id, + 'role': 'user', + 'message': text, + 'created_at': time.time(), + } + ) + + +@mock.patch('dapr.ext.strands.dapr_session_manager.DaprClient') +class DaprSessionManagerTest(unittest.TestCase): + def setUp(self): + self.session_id = 's1' + self.store = 'statestore' + + self.mock_client = mock.Mock() + self.mock_client.get_state.return_value = dapr_state(None) + + self.manager = DaprSessionManager( + session_id=self.session_id, + state_store_name=self.store, + dapr_client=self.mock_client, + ) + + # + # session + # + def test_create_and_read_session(self, _): + session = make_session(self.session_id) + + 
self.manager.create_session(session) + + self.mock_client.get_state.return_value = dapr_state(session.to_dict()) + read = self.manager.read_session(self.session_id) + + assert read.session_id == self.session_id + + def test_create_session_raises_if_exists(self, _): + session = make_session(self.session_id) + + self.mock_client.get_state.return_value = dapr_state(session.to_dict()) + + with self.assertRaises(SessionException): + self.manager.create_session(session) + + # + # agent + # + def test_create_and_read_agent(self, _): + agent = make_agent('a1') + + self.manager.create_agent(self.session_id, agent) + + self.mock_client.get_state.return_value = dapr_state(agent.to_dict()) + read = self.manager.read_agent(self.session_id, 'a1') + + assert read.agent_id == 'a1' + + def test_update_agent_preserves_created_at(self, _): + agent = make_agent('a1') + original_ts = agent.created_at + + self.mock_client.get_state.return_value = dapr_state(agent.to_dict()) + + agent.state['x'] = 1 + self.manager.update_agent(self.session_id, agent) + + saved = json.loads(self.mock_client.save_state.call_args[1]['value']) + assert saved['created_at'] == original_ts + + def test_create_and_read_message(self, _): + msg = make_message(1, 'hello') + + self.manager.create_message(self.session_id, 'a1', msg) + + messages = {'messages': [msg.to_dict()]} + self.mock_client.get_state.return_value = dapr_state(messages) + + read = self.manager.read_message(self.session_id, 'a1', 1) + assert read.message == 'hello' + + def test_update_message_preserves_created_at(self, _): + msg = make_message(1, 'old') + original_ts = msg.created_at + + messages = {'messages': [msg.to_dict()]} + self.mock_client.get_state.return_value = dapr_state(messages) + + msg.message = 'new' + self.manager.update_message(self.session_id, 'a1', msg) + + saved = json.loads(self.mock_client.save_state.call_args[1]['value']) + updated = saved['messages'][0] + + assert updated['created_at'] == original_ts + assert 
updated['message'] == 'new' + + def test_delete_session_deletes_agents_and_messages(self, _): + manifest = {'agents': ['a1', 'a2']} + self.mock_client.get_state.return_value = dapr_state(manifest) + + self.manager.delete_session(self.session_id) + assert self.mock_client.delete_state.call_count == 6 + + def test_close_only_closes_owned_client(self, _): + self.manager._owns_client = True + self.manager.close() + self.mock_client.close.assert_called_once() + + +if __name__ == '__main__': + unittest.main() diff --git a/tox.ini b/tox.ini index 0697a4082..1bdb17921 100644 --- a/tox.ini +++ b/tox.ini @@ -16,6 +16,7 @@ commands = coverage run -a -m unittest discover -v ./ext/dapr-ext-grpc/tests coverage run -a -m unittest discover -v ./ext/dapr-ext-fastapi/tests coverage run -a -m unittest discover -v ./ext/dapr-ext-langgraph/tests + coverage run -a -m unittest discover -v ./ext/dapr-ext-strands/tests coverage run -a -m unittest discover -v ./ext/flask_dapr/tests coverage xml commands_pre = @@ -24,6 +25,7 @@ commands_pre = pip3 install -e {toxinidir}/ext/dapr-ext-grpc/ pip3 install -e {toxinidir}/ext/dapr-ext-fastapi/ pip3 install -e {toxinidir}/ext/dapr-ext-langgraph/ + pip3 install -e {toxinidir}/ext/dapr-ext-strands/ pip3 install -e {toxinidir}/ext/flask_dapr/ [testenv:ruff] @@ -69,6 +71,7 @@ commands_pre = pip3 install -e {toxinidir}/ext/dapr-ext-grpc/ pip3 install -e {toxinidir}/ext/dapr-ext-fastapi/ pip3 install -e {toxinidir}/ext/dapr-ext-langgraph/ + pip3 install -e {toxinidir}/ext/dapr-ext-strands/ allowlist_externals=* [testenv:example-component] @@ -89,6 +92,7 @@ commands_pre = pip3 install -e {toxinidir}/ext/dapr-ext-grpc/ pip3 install -e {toxinidir}/ext/dapr-ext-fastapi/ pip3 install -e {toxinidir}/ext/dapr-ext-langgraph/ + pip3 install -e {toxinidir}/ext/dapr-ext-strands/ allowlist_externals=* [testenv:type] @@ -103,6 +107,7 @@ commands_pre = pip3 install -e {toxinidir}/ext/dapr-ext-grpc/ pip3 install -e {toxinidir}/ext/dapr-ext-fastapi/ pip3 install -e 
{toxinidir}/ext/dapr-ext-langgraph/ + pip3 install -e {toxinidir}/ext/dapr-ext-strands/ [testenv:doc] basepython = python3 usedevelop = False From d72e14ff2b79695622cc2b09e0442f76cbfd63b8 Mon Sep 17 00:00:00 2001 From: Albert Callarisa Date: Mon, 12 Jan 2026 17:22:57 +0100 Subject: [PATCH 16/16] Release v1.16.1rc2 Signed-off-by: Albert Callarisa --- dapr/version/version.py | 2 +- examples/demo_actor/demo_actor/requirements.txt | 2 +- examples/demo_workflow/demo_workflow/requirements.txt | 2 +- examples/invoke-simple/requirements.txt | 4 ++-- examples/w3c-tracing/requirements.txt | 4 ++-- examples/workflow/requirements.txt | 4 ++-- ext/dapr-ext-fastapi/dapr/ext/fastapi/version.py | 2 +- ext/dapr-ext-fastapi/setup.cfg | 2 +- ext/dapr-ext-grpc/dapr/ext/grpc/version.py | 2 +- ext/dapr-ext-grpc/setup.cfg | 2 +- ext/dapr-ext-langgraph/setup.cfg | 4 ++-- ext/dapr-ext-strands/setup.cfg | 4 ++-- ext/dapr-ext-workflow/dapr/ext/workflow/version.py | 2 +- ext/dapr-ext-workflow/setup.cfg | 2 +- ext/flask_dapr/flask_dapr/version.py | 2 +- 15 files changed, 20 insertions(+), 20 deletions(-) diff --git a/dapr/version/version.py b/dapr/version/version.py index 8c6c12960..5c39bd99d 100644 --- a/dapr/version/version.py +++ b/dapr/version/version.py @@ -13,4 +13,4 @@ limitations under the License. 
""" -__version__ = '1.16.1rc1' +__version__ = '1.16.1rc2' diff --git a/examples/demo_actor/demo_actor/requirements.txt b/examples/demo_actor/demo_actor/requirements.txt index 9496602ed..75e46a7ff 100644 --- a/examples/demo_actor/demo_actor/requirements.txt +++ b/examples/demo_actor/demo_actor/requirements.txt @@ -1 +1 @@ -dapr-ext-fastapi>=1.16.1rc1 +dapr-ext-fastapi>=1.16.1rc2 diff --git a/examples/demo_workflow/demo_workflow/requirements.txt b/examples/demo_workflow/demo_workflow/requirements.txt index a70b02692..d7d8618e3 100644 --- a/examples/demo_workflow/demo_workflow/requirements.txt +++ b/examples/demo_workflow/demo_workflow/requirements.txt @@ -1 +1 @@ -dapr-ext-workflow>=1.16.1rc1 +dapr-ext-workflow>=1.16.1rc2 diff --git a/examples/invoke-simple/requirements.txt b/examples/invoke-simple/requirements.txt index e77f5d6e2..f83f9509e 100644 --- a/examples/invoke-simple/requirements.txt +++ b/examples/invoke-simple/requirements.txt @@ -1,2 +1,2 @@ -dapr-ext-grpc >= 1.16.1rc1 -dapr >= 1.16.1rc1 +dapr-ext-grpc >= 1.16.1rc2 +dapr >= 1.16.1rc2 diff --git a/examples/w3c-tracing/requirements.txt b/examples/w3c-tracing/requirements.txt index 514e2606a..36daee5c7 100644 --- a/examples/w3c-tracing/requirements.txt +++ b/examples/w3c-tracing/requirements.txt @@ -1,5 +1,5 @@ -dapr-ext-grpc >= 1.16.1rc1 -dapr >= 1.16.1rc1 +dapr-ext-grpc >= 1.16.1rc2 +dapr >= 1.16.1rc2 opentelemetry-sdk opentelemetry-instrumentation-grpc opentelemetry-exporter-zipkin diff --git a/examples/workflow/requirements.txt b/examples/workflow/requirements.txt index fab86e728..c7132e3a1 100644 --- a/examples/workflow/requirements.txt +++ b/examples/workflow/requirements.txt @@ -1,2 +1,2 @@ -dapr-ext-workflow>=1.16.1rc1 -dapr>=1.16.1rc1 +dapr-ext-workflow>=1.16.1rc2 +dapr>=1.16.1rc2 diff --git a/ext/dapr-ext-fastapi/dapr/ext/fastapi/version.py b/ext/dapr-ext-fastapi/dapr/ext/fastapi/version.py index 8c6c12960..5c39bd99d 100644 --- a/ext/dapr-ext-fastapi/dapr/ext/fastapi/version.py +++ 
b/ext/dapr-ext-fastapi/dapr/ext/fastapi/version.py @@ -13,4 +13,4 @@ limitations under the License. """ -__version__ = '1.16.1rc1' +__version__ = '1.16.1rc2' diff --git a/ext/dapr-ext-fastapi/setup.cfg b/ext/dapr-ext-fastapi/setup.cfg index 289b8fdc6..2bf42bbc1 100644 --- a/ext/dapr-ext-fastapi/setup.cfg +++ b/ext/dapr-ext-fastapi/setup.cfg @@ -24,7 +24,7 @@ python_requires = >=3.9 packages = find_namespace: include_package_data = True install_requires = - dapr >= 1.16.1rc1 + dapr >= 1.16.1rc2 uvicorn >= 0.11.6 fastapi >= 0.60.1 diff --git a/ext/dapr-ext-grpc/dapr/ext/grpc/version.py b/ext/dapr-ext-grpc/dapr/ext/grpc/version.py index 8c6c12960..5c39bd99d 100644 --- a/ext/dapr-ext-grpc/dapr/ext/grpc/version.py +++ b/ext/dapr-ext-grpc/dapr/ext/grpc/version.py @@ -13,4 +13,4 @@ limitations under the License. """ -__version__ = '1.16.1rc1' +__version__ = '1.16.1rc2' diff --git a/ext/dapr-ext-grpc/setup.cfg b/ext/dapr-ext-grpc/setup.cfg index 5ba8f86f4..3256a06cf 100644 --- a/ext/dapr-ext-grpc/setup.cfg +++ b/ext/dapr-ext-grpc/setup.cfg @@ -24,7 +24,7 @@ python_requires = >=3.9 packages = find_namespace: include_package_data = True install_requires = - dapr >= 1.16.1rc1 + dapr >= 1.16.1rc2 cloudevents >= 1.0.0 [options.packages.find] diff --git a/ext/dapr-ext-langgraph/setup.cfg b/ext/dapr-ext-langgraph/setup.cfg index f08a79548..dc9286d7c 100644 --- a/ext/dapr-ext-langgraph/setup.cfg +++ b/ext/dapr-ext-langgraph/setup.cfg @@ -24,7 +24,7 @@ python_requires = >=3.10 packages = find_namespace: include_package_data = True install_requires = - dapr >= 1.16.1rc1 + dapr >= 1.16.1rc2 langgraph >= 0.3.6 langchain >= 0.1.17 python-ulid >= 3.0.0 @@ -39,4 +39,4 @@ exclude = [options.package_data] dapr.ext.langgraph = - py.typed \ No newline at end of file + py.typed diff --git a/ext/dapr-ext-strands/setup.cfg b/ext/dapr-ext-strands/setup.cfg index 3149e8d2d..fe619e9ba 100644 --- a/ext/dapr-ext-strands/setup.cfg +++ b/ext/dapr-ext-strands/setup.cfg @@ -24,7 +24,7 @@ python_requires 
= >=3.10 packages = find_namespace: include_package_data = True install_requires = - dapr >= 1.16.1rc1 + dapr >= 1.16.1rc2 strands-agents strands-agents-tools python-ulid >= 3.0.0 @@ -39,4 +39,4 @@ exclude = [options.package_data] dapr.ext.strands = - py.typed \ No newline at end of file + py.typed diff --git a/ext/dapr-ext-workflow/dapr/ext/workflow/version.py b/ext/dapr-ext-workflow/dapr/ext/workflow/version.py index 8c6c12960..5c39bd99d 100644 --- a/ext/dapr-ext-workflow/dapr/ext/workflow/version.py +++ b/ext/dapr-ext-workflow/dapr/ext/workflow/version.py @@ -13,4 +13,4 @@ limitations under the License. """ -__version__ = '1.16.1rc1' +__version__ = '1.16.1rc2' diff --git a/ext/dapr-ext-workflow/setup.cfg b/ext/dapr-ext-workflow/setup.cfg index 67b833393..8b37327ce 100644 --- a/ext/dapr-ext-workflow/setup.cfg +++ b/ext/dapr-ext-workflow/setup.cfg @@ -24,7 +24,7 @@ python_requires = >=3.9 packages = find_namespace: include_package_data = True install_requires = - dapr >= 1.16.1rc1 + dapr >= 1.16.1rc2 durabletask-dapr >= 0.2.0a9 [options.packages.find] diff --git a/ext/flask_dapr/flask_dapr/version.py b/ext/flask_dapr/flask_dapr/version.py index 8c6c12960..5c39bd99d 100644 --- a/ext/flask_dapr/flask_dapr/version.py +++ b/ext/flask_dapr/flask_dapr/version.py @@ -13,4 +13,4 @@ limitations under the License. """ -__version__ = '1.16.1rc1' +__version__ = '1.16.1rc2'