diff --git a/.devcontainer/devcontainer.json b/.devcontainer/devcontainer.json index f03d84153..6042b7429 100644 --- a/.devcontainer/devcontainer.json +++ b/.devcontainer/devcontainer.json @@ -9,7 +9,6 @@ "terminal.integrated.shell.linux": "/bin/bash", "python.pythonPath": "/usr/local/bin/python", "python.linting.enabled": true, - "python.linting.flake8Enabled": true, "python.linting.mypyEnabled": true, }, diff --git a/.github/workflows/build-push-to-main.yaml b/.github/workflows/build-push-to-main.yaml index 4a3a63053..499daebde 100644 --- a/.github/workflows/build-push-to-main.yaml +++ b/.github/workflows/build-push-to-main.yaml @@ -10,11 +10,11 @@ jobs: lint: runs-on: ubuntu-latest steps: - - uses: actions/checkout@v5 - - name: Set up Python 3.9 + - uses: actions/checkout@v6 + - name: Set up Python 3.10 uses: actions/setup-python@v6 with: - python-version: 3.9 + python-version: '3.10' - name: Install dependencies run: | python -m pip install --upgrade pip @@ -30,9 +30,6 @@ jobs: echo "Source files are not formatted correctly. Run 'tox -e ruff' to autoformat." 
exit 1 fi - - name: Run Linter - run: | - tox -e flake8 build: needs: lint @@ -40,9 +37,9 @@ jobs: strategy: fail-fast: false matrix: - python_ver: ["3.9", "3.10", "3.11", "3.12", "3.13"] + python_ver: ["3.10", "3.11", "3.12", "3.13"] steps: - - uses: actions/checkout@v5 + - uses: actions/checkout@v6 - name: Set up Python ${{ matrix.python_ver }} uses: actions/setup-python@v6 with: @@ -66,11 +63,11 @@ jobs: env: TWINE_USERNAME: "__token__" steps: - - uses: actions/checkout@v5 - - name: Set up Python 3.9 + - uses: actions/checkout@v6 + - name: Set up Python 3.10 uses: actions/setup-python@v6 with: - python-version: 3.9 + python-version: '3.10' - name: Install dependencies run: | python -m pip install --upgrade pip @@ -109,3 +106,17 @@ jobs: cd ext/dapr-ext-fastapi python setup.py sdist bdist_wheel twine upload dist/* + - name: Build and publish dapr-ext-langgraph + env: + TWINE_PASSWORD: ${{ secrets.PYPI_UPLOAD_PASS }} + run: | + cd ext/dapr-ext-langgraph + python setup.py sdist bdist_wheel + twine upload dist/* + - name: Build and publish dapr-ext-strands + env: + TWINE_PASSWORD: ${{ secrets.PYPI_UPLOAD_PASS }} + run: | + cd ext/dapr-ext-strands + python setup.py sdist bdist_wheel + twine upload dist/* diff --git a/.github/workflows/build-tag.yaml b/.github/workflows/build-tag.yaml index 46593dd96..42e95268c 100644 --- a/.github/workflows/build-tag.yaml +++ b/.github/workflows/build-tag.yaml @@ -14,11 +14,11 @@ jobs: lint: runs-on: ubuntu-latest steps: - - uses: actions/checkout@v5 - - name: Set up Python 3.9 + - uses: actions/checkout@v6 + - name: Set up Python 3.10 uses: actions/setup-python@v6 with: - python-version: 3.9 + python-version: '3.10' - name: Install dependencies run: | python -m pip install --upgrade pip @@ -34,9 +34,6 @@ jobs: echo "Source files are not formatted correctly. Run 'tox -e ruff' to autoformat." 
exit 1 fi - - name: Run Linter - run: | - tox -e flake8 build: needs: lint @@ -44,9 +41,9 @@ jobs: strategy: fail-fast: false matrix: - python_ver: ["3.9", "3.10", "3.11", "3.12", "3.13"] + python_ver: ["3.10", "3.11", "3.12", "3.13"] steps: - - uses: actions/checkout@v5 + - uses: actions/checkout@v6 - name: Set up Python ${{ matrix.python_ver }} uses: actions/setup-python@v6 with: @@ -70,11 +67,11 @@ jobs: env: TWINE_USERNAME: "__token__" steps: - - uses: actions/checkout@v5 - - name: Set up Python 3.9 + - uses: actions/checkout@v6 + - name: Set up Python 3.10 uses: actions/setup-python@v6 with: - python-version: 3.9 + python-version: '3.10' - name: Install dependencies run: | python -m pip install --upgrade pip @@ -118,3 +115,19 @@ jobs: cd ext/dapr-ext-fastapi python setup.py sdist bdist_wheel twine upload dist/* + - name: Build and publish dapr-ext-langgraph + if: startsWith(github.ref_name, 'langgraph-v') + env: + TWINE_PASSWORD: ${{ secrets.PYPI_UPLOAD_PASS }} + run: | + cd ext/dapr-ext-langgraph + python setup.py sdist bdist_wheel + twine upload dist/* + - name: Build and publish dapr-ext-strands + if: startsWith(github.ref_name, 'strands-v') + env: + TWINE_PASSWORD: ${{ secrets.PYPI_UPLOAD_PASS }} + run: | + cd ext/dapr-ext-strands + python setup.py sdist bdist_wheel + twine upload dist/* diff --git a/.github/workflows/build.yaml b/.github/workflows/build.yaml index 67f052fae..7e03d1b80 100644 --- a/.github/workflows/build.yaml +++ b/.github/workflows/build.yaml @@ -16,11 +16,11 @@ jobs: lint: runs-on: ubuntu-latest steps: - - uses: actions/checkout@v5 - - name: Set up Python 3.9 + - uses: actions/checkout@v6 + - name: Set up Python 3.10 uses: actions/setup-python@v6 with: - python-version: 3.9 + python-version: '3.10' - name: Install dependencies run: | python -m pip install --upgrade pip @@ -36,9 +36,6 @@ jobs: echo "Source files are not formatted correctly. Run 'tox -e ruff' to autoformat." 
exit 1 fi - - name: Run Linter - run: | - tox -e flake8 build: needs: lint @@ -46,9 +43,9 @@ jobs: strategy: fail-fast: false matrix: - python_ver: ["3.9", "3.10", "3.11", "3.12", "3.13"] + python_ver: ["3.10", "3.11", "3.12", "3.13"] steps: - - uses: actions/checkout@v5 + - uses: actions/checkout@v6 - name: Set up Python ${{ matrix.python_ver }} uses: actions/setup-python@v6 with: @@ -64,4 +61,4 @@ jobs: run: | tox -e py`echo "${{ matrix.python_ver }}" | sed 's/\.//g'` - name: Upload test coverage - uses: codecov/codecov-action@v5 \ No newline at end of file + uses: codecov/codecov-action@v5 diff --git a/.github/workflows/dapr-bot-schedule.yml b/.github/workflows/dapr-bot-schedule.yml index eb9579bdd..918c163ec 100644 --- a/.github/workflows/dapr-bot-schedule.yml +++ b/.github/workflows/dapr-bot-schedule.yml @@ -24,7 +24,7 @@ jobs: runs-on: ubuntu-latest steps: - name: Checkout repo - uses: actions/checkout@v5 + uses: actions/checkout@v6 - name: Install dependencies run: pip install PyGithub - name: Automerge and update diff --git a/.github/workflows/fossa.yaml b/.github/workflows/fossa.yaml index f9eb56f5a..18b45526c 100644 --- a/.github/workflows/fossa.yaml +++ b/.github/workflows/fossa.yaml @@ -40,7 +40,7 @@ jobs: FOSSA_API_KEY: b88e1f4287c3108c8751bf106fb46db6 # This is a push-only token that is safe to be exposed. 
steps: - name: "Checkout code" - uses: actions/checkout@v5 + uses: actions/checkout@v6 - name: "Run FOSSA Scan" uses: fossas/fossa-action@v1.7.0 # Use a specific version if locking is preferred diff --git a/.github/workflows/validate_examples.yaml b/.github/workflows/validate_examples.yaml index e55a22913..c89147dc3 100644 --- a/.github/workflows/validate_examples.yaml +++ b/.github/workflows/validate_examples.yaml @@ -46,7 +46,7 @@ jobs: strategy: fail-fast: false matrix: - python_ver: ["3.9", "3.10", "3.11", "3.12", "3.13"] + python_ver: ["3.10", "3.11", "3.12", "3.13"] steps: - name: Parse repository_dispatch payload if: github.event_name == 'repository_dispatch' @@ -58,7 +58,7 @@ jobs: fi - name: Check out code onto GOPATH - uses: actions/checkout@v5 + uses: actions/checkout@v6 with: repository: ${{ env.CHECKOUT_REPO }} ref: ${{ env.CHECKOUT_REF }} @@ -70,7 +70,7 @@ jobs: echo "Found $RUNTIME_VERSION" - name: Determine latest Dapr Cli version run: | - export CLI_VERSION=$(curl "https://api.github.com/repos/dapr/cli/releases?per_page=1&page=1" --header 'authorization: Bearer ${{ secrets.GITHUB_TOKEN }}' | jq '.[0].tag_name'| tr -d '",v') + export CLI_VERSION=$(curl "https://api.github.com/repos/dapr/cli/releases/latest" --header 'authorization: Bearer ${{ secrets.GITHUB_TOKEN }}' | jq '.tag_name'| tr -d '",v') echo "DAPR_CLI_VER=$CLI_VERSION" >> $GITHUB_ENV echo "Found $CLI_VERSION" - name: Set up Python ${{ matrix.python_ver }} @@ -88,15 +88,21 @@ jobs: uses: actions/setup-go@v5 with: go-version: ${{ env.GOVER }} + - name: Set up Llama + run: | + curl -fsSL https://ollama.com/install.sh | sh + nohup ollama serve & + sleep 10 + ollama pull llama3.2:latest - name: Checkout Dapr CLI repo to override dapr command. - uses: actions/checkout@v5 + uses: actions/checkout@v6 if: env.DAPR_CLI_REF != '' with: repository: dapr/cli ref: ${{ env.DAPR_CLI_REF }} path: cli - name: Checkout Dapr repo to override daprd. 
- uses: actions/checkout@v5 + uses: actions/checkout@v6 if: env.DAPR_REF != '' with: repository: dapr/dapr diff --git a/README.md b/README.md index 17e434808..f205a1b6a 100644 --- a/README.md +++ b/README.md @@ -1,13 +1,13 @@ # Dapr SDK for Python -[![PyPI - Version](https://img.shields.io/pypi/v/dapr?style=flat&logo=pypi&logoColor=white&label=Latest%20version)](https://pypi.org/project/dapr/) -[![PyPI - Downloads](https://img.shields.io/pypi/dm/dapr?style=flat&logo=pypi&logoColor=white&label=Downloads)](https://pypi.org/project/dapr/) -[![GitHub Actions Workflow Status](https://img.shields.io/github/actions/workflow/status/dapr/python-sdk/.github%2Fworkflows%2Fbuild.yaml?branch=main&label=Build&logo=github)](https://github.com/dapr/python-sdk/actions/workflows/build.yaml) -[![codecov](https://codecov.io/gh/dapr/python-sdk/branch/main/graph/badge.svg)](https://codecov.io/gh/dapr/python-sdk) -[![GitHub License](https://img.shields.io/github/license/dapr/python-sdk?style=flat&label=License&logo=github)](https://github.com/dapr/python-sdk/blob/main/LICENSE) -[![GitHub issue custom search in repo](https://img.shields.io/github/issues-search/dapr/python-sdk?query=type%3Aissue%20is%3Aopen%20label%3A%22good%20first%20issue%22&label=Good%20first%20issues&style=flat&logo=github)](https://github.com/dapr/python-sdk/issues?q=is%3Aissue+is%3Aopen+label%3A%22good+first+issue%22) -[![Discord](https://img.shields.io/discord/778680217417809931?label=Discord&style=flat&logo=discord)](http://bit.ly/dapr-discord) -[![YouTube Channel Views](https://img.shields.io/youtube/channel/views/UCtpSQ9BLB_3EXdWAUQYwnRA?style=flat&label=YouTube%20views&logo=youtube)](https://youtube.com/@daprdev) +[![PyPI - Version](https://img.shields.io/pypi/v/dapr?style=flat&logo=pypi&logoColor=white&label=Latest%20version)](https://pypi.org/project/dapr/) +[![PyPI - Downloads](https://img.shields.io/pypi/dm/dapr?style=flat&logo=pypi&logoColor=white&label=Downloads)](https://pypi.org/project/dapr/) +[![GitHub 
Actions Workflow Status](https://img.shields.io/github/actions/workflow/status/dapr/python-sdk/.github%2Fworkflows%2Fbuild.yaml?branch=main&label=Build&logo=github)](https://github.com/dapr/python-sdk/actions/workflows/build.yaml) +[![codecov](https://codecov.io/gh/dapr/python-sdk/branch/main/graph/badge.svg)](https://codecov.io/gh/dapr/python-sdk) +[![GitHub License](https://img.shields.io/github/license/dapr/python-sdk?style=flat&label=License&logo=github)](https://github.com/dapr/python-sdk/blob/main/LICENSE) +[![GitHub issue custom search in repo](https://img.shields.io/github/issues-search/dapr/python-sdk?query=type%3Aissue%20is%3Aopen%20label%3A%22good%20first%20issue%22&label=Good%20first%20issues&style=flat&logo=github)](https://github.com/dapr/python-sdk/issues?q=is%3Aissue+is%3Aopen+label%3A%22good+first+issue%22) +[![Discord](https://img.shields.io/discord/778680217417809931?label=Discord&style=flat&logo=discord)](http://bit.ly/dapr-discord) +[![YouTube Channel Views](https://img.shields.io/youtube/channel/views/UCtpSQ9BLB_3EXdWAUQYwnRA?style=flat&label=YouTube%20views&logo=youtube)](https://youtube.com/@daprdev) [![X (formerly Twitter) Follow](https://img.shields.io/twitter/follow/daprdev?logo=x&style=flat)](https://twitter.com/daprdev) @@ -86,6 +86,8 @@ pip3 install -e . pip3 install -e ./ext/dapr-ext-grpc/ pip3 install -e ./ext/dapr-ext-fastapi/ pip3 install -e ./ext/dapr-ext-workflow/ +pip3 install -e ./ext/dapr-ext-langgraph/ +pip3 install -e ./ext/dapr-ext-strands/ ``` 3. Install required packages @@ -94,31 +96,25 @@ pip3 install -e ./ext/dapr-ext-workflow/ pip3 install -r dev-requirements.txt ``` -4. Run linter - -```bash -tox -e flake8 -``` - -5. Run autofix +4. Run linter and autofix ```bash tox -e ruff ``` -6. Run unit-test +5. Run unit-test ```bash tox -e py311 ``` -7. Run type check +6. Run type check ```bash tox -e type ``` -8. Run examples +7. 
Run examples ```bash tox -e examples @@ -153,7 +149,7 @@ export DAPR_BRANCH=release-1.16 # Optional, defaults to master ./tools/regen_grpcclient.sh ``` -> Note: The `grpcio-tools` version we're using doesn't support Python 3.13. +> Note: The `grpcio-tools` version we're using doesn't support Python 3.13. ## Help & Feedback diff --git a/dapr/actor/__init__.py b/dapr/actor/__init__.py index 4323caae2..bf21f488c 100644 --- a/dapr/actor/__init__.py +++ b/dapr/actor/__init__.py @@ -20,7 +20,6 @@ from dapr.actor.runtime.remindable import Remindable from dapr.actor.runtime.runtime import ActorRuntime - __all__ = [ 'ActorInterface', 'ActorProxy', diff --git a/dapr/actor/client/proxy.py b/dapr/actor/client/proxy.py index a7648bf97..dcf1ca436 100644 --- a/dapr/actor/client/proxy.py +++ b/dapr/actor/client/proxy.py @@ -21,8 +21,8 @@ from dapr.actor.runtime._type_utils import get_dispatchable_attrs_from_interface from dapr.clients import DaprActorClientBase, DaprActorHttpClient from dapr.clients.retry import RetryPolicy -from dapr.serializers import Serializer, DefaultJSONSerializer from dapr.conf import settings +from dapr.serializers import DefaultJSONSerializer, Serializer # Actor factory Callable type hint. ACTOR_FACTORY_CALLBACK = Callable[[ActorInterface, str, str], 'ActorProxy'] @@ -35,8 +35,7 @@ def create( actor_type: str, actor_id: ActorId, actor_interface: Optional[Type[ActorInterface]] = None, - ) -> 'ActorProxy': - ... + ) -> 'ActorProxy': ... 
class ActorProxyFactory(ActorFactoryBase): diff --git a/dapr/actor/runtime/_reminder_data.py b/dapr/actor/runtime/_reminder_data.py index 8821c94bc..5453b8162 100644 --- a/dapr/actor/runtime/_reminder_data.py +++ b/dapr/actor/runtime/_reminder_data.py @@ -14,7 +14,6 @@ """ import base64 - from datetime import timedelta from typing import Any, Dict, Optional diff --git a/dapr/actor/runtime/_state_provider.py b/dapr/actor/runtime/_state_provider.py index 54f6b5837..eeb1e4995 100644 --- a/dapr/actor/runtime/_state_provider.py +++ b/dapr/actor/runtime/_state_provider.py @@ -14,12 +14,11 @@ """ import io +from typing import Any, List, Tuple, Type -from typing import Any, List, Type, Tuple -from dapr.actor.runtime.state_change import StateChangeKind, ActorStateChange +from dapr.actor.runtime.state_change import ActorStateChange, StateChangeKind from dapr.clients.base import DaprActorClientBase -from dapr.serializers import Serializer, DefaultJSONSerializer - +from dapr.serializers import DefaultJSONSerializer, Serializer # Mapping StateChangeKind to Dapr State Operation _MAP_CHANGE_KIND_TO_OPERATION = { diff --git a/dapr/actor/runtime/_type_information.py b/dapr/actor/runtime/_type_information.py index 72566eb17..f9171aea8 100644 --- a/dapr/actor/runtime/_type_information.py +++ b/dapr/actor/runtime/_type_information.py @@ -13,10 +13,10 @@ limitations under the License. 
""" -from dapr.actor.runtime.remindable import Remindable -from dapr.actor.runtime._type_utils import is_dapr_actor, get_actor_interfaces +from typing import TYPE_CHECKING, List, Type -from typing import List, Type, TYPE_CHECKING +from dapr.actor.runtime._type_utils import get_actor_interfaces, is_dapr_actor +from dapr.actor.runtime.remindable import Remindable if TYPE_CHECKING: from dapr.actor.actor_interface import ActorInterface # noqa: F401 diff --git a/dapr/actor/runtime/actor.py b/dapr/actor/runtime/actor.py index 79b1e6ab1..fab02fc70 100644 --- a/dapr/actor/runtime/actor.py +++ b/dapr/actor/runtime/actor.py @@ -14,16 +14,15 @@ """ import uuid - from datetime import timedelta from typing import Any, Optional from dapr.actor.id import ActorId from dapr.actor.runtime._method_context import ActorMethodContext -from dapr.actor.runtime.context import ActorRuntimeContext -from dapr.actor.runtime.state_manager import ActorStateManager from dapr.actor.runtime._reminder_data import ActorReminderData from dapr.actor.runtime._timer_data import TIMER_CALLBACK, ActorTimerData +from dapr.actor.runtime.context import ActorRuntimeContext +from dapr.actor.runtime.state_manager import ActorStateManager class Actor: diff --git a/dapr/actor/runtime/context.py b/dapr/actor/runtime/context.py index ec66ba366..b2610bed4 100644 --- a/dapr/actor/runtime/context.py +++ b/dapr/actor/runtime/context.py @@ -13,16 +13,16 @@ limitations under the License. 
""" +from typing import TYPE_CHECKING, Callable, Optional + from dapr.actor.id import ActorId from dapr.actor.runtime._state_provider import StateProvider from dapr.clients.base import DaprActorClientBase from dapr.serializers import Serializer -from typing import Callable, Optional, TYPE_CHECKING - if TYPE_CHECKING: - from dapr.actor.runtime.actor import Actor from dapr.actor.runtime._type_information import ActorTypeInformation + from dapr.actor.runtime.actor import Actor class ActorRuntimeContext: diff --git a/dapr/actor/runtime/manager.py b/dapr/actor/runtime/manager.py index a6d1a792a..969e48e2a 100644 --- a/dapr/actor/runtime/manager.py +++ b/dapr/actor/runtime/manager.py @@ -15,17 +15,16 @@ import asyncio import uuid - from typing import Any, Callable, Coroutine, Dict, Optional from dapr.actor.id import ActorId -from dapr.clients.exceptions import DaprInternalError +from dapr.actor.runtime._method_context import ActorMethodContext +from dapr.actor.runtime._reminder_data import ActorReminderData from dapr.actor.runtime.actor import Actor from dapr.actor.runtime.context import ActorRuntimeContext -from dapr.actor.runtime._method_context import ActorMethodContext from dapr.actor.runtime.method_dispatcher import ActorMethodDispatcher -from dapr.actor.runtime._reminder_data import ActorReminderData from dapr.actor.runtime.reentrancy_context import reentrancy_ctx +from dapr.clients.exceptions import DaprInternalError TIMER_METHOD_NAME = 'fire_timer' REMINDER_METHOD_NAME = 'receive_reminder' diff --git a/dapr/actor/runtime/method_dispatcher.py b/dapr/actor/runtime/method_dispatcher.py index 8d9b65114..ffe66d991 100644 --- a/dapr/actor/runtime/method_dispatcher.py +++ b/dapr/actor/runtime/method_dispatcher.py @@ -14,9 +14,10 @@ """ from typing import Any, Dict, List -from dapr.actor.runtime.actor import Actor + from dapr.actor.runtime._type_information import ActorTypeInformation from dapr.actor.runtime._type_utils import get_dispatchable_attrs +from 
dapr.actor.runtime.actor import Actor class ActorMethodDispatcher: diff --git a/dapr/actor/runtime/reentrancy_context.py b/dapr/actor/runtime/reentrancy_context.py index 0fc9927df..b295b57d7 100644 --- a/dapr/actor/runtime/reentrancy_context.py +++ b/dapr/actor/runtime/reentrancy_context.py @@ -13,7 +13,7 @@ limitations under the License. """ -from typing import Optional from contextvars import ContextVar +from typing import Optional reentrancy_ctx: ContextVar[Optional[str]] = ContextVar('reentrancy_ctx', default=None) diff --git a/dapr/actor/runtime/runtime.py b/dapr/actor/runtime/runtime.py index 3659f1479..b03f0bc75 100644 --- a/dapr/actor/runtime/runtime.py +++ b/dapr/actor/runtime/runtime.py @@ -14,20 +14,18 @@ """ import asyncio - -from typing import Dict, List, Optional, Type, Callable +from typing import Callable, Dict, List, Optional, Type from dapr.actor.id import ActorId +from dapr.actor.runtime._type_information import ActorTypeInformation from dapr.actor.runtime.actor import Actor from dapr.actor.runtime.config import ActorRuntimeConfig from dapr.actor.runtime.context import ActorRuntimeContext -from dapr.actor.runtime._type_information import ActorTypeInformation from dapr.actor.runtime.manager import ActorManager +from dapr.actor.runtime.reentrancy_context import reentrancy_ctx from dapr.clients.http.dapr_actor_http_client import DaprActorHttpClient -from dapr.serializers import Serializer, DefaultJSONSerializer from dapr.conf import settings - -from dapr.actor.runtime.reentrancy_context import reentrancy_ctx +from dapr.serializers import DefaultJSONSerializer, Serializer class ActorRuntime: diff --git a/dapr/actor/runtime/state_change.py b/dapr/actor/runtime/state_change.py index dba21e2c1..4937fcb53 100644 --- a/dapr/actor/runtime/state_change.py +++ b/dapr/actor/runtime/state_change.py @@ -14,7 +14,7 @@ """ from enum import Enum -from typing import TypeVar, Generic, Optional +from typing import Generic, Optional, TypeVar T = TypeVar('T') diff 
--git a/dapr/aio/clients/__init__.py b/dapr/aio/clients/__init__.py index e945b1307..3f7ce6363 100644 --- a/dapr/aio/clients/__init__.py +++ b/dapr/aio/clients/__init__.py @@ -15,14 +15,15 @@ from typing import Callable, Dict, List, Optional, Union +from google.protobuf.message import Message as GrpcMessage + +from dapr.aio.clients.grpc.client import DaprGrpcClientAsync, InvokeMethodResponse, MetadataTuple from dapr.clients.base import DaprActorClientBase -from dapr.clients.exceptions import DaprInternalError, ERROR_CODE_UNKNOWN -from dapr.aio.clients.grpc.client import DaprGrpcClientAsync, MetadataTuple, InvokeMethodResponse -from dapr.clients.grpc._jobs import Job, FailurePolicy, DropFailurePolicy, ConstantFailurePolicy +from dapr.clients.exceptions import ERROR_CODE_UNKNOWN, DaprInternalError +from dapr.clients.grpc._jobs import ConstantFailurePolicy, DropFailurePolicy, FailurePolicy, Job from dapr.clients.http.dapr_actor_http_client import DaprActorHttpClient from dapr.clients.http.dapr_invocation_http_client import DaprInvocationHttpClient from dapr.conf import settings -from google.protobuf.message import Message as GrpcMessage __all__ = [ 'DaprClient', @@ -37,10 +38,10 @@ ] from grpc.aio import ( # type: ignore - UnaryUnaryClientInterceptor, - UnaryStreamClientInterceptor, - StreamUnaryClientInterceptor, StreamStreamClientInterceptor, + StreamUnaryClientInterceptor, + UnaryStreamClientInterceptor, + UnaryUnaryClientInterceptor, ) diff --git a/dapr/aio/clients/grpc/_request.py b/dapr/aio/clients/grpc/_request.py index b3c3ce2d4..129c556f3 100644 --- a/dapr/aio/clients/grpc/_request.py +++ b/dapr/aio/clients/grpc/_request.py @@ -16,7 +16,7 @@ import io from typing import Union -from dapr.clients.grpc._crypto import EncryptOptions, DecryptOptions +from dapr.clients.grpc._crypto import DecryptOptions, EncryptOptions from dapr.clients.grpc._helpers import to_bytes from dapr.clients.grpc._request import DaprRequest from dapr.proto import api_v1, common_v1 diff 
--git a/dapr/aio/clients/grpc/_response.py b/dapr/aio/clients/grpc/_response.py index 480eb7769..5380ede6d 100644 --- a/dapr/aio/clients/grpc/_response.py +++ b/dapr/aio/clients/grpc/_response.py @@ -15,8 +15,8 @@ from typing import AsyncGenerator, Generic -from dapr.proto import api_v1 from dapr.clients.grpc._response import DaprResponse, TCryptoResponse +from dapr.proto import api_v1 class CryptoResponse(DaprResponse, Generic[TCryptoResponse]): @@ -83,9 +83,7 @@ async def read(self, size: int = -1) -> bytes: return data[:size] -class EncryptResponse(CryptoResponse[api_v1.EncryptResponse]): - ... +class EncryptResponse(CryptoResponse[api_v1.EncryptResponse]): ... -class DecryptResponse(CryptoResponse[api_v1.DecryptResponse]): - ... +class DecryptResponse(CryptoResponse[api_v1.DecryptResponse]): ... diff --git a/dapr/aio/clients/grpc/client.py b/dapr/aio/clients/grpc/client.py index 1b76dcb0f..d363775fb 100644 --- a/dapr/aio/clients/grpc/client.py +++ b/dapr/aio/clients/grpc/client.py @@ -14,96 +14,90 @@ """ import asyncio -import time -import socket import json +import socket +import time import uuid - from datetime import datetime +from typing import Any, Awaitable, Callable, Dict, List, Optional, Sequence, Text, Union from urllib.parse import urlencode - from warnings import warn -from typing import Callable, Dict, Optional, Text, Union, Sequence, List, Any, Awaitable -from typing_extensions import Self - -from google.protobuf.message import Message as GrpcMessage -from google.protobuf.empty_pb2 import Empty as GrpcEmpty -from google.protobuf.any_pb2 import Any as GrpcAny - import grpc.aio # type: ignore +from google.protobuf.any_pb2 import Any as GrpcAny +from google.protobuf.empty_pb2 import Empty as GrpcEmpty +from google.protobuf.message import Message as GrpcMessage from grpc.aio import ( # type: ignore - UnaryUnaryClientInterceptor, - UnaryStreamClientInterceptor, - StreamUnaryClientInterceptor, - StreamStreamClientInterceptor, AioRpcError, + 
StreamStreamClientInterceptor, + StreamUnaryClientInterceptor, + UnaryStreamClientInterceptor, + UnaryUnaryClientInterceptor, ) +from typing_extensions import Self -from dapr.aio.clients.grpc.subscription import Subscription -from dapr.clients.exceptions import DaprInternalError, DaprGrpcError -from dapr.clients.grpc._crypto import EncryptOptions, DecryptOptions -from dapr.clients.grpc._state import StateOptions, StateItem -from dapr.clients.grpc._helpers import getWorkflowRuntimeStatus -from dapr.clients.health import DaprHealth -from dapr.clients.retry import RetryPolicy -from dapr.common.pubsub.subscription import StreamInactiveError -from dapr.conf.helpers import GrpcEndpoint -from dapr.conf import settings -from dapr.proto import api_v1, api_service_v1, common_v1 -from dapr.proto.runtime.v1.dapr_pb2 import UnsubscribeConfigurationResponse -from dapr.version import __version__ - +from dapr.aio.clients.grpc._request import ( + DecryptRequestIterator, + EncryptRequestIterator, +) +from dapr.aio.clients.grpc._response import ( + DecryptResponse, + EncryptResponse, +) from dapr.aio.clients.grpc.interceptors import ( DaprClientInterceptorAsync, DaprClientTimeoutInterceptorAsync, ) +from dapr.aio.clients.grpc.subscription import Subscription +from dapr.clients.exceptions import DaprGrpcError, DaprInternalError +from dapr.clients.grpc import conversation +from dapr.clients.grpc._crypto import DecryptOptions, EncryptOptions from dapr.clients.grpc._helpers import ( MetadataTuple, - to_bytes, - validateNotNone, - validateNotBlankString, convert_dict_to_grpc_dict_of_any, convert_value_to_struct, + getWorkflowRuntimeStatus, + to_bytes, + validateNotBlankString, + validateNotNone, ) -from dapr.aio.clients.grpc._request import ( - EncryptRequestIterator, - DecryptRequestIterator, -) -from dapr.aio.clients.grpc._response import ( - EncryptResponse, - DecryptResponse, -) +from dapr.clients.grpc._jobs import Job from dapr.clients.grpc._request import ( - InvokeMethodRequest, 
BindingRequest, + InvokeMethodRequest, TransactionalStateOperation, ) -from dapr.clients.grpc import conversation - -from dapr.clients.grpc._jobs import Job from dapr.clients.grpc._response import ( BindingResponse, + BulkStateItem, + BulkStatesResponse, + ConfigurationResponse, + ConfigurationWatcher, DaprResponse, - GetSecretResponse, GetBulkSecretResponse, GetMetadataResponse, + GetSecretResponse, + GetWorkflowResponse, InvokeMethodResponse, - UnlockResponseStatus, - StateResponse, - BulkStatesResponse, - BulkStateItem, - ConfigurationResponse, QueryResponse, QueryResponseItem, RegisteredComponents, - ConfigurationWatcher, - TryLockResponse, - UnlockResponse, - GetWorkflowResponse, StartWorkflowResponse, + StateResponse, TopicEventResponse, + TryLockResponse, + UnlockResponse, + UnlockResponseStatus, ) +from dapr.clients.grpc._state import StateItem, StateOptions +from dapr.clients.health import DaprHealth +from dapr.clients.retry import RetryPolicy +from dapr.common.pubsub.subscription import StreamInactiveError +from dapr.conf import settings +from dapr.conf.helpers import GrpcEndpoint +from dapr.proto import api_service_v1, api_v1, common_v1 +from dapr.proto.runtime.v1.dapr_pb2 import UnsubscribeConfigurationResponse +from dapr.version import __version__ class DaprGrpcClientAsync: @@ -170,7 +164,7 @@ def __init__( if not address: address = settings.DAPR_GRPC_ENDPOINT or ( - f'{settings.DAPR_RUNTIME_HOST}:' f'{settings.DAPR_GRPC_PORT}' + f'{settings.DAPR_RUNTIME_HOST}:{settings.DAPR_GRPC_PORT}' ) try: @@ -1912,7 +1906,7 @@ async def wait(self, timeout_s: float): remaining = (start + timeout_s) - time.time() if remaining < 0: raise e - asyncio.sleep(min(1, remaining)) + await asyncio.sleep(min(1, remaining)) async def get_metadata(self) -> GetMetadataResponse: """Returns information about the sidecar allowing for runtime diff --git a/dapr/aio/clients/grpc/interceptors.py b/dapr/aio/clients/grpc/interceptors.py index bf83cf56a..0444d5acb 100644 --- 
a/dapr/aio/clients/grpc/interceptors.py +++ b/dapr/aio/clients/grpc/interceptors.py @@ -16,7 +16,11 @@ from collections import namedtuple from typing import List, Tuple -from grpc.aio import UnaryUnaryClientInterceptor, StreamStreamClientInterceptor, ClientCallDetails # type: ignore +from grpc.aio import ( # type: ignore + ClientCallDetails, + StreamStreamClientInterceptor, + UnaryUnaryClientInterceptor, +) from dapr.conf import settings diff --git a/dapr/aio/clients/grpc/subscription.py b/dapr/aio/clients/grpc/subscription.py index e0e380ca6..5ba1f6a33 100644 --- a/dapr/aio/clients/grpc/subscription.py +++ b/dapr/aio/clients/grpc/subscription.py @@ -1,13 +1,14 @@ import asyncio + from grpc import StatusCode from grpc.aio import AioRpcError +from dapr.aio.clients.health import DaprHealth from dapr.clients.grpc._response import TopicEventResponse -from dapr.clients.health import DaprHealth from dapr.common.pubsub.subscription import ( + StreamCancelledError, StreamInactiveError, SubscriptionMessage, - StreamCancelledError, ) from dapr.proto import api_v1, appcallback_v1 @@ -51,7 +52,7 @@ async def outgoing_request_iterator(): async def reconnect_stream(self): await self.close() - DaprHealth.wait_for_sidecar() + await DaprHealth.wait_for_sidecar() print('Attempting to reconnect...') await self.start() @@ -66,7 +67,7 @@ async def next_message(self): return None return SubscriptionMessage(message.event_message) except AioRpcError as e: - if e.code() == StatusCode.UNAVAILABLE: + if e.code() == StatusCode.UNAVAILABLE or e.code() == StatusCode.UNKNOWN: print( f'gRPC error while reading from stream: {e.details()}, ' f'Status Code: {e.code()}. 
' diff --git a/dapr/aio/clients/health.py b/dapr/aio/clients/health.py new file mode 100644 index 000000000..9ab66ebba --- /dev/null +++ b/dapr/aio/clients/health.py @@ -0,0 +1,59 @@ +# -*- coding: utf-8 -*- + +""" +Copyright 2024 The Dapr Authors +Licensed under the Apache License, Version 2.0 (the "License"); +you may not use this file except in compliance with the License. +You may obtain a copy of the License at + http://www.apache.org/licenses/LICENSE-2.0 +Unless required by applicable law or agreed to in writing, software +distributed under the License is distributed on an "AS IS" BASIS, +WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. +See the License for the specific language governing permissions and +limitations under the License. +""" + +import asyncio +import time + +import aiohttp + +from dapr.clients.http.conf import DAPR_API_TOKEN_HEADER, DAPR_USER_AGENT, USER_AGENT_HEADER +from dapr.clients.http.helpers import get_api_url +from dapr.conf import settings + + +class DaprHealth: + @staticmethod + async def wait_for_sidecar(): + health_url = f'{get_api_url()}/healthz/outbound' + headers = {USER_AGENT_HEADER: DAPR_USER_AGENT} + if settings.DAPR_API_TOKEN is not None: + headers[DAPR_API_TOKEN_HEADER] = settings.DAPR_API_TOKEN + timeout = float(settings.DAPR_HEALTH_TIMEOUT) + + start = time.time() + ssl_context = DaprHealth.get_ssl_context() + + connector = aiohttp.TCPConnector(ssl=ssl_context) + async with aiohttp.ClientSession(connector=connector) as session: + while True: + try: + async with session.get(health_url, headers=headers) as response: + if 200 <= response.status < 300: + break + except aiohttp.ClientError as e: + print(f'Health check on {health_url} failed: {e}') + except Exception as e: + print(f'Unexpected error during health check: {e}') + + remaining = (start + timeout) - time.time() + if remaining <= 0: + raise TimeoutError(f'Dapr health check timed out, after {timeout}.') + await asyncio.sleep(min(1, remaining)) 
+ + @staticmethod + def get_ssl_context(): + # This method is used (overwritten) from tests + # to return context for self-signed certificates + return None diff --git a/dapr/clients/__init__.py b/dapr/clients/__init__.py index 78ad99eb4..5d92b56c7 100644 --- a/dapr/clients/__init__.py +++ b/dapr/clients/__init__.py @@ -16,16 +16,16 @@ from typing import Callable, Dict, List, Optional, Union from warnings import warn +from google.protobuf.message import Message as GrpcMessage + from dapr.clients.base import DaprActorClientBase -from dapr.clients.exceptions import DaprInternalError, ERROR_CODE_UNKNOWN -from dapr.clients.grpc.client import DaprGrpcClient, MetadataTuple, InvokeMethodResponse -from dapr.clients.grpc._jobs import Job, FailurePolicy, DropFailurePolicy, ConstantFailurePolicy +from dapr.clients.exceptions import ERROR_CODE_UNKNOWN, DaprInternalError +from dapr.clients.grpc._jobs import ConstantFailurePolicy, DropFailurePolicy, FailurePolicy, Job +from dapr.clients.grpc.client import DaprGrpcClient, InvokeMethodResponse, MetadataTuple from dapr.clients.http.dapr_actor_http_client import DaprActorHttpClient from dapr.clients.http.dapr_invocation_http_client import DaprInvocationHttpClient from dapr.clients.retry import RetryPolicy from dapr.conf import settings -from google.protobuf.message import Message as GrpcMessage - __all__ = [ 'DaprClient', @@ -41,10 +41,10 @@ from grpc import ( # type: ignore - UnaryUnaryClientInterceptor, - UnaryStreamClientInterceptor, - StreamUnaryClientInterceptor, StreamStreamClientInterceptor, + StreamUnaryClientInterceptor, + UnaryStreamClientInterceptor, + UnaryUnaryClientInterceptor, ) diff --git a/dapr/clients/base.py b/dapr/clients/base.py index d2b972245..da2bb5257 100644 --- a/dapr/clients/base.py +++ b/dapr/clients/base.py @@ -23,31 +23,28 @@ class DaprActorClientBase(ABC): @abstractmethod async def invoke_method( self, actor_type: str, actor_id: str, method: str, data: Optional[bytes] = None - ) -> bytes: - ... 
+ ) -> bytes: ... @abstractmethod - async def save_state_transactionally(self, actor_type: str, actor_id: str, data: bytes) -> None: - ... + async def save_state_transactionally( + self, actor_type: str, actor_id: str, data: bytes + ) -> None: ... @abstractmethod - async def get_state(self, actor_type: str, actor_id: str, name: str) -> bytes: - ... + async def get_state(self, actor_type: str, actor_id: str, name: str) -> bytes: ... @abstractmethod async def register_reminder( self, actor_type: str, actor_id: str, name: str, data: bytes - ) -> None: - ... + ) -> None: ... @abstractmethod - async def unregister_reminder(self, actor_type: str, actor_id: str, name: str) -> None: - ... + async def unregister_reminder(self, actor_type: str, actor_id: str, name: str) -> None: ... @abstractmethod - async def register_timer(self, actor_type: str, actor_id: str, name: str, data: bytes) -> None: - ... + async def register_timer( + self, actor_type: str, actor_id: str, name: str, data: bytes + ) -> None: ... @abstractmethod - async def unregister_timer(self, actor_type: str, actor_id: str, name: str) -> None: - ... + async def unregister_timer(self, actor_type: str, actor_id: str, name: str) -> None: ... diff --git a/dapr/clients/exceptions.py b/dapr/clients/exceptions.py index 61ae0d8b6..f6358cb85 100644 --- a/dapr/clients/exceptions.py +++ b/dapr/clients/exceptions.py @@ -12,6 +12,7 @@ See the License for the specific language governing permissions and limitations under the License. 
""" + import base64 import json from typing import TYPE_CHECKING, Optional @@ -20,9 +21,9 @@ from dapr.serializers import Serializer from google.protobuf.json_format import MessageToDict +from google.rpc import error_details_pb2 # type: ignore from grpc import RpcError # type: ignore from grpc_status import rpc_status # type: ignore -from google.rpc import error_details_pb2 # type: ignore ERROR_CODE_UNKNOWN = 'UNKNOWN' ERROR_CODE_DOES_NOT_EXIST = 'ERR_DOES_NOT_EXIST' diff --git a/dapr/clients/grpc/_conversation_helpers.py b/dapr/clients/grpc/_conversation_helpers.py index 37bb81c18..9f57c9cc7 100644 --- a/dapr/clients/grpc/_conversation_helpers.py +++ b/dapr/clients/grpc/_conversation_helpers.py @@ -16,6 +16,7 @@ import inspect import random import string +import types from dataclasses import fields, is_dataclass from enum import Enum from typing import ( @@ -23,21 +24,19 @@ Callable, Dict, List, + Literal, Mapping, Optional, Sequence, Union, - Literal, + cast, get_args, get_origin, get_type_hints, - cast, ) from dapr.conf import settings -import types - # Make mypy happy. Runtime handle: real class on 3.10+, else None. # TODO: Python 3.9 is about to be end-of-life, so we can drop this at some point next year (2026) UnionType: Any = getattr(types, 'UnionType', None) @@ -190,14 +189,14 @@ def _json_primitive_type(v: Any) -> str: if settings.DAPR_CONVERSATION_TOOLS_LARGE_ENUM_BEHAVIOR == 'error': raise ValueError( f"Enum '{getattr(python_type, '__name__', str(python_type))}' has {count} members, " - f"exceeding DAPR_CONVERSATION_MAX_ENUM_ITEMS={settings.DAPR_CONVERSATION_TOOLS_MAX_ENUM_ITEMS}. " - f"Either reduce the enum size or set DAPR_CONVERSATION_LARGE_ENUM_BEHAVIOR=string to allow compact schema." + f'exceeding DAPR_CONVERSATION_MAX_ENUM_ITEMS={settings.DAPR_CONVERSATION_TOOLS_MAX_ENUM_ITEMS}. ' + f'Either reduce the enum size or set DAPR_CONVERSATION_LARGE_ENUM_BEHAVIOR=string to allow compact schema.' 
) # Default behavior: compact schema as a string with helpful context and a few examples example_values = [item.value for item in members[:5]] if members else [] desc = ( - f"{getattr(python_type, '__name__', 'Enum')} (enum with {count} values). " - f"Provide a valid value. Schema compacted to avoid oversized enum listing." + f'{getattr(python_type, "__name__", "Enum")} (enum with {count} values). ' + f'Provide a valid value. Schema compacted to avoid oversized enum listing.' ) schema = {'type': 'string', 'description': desc} if example_values: @@ -696,8 +695,8 @@ def stringify_tool_output(value: Any) -> str: * dataclass -> asdict If JSON serialization still fails, fallback to str(value). If that fails, return ''. """ - import json as _json import base64 as _b64 + import json as _json from dataclasses import asdict as _asdict if isinstance(value, str): @@ -760,20 +759,16 @@ def _default(o: Any): # --- Errors ---- -class ToolError(RuntimeError): - ... +class ToolError(RuntimeError): ... -class ToolNotFoundError(ToolError): - ... +class ToolNotFoundError(ToolError): ... -class ToolExecutionError(ToolError): - ... +class ToolExecutionError(ToolError): ... -class ToolArgumentError(ToolError): - ... +class ToolArgumentError(ToolError): ... 
def _coerce_bool(value: Any) -> bool: @@ -962,7 +957,7 @@ def _coerce_and_validate(value: Any, expected_type: Any) -> Any: missing.append(pname) if missing: raise ValueError( - f"Missing required constructor arg(s) for {expected_type.__name__}: {', '.join(missing)}" + f'Missing required constructor arg(s) for {expected_type.__name__}: {", ".join(missing)}' ) try: return expected_type(**kwargs) @@ -978,7 +973,7 @@ def _coerce_and_validate(value: Any, expected_type: Any) -> Any: if expected_type is Any or isinstance(value, expected_type): return value raise ValueError( - f"Expected {getattr(expected_type, '__name__', str(expected_type))}, got {type(value).__name__}" + f'Expected {getattr(expected_type, "__name__", str(expected_type))}, got {type(value).__name__}' ) @@ -1014,12 +1009,12 @@ def bind_params_to_func(fn: Callable[..., Any], params: Params): and p.name not in bound.arguments ] if missing: - raise ToolArgumentError(f"Missing required parameter(s): {', '.join(missing)}") + raise ToolArgumentError(f'Missing required parameter(s): {", ".join(missing)}') # unexpected kwargs unless **kwargs present if not any(p.kind is inspect.Parameter.VAR_KEYWORD for p in sig.parameters.values()): extra = set(params) - set(sig.parameters) if extra: - raise ToolArgumentError(f"Unexpected parameter(s): {', '.join(sorted(extra))}") + raise ToolArgumentError(f'Unexpected parameter(s): {", ".join(sorted(extra))}') elif isinstance(params, Sequence): bound = sig.bind(*params) else: diff --git a/dapr/clients/grpc/_helpers.py b/dapr/clients/grpc/_helpers.py index c68b0f56a..8eb9a1e97 100644 --- a/dapr/clients/grpc/_helpers.py +++ b/dapr/clients/grpc/_helpers.py @@ -12,22 +12,22 @@ See the License for the specific language governing permissions and limitations under the License. 
""" -from enum import Enum -from typing import Any, Dict, List, Optional, Union, Tuple +from enum import Enum +from typing import Any, Dict, List, Optional, Tuple, Union +from google.protobuf import json_format from google.protobuf.any_pb2 import Any as GrpcAny from google.protobuf.message import Message as GrpcMessage +from google.protobuf.struct_pb2 import Struct from google.protobuf.wrappers_pb2 import ( BoolValue, - StringValue, + BytesValue, + DoubleValue, Int32Value, Int64Value, - DoubleValue, - BytesValue, + StringValue, ) -from google.protobuf.struct_pb2 import Struct -from google.protobuf import json_format MetadataDict = Dict[str, List[Union[bytes, str]]] MetadataTuple = Tuple[Tuple[str, Union[bytes, str]], ...] diff --git a/dapr/clients/grpc/_jobs.py b/dapr/clients/grpc/_jobs.py index 896c8db3c..5df9975f0 100644 --- a/dapr/clients/grpc/_jobs.py +++ b/dapr/clients/grpc/_jobs.py @@ -117,9 +117,10 @@ def _get_proto(self): Returns: api_v1.Job: The proto representation of this job. 
""" - from dapr.proto.runtime.v1 import dapr_pb2 as api_v1 from google.protobuf.any_pb2 import Any as GrpcAny + from dapr.proto.runtime.v1 import dapr_pb2 as api_v1 + # Build the job proto job_proto = api_v1.Job(name=self.name) diff --git a/dapr/clients/grpc/_response.py b/dapr/clients/grpc/_response.py index fff511ff7..6898bc42d 100644 --- a/dapr/clients/grpc/_response.py +++ b/dapr/clients/grpc/_response.py @@ -21,19 +21,19 @@ from datetime import datetime from enum import Enum from typing import ( + TYPE_CHECKING, Callable, Dict, + Generator, + Generic, List, + Mapping, + NamedTuple, Optional, - Text, - Union, Sequence, - TYPE_CHECKING, - NamedTuple, - Generator, + Text, TypeVar, - Generic, - Mapping, + Union, ) from google.protobuf.any_pb2 import Any as GrpcAny @@ -43,11 +43,11 @@ from dapr.clients.grpc._helpers import ( MetadataDict, MetadataTuple, + WorkflowRuntimeStatus, to_bytes, to_str, tuple_to_dict, unpack, - WorkflowRuntimeStatus, ) from dapr.proto import api_service_v1, api_v1, appcallback_v1, common_v1 @@ -707,9 +707,9 @@ def _read_subscribe_config( handler: Callable[[Text, ConfigurationResponse], None], ): try: - responses: List[ - api_v1.SubscribeConfigurationResponse - ] = stub.SubscribeConfigurationAlpha1(req) + responses: List[api_v1.SubscribeConfigurationResponse] = ( + stub.SubscribeConfigurationAlpha1(req) + ) isFirst = True for response in responses: if isFirst: @@ -719,7 +719,7 @@ def _read_subscribe_config( if len(response.items) > 0: handler(response.id, ConfigurationResponse(response.items)) except Exception: - print(f'{self.store_name} configuration watcher for keys ' f'{self.keys} stopped.') + print(f'{self.store_name} configuration watcher for keys {self.keys} stopped.') pass @@ -1065,9 +1065,7 @@ def read(self, size: int = -1) -> bytes: return data[:size] -class EncryptResponse(CryptoResponse[TCryptoResponse]): - ... +class EncryptResponse(CryptoResponse[TCryptoResponse]): ... 
-class DecryptResponse(CryptoResponse[TCryptoResponse]): - ... +class DecryptResponse(CryptoResponse[TCryptoResponse]): ... diff --git a/dapr/clients/grpc/_state.py b/dapr/clients/grpc/_state.py index 3dc266b22..e20df4293 100644 --- a/dapr/clients/grpc/_state.py +++ b/dapr/clients/grpc/_state.py @@ -1,7 +1,8 @@ from enum import Enum -from dapr.proto import common_v1 from typing import Dict, Optional, Union +from dapr.proto import common_v1 + class Consistency(Enum): """Represents the consistency mode for a Dapr State Api Call""" diff --git a/dapr/clients/grpc/client.py b/dapr/clients/grpc/client.py index 6c276dd3f..a0a886d06 100644 --- a/dapr/clients/grpc/client.py +++ b/dapr/clients/grpc/client.py @@ -12,89 +12,85 @@ See the License for the specific language governing permissions and limitations under the License. """ + +import json +import socket import threading import time -import socket -import json import uuid - +from datetime import datetime +from typing import Any, Callable, Dict, List, Optional, Sequence, Text, Union from urllib.parse import urlencode - from warnings import warn -from typing import Callable, Dict, Optional, Text, Union, Sequence, List, Any - -from typing_extensions import Self -from datetime import datetime -from google.protobuf.message import Message as GrpcMessage -from google.protobuf.empty_pb2 import Empty as GrpcEmpty -from google.protobuf.any_pb2 import Any as GrpcAny - import grpc # type: ignore +from google.protobuf.any_pb2 import Any as GrpcAny +from google.protobuf.empty_pb2 import Empty as GrpcEmpty +from google.protobuf.message import Message as GrpcMessage from grpc import ( # type: ignore - UnaryUnaryClientInterceptor, - UnaryStreamClientInterceptor, - StreamUnaryClientInterceptor, - StreamStreamClientInterceptor, RpcError, + StreamStreamClientInterceptor, + StreamUnaryClientInterceptor, + UnaryStreamClientInterceptor, + UnaryUnaryClientInterceptor, ) +from typing_extensions import Self -from dapr.clients.exceptions import 
DaprInternalError, DaprGrpcError -from dapr.clients.grpc._state import StateOptions, StateItem -from dapr.clients.grpc._crypto import EncryptOptions, DecryptOptions -from dapr.clients.grpc.subscription import Subscription, StreamInactiveError -from dapr.clients.grpc.interceptors import DaprClientInterceptor, DaprClientTimeoutInterceptor -from dapr.clients.health import DaprHealth -from dapr.clients.retry import RetryPolicy -from dapr.common.pubsub.subscription import StreamCancelledError -from dapr.conf import settings -from dapr.proto import api_v1, api_service_v1, common_v1 -from dapr.proto.runtime.v1.dapr_pb2 import UnsubscribeConfigurationResponse -from dapr.version import __version__ - +from dapr.clients.exceptions import DaprGrpcError, DaprInternalError +from dapr.clients.grpc import conversation +from dapr.clients.grpc._crypto import DecryptOptions, EncryptOptions from dapr.clients.grpc._helpers import ( - getWorkflowRuntimeStatus, MetadataTuple, - to_bytes, - validateNotNone, - validateNotBlankString, convert_dict_to_grpc_dict_of_any, convert_value_to_struct, + getWorkflowRuntimeStatus, + to_bytes, + validateNotBlankString, + validateNotNone, ) -from dapr.conf.helpers import GrpcEndpoint +from dapr.clients.grpc._jobs import Job from dapr.clients.grpc._request import ( - InvokeMethodRequest, BindingRequest, - TransactionalStateOperation, - EncryptRequestIterator, DecryptRequestIterator, + EncryptRequestIterator, + InvokeMethodRequest, + TransactionalStateOperation, ) -from dapr.clients.grpc import conversation -from dapr.clients.grpc._jobs import Job from dapr.clients.grpc._response import ( BindingResponse, + BulkStateItem, + BulkStatesResponse, + ConfigurationResponse, + ConfigurationWatcher, DaprResponse, - GetSecretResponse, + DecryptResponse, + EncryptResponse, GetBulkSecretResponse, GetMetadataResponse, + GetSecretResponse, + GetWorkflowResponse, InvokeMethodResponse, - UnlockResponseStatus, - StateResponse, - BulkStatesResponse, - BulkStateItem, - 
ConfigurationResponse, QueryResponse, QueryResponseItem, RegisteredComponents, - ConfigurationWatcher, - TryLockResponse, - UnlockResponse, - GetWorkflowResponse, StartWorkflowResponse, - EncryptResponse, - DecryptResponse, + StateResponse, TopicEventResponse, + TryLockResponse, + UnlockResponse, + UnlockResponseStatus, ) +from dapr.clients.grpc._state import StateItem, StateOptions +from dapr.clients.grpc.interceptors import DaprClientInterceptor, DaprClientTimeoutInterceptor +from dapr.clients.grpc.subscription import StreamInactiveError, Subscription +from dapr.clients.health import DaprHealth +from dapr.clients.retry import RetryPolicy +from dapr.common.pubsub.subscription import StreamCancelledError +from dapr.conf import settings +from dapr.conf.helpers import GrpcEndpoint +from dapr.proto import api_service_v1, api_v1, common_v1 +from dapr.proto.runtime.v1.dapr_pb2 import UnsubscribeConfigurationResponse +from dapr.version import __version__ class DaprGrpcClient: @@ -162,7 +158,7 @@ def __init__( if not address: address = settings.DAPR_GRPC_ENDPOINT or ( - f'{settings.DAPR_RUNTIME_HOST}:' f'{settings.DAPR_GRPC_PORT}' + f'{settings.DAPR_RUNTIME_HOST}:{settings.DAPR_GRPC_PORT}' ) try: diff --git a/dapr/clients/grpc/conversation.py b/dapr/clients/grpc/conversation.py index 1da02dac2..d11c41979 100644 --- a/dapr/clients/grpc/conversation.py +++ b/dapr/clients/grpc/conversation.py @@ -12,6 +12,7 @@ See the License for the specific language governing permissions and limitations under the License. """ + from __future__ import annotations import asyncio @@ -400,13 +401,11 @@ def convert_llm_response_to_conversation_input( class ToolBackend(Protocol): """Interface for executors that knows how to execute a tool call.""" - def invoke(self, spec: ConversationToolsFunction, params: Params) -> Any: - ... + def invoke(self, spec: ConversationToolsFunction, params: Params) -> Any: ... 
async def ainvoke( self, spec: ConversationToolsFunction, params: Params, *, timeout: Union[float, None] = None - ) -> Any: - ... + ) -> Any: ... @dataclass diff --git a/dapr/clients/grpc/interceptors.py b/dapr/clients/grpc/interceptors.py index 15bde1857..a574fb8c6 100644 --- a/dapr/clients/grpc/interceptors.py +++ b/dapr/clients/grpc/interceptors.py @@ -1,7 +1,11 @@ from collections import namedtuple from typing import List, Tuple -from grpc import UnaryUnaryClientInterceptor, ClientCallDetails, StreamStreamClientInterceptor # type: ignore +from grpc import ( # type: ignore + ClientCallDetails, + StreamStreamClientInterceptor, + UnaryUnaryClientInterceptor, +) from dapr.conf import settings diff --git a/dapr/clients/grpc/subscription.py b/dapr/clients/grpc/subscription.py index 6dcfcb4dd..73cc047ac 100644 --- a/dapr/clients/grpc/subscription.py +++ b/dapr/clients/grpc/subscription.py @@ -1,16 +1,17 @@ -from grpc import RpcError, StatusCode, Call # type: ignore +import queue +import threading +from typing import Optional + +from grpc import Call, RpcError, StatusCode # type: ignore from dapr.clients.grpc._response import TopicEventResponse from dapr.clients.health import DaprHealth from dapr.common.pubsub.subscription import ( + StreamCancelledError, StreamInactiveError, SubscriptionMessage, - StreamCancelledError, ) from dapr.proto import api_v1, appcallback_v1 -import queue -import threading -from typing import Optional class Subscription: diff --git a/dapr/clients/health.py b/dapr/clients/health.py index 37c42a875..53d203b2f 100644 --- a/dapr/clients/health.py +++ b/dapr/clients/health.py @@ -17,7 +17,12 @@ import time from warnings import warn -from dapr.clients.http.conf import DAPR_API_TOKEN_HEADER, USER_AGENT_HEADER, DAPR_USER_AGENT +import time +import urllib.error +import urllib.request +from warnings import warn + +from dapr.clients.http.conf import DAPR_API_TOKEN_HEADER, DAPR_USER_AGENT, USER_AGENT_HEADER from dapr.clients.http.helpers import 
get_api_url from dapr.conf import settings diff --git a/dapr/clients/http/client.py b/dapr/clients/http/client.py index f6f95aa74..387a0d939 100644 --- a/dapr/clients/http/client.py +++ b/dapr/clients/http/client.py @@ -13,25 +13,25 @@ limitations under the License. """ -import aiohttp +from typing import TYPE_CHECKING, Callable, Dict, Mapping, Optional, Tuple, Union -from typing import Callable, Mapping, Dict, Optional, Union, Tuple, TYPE_CHECKING +import aiohttp from dapr.clients.health import DaprHealth from dapr.clients.http.conf import ( + CONTENT_TYPE_HEADER, DAPR_API_TOKEN_HEADER, - USER_AGENT_HEADER, DAPR_USER_AGENT, - CONTENT_TYPE_HEADER, + USER_AGENT_HEADER, ) from dapr.clients.retry import RetryPolicy if TYPE_CHECKING: from dapr.serializers import Serializer -from dapr.conf import settings from dapr.clients._constants import DEFAULT_JSON_CONTENT_TYPE from dapr.clients.exceptions import DaprHttpError, DaprInternalError +from dapr.conf import settings class DaprHttpClient: diff --git a/dapr/clients/http/dapr_actor_http_client.py b/dapr/clients/http/dapr_actor_http_client.py index 186fdbc1c..711153659 100644 --- a/dapr/clients/http/dapr_actor_http_client.py +++ b/dapr/clients/http/dapr_actor_http_client.py @@ -13,15 +13,15 @@ limitations under the License. 
""" -from typing import Callable, Dict, Optional, Union, TYPE_CHECKING +from typing import TYPE_CHECKING, Callable, Dict, Optional, Union from dapr.clients.http.helpers import get_api_url if TYPE_CHECKING: from dapr.serializers import Serializer -from dapr.clients.http.client import DaprHttpClient from dapr.clients.base import DaprActorClientBase +from dapr.clients.http.client import DaprHttpClient from dapr.clients.retry import RetryPolicy DAPR_REENTRANCY_ID_HEADER = 'Dapr-Reentrancy-Id' diff --git a/dapr/clients/http/dapr_invocation_http_client.py b/dapr/clients/http/dapr_invocation_http_client.py index df4e6d222..604c483c0 100644 --- a/dapr/clients/http/dapr_invocation_http_client.py +++ b/dapr/clients/http/dapr_invocation_http_client.py @@ -14,13 +14,13 @@ """ import asyncio - from typing import Callable, Dict, Optional, Union + from multidict import MultiDict -from dapr.clients.http.client import DaprHttpClient -from dapr.clients.grpc._helpers import MetadataTuple, GrpcMessage +from dapr.clients.grpc._helpers import GrpcMessage, MetadataTuple from dapr.clients.grpc._response import InvokeMethodResponse +from dapr.clients.http.client import DaprHttpClient from dapr.clients.http.conf import CONTENT_TYPE_HEADER from dapr.clients.http.helpers import get_api_url from dapr.clients.retry import RetryPolicy diff --git a/dapr/clients/retry.py b/dapr/clients/retry.py index 171c96fbd..e895e46f3 100644 --- a/dapr/clients/retry.py +++ b/dapr/clients/retry.py @@ -12,11 +12,12 @@ See the License for the specific language governing permissions and limitations under the License. 
""" + import asyncio -from typing import Optional, List, Callable +import time +from typing import Callable, List, Optional from grpc import RpcError, StatusCode # type: ignore -import time from dapr.conf import settings diff --git a/dapr/common/pubsub/subscription.py b/dapr/common/pubsub/subscription.py index 6f68e180d..eb22a48da 100644 --- a/dapr/common/pubsub/subscription.py +++ b/dapr/common/pubsub/subscription.py @@ -1,7 +1,9 @@ import json +from typing import Optional, Union + from google.protobuf.json_format import MessageToDict + from dapr.proto.runtime.v1.appcallback_pb2 import TopicEventRequest -from typing import Optional, Union class SubscriptionMessage: diff --git a/dapr/conf/helpers.py b/dapr/conf/helpers.py index ab1e494b2..d2d187628 100644 --- a/dapr/conf/helpers.py +++ b/dapr/conf/helpers.py @@ -1,5 +1,5 @@ +from urllib.parse import ParseResult, parse_qs, urlparse from warnings import warn -from urllib.parse import urlparse, parse_qs, ParseResult class URIParseConfig: @@ -174,7 +174,7 @@ def tls(self) -> bool: def _validate_path_and_query(self) -> None: if self._parsed_url.path: raise ValueError( - f'paths are not supported for gRPC endpoints:' f" '{self._parsed_url.path}'" + f"paths are not supported for gRPC endpoints: '{self._parsed_url.path}'" ) if self._parsed_url.query: query_dict = parse_qs(self._parsed_url.query) diff --git a/dapr/actor/py.typed b/dapr/py.typed similarity index 100% rename from dapr/actor/py.typed rename to dapr/py.typed diff --git a/dapr/serializers/base.py b/dapr/serializers/base.py index 5ff1d9e8b..1aa7dfc89 100644 --- a/dapr/serializers/base.py +++ b/dapr/serializers/base.py @@ -23,8 +23,7 @@ class Serializer(ABC): @abstractmethod def serialize( self, obj: object, custom_hook: Optional[Callable[[object], bytes]] = None - ) -> bytes: - ... + ) -> bytes: ... 
@abstractmethod def deserialize( @@ -32,5 +31,4 @@ def deserialize( data: bytes, data_type: Optional[Type] = object, custom_hook: Optional[Callable[[bytes], object]] = None, - ) -> Any: - ... + ) -> Any: ... diff --git a/dapr/serializers/json.py b/dapr/serializers/json.py index 4e9665187..59e1c194b 100644 --- a/dapr/serializers/json.py +++ b/dapr/serializers/json.py @@ -14,18 +14,18 @@ """ import base64 -import re import datetime import json - +import re from typing import Any, Callable, Optional, Type + from dateutil import parser from dapr.serializers.base import Serializer from dapr.serializers.util import ( + DAPR_DURATION_PARSER, convert_from_dapr_duration, convert_to_dapr_duration, - DAPR_DURATION_PARSER, ) diff --git a/dapr/version/version.py b/dapr/version/version.py index 8c6c12960..5c39bd99d 100644 --- a/dapr/version/version.py +++ b/dapr/version/version.py @@ -13,4 +13,4 @@ limitations under the License. """ -__version__ = '1.16.1rc1' +__version__ = '1.16.1rc2' diff --git a/daprdocs/README.md b/daprdocs/README.md deleted file mode 100644 index 5213ae214..000000000 --- a/daprdocs/README.md +++ /dev/null @@ -1,25 +0,0 @@ -# Dapr Python SDK documentation - -This page covers how the documentation is structured for the Dapr Python SDK. - -## Dapr Docs - -All Dapr documentation is hosted at [docs.dapr.io](https://docs.dapr.io), including the docs for the [Python SDK](https://docs.dapr.io/developing-applications/sdks/python/). Head over there if you want to read the docs. - -### Python SDK docs source - -Although the docs site code and content is in the [docs repo](https://github.com/dapr/docs), the Python SDK content and images are within the `content` and `static` directories, respectively. - -This allows separation of roles and expertise between maintainers, and makes it easy to find the docs files you are looking for. 
- -## Writing Python SDK docs - -To get up and running to write Python SDK docs, visit the [docs repo](https://github.com/dapr/docs) to initialize your environment. It will clone both the docs repo and this repo, so you can make changes and see it rendered within the site instantly, as well as commit and PR into this repo. - -Make sure to read the [docs contributing guide](https://docs.dapr.io/contributing/contributing-docs/) for information on style/semantics/etc. - -## Docs architecture - -The docs site is built on [Hugo](https://gohugo.io), which lives in the docs repo. This repo is setup as a git submodule so that when the repo is cloned and initialized, the python repo, along with the docs, are cloned as well. - -Then, in the Hugo configuration file, the `daprdocs/content` and `daprdocs/static` directories are redirected to the `daprdocs/developing-applications/sdks/python` and `static/python` directories, respectively. Thus, all the content within this repo is folded into the main docs site. \ No newline at end of file diff --git a/daprdocs/content/en/python-sdk-contributing/python-contributing.md b/daprdocs/content/en/python-sdk-contributing/python-contributing.md deleted file mode 100644 index fe22c10bd..000000000 --- a/daprdocs/content/en/python-sdk-contributing/python-contributing.md +++ /dev/null @@ -1,27 +0,0 @@ ---- -type: docs -title: "Contributing to the Python SDK" -linkTitle: "Python SDK" -weight: 3000 -description: Guidelines for contributing to the Dapr Python SDK ---- - -When contributing to the [Python SDK](https://github.com/dapr/python-sdk) the following rules and best-practices should be followed. - -## Examples - -The `examples` directory contains code samples for users to run to try out specific functionality of the various Python SDK packages and extensions. When writing new and updated samples keep in mind: - -- All examples should be runnable on Windows, Linux, and MacOS. 
While Python code is consistent among operating systems, any pre/post example commands should provide options through [tabpane]({{% ref "contributing-docs.md#tabbed-content" %}}) -- Contain steps to download/install any required pre-requisites. Someone coming in with a fresh OS install should be able to start on the example and complete it without an error. Links to external download pages are fine. - -## Docs - -The `daprdocs` directory contains the markdown files that are rendered into the [Dapr Docs](https://docs.dapr.io) website. When the documentation website is built this repo is cloned and configured so that its contents are rendered with the docs content. When writing docs keep in mind: - - - All rules in the [docs guide]({{% ref contributing-docs.md %}}) should be followed in addition to these. - - All files and directories should be prefixed with `python-` to ensure all file/directory names are globally unique across all Dapr documentation. - -## Github Dapr Bot Commands - -Checkout the [daprbot documentation](https://docs.dapr.io/contributing/daprbot/) for Github commands you can run in this repo for common tasks. For example, you can run the `/assign` (as a comment on an issue) to assign issues to a user or group of users. \ No newline at end of file diff --git a/daprdocs/content/en/python-sdk-docs/_index.md b/daprdocs/content/en/python-sdk-docs/_index.md deleted file mode 100644 index b8689eb99..000000000 --- a/daprdocs/content/en/python-sdk-docs/_index.md +++ /dev/null @@ -1,157 +0,0 @@ ---- -type: docs -title: "Dapr Python SDK" -linkTitle: "Python" -weight: 1000 -description: Python SDK packages for developing Dapr applications -no_list: true -cascade: - github_repo: https://github.com/dapr/python-sdk - github_subdir: daprdocs/content/en/python-sdk-docs - path_base_for_github_subdir: content/en/developing-applications/sdks/python/ - github_branch: master ---- - -Dapr offers a variety of subpackages to help with the development of Python applications. 
Using them you can create Python clients, servers, and virtual actors with Dapr. - -## Prerequisites - -- [Dapr CLI]({{% ref install-dapr-cli.md %}}) installed -- Initialized [Dapr environment]({{% ref install-dapr-selfhost.md %}}) -- [Python 3.9+](https://www.python.org/downloads/) installed - -## Installation - -To get started with the Python SDK, install the main Dapr Python SDK package. - -{{< tabpane text=true >}} - -{{% tab header="Stable" %}} - -```bash -pip install dapr -``` -{{% /tab %}} - -{{% tab header="Development" %}} - -> **Note:** The development package will contain features and behavior that will be compatible with the pre-release version of the Dapr runtime. Make sure to uninstall any stable versions of the Python SDK before installing the dapr-dev package. - -```bash -pip install dapr-dev -``` - -{{% /tab %}} - -{{< /tabpane >}} - - -## Available subpackages - -### SDK imports - -Python SDK imports are subpackages included with the main SDK install, but need to be imported when used. The most common imports provided by the Dapr Python SDK are: - -
-
-
-
Client
-

Write Python applications to interact with a Dapr sidecar and other Dapr applications, including stateful virtual actors in Python

- -
-
-
-
-
Actors
-

Create and interact with Dapr's Actor framework.

- -
-
-
-
-
Conversation
-

Use the Dapr Conversation API (Alpha) for LLM interactions, tools, and multi-turn flows.

- -
-
-
- -Learn more about _all_ of the [available Dapr Python SDK imports](https://github.com/dapr/python-sdk/tree/master/dapr). - -### SDK extensions - -SDK extensions mainly work as utilities for receiving pub/sub events, programatically creating pub/sub subscriptions, and handling input binding events. While you can acheive all of these tasks without an extension, using a Python SDK extension proves convenient. - -
-
-
-
gRPC
-

Create Dapr services with the gRPC server extension.

- -
-
-
-
-
FastAPI
-

Integrate with Dapr Python virtual actors and pub/sub using the Dapr FastAPI extension.

- -
-
-
-
-
Flask
-

Integrate with Dapr Python virtual actors using the Dapr Flask extension.

- -
-
-
-
-
Workflow
-

Author workflows that work with other Dapr APIs in Python.

- -
-
-
- -Learn more about [the Dapr Python SDK extensions](https://github.com/dapr/python-sdk/tree/master/ext). - -## Try it out - -Clone the Python SDK repo. - -```bash -git clone https://github.com/dapr/python-sdk.git -``` - -Walk through the Python quickstarts, tutorials, and examples to see Dapr in action: - -| SDK samples | Description | -| ----------- | ----------- | -| [Quickstarts]({{% ref quickstarts %}}) | Experience Dapr's API building blocks in just a few minutes using the Python SDK. | -| [SDK samples](https://github.com/dapr/python-sdk/tree/master/examples) | Clone the SDK repo to try out some examples and get started. | -| [Bindings tutorial](https://github.com/dapr/quickstarts/tree/master/tutorials/bindings) | See how Dapr Python SDK works alongside other Dapr SDKs to enable bindings. | -| [Distributed Calculator tutorial](https://github.com/dapr/quickstarts/tree/master/tutorials/distributed-calculator/python) | Use the Dapr Python SDK to handle method invocation and state persistent capabilities. | -| [Hello World tutorial](https://github.com/dapr/quickstarts/tree/master/tutorials/hello-world) | Learn how to get Dapr up and running locally on your machine with the Python SDK. | -| [Hello Kubernetes tutorial](https://github.com/dapr/quickstarts/tree/master/tutorials/hello-kubernetes) | Get up and running with the Dapr Python SDK in a Kubernetes cluster. | -| [Observability tutorial](https://github.com/dapr/quickstarts/tree/master/tutorials/observability) | Explore Dapr's metric collection, tracing, logging and health check capabilities using the Python SDK. | -| [Pub/sub tutorial](https://github.com/dapr/quickstarts/tree/master/tutorials/pub-sub) | See how Dapr Python SDK works alongside other Dapr SDKs to enable pub/sub applications. | - - -## More information - -
-
-
-
Serialization
-

Learn more about serialization in Dapr SDKs.

- -
-
-
-
-
PyPI
-

Python Package Index

- -
-
-
diff --git a/daprdocs/content/en/python-sdk-docs/conversation.md b/daprdocs/content/en/python-sdk-docs/conversation.md deleted file mode 100644 index db67a6c49..000000000 --- a/daprdocs/content/en/python-sdk-docs/conversation.md +++ /dev/null @@ -1,295 +0,0 @@ -title: "Conversation API (Python) – Recommended Usage" -linkTitle: "Conversation" -weight: 11000 -type: docs -description: Recommended patterns for using Dapr Conversation API in Python with and without tools, including multi‑turn flows and safety guidance. ---- - -The Dapr Conversation API is currently in alpha. This page presents the recommended, minimal patterns to use it effectively with the Python SDK: -- Plain requests (no tools) -- Requests with tools (functions as tools) -- Multi‑turn flows with tool execution -- Async variants -- Important safety notes for executing tool calls - -## Prerequisites - -- [Dapr CLI]({{% ref install-dapr-cli.md %}}) installed -- Initialized [Dapr environment]({{% ref install-dapr-selfhost.md %}}) -- [Python 3.9+](https://www.python.org/downloads/) installed -- [Dapr Python package]({{% ref "python#installation" %}}) installed -- A configured LLM component (for example, OpenAI or Azure OpenAI) in your Dapr environment - -For full, end‑to‑end flows and provider setup, see: -- The SDK examples under Conversation: - - [TOOL-CALL-QUICKSTART.md](https://github.com/dapr/python-sdk/blob/main/examples/conversation/TOOL-CALL-QUICKSTART.md) - - [real_llm_providers_example.py](https://github.com/dapr/python-sdk/blob/main/examples/conversation/real_llm_providers_example.py) - -## Plain conversation (no tools) - -```python -from dapr.clients import DaprClient -from dapr.clients.grpc import conversation - -# Build a single‑turn Alpha2 input -user_msg = conversation.create_user_message("What's Dapr?") -alpha2_input = conversation.ConversationInputAlpha2(messages=[user_msg]) - -with DaprClient() as client: - resp = client.converse_alpha2( - name="echo", # replace with your LLM component 
name - inputs=[alpha2_input], - temperature=1, - ) - - for msg in resp.to_assistant_messages(): - if msg.of_assistant.content: - print(msg.of_assistant.content[0].text) -``` - -Key points: -- Use `conversation.create_user_message` to build messages. -- Wrap into `ConversationInputAlpha2(messages=[...])` and pass to `converse_alpha2`. -- Use `response.to_assistant_messages()` to iterate assistant outputs. - -## Tools: decorator‑based (recommended) - -Decorator-based tools offer a clean, ergonomic approach. Define a function with clear type hints and detail docstring, this is important for the LLM to understand how or when to invoke the tool; -decorate it with `@conversation.tool`. Registered tools can be passed to the LLM and invoked via tool calls. - -```python -from dapr.clients import DaprClient -from dapr.clients.grpc import conversation - -@conversation.tool -def get_weather(location: str, unit: str = 'fahrenheit') -> str: - """Get current weather for a location.""" - # Replace with a real implementation - return f"Weather in {location} (unit={unit})" - -user_msg = conversation.create_user_message("What's the weather in Paris?") -alpha2_input = conversation.ConversationInputAlpha2(messages=[user_msg]) - -with DaprClient() as client: - response = client.converse_alpha2( - name="openai", # your LLM component - inputs=[alpha2_input], - tools=conversation.get_registered_tools(), # tools registered by @conversation.tool - tool_choice='auto', - temperature=1, - ) - - # Inspect assistant messages, including any tool calls - for msg in response.to_assistant_messages(): - if msg.of_assistant.tool_calls: - for tc in msg.of_assistant.tool_calls: - print(f"Tool call: {tc.function.name} args={tc.function.arguments}") - elif msg.of_assistant.content: - print(msg.of_assistant.content[0].text) -``` - -Notes: -- Use `conversation.get_registered_tools()` to collect all `@conversation.tool` decorated functions. -- The binder validates/coerces params using your function signature. 
Keep annotations accurate. - -## Minimal multi‑turn with tools - -This is the go‑to loop for tool‑using conversations: - -{{% alert title="Warning" color="warning" %}} -Do not blindly auto‑execute tool calls returned by the LLM unless you trust all tools registered. Treat tool names and arguments as untrusted input. -- Validate inputs and enforce guardrails (allow‑listed tools, argument schemas, side‑effect constraints). -- For async or I/O‑bound tools, prefer `conversation.execute_registered_tool_async(..., timeout=...)` and set conservative timeouts. -- Consider adding a policy layer or a user confirmation step before execution in sensitive contexts. -- Log and monitor tool usage; fail closed when validation fails. -{{% /alert %}} - -```python -from dapr.clients import DaprClient -from dapr.clients.grpc import conversation - -@conversation.tool -def get_weather(location: str, unit: str = 'fahrenheit') -> str: - return f"Weather in {location} (unit={unit})" - -history: list[conversation.ConversationMessage] = [ - conversation.create_user_message("What's the weather in San Francisco?")] - -with DaprClient() as client: - # Turn 1 - resp1 = client.converse_alpha2( - name="openai", - inputs=[conversation.ConversationInputAlpha2(messages=history)], - tools=conversation.get_registered_tools(), - tool_choice='auto', - temperature=1, - ) - - # Append assistant messages; execute tool calls; append tool results - for msg in resp1.to_assistant_messages(): - history.append(msg) - for tc in msg.of_assistant.tool_calls: - # IMPORTANT: validate inputs and enforce guardrails in production - tool_output = conversation.execute_registered_tool( - tc.function.name, tc.function.arguments - ) - history.append( - conversation.create_tool_message( - tool_id=tc.id, name=tc.function.name, content=str(tool_output) - ) - ) - - # Turn 2 (LLM sees tool result) - history.append(conversation.create_user_message("Should I bring an umbrella?")) - resp2 = client.converse_alpha2( - name="openai", - 
inputs=[conversation.ConversationInputAlpha2(messages=history)], - tools=conversation.get_registered_tools(), - temperature=1, - ) - - for msg in resp2.to_assistant_messages(): - history.append(msg) - if not msg.of_assistant.tool_calls and msg.of_assistant.content: - print(msg.of_assistant.content[0].text) -``` - -Tips: -- Always append assistant messages to history. -- Execute each tool call (with validation) and append a tool message with the tool output. -- The next turn includes these tool results so the LLM can reason with them. - -## Functions as tools: alternatives - -When decorators aren’t practical, two options exist. - -A) Automatic schema from a typed function: - -```python -from enum import Enum -from dapr.clients.grpc import conversation - -class Units(Enum): - CELSIUS = 'celsius' - FAHRENHEIT = 'fahrenheit' - -def get_weather(location: str, unit: Units = Units.FAHRENHEIT) -> str: - return f"Weather in {location}" - -fn = conversation.ConversationToolsFunction.from_function(get_weather) -weather_tool = conversation.ConversationTools(function=fn) -``` - -B) Manual JSON Schema (fallback): - -```python -from dapr.clients.grpc import conversation - -fn = conversation.ConversationToolsFunction( - name='get_weather', - description='Get current weather', - parameters={ - 'type': 'object', - 'properties': { - 'location': {'type': 'string'}, - 'unit': {'type': 'string', 'enum': ['celsius', 'fahrenheit']}, - }, - 'required': ['location'], - }, -) -weather_tool = conversation.ConversationTools(function=fn) -``` - -## Async variant - -Use the asynchronous client and async tool execution helpers as needed. 
- -```python -import asyncio -from dapr.aio.clients import DaprClient as AsyncDaprClient -from dapr.clients.grpc import conversation - -@conversation.tool -def get_time() -> str: - return '2025-01-01T12:00:00Z' - -async def main(): - async with AsyncDaprClient() as client: - msg = conversation.create_user_message('What time is it?') - inp = conversation.ConversationInputAlpha2(messages=[msg]) - resp = await client.converse_alpha2( - name='openai', inputs=[inp], tools=conversation.get_registered_tools() - ) - for m in resp.to_assistant_messages(): - if m.of_assistant.content: - print(m.of_assistant.content[0].text) - -asyncio.run(main()) -``` - -If you need to execute tools asynchronously (e.g., network I/O), implement async functions and use `conversation.execute_registered_tool_async` with timeouts. - -## Safety and validation (must‑read) - -An LLM may suggest tool calls. Treat all model‑provided parameters as untrusted input. - -Recommendations: -- Register only trusted functions as tools. Prefer the `@conversation.tool` decorator for clarity and automatic schema generation. -- Use precise type annotations and docstrings. The SDK converts function signatures to JSON schema and binds parameters with type coercion and rejection of unexpected/invalid fields. -- Add guardrails for tools that can cause side effects (filesystem, network, subprocess). Consider allow‑lists, sandboxing, and limits. -- Validate arguments before execution. For example, sanitize file paths or restrict URLs/domains. -- Consider timeouts and concurrency controls. For async tools, pass a timeout to `execute_registered_tool_async(..., timeout=...)`. -- Log and monitor tool usage. Fail closed: if validation fails, avoid executing the tool and inform the user safely. - -See also inline notes in `dapr/clients/grpc/conversation.py` (e.g., `tool()`, `ConversationTools`, `execute_registered_tool`) for parameter binding and error handling details. 
- - -## Key helper methods (quick reference) - -This section summarizes helper utilities available in dapr.clients.grpc.conversation used throughout the examples. - -- create_user_message(text: str) -> ConversationMessage - - Builds a user role message for Alpha2. Use in history lists. - - Example: `history.append(conversation.create_user_message("Hello"))` - -- create_system_message(text: str) -> ConversationMessage - - Builds a system message to steer the assistant’s behavior. - - Example: `history = [conversation.create_system_message("You are a concise assistant.")]` - -- create_assistant_message(text: str) -> ConversationMessage - - Useful for injecting assistant text in tests or controlled flows. - -- create_tool_message(tool_id: str, name: str, content: Any) -> ConversationMessage - - Converts a tool’s output into a tool message the LLM can read next turn. - - content can be any object; it is stringified safely by the SDK. - - Example: `history.append(conversation.create_tool_message(tool_id=tc.id, name=tc.function.name, content=conversation.execute_registered_tool(tc.function.name, tc.function.arguments)))` - -- get_registered_tools() -> list[ConversationTools] - - Returns all tools currently registered in the in-process registry. - - Includes tools created via: - - @conversation.tool decorator (auto-registered by default), and - - ConversationToolsFunction.from_function with register=True (default). - - Pass this list in converse_alpha2(..., tools=...). - -- register_tool(name: str, t: ConversationTools) / unregister_tool(name: str) - - Manually manage the tool registry (e.g., advanced scenarios, tests, cleanup). - - Names must be unique; unregister to avoid collisions in long-lived processes. - -- execute_registered_tool(name: str, params: Mapping|Sequence|str|None) -> Any - - Synchronously executes a registered tool by name. - - params accepts kwargs (mapping), args (sequence), JSON string, or None. 
If a JSON string is provided (as commonly returned by LLMs), it is parsed for you. - - Parameters are validated and coerced against the function signature/schema; unexpected or invalid fields raise errors. - - Security: treat params as untrusted; add guardrails for side effects. - -- execute_registered_tool_async(name: str, params: Mapping|Sequence|str|None, *, timeout: float|None=None) -> Any - - Async counterpart. Supports timeouts, which are recommended for I/O-bound tools. - - Prefer this for async tools or when using the aio client. - -- ConversationToolsFunction.from_function(func: Callable, register: bool = True) -> ConversationToolsFunction - - Derives a JSON schema from a typed Python function (annotations + optional docstring) and optionally registers a tool. - - Typical usage: `spec = conversation.ConversationToolsFunction.from_function(my_func)`; then either rely on auto-registration or wrap with `ConversationTools(function=spec)` and call `register_tool(spec.name, tool)` or pass `[tool]` directly to `tools=`. - -- ConversationResponseAlpha2.to_assistant_messages() -> list[ConversationMessage] - - Convenience to transform the response outputs into assistant ConversationMessage objects you can append to history directly (including tool_calls when present). - -Tip: The @conversation.tool decorator is the easiest way to create a tool. It auto-generates the schema from your function, allows an optional namespace/name override, and auto-registers the tool (you can set register=False to defer registration). 
diff --git a/daprdocs/content/en/python-sdk-docs/python-actor.md b/daprdocs/content/en/python-sdk-docs/python-actor.md deleted file mode 100644 index 22360afd0..000000000 --- a/daprdocs/content/en/python-sdk-docs/python-actor.md +++ /dev/null @@ -1,130 +0,0 @@ ---- -type: docs -title: "Getting started with the Dapr actor Python SDK" -linkTitle: "Actor" -weight: 20000 -description: How to get up and running with the Dapr Python SDK ---- - -The Dapr actor package allows you to interact with Dapr virtual actors from a Python application. - -## Pre-requisites - -- [Dapr CLI]({{% ref install-dapr-cli.md %}}) installed -- Initialized [Dapr environment]({{% ref install-dapr-selfhost.md %}}) -- [Python 3.9+](https://www.python.org/downloads/) installed -- [Dapr Python package]({{% ref "python#installation" %}}) installed - -## Actor interface - -The interface defines the actor contract that is shared between the actor implementation and the clients calling the actor. Because a client may depend on it, it typically makes sense to define it in an assembly that is separate from the actor implementation. - -```python -from dapr.actor import ActorInterface, actormethod - -class DemoActorInterface(ActorInterface): - @actormethod(name="GetMyData") - async def get_my_data(self) -> object: - ... -``` - -## Actor services - -An actor service hosts the virtual actor. It is implemented a class that derives from the base type `Actor` and implements the interfaces defined in the actor interface. - -Actors can be created using one of the Dapr actor extensions: - - [FastAPI actor extension]({{% ref python-fastapi.md %}}) - - [Flask actor extension]({{% ref python-flask.md %}}) - -## Actor client - -An actor client contains the implementation of the actor client which calls the actor methods defined in the actor interface. 
- -```python -import asyncio - -from dapr.actor import ActorProxy, ActorId -from demo_actor_interface import DemoActorInterface - -async def main(): - # Create proxy client - proxy = ActorProxy.create('DemoActor', ActorId('1'), DemoActorInterface) - - # Call method on client - resp = await proxy.GetMyData() -``` - -## Sample - -Visit [this page](https://github.com/dapr/python-sdk/tree/main/examples/demo_actor) for a runnable actor sample. - - -## Mock Actor Testing - -The Dapr Python SDK provides the ability to create mock actors to unit test your actor methods and see how they interact with the actor state. - -### Sample Usage - - -``` -from dapr.actor.runtime.mock_actor import create_mock_actor - -class MyActor(Actor, MyActorInterface): - async def save_state(self, data) -> None: - await self._state_manager.set_state('mystate', data) - await self._state_manager.save_state() - -mock_actor = create_mock_actor(MyActor, "id") - -await mock_actor.save_state(5) -assert mockactor._state_manager._mock_state['mystate'] == 5 #True -``` -Mock actors are created by passing your actor class and an actor ID (a string) to the create_mock_actor function. This function returns an instance of the actor with many internal methods overridden. Instead of interacting with Dapr for tasks like saving state or managing timers, the mock actor uses in-memory state to simulate these behaviors. - -This state can be accessed through the following variables: - -**IMPORTANT NOTE: Due to type hinting issues as discussed further down, these variables will not be visible to type hinters/linters/etc, who will think they are invalid variables. You will need to use them with #type: ignore in order to satisfy any such systems.** - -- **_state_manager._mock_state()** -A `[str, object]` dict where all the actor state is stored. Any variable saved via `_state_manager.save_state(key, value)`, or any other statemanager method is stored in the dict as that key, value pair. 
Any value loaded via `try_get_state` or any other statemanager method is taken from this dict. - -- **_state_manager._mock_timers()** -A `[str, ActorTimerData]` dict which holds the active actor timers. Any actor method which would add or remove a timer adds or pops the appropriate `ActorTimerData` object from this dict. - -- **_state_manager._mock_reminders()** -A [str, ActorReminderData] dict which holds the active actor reminders. Any actor method which would add or remove a timer adds or pops the appropriate ActorReminderData object from this dict. - -**Note: The timers and reminders will never actually trigger. The dictionaries exist only so methods that should add or remove timers/reminders can be tested. If you need to test the callbacks they should activate, you should call them directly with the appropriate values:** -``` -result = await mock_actor.recieve_reminder(name, state, due_time, period, _ttl) -# Test the result directly or test for side effects (like changing state) by querying `_state_manager._mock_state` -``` - -### Usage and Limitations - -**To allow for more fine-grained control, the `_on_activate` method will not be called automatically the way it is when Dapr initializes a new Actor instance. You should call it manually as needed as part of your tests.** - -**A current limitation of the mock actor system is that it does not call the `_on_pre_actor_method` and `_on_post_actor_method` methods. You can always call these methods manually as part of a test.** - -The `__init__`, `register_timer`, `unregister_timer`, `register_reminder`, `unregister_reminder` methods are all overwritten by the MockActor class that gets applied as a mixin via `create_mock_actor`. If your actor itself overwrites these methods, those modifications will themselves be overwritten and the actor will likely not behave as you expect. 
- -*note: `__init__` is a special case where you are expected to define it as* -``` - def __init__(self, ctx, actor_id): - super().__init__(ctx, actor_id) -``` -*Mock actors work fine with this, but if you have added any extra logic into `__init__`, it will be overwritten. It is worth noting that the correct way to apply logic on initialization is via `_on_activate` (which can also be safely used with mock actors) instead of `__init__`.* - -*If you have an actor which does override default Dapr actor methods, you can create a custom subclass of the MockActor class (from MockActor.py) which implements whatever custom logic you have along with interacting with `_mock_state`, `_mock_timers`, and `_mock_reminders` as normal, and then applying that custom class as a mixin via a `create_mock_actor` function you define yourself.* - -The actor `_runtime_ctx` variable is set to None. All the normal actor methods have been overwritten such as to not call it, but if your code itself interacts directly with `_runtime_ctx`, tests may fail. - -The actor _state_manager is overwritten with an instance of `MockStateManager`. This has all the same methods and functionality of the base `ActorStateManager`, except for using the various `_mock` variables for storing data instead of the `_runtime_ctx`. If your code implements its own custom state manager it will be overwritten and tests will likely fail. - -### Type Hinting - -Because of Python's lack of a unified method for type hinting type intersections (see: [python/typing #213](https://github.com/python/typing/issues/213)), type hinting unfortunately doesn't work with Mock Actors. The return type is type hinted as "instance of Actor subclass T" when it should really be type hinted as "instance of MockActor subclass T" or "instance of type intersection `[Actor subclass T, MockActor]`" (where, it is worth noting, `MockActor` is itself a subclass of `Actor`). 
- -This means that, for example, if you hover over `mockactor._state_manager` in a code editor, it will come up as an instance of ActorStateManager (instead of MockStateManager), and various IDE helper functions (like VSCode's `Go to Definition`, which will bring you to the definition of ActorStateManager instead of MockStateManager) won't work properly. - -For now, this issue is unfixable, so it's merely something to be noted because of the confusion it might cause. If in the future it becomes possible to accurately type hint cases like this feel free to open an issue about implementing it. \ No newline at end of file diff --git a/daprdocs/content/en/python-sdk-docs/python-client.md b/daprdocs/content/en/python-sdk-docs/python-client.md deleted file mode 100644 index f03a6a74c..000000000 --- a/daprdocs/content/en/python-sdk-docs/python-client.md +++ /dev/null @@ -1,601 +0,0 @@ ---- -type: docs -title: "Getting started with the Dapr client Python SDK" -linkTitle: "Client" -weight: 10000 -description: How to get up and running with the Dapr Python SDK ---- - -The Dapr client package allows you to interact with other Dapr applications from a Python application. - -{{% alert title="Note" color="primary" %}} - If you haven't already, [try out one of the quickstarts]({{% ref quickstarts %}}) for a quick walk-through on how to use the Dapr Python SDK with an API building block. - -{{% /alert %}} - -## Prerequisites - -[Install the Dapr Python package]({{% ref "python#installation" %}}) before getting started. - -## Import the client package - -The `dapr` package contains the `DaprClient`, which is used to create and use a client. - -```python -from dapr.clients import DaprClient -``` - -## Initialising the client -You can initialise a Dapr client in multiple ways: - -#### Default values: -When you initialise the client without any parameters it will use the default values for a Dapr -sidecar instance (`127.0.0.1:50001`). 
-```python -from dapr.clients import DaprClient - -with DaprClient() as d: - # use the client -``` - -#### Specifying an endpoint on initialisation: -When passed as an argument in the constructor, the gRPC endpoint takes precedence over any -configuration or environment variable. - -```python -from dapr.clients import DaprClient - -with DaprClient("mydomain:50051?tls=true") as d: - # use the client -``` - -#### Configuration options: - -##### Dapr Sidecar Endpoints -You can use the standardised `DAPR_GRPC_ENDPOINT` environment variable to -specify the gRPC endpoint. When this variable is set, the client can be initialised -without any arguments: - -```bash -export DAPR_GRPC_ENDPOINT="mydomain:50051?tls=true" -``` -```python -from dapr.clients import DaprClient - -with DaprClient() as d: - # the client will use the endpoint specified in the environment variables -``` - -The legacy environment variables `DAPR_RUNTIME_HOST`, `DAPR_HTTP_PORT` and `DAPR_GRPC_PORT` are -also supported, but `DAPR_GRPC_ENDPOINT` takes precedence. - -##### Dapr API Token -If your Dapr instance is configured to require the `DAPR_API_TOKEN` environment variable, you can -set it in the environment and the client will use it automatically. -You can read more about Dapr API token authentication [here](https://docs.dapr.io/operations/security/api-token/). - -##### Health timeout -On client initialisation, a health check is performed against the Dapr sidecar (`/healthz/outbound`). -The client will wait for the sidecar to be up and running before proceeding. - -The default healthcheck timeout is 60 seconds, but it can be overridden by setting the `DAPR_HEALTH_TIMEOUT` -environment variable. - -##### Retries and timeout - -The Dapr client can retry a request if a specific error code is received from the sidecar. This is -configurable through the `DAPR_API_MAX_RETRIES` environment variable and is picked up automatically, -not requiring any code changes. 
-The default value for `DAPR_API_MAX_RETRIES` is `0`, which means no retries will be made. - -You can fine-tune more retry parameters by creating a `dapr.clients.retry.RetryPolicy` object and -passing it to the DaprClient constructor: - -```python -from dapr.clients.retry import RetryPolicy - -retry = RetryPolicy( - max_attempts=5, - initial_backoff=1, - max_backoff=20, - backoff_multiplier=1.5, - retryable_http_status_codes=[408, 429, 500, 502, 503, 504], - retryable_grpc_status_codes=[StatusCode.UNAVAILABLE, StatusCode.DEADLINE_EXCEEDED, ] -) - -with DaprClient(retry_policy=retry) as d: - ... -``` - -or for actors: -```python -factory = ActorProxyFactory(retry_policy=RetryPolicy(max_attempts=3)) -proxy = ActorProxy.create('DemoActor', ActorId('1'), DemoActorInterface, factory) -``` - -**Timeout** can be set for all calls through the environment variable `DAPR_API_TIMEOUT_SECONDS`. The default value is 60 seconds. - -> Note: You can control timeouts on service invocation separately, by passing a `timeout` parameter to the `invoke_method` method. - -## Error handling -Initially, errors in Dapr followed the [Standard gRPC error model](https://grpc.io/docs/guides/error/#standard-error-model). However, to provide more detailed and informative error messages, in version 1.13 an enhanced error model has been introduced which aligns with the gRPC [Richer error model](https://grpc.io/docs/guides/error/#richer-error-model). In response, the Python SDK implemented `DaprGrpcError`, a custom exception class designed to improve the developer experience. -It's important to note that the transition to using `DaprGrpcError` for all gRPC status exceptions is a work in progress. As of now, not every API call in the SDK has been updated to leverage this custom exception. We are actively working on this enhancement and welcome contributions from the community. 
- -Example of handling `DaprGrpcError` exceptions when using the Dapr python-SDK: - -```python -try: - d.save_state(store_name=storeName, key=key, value=value) -except DaprGrpcError as err: - print(f'Status code: {err.code()}') - print(f"Message: {err.message()}") - print(f"Error code: {err.error_code()}") - print(f"Error info(reason): {err.error_info.reason}") - print(f"Resource info (resource type): {err.resource_info.resource_type}") - print(f"Resource info (resource name): {err.resource_info.resource_name}") - print(f"Bad request (field): {err.bad_request.field_violations[0].field}") - print(f"Bad request (description): {err.bad_request.field_violations[0].description}") -``` - - -## Building blocks - -The Python SDK allows you to interface with all of the [Dapr building blocks]({{% ref building-blocks %}}). - -### Invoke a service - -The Dapr Python SDK provides a simple API for invoking services via either HTTP or gRPC (deprecated). The protocol can be selected by setting the `DAPR_API_METHOD_INVOCATION_PROTOCOL` environment variable, defaulting to HTTP when unset. GRPC service invocation in Dapr is deprecated and GRPC proxying is recommended as an alternative. - -```python -from dapr.clients import DaprClient - -with DaprClient() as d: - # invoke a method (gRPC or HTTP GET) - resp = d.invoke_method('service-to-invoke', 'method-to-invoke', data='{"message":"Hello World"}') - - # for other HTTP verbs the verb must be specified - # invoke a 'POST' method (HTTP only) - resp = d.invoke_method('service-to-invoke', 'method-to-invoke', data='{"id":"100", "FirstName":"Value", "LastName":"Value"}', http_verb='post') -``` - -The base endpoint for HTTP api calls is specified in the `DAPR_HTTP_ENDPOINT` environment variable. -If this variable is not set, the endpoint value is derived from the `DAPR_RUNTIME_HOST` and `DAPR_HTTP_PORT` variables, whose default values are `127.0.0.1` and `3500` accordingly. 
- -The base endpoint for gRPC calls is the one used for the client initialisation ([explained above](#initialising-the-client)). - - -- For a full guide on service invocation visit [How-To: Invoke a service]({{% ref howto-invoke-discover-services.md %}}). -- Visit [Python SDK examples](https://github.com/dapr/python-sdk/tree/master/examples/invoke-simple) for code samples and instructions to try out service invocation. - -### Save & get application state - -```python -from dapr.clients import DaprClient - -with DaprClient() as d: - # Save state - d.save_state(store_name="statestore", key="key1", value="value1") - - # Get state - data = d.get_state(store_name="statestore", key="key1").data - - # Delete state - d.delete_state(store_name="statestore", key="key1") -``` - -- For a full list of state operations visit [How-To: Get & save state]({{% ref howto-get-save-state.md %}}). -- Visit [Python SDK examples](https://github.com/dapr/python-sdk/tree/master/examples/state_store) for code samples and instructions to try out state management. - -### Query application state (Alpha) - -```python - from dapr import DaprClient - - query = ''' - { - "filter": { - "EQ": { "state": "CA" } - }, - "sort": [ - { - "key": "person.id", - "order": "DESC" - } - ] - } - ''' - - with DaprClient() as d: - resp = d.query_state( - store_name='state_store', - query=query, - states_metadata={"metakey": "metavalue"}, # optional - ) -``` - -- For a full list of state store query options visit [How-To: Query state]({{% ref howto-state-query-api.md %}}). -- Visit [Python SDK examples](https://github.com/dapr/python-sdk/tree/master/examples/state_store_query) for code samples and instructions to try out state store querying. 
- -### Publish & subscribe - -#### Publish messages - -```python -from dapr.clients import DaprClient - -with DaprClient() as d: - resp = d.publish_event(pubsub_name='pubsub', topic_name='TOPIC_A', data='{"message":"Hello World"}') -``` - - -Send [CloudEvents](https://cloudevents.io/) messages with a json payload: -```python -from dapr.clients import DaprClient -import json - -with DaprClient() as d: - cloud_event = { - 'specversion': '1.0', - 'type': 'com.example.event', - 'source': 'my-service', - 'id': 'myid', - 'data': {'id': 1, 'message': 'hello world'}, - 'datacontenttype': 'application/json', - } - - # Set the data content type to 'application/cloudevents+json' - resp = d.publish_event( - pubsub_name='pubsub', - topic_name='TOPIC_CE', - data=json.dumps(cloud_event), - data_content_type='application/cloudevents+json', - ) -``` - -Publish [CloudEvents](https://cloudevents.io/) messages with plain text payload: -```python -from dapr.clients import DaprClient -import json - -with DaprClient() as d: - cloud_event = { - 'specversion': '1.0', - 'type': 'com.example.event', - 'source': 'my-service', - 'id': "myid", - 'data': 'hello world', - 'datacontenttype': 'text/plain', - } - - # Set the data content type to 'application/cloudevents+json' - resp = d.publish_event( - pubsub_name='pubsub', - topic_name='TOPIC_CE', - data=json.dumps(cloud_event), - data_content_type='application/cloudevents+json', - ) -``` - - -#### Subscribe to messages - -```python -from cloudevents.sdk.event import v1 -from dapr.ext.grpc import App -import json - -app = App() - -# Default subscription for a topic -@app.subscribe(pubsub_name='pubsub', topic='TOPIC_A') -def mytopic(event: v1.Event) -> None: - data = json.loads(event.Data()) - print(f'Received: id={data["id"]}, message="{data ["message"]}"' - ' content_type="{event.content_type}"',flush=True) - -# Specific handler using Pub/Sub routing -@app.subscribe(pubsub_name='pubsub', topic='TOPIC_A', - rule=Rule("event.type == \"important\"", 
1)) -def mytopic_important(event: v1.Event) -> None: - data = json.loads(event.Data()) - print(f'Received: id={data["id"]}, message="{data ["message"]}"' - ' content_type="{event.content_type}"',flush=True) -``` - -- For more information about pub/sub, visit [How-To: Publish & subscribe]({{% ref howto-publish-subscribe.md %}}). -- Visit [Python SDK examples](https://github.com/dapr/python-sdk/tree/master/examples/pubsub-simple) for code samples and instructions to try out pub/sub. - -#### Streaming message subscription - -You can create a streaming subscription to a PubSub topic using either the `subscribe` -or `subscribe_handler` methods. - -The `subscribe` method returns an iterable `Subscription` object, which allows you to pull messages from the -stream by using a `for` loop (ex. `for message in subscription`) or by -calling the `next_message` method. This will block on the main thread while waiting for messages. -When done, you should call the close method to terminate the -subscription and stop receiving messages. - -The `subscribe_with_handler` method accepts a callback function that is executed for each message -received from the stream. -It runs in a separate thread, so it doesn't block the main thread. The callback should return a -`TopicEventResponse` (ex. `TopicEventResponse('success')`), indicating whether the message was -processed successfully, should be retried, or should be discarded. The method will automatically -manage message acknowledgements based on the returned status. The call to `subscribe_with_handler` -method returns a close function, which should be called to terminate the subscription when you're -done. 
- -Here's an example of using the `subscribe` method: - -```python -import time - -from dapr.clients import DaprClient -from dapr.clients.grpc.subscription import StreamInactiveError, StreamCancelledError - -counter = 0 - - -def process_message(message): - global counter - counter += 1 - # Process the message here - print(f'Processing message: {message.data()} from {message.topic()}...') - return 'success' - - -def main(): - with DaprClient() as client: - global counter - - subscription = client.subscribe( - pubsub_name='pubsub', topic='TOPIC_A', dead_letter_topic='TOPIC_A_DEAD' - ) - - try: - for message in subscription: - if message is None: - print('No message received. The stream might have been cancelled.') - continue - - try: - response_status = process_message(message) - - if response_status == 'success': - subscription.respond_success(message) - elif response_status == 'retry': - subscription.respond_retry(message) - elif response_status == 'drop': - subscription.respond_drop(message) - - if counter >= 5: - break - except StreamInactiveError: - print('Stream is inactive. 
Retrying...') - time.sleep(1) - continue - except StreamCancelledError: - print('Stream was cancelled') - break - except Exception as e: - print(f'Error occurred during message processing: {e}') - - finally: - print('Closing subscription...') - subscription.close() - - -if __name__ == '__main__': - main() -``` - -And here's an example of using the `subscribe_with_handler` method: - -```python -import time - -from dapr.clients import DaprClient -from dapr.clients.grpc._response import TopicEventResponse - -counter = 0 - - -def process_message(message): - # Process the message here - global counter - counter += 1 - print(f'Processing message: {message.data()} from {message.topic()}...') - return TopicEventResponse('success') - - -def main(): - with (DaprClient() as client): - # This will start a new thread that will listen for messages - # and process them in the `process_message` function - close_fn = client.subscribe_with_handler( - pubsub_name='pubsub', topic='TOPIC_A', handler_fn=process_message, - dead_letter_topic='TOPIC_A_DEAD' - ) - - while counter < 5: - time.sleep(1) - - print("Closing subscription...") - close_fn() - - -if __name__ == '__main__': - main() -``` - -- For more information about pub/sub, visit [How-To: Publish & subscribe]({{% ref howto-publish-subscribe.md %}}). -- Visit [Python SDK examples](https://github.com/dapr/python-sdk/tree/main/examples/pubsub-simple) for code samples and instructions to try out streaming pub/sub. - -### Conversation (Alpha) - -{{% alert title="Note" color="primary" %}} -The Dapr Conversation API is currently in alpha. -{{% /alert %}} - -Since version 1.15 Dapr offers developers the capability to securely and reliably interact with Large Language Models (LLM) through the [Conversation API]({{% ref conversation-overview.md %}}). 
- -```python -from dapr.clients import DaprClient -from dapr.clients.grpc.conversation import ConversationInput - -with DaprClient() as d: - inputs = [ - ConversationInput(content="What's Dapr?", role='user', scrub_pii=True), - ConversationInput(content='Give a brief overview.', role='user', scrub_pii=True), - ] - - metadata = { - 'model': 'foo', - 'key': 'authKey', - 'cacheTTL': '10m', - } - - response = d.converse_alpha1( - name='echo', inputs=inputs, temperature=0.7, context_id='chat-123', metadata=metadata - ) - - for output in response.outputs: - print(f'Result: {output.result}') -``` - -### Interact with output bindings - -```python -from dapr.clients import DaprClient - -with DaprClient() as d: - resp = d.invoke_binding(binding_name='kafkaBinding', operation='create', data='{"message":"Hello World"}') -``` - -- For a full guide on output bindings visit [How-To: Use bindings]({{% ref howto-bindings.md %}}). -- Visit [Python SDK examples](https://github.com/dapr/python-sdk/tree/main/examples/invoke-binding) for code samples and instructions to try out output bindings. - -### Retrieve secrets - -```python -from dapr.clients import DaprClient - -with DaprClient() as d: - resp = d.get_secret(store_name='localsecretstore', key='secretKey') -``` - -- For a full guide on secrets visit [How-To: Retrieve secrets]({{% ref howto-secrets.md %}}). 
-- Visit [Python SDK examples](https://github.com/dapr/python-sdk/tree/master/examples/secret_store) for code samples and instructions to try out retrieving secrets - -### Configuration - -#### Get configuration - -```python -from dapr.clients import DaprClient - -with DaprClient() as d: - # Get Configuration - configuration = d.get_configuration(store_name='configurationstore', keys=['orderId'], config_metadata={}) -``` - -#### Subscribe to configuration - -```python -import asyncio -from time import sleep -from dapr.clients import DaprClient - -async def executeConfiguration(): - with DaprClient() as d: - storeName = 'configurationstore' - - key = 'orderId' - - # Wait for sidecar to be up within 20 seconds. - d.wait(20) - - # Subscribe to configuration by key. - configuration = await d.subscribe_configuration(store_name=storeName, keys=[key], config_metadata={}) - while True: - if configuration != None: - items = configuration.get_items() - for key, item in items: - print(f"Subscribe key={key} value={item.value} version={item.version}", flush=True) - else: - print("Nothing yet") - sleep(5) - -asyncio.run(executeConfiguration()) -``` - -- Learn more about managing configurations via the [How-To: Manage configuration]({{% ref howto-manage-configuration.md %}}) guide. -- Visit [Python SDK examples](https://github.com/dapr/python-sdk/tree/master/examples/configuration) for code samples and instructions to try out configuration. 
- -### Distributed Lock - -```python -from dapr.clients import DaprClient - -def main(): - # Lock parameters - store_name = 'lockstore' # as defined in components/lockstore.yaml - resource_id = 'example-lock-resource' - client_id = 'example-client-id' - expiry_in_seconds = 60 - - with DaprClient() as dapr: - print('Will try to acquire a lock from lock store named [%s]' % store_name) - print('The lock is for a resource named [%s]' % resource_id) - print('The client identifier is [%s]' % client_id) - print('The lock will will expire in %s seconds.' % expiry_in_seconds) - - with dapr.try_lock(store_name, resource_id, client_id, expiry_in_seconds) as lock_result: - assert lock_result.success, 'Failed to acquire the lock. Aborting.' - print('Lock acquired successfully!!!') - - # At this point the lock was released - by magic of the `with` clause ;) - unlock_result = dapr.unlock(store_name, resource_id, client_id) - print('We already released the lock so unlocking will not work.') - print('We tried to unlock it anyway and got back [%s]' % unlock_result.status) -``` - -- Learn more about using a distributed lock: [How-To: Use a lock]({{% ref howto-use-distributed-lock.md %}}). -- Visit [Python SDK examples](https://github.com/dapr/python-sdk/blob/master/examples/distributed_lock) for code samples and instructions to try out distributed lock. 
- -### Cryptography - -```python -from dapr.clients import DaprClient - -message = 'The secret is "passw0rd"' - -def main(): - with DaprClient() as d: - resp = d.encrypt( - data=message.encode(), - options=EncryptOptions( - component_name='crypto-localstorage', - key_name='rsa-private-key.pem', - key_wrap_algorithm='RSA', - ), - ) - encrypt_bytes = resp.read() - - resp = d.decrypt( - data=encrypt_bytes, - options=DecryptOptions( - component_name='crypto-localstorage', - key_name='rsa-private-key.pem', - ), - ) - decrypt_bytes = resp.read() - - print(decrypt_bytes.decode()) # The secret is "passw0rd" -``` - -- For a full list of state operations visit [How-To: Use the cryptography APIs]({{% ref howto-cryptography.md %}}). -- Visit [Python SDK examples](https://github.com/dapr/python-sdk/tree/master/examples/crypto) for code samples and instructions to try out cryptography - -## Related links -[Python SDK examples](https://github.com/dapr/python-sdk/tree/master/examples) diff --git a/daprdocs/content/en/python-sdk-docs/python-sdk-extensions/_index.md b/daprdocs/content/en/python-sdk-docs/python-sdk-extensions/_index.md deleted file mode 100644 index 8b7bc9c50..000000000 --- a/daprdocs/content/en/python-sdk-docs/python-sdk-extensions/_index.md +++ /dev/null @@ -1,7 +0,0 @@ ---- -type: docs -title: "Dapr Python SDK extensions" -linkTitle: "Extensions" -weight: 30000 -description: Python SDK for developing Dapr applications ---- diff --git a/daprdocs/content/en/python-sdk-docs/python-sdk-extensions/python-fastapi.md b/daprdocs/content/en/python-sdk-docs/python-sdk-extensions/python-fastapi.md deleted file mode 100644 index 13b6499b9..000000000 --- a/daprdocs/content/en/python-sdk-docs/python-sdk-extensions/python-fastapi.md +++ /dev/null @@ -1,115 +0,0 @@ ---- -type: docs -title: "Dapr Python SDK integration with FastAPI" -linkTitle: "FastAPI" -weight: 200000 -description: How to create Dapr Python virtual actors and pubsub with the FastAPI extension ---- - -The Dapr 
Python SDK provides integration with FastAPI using the `dapr-ext-fastapi` extension. - -## Installation - -You can download and install the Dapr FastAPI extension with: - -{{< tabpane text=true >}} - -{{% tab header="Stable" %}} -```bash -pip install dapr-ext-fastapi -``` -{{% /tab %}} - -{{% tab header="Development" %}} -{{% alert title="Note" color="warning" %}} -The development package will contain features and behavior that will be compatible with the pre-release version of the Dapr runtime. Make sure to uninstall any stable versions of the Python SDK extension before installing the `dapr-dev` package. -{{% /alert %}} - -```bash -pip install dapr-ext-fastapi-dev -``` -{{% /tab %}} - -{{< /tabpane >}} - -## Example - -### Subscribing to events of different types - -```python -import uvicorn -from fastapi import Body, FastAPI -from dapr.ext.fastapi import DaprApp -from pydantic import BaseModel - -class RawEventModel(BaseModel): - body: str - -class User(BaseModel): - id: int - name: str - -class CloudEventModel(BaseModel): - data: User - datacontenttype: str - id: str - pubsubname: str - source: str - specversion: str - topic: str - traceid: str - traceparent: str - tracestate: str - type: str - - -app = FastAPI() -dapr_app = DaprApp(app) - -# Allow handling event with any structure (Easiest, but least robust) -# dapr publish --publish-app-id sample --topic any_topic --pubsub pubsub --data '{"id":"7", "desc": "good", "size":"small"}' -@dapr_app.subscribe(pubsub='pubsub', topic='any_topic') -def any_event_handler(event_data = Body()): - print(event_data) - -# For robustness choose one of the below based on if publisher is using CloudEvents - -# Handle events sent with CloudEvents -# dapr publish --publish-app-id sample --topic cloud_topic --pubsub pubsub --data '{"id":"7", "name":"Bob Jones"}' -@dapr_app.subscribe(pubsub='pubsub', topic='cloud_topic') -def cloud_event_handler(event_data: CloudEventModel): - print(event_data) - -# Handle raw events sent without 
CloudEvents -# curl -X "POST" http://localhost:3500/v1.0/publish/pubsub/raw_topic?metadata.rawPayload=true -H "Content-Type: application/json" -d '{"body": "345"}' -@dapr_app.subscribe(pubsub='pubsub', topic='raw_topic') -def raw_event_handler(event_data: RawEventModel): - print(event_data) - - - -if __name__ == "__main__": - uvicorn.run(app, host="0.0.0.0", port=30212) -``` - -### Creating an actor - -```python -from fastapi import FastAPI -from dapr.ext.fastapi import DaprActor -from demo_actor import DemoActor - -app = FastAPI(title=f'{DemoActor.__name__}Service') - -# Add Dapr Actor Extension -actor = DaprActor(app) - -@app.on_event("startup") -async def startup_event(): - # Register DemoActor - await actor.register_actor(DemoActor) - -@app.get("/GetMyData") -def get_my_data(): - return "{'message': 'myData'}" -``` diff --git a/daprdocs/content/en/python-sdk-docs/python-sdk-extensions/python-flask.md b/daprdocs/content/en/python-sdk-docs/python-sdk-extensions/python-flask.md deleted file mode 100644 index b4ec58f9c..000000000 --- a/daprdocs/content/en/python-sdk-docs/python-sdk-extensions/python-flask.md +++ /dev/null @@ -1,60 +0,0 @@ ---- -type: docs -title: "Dapr Python SDK integration with Flask" -linkTitle: "Flask" -weight: 300000 -description: How to create Dapr Python virtual actors with the Flask extension ---- - -The Dapr Python SDK provides integration with Flask using the `flask-dapr` extension. - -## Installation - -You can download and install the Dapr Flask extension with: - -{{< tabpane text=true >}} - -{{% tab header="Stable" %}} -```bash -pip install flask-dapr -``` -{{% /tab %}} - -{{% tab header="Development" %}} -{{% alert title="Note" color="warning" %}} -The development package will contain features and behavior that will be compatible with the pre-release version of the Dapr runtime. Make sure to uninstall any stable versions of the Python SDK extension before installing the `dapr-dev` package. 
-{{% /alert %}} - -```bash -pip install flask-dapr-dev -``` -{{% /tab %}} - -{{< /tabpane >}} - -## Example - -```python -from flask import Flask -from flask_dapr.actor import DaprActor - -from dapr.conf import settings -from demo_actor import DemoActor - -app = Flask(f'{DemoActor.__name__}Service') - -# Enable DaprActor Flask extension -actor = DaprActor(app) - -# Register DemoActor -actor.register_actor(DemoActor) - -# Setup method route -@app.route('/GetMyData', methods=['GET']) -def get_my_data(): - return {'message': 'myData'}, 200 - -# Run application -if __name__ == '__main__': - app.run(port=settings.HTTP_APP_PORT) -``` diff --git a/daprdocs/content/en/python-sdk-docs/python-sdk-extensions/python-grpc.md b/daprdocs/content/en/python-sdk-docs/python-sdk-extensions/python-grpc.md deleted file mode 100644 index e34c213b5..000000000 --- a/daprdocs/content/en/python-sdk-docs/python-sdk-extensions/python-grpc.md +++ /dev/null @@ -1,118 +0,0 @@ ---- -type: docs -title: "Getting started with the Dapr Python gRPC service extension" -linkTitle: "gRPC" -weight: 100000 -description: How to get up and running with the Dapr Python gRPC extension ---- - -The Dapr Python SDK provides a built in gRPC server extension, `dapr.ext.grpc`, for creating Dapr services. - -## Installation - -You can download and install the Dapr gRPC server extension with: - -{{< tabpane text=true >}} - -{{% tab header="Stable" %}} -```bash -pip install dapr-ext-grpc -``` -{{% /tab %}} - -{{% tab header="Development" %}} -{{% alert title="Note" color="warning" %}} -The development package will contain features and behavior that will be compatible with the pre-release version of the Dapr runtime. Make sure to uninstall any stable versions of the Python SDK extension before installing the `dapr-dev` package. -{{% /alert %}} - -```bash -pip3 install dapr-ext-grpc-dev -``` -{{% /tab %}} - -{{< /tabpane >}} - -## Examples - -The `App` object can be used to create a server. 
- -### Listen for service invocation requests - -The `InvokeMethodReqest` and `InvokeMethodResponse` objects can be used to handle incoming requests. - -A simple service that will listen and respond to requests will look like: - -```python -from dapr.ext.grpc import App, InvokeMethodRequest, InvokeMethodResponse - -app = App() - -@app.method(name='my-method') -def mymethod(request: InvokeMethodRequest) -> InvokeMethodResponse: - print(request.metadata, flush=True) - print(request.text(), flush=True) - - return InvokeMethodResponse(b'INVOKE_RECEIVED', "text/plain; charset=UTF-8") - -app.run(50051) -``` - -A full sample can be found [here](https://github.com/dapr/python-sdk/tree/v1.0.0rc2/examples/invoke-simple). - -### Subscribe to a topic - -When subscribing to a topic, you can instruct dapr whether the event delivered has been accepted, or whether it should be dropped, or retried later. - -```python -from typing import Optional -from cloudevents.sdk.event import v1 -from dapr.ext.grpc import App -from dapr.clients.grpc._response import TopicEventResponse - -app = App() - -# Default subscription for a topic -@app.subscribe(pubsub_name='pubsub', topic='TOPIC_A') -def mytopic(event: v1.Event) -> Optional[TopicEventResponse]: - print(event.Data(),flush=True) - # Returning None (or not doing a return explicitly) is equivalent - # to returning a TopicEventResponse("success"). 
- # You can also return TopicEventResponse("retry") for dapr to log - # the message and retry delivery later, or TopicEventResponse("drop") - # for it to drop the message - return TopicEventResponse("success") - -# Specific handler using Pub/Sub routing -@app.subscribe(pubsub_name='pubsub', topic='TOPIC_A', - rule=Rule("event.type == \"important\"", 1)) -def mytopic_important(event: v1.Event) -> None: - print(event.Data(),flush=True) - -# Handler with disabled topic validation -@app.subscribe(pubsub_name='pubsub-mqtt', topic='topic/#', disable_topic_validation=True,) -def mytopic_wildcard(event: v1.Event) -> None: - print(event.Data(),flush=True) - -app.run(50051) -``` - -A full sample can be found [here](https://github.com/dapr/python-sdk/blob/v1.0.0rc2/examples/pubsub-simple/subscriber.py). - -### Setup input binding trigger - -```python -from dapr.ext.grpc import App, BindingRequest - -app = App() - -@app.binding('kafkaBinding') -def binding(request: BindingRequest): - print(request.text(), flush=True) - -app.run(50051) -``` - -A full sample can be found [here](https://github.com/dapr/python-sdk/tree/v1.0.0rc2/examples/invoke-binding). - -## Related links -- [PyPi](https://pypi.org/project/dapr-ext-grpc/) diff --git a/daprdocs/content/en/python-sdk-docs/python-sdk-extensions/python-workflow-ext/_index.md b/daprdocs/content/en/python-sdk-docs/python-sdk-extensions/python-workflow-ext/_index.md deleted file mode 100644 index 12a63f8f6..000000000 --- a/daprdocs/content/en/python-sdk-docs/python-sdk-extensions/python-workflow-ext/_index.md +++ /dev/null @@ -1,105 +0,0 @@ ---- -type: docs -title: "Dapr Python SDK integration with Dapr Workflow extension" -linkTitle: "Dapr Workflow" -weight: 400000 -description: How to get up and running with the Dapr Workflow extension -no_list: true ---- - -The Dapr Python SDK provides a built-in Dapr Workflow extension, `dapr.ext.workflow`, for creating Dapr services. 
- -## Installation - -You can download and install the Dapr Workflow extension with: - -{{< tabpane text=true >}} - -{{% tab header="Stable" %}} -```bash -pip install dapr-ext-workflow -``` -{{% /tab %}} - -{{% tab header="Development" %}} -{{% alert title="Note" color="warning" %}} -The development package will contain features and behavior that will be compatible with the pre-release version of the Dapr runtime. Make sure to uninstall any stable versions of the Python SDK extension before installing the `dapr-dev` package. -{{% /alert %}} - -```bash -pip install dapr-ext-workflow-dev -``` -{{% /tab %}} - -{{< /tabpane >}} - -## Example - -```python -from time import sleep - -import dapr.ext.workflow as wf - - -wfr = wf.WorkflowRuntime() - - -@wfr.workflow(name='random_workflow') -def task_chain_workflow(ctx: wf.DaprWorkflowContext, wf_input: int): - try: - result1 = yield ctx.call_activity(step1, input=wf_input) - result2 = yield ctx.call_activity(step2, input=result1) - except Exception as e: - yield ctx.call_activity(error_handler, input=str(e)) - raise - return [result1, result2] - - -@wfr.activity(name='step1') -def step1(ctx, activity_input): - print(f'Step 1: Received input: {activity_input}.') - # Do some work - return activity_input + 1 - - -@wfr.activity -def step2(ctx, activity_input): - print(f'Step 2: Received input: {activity_input}.') - # Do some work - return activity_input * 2 - -@wfr.activity -def error_handler(ctx, error): - print(f'Executing error handler: {error}.') - # Do some compensating work - - -if __name__ == '__main__': - wfr.start() - sleep(10) # wait for workflow runtime to start - - wf_client = wf.DaprWorkflowClient() - instance_id = wf_client.schedule_new_workflow(workflow=task_chain_workflow, input=42) - print(f'Workflow started. Instance ID: {instance_id}') - state = wf_client.wait_for_workflow_completion(instance_id) - print(f'Workflow completed! 
Status: {state.runtime_status}') - - wfr.shutdown() -``` - -- Learn more about authoring and managing workflows: - - [How-To: Author a workflow]({{% ref howto-author-workflow.md %}}). - - [How-To: Manage a workflow]({{% ref howto-manage-workflow.md %}}). - - -- Visit [Python SDK examples](https://github.com/dapr/python-sdk/tree/main/examples/workflow) for code samples and instructions to try out Dapr Workflow: - - [Simple workflow example]({{% ref python-workflow.md %}}) - - [Task chaining example](https://github.com/dapr/python-sdk/blob/main/examples/workflow/task_chaining.py) - - [Fan-out/Fan-in example](https://github.com/dapr/python-sdk/blob/main/examples/workflow/fan_out_fan_in.py) - - [Child workflow example](https://github.com/dapr/python-sdk/blob/main/examples/workflow/child_workflow.py) - - [Human approval example](https://github.com/dapr/python-sdk/blob/main/examples/workflow/human_approval.py) - - [Monitor example](https://github.com/dapr/python-sdk/blob/main/examples/workflow/monitor.py) - - -## Next steps - -{{< button text="Getting started with the Dapr Workflow Python SDK" page="python-workflow.md" >}} diff --git a/daprdocs/content/en/python-sdk-docs/python-sdk-extensions/python-workflow-ext/python-workflow.md b/daprdocs/content/en/python-sdk-docs/python-sdk-extensions/python-workflow-ext/python-workflow.md deleted file mode 100644 index 8c882ac72..000000000 --- a/daprdocs/content/en/python-sdk-docs/python-sdk-extensions/python-workflow-ext/python-workflow.md +++ /dev/null @@ -1,166 +0,0 @@ ---- -type: docs -title: "Getting started with the Dapr Workflow Python SDK" -linkTitle: "Workflow" -weight: 30000 -description: How to get up and running with workflows using the Dapr Python SDK ---- - -Let’s create a Dapr workflow and invoke it using the console. 
With the [provided workflow example](https://github.com/dapr/python-sdk/tree/main/examples/workflow/simple.py), you will: - -- Run a [Python console application](https://github.com/dapr/python-sdk/blob/main/examples/workflow/simple.py) that demonstrates workflow orchestration with activities, child workflows, and external events -- Learn how to handle retries, timeouts, and workflow state management -- Use the Python workflow SDK to start, pause, resume, and purge workflow instances - -This example uses the default configuration from `dapr init` in [self-hosted mode](https://github.com/dapr/cli#install-dapr-on-your-local-machine-self-hosted). - -In the Python example project, the `simple.py` file contains the setup of the app, including: -- The workflow definition -- The workflow activity definitions -- The registration of the workflow and workflow activities - -## Prerequisites -- [Dapr CLI]({{% ref install-dapr-cli.md %}}) installed -- Initialized [Dapr environment]({{% ref install-dapr-selfhost.md %}}) -- [Python 3.9+](https://www.python.org/downloads/) installed -- [Dapr Python package]({{% ref "python#installation" %}}) and the [workflow extension]({{% ref "python-workflow/_index.md" %}}) installed -- Verify you're using the latest proto bindings - -## Set up the environment - -Start by cloning the [Python SDK repo]. - -```bash -git clone https://github.com/dapr/python-sdk.git -``` - -From the Python SDK root directory, navigate to the Dapr Workflow example. - -```bash -cd examples/workflow -``` - -Run the following command to install the requirements for running this workflow sample with the Dapr Python SDK. - -```bash -pip3 install -r workflow/requirements.txt -``` - -## Run the application locally - -To run the Dapr application, you need to start the Python program and a Dapr sidecar. 
In the terminal, run: - -```bash -dapr run --app-id wf-simple-example --dapr-grpc-port 50001 --resources-path components -- python3 simple.py -``` - -> **Note:** Since Python3.exe is not defined in Windows, you may need to use `python simple.py` instead of `python3 simple.py`. - - -**Expected output** - -``` -- "== APP == Hi Counter!" -- "== APP == New counter value is: 1!" -- "== APP == New counter value is: 11!" -- "== APP == Retry count value is: 0!" -- "== APP == Retry count value is: 1! This print statement verifies retry" -- "== APP == Appending 1 to child_orchestrator_string!" -- "== APP == Appending a to child_orchestrator_string!" -- "== APP == Appending a to child_orchestrator_string!" -- "== APP == Appending 2 to child_orchestrator_string!" -- "== APP == Appending b to child_orchestrator_string!" -- "== APP == Appending b to child_orchestrator_string!" -- "== APP == Appending 3 to child_orchestrator_string!" -- "== APP == Appending c to child_orchestrator_string!" -- "== APP == Appending c to child_orchestrator_string!" -- "== APP == Get response from hello_world_wf after pause call: Suspended" -- "== APP == Get response from hello_world_wf after resume call: Running" -- "== APP == New counter value is: 111!" -- "== APP == New counter value is: 1111!" -- "== APP == Workflow completed! Result: "Completed" -``` - -## What happened? - -When you run the application, several key workflow features are shown: - -1. **Workflow and Activity Registration**: The application uses Python decorators to automatically register workflows and activities with the runtime. This decorator-based approach provides a clean, declarative way to define your workflow components: - ```python - @wfr.workflow(name='hello_world_wf') - def hello_world_wf(ctx: DaprWorkflowContext, wf_input): - # Workflow definition... - - @wfr.activity(name='hello_act') - def hello_act(ctx: WorkflowActivityContext, wf_input): - # Activity definition... - ``` - -2. 
**Runtime Setup**: The application initializes the workflow runtime and client: - ```python - wfr = WorkflowRuntime() - wfr.start() - wf_client = DaprWorkflowClient() - ``` - -2. **Activity Execution**: The workflow executes a series of activities that increment a counter: - ```python - @wfr.workflow(name='hello_world_wf') - def hello_world_wf(ctx: DaprWorkflowContext, wf_input): - yield ctx.call_activity(hello_act, input=1) - yield ctx.call_activity(hello_act, input=10) - ``` - -3. **Retry Logic**: The workflow demonstrates error handling with a retry policy: - ```python - retry_policy = RetryPolicy( - first_retry_interval=timedelta(seconds=1), - max_number_of_attempts=3, - backoff_coefficient=2, - max_retry_interval=timedelta(seconds=10), - retry_timeout=timedelta(seconds=100), - ) - yield ctx.call_activity(hello_retryable_act, retry_policy=retry_policy) - ``` - -4. **Child Workflow**: A child workflow is executed with its own retry policy: - ```python - yield ctx.call_child_workflow(child_retryable_wf, retry_policy=retry_policy) - ``` - -5. **External Event Handling**: The workflow waits for an external event with a timeout: - ```python - event = ctx.wait_for_external_event(event_name) - timeout = ctx.create_timer(timedelta(seconds=30)) - winner = yield when_any([event, timeout]) - ``` - -6. **Workflow Lifecycle Management**: The example demonstrates how to pause and resume the workflow: - ```python - wf_client.pause_workflow(instance_id=instance_id) - metadata = wf_client.get_workflow_state(instance_id=instance_id) - # ... check status ... - wf_client.resume_workflow(instance_id=instance_id) - ``` - -7. **Event Raising**: After resuming, the workflow raises an event: - ```python - wf_client.raise_workflow_event( - instance_id=instance_id, - event_name=event_name, - data=event_data - ) - ``` - -8. 
**Completion and Cleanup**: Finally, the workflow waits for completion and cleans up: - ```python - state = wf_client.wait_for_workflow_completion( - instance_id, - timeout_in_seconds=30 - ) - wf_client.purge_workflow(instance_id=instance_id) - ``` -## Next steps -- [Learn more about Dapr workflow]({{% ref workflow-overview.md %}}) -- [Workflow API reference]({{% ref workflow_api.md %}}) -- [Try implementing more complex workflow patterns](https://github.com/dapr/python-sdk/tree/main/examples/workflow) diff --git a/dev-requirements.txt b/dev-requirements.txt index 461d92391..828ef8aa4 100644 --- a/dev-requirements.txt +++ b/dev-requirements.txt @@ -1,7 +1,6 @@ mypy>=1.2.0 mypy-extensions>=0.4.3 mypy-protobuf>=2.9 -flake8>=3.7.9 tox>=4.3.0 coverage>=5.3 wheel @@ -13,7 +12,7 @@ pyOpenSSL>=23.2.0 # needed for type checking Flask>=1.1 # needed for auto fix -ruff===0.2.2 +ruff===0.14.1 # needed for dapr-ext-workflow durabletask-dapr >= 0.2.0a9 # needed for .env file loading in examples diff --git a/examples/configuration/configuration.py b/examples/configuration/configuration.py index caf676e6b..d579df7fa 100644 --- a/examples/configuration/configuration.py +++ b/examples/configuration/configuration.py @@ -4,8 +4,9 @@ import asyncio from time import sleep + from dapr.clients import DaprClient -from dapr.clients.grpc._response import ConfigurationWatcher, ConfigurationResponse +from dapr.clients.grpc._response import ConfigurationResponse, ConfigurationWatcher configuration: ConfigurationWatcher = ConfigurationWatcher() diff --git a/examples/conversation/real_llm_providers_example.py b/examples/conversation/real_llm_providers_example.py index c103007e0..2347f4b50 100644 --- a/examples/conversation/real_llm_providers_example.py +++ b/examples/conversation/real_llm_providers_example.py @@ -48,7 +48,7 @@ import sys import tempfile from pathlib import Path -from typing import TYPE_CHECKING, Any, Dict, List, Optional +from typing import Any, Dict, List, Optional import yaml 
@@ -163,8 +163,8 @@ def create_tool_from_typed_function_example() -> conversation.ConversationTools: This shows the most advanced approach: define a typed function and automatically generate the complete tool schema from type hints and docstrings. """ - from typing import Optional, List from enum import Enum + from typing import List, Optional conversation.unregister_tool('find_restaurants') @@ -205,8 +205,8 @@ def create_tool_from_tool_decorator_example() -> conversation.ConversationTools: This shows the most advanced approach: define a typed function and automatically generate the complete tool schema from type hints and docstrings. """ - from typing import Optional, List from enum import Enum + from typing import List, Optional conversation.unregister_tool('find_restaurants') @@ -417,7 +417,7 @@ def create_component_configs(self, selected_providers: Optional[List[str]] = Non def test_basic_conversation_alpha2(self, provider_id: str) -> None: """Test basic Alpha2 conversation with a provider.""" print( - f"\n💬 Testing Alpha2 basic conversation with {self.available_providers[provider_id]['display_name']}" + f'\n💬 Testing Alpha2 basic conversation with {self.available_providers[provider_id]["display_name"]}' ) try: @@ -453,7 +453,7 @@ def test_basic_conversation_alpha2(self, provider_id: str) -> None: def test_multi_turn_conversation_alpha2(self, provider_id: str) -> None: """Test multi-turn Alpha2 conversation with different message types.""" print( - f"\n🔄 Testing Alpha2 multi-turn conversation with {self.available_providers[provider_id]['display_name']}" + f'\n🔄 Testing Alpha2 multi-turn conversation with {self.available_providers[provider_id]["display_name"]}' ) try: @@ -484,7 +484,7 @@ def test_multi_turn_conversation_alpha2(self, provider_id: str) -> None: f'✅ Multi-turn conversation processed {len(response.outputs[0].choices)} message(s)' ) for i, choice in enumerate(response.outputs[0].choices): - print(f' Response {i+1}: {choice.message.content[:100]}...') 
+ print(f' Response {i + 1}: {choice.message.content[:100]}...') else: print('❌ No multi-turn response received') @@ -494,7 +494,7 @@ def test_multi_turn_conversation_alpha2(self, provider_id: str) -> None: def test_tool_calling_alpha2(self, provider_id: str) -> None: """Test Alpha2 tool calling with a provider.""" print( - f"\n🔧 Testing Alpha2 tool calling with {self.available_providers[provider_id]['display_name']}" + f'\n🔧 Testing Alpha2 tool calling with {self.available_providers[provider_id]["display_name"]}' ) try: @@ -560,7 +560,7 @@ def test_tool_calling_alpha2(self, provider_id: str) -> None: def test_parameter_conversion(self, provider_id: str) -> None: """Test the new parameter conversion feature.""" print( - f"\n🔄 Testing parameter conversion with {self.available_providers[provider_id]['display_name']}" + f'\n🔄 Testing parameter conversion with {self.available_providers[provider_id]["display_name"]}' ) try: @@ -600,8 +600,8 @@ def test_parameter_conversion(self, provider_id: str) -> None: if response.outputs and response.outputs[0].choices: choice = response.outputs[0].choices[0] - print(f'✅ Parameter conversion successful!') - print(f'✅ Tool creation helpers working perfectly!') + print('✅ Parameter conversion successful!') + print('✅ Tool creation helpers working perfectly!') print(f' Response: {choice.message.content[:100]}...') else: print('❌ Parameter conversion test failed') @@ -612,7 +612,7 @@ def test_parameter_conversion(self, provider_id: str) -> None: def test_multi_turn_tool_calling_alpha2(self, provider_id: str) -> None: """Test multi-turn Alpha2 tool calling with proper context accumulation.""" print( - f"\n🔄🔧 Testing multi-turn tool calling with {self.available_providers[provider_id]['display_name']}" + f'\n🔄🔧 Testing multi-turn tool calling with {self.available_providers[provider_id]["display_name"]}' ) try: @@ -802,7 +802,7 @@ def test_multi_turn_tool_calling_alpha2(self, provider_id: str) -> None: def 
test_multi_turn_tool_calling_alpha2_tool_helpers(self, provider_id: str) -> None: """Test multi-turn Alpha2 tool calling with proper context accumulation using higher level abstractions.""" print( - f"\n🔄🔧 Testing multi-turn tool calling with {self.available_providers[provider_id]['display_name']}" + f'\n🔄🔧 Testing multi-turn tool calling with {self.available_providers[provider_id]["display_name"]}' ) # using decorator @@ -954,7 +954,7 @@ def append_response_to_history( def test_function_to_schema_approach(self, provider_id: str) -> None: """Test the best DevEx for most cases: function-to-JSON-schema automatic tool creation.""" print( - f"\n🎯 Testing function-to-schema approach with {self.available_providers[provider_id]['display_name']}" + f'\n🎯 Testing function-to-schema approach with {self.available_providers[provider_id]["display_name"]}' ) try: @@ -1002,7 +1002,7 @@ def test_function_to_schema_approach(self, provider_id: str) -> None: def test_tool_decorated_function_to_schema_approach(self, provider_id: str) -> None: """Test the best DevEx for most cases: function-to-JSON-schema automatic tool creation.""" print( - f"\n🎯 Testing decorator tool function-to-schema approach with {self.available_providers[provider_id]['display_name']}" + f'\n🎯 Testing decorator tool function-to-schema approach with {self.available_providers[provider_id]["display_name"]}' ) try: @@ -1052,7 +1052,7 @@ def test_tool_decorated_function_to_schema_approach(self, provider_id: str) -> N async def test_async_conversation_alpha2(self, provider_id: str) -> None: """Test async Alpha2 conversation with a provider.""" print( - f"\n⚡ Testing async Alpha2 conversation with {self.available_providers[provider_id]['display_name']}" + f'\n⚡ Testing async Alpha2 conversation with {self.available_providers[provider_id]["display_name"]}' ) try: @@ -1083,7 +1083,7 @@ async def test_async_conversation_alpha2(self, provider_id: str) -> None: async def test_async_tool_calling_alpha2(self, provider_id: str) 
-> None: """Test async Alpha2 tool calling with a provider.""" print( - f"\n🔧⚡ Testing async Alpha2 tool calling with {self.available_providers[provider_id]['display_name']}" + f'\n🔧⚡ Testing async Alpha2 tool calling with {self.available_providers[provider_id]["display_name"]}' ) try: @@ -1125,9 +1125,9 @@ async def test_async_tool_calling_alpha2(self, provider_id: str) -> None: def run_comprehensive_test(self, provider_id: str) -> None: """Run comprehensive Alpha2 tests for a provider.""" provider_name = self.available_providers[provider_id]['display_name'] - print(f"\n{'='*60}") + print(f'\n{"=" * 60}') print(f'🧪 Testing {provider_name} with Alpha2 API') - print(f"{'='*60}") + print(f'{"=" * 60}') # Alpha2 Sync tests self.test_basic_conversation_alpha2(provider_id) @@ -1149,7 +1149,7 @@ def run_comprehensive_test(self, provider_id: str) -> None: def test_basic_conversation_alpha1_legacy(self, provider_id: str) -> None: """Test legacy Alpha1 conversation for comparison.""" print( - f"\n📚 Testing legacy Alpha1 for comparison with {self.available_providers[provider_id]['display_name']}" + f'\n📚 Testing legacy Alpha1 for comparison with {self.available_providers[provider_id]["display_name"]}' ) try: @@ -1235,7 +1235,7 @@ def main(): if provider_id in tester.available_providers: tester.run_comprehensive_test(provider_id) - print(f"\n{'='*60}") + print(f'\n{"=" * 60}') print('🎉 All Alpha2 tests completed!') print('✅ Real LLM provider integration with Alpha2 API is working correctly') print('🔧 Features demonstrated:') @@ -1248,7 +1248,7 @@ def main(): print(' • Function-to-schema using @tool decorator for automatic tool generation') print(' • Both sync and async implementations') print(' • Backward compatibility with Alpha1') - print(f"{'='*60}") + print(f'{"=" * 60}') except KeyboardInterrupt: print('\n\n⏹️ Tests interrupted by user') diff --git a/examples/crypto/crypto-async.py b/examples/crypto/crypto-async.py index 0946e9bbb..2e49a8282 100644 --- 
a/examples/crypto/crypto-async.py +++ b/examples/crypto/crypto-async.py @@ -14,7 +14,7 @@ import asyncio from dapr.aio.clients import DaprClient -from dapr.clients.grpc._crypto import EncryptOptions, DecryptOptions +from dapr.clients.grpc._crypto import DecryptOptions, EncryptOptions # Name of the crypto component to use CRYPTO_COMPONENT_NAME = 'crypto-localstorage' diff --git a/examples/crypto/crypto.py b/examples/crypto/crypto.py index a282ba453..afe00f343 100644 --- a/examples/crypto/crypto.py +++ b/examples/crypto/crypto.py @@ -12,7 +12,7 @@ # ------------------------------------------------------------ from dapr.clients import DaprClient -from dapr.clients.grpc._crypto import EncryptOptions, DecryptOptions +from dapr.clients.grpc._crypto import DecryptOptions, EncryptOptions # Name of the crypto component to use CRYPTO_COMPONENT_NAME = 'crypto-localstorage' diff --git a/examples/demo_actor/README.md b/examples/demo_actor/README.md index f1b1bbe2b..6353a6e01 100644 --- a/examples/demo_actor/README.md +++ b/examples/demo_actor/README.md @@ -17,16 +17,10 @@ This document describes how to create an Actor(DemoActor) and invoke its methods You can install dapr SDK package using pip command: - - ```sh pip3 install -r demo_actor/requirements.txt ``` - - ## Run in self-hosted mode - ```sh pip3 install -r demo_workflow/requirements.txt ``` - - + + + +```sh +pip3 install -r requirements.txt +``` + + + +## Run the example + +Export your `OPENAI_API_KEY`: + +```bash +export OPENAI_API_KEY="SK-..." +``` + +Run the following command in a terminal/command prompt: + + + +```bash +# 1. 
Run the LangGraph agent +dapr run --app-id langgraph-checkpointer --app-port 5001 --resources-path ./components -- python3 agent.py +``` + + + +## Cleanup + +Either press CTRL + C to quit the app or run the following command in a new terminal to stop the app: + +```bash +dapr stop --app-id langgraph-checkpointer +``` + diff --git a/examples/langgraph-checkpointer/agent.py b/examples/langgraph-checkpointer/agent.py new file mode 100644 index 000000000..8ea98ae82 --- /dev/null +++ b/examples/langgraph-checkpointer/agent.py @@ -0,0 +1,66 @@ +from dapr.ext.langgraph import DaprCheckpointer +from langchain_core.messages import HumanMessage, SystemMessage +from langchain_ollama import ChatOllama +from langgraph.graph import START, MessagesState, StateGraph +from langgraph.prebuilt import ToolNode, tools_condition + + +def add(a: int, b: int) -> int: + """Adds a and b. + + Args: + a: first int + b: second int + """ + return a + b + + +def multiply(a: int, b: int) -> int: + """Multiply a and b. + + Args: + a: first int + b: second int + """ + return a * b + + +tools = [add, multiply] +llm = ChatOllama(model='llama3.2:latest') +llm_with_tools = llm.bind_tools(tools) + +sys_msg = SystemMessage( + content='You are a helpful assistant tasked with performing arithmetic on a set of inputs.' 
+) + + +def assistant(state: MessagesState): + return {'messages': [llm_with_tools.invoke([sys_msg] + state['messages'])]} + + +builder = StateGraph(MessagesState) + +builder.add_node('assistant', assistant) +builder.add_node('tools', ToolNode(tools)) + +builder.add_edge(START, 'assistant') +builder.add_conditional_edges( + 'assistant', + tools_condition, +) +builder.add_edge('tools', 'assistant') + +memory = DaprCheckpointer(store_name='statestore', key_prefix='dapr') +react_graph_memory = builder.compile(checkpointer=memory) + +config = {'configurable': {'thread_id': '1'}} + +messages = [HumanMessage(content='Add 3 and 4.')] +messages = react_graph_memory.invoke({'messages': messages}, config) +for m in messages['messages']: + m.pretty_print() + +messages = [HumanMessage(content='Multiply the result by 2.')] +messages = react_graph_memory.invoke({'messages': messages}, config) +for m in messages['messages']: + m.pretty_print() diff --git a/examples/langgraph-checkpointer/components/statestore.yaml b/examples/langgraph-checkpointer/components/statestore.yaml new file mode 100644 index 000000000..2f676bff8 --- /dev/null +++ b/examples/langgraph-checkpointer/components/statestore.yaml @@ -0,0 +1,14 @@ +apiVersion: dapr.io/v1alpha1 +kind: Component +metadata: + name: statestore +spec: + type: state.redis + version: v1 + metadata: + - name: redisHost + value: localhost:6379 + - name: redisPassword + value: "" + - name: actorStateStore + value: "true" diff --git a/examples/langgraph-checkpointer/requirements.txt b/examples/langgraph-checkpointer/requirements.txt new file mode 100644 index 000000000..788a4b213 --- /dev/null +++ b/examples/langgraph-checkpointer/requirements.txt @@ -0,0 +1,3 @@ +langchain-core>=1.0.7 +langgraph>=1.0.3 +langchain-ollama>=1.0.0 \ No newline at end of file diff --git a/examples/metadata/README.md b/examples/metadata/README.md index eafe3723b..409df01b8 100644 --- a/examples/metadata/README.md +++ b/examples/metadata/README.md @@ -41,7 +41,7 
@@ expected_stdout_lines: - "== APP == registered_components:" - "== APP == name=lockstore type=lock.redis version= capabilities=[]" - "== APP == name=pubsub type=pubsub.redis version=v1 capabilities=[]" - - "== APP == name=statestore type=state.redis version=v1 capabilities=['ACTOR', 'ETAG', 'TRANSACTIONAL'" + - "== APP == name=statestore type=state.redis version=v1 capabilities=['ACTOR', 'ETAG', 'KEYS_LIKE', 'TRANSACTIONAL', 'TTL'" - "== APP == We will update our custom label value and check it was persisted" - "== APP == We added a custom label named [is-this-our-metadata-example]" - "== APP == Its old value was [yes] but now it is [You bet it is!]" @@ -64,7 +64,7 @@ The output should be as follows: == APP == registered_components: == APP == name=lockstore type=lock.redis version= capabilities=[] == APP == name=pubsub type=pubsub.redis version=v1 capabilities=[] -== APP == name=statestore type=state.redis version=v1 capabilities=['ACTOR', 'ETAG', 'TRANSACTIONAL', 'TTL'] +== APP == name=statestore type=state.redis version=v1 capabilities=['ACTOR', 'ETAG', 'KEYS_LIKE', 'TRANSACTIONAL', 'TTL'] == APP == We will update our custom label value and check it was persisted == APP == We added a custom label named [is-this-our-metadata-example] == APP == Its old value was [yes] but now it is [You bet it is!] diff --git a/examples/pubsub-simple/subscriber.py b/examples/pubsub-simple/subscriber.py index daa11bc89..4d36f2807 100644 --- a/examples/pubsub-simple/subscriber.py +++ b/examples/pubsub-simple/subscriber.py @@ -11,14 +11,15 @@ # limitations under the License. 
# ------------------------------------------------------------ +import json from time import sleep + from cloudevents.sdk.event import v1 from dapr.ext.grpc import App + from dapr.clients.grpc._response import TopicEventResponse from dapr.proto import appcallback_v1 -import json - app = App() should_retry = True # To control whether dapr should retry sending a message diff --git a/examples/pubsub-streaming-async/subscriber-handler.py b/examples/pubsub-streaming-async/subscriber-handler.py index 06a492af5..c9c8203c2 100644 --- a/examples/pubsub-streaming-async/subscriber-handler.py +++ b/examples/pubsub-streaming-async/subscriber-handler.py @@ -1,5 +1,6 @@ import argparse import asyncio + from dapr.aio.clients import DaprClient from dapr.clients.grpc._response import TopicEventResponse diff --git a/examples/state_store/state_store.py b/examples/state_store/state_store.py index 301c675bc..b783fcdc9 100644 --- a/examples/state_store/state_store.py +++ b/examples/state_store/state_store.py @@ -5,11 +5,9 @@ import grpc from dapr.clients import DaprClient - from dapr.clients.grpc._request import TransactionalStateOperation, TransactionOperationType from dapr.clients.grpc._state import StateItem - with DaprClient() as d: storeName = 'statestore' diff --git a/examples/state_store_query/state_store_query.py b/examples/state_store_query/state_store_query.py index f532f0eb0..26c64da3e 100644 --- a/examples/state_store_query/state_store_query.py +++ b/examples/state_store_query/state_store_query.py @@ -2,10 +2,9 @@ dapr run python3 state_store_query.py """ -from dapr.clients import DaprClient - import json +from dapr.clients import DaprClient with DaprClient() as d: store_name = 'statestore' diff --git a/examples/w3c-tracing/invoke-receiver.py b/examples/w3c-tracing/invoke-receiver.py index 92300aebe..bb49236a8 100644 --- a/examples/w3c-tracing/invoke-receiver.py +++ b/examples/w3c-tracing/invoke-receiver.py @@ -3,16 +3,16 @@ import typing from concurrent import futures +from 
dapr.ext.grpc import App, InvokeMethodRequest, InvokeMethodResponse from opentelemetry import trace from opentelemetry.exporter.zipkin.json import ZipkinExporter -from opentelemetry.instrumentation.grpc import GrpcInstrumentorServer, filters +from opentelemetry.instrumentation.grpc import GrpcInstrumentorServer from opentelemetry.sdk.trace import TracerProvider from opentelemetry.sdk.trace.export import BatchSpanProcessor from opentelemetry.sdk.trace.sampling import ALWAYS_ON from opentelemetry.trace.propagation.tracecontext import TraceContextTextMapPropagator from dapr.clients import DaprClient -from dapr.ext.grpc import App, InvokeMethodRequest, InvokeMethodResponse # Create a tracer provider tracer_provider = TracerProvider(sampler=ALWAYS_ON) diff --git a/examples/w3c-tracing/requirements.txt b/examples/w3c-tracing/requirements.txt index 514e2606a..36daee5c7 100644 --- a/examples/w3c-tracing/requirements.txt +++ b/examples/w3c-tracing/requirements.txt @@ -1,5 +1,5 @@ -dapr-ext-grpc >= 1.16.1rc1 -dapr >= 1.16.1rc1 +dapr-ext-grpc >= 1.16.1rc2 +dapr >= 1.16.1rc2 opentelemetry-sdk opentelemetry-instrumentation-grpc opentelemetry-exporter-zipkin diff --git a/examples/workflow/child_workflow.py b/examples/workflow/child_workflow.py index dccaa631b..57ab2fc3e 100644 --- a/examples/workflow/child_workflow.py +++ b/examples/workflow/child_workflow.py @@ -10,9 +10,10 @@ # See the License for the specific language governing permissions and # limitations under the License. -import dapr.ext.workflow as wf import time +import dapr.ext.workflow as wf + wfr = wf.WorkflowRuntime() diff --git a/examples/workflow/cross-app3.py b/examples/workflow/cross-app3.py index ecc945ca3..6bf99a66d 100644 --- a/examples/workflow/cross-app3.py +++ b/examples/workflow/cross-app3.py @@ -10,19 +10,21 @@ # See the License for the specific language governing permissions and # limitations under the License. 
import os -import dapr.ext.workflow as wf import time +import dapr.ext.workflow as wf + wfr = wf.WorkflowRuntime() @wfr.activity def app3_activity(ctx: wf.DaprWorkflowContext) -> int: - print(f'app3 - received activity call', flush=True) + print('app3 - received activity call', flush=True) if os.getenv('ERROR_ACTIVITY_MODE', 'false') == 'true': - print(f'app3 - raising error in activity due to error mode being enabled', flush=True) + print('app3 - raising error in activity due to error mode being enabled', flush=True) raise ValueError('Error in activity due to error mode being enabled') - print(f'app3 - returning activity result', flush=True) + print('app3 - returning activity result', flush=True) + return 3 diff --git a/examples/workflow/fan_out_fan_in.py b/examples/workflow/fan_out_fan_in.py index e5799862f..f625ea287 100644 --- a/examples/workflow/fan_out_fan_in.py +++ b/examples/workflow/fan_out_fan_in.py @@ -12,6 +12,7 @@ import time from typing import List + import dapr.ext.workflow as wf wfr = wf.WorkflowRuntime() diff --git a/examples/workflow/human_approval.py b/examples/workflow/human_approval.py index 6a8a725d7..e12bf5b5e 100644 --- a/examples/workflow/human_approval.py +++ b/examples/workflow/human_approval.py @@ -11,13 +11,14 @@ # limitations under the License. import threading +import time from dataclasses import asdict, dataclass from datetime import timedelta -import time -from dapr.clients import DaprClient import dapr.ext.workflow as wf +from dapr.clients import DaprClient + wfr = wf.WorkflowRuntime() diff --git a/examples/workflow/monitor.py b/examples/workflow/monitor.py index 6cf575cfe..d4f534df5 100644 --- a/examples/workflow/monitor.py +++ b/examples/workflow/monitor.py @@ -10,10 +10,11 @@ # See the License for the specific language governing permissions and # limitations under the License. 
+import random from dataclasses import dataclass from datetime import timedelta -import random from time import sleep + import dapr.ext.workflow as wf wfr = wf.WorkflowRuntime() diff --git a/examples/workflow/requirements.txt b/examples/workflow/requirements.txt index fab86e728..c7132e3a1 100644 --- a/examples/workflow/requirements.txt +++ b/examples/workflow/requirements.txt @@ -1,2 +1,2 @@ -dapr-ext-workflow>=1.16.1rc1 -dapr>=1.16.1rc1 +dapr-ext-workflow>=1.16.1rc2 +dapr>=1.16.1rc2 diff --git a/examples/workflow/simple.py b/examples/workflow/simple.py index 76f21eba4..dc0ea0b6a 100644 --- a/examples/workflow/simple.py +++ b/examples/workflow/simple.py @@ -11,16 +11,18 @@ # limitations under the License. from datetime import timedelta from time import sleep + from dapr.ext.workflow import ( - WorkflowRuntime, + DaprWorkflowClient, DaprWorkflowContext, - WorkflowActivityContext, RetryPolicy, - DaprWorkflowClient, + WorkflowActivityContext, + WorkflowRuntime, when_any, ) -from dapr.conf import Settings + from dapr.clients.exceptions import DaprInternalError +from dapr.conf import Settings settings = Settings() diff --git a/examples/workflow/simple_aio_client.py b/examples/workflow/simple_aio_client.py new file mode 100644 index 000000000..fd93a5016 --- /dev/null +++ b/examples/workflow/simple_aio_client.py @@ -0,0 +1,177 @@ +# -*- coding: utf-8 -*- +# Copyright 2025 The Dapr Authors +# Licensed under the Apache License, Version 2.0 (the "License"); +# you may not use this file except in compliance with the License. +# You may obtain a copy of the License at +# http://www.apache.org/licenses/LICENSE-2.0 +# Unless required by applicable law or agreed to in writing, software +# distributed under the License is distributed on an "AS IS" BASIS, +# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. +# See the License for the specific language governing permissions and +# limitations under the License. 
+import asyncio +from datetime import timedelta + +from dapr.ext.workflow import ( + DaprWorkflowContext, + RetryPolicy, + WorkflowActivityContext, + WorkflowRuntime, + when_any, +) +from dapr.ext.workflow.aio import DaprWorkflowClient + +from dapr.clients.exceptions import DaprInternalError +from dapr.conf import Settings + +settings = Settings() + +counter = 0 +retry_count = 0 +child_orchestrator_count = 0 +child_orchestrator_string = '' +child_act_retry_count = 0 +instance_id = 'exampleInstanceID' +child_instance_id = 'childInstanceID' +workflow_name = 'hello_world_wf' +child_workflow_name = 'child_wf' +input_data = 'Hi Counter!' +event_name = 'event1' +event_data = 'eventData' +non_existent_id_error = 'no such instance exists' + +retry_policy = RetryPolicy( + first_retry_interval=timedelta(seconds=1), + max_number_of_attempts=3, + backoff_coefficient=2, + max_retry_interval=timedelta(seconds=10), + retry_timeout=timedelta(seconds=100), +) + +wfr = WorkflowRuntime() + + +@wfr.workflow(name='hello_world_wf') +def hello_world_wf(ctx: DaprWorkflowContext, wf_input): + print(f'{wf_input}') + yield ctx.call_activity(hello_act, input=1) + yield ctx.call_activity(hello_act, input=10) + yield ctx.call_activity(hello_retryable_act, retry_policy=retry_policy) + yield ctx.call_child_workflow(child_retryable_wf, retry_policy=retry_policy) + + # Change in event handling: Use when_any to handle both event and timeout + event = ctx.wait_for_external_event(event_name) + timeout = ctx.create_timer(timedelta(seconds=30)) + winner = yield when_any([event, timeout]) + + if winner == timeout: + print('Workflow timed out waiting for event') + return 'Timeout' + + yield ctx.call_activity(hello_act, input=100) + yield ctx.call_activity(hello_act, input=1000) + return 'Completed' + + +@wfr.activity(name='hello_act') +def hello_act(ctx: WorkflowActivityContext, wf_input): + global counter + counter += wf_input + print(f'New counter value is: {counter}!', flush=True) + + 
+@wfr.activity(name='hello_retryable_act') +def hello_retryable_act(ctx: WorkflowActivityContext): + global retry_count + if (retry_count % 2) == 0: + print(f'Retry count value is: {retry_count}!', flush=True) + retry_count += 1 + raise ValueError('Retryable Error') + print(f'Retry count value is: {retry_count}! This print statement verifies retry', flush=True) + retry_count += 1 + + +@wfr.workflow(name='child_retryable_wf') +def child_retryable_wf(ctx: DaprWorkflowContext): + global child_orchestrator_string, child_orchestrator_count + if not ctx.is_replaying: + child_orchestrator_count += 1 + print(f'Appending {child_orchestrator_count} to child_orchestrator_string!', flush=True) + child_orchestrator_string += str(child_orchestrator_count) + yield ctx.call_activity( + act_for_child_wf, input=child_orchestrator_count, retry_policy=retry_policy + ) + if child_orchestrator_count < 3: + raise ValueError('Retryable Error') + + +@wfr.activity(name='act_for_child_wf') +def act_for_child_wf(ctx: WorkflowActivityContext, inp): + global child_orchestrator_string, child_act_retry_count + inp_char = chr(96 + inp) + print(f'Appending {inp_char} to child_orchestrator_string!', flush=True) + child_orchestrator_string += inp_char + if child_act_retry_count % 2 == 0: + child_act_retry_count += 1 + raise ValueError('Retryable Error') + child_act_retry_count += 1 + + +async def main(): + wfr.start() + wf_client = DaprWorkflowClient() + + try: + print('==========Start Counter Increase as per Input:==========') + await wf_client.schedule_new_workflow( + workflow=hello_world_wf, input=input_data, instance_id=instance_id + ) + + await wf_client.wait_for_workflow_start(instance_id) + + # Sleep to let the workflow run initial activities + await asyncio.sleep(12) + + assert counter == 11 + assert retry_count == 2 + assert child_orchestrator_string == '1aa2bb3cc' + + # Pause Test + await wf_client.pause_workflow(instance_id=instance_id) + metadata = await 
wf_client.get_workflow_state(instance_id=instance_id) + print(f'Get response from {workflow_name} after pause call: {metadata.runtime_status.name}') + + # Resume Test + await wf_client.resume_workflow(instance_id=instance_id) + metadata = await wf_client.get_workflow_state(instance_id=instance_id) + print( + f'Get response from {workflow_name} after resume call: {metadata.runtime_status.name}' + ) + + await asyncio.sleep(2) # Give the workflow time to reach the event wait state + await wf_client.raise_workflow_event( + instance_id=instance_id, event_name=event_name, data=event_data + ) + + print('========= Waiting for Workflow completion', flush=True) + try: + state = await wf_client.wait_for_workflow_completion(instance_id, timeout_in_seconds=30) + if state.runtime_status.name == 'COMPLETED': + print('Workflow completed! Result: {}'.format(state.serialized_output.strip('"'))) + else: + print(f'Workflow failed! Status: {state.runtime_status.name}') + except TimeoutError: + print('*** Workflow timed out!') + + await wf_client.purge_workflow(instance_id=instance_id) + try: + await wf_client.get_workflow_state(instance_id=instance_id) + except DaprInternalError as err: + if non_existent_id_error in err._message: + print('Instance Successfully Purged') + finally: + wfr.shutdown() + + +if __name__ == '__main__': + asyncio.run(main()) diff --git a/examples/workflow/task_chaining.py b/examples/workflow/task_chaining.py index 074cadcd2..8a2058e1c 100644 --- a/examples/workflow/task_chaining.py +++ b/examples/workflow/task_chaining.py @@ -14,7 +14,6 @@ import dapr.ext.workflow as wf - wfr = wf.WorkflowRuntime() diff --git a/ext/dapr-ext-fastapi/dapr/ext/fastapi/__init__.py b/ext/dapr-ext-fastapi/dapr/ext/fastapi/__init__.py index 942603078..e43df65c9 100644 --- a/ext/dapr-ext-fastapi/dapr/ext/fastapi/__init__.py +++ b/ext/dapr-ext-fastapi/dapr/ext/fastapi/__init__.py @@ -16,5 +16,4 @@ from .actor import DaprActor from .app import DaprApp - __all__ = ['DaprActor', 'DaprApp'] 
diff --git a/ext/dapr-ext-fastapi/dapr/ext/fastapi/actor.py b/ext/dapr-ext-fastapi/dapr/ext/fastapi/actor.py index 93b7860e1..4b3990da4 100644 --- a/ext/dapr-ext-fastapi/dapr/ext/fastapi/actor.py +++ b/ext/dapr-ext-fastapi/dapr/ext/fastapi/actor.py @@ -13,12 +13,12 @@ limitations under the License. """ -from typing import Any, Optional, Type, List +from typing import Any, List, Optional, Type from dapr.actor import Actor, ActorRuntime from dapr.clients.exceptions import ERROR_CODE_UNKNOWN, DaprInternalError from dapr.serializers import DefaultJSONSerializer -from fastapi import FastAPI, APIRouter, Request, Response, status # type: ignore +from fastapi import APIRouter, FastAPI, Request, Response, status # type: ignore from fastapi.logger import logger from fastapi.responses import JSONResponse diff --git a/ext/dapr-ext-fastapi/dapr/ext/fastapi/app.py b/ext/dapr-ext-fastapi/dapr/ext/fastapi/app.py index d926fac5c..6bede5234 100644 --- a/ext/dapr-ext-fastapi/dapr/ext/fastapi/app.py +++ b/ext/dapr-ext-fastapi/dapr/ext/fastapi/app.py @@ -13,6 +13,7 @@ """ from typing import Dict, List, Optional + from fastapi import FastAPI # type: ignore diff --git a/dapr/aio/clients/py.typed b/ext/dapr-ext-fastapi/dapr/ext/fastapi/py.typed similarity index 100% rename from dapr/aio/clients/py.typed rename to ext/dapr-ext-fastapi/dapr/ext/fastapi/py.typed diff --git a/ext/dapr-ext-fastapi/dapr/ext/fastapi/version.py b/ext/dapr-ext-fastapi/dapr/ext/fastapi/version.py index 8c6c12960..5c39bd99d 100644 --- a/ext/dapr-ext-fastapi/dapr/ext/fastapi/version.py +++ b/ext/dapr-ext-fastapi/dapr/ext/fastapi/version.py @@ -13,4 +13,4 @@ limitations under the License. 
""" -__version__ = '1.16.1rc1' +__version__ = '1.16.1rc2' diff --git a/ext/dapr-ext-fastapi/setup.cfg b/ext/dapr-ext-fastapi/setup.cfg index 8b6080ebf..2bf42bbc1 100644 --- a/ext/dapr-ext-fastapi/setup.cfg +++ b/ext/dapr-ext-fastapi/setup.cfg @@ -24,7 +24,7 @@ python_requires = >=3.9 packages = find_namespace: include_package_data = True install_requires = - dapr >= 1.16.1rc1 + dapr >= 1.16.1rc2 uvicorn >= 0.11.6 fastapi >= 0.60.1 @@ -34,3 +34,7 @@ include = exclude = tests + +[options.package_data] +dapr.ext.fastapi = + py.typed diff --git a/ext/dapr-ext-fastapi/tests/test_app.py b/ext/dapr-ext-fastapi/tests/test_app.py index 831d55ebb..619697994 100644 --- a/ext/dapr-ext-fastapi/tests/test_app.py +++ b/ext/dapr-ext-fastapi/tests/test_app.py @@ -1,11 +1,10 @@ import unittest +from dapr.ext.fastapi import DaprApp from fastapi import FastAPI from fastapi.testclient import TestClient from pydantic import BaseModel -from dapr.ext.fastapi import DaprApp - class Message(BaseModel): body: str diff --git a/ext/dapr-ext-fastapi/tests/test_dapractor.py b/ext/dapr-ext-fastapi/tests/test_dapractor.py index ee863d726..71408c88c 100644 --- a/ext/dapr-ext-fastapi/tests/test_dapractor.py +++ b/ext/dapr-ext-fastapi/tests/test_dapractor.py @@ -16,9 +16,8 @@ import json import unittest -from fastapi import FastAPI - from dapr.ext.fastapi.actor import DaprActor, _wrap_response +from fastapi import FastAPI class DaprActorTest(unittest.TestCase): diff --git a/ext/dapr-ext-grpc/dapr/ext/grpc/__init__.py b/ext/dapr-ext-grpc/dapr/ext/grpc/__init__.py index 7d73b4a48..5324c6175 100644 --- a/ext/dapr-ext-grpc/dapr/ext/grpc/__init__.py +++ b/ext/dapr-ext-grpc/dapr/ext/grpc/__init__.py @@ -13,12 +13,11 @@ limitations under the License. 
""" -from dapr.clients.grpc._request import InvokeMethodRequest, BindingRequest, JobEvent -from dapr.clients.grpc._response import InvokeMethodResponse, TopicEventResponse -from dapr.clients.grpc._jobs import Job, FailurePolicy, DropFailurePolicy, ConstantFailurePolicy - from dapr.ext.grpc.app import App, Rule # type:ignore +from dapr.clients.grpc._jobs import ConstantFailurePolicy, DropFailurePolicy, FailurePolicy, Job +from dapr.clients.grpc._request import BindingRequest, InvokeMethodRequest, JobEvent +from dapr.clients.grpc._response import InvokeMethodResponse, TopicEventResponse __all__ = [ 'App', diff --git a/ext/dapr-ext-grpc/dapr/ext/grpc/_health_servicer.py b/ext/dapr-ext-grpc/dapr/ext/grpc/_health_servicer.py index 029dff745..f6d782da1 100644 --- a/ext/dapr-ext-grpc/dapr/ext/grpc/_health_servicer.py +++ b/ext/dapr-ext-grpc/dapr/ext/grpc/_health_servicer.py @@ -1,6 +1,6 @@ -import grpc from typing import Callable, Optional +import grpc from dapr.proto import appcallback_service_v1 from dapr.proto.runtime.v1.appcallback_pb2 import HealthCheckResponse diff --git a/ext/dapr-ext-grpc/dapr/ext/grpc/_servicer.py b/ext/dapr-ext-grpc/dapr/ext/grpc/_servicer.py index 996267fdd..8de632f97 100644 --- a/ext/dapr-ext-grpc/dapr/ext/grpc/_servicer.py +++ b/ext/dapr-ext-grpc/dapr/ext/grpc/_servicer.py @@ -12,25 +12,25 @@ See the License for the specific language governing permissions and limitations under the License. 
""" -import grpc -from cloudevents.sdk.event import v1 # type: ignore from typing import Callable, Dict, List, Optional, Tuple, Union +from cloudevents.sdk.event import v1 # type: ignore from google.protobuf import empty_pb2 from google.protobuf.message import Message as GrpcMessage from google.protobuf.struct_pb2 import Struct -from dapr.proto import appcallback_service_v1, common_v1, appcallback_v1 +import grpc +from dapr.clients._constants import DEFAULT_JSON_CONTENT_TYPE +from dapr.clients.grpc._request import BindingRequest, InvokeMethodRequest, JobEvent +from dapr.clients.grpc._response import InvokeMethodResponse, TopicEventResponse +from dapr.proto import appcallback_service_v1, appcallback_v1, common_v1 +from dapr.proto.common.v1.common_pb2 import InvokeRequest from dapr.proto.runtime.v1.appcallback_pb2 import ( - TopicEventRequest, BindingEventRequest, JobEventRequest, + TopicEventRequest, ) -from dapr.proto.common.v1.common_pb2 import InvokeRequest -from dapr.clients._constants import DEFAULT_JSON_CONTENT_TYPE -from dapr.clients.grpc._request import InvokeMethodRequest, BindingRequest, JobEvent -from dapr.clients.grpc._response import InvokeMethodResponse, TopicEventResponse InvokeMethodCallable = Callable[[InvokeMethodRequest], Union[str, bytes, InvokeMethodResponse]] TopicSubscribeCallable = Callable[[v1.Event], Optional[TopicEventResponse]] diff --git a/ext/dapr-ext-grpc/dapr/ext/grpc/app.py b/ext/dapr-ext-grpc/dapr/ext/grpc/app.py index 9f9ac8472..58e0cdf29 100644 --- a/ext/dapr-ext-grpc/dapr/ext/grpc/app.py +++ b/ext/dapr-ext-grpc/dapr/ext/grpc/app.py @@ -13,14 +13,14 @@ limitations under the License. 
""" -import grpc - from concurrent import futures from typing import Dict, Optional -from dapr.conf import settings -from dapr.ext.grpc._servicer import _CallbackServicer, Rule # type: ignore from dapr.ext.grpc._health_servicer import _HealthCheckServicer # type: ignore +from dapr.ext.grpc._servicer import Rule, _CallbackServicer # type: ignore + +import grpc +from dapr.conf import settings from dapr.proto import appcallback_service_v1 diff --git a/dapr/clients/py.typed b/ext/dapr-ext-grpc/dapr/ext/grpc/py.typed similarity index 100% rename from dapr/clients/py.typed rename to ext/dapr-ext-grpc/dapr/ext/grpc/py.typed diff --git a/ext/dapr-ext-grpc/dapr/ext/grpc/version.py b/ext/dapr-ext-grpc/dapr/ext/grpc/version.py index 8c6c12960..5c39bd99d 100644 --- a/ext/dapr-ext-grpc/dapr/ext/grpc/version.py +++ b/ext/dapr-ext-grpc/dapr/ext/grpc/version.py @@ -13,4 +13,4 @@ limitations under the License. """ -__version__ = '1.16.1rc1' +__version__ = '1.16.1rc2' diff --git a/ext/dapr-ext-grpc/setup.cfg b/ext/dapr-ext-grpc/setup.cfg index d08757c78..3256a06cf 100644 --- a/ext/dapr-ext-grpc/setup.cfg +++ b/ext/dapr-ext-grpc/setup.cfg @@ -24,7 +24,7 @@ python_requires = >=3.9 packages = find_namespace: include_package_data = True install_requires = - dapr >= 1.16.1rc1 + dapr >= 1.16.1rc2 cloudevents >= 1.0.0 [options.packages.find] @@ -33,3 +33,7 @@ include = exclude = tests + +[options.package_data] +dapr.ext.grpc = + py.typed diff --git a/ext/dapr-ext-grpc/tests/test_app.py b/ext/dapr-ext-grpc/tests/test_app.py index 2a33dd668..315d9e18b 100644 --- a/ext/dapr-ext-grpc/tests/test_app.py +++ b/ext/dapr-ext-grpc/tests/test_app.py @@ -16,7 +16,7 @@ import unittest from cloudevents.sdk.event import v1 -from dapr.ext.grpc import App, Rule, InvokeMethodRequest, BindingRequest +from dapr.ext.grpc import App, BindingRequest, InvokeMethodRequest, Rule class AppTests(unittest.TestCase): diff --git a/ext/dapr-ext-grpc/tests/test_servicier.py b/ext/dapr-ext-grpc/tests/test_servicier.py 
index 2447eea3c..325d9b6d6 100644 --- a/ext/dapr-ext-grpc/tests/test_servicier.py +++ b/ext/dapr-ext-grpc/tests/test_servicier.py @@ -14,16 +14,15 @@ """ import unittest - from unittest.mock import MagicMock, Mock -from dapr.clients.grpc._request import InvokeMethodRequest -from dapr.clients.grpc._response import InvokeMethodResponse, TopicEventResponse from dapr.ext.grpc._servicer import _CallbackServicer -from dapr.proto import common_v1, appcallback_v1 - from google.protobuf.any_pb2 import Any as GrpcAny +from dapr.clients.grpc._request import InvokeMethodRequest +from dapr.clients.grpc._response import InvokeMethodResponse, TopicEventResponse +from dapr.proto import appcallback_v1, common_v1 + class OnInvokeTests(unittest.TestCase): def setUp(self): diff --git a/ext/dapr-ext-langgraph/LICENSE b/ext/dapr-ext-langgraph/LICENSE new file mode 100644 index 000000000..be033a7fd --- /dev/null +++ b/ext/dapr-ext-langgraph/LICENSE @@ -0,0 +1,203 @@ +Apache License + Version 2.0, January 2004 + http://www.apache.org/licenses/ + + TERMS AND CONDITIONS FOR USE, REPRODUCTION, AND DISTRIBUTION + + 1. Definitions. + + "License" shall mean the terms and conditions for use, reproduction, + and distribution as defined by Sections 1 through 9 of this document. + + "Licensor" shall mean the copyright owner or entity authorized by + the copyright owner that is granting the License. + + "Legal Entity" shall mean the union of the acting entity and all + other entities that control, are controlled by, or are under common + control with that entity. For the purposes of this definition, + "control" means (i) the power, direct or indirect, to cause the + direction or management of such entity, whether by contract or + otherwise, or (ii) ownership of fifty percent (50%) or more of the + outstanding shares, or (iii) beneficial ownership of such entity. + + "You" (or "Your") shall mean an individual or Legal Entity + exercising permissions granted by this License. 
+ + "Source" form shall mean the preferred form for making modifications, + including but not limited to software source code, documentation + source, and configuration files. + + "Object" form shall mean any form resulting from mechanical + transformation or translation of a Source form, including but + not limited to compiled object code, generated documentation, + and conversions to other media types. + + "Work" shall mean the work of authorship, whether in Source or + Object form, made available under the License, as indicated by a + copyright notice that is included in or attached to the work + (an example is provided in the Appendix below). + + "Derivative Works" shall mean any work, whether in Source or Object + form, that is based on (or derived from) the Work and for which the + editorial revisions, annotations, elaborations, or other modifications + represent, as a whole, an original work of authorship. For the purposes + of this License, Derivative Works shall not include works that remain + separable from, or merely link (or bind by name) to the interfaces of, + the Work and Derivative Works thereof. + + "Contribution" shall mean any work of authorship, including + the original version of the Work and any modifications or additions + to that Work or Derivative Works thereof, that is intentionally + submitted to Licensor for inclusion in the Work by the copyright owner + or by an individual or Legal Entity authorized to submit on behalf of + the copyright owner. 
For the purposes of this definition, "submitted" + means any form of electronic, verbal, or written communication sent + to the Licensor or its representatives, including but not limited to + communication on electronic mailing lists, source code control systems, + and issue tracking systems that are managed by, or on behalf of, the + Licensor for the purpose of discussing and improving the Work, but + excluding communication that is conspicuously marked or otherwise + designated in writing by the copyright owner as "Not a Contribution." + + "Contributor" shall mean Licensor and any individual or Legal Entity + on behalf of whom a Contribution has been received by Licensor and + subsequently incorporated within the Work. + + 2. Grant of Copyright License. Subject to the terms and conditions of + this License, each Contributor hereby grants to You a perpetual, + worldwide, non-exclusive, no-charge, royalty-free, irrevocable + copyright license to reproduce, prepare Derivative Works of, + publicly display, publicly perform, sublicense, and distribute the + Work and such Derivative Works in Source or Object form. + + 3. Grant of Patent License. Subject to the terms and conditions of + this License, each Contributor hereby grants to You a perpetual, + worldwide, non-exclusive, no-charge, royalty-free, irrevocable + (except as stated in this section) patent license to make, have made, + use, offer to sell, sell, import, and otherwise transfer the Work, + where such license applies only to those patent claims licensable + by such Contributor that are necessarily infringed by their + Contribution(s) alone or by combination of their Contribution(s) + with the Work to which such Contribution(s) was submitted. 
If You + institute patent litigation against any entity (including a + cross-claim or counterclaim in a lawsuit) alleging that the Work + or a Contribution incorporated within the Work constitutes direct + or contributory patent infringement, then any patent licenses + granted to You under this License for that Work shall terminate + as of the date such litigation is filed. + + 4. Redistribution. You may reproduce and distribute copies of the + Work or Derivative Works thereof in any medium, with or without + modifications, and in Source or Object form, provided that You + meet the following conditions: + + (a) You must give any other recipients of the Work or + Derivative Works a copy of this License; and + + (b) You must cause any modified files to carry prominent notices + stating that You changed the files; and + + (c) You must retain, in the Source form of any Derivative Works + that You distribute, all copyright, patent, trademark, and + attribution notices from the Source form of the Work, + excluding those notices that do not pertain to any part of + the Derivative Works; and + + (d) If the Work includes a "NOTICE" text file as part of its + distribution, then any Derivative Works that You distribute must + include a readable copy of the attribution notices contained + within such NOTICE file, excluding those notices that do not + pertain to any part of the Derivative Works, in at least one + of the following places: within a NOTICE text file distributed + as part of the Derivative Works; within the Source form or + documentation, if provided along with the Derivative Works; or, + within a display generated by the Derivative Works, if and + wherever such third-party notices normally appear. The contents + of the NOTICE file are for informational purposes only and + do not modify the License. 
You may add Your own attribution + notices within Derivative Works that You distribute, alongside + or as an addendum to the NOTICE text from the Work, provided + that such additional attribution notices cannot be construed + as modifying the License. + + You may add Your own copyright statement to Your modifications and + may provide additional or different license terms and conditions + for use, reproduction, or distribution of Your modifications, or + for any such Derivative Works as a whole, provided Your use, + reproduction, and distribution of the Work otherwise complies with + the conditions stated in this License. + + 5. Submission of Contributions. Unless You explicitly state otherwise, + any Contribution intentionally submitted for inclusion in the Work + by You to the Licensor shall be under the terms and conditions of + this License, without any additional terms or conditions. + Notwithstanding the above, nothing herein shall supersede or modify + the terms of any separate license agreement you may have executed + with Licensor regarding such Contributions. + + 6. Trademarks. This License does not grant permission to use the trade + names, trademarks, service marks, or product names of the Licensor, + except as required for reasonable and customary use in describing the + origin of the Work and reproducing the content of the NOTICE file. + + 7. Disclaimer of Warranty. Unless required by applicable law or + agreed to in writing, Licensor provides the Work (and each + Contributor provides its Contributions) on an "AS IS" BASIS, + WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or + implied, including, without limitation, any warranties or conditions + of TITLE, NON-INFRINGEMENT, MERCHANTABILITY, or FITNESS FOR A + PARTICULAR PURPOSE. You are solely responsible for determining the + appropriateness of using or redistributing the Work and assume any + risks associated with Your exercise of permissions under this License. + + 8. 
Limitation of Liability. In no event and under no legal theory, + whether in tort (including negligence), contract, or otherwise, + unless required by applicable law (such as deliberate and grossly + negligent acts) or agreed to in writing, shall any Contributor be + liable to You for damages, including any direct, indirect, special, + incidental, or consequential damages of any character arising as a + result of this License or out of the use or inability to use the + Work (including but not limited to damages for loss of goodwill, + work stoppage, computer failure or malfunction, or any and all + other commercial damages or losses), even if such Contributor + has been advised of the possibility of such damages. + + 9. Accepting Warranty or Additional Liability. While redistributing + the Work or Derivative Works thereof, You may choose to offer, + and charge a fee for, acceptance of support, warranty, indemnity, + or other liability obligations and/or rights consistent with this + License. However, in accepting such obligations, You may act only + on Your own behalf and on Your sole responsibility, not on behalf + of any other Contributor, and only if You agree to indemnify, + defend, and hold each Contributor harmless for any liability + incurred by, or claims asserted against, such Contributor by reason + of your accepting any such warranty or additional liability. + + END OF TERMS AND CONDITIONS + + APPENDIX: How to apply the Apache License to your work. + + To apply the Apache License to your work, attach the following + boilerplate notice, with the fields enclosed by brackets "[]" + replaced with your own identifying information. (Don't include + the brackets!) The text should be enclosed in the appropriate + comment syntax for the file format. We also recommend that a + file or class name and description of purpose be included on the + same "printed page" as the copyright notice for easier + identification within third-party archives. 
+ + Copyright 2021 The Dapr Authors. + + and others that have contributed code to the public domain. + + Licensed under the Apache License, Version 2.0 (the "License"); + you may not use this file except in compliance with the License. + You may obtain a copy of the License at + + http://www.apache.org/licenses/LICENSE-2.0 + + Unless required by applicable law or agreed to in writing, software + distributed under the License is distributed on an "AS IS" BASIS, + WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. + See the License for the specific language governing permissions and + limitations under the License. \ No newline at end of file diff --git a/ext/dapr-ext-langgraph/README.rst b/ext/dapr-ext-langgraph/README.rst new file mode 100644 index 000000000..85c101a65 --- /dev/null +++ b/ext/dapr-ext-langgraph/README.rst @@ -0,0 +1,22 @@ +dapr-ext-langgraph extension +======================= + +|pypi| + +.. |pypi| image:: https://badge.fury.io/py/dapr-ext-langgraph.svg + :target: https://pypi.org/project/dapr-ext-langgraph/ + +This is the Dapr Checkpointer extension for LangGraph + +Installation +------------ + +:: + + pip install dapr-ext-langgraph + +References +---------- + +* `Dapr `_ +* `Dapr Python-SDK `_ diff --git a/ext/dapr-ext-langgraph/dapr/ext/langgraph/__init__.py b/ext/dapr-ext-langgraph/dapr/ext/langgraph/__init__.py new file mode 100644 index 000000000..4f2d3cf8b --- /dev/null +++ b/ext/dapr-ext-langgraph/dapr/ext/langgraph/__init__.py @@ -0,0 +1,21 @@ +# -*- coding: utf-8 -*- + +""" +Copyright 2025 The Dapr Authors +Licensed under the Apache License, Version 2.0 (the "License"); +you may not use this file except in compliance with the License. 
+You may obtain a copy of the License at + http://www.apache.org/licenses/LICENSE-2.0 +Unless required by applicable law or agreed to in writing, software +distributed under the License is distributed on an "AS IS" BASIS, +WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. +See the License for the specific language governing permissions and +limitations under the License. +""" + +# Import your main classes here +from dapr.ext.langgraph.dapr_checkpointer import DaprCheckpointer + +__all__ = [ + 'DaprCheckpointer', +] diff --git a/ext/dapr-ext-langgraph/dapr/ext/langgraph/dapr_checkpointer.py b/ext/dapr-ext-langgraph/dapr/ext/langgraph/dapr_checkpointer.py new file mode 100644 index 000000000..6d2614d90 --- /dev/null +++ b/ext/dapr-ext-langgraph/dapr/ext/langgraph/dapr_checkpointer.py @@ -0,0 +1,422 @@ +""" +Copyright 2026 The Dapr Authors +Licensed under the Apache License, Version 2.0 (the "License"); +you may not use this file except in compliance with the License. +You may obtain a copy of the License at + http://www.apache.org/licenses/LICENSE-2.0 +Unless required by applicable law or agreed to in writing, software +distributed under the License is distributed on an "AS IS" BASIS, +WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. +See the License for the specific language governing permissions and +limitations under the License. 
+""" + +import base64 +import json +import time +from typing import Any, Dict, List, Optional, Sequence, Tuple, cast + +import msgpack +from langchain_core.messages import AIMessage, HumanMessage, ToolMessage +from langchain_core.runnables import RunnableConfig +from ulid import ULID + +from dapr.clients import DaprClient +from langgraph.checkpoint.base import ( + WRITES_IDX_MAP, + BaseCheckpointSaver, + ChannelVersions, + Checkpoint, + CheckpointMetadata, + CheckpointTuple, +) +from langgraph.checkpoint.serde.jsonplus import JsonPlusSerializer + + +class DaprCheckpointer(BaseCheckpointSaver[Checkpoint]): + """ + Dapr-backed LangGraph Checkpointer that persists checkpoints to a Dapr state store. + Compatible with LangGraph >= 0.3.6 and LangChain Core >= 1.0.0. + """ + + REGISTRY_KEY = 'dapr_checkpoint_registry' + + def __init__(self, store_name: str, key_prefix: str): + self.store_name = store_name + self.key_prefix = key_prefix + self.serde = JsonPlusSerializer() + self.client = DaprClient() + self._key_cache: Dict[str, str] = {} + + # helper: construct Dapr key for a thread + def _get_key(self, config: RunnableConfig) -> str: + thread_id = None + + if isinstance(config, dict): + thread_id = config.get('configurable', {}).get('thread_id') + + if not thread_id: + thread_id = config.get('thread_id') + + if not thread_id: + thread_id = 'default' + + return f'{self.key_prefix}:{thread_id}' + + def put( + self, + config: RunnableConfig, + checkpoint: Checkpoint, + metadata: CheckpointMetadata, + new_versions: ChannelVersions, + ) -> RunnableConfig: + thread_id = config['configurable']['thread_id'] + checkpoint_ns = config['configurable'].get('checkpoint_ns', '') + config_checkpoint_id = config['configurable'].get('checkpoint_id', '') + thread_ts = config['configurable'].get('thread_ts', '') + + checkpoint_id = config_checkpoint_id or thread_ts or checkpoint.get('id', '') + + parent_checkpoint_id = None + if ( + checkpoint.get('id') + and config_checkpoint_id + and 
checkpoint.get('id') != config_checkpoint_id + ): + parent_checkpoint_id = config_checkpoint_id + checkpoint_id = checkpoint['id'] + + storage_safe_thread_id = self._safe_id(thread_id) + storage_safe_checkpoint_ns = self._safe_ns(checkpoint_ns) + storage_safe_checkpoint_id = self._safe_id(checkpoint_id) + + copy = checkpoint.copy() + next_config = { + 'configurable': { + 'thread_id': thread_id, + 'checkpoint_ns': checkpoint_ns, + 'checkpoint_id': checkpoint_id, + } + } + + checkpoint_ts = None + if checkpoint_id: + try: + ulid_obj = ULID.from_str(checkpoint_id) + checkpoint_ts = ulid_obj.timestamp + except Exception: + checkpoint_ts = time.time() * 1000 + + checkpoint_data = { + 'thread_id': storage_safe_thread_id, + 'checkpoint_ns': storage_safe_checkpoint_ns, + 'checkpoint_id': storage_safe_checkpoint_id, + 'parent_checkpoint_id': ( + '00000000-0000-0000-0000-000000000000' + if (parent_checkpoint_id if parent_checkpoint_id else '') == '' + else parent_checkpoint_id + ), + 'checkpoint_ts': checkpoint_ts, + 'checkpoint': self._dump_checkpoint(copy), + 'metadata': self._dump_metadata(metadata), + 'has_writes': False, + } + + # Guard case where metadata is None + metadata = metadata or {} + + if all(key in metadata for key in ['source', 'step']): + checkpoint_data['source'] = metadata['source'] + checkpoint_data['step'] = metadata['step'] + + checkpoint_key = self._make_safe_checkpoint_key( + thread_id=thread_id, checkpoint_ns=checkpoint_ns, checkpoint_id=checkpoint_id + ) + + _, data = self.serde.dumps_typed(checkpoint_data) + self.client.save_state(store_name=self.store_name, key=checkpoint_key, value=data) + + latest_pointer_key = ( + f'checkpoint_latest:{storage_safe_thread_id}:{storage_safe_checkpoint_ns}' + ) + + self.client.save_state( + store_name=self.store_name, key=latest_pointer_key, value=checkpoint_key + ) + + return next_config + + def put_writes( + self, + config: RunnableConfig, + writes: Sequence[Tuple[str, Any]], + task_id: str, + task_path: str = 
'', + ) -> None: + """Store intermediate writes linked to a checkpoint with integrated key registry.""" + thread_id = config['configurable']['thread_id'] + checkpoint_ns = config['configurable'].get('checkpoint_ns', '') + checkpoint_id = config['configurable'].get('checkpoint_id', '') + storage_safe_thread_id = (self._safe_id(thread_id),) + storage_safe_checkpoint_ns = self._safe_ns(checkpoint_ns) + + writes_objects: List[Dict[str, Any]] = [] + for idx, (channel, value) in enumerate(writes): + type_, blob = self.serde.dumps_typed(value) + write_obj: Dict[str, Any] = { + 'thread_id': storage_safe_thread_id, + 'checkpoint_ns': storage_safe_checkpoint_ns, + 'checkpoint_id': self._safe_id(checkpoint_id), + 'task_id': task_id, + 'task_path': task_path, + 'idx': WRITES_IDX_MAP.get(channel, idx), + 'channel': channel, + 'type': type_, + 'blob': self._encode_blob(blob), + } + writes_objects.append(write_obj) + + for write_obj in writes_objects: + idx_value = write_obj['idx'] + assert isinstance(idx_value, int) + key = self._make_safe_checkpoint_key( + thread_id=thread_id, checkpoint_ns=checkpoint_ns, checkpoint_id=checkpoint_id + ) + + self.client.save_state(store_name=self.store_name, key=key, value=json.dumps(write_obj)) + + checkpoint_key = self._make_safe_checkpoint_key( + thread_id=thread_id, checkpoint_ns=checkpoint_ns, checkpoint_id=checkpoint_id + ) + + latest_pointer_key = ( + f'checkpoint_latest:{storage_safe_thread_id}:{storage_safe_checkpoint_ns}' + ) + + self.client.save_state( + store_name=self.store_name, key=latest_pointer_key, value=checkpoint_key + ) + + def list(self, config: RunnableConfig) -> list[CheckpointTuple]: + reg_resp = self.client.get_state(store_name=self.store_name, key=self.REGISTRY_KEY) + if not reg_resp.data: + return [] + + keys = json.loads(reg_resp.data) + checkpoints: list[CheckpointTuple] = [] + + for key in keys: + cp_resp = self.client.get_state(store_name=self.store_name, key=key) + if not cp_resp.data: + continue + + wrapper = 
json.loads(cp_resp.data) + cp_data = wrapper.get('checkpoint', {}) + metadata = wrapper.get('metadata', {}) + cp = Checkpoint(**cp_data) + + checkpoints.append( + CheckpointTuple( + config=config, + checkpoint=cp, + parent_config=None, + metadata=metadata, + ) + ) + + return checkpoints + + def delete_thread(self, config: RunnableConfig) -> None: + key = self._get_key(config) + + self.client.delete_state(store_name=self.store_name, key=key) + + reg_resp = self.client.get_state(store_name=self.store_name, key=self.REGISTRY_KEY) + if not reg_resp.data: + return + + registry = json.loads(reg_resp.data) + + if key in registry: + registry.remove(key) + self.client.save_state( + store_name=self.store_name, + key=self.REGISTRY_KEY, + value=json.dumps(registry), + ) + + def get_tuple(self, config: RunnableConfig) -> Optional[CheckpointTuple]: + thread_id = config['configurable']['thread_id'] + checkpoint_ns = config['configurable'].get('checkpoint_ns', '') + + storage_safe_thread_id = self._safe_id(thread_id) + storage_safe_checkpoint_ns = self._safe_ns(checkpoint_ns) + + key = ':'.join( + [ + 'checkpoint_latest', + storage_safe_thread_id, + storage_safe_checkpoint_ns, + ] + ) + + # First we extract the latest checkpoint key + checkpoint_key = self.client.get_state(store_name=self.store_name, key=key) + if not checkpoint_key.data: + return None + + # To then derive the checkpoint data + checkpoint_data = self.client.get_state( + store_name=self.store_name, + # checkpoint_key.data can either be str or bytes + key=checkpoint_key.data.decode() + if isinstance(checkpoint_key.data, bytes) + else checkpoint_key.data, + ) + + if not checkpoint_data.data: + return None + + if isinstance(checkpoint_data.data, bytes): + unpacked = msgpack.unpackb(checkpoint_data.data) + + checkpoint_values = unpacked[b'checkpoint'] + channel_values = checkpoint_values[b'channel_values'] + + decoded_messages = [] + for item in channel_values[b'messages']: + if isinstance(item, msgpack.ExtType): + 
decoded_messages.append( + self._convert_checkpoint_message( + self._load_metadata(msgpack.unpackb(item.data)) + ) + ) + else: + decoded_messages.append(item) + + checkpoint_values[b'channel_values'][b'messages'] = decoded_messages + + mdata = unpacked.get(b'metadata') + if isinstance(mdata, bytes): + mdata = self._load_metadata(msgpack.unpackb(mdata)) + + metadata = { + k.decode() if isinstance(k, bytes) else k: v.decode() if isinstance(v, bytes) else v + for k, v in mdata.items() + } + + checkpoint_obj = Checkpoint( + **{ + key.decode() if isinstance(key, bytes) else key: value + for key, value in checkpoint_values.items() + } + ) + + checkpoint = self._decode_bytes(checkpoint_obj) + elif isinstance(checkpoint_data.data, str): + unpacked = json.loads(checkpoint_data.data) + checkpoint = unpacked.get('checkpoint', None) + metadata = unpacked.get('metadata', None) + + if not metadata or not checkpoint: + return None + else: + return None + + return CheckpointTuple( + config=config, + checkpoint=checkpoint, + metadata=metadata, + parent_config=None, + pending_writes=[], + ) + + def _safe_id(self, id) -> str: + return '00000000-0000-0000-0000-000000000000' if id == '' else id + + def _safe_ns(self, ns) -> str: + return '__empty__' if ns == '' else ns + + def _convert_checkpoint_message(self, msg_item): + _, _, data_dict, _ = msg_item + data_dict = self._decode_bytes(data_dict) + + msg_type = data_dict.get('type') + + if msg_type == 'human': + return HumanMessage(**data_dict) + elif msg_type == 'ai': + return AIMessage(**data_dict) + elif msg_type == 'tool': + return ToolMessage(**data_dict) + else: + raise ValueError(f'Unknown message type: {msg_type}') + + def _decode_bytes(self, obj): + if isinstance(obj, bytes): + try: + s = obj.decode() + # Convert to int if it's a number, the unpacked channel_version holds \xa1 which unpacks as strings + # LangGraph needs Ints for '>' comparison + if s.isdigit(): + return int(s) + return s + except Exception: + return obj + if 
isinstance(obj, dict): + return {self._decode_bytes(k): self._decode_bytes(v) for k, v in obj.items()} + if isinstance(obj, list): + return [self._decode_bytes(v) for v in obj] + if isinstance(obj, tuple): + return tuple(self._decode_bytes(v) for v in obj) + return obj + + def _encode_blob(self, blob: Any) -> str: + if isinstance(blob, bytes): + return base64.b64encode(blob).decode() + return blob + + def _dump_checkpoint(self, checkpoint: Checkpoint) -> dict[str, Any]: + type_, data = self.serde.dumps_typed(checkpoint) + + if type_ == 'json': + checkpoint_data = cast(dict, json.loads(data)) + else: + checkpoint_data = cast(dict, self.serde.loads_typed((type_, data))) + + if 'channel_values' in checkpoint_data: + for key, value in checkpoint_data['channel_values'].items(): + if isinstance(value, bytes): + checkpoint_data['channel_values'][key] = { + '__bytes__': self._encode_blob(value) + } + + if 'channel_versions' in checkpoint_data: + checkpoint_data['channel_versions'] = { + k: str(v) for k, v in checkpoint_data['channel_versions'].items() + } + + return {'type': type_, **checkpoint_data, 'pending_sends': []} + + def _load_metadata(self, metadata: dict[str, Any]) -> CheckpointMetadata: + type_str, data_bytes = self.serde.dumps_typed(metadata) + return self.serde.loads_typed((type_str, data_bytes)) + + def _dump_metadata(self, metadata: CheckpointMetadata) -> str: + _, serialized_bytes = self.serde.dumps_typed(metadata) + return serialized_bytes + + def _make_safe_checkpoint_key( + self, + thread_id: str, + checkpoint_ns: str, + checkpoint_id: str, + ) -> str: + return ':'.join( + [ + 'checkpoint', + thread_id, + checkpoint_ns, + checkpoint_id, + ] + ) diff --git a/dapr/conf/py.typed b/ext/dapr-ext-langgraph/dapr/ext/langgraph/py.typed similarity index 100% rename from dapr/conf/py.typed rename to ext/dapr-ext-langgraph/dapr/ext/langgraph/py.typed diff --git a/ext/dapr-ext-langgraph/dapr/ext/langgraph/version.py 
b/ext/dapr-ext-langgraph/dapr/ext/langgraph/version.py new file mode 100644 index 000000000..dae1485d2 --- /dev/null +++ b/ext/dapr-ext-langgraph/dapr/ext/langgraph/version.py @@ -0,0 +1,16 @@ +# -*- coding: utf-8 -*- + +""" +Copyright 2025 The Dapr Authors +Licensed under the Apache License, Version 2.0 (the "License"); +you may not use this file except in compliance with the License. +You may obtain a copy of the License at + http://www.apache.org/licenses/LICENSE-2.0 +Unless required by applicable law or agreed to in writing, software +distributed under the License is distributed on an "AS IS" BASIS, +WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. +See the License for the specific language governing permissions and +limitations under the License. +""" + +__version__ = '1.16.0.dev' diff --git a/ext/dapr-ext-langgraph/setup.cfg b/ext/dapr-ext-langgraph/setup.cfg new file mode 100644 index 000000000..dc9286d7c --- /dev/null +++ b/ext/dapr-ext-langgraph/setup.cfg @@ -0,0 +1,42 @@ +[metadata] +url = https://dapr.io/ +author = Dapr Authors +author_email = daprweb@microsoft.com +license = Apache +license_file = LICENSE +classifiers = + Development Status :: 5 - Production/Stable + Intended Audience :: Developers + License :: OSI Approved :: Apache Software License + Operating System :: OS Independent + Programming Language :: Python + Programming Language :: Python :: 3.10 + Programming Language :: Python :: 3.11 + Programming Language :: Python :: 3.12 + Programming Language :: Python :: 3.13 + Programming Language :: Python :: 3.14 +project_urls = + Documentation = https://github.com/dapr/docs + Source = https://github.com/dapr/python-sdk + +[options] +python_requires = >=3.10 +packages = find_namespace: +include_package_data = True +install_requires = + dapr >= 1.16.1rc2 + langgraph >= 0.3.6 + langchain >= 0.1.17 + python-ulid >= 3.0.0 + msgpack-python >= 0.4.5 + +[options.packages.find] +include = + dapr.* + +exclude = + tests + 
+[options.package_data] +dapr.ext.langgraph = + py.typed diff --git a/ext/dapr-ext-langgraph/setup.py b/ext/dapr-ext-langgraph/setup.py new file mode 100644 index 000000000..78c0daace --- /dev/null +++ b/ext/dapr-ext-langgraph/setup.py @@ -0,0 +1,66 @@ +# -*- coding: utf-8 -*- + +""" +Copyright 2025 The Dapr Authors +Licensed under the Apache License, Version 2.0 (the "License"); +you may not use this file except in compliance with the License. +You may obtain a copy of the License at + http://www.apache.org/licenses/LICENSE-2.0 +Unless required by applicable law or agreed to in writing, software +distributed under the License is distributed on an "AS IS" BASIS, +WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. +See the License for the specific language governing permissions and +limitations under the License. +""" + +import os + +from setuptools import setup + +# Load version in dapr package. +version_info = {} +with open('dapr/ext/langgraph/version.py') as fp: + exec(fp.read(), version_info) +__version__ = version_info['__version__'] + + +def is_release(): + return '.dev' not in __version__ + + +name = 'dapr-ext-langgraph' +version = __version__ +description = 'The official release of Dapr Python SDK LangGraph Extension.' +long_description = """ +This is the Dapr Checkpointer extension for LangGraph. + +Dapr is a portable, serverless, event-driven runtime that makes it easy for developers to +build resilient, stateless and stateful microservices that run on the cloud and edge and +embraces the diversity of languages and developer frameworks. + +Dapr codifies the best practices for building microservice applications into open, +independent, building blocks that enable you to build portable applications with the language +and framework of your choice. Each building block is independent and you can use one, some, +or all of them in your application. 
+""".lstrip() + +# Get build number from GITHUB_RUN_NUMBER environment variable +build_number = os.environ.get('GITHUB_RUN_NUMBER', '0') + +if not is_release(): + name += '-dev' + version = f'{__version__}{build_number}' + description = 'The developmental release for the Dapr Checkpointer extension for LangGraph' + long_description = ( + 'This is the developmental release for the Dapr Checkpointer extension for LangGraph' + ) + +print(f'package name: {name}, version: {version}', flush=True) + + +setup( + name=name, + version=version, + description=description, + long_description=long_description, +) diff --git a/ext/dapr-ext-langgraph/tests/__init__.py b/ext/dapr-ext-langgraph/tests/__init__.py new file mode 100644 index 000000000..ad87aedb7 --- /dev/null +++ b/ext/dapr-ext-langgraph/tests/__init__.py @@ -0,0 +1,14 @@ +# -*- coding: utf-8 -*- + +""" +Copyright 2025 The Dapr Authors +Licensed under the Apache License, Version 2.0 (the "License"); +you may not use this file except in compliance with the License. +You may obtain a copy of the License at + http://www.apache.org/licenses/LICENSE-2.0 +Unless required by applicable law or agreed to in writing, software +distributed under the License is distributed on an "AS IS" BASIS, +WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. +See the License for the specific language governing permissions and +limitations under the License. 
+""" diff --git a/ext/dapr-ext-langgraph/tests/test_checkpointer.py b/ext/dapr-ext-langgraph/tests/test_checkpointer.py new file mode 100644 index 000000000..fc51d9182 --- /dev/null +++ b/ext/dapr-ext-langgraph/tests/test_checkpointer.py @@ -0,0 +1,175 @@ +# -*- coding: utf-8 -*- + +import base64 +import json +import unittest +from datetime import datetime +from unittest import mock + +import msgpack +from dapr.ext.langgraph.dapr_checkpointer import DaprCheckpointer +from langgraph.checkpoint.base import Checkpoint + + +@mock.patch('dapr.ext.langgraph.dapr_checkpointer.DaprClient') +class DaprCheckpointerTest(unittest.TestCase): + def setUp(self): + self.store = 'statestore' + self.prefix = 'lg' + self.config = {'configurable': {'thread_id': 't1'}} + + self.checkpoint = Checkpoint( + v=1, + id='cp1', + ts=datetime.now().timestamp(), + channel_values={'a': 1}, + channel_versions={}, + versions_seen={}, + ) + + def test_get_tuple_returns_checkpoint(self, mock_client_cls): + mock_client = mock_client_cls.return_value + + wrapper = { + 'checkpoint': { + 'v': self.checkpoint['v'], + 'id': self.checkpoint['id'], + 'ts': self.checkpoint['ts'], + 'channel_values': self.checkpoint['channel_values'], + 'channel_versions': self.checkpoint['channel_versions'], + 'versions_seen': self.checkpoint['versions_seen'], + }, + 'metadata': {'step': 3}, + } + mock_client.get_state.return_value.data = json.dumps(wrapper) + + cp = DaprCheckpointer(self.store, self.prefix) + tup = cp.get_tuple(self.config) + + assert tup is not None + assert tup.checkpoint['id'] == 'cp1' + assert tup.metadata['step'] == 3 + + def test_get_tuple_none_when_missing(self, mock_client_cls): + mock_client = mock_client_cls.return_value + mock_client.get_state.return_value.data = None + + cp = DaprCheckpointer(self.store, self.prefix) + assert cp.get_tuple(self.config) is None + + def test_put_saves_checkpoint_and_registry(self, mock_client_cls): + mock_client = mock_client_cls.return_value + + 
mock_client.get_state.return_value.data = json.dumps([]) + + cp = DaprCheckpointer(self.store, self.prefix) + cp.put(self.config, self.checkpoint, {'step': 10}, None) + + first_call = mock_client.save_state.call_args_list[0] + first_call_kwargs = first_call.kwargs + assert first_call_kwargs['store_name'] == 'statestore' + assert first_call_kwargs['key'] == 'checkpoint:t1::cp1' + unpacked = msgpack.unpackb(first_call_kwargs['value']) # We're packing bytes + saved_payload = {} + for k, v in unpacked.items(): + k = k.decode() if isinstance(k, bytes) else k + if ( + k == 'checkpoint' or k == 'metadata' + ): # Need to convert b'' on checkpoint/metadata dict key/values + if k == 'metadata': + v = msgpack.unpackb(v) # Metadata value is packed + val = {} + for sk, sv in v.items(): + sk = sk.decode() if isinstance(sk, bytes) else sk + sv = sv.decode() if isinstance(sv, bytes) else sv + val[sk] = sv + else: + val = v.decode() if isinstance(v, bytes) else v + saved_payload[k] = val + assert saved_payload['metadata']['step'] == 10 + + second_call = mock_client.save_state.call_args_list[1] + second_call_kwargs = second_call.kwargs + assert second_call_kwargs['store_name'] == 'statestore' + assert ( + second_call_kwargs['value'] == 'checkpoint:t1::cp1' + ) # Here we're testing if the last checkpoint is the first_call above + + def test_put_writes_updates_channel_values(self, mock_client_cls): + mock_client = mock_client_cls.return_value + + wrapper = { + 'checkpoint': { + 'v': 1, + 'id': 'cp1', + 'ts': 1000, + 'channel_values': {'a': 10}, + 'channel_versions': {}, + 'versions_seen': {}, + }, + 'metadata': {}, + } + mock_client.get_state.return_value.data = json.dumps(wrapper) + + cp = DaprCheckpointer(self.store, self.prefix) + cp.put_writes(self.config, writes=[('a', 99)], task_id='task1') + + # save_state is called with updated checkpoint + call = mock_client.save_state.call_args_list[0] + # As we're using named input params we've got to fetch through kwargs + kwargs = 
call.kwargs + saved = json.loads(kwargs['value']) + # As the value obj is base64 encoded in 'blob' we got to unpack it + assert msgpack.unpackb(base64.b64decode(saved['blob'])) == 99 + + def test_list_returns_all_checkpoints(self, mock_client_cls): + mock_client = mock_client_cls.return_value + + registry = ['lg:t1'] + cp_wrapper = { + 'checkpoint': { + 'v': 1, + 'id': 'cp1', + 'ts': 1000, + 'channel_values': {'x': 1}, + 'channel_versions': {}, + 'versions_seen': {}, + }, + 'metadata': {'step': 5}, + } + + mock_client.get_state.side_effect = [ + mock.Mock(data=json.dumps(registry)), + mock.Mock(data=json.dumps(cp_wrapper)), + ] + + cp = DaprCheckpointer(self.store, self.prefix) + lst = cp.list(self.config) + + assert len(lst) == 1 + assert lst[0].checkpoint['id'] == 'cp1' + assert lst[0].metadata['step'] == 5 + + def test_delete_thread_removes_key_and_updates_registry(self, mock_client_cls): + mock_client = mock_client_cls.return_value + + registry = ['lg:t1'] + mock_client.get_state.return_value.data = json.dumps(registry) + + cp = DaprCheckpointer(self.store, self.prefix) + cp.delete_thread(self.config) + + mock_client.delete_state.assert_called_once_with( + store_name='statestore', + key='lg:t1', + ) + + mock_client.save_state.assert_called_with( + store_name='statestore', + key=DaprCheckpointer.REGISTRY_KEY, + value=json.dumps([]), + ) + + +if __name__ == '__main__': + unittest.main() diff --git a/ext/dapr-ext-strands/LICENSE b/ext/dapr-ext-strands/LICENSE new file mode 100644 index 000000000..be033a7fd --- /dev/null +++ b/ext/dapr-ext-strands/LICENSE @@ -0,0 +1,203 @@ +Apache License + Version 2.0, January 2004 + http://www.apache.org/licenses/ + + TERMS AND CONDITIONS FOR USE, REPRODUCTION, AND DISTRIBUTION + + 1. Definitions. + + "License" shall mean the terms and conditions for use, reproduction, + and distribution as defined by Sections 1 through 9 of this document. 
+ + "Licensor" shall mean the copyright owner or entity authorized by + the copyright owner that is granting the License. + + "Legal Entity" shall mean the union of the acting entity and all + other entities that control, are controlled by, or are under common + control with that entity. For the purposes of this definition, + "control" means (i) the power, direct or indirect, to cause the + direction or management of such entity, whether by contract or + otherwise, or (ii) ownership of fifty percent (50%) or more of the + outstanding shares, or (iii) beneficial ownership of such entity. + + "You" (or "Your") shall mean an individual or Legal Entity + exercising permissions granted by this License. + + "Source" form shall mean the preferred form for making modifications, + including but not limited to software source code, documentation + source, and configuration files. + + "Object" form shall mean any form resulting from mechanical + transformation or translation of a Source form, including but + not limited to compiled object code, generated documentation, + and conversions to other media types. + + "Work" shall mean the work of authorship, whether in Source or + Object form, made available under the License, as indicated by a + copyright notice that is included in or attached to the work + (an example is provided in the Appendix below). + + "Derivative Works" shall mean any work, whether in Source or Object + form, that is based on (or derived from) the Work and for which the + editorial revisions, annotations, elaborations, or other modifications + represent, as a whole, an original work of authorship. For the purposes + of this License, Derivative Works shall not include works that remain + separable from, or merely link (or bind by name) to the interfaces of, + the Work and Derivative Works thereof. 
+ + "Contribution" shall mean any work of authorship, including + the original version of the Work and any modifications or additions + to that Work or Derivative Works thereof, that is intentionally + submitted to Licensor for inclusion in the Work by the copyright owner + or by an individual or Legal Entity authorized to submit on behalf of + the copyright owner. For the purposes of this definition, "submitted" + means any form of electronic, verbal, or written communication sent + to the Licensor or its representatives, including but not limited to + communication on electronic mailing lists, source code control systems, + and issue tracking systems that are managed by, or on behalf of, the + Licensor for the purpose of discussing and improving the Work, but + excluding communication that is conspicuously marked or otherwise + designated in writing by the copyright owner as "Not a Contribution." + + "Contributor" shall mean Licensor and any individual or Legal Entity + on behalf of whom a Contribution has been received by Licensor and + subsequently incorporated within the Work. + + 2. Grant of Copyright License. Subject to the terms and conditions of + this License, each Contributor hereby grants to You a perpetual, + worldwide, non-exclusive, no-charge, royalty-free, irrevocable + copyright license to reproduce, prepare Derivative Works of, + publicly display, publicly perform, sublicense, and distribute the + Work and such Derivative Works in Source or Object form. + + 3. Grant of Patent License. 
Subject to the terms and conditions of + this License, each Contributor hereby grants to You a perpetual, + worldwide, non-exclusive, no-charge, royalty-free, irrevocable + (except as stated in this section) patent license to make, have made, + use, offer to sell, sell, import, and otherwise transfer the Work, + where such license applies only to those patent claims licensable + by such Contributor that are necessarily infringed by their + Contribution(s) alone or by combination of their Contribution(s) + with the Work to which such Contribution(s) was submitted. If You + institute patent litigation against any entity (including a + cross-claim or counterclaim in a lawsuit) alleging that the Work + or a Contribution incorporated within the Work constitutes direct + or contributory patent infringement, then any patent licenses + granted to You under this License for that Work shall terminate + as of the date such litigation is filed. + + 4. Redistribution. You may reproduce and distribute copies of the + Work or Derivative Works thereof in any medium, with or without + modifications, and in Source or Object form, provided that You + meet the following conditions: + + (a) You must give any other recipients of the Work or + Derivative Works a copy of this License; and + + (b) You must cause any modified files to carry prominent notices + stating that You changed the files; and + + (c) You must retain, in the Source form of any Derivative Works + that You distribute, all copyright, patent, trademark, and + attribution notices from the Source form of the Work, + excluding those notices that do not pertain to any part of + the Derivative Works; and + + (d) If the Work includes a "NOTICE" text file as part of its + distribution, then any Derivative Works that You distribute must + include a readable copy of the attribution notices contained + within such NOTICE file, excluding those notices that do not + pertain to any part of the Derivative Works, in at least one + of 
the following places: within a NOTICE text file distributed + as part of the Derivative Works; within the Source form or + documentation, if provided along with the Derivative Works; or, + within a display generated by the Derivative Works, if and + wherever such third-party notices normally appear. The contents + of the NOTICE file are for informational purposes only and + do not modify the License. You may add Your own attribution + notices within Derivative Works that You distribute, alongside + or as an addendum to the NOTICE text from the Work, provided + that such additional attribution notices cannot be construed + as modifying the License. + + You may add Your own copyright statement to Your modifications and + may provide additional or different license terms and conditions + for use, reproduction, or distribution of Your modifications, or + for any such Derivative Works as a whole, provided Your use, + reproduction, and distribution of the Work otherwise complies with + the conditions stated in this License. + + 5. Submission of Contributions. Unless You explicitly state otherwise, + any Contribution intentionally submitted for inclusion in the Work + by You to the Licensor shall be under the terms and conditions of + this License, without any additional terms or conditions. + Notwithstanding the above, nothing herein shall supersede or modify + the terms of any separate license agreement you may have executed + with Licensor regarding such Contributions. + + 6. Trademarks. This License does not grant permission to use the trade + names, trademarks, service marks, or product names of the Licensor, + except as required for reasonable and customary use in describing the + origin of the Work and reproducing the content of the NOTICE file. + + 7. Disclaimer of Warranty. 
Unless required by applicable law or + agreed to in writing, Licensor provides the Work (and each + Contributor provides its Contributions) on an "AS IS" BASIS, + WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or + implied, including, without limitation, any warranties or conditions + of TITLE, NON-INFRINGEMENT, MERCHANTABILITY, or FITNESS FOR A + PARTICULAR PURPOSE. You are solely responsible for determining the + appropriateness of using or redistributing the Work and assume any + risks associated with Your exercise of permissions under this License. + + 8. Limitation of Liability. In no event and under no legal theory, + whether in tort (including negligence), contract, or otherwise, + unless required by applicable law (such as deliberate and grossly + negligent acts) or agreed to in writing, shall any Contributor be + liable to You for damages, including any direct, indirect, special, + incidental, or consequential damages of any character arising as a + result of this License or out of the use or inability to use the + Work (including but not limited to damages for loss of goodwill, + work stoppage, computer failure or malfunction, or any and all + other commercial damages or losses), even if such Contributor + has been advised of the possibility of such damages. + + 9. Accepting Warranty or Additional Liability. While redistributing + the Work or Derivative Works thereof, You may choose to offer, + and charge a fee for, acceptance of support, warranty, indemnity, + or other liability obligations and/or rights consistent with this + License. However, in accepting such obligations, You may act only + on Your own behalf and on Your sole responsibility, not on behalf + of any other Contributor, and only if You agree to indemnify, + defend, and hold each Contributor harmless for any liability + incurred by, or claims asserted against, such Contributor by reason + of your accepting any such warranty or additional liability. 
+ + END OF TERMS AND CONDITIONS + + APPENDIX: How to apply the Apache License to your work. + + To apply the Apache License to your work, attach the following + boilerplate notice, with the fields enclosed by brackets "[]" + replaced with your own identifying information. (Don't include + the brackets!) The text should be enclosed in the appropriate + comment syntax for the file format. We also recommend that a + file or class name and description of purpose be included on the + same "printed page" as the copyright notice for easier + identification within third-party archives. + + Copyright 2021 The Dapr Authors. + + and others that have contributed code to the public domain. + + Licensed under the Apache License, Version 2.0 (the "License"); + you may not use this file except in compliance with the License. + You may obtain a copy of the License at + + http://www.apache.org/licenses/LICENSE-2.0 + + Unless required by applicable law or agreed to in writing, software + distributed under the License is distributed on an "AS IS" BASIS, + WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. + See the License for the specific language governing permissions and + limitations under the License. \ No newline at end of file diff --git a/ext/dapr-ext-strands/README.rst b/ext/dapr-ext-strands/README.rst new file mode 100644 index 000000000..882ae13b6 --- /dev/null +++ b/ext/dapr-ext-strands/README.rst @@ -0,0 +1,22 @@ +dapr-ext-strands extension +======================= + +|pypi| + +.. 
|pypi| image:: https://badge.fury.io/py/dapr-ext-strands.svg
+   :target: https://pypi.org/project/dapr-ext-strands/
+
+This is the Dapr Session Manager for Strands Agents
+
+Installation
+------------
+
+::
+
+    pip install dapr-ext-strands
+
+References
+----------
+
+* `Dapr <https://dapr.io/>`_
+* `Dapr Python-SDK <https://github.com/dapr/python-sdk>`_
diff --git a/ext/dapr-ext-strands/dapr/ext/strands/__init__.py b/ext/dapr-ext-strands/dapr/ext/strands/__init__.py
new file mode 100644
index 000000000..52ab2ee86
--- /dev/null
+++ b/ext/dapr-ext-strands/dapr/ext/strands/__init__.py
@@ -0,0 +1,21 @@
+# -*- coding: utf-8 -*-
+
+"""
+Copyright 2025 The Dapr Authors
+Licensed under the Apache License, Version 2.0 (the "License");
+you may not use this file except in compliance with the License.
+You may obtain a copy of the License at
+    http://www.apache.org/licenses/LICENSE-2.0
+Unless required by applicable law or agreed to in writing, software
+distributed under the License is distributed on an "AS IS" BASIS,
+WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+See the License for the specific language governing permissions and
+limitations under the License.
+"""
+
+# Import your main classes here
+from dapr.ext.strands.dapr_session_manager import DaprSessionManager
+
+__all__ = [
+    'DaprSessionManager',
+]
diff --git a/ext/dapr-ext-strands/dapr/ext/strands/dapr_session_manager.py b/ext/dapr-ext-strands/dapr/ext/strands/dapr_session_manager.py
new file mode 100644
index 000000000..c9a98ebdf
--- /dev/null
+++ b/ext/dapr-ext-strands/dapr/ext/strands/dapr_session_manager.py
@@ -0,0 +1,551 @@
+"""
+Copyright 2025 The Dapr Authors
+Licensed under the Apache License, Version 2.0 (the "License");
+you may not use this file except in compliance with the License.
+You may obtain a copy of the License at + http://www.apache.org/licenses/LICENSE-2.0 +Unless required by applicable law or agreed to in writing, software +distributed under the License is distributed on an "AS IS" BASIS, +WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. +See the License for the specific language governing permissions and +limitations under the License. +""" + +import json +import logging +from typing import Any, Dict, List, Literal, Optional, cast + +from dapr.clients import DaprClient +from dapr.clients.grpc._state import Consistency, StateOptions +from strands import _identifier +from strands.session.repository_session_manager import RepositorySessionManager +from strands.session.session_repository import SessionRepository +from strands.types.exceptions import SessionException +from strands.types.session import Session, SessionAgent, SessionMessage + +logger = logging.getLogger(__name__) + +# Type-safe consistency constants +ConsistencyLevel = Literal['eventual', 'strong'] +DAPR_CONSISTENCY_EVENTUAL: ConsistencyLevel = 'eventual' +DAPR_CONSISTENCY_STRONG: ConsistencyLevel = 'strong' + + +class DaprSessionManager(RepositorySessionManager, SessionRepository): + """Dapr state store session manager for distributed storage. + + Stores session data in Dapr state stores (Redis, PostgreSQL, MongoDB, Cosmos DB, etc.) + with support for TTL and consistency levels. + + Key structure: + - `{session_id}:session` - Session metadata + - `{session_id}:agents:{agent_id}` - Agent metadata + - `{session_id}:messages:{agent_id}` - Message list (JSON array) + """ + + def __init__( + self, + session_id: str, + state_store_name: str, + dapr_client: DaprClient, + ttl: Optional[int] = None, + consistency: ConsistencyLevel = DAPR_CONSISTENCY_EVENTUAL, + ): + """Initialize DaprSessionManager. + + Args: + session_id: ID for the session. + ID is not allowed to contain path separators (e.g., a/b). 
+ state_store_name: Name of the Dapr state store component. + dapr_client: DaprClient instance for state operations. + ttl: Optional time-to-live in seconds for state items. + consistency: Consistency level for state operations ("eventual" or "strong"). + """ + self._state_store_name = state_store_name + self._dapr_client = dapr_client + self._ttl = ttl + self._consistency = consistency + self._owns_client = False + + super().__init__(session_id=session_id, session_repository=self) + + @classmethod + def from_address( + cls, + session_id: str, + state_store_name: str, + dapr_address: str = 'localhost:50001', + ) -> 'DaprSessionManager': + """Create DaprSessionManager from Dapr address. + + Args: + session_id: ID for the session. + state_store_name: Name of the Dapr state store component. + dapr_address: Dapr gRPC endpoint (default: localhost:50001). + + Returns: + DaprSessionManager instance with owned client. + """ + dapr_client = DaprClient(address=dapr_address) + manager = cls(session_id, state_store_name=state_store_name, dapr_client=dapr_client) + manager._owns_client = True + return manager + + def _get_session_key(self, session_id: str) -> str: + """Get session state key. + + Args: + session_id: ID for the session. + + Returns: + State store key for the session. + + Raises: + ValueError: If session id contains a path separator. + """ + session_id = _identifier.validate(session_id, _identifier.Identifier.SESSION) + return f'{session_id}:session' + + def _get_agent_key(self, session_id: str, agent_id: str) -> str: + """Get agent state key. + + Args: + session_id: ID for the session. + agent_id: ID for the agent. + + Returns: + State store key for the agent. + + Raises: + ValueError: If session id or agent id contains a path separator. 
+ """ + session_id = _identifier.validate(session_id, _identifier.Identifier.SESSION) + agent_id = _identifier.validate(agent_id, _identifier.Identifier.AGENT) + return f'{session_id}:agents:{agent_id}' + + def _get_messages_key(self, session_id: str, agent_id: str) -> str: + """Get messages list state key. + + Args: + session_id: ID for the session. + agent_id: ID for the agent. + + Returns: + State store key for the messages list. + + Raises: + ValueError: If session id or agent id contains a path separator. + """ + session_id = _identifier.validate(session_id, _identifier.Identifier.SESSION) + agent_id = _identifier.validate(agent_id, _identifier.Identifier.AGENT) + return f'{session_id}:messages:{agent_id}' + + def _get_manifest_key(self, session_id: str) -> str: + """Get session manifest key (tracks agent_ids for deletion).""" + session_id = _identifier.validate(session_id, _identifier.Identifier.SESSION) + return f'{session_id}:manifest' + + def _get_read_metadata(self) -> Dict[str, str]: + """Get metadata for read operations (consistency). + + Returns: + Metadata dictionary for state reads. + """ + metadata: Dict[str, str] = {} + if self._consistency: + metadata['consistency'] = self._consistency + return metadata + + def _get_write_metadata(self) -> Dict[str, str]: + """Get metadata for write operations (TTL). + + Returns: + Metadata dictionary for state writes. + """ + metadata: Dict[str, str] = {} + if self._ttl is not None: + metadata['ttlInSeconds'] = str(self._ttl) + return metadata + + def _get_state_options(self) -> Optional[StateOptions]: + """Get state options for write/delete operations (consistency). + + Returns: + StateOptions for consistency or None. 
+ """ + if self._consistency == DAPR_CONSISTENCY_STRONG: + return StateOptions(consistency=Consistency.strong) + elif self._consistency == DAPR_CONSISTENCY_EVENTUAL: + return StateOptions(consistency=Consistency.eventual) + return None + + def _read_state(self, key: str) -> Optional[Dict[str, Any]]: + """Read and parse JSON state from Dapr. + + Args: + key: State store key. + + Returns: + Parsed JSON dictionary or None if not found. + + Raises: + SessionException: If state is corrupted or read fails. + """ + try: + response = self._dapr_client.get_state( + store_name=self._state_store_name, + key=key, + state_metadata=self._get_read_metadata(), + ) + + if not response.data: + return None + + content = response.data.decode('utf-8') + return cast(Dict[str, Any], json.loads(content)) + + except json.JSONDecodeError as e: + raise SessionException(f'Invalid JSON in state key {key}: {e}') from e + except Exception as e: + raise SessionException(f'Failed to read state key {key}: {e}') from e + + def _write_state(self, key: str, data: Dict[str, Any]) -> None: + """Write JSON state to Dapr. + + Args: + key: State store key. + data: Dictionary to serialize and store. + + Raises: + SessionException: If write fails. + """ + try: + content = json.dumps(data, ensure_ascii=False) + self._dapr_client.save_state( + store_name=self._state_store_name, + key=key, + value=content, + state_metadata=self._get_write_metadata(), + options=self._get_state_options(), + ) + except Exception as e: + raise SessionException(f'Failed to write state key {key}: {e}') from e + + def _delete_state(self, key: str) -> None: + """Delete state from Dapr. + + Args: + key: State store key. + + Raises: + SessionException: If delete fails. 
+ """ + try: + self._dapr_client.delete_state( + store_name=self._state_store_name, + key=key, + options=self._get_state_options(), + ) + except Exception as e: + raise SessionException(f'Failed to delete state key {key}: {e}') from e + + def create_session(self, session: Session) -> Session: + """Create a new session. + + Args: + session: Session to create. + + Returns: + Created session. + + Raises: + SessionException: If session already exists or creation fails. + """ + session_key = self._get_session_key(session.session_id) + + # Check if session already exists + existing = self.read_session(session.session_id) + if existing is not None: + raise SessionException(f'Session {session.session_id} already exists') + + # Write session data + session_dict = session.to_dict() + self._write_state(session_key, session_dict) + return session + + def read_session(self, session_id: str) -> Optional[Session]: + """Read session data. + + Args: + session_id: ID of the session to read. + + Returns: + Session if found, None otherwise. + + Raises: + SessionException: If read fails. + """ + session_key = self._get_session_key(session_id) + + session_data = self._read_state(session_key) + if session_data is None: + return None + + return Session.from_dict(session_data) + + def delete_session(self, session_id: str) -> None: + """Delete session and all associated data. + + Uses a session manifest to discover agent IDs for cleanup. 
+ """ + session_key = self._get_session_key(session_id) + manifest_key = self._get_manifest_key(session_id) + + # Read manifest (may be missing if no agents created) + manifest = self._read_state(manifest_key) + agent_ids: list[str] = manifest.get('agents', []) if manifest else [] + + # Delete agent and message keys + for agent_id in agent_ids: + agent_key = self._get_agent_key(session_id, agent_id) + messages_key = self._get_messages_key(session_id, agent_id) + self._delete_state(agent_key) + self._delete_state(messages_key) + + # Delete manifest and session + self._delete_state(manifest_key) + self._delete_state(session_key) + + def create_agent(self, session_id: str, session_agent: SessionAgent) -> None: + """Create a new agent in the session. + + Args: + session_id: ID of the session. + session_agent: Agent to create. + + Raises: + SessionException: If creation fails. + """ + agent_key = self._get_agent_key(session_id, session_agent.agent_id) + agent_dict = session_agent.to_dict() + + self._write_state(agent_key, agent_dict) + + # Initialize empty messages list + messages_key = self._get_messages_key(session_id, session_agent.agent_id) + self._write_state(messages_key, {'messages': []}) + + # Update manifest with this agent + manifest_key = self._get_manifest_key(session_id) + manifest = self._read_state(manifest_key) or {'agents': []} + if session_agent.agent_id not in manifest['agents']: + manifest['agents'].append(session_agent.agent_id) + self._write_state(manifest_key, manifest) + + def read_agent(self, session_id: str, agent_id: str) -> Optional[SessionAgent]: + """Read agent data. + + Args: + session_id: ID of the session. + agent_id: ID of the agent. + + Returns: + SessionAgent if found, None otherwise. + + Raises: + SessionException: If read fails. 
+ """ + agent_key = self._get_agent_key(session_id, agent_id) + + agent_data = self._read_state(agent_key) + if agent_data is None: + return None + + return SessionAgent.from_dict(agent_data) + + def update_agent(self, session_id: str, session_agent: SessionAgent) -> None: + """Update agent data. + + Args: + session_id: ID of the session. + session_agent: Agent to update. + + Raises: + SessionException: If agent doesn't exist or update fails. + """ + previous_agent = self.read_agent(session_id=session_id, agent_id=session_agent.agent_id) + if previous_agent is None: + raise SessionException( + f'Agent {session_agent.agent_id} in session {session_id} does not exist' + ) + + # Preserve creation timestamp + session_agent.created_at = previous_agent.created_at + + agent_key = self._get_agent_key(session_id, session_agent.agent_id) + + self._write_state(agent_key, session_agent.to_dict()) + + def create_message( + self, + session_id: str, + agent_id: str, + session_message: SessionMessage, + ) -> None: + """Create a new message for the agent. + + Args: + session_id: ID of the session. + agent_id: ID of the agent. + session_message: Message to create. + + Raises: + SessionException: If creation fails. + """ + messages_key = self._get_messages_key(session_id, agent_id) + + # Read existing messages + messages_data = self._read_state(messages_key) + if messages_data is None: + messages_list = [] + else: + messages_list = messages_data.get('messages', []) + if not isinstance(messages_list, list): + messages_list = [] + + # Append new message + messages_list.append(session_message.to_dict()) + + # Write back + self._write_state(messages_key, {'messages': messages_list}) + + def read_message( + self, session_id: str, agent_id: str, message_id: int + ) -> Optional[SessionMessage]: + """Read message data. + + Args: + session_id: ID of the session. + agent_id: ID of the agent. + message_id: Index of the message. + + Returns: + SessionMessage if found, None otherwise. 
+ + Raises: + ValueError: If message_id is not an integer. + SessionException: If read fails. + """ + if not isinstance(message_id, int): + raise ValueError(f'message_id=<{message_id}> | message id must be an integer') + + messages_key = self._get_messages_key(session_id, agent_id) + + messages_data = self._read_state(messages_key) + if messages_data is None: + return None + + messages_list = messages_data.get('messages', []) + if not isinstance(messages_list, list): + messages_list = [] + + # Find message by ID + for msg_dict in messages_list: + if msg_dict.get('message_id') == message_id: + return SessionMessage.from_dict(msg_dict) + + return None + + def update_message( + self, session_id: str, agent_id: str, session_message: SessionMessage + ) -> None: + """Update message data. + + Args: + session_id: ID of the session. + agent_id: ID of the agent. + session_message: Message to update. + + Raises: + SessionException: If message doesn't exist or update fails. + """ + previous_message = self.read_message( + session_id=session_id, agent_id=agent_id, message_id=session_message.message_id + ) + if previous_message is None: + raise SessionException(f'Message {session_message.message_id} does not exist') + + # Preserve creation timestamp + session_message.created_at = previous_message.created_at + + messages_key = self._get_messages_key(session_id, agent_id) + + # Read existing messages + messages_data = self._read_state(messages_key) + if messages_data is None: + raise SessionException( + f'Messages not found for agent {agent_id} in session {session_id}' + ) + + messages_list = messages_data.get('messages', []) + if not isinstance(messages_list, list): + messages_list = [] + + # Find and update message + updated = False + for i, msg_dict in enumerate(messages_list): + if msg_dict.get('message_id') == session_message.message_id: + messages_list[i] = session_message.to_dict() + updated = True + break + + if not updated: + raise SessionException(f'Message 
{session_message.message_id} not found in list') + + # Write back + self._write_state(messages_key, {'messages': messages_list}) + + def list_messages( + self, + session_id: str, + agent_id: str, + limit: Optional[int] = None, + offset: int = 0, + ) -> List[SessionMessage]: + """List messages for an agent with pagination. + + Args: + session_id: ID of the session. + agent_id: ID of the agent. + limit: Maximum number of messages to return. + offset: Number of messages to skip. + + Returns: + List of SessionMessage objects. + + Raises: + SessionException: If read fails. + """ + messages_key = self._get_messages_key(session_id, agent_id) + + messages_data = self._read_state(messages_key) + if messages_data is None: + return [] + + messages_list = messages_data.get('messages', []) + if not isinstance(messages_list, list): + messages_list = [] + + # Apply pagination + if limit is not None: + messages_list = messages_list[offset : offset + limit] + else: + messages_list = messages_list[offset:] + + # Convert to SessionMessage objects + return [SessionMessage.from_dict(msg_dict) for msg_dict in messages_list] + + def close(self) -> None: + """Close the Dapr client if owned by this manager.""" + if self._owns_client: + self._dapr_client.close() diff --git a/ext/dapr-ext-strands/dapr/ext/strands/version.py b/ext/dapr-ext-strands/dapr/ext/strands/version.py new file mode 100644 index 000000000..dae1485d2 --- /dev/null +++ b/ext/dapr-ext-strands/dapr/ext/strands/version.py @@ -0,0 +1,16 @@ +# -*- coding: utf-8 -*- + +""" +Copyright 2025 The Dapr Authors +Licensed under the Apache License, Version 2.0 (the "License"); +you may not use this file except in compliance with the License. +You may obtain a copy of the License at + http://www.apache.org/licenses/LICENSE-2.0 +Unless required by applicable law or agreed to in writing, software +distributed under the License is distributed on an "AS IS" BASIS, +WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. 
+See the License for the specific language governing permissions and +limitations under the License. +""" + +__version__ = '1.16.0.dev' diff --git a/ext/dapr-ext-strands/setup.cfg b/ext/dapr-ext-strands/setup.cfg new file mode 100644 index 000000000..fe619e9ba --- /dev/null +++ b/ext/dapr-ext-strands/setup.cfg @@ -0,0 +1,42 @@ +[metadata] +url = https://dapr.io/ +author = Dapr Authors +author_email = daprweb@microsoft.com +license = Apache +license_file = LICENSE +classifiers = + Development Status :: 5 - Production/Stable + Intended Audience :: Developers + License :: OSI Approved :: Apache Software License + Operating System :: OS Independent + Programming Language :: Python + Programming Language :: Python :: 3.10 + Programming Language :: Python :: 3.11 + Programming Language :: Python :: 3.12 + Programming Language :: Python :: 3.13 + Programming Language :: Python :: 3.14 +project_urls = + Documentation = https://github.com/dapr/docs + Source = https://github.com/dapr/python-sdk + +[options] +python_requires = >=3.10 +packages = find_namespace: +include_package_data = True +install_requires = + dapr >= 1.16.1rc2 + strands-agents + strands-agents-tools + python-ulid >= 3.0.0 + msgpack-python >= 0.4.5 + +[options.packages.find] +include = + dapr.* + +exclude = + tests + +[options.package_data] +dapr.ext.strands = + py.typed diff --git a/ext/dapr-ext-strands/setup.py b/ext/dapr-ext-strands/setup.py new file mode 100644 index 000000000..1d8c6732d --- /dev/null +++ b/ext/dapr-ext-strands/setup.py @@ -0,0 +1,66 @@ +# -*- coding: utf-8 -*- + +""" +Copyright 2025 The Dapr Authors +Licensed under the Apache License, Version 2.0 (the "License"); +you may not use this file except in compliance with the License. 
+You may obtain a copy of the License at + http://www.apache.org/licenses/LICENSE-2.0 +Unless required by applicable law or agreed to in writing, software +distributed under the License is distributed on an "AS IS" BASIS, +WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. +See the License for the specific language governing permissions and +limitations under the License. +""" + +import os + +from setuptools import setup + +# Load version in dapr package. +version_info = {} +with open('dapr/ext/strands/version.py') as fp: + exec(fp.read(), version_info) +__version__ = version_info['__version__'] + + +def is_release(): + return '.dev' not in __version__ + + +name = 'dapr-ext-strands' +version = __version__ +description = 'The official release of Dapr Python SDK Strands Agents Extension.' +long_description = """ +This is the Dapr Session Manager extension for Strands Agents. + +Dapr is a portable, serverless, event-driven runtime that makes it easy for developers to +build resilient, stateless and stateful microservices that run on the cloud and edge and +embraces the diversity of languages and developer frameworks. + +Dapr codifies the best practices for building microservice applications into open, +independent, building blocks that enable you to build portable applications with the language +and framework of your choice. Each building block is independent and you can use one, some, +or all of them in your application. 
+""".lstrip() + +# Get build number from GITHUB_RUN_NUMBER environment variable +build_number = os.environ.get('GITHUB_RUN_NUMBER', '0') + +if not is_release(): + name += '-dev' + version = f'{__version__}{build_number}' + description = ( + 'The developmental release for the Dapr Session Manager extension for Strands Agents' + ) + long_description = 'This is the developmental release for the Dapr Session Manager extension for Strands Agents' + +print(f'package name: {name}, version: {version}', flush=True) + + +setup( + name=name, + version=version, + description=description, + long_description=long_description, +) diff --git a/ext/dapr-ext-strands/tests/__init__.py b/ext/dapr-ext-strands/tests/__init__.py new file mode 100644 index 000000000..ad87aedb7 --- /dev/null +++ b/ext/dapr-ext-strands/tests/__init__.py @@ -0,0 +1,14 @@ +# -*- coding: utf-8 -*- + +""" +Copyright 2025 The Dapr Authors +Licensed under the Apache License, Version 2.0 (the "License"); +you may not use this file except in compliance with the License. +You may obtain a copy of the License at + http://www.apache.org/licenses/LICENSE-2.0 +Unless required by applicable law or agreed to in writing, software +distributed under the License is distributed on an "AS IS" BASIS, +WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. +See the License for the specific language governing permissions and +limitations under the License. +""" diff --git a/ext/dapr-ext-strands/tests/test_session_manager.py b/ext/dapr-ext-strands/tests/test_session_manager.py new file mode 100644 index 000000000..6f25edc55 --- /dev/null +++ b/ext/dapr-ext-strands/tests/test_session_manager.py @@ -0,0 +1,168 @@ +""" +Copyright 2026 The Dapr Authors +Licensed under the Apache License, Version 2.0 (the "License"); +you may not use this file except in compliance with the License. 
+You may obtain a copy of the License at + http://www.apache.org/licenses/LICENSE-2.0 +Unless required by applicable law or agreed to in writing, software +distributed under the License is distributed on an "AS IS" BASIS, +WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. +See the License for the specific language governing permissions and +limitations under the License. +""" + +# -*- coding: utf-8 -*- + +import json +import time +import unittest +from unittest import mock + +from dapr.ext.strands.dapr_session_manager import DaprSessionManager +from strands.types.exceptions import SessionException +from strands.types.session import Session, SessionAgent, SessionMessage + + +def dapr_state(data): + """Simulate a real Dapr get_state() response.""" + resp = mock.Mock() + resp.data = None if data is None else json.dumps(data).encode('utf-8') + return resp + + +def make_session(session_id='s1'): + return Session.from_dict( + { + 'session_id': session_id, + 'session_type': 'chat', + 'created_at': time.time(), + 'metadata': {}, + } + ) + + +def make_agent(agent_id='a1'): + return SessionAgent.from_dict( + { + 'agent_id': agent_id, + 'state': {}, + 'conversation_manager_state': {}, + 'created_at': time.time(), + } + ) + + +def make_message(message_id=1, text='hello'): + return SessionMessage.from_dict( + { + 'message_id': message_id, + 'role': 'user', + 'message': text, + 'created_at': time.time(), + } + ) + + +@mock.patch('dapr.ext.strands.dapr_session_manager.DaprClient') +class DaprSessionManagerTest(unittest.TestCase): + def setUp(self): + self.session_id = 's1' + self.store = 'statestore' + + self.mock_client = mock.Mock() + self.mock_client.get_state.return_value = dapr_state(None) + + self.manager = DaprSessionManager( + session_id=self.session_id, + state_store_name=self.store, + dapr_client=self.mock_client, + ) + + # + # session + # + def test_create_and_read_session(self, _): + session = make_session(self.session_id) + + 
self.manager.create_session(session) + + self.mock_client.get_state.return_value = dapr_state(session.to_dict()) + read = self.manager.read_session(self.session_id) + + assert read.session_id == self.session_id + + def test_create_session_raises_if_exists(self, _): + session = make_session(self.session_id) + + self.mock_client.get_state.return_value = dapr_state(session.to_dict()) + + with self.assertRaises(SessionException): + self.manager.create_session(session) + + # + # agent + # + def test_create_and_read_agent(self, _): + agent = make_agent('a1') + + self.manager.create_agent(self.session_id, agent) + + self.mock_client.get_state.return_value = dapr_state(agent.to_dict()) + read = self.manager.read_agent(self.session_id, 'a1') + + assert read.agent_id == 'a1' + + def test_update_agent_preserves_created_at(self, _): + agent = make_agent('a1') + original_ts = agent.created_at + + self.mock_client.get_state.return_value = dapr_state(agent.to_dict()) + + agent.state['x'] = 1 + self.manager.update_agent(self.session_id, agent) + + saved = json.loads(self.mock_client.save_state.call_args[1]['value']) + assert saved['created_at'] == original_ts + + def test_create_and_read_message(self, _): + msg = make_message(1, 'hello') + + self.manager.create_message(self.session_id, 'a1', msg) + + messages = {'messages': [msg.to_dict()]} + self.mock_client.get_state.return_value = dapr_state(messages) + + read = self.manager.read_message(self.session_id, 'a1', 1) + assert read.message == 'hello' + + def test_update_message_preserves_created_at(self, _): + msg = make_message(1, 'old') + original_ts = msg.created_at + + messages = {'messages': [msg.to_dict()]} + self.mock_client.get_state.return_value = dapr_state(messages) + + msg.message = 'new' + self.manager.update_message(self.session_id, 'a1', msg) + + saved = json.loads(self.mock_client.save_state.call_args[1]['value']) + updated = saved['messages'][0] + + assert updated['created_at'] == original_ts + assert 
updated['message'] == 'new' + + def test_delete_session_deletes_agents_and_messages(self, _): + manifest = {'agents': ['a1', 'a2']} + self.mock_client.get_state.return_value = dapr_state(manifest) + + self.manager.delete_session(self.session_id) + assert self.mock_client.delete_state.call_count == 6 + + def test_close_only_closes_owned_client(self, _): + self.manager._owns_client = True + self.manager.close() + self.mock_client.close.assert_called_once() + + +if __name__ == '__main__': + unittest.main() diff --git a/ext/dapr-ext-workflow/dapr/ext/workflow/__init__.py b/ext/dapr-ext-workflow/dapr/ext/workflow/__init__.py index f78615112..dd2d45b75 100644 --- a/ext/dapr-ext-workflow/dapr/ext/workflow/__init__.py +++ b/ext/dapr-ext-workflow/dapr/ext/workflow/__init__.py @@ -14,12 +14,12 @@ """ # Import your main classes here -from dapr.ext.workflow.workflow_runtime import WorkflowRuntime, alternate_name from dapr.ext.workflow.dapr_workflow_client import DaprWorkflowClient from dapr.ext.workflow.dapr_workflow_context import DaprWorkflowContext, when_all, when_any +from dapr.ext.workflow.retry_policy import RetryPolicy from dapr.ext.workflow.workflow_activity_context import WorkflowActivityContext +from dapr.ext.workflow.workflow_runtime import WorkflowRuntime, alternate_name from dapr.ext.workflow.workflow_state import WorkflowState, WorkflowStatus -from dapr.ext.workflow.retry_policy import RetryPolicy __all__ = [ 'WorkflowRuntime', diff --git a/ext/dapr-ext-workflow/dapr/ext/workflow/aio/__init__.py b/ext/dapr-ext-workflow/dapr/ext/workflow/aio/__init__.py new file mode 100644 index 000000000..ceb8672be --- /dev/null +++ b/ext/dapr-ext-workflow/dapr/ext/workflow/aio/__init__.py @@ -0,0 +1,20 @@ +# -*- coding: utf-8 -*- + +""" +Copyright 2025 The Dapr Authors +Licensed under the Apache License, Version 2.0 (the "License"); +you may not use this file except in compliance with the License. 
+You may obtain a copy of the License at + http://www.apache.org/licenses/LICENSE-2.0 +Unless required by applicable law or agreed to in writing, software +distributed under the License is distributed on an "AS IS" BASIS, +WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. +See the License for the specific language governing permissions and +limitations under the License. +""" + +from .dapr_workflow_client import DaprWorkflowClient + +__all__ = [ + 'DaprWorkflowClient', +] diff --git a/ext/dapr-ext-workflow/dapr/ext/workflow/aio/dapr_workflow_client.py b/ext/dapr-ext-workflow/dapr/ext/workflow/aio/dapr_workflow_client.py new file mode 100644 index 000000000..cd5e632f1 --- /dev/null +++ b/ext/dapr-ext-workflow/dapr/ext/workflow/aio/dapr_workflow_client.py @@ -0,0 +1,277 @@ +# -*- coding: utf-8 -*- + +""" +Copyright 2025 The Dapr Authors +Licensed under the Apache License, Version 2.0 (the "License"); +you may not use this file except in compliance with the License. +You may obtain a copy of the License at + http://www.apache.org/licenses/LICENSE-2.0 +Unless required by applicable law or agreed to in writing, software +distributed under the License is distributed on an "AS IS" BASIS, +WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. +See the License for the specific language governing permissions and +limitations under the License. 
+""" + +from __future__ import annotations + +from datetime import datetime +from typing import Any, Optional, TypeVar + +import durabletask.internal.orchestrator_service_pb2 as pb +from dapr.ext.workflow.logger import Logger, LoggerOptions +from dapr.ext.workflow.util import getAddress +from dapr.ext.workflow.workflow_context import Workflow +from dapr.ext.workflow.workflow_state import WorkflowState +from durabletask.aio import client as aioclient +from grpc.aio import AioRpcError + +from dapr.clients import DaprInternalError +from dapr.clients.http.client import DAPR_API_TOKEN_HEADER +from dapr.conf import settings +from dapr.conf.helpers import GrpcEndpoint + +T = TypeVar('T') +TInput = TypeVar('TInput') +TOutput = TypeVar('TOutput') + + +class DaprWorkflowClient: + """Async client for managing Dapr Workflow instances. + + This uses a gRPC async connection to send commands directly to the workflow engine, + bypassing the Dapr API layer. Intended to be used by workflow applications. + """ + + def __init__( + self, + host: Optional[str] = None, + port: Optional[str] = None, + logger_options: Optional[LoggerOptions] = None, + ): + address = getAddress(host, port) + + try: + uri = GrpcEndpoint(address) + except ValueError as error: + raise DaprInternalError(f'{error}') from error + + self._logger = Logger('DaprWorkflowClient', logger_options) + + metadata = tuple() + if settings.DAPR_API_TOKEN: + metadata = ((DAPR_API_TOKEN_HEADER, settings.DAPR_API_TOKEN),) + options = self._logger.get_options() + self.__obj = aioclient.AsyncTaskHubGrpcClient( + host_address=uri.endpoint, + metadata=metadata, + secure_channel=uri.tls, + log_handler=options.log_handler, + log_formatter=options.log_formatter, + ) + + async def schedule_new_workflow( + self, + workflow: Workflow, + *, + input: Optional[TInput] = None, + instance_id: Optional[str] = None, + start_at: Optional[datetime] = None, + reuse_id_policy: Optional[pb.OrchestrationIdReusePolicy] = None, + ) -> str: + 
"""Schedules a new workflow instance for execution. + + Args: + workflow: The workflow to schedule. + input: The optional input to pass to the scheduled workflow instance. This must be a + serializable value. + instance_id: The unique ID of the workflow instance to schedule. If not specified, a + new GUID value is used. + start_at: The time when the workflow instance should start executing. + If not specified or if a date-time in the past is specified, the workflow instance will + be scheduled immediately. + reuse_id_policy: Optional policy to reuse the workflow id when there is a conflict with + an existing workflow instance. + + Returns: + The ID of the scheduled workflow instance. + """ + workflow_name = ( + workflow.__dict__['_dapr_alternate_name'] + if hasattr(workflow, '_dapr_alternate_name') + else workflow.__name__ + ) + return await self.__obj.schedule_new_orchestration( + workflow_name, + input=input, + instance_id=instance_id, + start_at=start_at, + reuse_id_policy=reuse_id_policy, + ) + + async def get_workflow_state( + self, instance_id: str, *, fetch_payloads: bool = True + ) -> Optional[WorkflowState]: + """Fetches runtime state for the specified workflow instance. + + Args: + instance_id: The unique ID of the workflow instance to fetch. + fetch_payloads: If true, fetches the input, output payloads and custom status + for the workflow instance. Defaults to true. + + Returns: + The current state of the workflow instance, or None if the workflow instance does not + exist. 
+ + """ + try: + state = await self.__obj.get_orchestration_state( + instance_id, fetch_payloads=fetch_payloads + ) + return WorkflowState(state) if state else None + except AioRpcError as error: + if error.details() and 'no such instance exists' in error.details(): + self._logger.warning(f'Workflow instance not found: {instance_id}') + return None + self._logger.error( + f'Unhandled RPC error while fetching workflow state: {error.code()} - {error.details()}' + ) + raise + + async def wait_for_workflow_start( + self, instance_id: str, *, fetch_payloads: bool = False, timeout_in_seconds: int = 0 + ) -> Optional[WorkflowState]: + """Waits for a workflow to start running and returns a WorkflowState object that contains + metadata about the started workflow. + + A "started" workflow instance is any instance not in the WorkflowRuntimeStatus.Pending + state. This method will return a completed task if the workflow has already started + running or has already completed. + + Args: + instance_id: The unique ID of the workflow instance to wait for. + fetch_payloads: If true, fetches the input, output payloads and custom status for + the workflow instance. Defaults to false. + timeout_in_seconds: The maximum time to wait for the workflow instance to start running. + Defaults to meaning no timeout. + + Returns: + WorkflowState record that describes the workflow instance and its execution status. + If the specified workflow isn't found, the WorkflowState.Exists value will be false. + """ + state = await self.__obj.wait_for_orchestration_start( + instance_id, fetch_payloads=fetch_payloads, timeout=timeout_in_seconds + ) + return WorkflowState(state) if state else None + + async def wait_for_workflow_completion( + self, instance_id: str, *, fetch_payloads: bool = True, timeout_in_seconds: int = 0 + ) -> Optional[WorkflowState]: + """Waits for a workflow to complete and returns a WorkflowState object that contains + metadata about the started instance. 
+ + A "completed" workflow instance is any instance in one of the terminal states. For + example, the WorkflowRuntimeStatus.Completed, WorkflowRuntimeStatus.Failed or + WorkflowRuntimeStatus.Terminated states. + + Workflows are long-running and could take hours, days, or months before completing. + Workflows can also be eternal, in which case they'll never complete unless terminated. + In such cases, this call may block indefinitely, so care must be taken to ensure + appropriate timeouts are enforced using timeout parameter. + + If a workflow instance is already complete when this method is called, the method + will return immediately. + + Args: + instance_id: The unique ID of the workflow instance to wait for. + fetch_payloads: If true, fetches the input, output payloads and custom status + for the workflow instance. Defaults to true. + timeout_in_seconds: The maximum time in seconds to wait for the workflow instance to + complete. Defaults to 0 seconds, meaning no timeout. + + Returns: + WorkflowState record that describes the workflow instance and its execution status. + """ + state = await self.__obj.wait_for_orchestration_completion( + instance_id, fetch_payloads=fetch_payloads, timeout=timeout_in_seconds + ) + return WorkflowState(state) if state else None + + async def raise_workflow_event( + self, instance_id: str, event_name: str, *, data: Optional[Any] = None + ) -> None: + """Sends an event notification message to a waiting workflow instance. + In order to handle the event, the target workflow instance must be waiting for an + event named value of "eventName" param using the wait_for_external_event API. + If the target workflow instance is not yet waiting for an event named param "eventName" + value, then the event will be saved in the workflow instance state and dispatched + immediately when the workflow calls wait_for_external_event. + This event saving occurs even if the workflow has canceled its wait operation before + the event was received. 
+ + Workflows can wait for the same event name multiple times, so sending multiple events + with the same name is allowed. Each external event received by a workflow will complete + just one task returned by the wait_for_external_event method. + + Raised events for a completed or non-existent workflow instance will be silently + discarded. + + Args: + instance_id: The ID of the workflow instance that will handle the event. + event_name: The name of the event. Event names are case-insensitive. + data: The serializable data payload to include with the event. + """ + return await self.__obj.raise_orchestration_event(instance_id, event_name, data=data) + + async def terminate_workflow( + self, instance_id: str, *, output: Optional[Any] = None, recursive: bool = True + ) -> None: + """Terminates a running workflow instance and updates its runtime status to + WorkflowRuntimeStatus.Terminated This method internally enqueues a "terminate" message in + the task hub. When the task hub worker processes this message, it will update the runtime + status of the target instance to WorkflowRuntimeStatus.Terminated. You can use + wait_for_workflow_completion to wait for the instance to reach the terminated state. + + Terminating a workflow will terminate all child workflows that were started by + the workflow instance. + + However, terminating a workflow has no effect on any in-flight activity function + executions that were started by the terminated workflow instance. + + At the time of writing, there is no way to terminate an in-flight activity execution. + + Args: + instance_id: The ID of the workflow instance to terminate. + output: The optional output to set for the terminated workflow instance. + recursive: The optional flag to terminate all child workflows. 
+ + """ + return await self.__obj.terminate_orchestration( + instance_id, output=output, recursive=recursive + ) + + async def pause_workflow(self, instance_id: str) -> None: + """Suspends a workflow instance, halting processing of it until resume_workflow is used to + resume the workflow. + + Args: + instance_id: The instance ID of the workflow to suspend. + """ + return await self.__obj.suspend_orchestration(instance_id) + + async def resume_workflow(self, instance_id: str) -> None: + """Resumes a workflow instance that was suspended via pause_workflow. + + Args: + instance_id: The instance ID of the workflow to resume. + """ + return await self.__obj.resume_orchestration(instance_id) + + async def purge_workflow(self, instance_id: str, recursive: bool = True) -> None: + """Purge data from a workflow instance. + + Args: + instance_id: The instance ID of the workflow to purge. + recursive: The optional flag to also purge data from all child workflows. + """ + return await self.__obj.purge_orchestration(instance_id, recursive) diff --git a/ext/dapr-ext-workflow/dapr/ext/workflow/dapr_workflow_client.py b/ext/dapr-ext-workflow/dapr/ext/workflow/dapr_workflow_client.py index cc384503a..461bfd43a 100644 --- a/ext/dapr-ext-workflow/dapr/ext/workflow/dapr_workflow_client.py +++ b/ext/dapr-ext-workflow/dapr/ext/workflow/dapr_workflow_client.py @@ -14,23 +14,22 @@ """ from __future__ import annotations + from datetime import datetime from typing import Any, Optional, TypeVar - -from durabletask import client import durabletask.internal.orchestrator_service_pb2 as pb - -from dapr.ext.workflow.workflow_state import WorkflowState -from dapr.ext.workflow.workflow_context import Workflow +from dapr.ext.workflow.logger import Logger, LoggerOptions from dapr.ext.workflow.util import getAddress +from dapr.ext.workflow.workflow_context import Workflow +from dapr.ext.workflow.workflow_state import WorkflowState +from durabletask import client from grpc import RpcError from 
dapr.clients import DaprInternalError from dapr.clients.http.client import DAPR_API_TOKEN_HEADER from dapr.conf import settings from dapr.conf.helpers import GrpcEndpoint -from dapr.ext.workflow.logger import LoggerOptions, Logger T = TypeVar('T') TInput = TypeVar('TInput') diff --git a/ext/dapr-ext-workflow/dapr/ext/workflow/dapr_workflow_context.py b/ext/dapr-ext-workflow/dapr/ext/workflow/dapr_workflow_context.py index 476ab765f..714def3f2 100644 --- a/ext/dapr-ext-workflow/dapr/ext/workflow/dapr_workflow_context.py +++ b/ext/dapr-ext-workflow/dapr/ext/workflow/dapr_workflow_context.py @@ -13,15 +13,14 @@ limitations under the License. """ -from typing import Any, Callable, List, Optional, TypeVar, Union from datetime import datetime, timedelta +from typing import Any, Callable, List, Optional, TypeVar, Union -from durabletask import task - -from dapr.ext.workflow.workflow_context import WorkflowContext, Workflow -from dapr.ext.workflow.workflow_activity_context import WorkflowActivityContext -from dapr.ext.workflow.logger import LoggerOptions, Logger +from dapr.ext.workflow.logger import Logger, LoggerOptions from dapr.ext.workflow.retry_policy import RetryPolicy +from dapr.ext.workflow.workflow_activity_context import WorkflowActivityContext +from dapr.ext.workflow.workflow_context import Workflow, WorkflowContext +from durabletask import task T = TypeVar('T') TInput = TypeVar('TInput') diff --git a/ext/dapr-ext-workflow/dapr/ext/workflow/logger/__init__.py b/ext/dapr-ext-workflow/dapr/ext/workflow/logger/__init__.py index 5583bde7e..b63a763bd 100644 --- a/ext/dapr-ext-workflow/dapr/ext/workflow/logger/__init__.py +++ b/ext/dapr-ext-workflow/dapr/ext/workflow/logger/__init__.py @@ -1,4 +1,4 @@ -from dapr.ext.workflow.logger.options import LoggerOptions from dapr.ext.workflow.logger.logger import Logger +from dapr.ext.workflow.logger.options import LoggerOptions __all__ = ['LoggerOptions', 'Logger'] diff --git 
a/ext/dapr-ext-workflow/dapr/ext/workflow/logger/logger.py b/ext/dapr-ext-workflow/dapr/ext/workflow/logger/logger.py index 6b0f3fec4..b93e7074f 100644 --- a/ext/dapr-ext-workflow/dapr/ext/workflow/logger/logger.py +++ b/ext/dapr-ext-workflow/dapr/ext/workflow/logger/logger.py @@ -1,5 +1,6 @@ import logging from typing import Union + from dapr.ext.workflow.logger.options import LoggerOptions diff --git a/ext/dapr-ext-workflow/dapr/ext/workflow/logger/options.py b/ext/dapr-ext-workflow/dapr/ext/workflow/logger/options.py index 0be44c52b..15cee8cc3 100644 --- a/ext/dapr-ext-workflow/dapr/ext/workflow/logger/options.py +++ b/ext/dapr-ext-workflow/dapr/ext/workflow/logger/options.py @@ -13,8 +13,8 @@ limitations under the License. """ -from typing import Union import logging +from typing import Union class LoggerOptions: diff --git a/dapr/proto/py.typed b/ext/dapr-ext-workflow/dapr/ext/workflow/py.typed similarity index 100% rename from dapr/proto/py.typed rename to ext/dapr-ext-workflow/dapr/ext/workflow/py.typed diff --git a/ext/dapr-ext-workflow/dapr/ext/workflow/retry_policy.py b/ext/dapr-ext-workflow/dapr/ext/workflow/retry_policy.py index af1f5ea9e..aa12f479d 100644 --- a/ext/dapr-ext-workflow/dapr/ext/workflow/retry_policy.py +++ b/ext/dapr-ext-workflow/dapr/ext/workflow/retry_policy.py @@ -13,8 +13,8 @@ limitations under the License. 
""" -from typing import Optional, TypeVar from datetime import timedelta +from typing import Optional, TypeVar from durabletask import task diff --git a/ext/dapr-ext-workflow/dapr/ext/workflow/util.py b/ext/dapr-ext-workflow/dapr/ext/workflow/util.py index 648bc973d..3199e2558 100644 --- a/ext/dapr-ext-workflow/dapr/ext/workflow/util.py +++ b/ext/dapr-ext-workflow/dapr/ext/workflow/util.py @@ -21,7 +21,7 @@ def getAddress(host: Optional[str] = None, port: Optional[str] = None) -> str: if not host and not port: address = settings.DAPR_GRPC_ENDPOINT or ( - f'{settings.DAPR_RUNTIME_HOST}:' f'{settings.DAPR_GRPC_PORT}' + f'{settings.DAPR_RUNTIME_HOST}:{settings.DAPR_GRPC_PORT}' ) else: host = host or settings.DAPR_RUNTIME_HOST diff --git a/ext/dapr-ext-workflow/dapr/ext/workflow/version.py b/ext/dapr-ext-workflow/dapr/ext/workflow/version.py index 8c6c12960..5c39bd99d 100644 --- a/ext/dapr-ext-workflow/dapr/ext/workflow/version.py +++ b/ext/dapr-ext-workflow/dapr/ext/workflow/version.py @@ -13,4 +13,4 @@ limitations under the License. 
""" -__version__ = '1.16.1rc1' +__version__ = '1.16.1rc2' diff --git a/ext/dapr-ext-workflow/dapr/ext/workflow/workflow_activity_context.py b/ext/dapr-ext-workflow/dapr/ext/workflow/workflow_activity_context.py index f460e8013..331ad6c2c 100644 --- a/ext/dapr-ext-workflow/dapr/ext/workflow/workflow_activity_context.py +++ b/ext/dapr-ext-workflow/dapr/ext/workflow/workflow_activity_context.py @@ -14,6 +14,7 @@ """ from __future__ import annotations + from typing import Callable, TypeVar from durabletask import task diff --git a/ext/dapr-ext-workflow/dapr/ext/workflow/workflow_context.py b/ext/dapr-ext-workflow/dapr/ext/workflow/workflow_context.py index d6e6ba072..8453e16ef 100644 --- a/ext/dapr-ext-workflow/dapr/ext/workflow/workflow_context.py +++ b/ext/dapr-ext-workflow/dapr/ext/workflow/workflow_context.py @@ -14,13 +14,13 @@ """ from __future__ import annotations + from abc import ABC, abstractmethod from datetime import datetime, timedelta from typing import Any, Callable, Generator, Optional, TypeVar, Union -from durabletask import task - from dapr.ext.workflow.workflow_activity_context import Activity +from durabletask import task T = TypeVar('T') TInput = TypeVar('TInput') diff --git a/ext/dapr-ext-workflow/dapr/ext/workflow/workflow_runtime.py b/ext/dapr-ext-workflow/dapr/ext/workflow/workflow_runtime.py index 9f4be6222..ee1e92bdf 100644 --- a/ext/dapr-ext-workflow/dapr/ext/workflow/workflow_runtime.py +++ b/ext/dapr-ext-workflow/dapr/ext/workflow/workflow_runtime.py @@ -18,18 +18,18 @@ from typing import Optional, TypeVar, Union, Sequence import grpc -from durabletask import worker, task - -from dapr.ext.workflow.workflow_context import Workflow +import grpc from dapr.ext.workflow.dapr_workflow_context import DaprWorkflowContext -from dapr.ext.workflow.workflow_activity_context import Activity, WorkflowActivityContext +from dapr.ext.workflow.logger import Logger, LoggerOptions from dapr.ext.workflow.util import getAddress +from 
dapr.ext.workflow.workflow_activity_context import Activity, WorkflowActivityContext +from dapr.ext.workflow.workflow_context import Workflow +from durabletask import task, worker from dapr.clients import DaprInternalError from dapr.clients.http.client import DAPR_API_TOKEN_HEADER from dapr.conf import settings from dapr.conf.helpers import GrpcEndpoint -from dapr.ext.workflow.logger import LoggerOptions, Logger T = TypeVar('T') TInput = TypeVar('TInput') diff --git a/ext/dapr-ext-workflow/dapr/ext/workflow/workflow_state.py b/ext/dapr-ext-workflow/dapr/ext/workflow/workflow_state.py index 10847fc54..af1d7e735 100644 --- a/ext/dapr-ext-workflow/dapr/ext/workflow/workflow_state.py +++ b/ext/dapr-ext-workflow/dapr/ext/workflow/workflow_state.py @@ -13,8 +13,8 @@ limitations under the License. """ -from enum import Enum import json +from enum import Enum from durabletask import client diff --git a/ext/dapr-ext-workflow/setup.cfg b/ext/dapr-ext-workflow/setup.cfg index 83869566c..8b37327ce 100644 --- a/ext/dapr-ext-workflow/setup.cfg +++ b/ext/dapr-ext-workflow/setup.cfg @@ -24,7 +24,7 @@ python_requires = >=3.9 packages = find_namespace: include_package_data = True install_requires = - dapr >= 1.16.1rc1 + dapr >= 1.16.1rc2 durabletask-dapr >= 0.2.0a9 [options.packages.find] @@ -33,3 +33,7 @@ include = exclude = tests + +[options.package_data] +dapr.ext.workflow = + py.typed diff --git a/ext/dapr-ext-workflow/tests/test_dapr_workflow_context.py b/ext/dapr-ext-workflow/tests/test_dapr_workflow_context.py index 3ae5fdaf5..32b148224 100644 --- a/ext/dapr-ext-workflow/tests/test_dapr_workflow_context.py +++ b/ext/dapr-ext-workflow/tests/test_dapr_workflow_context.py @@ -13,9 +13,10 @@ limitations under the License. 
""" +import unittest from datetime import datetime from unittest import mock -import unittest + from dapr.ext.workflow.dapr_workflow_context import DaprWorkflowContext from dapr.ext.workflow.workflow_activity_context import WorkflowActivityContext from durabletask import worker diff --git a/ext/dapr-ext-workflow/tests/test_workflow_activity_context.py b/ext/dapr-ext-workflow/tests/test_workflow_activity_context.py index a45b8b7cd..9a7d6fcc8 100644 --- a/ext/dapr-ext-workflow/tests/test_workflow_activity_context.py +++ b/ext/dapr-ext-workflow/tests/test_workflow_activity_context.py @@ -15,8 +15,9 @@ import unittest from unittest import mock -from durabletask import task + from dapr.ext.workflow.workflow_activity_context import WorkflowActivityContext +from durabletask import task mock_orchestration_id = 'orchestration001' mock_task = 10 diff --git a/ext/dapr-ext-workflow/tests/test_workflow_client.py b/ext/dapr-ext-workflow/tests/test_workflow_client.py index 540c0e801..a12a8844b 100644 --- a/ext/dapr-ext-workflow/tests/test_workflow_client.py +++ b/ext/dapr-ext-workflow/tests/test_workflow_client.py @@ -13,14 +13,15 @@ limitations under the License. 
""" +import unittest from datetime import datetime from typing import Any, Union -import unittest -from dapr.ext.workflow.dapr_workflow_context import DaprWorkflowContext from unittest import mock + +import durabletask.internal.orchestrator_service_pb2 as pb from dapr.ext.workflow.dapr_workflow_client import DaprWorkflowClient +from dapr.ext.workflow.dapr_workflow_context import DaprWorkflowContext from durabletask import client -import durabletask.internal.orchestrator_service_pb2 as pb from grpc import RpcError mock_schedule_result = 'workflow001' diff --git a/ext/dapr-ext-workflow/tests/test_workflow_client_aio.py b/ext/dapr-ext-workflow/tests/test_workflow_client_aio.py new file mode 100644 index 000000000..c84fcbfe6 --- /dev/null +++ b/ext/dapr-ext-workflow/tests/test_workflow_client_aio.py @@ -0,0 +1,176 @@ +# -*- coding: utf-8 -*- + +""" +Copyright 2025 The Dapr Authors +Licensed under the Apache License, Version 2.0 (the "License"); +you may not use this file except in compliance with the License. +You may obtain a copy of the License at + http://www.apache.org/licenses/LICENSE-2.0 +Unless required by applicable law or agreed to in writing, software +distributed under the License is distributed on an "AS IS" BASIS, +WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. +See the License for the specific language governing permissions and +limitations under the License. 
+""" + +import unittest +from datetime import datetime +from typing import Any, Union +from unittest import mock + +import durabletask.internal.orchestrator_service_pb2 as pb +from dapr.ext.workflow.aio import DaprWorkflowClient +from dapr.ext.workflow.dapr_workflow_context import DaprWorkflowContext +from durabletask import client +from grpc.aio import AioRpcError + +mock_schedule_result = 'workflow001' +mock_raise_event_result = 'event001' +mock_terminate_result = 'terminate001' +mock_suspend_result = 'suspend001' +mock_resume_result = 'resume001' +mock_purge_result = 'purge001' +mock_instance_id = 'instance001' +wf_status = 'not-found' + + +class SimulatedAioRpcError(AioRpcError): + def __init__(self, code, details): + self._code = code + self._details = details + + def code(self): + return self._code + + def details(self): + return self._details + + +class FakeAsyncTaskHubGrpcClient: + async def schedule_new_orchestration( + self, + workflow, + *, + input, + instance_id, + start_at, + reuse_id_policy: Union[pb.OrchestrationIdReusePolicy, None] = None, + ): + return mock_schedule_result + + async def get_orchestration_state(self, instance_id, *, fetch_payloads): + if wf_status == 'not-found': + raise SimulatedAioRpcError(code='UNKNOWN', details='no such instance exists') + elif wf_status == 'found': + return self._inner_get_orchestration_state( + instance_id, client.OrchestrationStatus.PENDING + ) + else: + raise SimulatedAioRpcError(code='UNKNOWN', details='unknown error') + + async def wait_for_orchestration_start(self, instance_id, *, fetch_payloads, timeout): + return self._inner_get_orchestration_state(instance_id, client.OrchestrationStatus.RUNNING) + + async def wait_for_orchestration_completion(self, instance_id, *, fetch_payloads, timeout): + return self._inner_get_orchestration_state( + instance_id, client.OrchestrationStatus.COMPLETED + ) + + async def raise_orchestration_event( + self, instance_id: str, event_name: str, *, data: Union[Any, None] = 
None + ): + return mock_raise_event_result + + async def terminate_orchestration( + self, instance_id: str, *, output: Union[Any, None] = None, recursive: bool = True + ): + return mock_terminate_result + + async def suspend_orchestration(self, instance_id: str): + return mock_suspend_result + + async def resume_orchestration(self, instance_id: str): + return mock_resume_result + + async def purge_orchestration(self, instance_id: str, recursive: bool = True): + return mock_purge_result + + def _inner_get_orchestration_state(self, instance_id, state: client.OrchestrationStatus): + return client.OrchestrationState( + instance_id=instance_id, + name='', + runtime_status=state, + created_at=datetime.now(), + last_updated_at=datetime.now(), + serialized_input=None, + serialized_output=None, + serialized_custom_status=None, + failure_details=None, + ) + + +class WorkflowClientAioTest(unittest.IsolatedAsyncioTestCase): + def mock_client_wf(ctx: DaprWorkflowContext, input): + print(f'{input}') + + async def test_client_functions(self): + with mock.patch( + 'durabletask.aio.client.AsyncTaskHubGrpcClient', + return_value=FakeAsyncTaskHubGrpcClient(), + ): + wfClient = DaprWorkflowClient() + actual_schedule_result = await wfClient.schedule_new_workflow( + workflow=self.mock_client_wf, input='Hi Chef!' 
+ ) + assert actual_schedule_result == mock_schedule_result + + global wf_status + wf_status = 'not-found' + actual_get_result = await wfClient.get_workflow_state( + instance_id=mock_instance_id, fetch_payloads=True + ) + assert actual_get_result is None + + wf_status = 'error' + with self.assertRaises(AioRpcError): + await wfClient.get_workflow_state(instance_id=mock_instance_id, fetch_payloads=True) + + assert actual_get_result is None + + wf_status = 'found' + actual_get_result = await wfClient.get_workflow_state( + instance_id=mock_instance_id, fetch_payloads=True + ) + assert actual_get_result.runtime_status.name == 'PENDING' + assert actual_get_result.instance_id == mock_instance_id + + actual_wait_start_result = await wfClient.wait_for_workflow_start( + instance_id=mock_instance_id, timeout_in_seconds=30 + ) + assert actual_wait_start_result.runtime_status.name == 'RUNNING' + assert actual_wait_start_result.instance_id == mock_instance_id + + actual_wait_completion_result = await wfClient.wait_for_workflow_completion( + instance_id=mock_instance_id, timeout_in_seconds=30 + ) + assert actual_wait_completion_result.runtime_status.name == 'COMPLETED' + assert actual_wait_completion_result.instance_id == mock_instance_id + + actual_raise_event_result = await wfClient.raise_workflow_event( + instance_id=mock_instance_id, event_name='test_event', data='test_data' + ) + assert actual_raise_event_result == mock_raise_event_result + + actual_terminate_result = await wfClient.terminate_workflow( + instance_id=mock_instance_id, output='test_output' + ) + assert actual_terminate_result == mock_terminate_result + + actual_suspend_result = await wfClient.pause_workflow(instance_id=mock_instance_id) + assert actual_suspend_result == mock_suspend_result + + actual_resume_result = await wfClient.resume_workflow(instance_id=mock_instance_id) + assert actual_resume_result == mock_resume_result + + actual_purge_result = await 
wfClient.purge_workflow(instance_id=mock_instance_id) + assert actual_purge_result == mock_purge_result diff --git a/ext/dapr-ext-workflow/tests/test_workflow_runtime.py b/ext/dapr-ext-workflow/tests/test_workflow_runtime.py index 02d6c6f3b..bf18cd689 100644 --- a/ext/dapr-ext-workflow/tests/test_workflow_runtime.py +++ b/ext/dapr-ext-workflow/tests/test_workflow_runtime.py @@ -13,12 +13,13 @@ limitations under the License. """ -from typing import List import unittest -from dapr.ext.workflow.dapr_workflow_context import DaprWorkflowContext +from typing import List from unittest import mock -from dapr.ext.workflow.workflow_runtime import WorkflowRuntime, alternate_name + +from dapr.ext.workflow.dapr_workflow_context import DaprWorkflowContext from dapr.ext.workflow.workflow_activity_context import WorkflowActivityContext +from dapr.ext.workflow.workflow_runtime import WorkflowRuntime, alternate_name listOrchestrators: List[str] = [] listActivities: List[str] = [] diff --git a/ext/dapr-ext-workflow/tests/test_workflow_util.py b/ext/dapr-ext-workflow/tests/test_workflow_util.py index 878ee7374..28e92e6c5 100644 --- a/ext/dapr-ext-workflow/tests/test_workflow_util.py +++ b/ext/dapr-ext-workflow/tests/test_workflow_util.py @@ -1,7 +1,8 @@ import unittest -from dapr.ext.workflow.util import getAddress from unittest.mock import patch +from dapr.ext.workflow.util import getAddress + from dapr.conf import settings diff --git a/ext/flask_dapr/flask_dapr/app.py b/ext/flask_dapr/flask_dapr/app.py index c8d5def92..80e42220f 100644 --- a/ext/flask_dapr/flask_dapr/app.py +++ b/ext/flask_dapr/flask_dapr/app.py @@ -14,6 +14,7 @@ """ from typing import Dict, List, Optional + from flask import Flask, jsonify diff --git a/dapr/serializers/py.typed b/ext/flask_dapr/flask_dapr/py.typed similarity index 100% rename from dapr/serializers/py.typed rename to ext/flask_dapr/flask_dapr/py.typed diff --git a/ext/flask_dapr/flask_dapr/version.py b/ext/flask_dapr/flask_dapr/version.py index 
8c6c12960..5c39bd99d 100644 --- a/ext/flask_dapr/flask_dapr/version.py +++ b/ext/flask_dapr/flask_dapr/version.py @@ -13,4 +13,4 @@ limitations under the License. """ -__version__ = '1.16.1rc1' +__version__ = '1.16.1rc2' diff --git a/ext/flask_dapr/setup.cfg b/ext/flask_dapr/setup.cfg index 531a9aea5..826136c54 100644 --- a/ext/flask_dapr/setup.cfg +++ b/ext/flask_dapr/setup.cfg @@ -26,4 +26,8 @@ include_package_data = true zip_safe = false install_requires = Flask >= 1.1 - dapr >= 1.16.1rc1 + dapr >= 1.16.0.dev + +[options.package_data] +flask_dapr = + py.typed diff --git a/py.typed b/py.typed deleted file mode 100644 index e69de29bb..000000000 diff --git a/pyproject.toml b/pyproject.toml index 2b8ddf72e..0378a8c8f 100644 --- a/pyproject.toml +++ b/pyproject.toml @@ -1,21 +1,24 @@ [tool.ruff] -target-version = "py38" +target-version = "py310" line-length = 100 fix = true extend-exclude = [".github", "dapr/proto"] + [tool.ruff.lint] select = [ - "E", # pycodestyle errors + "I", # isort "W", # pycodestyle warnings "F", # pyflakes - "I", # isort - "C", # flake8-comprehensions - "B", # flake8-bugbear - "UP", # pyupgrade -] -ignore = [ - # Undefined name {name} - "F821", + "E", # pycodestyle errors + + # TODO: Add those back progressively as we fix the issues + # "C", # flake8-comprehensions + # "B", # flake8-bugbear + # "UP", # pyupgrade ] + +# TODO: Add those back progressively as we fix the issues +ignore = ["E501","E203", "E712", "E722", "E713"] + [tool.ruff.format] quote-style = 'single' diff --git a/setup.cfg b/setup.cfg index de5d53f4f..e68961557 100644 --- a/setup.cfg +++ b/setup.cfg @@ -42,27 +42,5 @@ exclude = tests [options.package_data] -dapr.actor = +dapr = py.typed -dapr.clients = - py.typed -dapr.conf = - py.typed -dapr.proto = - py.typed -dapr.serializers = - py.typed - -[flake8] -exclude = - .venv, - venv, - .env, - build, - dist, - .git, - .tox, - dapr/proto, - examples -ignore = F821, E501, W503, E203 -max-line-length = 100 diff --git 
a/tests/actor/fake_actor_classes.py b/tests/actor/fake_actor_classes.py index 50fe63fcf..2de821779 100644 --- a/tests/actor/fake_actor_classes.py +++ b/tests/actor/fake_actor_classes.py @@ -12,24 +12,22 @@ See the License for the specific language governing permissions and limitations under the License. """ -from dapr.serializers.json import DefaultJSONSerializer -import asyncio +import asyncio from datetime import timedelta from typing import Optional -from dapr.actor.runtime.actor import Actor -from dapr.actor.runtime.remindable import Remindable from dapr.actor.actor_interface import ActorInterface, actormethod - +from dapr.actor.runtime.actor import Actor from dapr.actor.runtime.reentrancy_context import reentrancy_ctx +from dapr.actor.runtime.remindable import Remindable +from dapr.serializers.json import DefaultJSONSerializer # Fake Simple Actor Class for testing class FakeSimpleActorInterface(ActorInterface): @actormethod(name='ActorMethod') - async def actor_method(self, arg: int) -> dict: - ... + async def actor_method(self, arg: int) -> dict: ... class FakeSimpleActor(Actor, FakeSimpleActorInterface): @@ -89,40 +87,32 @@ async def receive_reminder( class FakeActorCls1Interface(ActorInterface): # Fake Actor Class deriving multiple ActorInterfaces @actormethod(name='ActorCls1Method') - async def actor_cls1_method(self, arg): - ... + async def actor_cls1_method(self, arg): ... @actormethod(name='ActorCls1Method1') - async def actor_cls1_method1(self, arg): - ... + async def actor_cls1_method1(self, arg): ... @actormethod(name='ActorCls1Method2') - async def actor_cls1_method2(self, arg): - ... + async def actor_cls1_method2(self, arg): ... class FakeActorCls2Interface(ActorInterface): @actormethod(name='ActorCls2Method') - async def actor_cls2_method(self, arg): - ... + async def actor_cls2_method(self, arg): ... @actormethod(name='ActionMethod') - async def action(self, data: object) -> str: - ... + async def action(self, data: object) -> str: ... 
@actormethod(name='ActionMethodWithoutArg') - async def action_no_arg(self) -> str: - ... + async def action_no_arg(self) -> str: ... class ReentrantActorInterface(ActorInterface): @actormethod(name='ReentrantMethod') - async def reentrant_method(self, data: object) -> str: - ... + async def reentrant_method(self, data: object) -> str: ... @actormethod(name='ReentrantMethodWithPassthrough') - async def reentrant_pass_through_method(self, arg): - ... + async def reentrant_pass_through_method(self, arg): ... class FakeMultiInterfacesActor( diff --git a/tests/actor/fake_client.py b/tests/actor/fake_client.py index fa5fe1577..311a76e0e 100644 --- a/tests/actor/fake_client.py +++ b/tests/actor/fake_client.py @@ -13,36 +13,34 @@ limitations under the License. """ -from dapr.clients import DaprActorClientBase from typing import Optional +from dapr.clients import DaprActorClientBase + # Fake Dapr Actor Client Base Class for testing class FakeDaprActorClientBase(DaprActorClientBase): async def invoke_method( self, actor_type: str, actor_id: str, method: str, data: Optional[bytes] = None - ) -> bytes: - ... + ) -> bytes: ... - async def save_state_transactionally(self, actor_type: str, actor_id: str, data: bytes) -> None: - ... + async def save_state_transactionally( + self, actor_type: str, actor_id: str, data: bytes + ) -> None: ... - async def get_state(self, actor_type: str, actor_id: str, name: str) -> bytes: - ... + async def get_state(self, actor_type: str, actor_id: str, name: str) -> bytes: ... async def register_reminder( self, actor_type: str, actor_id: str, name: str, data: bytes - ) -> None: - ... + ) -> None: ... - async def unregister_reminder(self, actor_type: str, actor_id: str, name: str) -> None: - ... + async def unregister_reminder(self, actor_type: str, actor_id: str, name: str) -> None: ... - async def register_timer(self, actor_type: str, actor_id: str, name: str, data: bytes) -> None: - ... 
+ async def register_timer( + self, actor_type: str, actor_id: str, name: str, data: bytes + ) -> None: ... - async def unregister_timer(self, actor_type: str, actor_id: str, name: str) -> None: - ... + async def unregister_timer(self, actor_type: str, actor_id: str, name: str) -> None: ... class FakeDaprActorClient(FakeDaprActorClientBase): diff --git a/tests/actor/test_actor.py b/tests/actor/test_actor.py index d9b602c9d..7a7bee2d2 100644 --- a/tests/actor/test_actor.py +++ b/tests/actor/test_actor.py @@ -14,25 +14,22 @@ """ import unittest - -from unittest import mock from datetime import timedelta +from unittest import mock from dapr.actor.id import ActorId +from dapr.actor.runtime._type_information import ActorTypeInformation from dapr.actor.runtime.config import ActorRuntimeConfig from dapr.actor.runtime.context import ActorRuntimeContext from dapr.actor.runtime.runtime import ActorRuntime -from dapr.actor.runtime._type_information import ActorTypeInformation from dapr.conf import settings from dapr.serializers import DefaultJSONSerializer - from tests.actor.fake_actor_classes import ( + FakeMultiInterfacesActor, FakeSimpleActor, FakeSimpleReminderActor, FakeSimpleTimerActor, - FakeMultiInterfacesActor, ) - from tests.actor.fake_client import FakeDaprActorClient from tests.actor.utils import _async_mock, _run from tests.clients.fake_http_server import FakeHttpServer diff --git a/tests/actor/test_actor_factory.py b/tests/actor/test_actor_factory.py index 0715c33f4..4f629bb25 100644 --- a/tests/actor/test_actor_factory.py +++ b/tests/actor/test_actor_factory.py @@ -18,16 +18,13 @@ from dapr.actor import Actor from dapr.actor.id import ActorId from dapr.actor.runtime._type_information import ActorTypeInformation -from dapr.actor.runtime.manager import ActorManager from dapr.actor.runtime.context import ActorRuntimeContext +from dapr.actor.runtime.manager import ActorManager from dapr.serializers import DefaultJSONSerializer - from tests.actor.fake_actor_classes 
import ( FakeSimpleActorInterface, ) - from tests.actor.fake_client import FakeDaprActorClient - from tests.actor.utils import _run diff --git a/tests/actor/test_actor_manager.py b/tests/actor/test_actor_manager.py index 6c21abfb7..af0e2e410 100644 --- a/tests/actor/test_actor_manager.py +++ b/tests/actor/test_actor_manager.py @@ -19,19 +19,16 @@ from dapr.actor.id import ActorId from dapr.actor.runtime._type_information import ActorTypeInformation -from dapr.actor.runtime.manager import ActorManager from dapr.actor.runtime.context import ActorRuntimeContext +from dapr.actor.runtime.manager import ActorManager from dapr.serializers import DefaultJSONSerializer - from tests.actor.fake_actor_classes import ( FakeMultiInterfacesActor, FakeSimpleActor, FakeSimpleReminderActor, FakeSimpleTimerActor, ) - from tests.actor.fake_client import FakeDaprActorClient - from tests.actor.utils import ( _async_mock, _run, diff --git a/tests/actor/test_actor_reentrancy.py b/tests/actor/test_actor_reentrancy.py index 834273f41..263070f65 100644 --- a/tests/actor/test_actor_reentrancy.py +++ b/tests/actor/test_actor_reentrancy.py @@ -13,22 +13,19 @@ limitations under the License. 
""" -import unittest import asyncio - +import unittest from unittest import mock +from dapr.actor.runtime.config import ActorReentrancyConfig, ActorRuntimeConfig from dapr.actor.runtime.runtime import ActorRuntime -from dapr.actor.runtime.config import ActorRuntimeConfig, ActorReentrancyConfig from dapr.conf import settings from dapr.serializers import DefaultJSONSerializer - from tests.actor.fake_actor_classes import ( - FakeReentrantActor, FakeMultiInterfacesActor, + FakeReentrantActor, FakeSlowReentrantActor, ) - from tests.actor.utils import _run from tests.clients.fake_http_server import FakeHttpServer @@ -212,9 +209,10 @@ async def expected_return_value(*args, **kwargs): _run(ActorRuntime.deactivate(FakeReentrantActor.__name__, 'test-id')) def test_parse_incoming_reentrancy_header_flask(self): - from ext.flask_dapr import flask_dapr from flask import Flask + from ext.flask_dapr import flask_dapr + app = Flask(f'{FakeReentrantActor.__name__}Service') flask_dapr.DaprActor(app) @@ -244,9 +242,9 @@ def test_parse_incoming_reentrancy_header_flask(self): ) def test_parse_incoming_reentrancy_header_fastapi(self): + from dapr.ext import fastapi from fastapi import FastAPI from fastapi.testclient import TestClient - from dapr.ext import fastapi app = FastAPI(title=f'{FakeReentrantActor.__name__}Service') fastapi.DaprActor(app) diff --git a/tests/actor/test_actor_runtime.py b/tests/actor/test_actor_runtime.py index f17f96cc8..7725c3728 100644 --- a/tests/actor/test_actor_runtime.py +++ b/tests/actor/test_actor_runtime.py @@ -14,20 +14,17 @@ """ import unittest - from datetime import timedelta -from dapr.actor.runtime.runtime import ActorRuntime from dapr.actor.runtime.config import ActorRuntimeConfig +from dapr.actor.runtime.runtime import ActorRuntime from dapr.conf import settings from dapr.serializers import DefaultJSONSerializer - from tests.actor.fake_actor_classes import ( - FakeSimpleActor, FakeMultiInterfacesActor, + FakeSimpleActor, FakeSimpleTimerActor, ) - 
from tests.actor.utils import _run from tests.clients.fake_http_server import FakeHttpServer diff --git a/tests/actor/test_actor_runtime_config.py b/tests/actor/test_actor_runtime_config.py index 7bbd8cefc..e39894c77 100644 --- a/tests/actor/test_actor_runtime_config.py +++ b/tests/actor/test_actor_runtime_config.py @@ -14,9 +14,9 @@ """ import unittest - from datetime import timedelta -from dapr.actor.runtime.config import ActorRuntimeConfig, ActorReentrancyConfig, ActorTypeConfig + +from dapr.actor.runtime.config import ActorReentrancyConfig, ActorRuntimeConfig, ActorTypeConfig class ActorTypeConfigTests(unittest.TestCase): diff --git a/tests/actor/test_client_proxy.py b/tests/actor/test_client_proxy.py index fe667d629..172e5d283 100644 --- a/tests/actor/test_client_proxy.py +++ b/tests/actor/test_client_proxy.py @@ -12,22 +12,18 @@ See the License for the specific language governing permissions and limitations under the License. """ -import unittest +import unittest from unittest import mock - -from dapr.actor.id import ActorId from dapr.actor.client.proxy import ActorProxy +from dapr.actor.id import ActorId from dapr.serializers import DefaultJSONSerializer from tests.actor.fake_actor_classes import ( - FakeMultiInterfacesActor, FakeActorCls2Interface, + FakeMultiInterfacesActor, ) - - from tests.actor.fake_client import FakeDaprActorClient - from tests.actor.utils import _async_mock, _run diff --git a/tests/actor/test_method_dispatcher.py b/tests/actor/test_method_dispatcher.py index 94f48a7b6..a32fba455 100644 --- a/tests/actor/test_method_dispatcher.py +++ b/tests/actor/test_method_dispatcher.py @@ -15,11 +15,10 @@ import unittest +from dapr.actor.runtime._type_information import ActorTypeInformation from dapr.actor.runtime.context import ActorRuntimeContext from dapr.actor.runtime.method_dispatcher import ActorMethodDispatcher -from dapr.actor.runtime._type_information import ActorTypeInformation from dapr.serializers import DefaultJSONSerializer - from 
tests.actor.fake_actor_classes import FakeSimpleActor from tests.actor.fake_client import FakeDaprActorClient from tests.actor.utils import _run diff --git a/tests/actor/test_mock_actor.py b/tests/actor/test_mock_actor.py index c37cdf4f8..8a958c425 100644 --- a/tests/actor/test_mock_actor.py +++ b/tests/actor/test_mock_actor.py @@ -9,48 +9,37 @@ class MockTestActorInterface(ActorInterface): @actormethod(name='GetData') - async def get_data(self) -> object: - ... + async def get_data(self) -> object: ... @actormethod(name='SetData') - async def set_data(self, data: object) -> None: - ... + async def set_data(self, data: object) -> None: ... @actormethod(name='ClearData') - async def clear_data(self) -> None: - ... + async def clear_data(self) -> None: ... @actormethod(name='TestData') - async def test_data(self) -> int: - ... + async def test_data(self) -> int: ... @actormethod(name='AddState') - async def add_state(self, name: str, data: object) -> None: - ... + async def add_state(self, name: str, data: object) -> None: ... @actormethod(name='UpdateState') - async def update_state(self, name: str, data: object) -> None: - ... + async def update_state(self, name: str, data: object) -> None: ... @actormethod(name='AddDataNoSave') - async def add_data_no_save(self, data: object) -> None: - ... + async def add_data_no_save(self, data: object) -> None: ... @actormethod(name='RemoveDataNoSave') - async def remove_data_no_save(self) -> None: - ... + async def remove_data_no_save(self) -> None: ... @actormethod(name='SaveState') - async def save_state(self) -> None: - ... + async def save_state(self) -> None: ... @actormethod(name='ToggleReminder') - async def toggle_reminder(self, name: str, enabled: bool) -> None: - ... + async def toggle_reminder(self, name: str, enabled: bool) -> None: ... @actormethod(name='ToggleTimer') - async def toggle_timer(self, name: str, enabled: bool) -> None: - ... + async def toggle_timer(self, name: str, enabled: bool) -> None: ... 
class MockTestActor(Actor, MockTestActorInterface, Remindable): diff --git a/tests/actor/test_state_manager.py b/tests/actor/test_state_manager.py index c9406dbd2..11a7c4f08 100644 --- a/tests/actor/test_state_manager.py +++ b/tests/actor/test_state_manager.py @@ -15,19 +15,16 @@ import base64 import unittest - from unittest import mock from dapr.actor.id import ActorId +from dapr.actor.runtime._type_information import ActorTypeInformation from dapr.actor.runtime.context import ActorRuntimeContext from dapr.actor.runtime.state_change import StateChangeKind from dapr.actor.runtime.state_manager import ActorStateManager -from dapr.actor.runtime._type_information import ActorTypeInformation from dapr.serializers import DefaultJSONSerializer - from tests.actor.fake_actor_classes import FakeSimpleActor from tests.actor.fake_client import FakeDaprActorClient - from tests.actor.utils import _async_mock, _run diff --git a/tests/actor/test_timer_data.py b/tests/actor/test_timer_data.py index ba410cecd..8a193f416 100644 --- a/tests/actor/test_timer_data.py +++ b/tests/actor/test_timer_data.py @@ -13,9 +13,9 @@ limitations under the License. 
""" -from typing import Any import unittest from datetime import timedelta +from typing import Any from dapr.actor.runtime._timer_data import ActorTimerData diff --git a/tests/actor/test_type_information.py b/tests/actor/test_type_information.py index 1532e3956..201eb87fb 100644 --- a/tests/actor/test_type_information.py +++ b/tests/actor/test_type_information.py @@ -17,10 +17,10 @@ from dapr.actor.runtime._type_information import ActorTypeInformation from tests.actor.fake_actor_classes import ( - FakeSimpleActor, - FakeMultiInterfacesActor, FakeActorCls1Interface, FakeActorCls2Interface, + FakeMultiInterfacesActor, + FakeSimpleActor, ReentrantActorInterface, ) diff --git a/tests/actor/test_type_utils.py b/tests/actor/test_type_utils.py index f8b2eee2a..6b2a9319b 100644 --- a/tests/actor/test_type_utils.py +++ b/tests/actor/test_type_utils.py @@ -17,19 +17,18 @@ from dapr.actor.actor_interface import ActorInterface from dapr.actor.runtime._type_utils import ( + get_actor_interfaces, get_class_method_args, + get_dispatchable_attrs, get_method_arg_types, get_method_return_types, is_dapr_actor, - get_actor_interfaces, - get_dispatchable_attrs, ) - from tests.actor.fake_actor_classes import ( - FakeSimpleActor, - FakeMultiInterfacesActor, FakeActorCls1Interface, FakeActorCls2Interface, + FakeMultiInterfacesActor, + FakeSimpleActor, ) diff --git a/tests/clients/certs.py b/tests/clients/certs.py index a30b25312..9d851ca46 100644 --- a/tests/clients/certs.py +++ b/tests/clients/certs.py @@ -1,7 +1,7 @@ import os import ssl -import grpc +import grpc from OpenSSL import crypto diff --git a/tests/clients/fake_dapr_server.py b/tests/clients/fake_dapr_server.py index a1cbeb4b7..a1ee695eb 100644 --- a/tests/clients/fake_dapr_server.py +++ b/tests/clients/fake_dapr_server.py @@ -1,48 +1,47 @@ -import grpc import json - from concurrent import futures -from google.protobuf.any_pb2 import Any as GrpcAny +from typing import Dict + +import grpc from google.protobuf import empty_pb2, 
struct_pb2 -from google.rpc import status_pb2, code_pb2 +from google.protobuf.any_pb2 import Any as GrpcAny +from google.rpc import code_pb2, status_pb2 from grpc_status import rpc_status from dapr.clients.grpc._helpers import to_bytes -from dapr.proto import api_service_v1, common_v1, api_v1, appcallback_v1 -from dapr.proto.common.v1.common_pb2 import ConfigurationItem from dapr.clients.grpc._response import WorkflowRuntimeStatus +from dapr.proto import api_service_v1, api_v1, appcallback_v1, common_v1 +from dapr.proto.common.v1.common_pb2 import ConfigurationItem from dapr.proto.runtime.v1.dapr_pb2 import ( ActiveActorsCount, + ConversationResponseAlpha2, + ConversationResultAlpha2, + ConversationResultChoices, + ConversationResultMessage, + ConversationToolCalls, + ConversationToolCallsOfFunction, + DecryptRequest, + DecryptResponse, + EncryptRequest, + EncryptResponse, GetMetadataResponse, + GetWorkflowRequest, + GetWorkflowResponse, + PauseWorkflowRequest, + PurgeWorkflowRequest, QueryStateItem, + RaiseEventWorkflowRequest, RegisteredComponents, + ResumeWorkflowRequest, SetMetadataRequest, + StartWorkflowRequest, + StartWorkflowResponse, + TerminateWorkflowRequest, TryLockRequest, TryLockResponse, UnlockRequest, UnlockResponse, - StartWorkflowRequest, - StartWorkflowResponse, - GetWorkflowRequest, - GetWorkflowResponse, - PauseWorkflowRequest, - ResumeWorkflowRequest, - TerminateWorkflowRequest, - PurgeWorkflowRequest, - RaiseEventWorkflowRequest, - EncryptRequest, - EncryptResponse, - DecryptRequest, - DecryptResponse, - ConversationResultAlpha2, - ConversationResultChoices, - ConversationResultMessage, - ConversationResponseAlpha2, - ConversationToolCalls, - ConversationToolCallsOfFunction, ) -from typing import Dict - from tests.clients.certs import GrpcCerts from tests.clients.fake_http_server import FakeHttpServer diff --git a/tests/clients/fake_http_server.py b/tests/clients/fake_http_server.py index e08e82d29..8476b18ba 100644 --- 
a/tests/clients/fake_http_server.py +++ b/tests/clients/fake_http_server.py @@ -1,8 +1,7 @@ import time +from http.server import BaseHTTPRequestHandler, HTTPServer from ssl import PROTOCOL_TLS_SERVER, SSLContext - from threading import Thread -from http.server import BaseHTTPRequestHandler, HTTPServer from tests.clients.certs import HttpCerts diff --git a/tests/clients/test_conversation.py b/tests/clients/test_conversation.py index 8a6cc697e..50daebc64 100644 --- a/tests/clients/test_conversation.py +++ b/tests/clients/test_conversation.py @@ -13,7 +13,6 @@ limitations under the License. """ - import asyncio import json import unittest @@ -33,7 +32,14 @@ from dapr.clients.grpc.conversation import ( ConversationInput, ConversationInputAlpha2, + ConversationMessage, + ConversationMessageOfAssistant, ConversationResponseAlpha2, + ConversationResultAlpha2, + ConversationResultAlpha2Choices, + ConversationResultAlpha2Message, + ConversationToolCalls, + ConversationToolCallsOfFunction, ConversationTools, ConversationToolsFunction, FunctionBackend, @@ -41,18 +47,11 @@ create_system_message, create_tool_message, create_user_message, + execute_registered_tool, execute_registered_tool_async, get_registered_tools, register_tool, unregister_tool, - ConversationResultAlpha2Message, - ConversationResultAlpha2Choices, - ConversationResultAlpha2, - ConversationMessage, - ConversationMessageOfAssistant, - ConversationToolCalls, - ConversationToolCallsOfFunction, - execute_registered_tool, ) from dapr.clients.grpc.conversation import ( tool as tool_decorator, @@ -1010,7 +1009,7 @@ def test_multiline_example(self): def test_zero_indent(self): result = conversation._indent_lines('Title', 'Line one\nLine two', 0) - expected = 'Title: Line one\n' ' Line two' + expected = 'Title: Line one\n Line two' self.assertEqual(result, expected) def test_empty_string(self): @@ -1026,7 +1025,7 @@ def test_title_length_affects_indent(self): # Title length is 1, indent_after_first_line should be 
indent + len(title) + 2 # indent=2, len(title)=1 => 2 + 1 + 2 = 5 spaces on continuation lines result = conversation._indent_lines('T', 'a\nb', 2) - expected = ' T: a\n' ' b' + expected = ' T: a\n b' self.assertEqual(result, expected) diff --git a/tests/clients/test_conversation_helpers.py b/tests/clients/test_conversation_helpers.py index 62f2f69ae..e7c69b30e 100644 --- a/tests/clients/test_conversation_helpers.py +++ b/tests/clients/test_conversation_helpers.py @@ -12,37 +12,39 @@ See the License for the specific language governing permissions and limitations under the License. """ + +import base64 import io import json -import base64 import unittest import warnings from contextlib import redirect_stdout from dataclasses import dataclass from enum import Enum -from typing import Any, Dict, List, Literal, Optional, Union, Set -from dapr.conf import settings +from typing import Any, Dict, List, Literal, Optional, Set, Union + from dapr.clients.grpc._conversation_helpers import ( - stringify_tool_output, - bind_params_to_func, - function_to_json_schema, + ToolArgumentError, _extract_docstring_args, _python_type_to_json_schema, + bind_params_to_func, extract_docstring_summary, - ToolArgumentError, + function_to_json_schema, + stringify_tool_output, ) from dapr.clients.grpc.conversation import ( - ConversationToolsFunction, - ConversationMessageOfUser, + ConversationMessage, ConversationMessageContent, - ConversationToolCalls, - ConversationToolCallsOfFunction, ConversationMessageOfAssistant, - ConversationMessageOfTool, - ConversationMessage, ConversationMessageOfDeveloper, ConversationMessageOfSystem, + ConversationMessageOfTool, + ConversationMessageOfUser, + ConversationToolCalls, + ConversationToolCallsOfFunction, + ConversationToolsFunction, ) +from dapr.conf import settings def test_string_passthrough(): @@ -2089,8 +2091,7 @@ def f(p: Plain) -> int: bind_params_to_func(f, {'p': {}}) def test_any_and_isinstance_fallback(self): - class C: - ... + class C: ... 
def f(a: Any, c: C) -> tuple: return a, c diff --git a/tests/clients/test_dapr_grpc_client.py b/tests/clients/test_dapr_grpc_client.py index e0713f703..a52bbeb0d 100644 --- a/tests/clients/test_dapr_grpc_client.py +++ b/tests/clients/test_dapr_grpc_client.py @@ -13,43 +13,43 @@ limitations under the License. """ +import asyncio import json import socket import tempfile import time import unittest import uuid -import asyncio - from unittest.mock import patch -from google.rpc import status_pb2, code_pb2 +from google.rpc import code_pb2, status_pb2 -from dapr.clients.exceptions import DaprGrpcError -from dapr.clients.grpc.client import DaprGrpcClient from dapr.clients import DaprClient -from dapr.clients.grpc.subscription import StreamInactiveError -from dapr.proto import common_v1 -from .fake_dapr_server import FakeDaprSidecar -from dapr.conf import settings +from dapr.clients.exceptions import DaprGrpcError +from dapr.clients.grpc import conversation +from dapr.clients.grpc._crypto import DecryptOptions, EncryptOptions from dapr.clients.grpc._helpers import to_bytes +from dapr.clients.grpc._jobs import Job from dapr.clients.grpc._request import ( TransactionalStateOperation, TransactionOperationType, ) -from dapr.clients.grpc._jobs import Job -from dapr.clients.grpc._state import StateOptions, Consistency, Concurrency, StateItem -from dapr.clients.grpc._crypto import EncryptOptions, DecryptOptions from dapr.clients.grpc._response import ( ConfigurationItem, ConfigurationResponse, ConfigurationWatcher, DaprResponse, + TopicEventResponse, UnlockResponseStatus, WorkflowRuntimeStatus, - TopicEventResponse, ) -from dapr.clients.grpc import conversation +from dapr.clients.grpc._state import Concurrency, Consistency, StateItem, StateOptions +from dapr.clients.grpc.client import DaprGrpcClient +from dapr.clients.grpc.subscription import StreamInactiveError +from dapr.conf import settings +from dapr.proto import common_v1 + +from .fake_dapr_server import FakeDaprSidecar 
class DaprGrpcClientTests(unittest.TestCase): @@ -1000,7 +1000,6 @@ def test_set_metadata(self): self.assertEqual(response.extended_metadata[metadata_key], metadata_value) def test_set_metadata_input_validation(self): - dapr = DaprGrpcClient(f'{self.scheme}localhost:{self.grpc_port}') valid_attr_name = 'attribute name' valid_attr_value = 'attribute value' # Invalid inputs for string arguments @@ -1694,7 +1693,7 @@ def test_delete_job_alpha1_validation_error(self): def test_jobs_error_handling(self): """Test error handling for Jobs API using fake server's exception mechanism.""" - from google.rpc import status_pb2, code_pb2 + from google.rpc import code_pb2, status_pb2 dapr = DaprGrpcClient(f'{self.scheme}localhost:{self.grpc_port}') diff --git a/tests/clients/test_dapr_grpc_client_async.py b/tests/clients/test_dapr_grpc_client_async.py index 50043912d..245c384dd 100644 --- a/tests/clients/test_dapr_grpc_client_async.py +++ b/tests/clients/test_dapr_grpc_client_async.py @@ -12,6 +12,7 @@ See the License for the specific language governing permissions and limitations under the License. 
""" + import json import socket import tempfile @@ -19,28 +20,29 @@ import uuid from unittest.mock import patch -from google.rpc import status_pb2, code_pb2 +from google.rpc import code_pb2, status_pb2 -from dapr.aio.clients.grpc.client import DaprGrpcClientAsync from dapr.aio.clients import DaprClient +from dapr.aio.clients.grpc.client import DaprGrpcClientAsync from dapr.clients.exceptions import DaprGrpcError -from dapr.common.pubsub.subscription import StreamInactiveError -from dapr.proto import common_v1 -from .fake_dapr_server import FakeDaprSidecar -from dapr.conf import settings -from dapr.clients.grpc._helpers import to_bytes -from dapr.clients.grpc._request import TransactionalStateOperation from dapr.clients.grpc import conversation +from dapr.clients.grpc._crypto import DecryptOptions, EncryptOptions +from dapr.clients.grpc._helpers import to_bytes from dapr.clients.grpc._jobs import Job -from dapr.clients.grpc._state import StateOptions, Consistency, Concurrency, StateItem -from dapr.clients.grpc._crypto import EncryptOptions, DecryptOptions +from dapr.clients.grpc._request import TransactionalStateOperation from dapr.clients.grpc._response import ( ConfigurationItem, - ConfigurationWatcher, ConfigurationResponse, + ConfigurationWatcher, DaprResponse, UnlockResponseStatus, ) +from dapr.clients.grpc._state import Concurrency, Consistency, StateItem, StateOptions +from dapr.common.pubsub.subscription import StreamInactiveError +from dapr.conf import settings +from dapr.proto import common_v1 + +from .fake_dapr_server import FakeDaprSidecar class DaprGrpcClientAsyncTests(unittest.IsolatedAsyncioTestCase): @@ -929,7 +931,6 @@ async def test_set_metadata(self): self.assertEqual(response.extended_metadata[metadata_key], metadata_value) async def test_set_metadata_input_validation(self): - dapr = DaprGrpcClientAsync(f'{self.scheme}localhost:{self.grpc_port}') valid_attr_name = 'attribute name' valid_attr_value = 'attribute value' # Invalid inputs for string 
arguments diff --git a/tests/clients/test_dapr_grpc_client_async_secure.py b/tests/clients/test_dapr_grpc_client_async_secure.py index 652feac20..a76a0449f 100644 --- a/tests/clients/test_dapr_grpc_client_async_secure.py +++ b/tests/clients/test_dapr_grpc_client_async_secure.py @@ -14,18 +14,19 @@ """ import unittest - from unittest.mock import patch from dapr.aio.clients.grpc.client import DaprGrpcClientAsync +from dapr.aio.clients.health import DaprHealth as DaprHealthAsync from dapr.clients.health import DaprHealth +from dapr.conf import settings from tests.clients.certs import replacement_get_credentials_func, replacement_get_health_context from tests.clients.test_dapr_grpc_client_async import DaprGrpcClientAsyncTests -from .fake_dapr_server import FakeDaprSidecar -from dapr.conf import settings +from .fake_dapr_server import FakeDaprSidecar DaprGrpcClientAsync.get_credentials = replacement_get_credentials_func +DaprHealthAsync.get_ssl_context = replacement_get_health_context DaprHealth.get_ssl_context = replacement_get_health_context diff --git a/tests/clients/test_dapr_grpc_client_secure.py b/tests/clients/test_dapr_grpc_client_secure.py index 41dedca1a..2a6710403 100644 --- a/tests/clients/test_dapr_grpc_client_secure.py +++ b/tests/clients/test_dapr_grpc_client_secure.py @@ -12,6 +12,7 @@ See the License for the specific language governing permissions and limitations under the License. 
""" + import unittest from unittest.mock import patch @@ -19,8 +20,8 @@ from dapr.clients.health import DaprHealth from dapr.conf import settings from tests.clients.certs import replacement_get_credentials_func, replacement_get_health_context - from tests.clients.test_dapr_grpc_client import DaprGrpcClientTests + from .fake_dapr_server import FakeDaprSidecar diff --git a/tests/clients/test_dapr_grpc_helpers.py b/tests/clients/test_dapr_grpc_helpers.py index 9e794aab7..6c7c27be9 100644 --- a/tests/clients/test_dapr_grpc_helpers.py +++ b/tests/clients/test_dapr_grpc_helpers.py @@ -1,22 +1,22 @@ import base64 import unittest -from google.protobuf.struct_pb2 import Struct from google.protobuf import json_format -from google.protobuf.json_format import ParseError from google.protobuf.any_pb2 import Any as GrpcAny +from google.protobuf.json_format import ParseError +from google.protobuf.struct_pb2 import Struct from google.protobuf.wrappers_pb2 import ( BoolValue, - StringValue, + BytesValue, + DoubleValue, Int32Value, Int64Value, - DoubleValue, - BytesValue, + StringValue, ) from dapr.clients.grpc._helpers import ( - convert_value_to_struct, convert_dict_to_grpc_dict_of_any, + convert_value_to_struct, ) diff --git a/tests/clients/test_dapr_grpc_request.py b/tests/clients/test_dapr_grpc_request.py index 98d8e2005..396a8ec95 100644 --- a/tests/clients/test_dapr_grpc_request.py +++ b/tests/clients/test_dapr_grpc_request.py @@ -16,13 +16,13 @@ import io import unittest +from dapr.clients.grpc._crypto import DecryptOptions, EncryptOptions from dapr.clients.grpc._request import ( - InvokeMethodRequest, BindingRequest, - EncryptRequestIterator, DecryptRequestIterator, + EncryptRequestIterator, + InvokeMethodRequest, ) -from dapr.clients.grpc._crypto import EncryptOptions, DecryptOptions from dapr.proto import api_v1, common_v1 diff --git a/tests/clients/test_dapr_grpc_request_async.py b/tests/clients/test_dapr_grpc_request_async.py index 75fe74fce..7782fecdf 100644 --- 
a/tests/clients/test_dapr_grpc_request_async.py +++ b/tests/clients/test_dapr_grpc_request_async.py @@ -16,8 +16,8 @@ import io import unittest -from dapr.clients.grpc._crypto import EncryptOptions, DecryptOptions -from dapr.aio.clients.grpc._request import EncryptRequestIterator, DecryptRequestIterator +from dapr.aio.clients.grpc._request import DecryptRequestIterator, EncryptRequestIterator +from dapr.clients.grpc._crypto import DecryptOptions, EncryptOptions from dapr.proto import api_v1 diff --git a/tests/clients/test_dapr_grpc_response.py b/tests/clients/test_dapr_grpc_response.py index 1c91805eb..c2fe237f9 100644 --- a/tests/clients/test_dapr_grpc_response.py +++ b/tests/clients/test_dapr_grpc_response.py @@ -18,15 +18,14 @@ from google.protobuf.any_pb2 import Any as GrpcAny from dapr.clients.grpc._response import ( - DaprResponse, - InvokeMethodResponse, BindingResponse, - StateResponse, BulkStateItem, - EncryptResponse, + DaprResponse, DecryptResponse, + EncryptResponse, + InvokeMethodResponse, + StateResponse, ) - from dapr.proto import api_v1, common_v1 diff --git a/tests/clients/test_dapr_grpc_response_async.py b/tests/clients/test_dapr_grpc_response_async.py index 2626cbf41..02b09716f 100644 --- a/tests/clients/test_dapr_grpc_response_async.py +++ b/tests/clients/test_dapr_grpc_response_async.py @@ -15,7 +15,7 @@ import unittest -from dapr.aio.clients.grpc._response import EncryptResponse, DecryptResponse +from dapr.aio.clients.grpc._response import DecryptResponse, EncryptResponse from dapr.proto import api_v1, common_v1 diff --git a/tests/clients/test_exceptions.py b/tests/clients/test_exceptions.py index 08eea4d53..e8b4c6d9f 100644 --- a/tests/clients/test_exceptions.py +++ b/tests/clients/test_exceptions.py @@ -3,9 +3,9 @@ import unittest import grpc -from google.rpc import error_details_pb2, status_pb2, code_pb2 from google.protobuf.any_pb2 import Any from google.protobuf.duration_pb2 import Duration +from google.rpc import code_pb2, 
error_details_pb2, status_pb2 from dapr.clients import DaprGrpcClient from dapr.clients.exceptions import DaprGrpcError, DaprInternalError diff --git a/tests/clients/test_heatlhcheck.py b/tests/clients/test_healthcheck.py similarity index 98% rename from tests/clients/test_heatlhcheck.py rename to tests/clients/test_healthcheck.py index d447e072c..c5b49aee9 100644 --- a/tests/clients/test_heatlhcheck.py +++ b/tests/clients/test_healthcheck.py @@ -12,9 +12,10 @@ See the License for the specific language governing permissions and limitations under the License. """ + import time import unittest -from unittest.mock import patch, MagicMock +from unittest.mock import MagicMock, patch from dapr.clients.health import DaprHealth from dapr.conf import settings diff --git a/tests/clients/test_healthcheck_async.py b/tests/clients/test_healthcheck_async.py new file mode 100644 index 000000000..668768732 --- /dev/null +++ b/tests/clients/test_healthcheck_async.py @@ -0,0 +1,197 @@ +# -*- coding: utf-8 -*- + +""" +Copyright 2025 The Dapr Authors +Licensed under the Apache License, Version 2.0 (the "License"); +you may not use this file except in compliance with the License. +You may obtain a copy of the License at + http://www.apache.org/licenses/LICENSE-2.0 +Unless required by applicable law or agreed to in writing, software +distributed under the License is distributed on an "AS IS" BASIS, +WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. +See the License for the specific language governing permissions and +limitations under the License. 
+""" + +import asyncio +import time +import unittest +from unittest.mock import AsyncMock, MagicMock, patch + +from dapr.aio.clients.health import DaprHealth +from dapr.conf import settings +from dapr.version import __version__ + + +class DaprHealthCheckAsyncTests(unittest.IsolatedAsyncioTestCase): + @patch.object(settings, 'DAPR_HTTP_ENDPOINT', 'http://domain.com:3500') + @patch('aiohttp.ClientSession.get') + async def test_wait_for_sidecar_success(self, mock_get): + # Create mock response + mock_response = MagicMock() + mock_response.status = 200 + mock_response.__aenter__ = AsyncMock(return_value=mock_response) + mock_response.__aexit__ = AsyncMock(return_value=None) + mock_get.return_value = mock_response + + try: + await DaprHealth.wait_for_sidecar() + except Exception as e: + self.fail(f'wait_for_sidecar() raised an exception unexpectedly: {e}') + + mock_get.assert_called_once() + + # Check URL + called_url = mock_get.call_args[0][0] + self.assertEqual(called_url, 'http://domain.com:3500/v1.0/healthz/outbound') + + # Check headers are properly set + headers = mock_get.call_args[1]['headers'] + self.assertIn('User-Agent', headers) + self.assertEqual(headers['User-Agent'], f'dapr-sdk-python/{__version__}') + + @patch.object(settings, 'DAPR_HTTP_ENDPOINT', 'http://domain.com:3500') + @patch.object(settings, 'DAPR_API_TOKEN', 'mytoken') + @patch('aiohttp.ClientSession.get') + async def test_wait_for_sidecar_success_with_api_token(self, mock_get): + # Create mock response + mock_response = MagicMock() + mock_response.status = 200 + mock_response.__aenter__ = AsyncMock(return_value=mock_response) + mock_response.__aexit__ = AsyncMock(return_value=None) + mock_get.return_value = mock_response + + try: + await DaprHealth.wait_for_sidecar() + except Exception as e: + self.fail(f'wait_for_sidecar() raised an exception unexpectedly: {e}') + + mock_get.assert_called_once() + + # Check headers are properly set + headers = mock_get.call_args[1]['headers'] + 
self.assertIn('User-Agent', headers) + self.assertEqual(headers['User-Agent'], f'dapr-sdk-python/{__version__}') + self.assertIn('dapr-api-token', headers) + self.assertEqual(headers['dapr-api-token'], 'mytoken') + + @patch.object(settings, 'DAPR_HEALTH_TIMEOUT', '2.5') + @patch('aiohttp.ClientSession.get') + async def test_wait_for_sidecar_timeout(self, mock_get): + # Create mock response that always returns 500 + mock_response = MagicMock() + mock_response.status = 500 + mock_response.__aenter__ = AsyncMock(return_value=mock_response) + mock_response.__aexit__ = AsyncMock(return_value=None) + mock_get.return_value = mock_response + + start = time.time() + + with self.assertRaises(TimeoutError): + await DaprHealth.wait_for_sidecar() + + self.assertGreaterEqual(time.time() - start, 2.5) + self.assertGreater(mock_get.call_count, 1) + + @patch.object(settings, 'DAPR_HTTP_ENDPOINT', 'http://domain.com:3500') + @patch.object(settings, 'DAPR_HEALTH_TIMEOUT', '5.0') + @patch('aiohttp.ClientSession.get') + async def test_health_check_does_not_block(self, mock_get): + """Test that health check doesn't block other async tasks from running""" + # Mock health check to retry several times before succeeding + call_count = [0] # Use list to allow modification in nested function + + def side_effect(*args, **kwargs): + call_count[0] += 1 + # First 2 calls fail with ClientError, then succeed + # This will cause ~2 seconds of retries (1 second sleep after each failure) + if call_count[0] <= 2: + import aiohttp + + raise aiohttp.ClientError('Connection refused') + else: + mock_response = MagicMock() + mock_response.status = 200 + mock_response.__aenter__ = AsyncMock(return_value=mock_response) + mock_response.__aexit__ = AsyncMock(return_value=None) + return mock_response + + mock_get.side_effect = side_effect + + # Counter that will be incremented by background task + counter = [0] # Use list to allow modification in nested function + is_running = [True] + + async def 
increment_counter(): + """Background task that increments counter every 0.5 seconds""" + while is_running[0]: + await asyncio.sleep(0.5) + counter[0] += 1 + + # Start the background task + counter_task = asyncio.create_task(increment_counter()) + + try: + # Run health check (will take ~2 seconds with retries) + await DaprHealth.wait_for_sidecar() + + # Stop the background task + is_running[0] = False + await asyncio.sleep(0.1) # Give it time to finish current iteration + + # Verify the counter was incremented during health check + # In 2 seconds with 0.5s intervals, we expect at least 3 increments + self.assertGreaterEqual( + counter[0], + 3, + f'Expected counter to increment at least 3 times during health check, ' + f'but got {counter[0]}. This indicates health check may be blocking.', + ) + + # Verify health check made multiple attempts + self.assertGreaterEqual(call_count[0], 2) + + finally: + # Clean up + is_running[0] = False + counter_task.cancel() + try: + await counter_task + except asyncio.CancelledError: + pass + + @patch.object(settings, 'DAPR_HTTP_ENDPOINT', 'http://domain.com:3500') + @patch('aiohttp.ClientSession.get') + async def test_multiple_health_checks_concurrent(self, mock_get): + """Test that multiple health check calls can run concurrently""" + # Create mock response + mock_response = MagicMock() + mock_response.status = 200 + mock_response.__aenter__ = AsyncMock(return_value=mock_response) + mock_response.__aexit__ = AsyncMock(return_value=None) + mock_get.return_value = mock_response + + # Run multiple health checks concurrently + start_time = time.time() + results = await asyncio.gather( + DaprHealth.wait_for_sidecar(), + DaprHealth.wait_for_sidecar(), + DaprHealth.wait_for_sidecar(), + ) + elapsed = time.time() - start_time + + # All should complete successfully + self.assertEqual(len(results), 3) + self.assertIsNone(results[0]) + self.assertIsNone(results[1]) + self.assertIsNone(results[2]) + + # Should complete quickly since they run 
concurrently + self.assertLess(elapsed, 1.0) + + # Verify multiple calls were made + self.assertGreaterEqual(mock_get.call_count, 3) + + +if __name__ == '__main__': + unittest.main() diff --git a/tests/clients/test_http_helpers.py b/tests/clients/test_http_helpers.py index ab173cd73..abf284dbe 100644 --- a/tests/clients/test_http_helpers.py +++ b/tests/clients/test_http_helpers.py @@ -1,8 +1,8 @@ import unittest from unittest.mock import patch -from dapr.conf import settings from dapr.clients.http.helpers import get_api_url +from dapr.conf import settings class DaprHttpClientHelpersTests(unittest.TestCase): diff --git a/tests/clients/test_http_service_invocation_client.py b/tests/clients/test_http_service_invocation_client.py index c0b43a863..a0a7aadd6 100644 --- a/tests/clients/test_http_service_invocation_client.py +++ b/tests/clients/test_http_service_invocation_client.py @@ -24,13 +24,12 @@ from opentelemetry.sdk.trace.sampling import ALWAYS_ON from opentelemetry.trace.propagation.tracecontext import TraceContextTextMapPropagator - +from dapr.clients import DaprClient from dapr.clients.exceptions import DaprInternalError from dapr.conf import settings from dapr.proto import common_v1 from .fake_http_server import FakeHttpServer -from dapr.clients import DaprClient class DaprInvocationHttpClientTests(unittest.TestCase): diff --git a/tests/clients/test_jobs.py b/tests/clients/test_jobs.py index fe3d70b53..645d43256 100644 --- a/tests/clients/test_jobs.py +++ b/tests/clients/test_jobs.py @@ -5,9 +5,10 @@ """ import unittest + from google.protobuf.any_pb2 import Any as GrpcAny -from dapr.clients.grpc._jobs import Job, DropFailurePolicy, ConstantFailurePolicy +from dapr.clients.grpc._jobs import ConstantFailurePolicy, DropFailurePolicy, Job from dapr.proto.runtime.v1 import dapr_pb2 as api_v1 diff --git a/tests/clients/test_retries_policy.py b/tests/clients/test_retries_policy.py index b5137e643..d4a383fc1 100644 --- a/tests/clients/test_retries_policy.py +++ 
b/tests/clients/test_retries_policy.py @@ -12,11 +12,12 @@ See the License for the specific language governing permissions and limitations under the License. """ + import unittest from unittest import mock -from unittest.mock import Mock, MagicMock, patch, AsyncMock +from unittest.mock import AsyncMock, MagicMock, Mock, patch -from grpc import StatusCode, RpcError +from grpc import RpcError, StatusCode from dapr.clients.retry import RetryPolicy from dapr.serializers import DefaultJSONSerializer diff --git a/tests/clients/test_retries_policy_async.py b/tests/clients/test_retries_policy_async.py index ebe6865db..2b35c35c4 100644 --- a/tests/clients/test_retries_policy_async.py +++ b/tests/clients/test_retries_policy_async.py @@ -12,11 +12,12 @@ See the License for the specific language governing permissions and limitations under the License. """ + import unittest from unittest import mock -from unittest.mock import MagicMock, patch, AsyncMock +from unittest.mock import AsyncMock, MagicMock, patch -from grpc import StatusCode, RpcError +from grpc import RpcError, StatusCode from dapr.clients.retry import RetryPolicy diff --git a/tests/clients/test_secure_http_service_invocation_client.py b/tests/clients/test_secure_http_service_invocation_client.py index 4d1bdda1f..df13d8197 100644 --- a/tests/clients/test_secure_http_service_invocation_client.py +++ b/tests/clients/test_secure_http_service_invocation_client.py @@ -12,6 +12,7 @@ See the License for the specific language governing permissions and limitations under the License. 
""" + import ssl import typing from asyncio import TimeoutError @@ -29,8 +30,7 @@ from dapr.conf import settings from dapr.proto import common_v1 - -from .certs import replacement_get_health_context, replacement_get_credentials_func, GrpcCerts +from .certs import GrpcCerts, replacement_get_credentials_func, replacement_get_health_context from .fake_http_server import FakeHttpServer from .test_http_service_invocation_client import DaprInvocationHttpClientTests diff --git a/tests/clients/test_subscription.py b/tests/clients/test_subscription.py index ed2eae3fa..21018aaac 100644 --- a/tests/clients/test_subscription.py +++ b/tests/clients/test_subscription.py @@ -1,8 +1,9 @@ -from dapr.clients.grpc.subscription import SubscriptionMessage -from dapr.proto.runtime.v1.appcallback_pb2 import TopicEventRequest +import unittest + from google.protobuf.struct_pb2 import Struct -import unittest +from dapr.clients.grpc.subscription import SubscriptionMessage +from dapr.proto.runtime.v1.appcallback_pb2 import TopicEventRequest class SubscriptionMessageTests(unittest.TestCase): diff --git a/tests/clients/test_timeout_interceptor.py b/tests/clients/test_timeout_interceptor.py index 79859b2e5..c60331bed 100644 --- a/tests/clients/test_timeout_interceptor.py +++ b/tests/clients/test_timeout_interceptor.py @@ -15,6 +15,7 @@ import unittest from unittest.mock import Mock, patch + from dapr.clients.grpc.interceptors import DaprClientTimeoutInterceptor from dapr.conf import settings diff --git a/tests/clients/test_timeout_interceptor_async.py b/tests/clients/test_timeout_interceptor_async.py index d057df9fc..88b5831dc 100644 --- a/tests/clients/test_timeout_interceptor_async.py +++ b/tests/clients/test_timeout_interceptor_async.py @@ -15,6 +15,7 @@ import unittest from unittest.mock import Mock, patch + from dapr.aio.clients.grpc.interceptors import DaprClientTimeoutInterceptorAsync from dapr.conf import settings diff --git a/tests/serializers/test_default_json_serializer.py 
b/tests/serializers/test_default_json_serializer.py index 86e727ad0..8f65595c0 100644 --- a/tests/serializers/test_default_json_serializer.py +++ b/tests/serializers/test_default_json_serializer.py @@ -13,8 +13,8 @@ limitations under the License. """ -import unittest import datetime +import unittest from dapr.serializers.json import DefaultJSONSerializer diff --git a/tests/serializers/test_util.py b/tests/serializers/test_util.py index 9f3b9e026..25124fdf6 100644 --- a/tests/serializers/test_util.py +++ b/tests/serializers/test_util.py @@ -13,12 +13,12 @@ limitations under the License. """ -import unittest import json +import unittest from datetime import timedelta -from dapr.serializers.util import convert_from_dapr_duration, convert_to_dapr_duration from dapr.serializers.json import DaprJSONDecoder +from dapr.serializers.util import convert_from_dapr_duration, convert_to_dapr_duration class UtilTests(unittest.TestCase): diff --git a/tools/requirements.txt b/tools/requirements.txt index f005610f8..e92160336 100644 --- a/tools/requirements.txt +++ b/tools/requirements.txt @@ -1,2 +1,2 @@ grpcio-tools==1.62.3 -mypy-protobuf==3.6.0 +mypy-protobuf==3.7.0 diff --git a/tox.ini b/tox.ini index ebd403c3f..1bdb17921 100644 --- a/tox.ini +++ b/tox.ini @@ -1,9 +1,8 @@ [tox] skipsdist = True -minversion = 3.9.0 +minversion = 3.10.0 envlist = - py{39,310,311,312,313} - flake8, + py{310,311,312,313} ruff, mypy, @@ -16,6 +15,8 @@ commands = coverage run -a -m unittest discover -v ./ext/dapr-ext-workflow/tests coverage run -a -m unittest discover -v ./ext/dapr-ext-grpc/tests coverage run -a -m unittest discover -v ./ext/dapr-ext-fastapi/tests + coverage run -a -m unittest discover -v ./ext/dapr-ext-langgraph/tests + coverage run -a -m unittest discover -v ./ext/dapr-ext-strands/tests coverage run -a -m unittest discover -v ./ext/flask_dapr/tests coverage xml commands_pre = @@ -23,20 +24,15 @@ commands_pre = pip3 install -e {toxinidir}/ext/dapr-ext-workflow/ pip3 install -e 
{toxinidir}/ext/dapr-ext-grpc/ pip3 install -e {toxinidir}/ext/dapr-ext-fastapi/ + pip3 install -e {toxinidir}/ext/dapr-ext-langgraph/ + pip3 install -e {toxinidir}/ext/dapr-ext-strands/ pip3 install -e {toxinidir}/ext/flask_dapr/ -[testenv:flake8] -basepython = python3 -usedevelop = False -deps = flake8 -commands = - flake8 . - [testenv:ruff] basepython = python3 usedevelop = False -deps = ruff==0.2.2 commands = + ruff check --fix ruff format [testenv:examples] @@ -67,12 +63,15 @@ commands = ./validate.sh demo_workflow ./validate.sh workflow ./validate.sh jobs + ./validate.sh langgraph-checkpointer ./validate.sh ../ commands_pre = pip3 install -e {toxinidir}/ pip3 install -e {toxinidir}/ext/dapr-ext-workflow/ pip3 install -e {toxinidir}/ext/dapr-ext-grpc/ pip3 install -e {toxinidir}/ext/dapr-ext-fastapi/ + pip3 install -e {toxinidir}/ext/dapr-ext-langgraph/ + pip3 install -e {toxinidir}/ext/dapr-ext-strands/ allowlist_externals=* [testenv:example-component] @@ -92,6 +91,8 @@ commands_pre = pip3 install -e {toxinidir}/ext/dapr-ext-workflow/ pip3 install -e {toxinidir}/ext/dapr-ext-grpc/ pip3 install -e {toxinidir}/ext/dapr-ext-fastapi/ + pip3 install -e {toxinidir}/ext/dapr-ext-langgraph/ + pip3 install -e {toxinidir}/ext/dapr-ext-strands/ allowlist_externals=* [testenv:type] @@ -105,7 +106,8 @@ commands_pre = pip3 install -e {toxinidir}/ext/dapr-ext-workflow/ pip3 install -e {toxinidir}/ext/dapr-ext-grpc/ pip3 install -e {toxinidir}/ext/dapr-ext-fastapi/ - + pip3 install -e {toxinidir}/ext/dapr-ext-langgraph/ + pip3 install -e {toxinidir}/ext/dapr-ext-strands/ [testenv:doc] basepython = python3 usedevelop = False