From c44a8b53f4cf1f143436be15675906a3adfd3553 Mon Sep 17 00:00:00 2001
From: junanchen
Date: Fri, 5 Dec 2025 22:05:34 -0800
Subject: [PATCH 01/12] [AgentServer] Fix error response in streaming & non-streaming

---
 ...ramework_output_non_streaming_converter.py |  5 +-
 ...nt_framework_output_streaming_converter.py | 11 +--
 .../azure/ai/agentserver/core/server/base.py  | 69 +++++++------------
 3 files changed, 27 insertions(+), 58 deletions(-)

diff --git a/sdk/agentserver/azure-ai-agentserver-agentframework/azure/ai/agentserver/agentframework/models/agent_framework_output_non_streaming_converter.py b/sdk/agentserver/azure-ai-agentserver-agentframework/azure/ai/agentserver/agentframework/models/agent_framework_output_non_streaming_converter.py
index 6e1fcdd4aba2..fbece993305a 100644
--- a/sdk/agentserver/azure-ai-agentserver-agentframework/azure/ai/agentserver/agentframework/models/agent_framework_output_non_streaming_converter.py
+++ b/sdk/agentserver/azure-ai-agentserver-agentframework/azure/ai/agentserver/agentframework/models/agent_framework_output_non_streaming_converter.py
@@ -7,8 +7,7 @@
 import json
 from typing import Any, List
 
-from agent_framework import AgentRunResponse, FunctionResultContent
-from agent_framework._types import FunctionCallContent, TextContent
+from agent_framework import AgentRunResponse, FunctionCallContent, FunctionResultContent, ErrorContent, TextContent
 
 from azure.ai.agentserver.core import AgentRunContext
 from azure.ai.agentserver.core.logger import get_logger
@@ -121,6 +120,8 @@ def _append_content_item(self, content: Any, sink: List[dict], author_name: str)
             self._append_function_call_content(content, sink, author_name)
         elif isinstance(content, FunctionResultContent):
             self._append_function_result_content(content, sink, author_name)
+        elif isinstance(content, ErrorContent):
+            raise ValueError(f"ErrorContent received: code={content.error_code}, message={content.message}")
         else:
             logger.debug("unsupported content type skipped: %s", type(content).__name__)
 
diff --git a/sdk/agentserver/azure-ai-agentserver-agentframework/azure/ai/agentserver/agentframework/models/agent_framework_output_streaming_converter.py b/sdk/agentserver/azure-ai-agentserver-agentframework/azure/ai/agentserver/agentframework/models/agent_framework_output_streaming_converter.py
index 5c2b0ae552cb..92f1cb983e08 100644
--- a/sdk/agentserver/azure-ai-agentserver-agentframework/azure/ai/agentserver/agentframework/models/agent_framework_output_streaming_converter.py
+++ b/sdk/agentserver/azure-ai-agentserver-agentframework/azure/ai/agentserver/agentframework/models/agent_framework_output_streaming_converter.py
@@ -311,16 +311,7 @@ async def convert(self, updates: AsyncIterable[AgentRunResponseUpdate]) -> Async
             elif isinstance(first, FunctionResultContent):
                 state = _FunctionCallOutputStreamingState(self)
             elif isinstance(first, ErrorContent):
-                code=getattr(first, "error_code", None) or "server_error"
-                message=getattr(first, "message", None) or "An error occurred"
-                raise ValueError(f"ErrorContent received: code={code}, message={message}")
-                # yield ResponseErrorEvent(
-                #     sequence_number=self.next_sequence(),
-                #     code=getattr(first, "error_code", None) or "server_error",
-                #     message=getattr(first, "message", None) or "An error occurred",
-                #     param="",
-                # )
-                # continue
+                raise ValueError(f"ErrorContent received: code={first.error_code}, message={first.message}")
 
             if not state:
                 continue
diff --git a/sdk/agentserver/azure-ai-agentserver-core/azure/ai/agentserver/core/server/base.py b/sdk/agentserver/azure-ai-agentserver-core/azure/ai/agentserver/core/server/base.py
index eeb4b85cdc34..5d25dea61be6 100644
--- a/sdk/agentserver/azure-ai-agentserver-core/azure/ai/agentserver/core/server/base.py
+++ b/sdk/agentserver/azure-ai-agentserver-core/azure/ai/agentserver/core/server/base.py
@@ -7,7 +7,6 @@
 import json
 import os
 import time
-import traceback
 
 from abc import abstractmethod
 from typing import Any, AsyncGenerator, Generator, Optional, Union
@@ -141,80 +140,58 @@ async def runs_endpoint(request):
                 resp = await self.agent_run(context)
 
                 if inspect.isgenerator(resp):
-                    # Prefetch first event to allow 500 status if generation fails immediately
-                    try:
-                        first_event = next(resp)
-                    except Exception as e:  # noqa: BLE001
-                        err_msg = _format_error(e)
-                        logger.error("Generator initialization failed: %s\n%s", e, traceback.format_exc())
-                        return JSONResponse({"error": err_msg}, status_code=500)
-
                     def gen():
                         ctx = TraceContextTextMapPropagator().extract(carrier=context_carrier)
                         token = otel_context.attach(ctx)
-                        error_sent = False
+                        seq = 0
                         try:
-                            # yield prefetched first event
-                            yield _event_to_sse_chunk(first_event)
                             for event in resp:
+                                seq += 1
                                 yield _event_to_sse_chunk(event)
                         except Exception as e:  # noqa: BLE001
-                            err_msg = _format_error(e)
-                            logger.error("Error in non-async generator: %s\n%s", e, traceback.format_exc())
-                            payload = {"error": err_msg}
-                            yield f"event: error\ndata: {json.dumps(payload)}\n\n"
-                            error_sent = True
+                            logger.error("Error in non-async generator: %s", e, exc_info=True)
+                            err = project_models.ResponseErrorEvent(
+                                sequence_number=seq + 1,
+                                code=project_models.ResponseErrorCode.SERVER_ERROR,
+                                message=_format_error(e),
+                                param="")
+                            yield _event_to_sse_chunk(err)
                         finally:
                             logger.info("End of processing CreateResponse request.")
                             otel_context.detach(token)
-                        if not error_sent:
-                            yield "data: [DONE]\n\n"
 
                     return StreamingResponse(gen(), media_type="text/event-stream")
                 if inspect.isasyncgen(resp):
-                    # Prefetch first async event to allow early 500
-                    try:
-                        first_event = await resp.__anext__()
-                    except StopAsyncIteration:
-                        # No items produced; treat as empty successful stream
-                        def empty_gen():
-                            yield "data: [DONE]\n\n"
-
-                        return StreamingResponse(empty_gen(), media_type="text/event-stream")
-                    except Exception as e:  # noqa: BLE001
-                        err_msg = _format_error(e)
-                        logger.error("Async generator initialization failed: %s\n%s", e, traceback.format_exc())
-                        return JSONResponse({"error": err_msg}, status_code=500)
-
                     async def gen_async():
                         ctx = TraceContextTextMapPropagator().extract(carrier=context_carrier)
                         token = otel_context.attach(ctx)
-                        error_sent = False
+                        seq = 0
                         try:
-                            # yield prefetched first event
-                            yield _event_to_sse_chunk(first_event)
                             async for event in resp:
+                                seq += 1
                                 yield _event_to_sse_chunk(event)
                         except Exception as e:  # noqa: BLE001
-                            err_msg = _format_error(e)
-                            logger.error("Error in async generator: %s\n%s", e, traceback.format_exc())
-                            payload = {"error": err_msg}
-                            yield f"event: error\ndata: {json.dumps(payload)}\n\n"
-                            yield "data: [DONE]\n\n"
-                            error_sent = True
+                            logger.error("Error in async generator: %s", e, exc_info=True)
+                            err = project_models.ResponseErrorEvent(
+                                sequence_number=seq + 1,
+                                code=project_models.ResponseErrorCode.SERVER_ERROR,
+                                message=_format_error(e),
+                                param="")
+                            yield _event_to_sse_chunk(err)
                         finally:
                             logger.info("End of processing CreateResponse request.")
                             otel_context.detach(token)
-                        if not error_sent:
-                            yield "data: [DONE]\n\n"
 
                     return StreamingResponse(gen_async(), media_type="text/event-stream")
logger.info("End of processing CreateResponse request.") return JSONResponse(resp.as_dict()) except Exception as e: # TODO: extract status code from exception - logger.error(f"Error processing CreateResponse request: {traceback.format_exc()}") - return JSONResponse({"error": str(e)}, status_code=500) + logger.error(f"Error processing CreateResponse request: {e}", exc_info=True) + err = project_models.ResponseError( + code=project_models.ResponseErrorCode.SERVER_ERROR, + message=_format_error(e)) + return JSONResponse(err.as_dict()) async def liveness_endpoint(request): result = await self.agent_liveness(request) From aae05668f01661b6218993d1f7847d751de48269 Mon Sep 17 00:00:00 2001 From: junanchen Date: Sat, 6 Dec 2025 18:40:15 -0800 Subject: [PATCH 02/12] optimize error handling --- .../azure/ai/agentserver/core/server/base.py | 86 ++++++++----------- .../ai/agentserver/langgraph/langgraph.py | 4 +- 2 files changed, 38 insertions(+), 52 deletions(-) diff --git a/sdk/agentserver/azure-ai-agentserver-core/azure/ai/agentserver/core/server/base.py b/sdk/agentserver/azure-ai-agentserver-core/azure/ai/agentserver/core/server/base.py index 5d25dea61be6..e4842effff18 100644 --- a/sdk/agentserver/azure-ai-agentserver-core/azure/ai/agentserver/core/server/base.py +++ b/sdk/agentserver/azure-ai-agentserver-core/azure/ai/agentserver/core/server/base.py @@ -14,6 +14,7 @@ from opentelemetry import context as otel_context, trace from opentelemetry.trace.propagation.tracecontext import TraceContextTextMapPropagator from starlette.applications import Starlette +from starlette.concurrency import iterate_in_threadpool from starlette.middleware.base import BaseHTTPMiddleware from starlette.middleware.cors import CORSMiddleware from starlette.requests import Request @@ -137,61 +138,46 @@ async def runs_endpoint(request): context_carrier = {} TraceContextTextMapPropagator().inject(context_carrier) + ex = None resp = await self.agent_run(context) - - if inspect.isgenerator(resp): - def gen(): - ctx = TraceContextTextMapPropagator().extract(carrier=context_carrier) - token = otel_context.attach(ctx) - seq = 0 - try: - for event in resp: - seq += 1 - yield _event_to_sse_chunk(event) - except Exception as e: # noqa: BLE001 - logger.error("Error in non-async generator: %s", e, exc_info=True) - err = project_models.ResponseErrorEvent( - sequence_number=seq + 1, - code=project_models.ResponseErrorCode.SERVER_ERROR, - message=_format_error(e), - param="") - yield _event_to_sse_chunk(err) - finally: - logger.info("End of processing CreateResponse request.") - otel_context.detach(token) - - return StreamingResponse(gen(), media_type="text/event-stream") - if inspect.isasyncgen(resp): - async def gen_async(): - ctx = TraceContextTextMapPropagator().extract(carrier=context_carrier) - token = otel_context.attach(ctx) - seq = 0 - try: - async for event in resp: - seq += 1 - yield _event_to_sse_chunk(event) - except Exception as e: # noqa: BLE001 - logger.error("Error in async generator: %s", e, exc_info=True) - err = project_models.ResponseErrorEvent( - sequence_number=seq + 1, - code=project_models.ResponseErrorCode.SERVER_ERROR, - message=_format_error(e), - param="") - yield _event_to_sse_chunk(err) - finally: - logger.info("End of processing CreateResponse request.") - otel_context.detach(token) - - return StreamingResponse(gen_async(), media_type="text/event-stream") - logger.info("End of processing CreateResponse request.") - return JSONResponse(resp.as_dict()) except Exception as e: # TODO: extract status code from 
exception logger.error(f"Error processing CreateResponse request: {e}", exc_info=True) - err = project_models.ResponseError( + ex = e + + if not request.stream: + logger.info("End of processing CreateResponse request.") + result = resp if not ex else project_models.ResponseError( code=project_models.ResponseErrorCode.SERVER_ERROR, - message=_format_error(e)) - return JSONResponse(err.as_dict()) + message=_format_error(ex)) + return JSONResponse(result.as_dict()) + + async def gen_async(ex): + ctx = TraceContextTextMapPropagator().extract(carrier=context_carrier) + token = otel_context.attach(ctx) + seq = 0 + try: + if ex: + return + it = iterate_in_threadpool(resp) if inspect.isgenerator(resp) else resp + async for event in resp: + seq += 1 + yield _event_to_sse_chunk(event) + logger.info("End of processing CreateResponse request.") + except Exception as e: # noqa: BLE001 + logger.error("Error in async generator: %s", e, exc_info=True) + ex = e + finally: + if ex: + err = project_models.ResponseErrorEvent( + sequence_number=seq + 1, + code=project_models.ResponseErrorCode.SERVER_ERROR, + message=_format_error(ex), + param="") + yield _event_to_sse_chunk(err) + otel_context.detach(token) + + return StreamingResponse(gen_async(ex), media_type="text/event-stream") async def liveness_endpoint(request): result = await self.agent_liveness(request) diff --git a/sdk/agentserver/azure-ai-agentserver-langgraph/azure/ai/agentserver/langgraph/langgraph.py b/sdk/agentserver/azure-ai-agentserver-langgraph/azure/ai/agentserver/langgraph/langgraph.py index e6bf10d0b5c2..51937fe31986 100644 --- a/sdk/agentserver/azure-ai-agentserver-langgraph/azure/ai/agentserver/langgraph/langgraph.py +++ b/sdk/agentserver/azure-ai-agentserver-langgraph/azure/ai/agentserver/langgraph/langgraph.py @@ -268,7 +268,7 @@ async def agent_run_non_stream(self, input_data: dict, context: AgentRunContext, output = self.state_converter.state_to_response(result, context) return output except Exception as e: - logger.error(f"Error during agent run: {e}") + logger.error(f"Error during agent run: {e}", exc_info=True) raise e async def agent_run_astream( @@ -301,7 +301,7 @@ async def agent_run_astream( async for result in self.state_converter.state_to_response_stream(stream, context): yield result except Exception as e: - logger.error(f"Error during streaming agent run: {e}") + logger.error(f"Error during streaming agent run: {e}", exc_info=True) raise e finally: # Close tool_client if provided From 1f14c28e85cf05849e702e3961ccbdbd1328bb11 Mon Sep 17 00:00:00 2001 From: junanchen Date: Sat, 6 Dec 2025 22:55:06 -0800 Subject: [PATCH 03/12] cache error in tracing init --- .../agentframework/agent_framework.py | 31 ++++++++++--------- 1 file changed, 17 insertions(+), 14 deletions(-) diff --git a/sdk/agentserver/azure-ai-agentserver-agentframework/azure/ai/agentserver/agentframework/agent_framework.py b/sdk/agentserver/azure-ai-agentserver-agentframework/azure/ai/agentserver/agentframework/agent_framework.py index 4a0a074bd635..233436ac84ea 100644 --- a/sdk/agentserver/azure-ai-agentserver-agentframework/azure/ai/agentserver/agentframework/agent_framework.py +++ b/sdk/agentserver/azure-ai-agentserver-agentframework/azure/ai/agentserver/agentframework/agent_framework.py @@ -167,20 +167,23 @@ async def _resolve_agent_for_request(self, context: AgentRunContext): return agent, tool_client_wrapper def init_tracing(self): - exporter = os.environ.get(AdapterConstants.OTEL_EXPORTER_ENDPOINT) - app_insights_conn_str = 
os.environ.get(APPINSIGHT_CONNSTR_ENV_NAME) - project_endpoint = os.environ.get(AdapterConstants.AZURE_AI_PROJECT_ENDPOINT) - - if exporter or app_insights_conn_str: - from agent_framework.observability import setup_observability - - setup_observability( - enable_sensitive_data=True, - otlp_endpoint=exporter, - applicationinsights_connection_string=app_insights_conn_str, - ) - elif project_endpoint: - self.setup_tracing_with_azure_ai_client(project_endpoint) + try: + exporter = os.environ.get(AdapterConstants.OTEL_EXPORTER_ENDPOINT) + app_insights_conn_str = os.environ.get(APPINSIGHT_CONNSTR_ENV_NAME) + project_endpoint = os.environ.get(AdapterConstants.AZURE_AI_PROJECT_ENDPOINT) + + if exporter or app_insights_conn_str: + from agent_framework.observability import setup_observability + + setup_observability( + enable_sensitive_data=True, + otlp_endpoint=exporter, + applicationinsights_connection_string=app_insights_conn_str, + ) + elif project_endpoint: + self.setup_tracing_with_azure_ai_client(project_endpoint) + except Exception as e: + logger.warning(f"Failed to initialize tracing: {e}", exc_info=True) self.tracer = trace.get_tracer(__name__) def setup_tracing_with_azure_ai_client(self, project_endpoint: str): From 81a0b21b8f3d49744fdfa9c9c2118cdeffbfb821 Mon Sep 17 00:00:00 2001 From: junanchen Date: Sat, 6 Dec 2025 23:50:33 -0800 Subject: [PATCH 04/12] release -core 1.0.0b7 first --- sdk/agentserver/ci.yml | 8 ++++---- 1 file changed, 4 insertions(+), 4 deletions(-) diff --git a/sdk/agentserver/ci.yml b/sdk/agentserver/ci.yml index bb2d6f479b00..55768f825b53 100644 --- a/sdk/agentserver/ci.yml +++ b/sdk/agentserver/ci.yml @@ -42,7 +42,7 @@ extends: Artifacts: - name: azure-ai-agentserver-core safeName: azureaiagentservercore - - name: azure-ai-agentserver-agentframework - safeName: azureaiagentserveragentframework - - name: azure-ai-agentserver-langgraph - safeName: azureaiagentserverlanggraph +# - name: azure-ai-agentserver-agentframework +# safeName: azureaiagentserveragentframework +# - name: azure-ai-agentserver-langgraph +# safeName: azureaiagentserverlanggraph From 5f2e46c2c81427bea7ffd1e7431d77bef0cf3c9f Mon Sep 17 00:00:00 2001 From: junanchen Date: Sun, 7 Dec 2025 00:01:15 -0800 Subject: [PATCH 05/12] fix response iterator --- .../azure/ai/agentserver/core/server/base.py | 2 +- 1 file changed, 1 insertion(+), 1 deletion(-) diff --git a/sdk/agentserver/azure-ai-agentserver-core/azure/ai/agentserver/core/server/base.py b/sdk/agentserver/azure-ai-agentserver-core/azure/ai/agentserver/core/server/base.py index e4842effff18..35b1041765d5 100644 --- a/sdk/agentserver/azure-ai-agentserver-core/azure/ai/agentserver/core/server/base.py +++ b/sdk/agentserver/azure-ai-agentserver-core/azure/ai/agentserver/core/server/base.py @@ -160,7 +160,7 @@ async def gen_async(ex): if ex: return it = iterate_in_threadpool(resp) if inspect.isgenerator(resp) else resp - async for event in resp: + async for event in it: seq += 1 yield _event_to_sse_chunk(event) logger.info("End of processing CreateResponse request.") From e6e235a640ea2f7ab2deb01bcae4818a0062367d Mon Sep 17 00:00:00 2001 From: junanchen Date: Sun, 7 Dec 2025 01:55:40 -0800 Subject: [PATCH 06/12] fix tests --- .../azure/ai/agentserver/core/server/base.py | 3 ++- 1 file changed, 2 insertions(+), 1 deletion(-) diff --git a/sdk/agentserver/azure-ai-agentserver-core/azure/ai/agentserver/core/server/base.py b/sdk/agentserver/azure-ai-agentserver-core/azure/ai/agentserver/core/server/base.py index 35b1041765d5..eeb49a2896ad 100644 --- 
a/sdk/agentserver/azure-ai-agentserver-core/azure/ai/agentserver/core/server/base.py +++ b/sdk/agentserver/azure-ai-agentserver-core/azure/ai/agentserver/core/server/base.py @@ -7,6 +7,7 @@ import json import os import time +import traceback from abc import abstractmethod from typing import Any, AsyncGenerator, Generator, Optional, Union @@ -145,7 +146,7 @@ async def runs_endpoint(request): logger.error(f"Error processing CreateResponse request: {e}", exc_info=True) ex = e - if not request.stream: + if not context.stream: logger.info("End of processing CreateResponse request.") result = resp if not ex else project_models.ResponseError( code=project_models.ResponseErrorCode.SERVER_ERROR, From f3e4ac7e89aa4ae56a9dd73bf3804f13e92db0e7 Mon Sep 17 00:00:00 2001 From: junanchen Date: Sun, 7 Dec 2025 02:45:45 -0800 Subject: [PATCH 07/12] suppress error while testing --- .../azure-ai-agentserver-agentframework/pyproject.toml | 3 +++ sdk/agentserver/ci.yml | 8 ++++---- 2 files changed, 7 insertions(+), 4 deletions(-) diff --git a/sdk/agentserver/azure-ai-agentserver-agentframework/pyproject.toml b/sdk/agentserver/azure-ai-agentserver-agentframework/pyproject.toml index 19840e57fadb..c51bb424e925 100644 --- a/sdk/agentserver/azure-ai-agentserver-agentframework/pyproject.toml +++ b/sdk/agentserver/azure-ai-agentserver-agentframework/pyproject.toml @@ -66,4 +66,7 @@ pyright = false verifytypes = false # incompatible python version for -core verify_keywords = false mindependency = false # depends on -core package +latestdependency = false +whl = false +depends = false whl_no_aio = false diff --git a/sdk/agentserver/ci.yml b/sdk/agentserver/ci.yml index 55768f825b53..bb2d6f479b00 100644 --- a/sdk/agentserver/ci.yml +++ b/sdk/agentserver/ci.yml @@ -42,7 +42,7 @@ extends: Artifacts: - name: azure-ai-agentserver-core safeName: azureaiagentservercore -# - name: azure-ai-agentserver-agentframework -# safeName: azureaiagentserveragentframework -# - name: azure-ai-agentserver-langgraph -# safeName: azureaiagentserverlanggraph + - name: azure-ai-agentserver-agentframework + safeName: azureaiagentserveragentframework + - name: azure-ai-agentserver-langgraph + safeName: azureaiagentserverlanggraph From 9a14862551ccc342bac5de077784f030cb082ae8 Mon Sep 17 00:00:00 2001 From: junanchen Date: Sun, 7 Dec 2025 03:08:13 -0800 Subject: [PATCH 08/12] upgrade -core dep to 1.0.0b7 for -af & -lg --- .../azure-ai-agentserver-agentframework/pyproject.toml | 7 ++----- .../azure-ai-agentserver-langgraph/pyproject.toml | 2 +- 2 files changed, 3 insertions(+), 6 deletions(-) diff --git a/sdk/agentserver/azure-ai-agentserver-agentframework/pyproject.toml b/sdk/agentserver/azure-ai-agentserver-agentframework/pyproject.toml index c51bb424e925..8bb6cb11d3c9 100644 --- a/sdk/agentserver/azure-ai-agentserver-agentframework/pyproject.toml +++ b/sdk/agentserver/azure-ai-agentserver-agentframework/pyproject.toml @@ -20,7 +20,7 @@ classifiers = [ keywords = ["azure", "azure sdk"] dependencies = [ - "azure-ai-agentserver-core>=1.0.0b5", + "azure-ai-agentserver-core>=1.0.0b7", "agent-framework-azure-ai>=1.0.0b251112", "agent-framework-core>=1.0.0b251112", "opentelemetry-exporter-otlp-proto-grpc>=1.36.0", @@ -65,8 +65,5 @@ breaking = false # incompatible python version pyright = false verifytypes = false # incompatible python version for -core verify_keywords = false -mindependency = false # depends on -core package -latestdependency = false -whl = false -depends = false +# mindependency = false # depends on -core package whl_no_aio = false 
diff --git a/sdk/agentserver/azure-ai-agentserver-langgraph/pyproject.toml b/sdk/agentserver/azure-ai-agentserver-langgraph/pyproject.toml
index 9abeff0d58d6..a6fc7b3227db 100644
--- a/sdk/agentserver/azure-ai-agentserver-langgraph/pyproject.toml
+++ b/sdk/agentserver/azure-ai-agentserver-langgraph/pyproject.toml
@@ -19,7 +19,7 @@ classifiers = [
 keywords = ["azure", "azure sdk"]
 
 dependencies = [
-    "azure-ai-agentserver-core>=1.0.0b5",
+    "azure-ai-agentserver-core>=1.0.0b7",
     "langchain>0.3.20",
     "langchain-openai>0.3.10",
     "langchain-azure-ai[opentelemetry]>=0.1.8",

From 42148225980ac7c4b6eb58f71ca814306debdf06 Mon Sep 17 00:00:00 2001
From: junanchen
Date: Sun, 7 Dec 2025 10:34:59 -0800
Subject: [PATCH 09/12] removed unused import & disable pylint on af

---
 .../azure-ai-agentserver-agentframework/pyproject.toml | 6 +++++-
 .../azure/ai/agentserver/core/server/base.py           | 1 -
 2 files changed, 5 insertions(+), 2 deletions(-)

diff --git a/sdk/agentserver/azure-ai-agentserver-agentframework/pyproject.toml b/sdk/agentserver/azure-ai-agentserver-agentframework/pyproject.toml
index 8bb6cb11d3c9..d48b5c7bdbcb 100644
--- a/sdk/agentserver/azure-ai-agentserver-agentframework/pyproject.toml
+++ b/sdk/agentserver/azure-ai-agentserver-agentframework/pyproject.toml
@@ -65,5 +65,9 @@ breaking = false # incompatible python version
 pyright = false
 verifytypes = false # incompatible python version for -core
 verify_keywords = false
-# mindependency = false # depends on -core package
+mindependency = false # depends on -core package
+latestdependency = false
+whl = false
+depends = false
+pylint = false
 whl_no_aio = false
diff --git a/sdk/agentserver/azure-ai-agentserver-core/azure/ai/agentserver/core/server/base.py b/sdk/agentserver/azure-ai-agentserver-core/azure/ai/agentserver/core/server/base.py
index eeb49a2896ad..bc749a1fd782 100644
--- a/sdk/agentserver/azure-ai-agentserver-core/azure/ai/agentserver/core/server/base.py
+++ b/sdk/agentserver/azure-ai-agentserver-core/azure/ai/agentserver/core/server/base.py
@@ -7,7 +7,6 @@
 import json
 import os
 import time
-import traceback
 
 from abc import abstractmethod
 from typing import Any, AsyncGenerator, Generator, Optional, Union

From c65bd85a4ec434c8ea532329d887a914e558fd95 Mon Sep 17 00:00:00 2001
From: junanchen
Date: Sun, 7 Dec 2025 10:35:43 -0800
Subject: [PATCH 10/12] bypass checks for lg

---
 .../azure-ai-agentserver-langgraph/pyproject.toml | 6 +++++-
 1 file changed, 5 insertions(+), 1 deletion(-)

diff --git a/sdk/agentserver/azure-ai-agentserver-langgraph/pyproject.toml b/sdk/agentserver/azure-ai-agentserver-langgraph/pyproject.toml
index a6fc7b3227db..df7adb4c7c04 100644
--- a/sdk/agentserver/azure-ai-agentserver-langgraph/pyproject.toml
+++ b/sdk/agentserver/azure-ai-agentserver-langgraph/pyproject.toml
@@ -64,5 +64,9 @@ breaking = false # incompatible python version
 pyright = false
 verifytypes = false # incompatible python version for -core
 verify_keywords = false
-# mindependency = false # depends on -core package
+mindependency = false # depends on -core package
+latestdependency = false
+whl = false
+depends = false
+pylint = false
 whl_no_aio = false

From 6cb55827cb04b399098086ea8e8960ed0dc66c5c Mon Sep 17 00:00:00 2001
From: junanchen
Date: Sun, 7 Dec 2025 11:07:32 -0800
Subject: [PATCH 11/12] add bugfix in changelog

---
 .../azure-ai-agentserver-agentframework/CHANGELOG.md        | 3 +++
 sdk/agentserver/azure-ai-agentserver-core/CHANGELOG.md      | 3 +++
 sdk/agentserver/azure-ai-agentserver-langgraph/CHANGELOG.md | 3 +++
 3 files changed, 9 insertions(+)

diff --git a/sdk/agentserver/azure-ai-agentserver-agentframework/CHANGELOG.md b/sdk/agentserver/azure-ai-agentserver-agentframework/CHANGELOG.md
index 9f55aa85b60e..84c4a76a27e5 100644
--- a/sdk/agentserver/azure-ai-agentserver-agentframework/CHANGELOG.md
+++ b/sdk/agentserver/azure-ai-agentserver-agentframework/CHANGELOG.md
@@ -7,6 +7,9 @@
 
 - Update response with created_by
 
+### Bugs Fixed
+
+- Fixed error response handling in stream and non-stream modes
 
 ## 1.0.0b6 (2025-11-26)
 
diff --git a/sdk/agentserver/azure-ai-agentserver-core/CHANGELOG.md b/sdk/agentserver/azure-ai-agentserver-core/CHANGELOG.md
index 9f55aa85b60e..84c4a76a27e5 100644
--- a/sdk/agentserver/azure-ai-agentserver-core/CHANGELOG.md
+++ b/sdk/agentserver/azure-ai-agentserver-core/CHANGELOG.md
@@ -7,6 +7,9 @@
 
 - Update response with created_by
 
+### Bugs Fixed
+
+- Fixed error response handling in stream and non-stream modes
 
 ## 1.0.0b6 (2025-11-26)
 
diff --git a/sdk/agentserver/azure-ai-agentserver-langgraph/CHANGELOG.md b/sdk/agentserver/azure-ai-agentserver-langgraph/CHANGELOG.md
index da32978eb374..abea93ee106a 100644
--- a/sdk/agentserver/azure-ai-agentserver-langgraph/CHANGELOG.md
+++ b/sdk/agentserver/azure-ai-agentserver-langgraph/CHANGELOG.md
@@ -7,6 +7,9 @@
 
 - Update response with created_by
 
+### Bugs Fixed
+
+- Fixed error response handling in stream and non-stream modes
 
 ## 1.0.0b6 (2025-11-26)
 

From d1ed2cb2dedf8f41014919c53f40eddc0046f830 Mon Sep 17 00:00:00 2001
From: junanchen
Date: Tue, 9 Dec 2025 10:50:15 -0800
Subject: [PATCH 12/12] enable tox checks

---
 .../azure-ai-agentserver-agentframework/pyproject.toml | 10 +++++-----
 .../azure-ai-agentserver-langgraph/pyproject.toml      | 10 +++++-----
 2 files changed, 10 insertions(+), 10 deletions(-)

diff --git a/sdk/agentserver/azure-ai-agentserver-agentframework/pyproject.toml b/sdk/agentserver/azure-ai-agentserver-agentframework/pyproject.toml
index d48b5c7bdbcb..a86c9eef2648 100644
--- a/sdk/agentserver/azure-ai-agentserver-agentframework/pyproject.toml
+++ b/sdk/agentserver/azure-ai-agentserver-agentframework/pyproject.toml
@@ -65,9 +65,9 @@ breaking = false # incompatible python version
 pyright = false
 verifytypes = false # incompatible python version for -core
 verify_keywords = false
-mindependency = false # depends on -core package
-latestdependency = false
-whl = false
-depends = false
-pylint = false
+#mindependency = false # depends on -core package
+#latestdependency = false
+#whl = false
+#depends = false
+#pylint = false
 whl_no_aio = false
diff --git a/sdk/agentserver/azure-ai-agentserver-langgraph/pyproject.toml b/sdk/agentserver/azure-ai-agentserver-langgraph/pyproject.toml
index df7adb4c7c04..b970062738ee 100644
--- a/sdk/agentserver/azure-ai-agentserver-langgraph/pyproject.toml
+++ b/sdk/agentserver/azure-ai-agentserver-langgraph/pyproject.toml
@@ -64,9 +64,9 @@ breaking = false # incompatible python version
 pyright = false
 verifytypes = false # incompatible python version for -core
 verify_keywords = false
-mindependency = false # depends on -core package
-latestdependency = false
-whl = false
-depends = false
-pylint = false
+#mindependency = false # depends on -core package
+#latestdependency = false
+#whl = false
+#depends = false
+#pylint = false
 whl_no_aio = false