Skip to content
Open
Show file tree
Hide file tree
Changes from all commits
Commits
File filter

Filter by extension

Filter by extension


Conversations
Failed to load comments.
Loading
Jump to
Jump to file
Failed to load files.
Loading
Diff view
Diff view
3 changes: 2 additions & 1 deletion Makefile
Original file line number Diff line number Diff line change
Expand Up @@ -168,7 +168,8 @@ push-test-agent: buildx-create build-kagent-adk
$(DOCKER_BUILDER) build --push $(BUILD_ARGS) $(TOOLS_IMAGE_BUILD_ARGS) -t $(DOCKER_REGISTRY)/kebab:latest -f go/test/e2e/agents/kebab/Dockerfile ./go/test/e2e/agents/kebab
kubectl apply --namespace kagent --context kind-$(KIND_CLUSTER_NAME) -f go/test/e2e/agents/kebab/agent.yaml
$(DOCKER_BUILDER) build --push $(BUILD_ARGS) $(TOOLS_IMAGE_BUILD_ARGS) -t $(DOCKER_REGISTRY)/poem-flow:latest -f python/samples/crewai/poem_flow/Dockerfile ./python

$(DOCKER_BUILDER) build --push $(BUILD_ARGS) $(TOOLS_IMAGE_BUILD_ARGS) -t $(DOCKER_REGISTRY)/basic-openai:latest -f python/samples/openai/basic_agent/Dockerfile ./python

.PHONY: push-test-skill
push-test-skill: buildx-create
echo "Building FROM DOCKER_REGISTRY=$(DOCKER_REGISTRY)/$(DOCKER_REPO)/kebab-maker:$(VERSION)"
Expand Down
96 changes: 94 additions & 2 deletions go/test/e2e/invoke_api_test.go
Original file line number Diff line number Diff line change
Expand Up @@ -501,8 +501,45 @@ func TestE2EInvokeDeclarativeAgentWithMcpServerTool(t *testing.T) {
})
}

// This function generates a CrewAI agent that uses a mock LLM server
// Assumes that the image is built and pushed to registry, the agent can be found in python/samples/crewai/poem_flow
// generateOpenAIAgent builds the manifest for a BYO OpenAI test agent whose
// LLM traffic is redirected to the mock server at baseURL.
// Assumes that the image is built and pushed to registry
func generateOpenAIAgent(baseURL string) *v1alpha2.Agent {
	// The container reads its API key from the kagent-openai secret and is
	// pointed at the mock LLM server through OPENAI_API_BASE.
	env := []corev1.EnvVar{
		{
			Name: "OPENAI_API_KEY",
			ValueFrom: &corev1.EnvVarSource{
				SecretKeyRef: &corev1.SecretKeySelector{
					LocalObjectReference: corev1.LocalObjectReference{
						Name: "kagent-openai",
					},
					Key: "OPENAI_API_KEY",
				},
			},
		},
		{
			Name:  "OPENAI_API_BASE",
			Value: baseURL + "/v1",
		},
	}

	deployment := &v1alpha2.ByoDeploymentSpec{
		Image: "localhost:5001/basic-openai:latest",
		SharedDeploymentSpec: v1alpha2.SharedDeploymentSpec{
			Env: env,
		},
	}

	return &v1alpha2.Agent{
		ObjectMeta: metav1.ObjectMeta{
			Name:      "basic-openai-test-agent",
			Namespace: "kagent",
		},
		Spec: v1alpha2.AgentSpec{
			Description: "A basic OpenAI agent with calculator and weather tools",
			Type:        v1alpha2.AgentType_BYO,
			BYO: &v1alpha2.BYOAgentSpec{
				Deployment: deployment,
			},
		},
	}
}

func generateCrewAIAgent(baseURL string) *v1alpha2.Agent {
return &v1alpha2.Agent{
ObjectMeta: metav1.ObjectMeta{
Expand Down Expand Up @@ -541,6 +578,59 @@ func generateCrewAIAgent(baseURL string) *v1alpha2.Agent {
}
}

// TestE2EInvokeOpenAIAgent deploys the BYO OpenAI sample agent against a mock
// LLM server, waits for it to become Ready, and exercises both the sync and
// streaming A2A invocation paths (calculator and weather tool calls).
func TestE2EInvokeOpenAIAgent(t *testing.T) {
	// Setup mock server replaying canned OpenAI chat-completion responses
	baseURL, stopServer := setupMockServer(t, "mocks/invoke_openai_agent.json")
	defer stopServer()

	// Setup Kubernetes client
	cli := setupK8sClient(t, false)

	// Setup specific resources
	modelCfg := setupModelConfig(t, cli, baseURL)
	agent := generateOpenAIAgent(baseURL)

	// Create the agent on the cluster
	err := cli.Create(t.Context(), agent)
	require.NoError(t, err)

	// Register cleanup immediately after creation so the resources are removed
	// even if the readiness wait below fails the test; previously the defer was
	// registered only after the wait succeeded, leaking the agent on failure.
	defer func() {
		cli.Delete(t.Context(), agent)    //nolint:errcheck
		cli.Delete(t.Context(), modelCfg) //nolint:errcheck
	}()

	// Wait for agent to be ready
	args := []string{
		"wait",
		"--for",
		"condition=Ready",
		"--timeout=1m",
		"agents.kagent.dev",
		agent.Name,
		"-n",
		agent.Namespace,
	}

	cmd := exec.CommandContext(t.Context(), "kubectl", args...)
	cmd.Stdout = os.Stdout
	cmd.Stderr = os.Stderr
	require.NoError(t, cmd.Run())

	// Setup A2A client - use the agent's actual name rather than repeating the
	// literal, so a rename in generateOpenAIAgent cannot desynchronize the URL
	a2aURL := a2aUrl(agent.Namespace, agent.Name)
	a2aClient, err := a2aclient.NewA2AClient(a2aURL)
	require.NoError(t, err)

	useArtifacts := true
	t.Run("sync_invocation_calculator", func(t *testing.T) {
		runSyncTest(t, a2aClient, "What is 2+2?", "4", &useArtifacts)
	})

	t.Run("streaming_invocation_weather", func(t *testing.T) {
		runStreamingTest(t, a2aClient, "What is the weather in London?", "Rainy, 52°F")
	})
}

func TestE2EInvokeCrewAIAgent(t *testing.T) {
mockllmCfg, err := mockllm.LoadConfigFromFile("mocks/invoke_crewai_agent.json", mocks)
require.NoError(t, err)
Expand Down Expand Up @@ -619,6 +709,8 @@ func TestE2EInvokeCrewAIAgent(t *testing.T) {
t.Run("streaming_invocation", func(t *testing.T) {
runStreamingTest(t, a2aClient, "Generate a poem about CrewAI", "CrewAI is awesome, it makes coding fun.")
})

cli.Delete(t.Context(), agent) //nolint:errcheck
}

func TestE2EInvokeSTSIntegration(t *testing.T) {
Expand Down
130 changes: 130 additions & 0 deletions go/test/e2e/mocks/invoke_openai_agent.json
Original file line number Diff line number Diff line change
@@ -0,0 +1,130 @@
{
"openai": [
{
"name": "calculate_request",
"match": {
"match_type": "contains",
"message": {
"content": "What is 2+2?",
"role": "user"
}
},
"response": {
"id": "chatcmpl-calc",
"object": "chat.completion",
"created": 1677652288,
"model": "gpt-4.1-mini",
"choices": [
{
"index": 0,
"message": {
"role": "assistant",
"content": null,
"tool_calls": [
{
"id": "call_abc123",
"type": "function",
"function": {
"name": "calculate",
"arguments": "{\"expression\": \"2+2\"}"
}
}
]
},
"finish_reason": "tool_calls"
}
]
}
},
{
"name": "calculate_result",
"match": {
"match_type": "contains",
"message": {
"content": "4",
"role": "tool",
"tool_call_id": "call_abc123"
}
},
"response": {
"id": "chatcmpl-calc-result",
"object": "chat.completion",
"created": 1677652288,
"model": "gpt-4.1-mini",
"choices": [
{
"index": 0,
"message": {
"role": "assistant",
"content": "The result of 2+2 is 4"
},
"finish_reason": "stop"
}
]
}
},
{
"name": "weather_request",
"match": {
"match_type": "contains",
"message": {
"content": "What is the weather in London?",
"role": "user"
}
},
"response": {
"id": "chatcmpl-weather",
"object": "chat.completion",
"created": 1677652289,
"model": "gpt-4.1-mini",
"choices": [
{
"index": 0,
"message": {
"role": "assistant",
"content": null,
"tool_calls": [
{
"id": "call_def456",
"type": "function",
"function": {
"name": "get_weather",
"arguments": "{\"location\": \"London\"}"
}
}
]
},
"finish_reason": "tool_calls"
}
]
}
},
{
"name": "weather_result",
"match": {
"match_type": "contains",
"message": {
"content": "Rainy, 52°F",
"role": "tool",
"tool_call_id": "call_def456"
}
},
"response": {
"id": "chatcmpl-weather-result",
"object": "chat.completion",
"created": 1677652289,
"model": "gpt-4.1-mini",
"choices": [
{
"index": 0,
"message": {
"role": "assistant",
"content": "The weather in London is Rainy, 52°F"
},
"finish_reason": "stop"
}
]
}
}
]
}
5 changes: 4 additions & 1 deletion python/Dockerfile
Original file line number Diff line number Diff line change
Expand Up @@ -101,6 +101,7 @@ COPY --chown=python:pythongroup .python-version .
COPY --chown=python:pythongroup uv.lock .
COPY --chown=python:pythongroup packages/kagent-adk packages/kagent-adk
COPY --chown=python:pythongroup packages/kagent-core packages/kagent-core
COPY --chown=python:pythongroup packages/kagent-skills packages/kagent-skills
COPY --chown=python:pythongroup README.md .

ARG VERSION
Expand All @@ -114,6 +115,7 @@ RUN --mount=type=cache,target=/.kagent/cache,uid=1001,gid=1001 \
&& echo "Installation complete."

# Create a separate venv for bash tool commands (sandbox environment)
# This venv does not have pip installed
RUN --mount=type=cache,target=/.kagent/cache,uid=1001,gid=1001 \
echo "Creating bash tool sandbox environment..." \
&& mkdir -p /.kagent/sandbox-venv \
Expand All @@ -122,7 +124,8 @@ RUN --mount=type=cache,target=/.kagent/cache,uid=1001,gid=1001 \

ENV PATH="/.kagent/.venv/bin:$PATH"
ENV UV_PROJECT_ENVIRONMENT=/app/.venv
ENV BASH_VENV_PATH="/.kagent/sandbox-venv"
ENV BASH_VENV_PATH=/.kagent/sandbox-venv
ENV VIRTUAL_ENV=/.kagent/.venv

WORKDIR /app

Expand Down
4 changes: 4 additions & 0 deletions python/Makefile
Original file line number Diff line number Diff line change
Expand Up @@ -73,3 +73,7 @@ generate-test-certs:
rm -f server-extensions.conf server-req.pem && \
echo "Test certificates generated successfully"; \
fi

.PHONY: basic-openai-sample
basic-openai-sample:
docker build . -f samples/openai/basic_agent/Dockerfile --tag localhost:5001/basic-openai:latest --push
2 changes: 2 additions & 0 deletions python/packages/kagent-adk/pyproject.toml
Original file line number Diff line number Diff line change
Expand Up @@ -12,6 +12,7 @@ dependencies = [
"agentsts-adk >= 0.0.8",
"agentsts-core >= 0.0.8",
"kagent-core",
"kagent-skills",
"aiofiles>=24.1.0",
"anyio>=4.9.0",
"typer>=0.15.0",
Expand All @@ -34,6 +35,7 @@ dependencies = [

[tool.uv.sources]
kagent-core = {workspace = true}
kagent-skills = {workspace = true}

[project.scripts]
kagent-adk = "kagent.adk.cli:run_cli"
Expand Down
2 changes: 1 addition & 1 deletion python/packages/kagent-adk/src/kagent/adk/cli.py
Original file line number Diff line number Diff line change
Expand Up @@ -16,7 +16,7 @@

from . import AgentConfig, KAgentApp
from .skill_fetcher import fetch_skill
from .skills.skills_plugin import add_skills_tool_to_agent
from .tools import add_skills_tool_to_agent

logger = logging.getLogger(__name__)
logging.getLogger("google_adk.google.adk.tools.base_authenticated_tool").setLevel(logging.ERROR)
Expand Down

This file was deleted.

Loading
Loading