Commit 988974d
perf: add schema caching to avoid repeated reflection
This change adds a global schema cache that dramatically reduces the cost of registering tools in stateless server patterns (new server per request).

Key improvements:
- Cache schemas by reflect.Type for typed handlers
- Cache resolved schemas by pointer for pre-defined schemas
- 132x faster tool registration after first call
- 51x fewer allocations per AddTool call
- 32x less memory per AddTool call

Benchmarks:
- BenchmarkAddToolTypedHandler: 1,223 ns/op vs 161,463 ns/op (no cache)
- BenchmarkAddToolTypedHandler: 21 allocs vs 1,072 allocs (no cache)

This benefits integrators like github-mcp-server automatically, without any code changes required.
1 parent 272e0cd commit 988974d

File tree

4 files changed: +495 -11 lines


mcp/schema_cache.go

Lines changed: 76 additions & 0 deletions
@@ -0,0 +1,76 @@
// Copyright 2025 The Go MCP SDK Authors. All rights reserved.
// Use of this source code is governed by an MIT-style
// license that can be found in the LICENSE file.

package mcp

import (
	"reflect"
	"sync"

	"github.com/google/jsonschema-go/jsonschema"
)

// schemaCache provides concurrent-safe caching for JSON schemas.
// It caches both by reflect.Type (for auto-generated schemas) and
// by schema pointer (for pre-defined schemas).
//
// This cache significantly improves performance for stateless server deployments
// where tools are re-registered on every request. Without caching, each AddTool
// call would trigger expensive reflection-based schema generation and resolution.
type schemaCache struct {
	// byType caches schemas generated from Go types via jsonschema.ForType.
	// Key: reflect.Type, Value: *cachedSchema
	byType sync.Map

	// bySchema caches resolved schemas for pre-defined Schema objects.
	// Key: *jsonschema.Schema (pointer identity), Value: *jsonschema.Resolved
	// This uses pointer identity because integrators typically reuse the same
	// Tool objects across requests, so the schema pointer remains stable.
	bySchema sync.Map
}

// cachedSchema holds both the generated schema and its resolved form.
type cachedSchema struct {
	schema   *jsonschema.Schema
	resolved *jsonschema.Resolved
}

// globalSchemaCache is the package-level cache used by setSchema.
// It is unbounded since typical MCP servers have <100 tools.
var globalSchemaCache = &schemaCache{}

// getByType retrieves a cached schema by Go type.
// Returns the schema, resolved schema, and whether the cache hit.
func (c *schemaCache) getByType(t reflect.Type) (*jsonschema.Schema, *jsonschema.Resolved, bool) {
	if v, ok := c.byType.Load(t); ok {
		cs := v.(*cachedSchema)
		return cs.schema, cs.resolved, true
	}
	return nil, nil, false
}

// setByType caches a schema by Go type.
func (c *schemaCache) setByType(t reflect.Type, schema *jsonschema.Schema, resolved *jsonschema.Resolved) {
	c.byType.Store(t, &cachedSchema{schema: schema, resolved: resolved})
}

// getBySchema retrieves a cached resolved schema by the original schema pointer.
// This is used when integrators provide pre-defined schemas (e.g., github-mcp-server pattern).
func (c *schemaCache) getBySchema(schema *jsonschema.Schema) (*jsonschema.Resolved, bool) {
	if v, ok := c.bySchema.Load(schema); ok {
		return v.(*jsonschema.Resolved), true
	}
	return nil, false
}

// setBySchema caches a resolved schema by the original schema pointer.
func (c *schemaCache) setBySchema(schema *jsonschema.Schema, resolved *jsonschema.Resolved) {
	c.bySchema.Store(schema, resolved)
}

// resetForTesting clears the cache. Only for use in tests.
func (c *schemaCache) resetForTesting() {
	c.byType.Clear()
	c.bySchema.Clear()
}
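
The wiring that actually consults this cache lives in the server-side changes not shown here (the commit touches four files, and only two appear in this view). As a rough illustration of the intended lookup-then-populate pattern, the sketch below shows how a setSchema-style caller might use the two maps. It is not part of the commit: the helper names and the generate/resolve function parameters are hypothetical stand-ins for the SDK's real reflection and resolution steps, whose signatures are not confirmed by this diff.

// Illustrative sketch only; resolvedForType / resolvedForSchema and their
// generate/resolve parameters are hypothetical, not SDK code.

// resolvedForType serves the typed-handler path, keyed by reflect.Type.
func resolvedForType(t reflect.Type, generate func(reflect.Type) (*jsonschema.Schema, *jsonschema.Resolved, error)) (*jsonschema.Schema, *jsonschema.Resolved, error) {
	if s, r, ok := globalSchemaCache.getByType(t); ok {
		return s, r, nil // cache hit: skip reflection entirely
	}
	s, r, err := generate(t) // stand-in for the SDK's reflection-based generation
	if err != nil {
		return nil, nil, err
	}
	globalSchemaCache.setByType(t, s, r)
	return s, r, nil
}

// resolvedForSchema serves the pre-defined-schema path, keyed by pointer identity.
func resolvedForSchema(s *jsonschema.Schema, resolve func(*jsonschema.Schema) (*jsonschema.Resolved, error)) (*jsonschema.Resolved, error) {
	if r, ok := globalSchemaCache.getBySchema(s); ok {
		return r, nil // cache hit: the same schema pointer was registered before
	}
	r, err := resolve(s) // stand-in for the SDK's schema resolution
	if err != nil {
		return nil, err
	}
	globalSchemaCache.setBySchema(s, r)
	return r, nil
}

Note that two concurrent first registrations of the same type can both miss and both compute; the later Store simply overwrites with an equivalent value, which is harmless for an idempotent, unbounded cache built on sync.Map.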

mcp/schema_cache_benchmark_test.go

Lines changed: 161 additions & 0 deletions
@@ -0,0 +1,161 @@
// Copyright 2025 The Go MCP SDK Authors. All rights reserved.
// Use of this source code is governed by an MIT-style
// license that can be found in the LICENSE file.

package mcp

import (
	"context"
	"testing"

	"github.com/google/jsonschema-go/jsonschema"
)

// BenchmarkAddToolTypedHandler measures performance of AddTool with typed handlers.
// This simulates the stateless server pattern where new servers are created per request.
func BenchmarkAddToolTypedHandler(b *testing.B) {
	type SearchInput struct {
		Query   string `json:"query" jsonschema:"required"`
		Page    int    `json:"page"`
		PerPage int    `json:"per_page"`
	}

	type SearchOutput struct {
		Results []string `json:"results"`
		Total   int      `json:"total"`
	}

	handler := func(ctx context.Context, req *CallToolRequest, in SearchInput) (*CallToolResult, SearchOutput, error) {
		return &CallToolResult{}, SearchOutput{}, nil
	}

	tool := &Tool{
		Name:        "search",
		Description: "Search for items",
	}

	// Reset cache to simulate cold start for first iteration
	globalSchemaCache.resetForTesting()

	b.ResetTimer()
	b.ReportAllocs()

	for i := 0; i < b.N; i++ {
		s := NewServer(&Implementation{Name: "test", Version: "1.0"}, nil)
		AddTool(s, tool, handler)
	}
}

// BenchmarkAddToolPreDefinedSchema measures performance with pre-defined schemas.
// This simulates how github-mcp-server registers tools with manual InputSchema.
func BenchmarkAddToolPreDefinedSchema(b *testing.B) {
	schema := &jsonschema.Schema{
		Type: "object",
		Properties: map[string]*jsonschema.Schema{
			"query":    {Type: "string", Description: "Search query"},
			"page":     {Type: "integer", Description: "Page number"},
			"per_page": {Type: "integer", Description: "Results per page"},
		},
		Required: []string{"query"},
	}

	handler := func(ctx context.Context, req *CallToolRequest) (*CallToolResult, error) {
		return &CallToolResult{}, nil
	}

	tool := &Tool{
		Name:        "search",
		Description: "Search for items",
		InputSchema: schema, // Pre-defined schema like github-mcp-server
	}

	// Reset cache to simulate cold start for first iteration
	globalSchemaCache.resetForTesting()

	b.ResetTimer()
	b.ReportAllocs()

	for i := 0; i < b.N; i++ {
		s := NewServer(&Implementation{Name: "test", Version: "1.0"}, nil)
		s.AddTool(tool, handler)
	}
}

// BenchmarkAddToolTypedHandlerNoCache measures performance without caching.
// Used to compare before/after performance.
func BenchmarkAddToolTypedHandlerNoCache(b *testing.B) {
	type SearchInput struct {
		Query   string `json:"query" jsonschema:"required"`
		Page    int    `json:"page"`
		PerPage int    `json:"per_page"`
	}

	type SearchOutput struct {
		Results []string `json:"results"`
		Total   int      `json:"total"`
	}

	handler := func(ctx context.Context, req *CallToolRequest, in SearchInput) (*CallToolResult, SearchOutput, error) {
		return &CallToolResult{}, SearchOutput{}, nil
	}

	tool := &Tool{
		Name:        "search",
		Description: "Search for items",
	}

	b.ResetTimer()
	b.ReportAllocs()

	for i := 0; i < b.N; i++ {
		// Reset cache every iteration to simulate no caching
		globalSchemaCache.resetForTesting()

		s := NewServer(&Implementation{Name: "test", Version: "1.0"}, nil)
		AddTool(s, tool, handler)
	}
}

// BenchmarkAddToolMultipleTools simulates registering multiple tools like github-mcp-server.
func BenchmarkAddToolMultipleTools(b *testing.B) {
	type Input1 struct {
		Query string `json:"query"`
	}
	type Input2 struct {
		ID int `json:"id"`
	}
	type Input3 struct {
		Name  string `json:"name"`
		Value string `json:"value"`
	}
	type Output struct {
		Success bool `json:"success"`
	}

	handler1 := func(ctx context.Context, req *CallToolRequest, in Input1) (*CallToolResult, Output, error) {
		return &CallToolResult{}, Output{}, nil
	}
	handler2 := func(ctx context.Context, req *CallToolRequest, in Input2) (*CallToolResult, Output, error) {
		return &CallToolResult{}, Output{}, nil
	}
	handler3 := func(ctx context.Context, req *CallToolRequest, in Input3) (*CallToolResult, Output, error) {
		return &CallToolResult{}, Output{}, nil
	}

	tool1 := &Tool{Name: "tool1", Description: "Tool 1"}
	tool2 := &Tool{Name: "tool2", Description: "Tool 2"}
	tool3 := &Tool{Name: "tool3", Description: "Tool 3"}

	// Reset cache before benchmark
	globalSchemaCache.resetForTesting()

	b.ResetTimer()
	b.ReportAllocs()

	for i := 0; i < b.N; i++ {
		s := NewServer(&Implementation{Name: "test", Version: "1.0"}, nil)
		AddTool(s, tool1, handler1)
		AddTool(s, tool2, handler2)
		AddTool(s, tool3, handler3)
	}
}
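
Outside the benchmark harness, the stateless pattern these benchmarks simulate looks roughly like the minimal same-package sketch below. It is not part of the commit: echoInput, echoOutput, echoTool, and newRequestServer are hypothetical names, and the *Server return type of NewServer is assumed from the SDK's public API as used in the benchmarks above.

// Illustrative sketch only; these names are hypothetical, not SDK code.
type echoInput struct {
	Message string `json:"message" jsonschema:"required"`
}

type echoOutput struct {
	Echo string `json:"echo"`
}

// A package-level *Tool reused across requests. For tools that carry a
// pre-defined InputSchema, reusing the same pointer is what makes the
// bySchema (pointer-identity) cache effective; this tool has no InputSchema,
// so registration goes through the byType path instead.
var echoTool = &Tool{Name: "echo", Description: "Echo a message"}

func echoHandler(ctx context.Context, req *CallToolRequest, in echoInput) (*CallToolResult, echoOutput, error) {
	return &CallToolResult{}, echoOutput{Echo: in.Message}, nil
}

// newRequestServer builds a fresh server per incoming request. Only the first
// call pays for reflection-based schema generation for echoInput/echoOutput;
// later calls are served from globalSchemaCache.
func newRequestServer() *Server {
	s := NewServer(&Implementation{Name: "example", Version: "1.0"}, nil)
	AddTool(s, echoTool, echoHandler)
	return s
}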
