Commit 1456f9d

chore: add MCP token measurement script and docs (#507)
* feat: update project tool descriptions
* feat: update MCP tool descriptions
* chore: add MCP token measurement script and docs

Parent: 138fbe3

4 files changed: 197 additions, 0 deletions


mcp-worker/README.md

Lines changed: 24 additions & 0 deletions

````diff
@@ -133,3 +133,27 @@ When a user completes OAuth on the hosted MCP Worker, the worker emits a single
 
 - **Channel**: `${orgId}-mcp-install`
 - **Event name**: `mcp-install`
+
+## MCP Tool Token Counts
+
+Measure how many AI tokens our MCP tool descriptions and schemas consume. Run the measurement script (from repo root):
+
+```bash
+yarn install &&
+yarn measure:mcp-tokens
+```
+
+What it does:
+
+- Uses `gpt-tokenizer` (OpenAI-style) and `@anthropic-ai/tokenizer` (Anthropic) to count tokens in each tool's `description` and `inputSchema`.
+
+Current token totals:
+
+```json
+{
+    "totals": {
+        "anthropic": 10428,
+        "openai": 10746
+    }
+}
+```
````
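
As context for the bullet above (not part of the diff): the script sums two counts per tool, `encode(...)` from `gpt-tokenizer` for the OpenAI-style number and `countTokens(...)` from `@anthropic-ai/tokenizer` for the Anthropic number. A minimal sketch of that counting, using a hypothetical description string, looks like this:

```ts
// Minimal sketch (illustrative, not part of this commit): counting tokens for one
// tool description with the same two libraries the measurement script imports.
import { encode } from 'gpt-tokenizer' // OpenAI-style BPE encoding
import { countTokens } from '@anthropic-ai/tokenizer' // Anthropic tokenizer

// Hypothetical description text; real values come from the MCP tool definitions.
const description = 'List all features in the currently selected DevCycle project.'

console.log('openai:', encode(description).length)
console.log('anthropic:', countTokens(description))
```

The totals shown in the README are just these two counts summed over every tool's description and stringified schemas.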

package.json

Lines changed: 3 additions & 0 deletions

```diff
@@ -24,6 +24,7 @@
         "deploy:worker:prod": "cd mcp-worker && yarn deploy:prod",
         "deploy:worker:dev": "cd mcp-worker && yarn deploy:dev",
         "dev:worker": "cd mcp-worker && yarn dev",
+        "measure:mcp-tokens": "ts-node scripts/measureMcpTokens.ts",
         "format": "prettier --write \"src/**/*.{ts,js,json}\" \"test/**/*.{ts,js,json}\" \"test-utils/**/*.{ts,js,json}\" \"*.{ts,js,json,md}\"",
         "format:check": "prettier --check \"src/**/*.{ts,js,json}\" \"test/**/*.{ts,js,json}\" \"test-utils/**/*.{ts,js,json}\" \"*.{ts,js,json,md}\"",
         "lint": "eslint . --config eslint.config.mjs",
@@ -67,6 +68,7 @@
         "zod": "~3.25.76"
     },
     "devDependencies": {
+        "@anthropic-ai/tokenizer": "^0.0.4",
         "@babel/code-frame": "^7.27.1",
         "@babel/core": "^7.28.0",
         "@babel/generator": "^7.28.0",
@@ -85,6 +87,7 @@
         "chai": "^5.1.2",
         "eslint": "^9.18.0",
         "eslint-config-prettier": "^9.1.0",
+        "gpt-tokenizer": "^3.0.1",
         "mocha": "^10.8.2",
         "mocha-chai-jest-snapshot": "^1.1.6",
         "nock": "^13.5.6",
```

scripts/measureMcpTokens.ts

Lines changed: 135 additions & 0 deletions

```ts
#!/usr/bin/env ts-node
import { registerAllToolsWithServer } from '../src/mcp/tools'
import type { DevCycleMCPServerInstance } from '../src/mcp/server'
import type { IDevCycleApiClient } from '../src/mcp/api/interface'

type Collected = {
    name: string
    description: string
    inputSchema?: unknown
    outputSchema?: unknown
}

const collected: Collected[] = []

const mockServer: DevCycleMCPServerInstance = {
    registerToolWithErrorHandling(name, config) {
        collected.push({
            name,
            description: config.description,
            inputSchema: config.inputSchema,
            outputSchema: config.outputSchema,
        })
    },
}

// We do not need a real client to collect tool metadata
const fakeClient = {} as unknown as IDevCycleApiClient

registerAllToolsWithServer(mockServer, fakeClient)

let openAiEncoderPromise: Promise<(input: string) => number[]> | undefined
async function countOpenAI(text: string): Promise<number> {
    try {
        if (!openAiEncoderPromise) {
            openAiEncoderPromise = import('gpt-tokenizer').then((m) => m.encode)
        }
        const encode = await openAiEncoderPromise
        return encode(text).length
    } catch {
        return 0
    }
}
let anthropicCounterPromise: Promise<(input: string) => number> | undefined
async function countAnthropic(text: string): Promise<number> {
    try {
        if (!anthropicCounterPromise) {
            anthropicCounterPromise = import('@anthropic-ai/tokenizer').then(
                (m) => m.countTokens,
            )
        }
        const countTokens = await anthropicCounterPromise
        return countTokens(text)
    } catch {
        return 0
    }
}

type ResultRow = {
    name: string
    anthropic: {
        description: number
        inputSchema: number
        outputSchema: number
        total: number
    }
    openai: {
        description: number
        inputSchema: number
        outputSchema: number
        total: number
    }
}

const rows: ResultRow[] = []
let grandAnthropic = 0
let grandOpenAI = 0

async function main() {
    for (const t of collected) {
        const d = t.description ?? ''
        const i = t.inputSchema ? JSON.stringify(t.inputSchema) : ''
        const o = t.outputSchema ? JSON.stringify(t.outputSchema) : ''

        const [aDesc, aIn, aOut] = await Promise.all([
            countAnthropic(d),
            i ? countAnthropic(i) : Promise.resolve(0),
            o ? countAnthropic(o) : Promise.resolve(0),
        ])
        const aTotal = aDesc + aIn + aOut

        const [oDesc, oIn, oOut] = await Promise.all([
            countOpenAI(d),
            i ? countOpenAI(i) : Promise.resolve(0),
            o ? countOpenAI(o) : Promise.resolve(0),
        ])
        const oTotal = oDesc + oIn + oOut

        grandAnthropic += aTotal
        grandOpenAI += oTotal

        rows.push({
            name: t.name,
            anthropic: {
                description: aDesc,
                inputSchema: aIn,
                outputSchema: aOut,
                total: aTotal,
            },
            openai: {
                description: oDesc,
                inputSchema: oIn,
                outputSchema: oOut,
                total: oTotal,
            },
        })
    }

    rows.sort((a, b) => a.name.localeCompare(b.name))

    console.log(
        JSON.stringify(
            {
                tools: rows,
                totals: { anthropic: grandAnthropic, openai: grandOpenAI },
            },
            null,
            2,
        ),
    )
}

main().catch((err) => {
    console.error(err)
    process.exit(1)
})
```
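
The script prints a single JSON report (`{ tools: [...], totals: {...} }`) to stdout, so it is straightforward to post-process. A hypothetical follow-up (not part of this commit) could enforce a per-tool token budget in CI; the budget value and the way the command is invoked below are assumptions:

```ts
// Hypothetical budget check (not part of this commit): parse the report printed by
// scripts/measureMcpTokens.ts and fail if any tool exceeds an assumed token budget.
import { execFileSync } from 'node:child_process'

type Row = { name: string; openai: { total: number }; anthropic: { total: number } }

const BUDGET = 1500 // assumed per-tool budget, tune to taste

const stdout = execFileSync('yarn', ['measure:mcp-tokens'], { encoding: 'utf8' })
// Tolerate any runner output before the JSON by parsing from the first '{'.
const report = JSON.parse(stdout.slice(stdout.indexOf('{'))) as { tools: Row[] }

const offenders = report.tools.filter(
    (t) => t.openai.total > BUDGET || t.anthropic.total > BUDGET,
)
if (offenders.length > 0) {
    console.error('Tools over token budget:', offenders.map((t) => t.name).join(', '))
    process.exit(1)
}
```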

yarn.lock

Lines changed: 35 additions & 0 deletions

```diff
@@ -77,6 +77,16 @@ __metadata:
   languageName: node
   linkType: hard
 
+"@anthropic-ai/tokenizer@npm:^0.0.4":
+  version: 0.0.4
+  resolution: "@anthropic-ai/tokenizer@npm:0.0.4"
+  dependencies:
+    "@types/node": "npm:^18.11.18"
+    tiktoken: "npm:^1.0.10"
+  checksum: 10c0/fddaa82c26228b6385a0a064c145450564d0288c51d0346a70ce62a716627b26c227077aa909405313668fb5cbef91f3e396783b83decfa01d1c8777d5141220
+  languageName: node
+  linkType: hard
+
 "@apidevtools/json-schema-ref-parser@npm:9.0.6":
   version: 9.0.6
   resolution: "@apidevtools/json-schema-ref-parser@npm:9.0.6"
@@ -734,6 +744,7 @@
   version: 0.0.0-use.local
   resolution: "@devcycle/cli@workspace:."
   dependencies:
+    "@anthropic-ai/tokenizer": "npm:^0.0.4"
     "@babel/code-frame": "npm:^7.27.1"
     "@babel/core": "npm:^7.28.0"
     "@babel/generator": "npm:^7.28.0"
@@ -769,6 +780,7 @@
     eslint-config-prettier: "npm:^9.1.0"
     estraverse: "npm:^5.3.0"
     fuzzy: "npm:^0.1.3"
+    gpt-tokenizer: "npm:^3.0.1"
     inquirer: "npm:^8.2.6"
     inquirer-autocomplete-prompt: "npm:^2.0.1"
     js-sha256: "npm:^0.11.0"
@@ -2824,6 +2836,15 @@
   languageName: node
   linkType: hard
 
+"@types/node@npm:^18.11.18":
+  version: 18.19.123
+  resolution: "@types/node@npm:18.19.123"
+  dependencies:
+    undici-types: "npm:~5.26.4"
+  checksum: 10c0/8077177ee2019c4c8875784b367732813b07e533f24197d1fb3bb09e81335267de9da3e70326daaba7a6499df2410257f6099d82d15c9a903d1587a752563178
+  languageName: node
+  linkType: hard
+
 "@types/node@npm:^18.19.68":
   version: 18.19.118
   resolution: "@types/node@npm:18.19.118"
@@ -6115,6 +6136,13 @@
   languageName: node
   linkType: hard
 
+"gpt-tokenizer@npm:^3.0.1":
+  version: 3.0.1
+  resolution: "gpt-tokenizer@npm:3.0.1"
+  checksum: 10c0/e95c0825ccc13d27ff873b507c95eca1224f6c7aafdc825a49271959c1f8480368bad75728e32190d106489e25bae788c5932c7c8266ec58dc446ad0d5a26883
+  languageName: node
+  linkType: hard
+
 "graceful-fs@npm:^4.1.2, graceful-fs@npm:^4.1.5, graceful-fs@npm:^4.1.6, graceful-fs@npm:^4.2.0, graceful-fs@npm:^4.2.6, graceful-fs@npm:^4.2.9":
   version: 4.2.11
   resolution: "graceful-fs@npm:4.2.11"
@@ -10524,6 +10552,13 @@
   languageName: node
   linkType: hard
 
+"tiktoken@npm:^1.0.10":
+  version: 1.0.22
+  resolution: "tiktoken@npm:1.0.22"
+  checksum: 10c0/4805cf957d32ee53707ea8416256b50f6b4e865fce1d36ba507bfcbf4dd31bfa69b31dbd1303ba216ae44eb680f54c46c11defa5305445ecea946f2c048fa87d
+  languageName: node
+  linkType: hard
+
 "tinybench@npm:^2.9.0":
   version: 2.9.0
   resolution: "tinybench@npm:2.9.0"
```
