/**
 * Minimal LLM API Client
 * Supports OpenAI-compatible / Anthropic-compatible formats
 */
55
66import type { LLMConfig } from './llmModels' ;
@@ -88,6 +88,73 @@ interface LLMResponse {
8888 toolCalls : ToolCall [ ] ;
8989}
9090
/**
 * Result of scanning assistant text for inline `<tool_call>` markup:
 * `content` is the text with the extracted blocks removed, and
 * `toolCalls` holds the structured calls recovered from them.
 */
interface InlineToolParseResult {
  content: string;
  toolCalls: ToolCall[];
}
95+
96+ function stripThinkTags ( content : string ) : string {
97+ const withoutBlocks = content
98+ . replace ( / < t h i n k \b [ ^ > ] * > [ \s \S ] * ?< \/ t h i n k > / gi, '' )
99+ . replace ( / < \/ ? t h i n k \b [ ^ > ] * > / gi, '' ) ;
100+ return withoutBlocks === content ? content : withoutBlocks . trim ( ) ;
101+ }
102+
103+ function parseInlineArgValue ( rawValue : string ) : unknown {
104+ const trimmed = rawValue . trim ( ) ;
105+ if ( ! trimmed ) return '' ;
106+ try {
107+ return JSON . parse ( trimmed ) ;
108+ } catch {
109+ return trimmed ;
110+ }
111+ }
112+
113+ function extractInlineToolCalls ( rawContent : string ) : InlineToolParseResult {
114+ const content = stripThinkTags ( rawContent ) ;
115+ if ( ! content . includes ( '<arg_key>' ) || ! content . includes ( '<arg_value>' ) ) {
116+ return { content, toolCalls : [ ] } ;
117+ }
118+
119+ const blockRegex = / (?: < t o o l _ c a l l > \s * | \( ) ( [ a - z A - Z 0 - 9 _ . - ] + ) \s * ( [ \s \S ] * ?) < \/ t o o l _ c a l l > / g;
120+ const toolCalls : ToolCall [ ] = [ ] ;
121+ let cleanedContent = content ;
122+ let matchIndex = 0 ;
123+
124+ for ( const match of content . matchAll ( blockRegex ) ) {
125+ const toolName = match [ 1 ] ?. trim ( ) ;
126+ const body = match [ 2 ] ?? '' ;
127+ if ( ! toolName ) continue ;
128+
129+ const args : Record < string , unknown > = { } ;
130+ const pairRegex =
131+ / < a r g _ k e y > \s * ( [ \s \S ] * ?) \s * < \/ a r g _ k e y > \s * < a r g _ v a l u e > \s * ( [ \s \S ] * ?) \s * < \/ a r g _ v a l u e > / g;
132+
133+ for ( const pair of body . matchAll ( pairRegex ) ) {
134+ const key = pair [ 1 ] ?. trim ( ) ;
135+ if ( ! key ) continue ;
136+ args [ key ] = parseInlineArgValue ( pair [ 2 ] ?? '' ) ;
137+ }
138+
139+ if ( Object . keys ( args ) . length === 0 ) continue ;
140+
141+ toolCalls . push ( {
142+ id : `inline_tool_${ matchIndex ++ } ` ,
143+ type : 'function' ,
144+ function : {
145+ name : toolName ,
146+ arguments : JSON . stringify ( args ) ,
147+ } ,
148+ } ) ;
149+ cleanedContent = cleanedContent . replace ( match [ 0 ] , '' ) ;
150+ }
151+
152+ return {
153+ content : cleanedContent . trim ( ) ,
154+ toolCalls,
155+ } ;
156+ }
157+
91158function hasVersionSuffix ( url : string ) : boolean {
92159 return / \/ v \d + \/ ? $ / . test ( url ) ;
93160}
@@ -162,14 +229,17 @@ async function chatOpenAI(
162229 messageCount : messages . length ,
163230 toolCount : tools . length ,
164231 } ) ;
232+ const headers : Record < string , string > = {
233+ 'Content-Type' : 'application/json' ,
234+ 'X-LLM-Target-URL' : targetUrl ,
235+ ...parseCustomHeaders ( config . customHeaders ) ,
236+ } ;
237+ if ( config . apiKey . trim ( ) ) {
238+ headers . Authorization = `Bearer ${ config . apiKey } ` ;
239+ }
165240 const res = await fetch ( '/api/llm-proxy' , {
166241 method : 'POST' ,
167- headers : {
168- 'Content-Type' : 'application/json' ,
169- Authorization : `Bearer ${ config . apiKey } ` ,
170- 'X-LLM-Target-URL' : targetUrl ,
171- ...parseCustomHeaders ( config . customHeaders ) ,
172- } ,
242+ headers,
173243 body : JSON . stringify ( body ) ,
174244 } ) ;
175245
@@ -183,7 +253,8 @@ async function chatOpenAI(
183253
184254 const data = JSON . parse ( text ) ;
185255 const choice = data . choices ?. [ 0 ] ?. message ;
186- const toolCalls = choice ?. tool_calls || [ ] ;
256+ const parsedInline = extractInlineToolCalls ( choice ?. content || '' ) ;
257+ const toolCalls = choice ?. tool_calls ?. length ? choice . tool_calls : parsedInline . toolCalls ;
187258 const calledNames = toolCalls
188259 . map ( ( tc : { function ?: { name ?: string } } ) => tc . function ?. name )
189260 . filter ( Boolean ) ;
@@ -195,7 +266,9 @@ async function chatOpenAI(
195266 calledNames ,
196267 ) ;
197268 return {
198- content : choice ?. content || '' ,
269+ content : choice ?. tool_calls ?. length
270+ ? stripThinkTags ( choice ?. content || '' )
271+ : parsedInline . content ,
199272 toolCalls,
200273 } ;
201274}
@@ -267,15 +340,18 @@ async function chatAnthropic(
267340 messageCount : anthropicMessages . length ,
268341 toolCount : anthropicTools . length ,
269342 } ) ;
343+ const headers : Record < string , string > = {
344+ 'Content-Type' : 'application/json' ,
345+ 'anthropic-version' : '2023-06-01' ,
346+ 'X-LLM-Target-URL' : targetUrl ,
347+ ...parseCustomHeaders ( config . customHeaders ) ,
348+ } ;
349+ if ( config . apiKey . trim ( ) ) {
350+ headers [ 'x-api-key' ] = config . apiKey ;
351+ }
270352 const res = await fetch ( '/api/llm-proxy' , {
271353 method : 'POST' ,
272- headers : {
273- 'Content-Type' : 'application/json' ,
274- 'x-api-key' : config . apiKey ,
275- 'anthropic-version' : '2023-06-01' ,
276- 'X-LLM-Target-URL' : targetUrl ,
277- ...parseCustomHeaders ( config . customHeaders ) ,
278- } ,
354+ headers,
279355 body : JSON . stringify ( body ) ,
280356 } ) ;
281357
@@ -314,5 +390,5 @@ async function chatAnthropic(
314390 'calledNames=' ,
315391 calledNames ,
316392 ) ;
317- return { content, toolCalls } ;
393+ return { content : stripThinkTags ( content ) , toolCalls } ;
318394}
0 commit comments