Commit 4085020

Implement openai for non-streaming
1 parent 4210ba8

File tree

2 files changed: +104 -256 lines changed


web/src/app/api/v1/chat/completions/_post.ts

Lines changed: 38 additions & 34 deletions
```diff
@@ -21,7 +21,10 @@ import {
   handleOpenRouterNonStream,
   handleOpenRouterStream,
 } from '@/llm-api/openrouter'
-import { handleOpenAIStream, OPENAI_SUPPORTED_MODELS } from '@/llm-api/openai'
+import {
+  handleOpenAINonStream,
+  OPENAI_SUPPORTED_MODELS,
+} from '@/llm-api/openai'
 import { extractApiKeyFromHeader } from '@/util/auth'
 
 export async function postChatCompletions(params: {
@@ -208,6 +211,36 @@ export async function postChatCompletions(params: {
   try {
     if (bodyStream) {
       // Streaming request
+      const stream = await handleOpenRouterStream({
+        body,
+        userId,
+        agentId,
+        openrouterApiKey,
+        fetch,
+        logger,
+        insertMessageBigquery,
+      })
+
+      trackEvent({
+        event: AnalyticsEvent.CHAT_COMPLETIONS_STREAM_STARTED,
+        userId,
+        properties: {
+          agentId,
+          runId: runIdFromBody,
+        },
+        logger,
+      })
+
+      return new NextResponse(stream, {
+        headers: {
+          'Content-Type': 'text/event-stream',
+          'Cache-Control': 'no-cache',
+          Connection: 'keep-alive',
+          'Access-Control-Allow-Origin': '*',
+        },
+      })
+    } else {
+      // Non-streaming request
       const model = (body as any)?.model
       const shortModelName =
         typeof model === 'string' ? model.split('/')[1] : undefined
@@ -216,16 +249,17 @@
         model.startsWith('openai/') &&
         OPENAI_SUPPORTED_MODELS.includes(shortModelName as any)
       const shouldUseOpenAIEndpoint = isOpenAIDirectModel && (body as any)?.n
-      const stream = await (shouldUseOpenAIEndpoint
-        ? handleOpenAIStream({
+
+      const result = await (shouldUseOpenAIEndpoint
+        ? handleOpenAINonStream({
            body,
            userId,
            agentId,
            fetch,
            logger,
            insertMessageBigquery,
          })
-        : handleOpenRouterStream({
+        : handleOpenRouterNonStream({
            body,
            userId,
            agentId,
@@ -235,36 +269,6 @@
          insertMessageBigquery,
        }))
 
-      trackEvent({
-        event: AnalyticsEvent.CHAT_COMPLETIONS_STREAM_STARTED,
-        userId,
-        properties: {
-          agentId,
-          runId: runIdFromBody,
-        },
-        logger,
-      })
-
-      return new NextResponse(stream, {
-        headers: {
-          'Content-Type': 'text/event-stream',
-          'Cache-Control': 'no-cache',
-          Connection: 'keep-alive',
-          'Access-Control-Allow-Origin': '*',
-        },
-      })
-    } else {
-      // Non-streaming request
-      const result = await handleOpenRouterNonStream({
-        body,
-        userId,
-        agentId,
-        openrouterApiKey,
-        fetch,
-        logger,
-        insertMessageBigquery,
-      })
-
       trackEvent({
         event: AnalyticsEvent.CHAT_COMPLETIONS_GENERATION_STARTED,
         userId,
```
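
After this change, streaming requests always go through OpenRouter, while non-streaming requests are dispatched either directly to OpenAI (via the new `handleOpenAINonStream`) or to OpenRouter, depending on the model prefix and the `n` parameter. The sketch below isolates that dispatch decision; it is a reconstruction, not code from this repository. The `ChatCompletionsBody` type, the concrete model list, and the `pickNonStreamingBackend` helper are assumptions for illustration; only the names `handleOpenAINonStream`, `handleOpenRouterNonStream`, `OPENAI_SUPPORTED_MODELS`, and the `model`/`n` checks come from the diff above.

```ts
// Assumed stand-in for OPENAI_SUPPORTED_MODELS from '@/llm-api/openai';
// the real list's contents are not shown in this commit.
const OPENAI_SUPPORTED_MODELS = ['gpt-4o', 'gpt-4o-mini'] as const

// Hypothetical request-body shape, reduced to the fields the check reads.
interface ChatCompletionsBody {
  model?: string
  n?: number // number of completions requested
  stream?: boolean
}

// Decide where a non-streaming request is routed.
function pickNonStreamingBackend(
  body: ChatCompletionsBody,
): 'openai' | 'openrouter' {
  const model = body.model
  // 'openai/gpt-4o' -> 'gpt-4o'
  const shortModelName =
    typeof model === 'string' ? model.split('/')[1] : undefined

  const isOpenAIDirectModel =
    typeof model === 'string' &&
    model.startsWith('openai/') &&
    OPENAI_SUPPORTED_MODELS.includes(shortModelName as any)

  // Mirrors `isOpenAIDirectModel && (body as any)?.n` in the diff:
  // `n` is truthy-checked, so n = 0 or a missing n stays on OpenRouter.
  const shouldUseOpenAIEndpoint = isOpenAIDirectModel && Boolean(body.n)
  return shouldUseOpenAIEndpoint ? 'openai' : 'openrouter'
}

// Usage:
pickNonStreamingBackend({ model: 'openai/gpt-4o', n: 2 })           // -> 'openai'
pickNonStreamingBackend({ model: 'openai/gpt-4o' })                 // -> 'openrouter'
pickNonStreamingBackend({ model: 'anthropic/claude-3-opus', n: 2 }) // -> 'openrouter'
```

One consequence visible in the diff: streaming requests never take the OpenAI-direct path after this commit, since the `handleOpenAIStream` import is dropped in favor of `handleOpenAINonStream` and the streaming branch calls `handleOpenRouterStream` unconditionally.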
