   VERSION,
 } from '@ai-sdk/openai-compatible'
 import { websiteUrl } from '@codebuff/npm-app/config'
-import { streamText } from 'ai'
+import { generateText } from 'ai'

 const apiKey = '12345'

@@ -21,7 +21,8 @@ const codebuffBackendModel = new OpenAICompatibleChatLanguageModel(
     }),
     metadataExtractor: {
       extractMetadata: async (...inputs) => {
-        console.log(inputs, 'extractMetadata')
+        console.dir({ extractMetadata: inputs }, { depth: null })
+
         return undefined
       },
       createStreamExtractor: () => ({
@@ -43,10 +44,9 @@ const codebuffBackendModel = new OpenAICompatibleChatLanguageModel(
   },
 )

-const response = streamText({
-// const response = await streamText({
-// const response = await generateText({
-// model: codebuffBackendProvider('openai/gpt-5'),
+// const response = streamText({
+// const response = await generateObject({
+const response = await generateText({
   model: codebuffBackendModel,
   messages: [
     {
@@ -82,6 +82,7 @@ const response = streamText({
   },
 })

-for await (const chunk of response.fullStream) {
-  console.dir({ chunk }, { depth: null })
-}
+// for await (const chunk of response.fullStream) {
+//   console.dir({ chunk }, { depth: null })
+// }
+console.log(response.text)