@@ -41,7 +41,9 @@ const BLUE_PIXEL = {
 
 // Test both providers with their respective models
 const PROVIDER_CONFIGS: Array<[string, string]> = [
-  ["openai", KNOWN_MODELS.GPT_MINI.providerModelId],
+  // NOTE: Use a chat-mode vision-capable model. Some *responses-only* models may advertise
+  // supports_vision but still fail to ingest data-URI image parts in our current adapter.
+  ["openai", KNOWN_MODELS.GPT.providerModelId],
   ["anthropic", KNOWN_MODELS.HAIKU.providerModelId],
 ];
 
@@ -145,7 +147,7 @@ describeIntegration("sendMessage image handling tests", () => {
     await withSharedWorkspace("openai", async ({ env, workspaceId, collector }) => {
       // Send first message with image
       const result1 = await sendMessage(env, workspaceId, "Remember this image", {
-        model: modelString("openai", KNOWN_MODELS.GPT_MINI.providerModelId),
+        model: modelString("openai", KNOWN_MODELS.GPT.providerModelId),
         imageParts: [RED_PIXEL],
       });
 
@@ -163,7 +165,7 @@ describeIntegration("sendMessage image handling tests", () => {
         workspaceId,
         "What color was the image I showed you?",
         {
-          model: modelString("openai", KNOWN_MODELS.GPT_MINI.providerModelId),
+          model: modelString("openai", KNOWN_MODELS.GPT.providerModelId),
         }
       );
 
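
For reference, RED_PIXEL and BLUE_PIXEL are the image fixtures the tests pass via imageParts; their actual definitions sit just above the first hunk and are not shown in this diff. The sketch below is only an assumption of what a data-URI image part of that kind could look like, not the repository's real ImagePart type or fixture definition.

// Hypothetical sketch: the ImagePart type and field names here are assumptions,
// chosen to match the "data-URI image parts" wording in the NOTE comment above.
type ImagePart = {
  url: string;       // base64 data URI, e.g. "data:image/png;base64,<payload>"
  mimeType?: string;
};

// A 1x1 red PNG encoded as a base64 data URI (payload truncated as a placeholder).
const RED_PIXEL: ImagePart = {
  url: "data:image/png;base64,iVBORw0KGgoAAAANSUhEUg...",
  mimeType: "image/png",
};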