File tree: 1 file changed, +24 −1 lines changed.
lines changed Original file line number Diff line number Diff line change 55from __future__ import annotations
66
77
8+ import json
9+ import time
810import os
11+ import uuid
912from enum import Enum
1013from typing import Optional , Literal
1114
@@ -350,7 +353,27 @@ async def api_call(
350353 # Get result output
351354 output_text = self .get_text_from_response (response )
352355 if unique_id and output_text :
353- PromptServer .instance .send_progress_text (output_text , node_id = unique_id )
356+ # Not a true chat history like the OpenAI Chat node. It is emulated so the frontend can show a copy button.
357+ render_spec = {
358+ "node_id" : unique_id ,
359+ "component" : "ChatHistoryWidget" ,
360+ "props" : {
361+ "history" : json .dumps (
362+ [
363+ {
364+ "prompt" : prompt ,
365+ "response" : output_text ,
366+ "response_id" : str (uuid .uuid4 ()),
367+ "timestamp" : time .time (),
368+ }
369+ ]
370+ ),
371+ },
372+ }
373+ PromptServer .instance .send_sync (
374+ "display_component" ,
375+ render_spec ,
376+ )
354377
355378 return (output_text or "Empty response from Gemini model..." ,)
356379
You can’t perform that action at this time.
0 commit comments