Skip to content
Open
Show file tree
Hide file tree
Changes from all commits
Commits
File filter

Filter by extension

Filter by extension

Conversations
Failed to load comments.
Loading
Jump to
Jump to file
Failed to load files.
Loading
Diff view
Diff view
8 changes: 6 additions & 2 deletions config.py
Original file line number Diff line number Diff line change
@@ -1,6 +1,6 @@
import os
from dotenv import load_dotenv

from datetime import datetime
load_dotenv(override=True)


Expand Down Expand Up @@ -56,4 +56,8 @@ class Config:
DOCSTAR_COLLECTION_ID = os.getenv('DOCSTAR_COLLECTION_ID')
AI_ML_APIKEY = os.getenv('AI_ML_APIKEY')
AI_MIDDLEWARE_PAUTH_KEY = os.getenv('AI_MIDDLEWARE_PAUTH_KEY')
OPENAI_API_KEY_GPT_5_NANO = os.getenv('OPENAI_API_KEY_GPT_5_NANO')
OPENAI_API_KEY_GPT_5_NANO = os.getenv('OPENAI_API_KEY_GPT_5_NANO')
JSON_RESPONSE_COMPARISON_DATE = datetime.strptime(
os.getenv("JSON_RESPONSE_COMPARISON_DATE"),
"%Y-%m-%d",
).date()
6 changes: 5 additions & 1 deletion src/db_services/metrics_service.py
Original file line number Diff line number Diff line change
Expand Up @@ -97,8 +97,12 @@ async def create(dataset, history_params, version_id, thread_info={}):
except:
latency_data = {}

    # Serialize the LLM message to a JSON string when it is a dict, since the log store expects a string.
message = history_params.get("message", "")
llm_message = json.dumps(message) if isinstance(message, dict) else message

conversation_log_data = {
'llm_message': history_params.get('message', ''),
'llm_message': llm_message,
'user': history_params.get('user', ''),
'chatbot_message': history_params.get('chatbot_message', ''),
'updated_llm_message': None,
Expand Down
4 changes: 2 additions & 2 deletions src/services/commonServices/AiMl/ai_ml_call.py
Original file line number Diff line number Diff line change
Expand Up @@ -19,7 +19,7 @@ async def execute(self):
if not self.playground:
await self.handle_failure(openAIResponse)
raise ValueError(openAIResponse.get('error'))
response = await Response_formatter(modelResponse, service_name['ai_ml'], tools, self.type, self.image_data)
response = await Response_formatter(modelResponse, service_name['ai_ml'], tools, self.type, self.image_data, self.expects_json)
if not self.playground:
historyParams = self.prepare_history_params(response, modelResponse, tools, None)
historyParams['message'] = "image generated successfully"
Expand Down Expand Up @@ -58,7 +58,7 @@ async def execute(self):
raise ValueError(functionCallRes.get('error'))
self.update_model_response(modelResponse, functionCallRes)
tools = functionCallRes.get("tools")
response = await Response_formatter(modelResponse, service_name['ai_ml'], tools, self.type, self.image_data)
response = await Response_formatter(modelResponse, service_name['ai_ml'], tools, self.type, self.image_data, self.expects_json)
if not self.playground:
transfer_config = functionCallRes.get('transfer_agent_config') if functionCallRes else None
historyParams = self.prepare_history_params(response, modelResponse, tools, transfer_config)
Expand Down
6 changes: 3 additions & 3 deletions src/services/commonServices/Google/geminiCall.py
Original file line number Diff line number Diff line change
Expand Up @@ -17,7 +17,7 @@ async def execute(self):
if not self.playground:
await self.handle_failure(gemini_response)
raise ValueError(gemini_response.get('error'))
response = await Response_formatter(model_response, service_name['gemini'], tools, self.type, self.image_data)
response = await Response_formatter(model_response, service_name['gemini'], tools, self.type, self.image_data, self.expects_json)
if not self.playground:
historyParams = self.prepare_history_params(response, model_response, tools, None)
historyParams['message'] = "image generated successfully"
Expand All @@ -33,7 +33,7 @@ async def execute(self):
await self.handle_failure(gemini_response)
raise ValueError(gemini_response.get('error'))
self.type = 'video'
response = await Response_formatter(model_response, service_name['gemini'], tools, self.type, self.file_data)
response = await Response_formatter(model_response, service_name['gemini'], tools, self.type, self.file_data, self.expects_json)
if not self.playground:
historyParams = self.prepare_history_params(response, model_response, tools, None)
historyParams['type'] = 'assistant'
Expand Down Expand Up @@ -69,7 +69,7 @@ async def execute(self):
raise ValueError(functionCallRes.get('error'))
self.update_model_response(model_response, functionCallRes)
tools = functionCallRes.get("tools", {})
response = await Response_formatter(model_response, service_name['gemini'], tools, self.type, self.image_data)
response = await Response_formatter(model_response, service_name['gemini'], tools, self.type, self.image_data, self.expects_json)
if not self.playground:
transfer_config = functionCallRes.get('transfer_agent_config') if functionCallRes else None
historyParams = self.prepare_history_params(response, model_response, tools, transfer_config)
Expand Down
2 changes: 1 addition & 1 deletion src/services/commonServices/Mistral/mistral_call.py
Original file line number Diff line number Diff line change
Expand Up @@ -53,7 +53,7 @@ async def execute(self):
raise ValueError(functionCallRes.get('error'))
self.update_model_response(model_response, functionCallRes)
tools = functionCallRes.get("tools", {})
response = await Response_formatter(model_response, service_name['mistral'], tools, self.type, self.image_data)
response = await Response_formatter(model_response, service_name['mistral'], tools, self.type, self.image_data, self.expects_json)
if not self.playground:
transfer_config = functionCallRes.get('transfer_agent_config') if functionCallRes else None
historyParams = self.prepare_history_params(response, model_response, tools, transfer_config)
Expand Down
2 changes: 1 addition & 1 deletion src/services/commonServices/anthrophic/antrophicCall.py
Original file line number Diff line number Diff line change
Expand Up @@ -62,7 +62,7 @@ async def execute(self):

self.update_model_response(modelResponse, functionCallRes)
tools = functionCallRes.get("tools", {})
response = await Response_formatter(modelResponse, service_name['anthropic'], tools, self.type, self.image_data)
response = await Response_formatter(modelResponse, service_name['anthropic'], tools, self.type, self.image_data, self.expects_json)
if not self.playground:
transfer_config = functionCallRes.get('transfer_agent_config') if functionCallRes else None
historyParams = self.prepare_history_params(response, modelResponse, tools, transfer_config)
Expand Down
6 changes: 5 additions & 1 deletion src/services/commonServices/baseService/baseService.py
Original file line number Diff line number Diff line change
Expand Up @@ -74,6 +74,7 @@ def __init__(self, params):
self.web_search_filters = params.get('web_search_filters')
self.folder_id = params.get('folder_id')
self.bridge_configurations = params.get('bridge_configurations')
self.expects_json = params.get('expects_json') or False


def aiconfig(self):
Expand Down Expand Up @@ -227,11 +228,14 @@ def prepare_history_params(self,response, model_response, tools, transfer_agent_
agent_name = transfer_agent_config.get('tool_name', 'the agent')
original_message = f"Query is successfully transferred to agent {agent_name}"

        # Serialize the message to a JSON string when it is a dict before persisting it in history.
serialized_message = json.dumps(original_message) if isinstance(original_message, dict) else original_message

return {
'thread_id': self.thread_id,
'sub_thread_id': self.sub_thread_id,
'user': self.user if self.user else "",
'message': original_message,
'message': serialized_message,
'org_id': self.org_id,
'bridge_id': self.bridge_id,
'model': model_response.get('model') or self.configuration.get('model'),
Expand Down
2 changes: 1 addition & 1 deletion src/services/commonServices/grok/grokCall.py
Original file line number Diff line number Diff line change
Expand Up @@ -62,7 +62,7 @@ async def execute(self):
self.update_model_response(model_response, function_call_response)
tools = function_call_response.get("tools", {})

response = await Response_formatter(model_response, service_name['grok'], tools, self.type, self.image_data)
response = await Response_formatter(model_response, service_name['grok'], tools, self.type, self.image_data, self.expects_json)

if not self.playground:
transfer_config = function_call_response.get('transfer_agent_config') if function_call_response else None
Expand Down
2 changes: 1 addition & 1 deletion src/services/commonServices/groq/groqCall.py
Original file line number Diff line number Diff line change
Expand Up @@ -31,7 +31,7 @@ async def execute(self):
self.update_model_response(model_response, functionCallRes)
tools = functionCallRes.get("tools", {})

response = await Response_formatter(model_response, service_name['groq'], tools, self.type, self.image_data)
response = await Response_formatter(model_response, service_name['groq'], tools, self.type, self.image_data, self.expects_json)
if not self.playground:
transfer_config = functionCallRes.get('transfer_agent_config') if functionCallRes else None
historyParams = self.prepare_history_params(response, model_response, tools, transfer_config)
Expand Down
Original file line number Diff line number Diff line change
Expand Up @@ -17,7 +17,7 @@ async def execute(self):
if not self.playground:
await self.handle_failure(gemini_response)
raise ValueError(gemini_response.get('error'))
response = await Response_formatter(model_response, service_name['openai_completion'], tools, self.type, self.image_data)
response = await Response_formatter(model_response, service_name['openai_completion'], tools, self.type, self.image_data, self.expects_json)
if not self.playground:
historyParams = self.prepare_history_params(response, model_response, tools, None)
historyParams['message'] = "image generated successfully"
Expand Down Expand Up @@ -54,7 +54,7 @@ async def execute(self):
raise ValueError(functionCallRes.get('error'))
self.update_model_response(model_response, functionCallRes)
tools = functionCallRes.get("tools", {})
response = await Response_formatter(model_response, service_name['openai_completion'], tools, self.type, self.image_data)
response = await Response_formatter(model_response, service_name['openai_completion'], tools, self.type, self.image_data, self.expects_json)
if not self.playground:
transfer_config = functionCallRes.get('transfer_agent_config') if functionCallRes else None
historyParams = self.prepare_history_params(response, model_response, tools, transfer_config)
Expand Down
6 changes: 3 additions & 3 deletions src/services/commonServices/openAI/openai_response.py
Original file line number Diff line number Diff line change
Expand Up @@ -18,7 +18,7 @@ async def execute(self):
if not self.playground:
await self.handle_failure(openAIResponse)
raise ValueError(openAIResponse.get('error'))
response = await Response_formatter(modelResponse, service_name['openai'], tools, self.type, self.image_data)
response = await Response_formatter(modelResponse, service_name['openai'], tools, self.type, self.image_data, self.expects_json)
if not self.playground:
historyParams = self.prepare_history_params(response, modelResponse, tools, None)
historyParams['message'] = "image generated successfully"
Expand Down Expand Up @@ -84,10 +84,10 @@ async def execute(self):
await self.handle_failure(functionCallRes)
raise ValueError(functionCallRes.get('error'))
self.update_model_response(modelResponse, functionCallRes)
response = await Response_formatter(functionCallRes.get("modelResponse", {}), service_name['openai'], functionCallRes.get("tools", {}), self.type, self.image_data)
response = await Response_formatter(functionCallRes.get("modelResponse", {}), service_name['openai'], functionCallRes.get("tools", {}), self.type, self.image_data, self.expects_json)
tools = functionCallRes.get("tools", {})
else:
response = await Response_formatter(modelResponse, service_name['openai'], {}, self.type, self.image_data)
response = await Response_formatter(modelResponse, service_name['openai'], {}, self.type, self.image_data, self.expects_json)

if not self.playground:
transfer_config = functionCallRes.get('transfer_agent_config') if has_function_call and functionCallRes else None
Expand Down
2 changes: 1 addition & 1 deletion src/services/commonServices/openRouter/openRouter_call.py
Original file line number Diff line number Diff line change
Expand Up @@ -39,7 +39,7 @@ async def execute(self):
raise ValueError(functionCallRes.get('error'))
self.update_model_response(modelResponse, functionCallRes)
tools = functionCallRes.get("tools", {})
response = await Response_formatter(modelResponse, service_name['open_router'], tools, self.type, self.image_data)
response = await Response_formatter(modelResponse, service_name['open_router'], tools, self.type, self.image_data, self.expects_json)
if not self.playground:
transfer_config = functionCallRes.get('transfer_agent_config') if functionCallRes else None
historyParams = self.prepare_history_params(response, modelResponse, tools, transfer_config)
Expand Down
45 changes: 27 additions & 18 deletions src/services/utils/ai_middleware_format.py
Original file line number Diff line number Diff line change
Expand Up @@ -2,9 +2,13 @@
from config import Config
from src.services.utils.apiservice import fetch
from src.configs.constant import service_name
from src.services.utils.json_parser_utils import maybe_parse_json

async def Response_formatter(response = {}, service = None, tools={}, type='chat', images = None):
async def Response_formatter(response = {}, service = None, tools={}, type='chat', images = None, expects_json = False):
tools_data = tools

data_to_return = {}

if isinstance(tools_data, dict):
for key, value in tools_data.items():
if isinstance(value, str):
Expand All @@ -13,7 +17,7 @@ async def Response_formatter(response = {}, service = None, tools={}, type='chat
except json.JSONDecodeError:
pass
if service == 'openai_batch':
return {
data_to_return = {
"data" : {
"id" : response.get("id", None),
"content" : response.get("choices", [{}])[0].get("message", {}).get("content", None),
Expand All @@ -35,7 +39,7 @@ async def Response_formatter(response = {}, service = None, tools={}, type='chat
}
}
elif service == service_name['openai'] and (type != 'image' and type != 'embedding'):
return {
data_to_return = {
"data": {
"id": response.get("id", None),
"content": (
Expand Down Expand Up @@ -86,10 +90,10 @@ async def Response_formatter(response = {}, service = None, tools={}, type='chat
}
}
elif service == service_name['gemini'] and (type !='image' and type != 'embedding' and type != 'video'):
return {
data_to_return = {
"data" : {
"id" : response.get("id", None),
"content" : response.get("choices", [{}])[0].get("message", {}).get("content", None),
"content" : maybe_parse_json(response.get("choices", [{}])[0].get("message", {}).get("content", None), expects_json),
"model" : response.get("model", None),
"role" : response.get("choices", [{}])[0].get("message", {}).get("role", None),
"tools_data": tools_data or {},
Expand All @@ -108,21 +112,21 @@ async def Response_formatter(response = {}, service = None, tools={}, type='chat
}
}
elif service == service_name['openai'] and type == 'embedding':
return {
data_to_return = {
"data" : {
"embedding" : response.get('data')[0].get('embedding')
}
}
elif service == service_name['gemini'] and type == 'image':
return {
data_to_return = {
"data" : {
"revised_prompt" : response.get('data')[0].get('text_content'),
"image_url" : response.get('data')[0].get('url'),
"permanent_url" : response.get('data')[0].get('url'),
}
}
elif service == service_name['gemini'] and type == 'video':
return {
data_to_return = {
"data" : {
"content" : response.get('data')[0].get('text_content'),
"file_data" : response.get('data')[0].get('file_reference')
Expand All @@ -137,14 +141,14 @@ async def Response_formatter(response = {}, service = None, tools={}, type='chat
"permanent_url": image_data.get('url')
})

return {
data_to_return = {
"data": {
"image_urls": image_urls
}
}

elif service == service_name['anthropic']:
return {
data_to_return = {
"data" : {
"id" : response.get("id", None),
"content" : response.get("content", [{}])[0].get("text", None),
Expand All @@ -168,7 +172,7 @@ async def Response_formatter(response = {}, service = None, tools={}, type='chat
}
}
elif service == service_name['groq']:
return {
data_to_return = {
"data" : {
"id" : response.get("id", None),
"content" : response.get("choices", [{}])[0].get("message", {}).get("content", None),
Expand All @@ -185,10 +189,10 @@ async def Response_formatter(response = {}, service = None, tools={}, type='chat
}
}
elif service == service_name['grok']:
return {
data_to_return = {
"data": {
"id": response.get("id", None),
"content": response.get("choices", [{}])[0].get("message", {}).get("content", None),
"content" : response.get("choices", [{}])[0].get("message", {}).get("content", None),
"model": response.get("model", None),
"role": response.get("choices", [{}])[0].get("message", {}).get("role", None),
"tools_data": tools_data or {},
Expand All @@ -205,7 +209,7 @@ async def Response_formatter(response = {}, service = None, tools={}, type='chat
}
}
elif service == service_name['open_router']:
return {
data_to_return = {
"data" : {
"id" : response.get("id", None),
"content" : response.get("choices", [{}])[0].get("message", {}).get("content", None),
Expand All @@ -230,7 +234,7 @@ async def Response_formatter(response = {}, service = None, tools={}, type='chat
}
}
elif service == service_name['openai_completion']:
return {
data_to_return = {
"data" : {
"id" : response.get("id", None),
"content" : response.get("choices", [{}])[0].get("message", {}).get("content", None),
Expand All @@ -255,7 +259,7 @@ async def Response_formatter(response = {}, service = None, tools={}, type='chat
}
}
elif service == service_name['mistral']:
return {
data_to_return = {
"data" : {
"id" : response.get("id", None),
"content" : response.get("choices", [{}])[0].get("message", {}).get("content", None),
Expand Down Expand Up @@ -289,7 +293,7 @@ async def Response_formatter(response = {}, service = None, tools={}, type='chat
"size": image_data.get('size')
})

return {
data_to_return = {
"data": {
"image_urls": image_urls
},
Expand All @@ -300,7 +304,7 @@ async def Response_formatter(response = {}, service = None, tools={}, type='chat
}
}
elif service == service_name['ai_ml']:
return {
data_to_return = {
"data" : {
"id" : response.get("id", None),
"content" : response.get("choices", [{}])[0].get("message", {}).get("content", None),
Expand All @@ -322,6 +326,11 @@ async def Response_formatter(response = {}, service = None, tools={}, type='chat
}
}

if data_to_return.get("data", {}).get("content"):
data_to_return["data"]["content"] = maybe_parse_json(data_to_return["data"]["content"], expects_json)

return data_to_return

async def validateResponse(alert_flag, configration, bridgeId, message_id, org_id):
if alert_flag:
await send_alert(data={"response":"\n..\n","configration":configration,"message_id":message_id,"bridge_id":bridgeId, "org_id": org_id, "message": "\n issue occurs"})
Expand Down
Loading