Skip to content
Open
Show file tree
Hide file tree
Changes from all commits
Commits
File filter

Filter by extension

Filter by extension

Conversations
Failed to load comments.
Loading
Jump to
Jump to file
Failed to load files.
Loading
Diff view
Diff view
23 changes: 12 additions & 11 deletions src/services/commonServices/common.py
Original file line number Diff line number Diff line change
Expand Up @@ -2,6 +2,8 @@
from fastapi import FastAPI
from fastapi.responses import JSONResponse
import traceback
from exceptions.bad_request import BadRequestException
from src.services.utils.logger import logger
from ...db_services import metrics_service as metrics_service
import pydash as _
from ..utils.helper import Helper
Expand Down Expand Up @@ -34,15 +36,12 @@
create_history_params,
add_files_to_parse_data,
orchestrator_agent_chat,
process_background_tasks_for_playground
process_background_tasks_for_playground,
transform_error_response
)
from src.services.utils.guardrails_validator import guardrails_check
from src.services.utils.rich_text_support import process_chatbot_response
app = FastAPI()
from src.services.utils.helper import Helper
from src.services.commonServices.testcases import run_testcases as run_bridge_testcases
from globals import *
from src.services.cache_service import find_in_cache

configurationModel = db["configurations"]

Expand Down Expand Up @@ -233,7 +232,7 @@ async def chat(request_body):
except (Exception, ValueError, BadRequestException) as error:
if not isinstance(error, BadRequestException):
logger.error(f'Error in chat service: %s, {str(error)}, {traceback.format_exc()}')
if not parsed_data['is_playground']:
if not parsed_data.get('is_playground', True):
# Create latency object and update usage metrics
latency = create_latency_object(timer, params)
update_usage_metrics(parsed_data, params, latency, error=error, success=False)
Expand All @@ -243,9 +242,9 @@ async def chat(request_body):
await sendResponse(parsed_data['response_format'], result.get("error", str(error)), variables=parsed_data['variables']) if parsed_data['response_format']['type'] != 'default' else None
# Process background tasks for error handling
await process_background_tasks_for_error(parsed_data, error)
# Add support contact information to error message
error_message = f"{str(error)}. For more support contact us at support@gtwy.ai"
raise ValueError(error_message)
# Transform error using one-line function
transformed_error = transform_error_response(error)
raise ValueError(transformed_error)



Expand Down Expand Up @@ -422,7 +421,7 @@ async def image(request_body):
except (Exception, ValueError, BadRequestException) as error:
if not isinstance(error, BadRequestException):
logger.error(f'Error in image service: {str(error)}, {traceback.format_exc()}')
if not parsed_data['is_playground']:
if not parsed_data.get('is_playground', True):
# Create latency object and update usage metrics
latency = create_latency_object(timer, params)
update_usage_metrics(parsed_data, params, latency, error=error, success=False)
Expand All @@ -432,4 +431,6 @@ async def image(request_body):
await sendResponse(parsed_data['response_format'], result.get("modelResponse", str(error)), variables=parsed_data['variables']) if parsed_data['response_format']['type'] != 'default' else None
# Process background tasks for error handling
await process_background_tasks_for_error(parsed_data, error)
raise ValueError(error)
# Transform error using one-line function
transformed_error = transform_error_response(error, parsed_data)
raise ValueError(transformed_error)
15 changes: 15 additions & 0 deletions src/services/utils/common_utils.py
Original file line number Diff line number Diff line change
Expand Up @@ -29,6 +29,21 @@
from src.services.utils.rich_text_support import process_chatbot_response
from src.db_services.orchestrator_history_service import OrchestratorHistoryService, orchestrator_collector

def transform_error_response(error, parsed_data=None):
    """
    Transform a caught error into a user-facing message string.

    Recognizes a known Anthropic incompatibility (passing ``response_type`` to
    ``AsyncMessages.stream()``) and replaces it with an actionable message;
    for every other error, appends the support contact to the original text.

    Args:
        error: The caught exception (or anything convertible with ``str()``).
        parsed_data: Optional request context. Accepted for callers that pass
            it (the image service invokes this with two arguments) but
            currently unused; defaults to ``None`` so one-argument callers
            keep working.

    Returns:
        str: The transformed error message.
    """
    # Normalize to text so the substring checks work for both exception
    # objects and plain string/dict errors.
    error_str = str(error)

    # Known failure mode: Anthropic's AsyncMessages.stream() rejects the
    # response_type keyword; surface a clearer, actionable message instead.
    if ("AsyncMessages.stream() got an unexpected keyword argument" in error_str and
            "response_type" in error_str):
        # NOTE(review): the literal "{}" below looks like an unfilled format
        # placeholder — confirm whether the offending response_type value was
        # meant to be interpolated here.
        return 'Anthropic API does not support {} as JSON SCHEMA. Please update response_type. For more support contact us at support@gtwy.ai'

    return f"{error_str}. For more support contact us at support@gtwy.ai"

def parse_request_body(request_body):
body = request_body.get('body', {})
state = request_body.get('state', {})
Expand Down