diff --git a/src/services/commonServices/common.py b/src/services/commonServices/common.py index b9662efa..aa52bacc 100644 --- a/src/services/commonServices/common.py +++ b/src/services/commonServices/common.py @@ -2,6 +2,8 @@ from fastapi import FastAPI from fastapi.responses import JSONResponse import traceback +from exceptions.bad_request import BadRequestException +from src.services.utils.logger import logger from ...db_services import metrics_service as metrics_service import pydash as _ from ..utils.helper import Helper @@ -34,15 +36,12 @@ create_history_params, add_files_to_parse_data, orchestrator_agent_chat, - process_background_tasks_for_playground + process_background_tasks_for_playground, + transform_error_response ) from src.services.utils.guardrails_validator import guardrails_check from src.services.utils.rich_text_support import process_chatbot_response app = FastAPI() -from src.services.utils.helper import Helper -from src.services.commonServices.testcases import run_testcases as run_bridge_testcases -from globals import * -from src.services.cache_service import find_in_cache configurationModel = db["configurations"] @@ -233,7 +232,7 @@ async def chat(request_body): except (Exception, ValueError, BadRequestException) as error: if not isinstance(error, BadRequestException): logger.error(f'Error in chat service: %s, {str(error)}, {traceback.format_exc()}') - if not parsed_data['is_playground']: + if not parsed_data.get('is_playground', True): # Create latency object and update usage metrics latency = create_latency_object(timer, params) update_usage_metrics(parsed_data, params, latency, error=error, success=False) @@ -243,9 +242,9 @@ async def chat(request_body): await sendResponse(parsed_data['response_format'], result.get("error", str(error)), variables=parsed_data['variables']) if parsed_data['response_format']['type'] != 'default' else None # Process background tasks for error handling await process_background_tasks_for_error(parsed_data, error) 
- # Add support contact information to error message - error_message = f"{str(error)}. For more support contact us at support@gtwy.ai" - raise ValueError(error_message) + # Transform error into a user-facing error message + transformed_error = transform_error_response(error) + raise ValueError(transformed_error) @@ -422,7 +421,7 @@ async def image(request_body): except (Exception, ValueError, BadRequestException) as error: if not isinstance(error, BadRequestException): logger.error(f'Error in image service: {str(error)}, {traceback.format_exc()}') - if not parsed_data['is_playground']: + if not parsed_data.get('is_playground', True): # Create latency object and update usage metrics latency = create_latency_object(timer, params) update_usage_metrics(parsed_data, params, latency, error=error, success=False) @@ -432,4 +431,6 @@ async def image(request_body): await sendResponse(parsed_data['response_format'], result.get("modelResponse", str(error)), variables=parsed_data['variables']) if parsed_data['response_format']['type'] != 'default' else None # Process background tasks for error handling await process_background_tasks_for_error(parsed_data, error) - raise ValueError(error) + # Transform error into a user-facing error message + transformed_error = transform_error_response(error) + raise ValueError(transformed_error) diff --git a/src/services/utils/common_utils.py b/src/services/utils/common_utils.py index 4f6092df..c4e87c38 100644 --- a/src/services/utils/common_utils.py +++ b/src/services/utils/common_utils.py @@ -29,6 +29,21 @@ from src.services.utils.rich_text_support import process_chatbot_response from src.db_services.orchestrator_history_service import OrchestratorHistoryService, orchestrator_collector +def transform_error_response(error): + """ + Transform an error into a user-facing error message. + Returns a friendlier message for known error patterns; otherwise appends the support contact to the original error. 
+ """ + # Handle both string errors and dictionary errors + error_str = str(error) + + # Check for the specific Anthropic response_type error in any format + if ("AsyncMessages.stream() got an unexpected keyword argument" in error_str and + "response_type" in error_str): + return 'Anthropic API does not support {} as JSON SCHEMA. Please update response_type. For more support contact us at support@gtwy.ai' + + return f"{str(error)}. For more support contact us at support@gtwy.ai" + def parse_request_body(request_body): body = request_body.get('body', {}) state = request_body.get('state', {})