Skip to content

Commit cae70be

Browse files
authored
OpenAI conversation: Standardizing API response (#318)
* Passing additional params in failure case as well
* Adding support to pass metadata in fail responses
1 parent 9d371f7 commit cae70be

File tree

2 files changed

+36
-9
lines changed

2 files changed

+36
-9
lines changed

backend/app/api/routes/responses.py

Lines changed: 32 additions & 7 deletions
Original file line numberDiff line numberDiff line change
@@ -102,12 +102,27 @@ def get_file_search_results(response):
102102

103103
def get_additional_data(request: dict) -> dict:
    """Extract additional data from request, excluding specific keys."""
    # Async requests (ResponsesAPIRequest) are recognized by the presence of an
    # "assistant_id" key; anything else is treated as a sync request
    # (ResponsesSyncAPIRequest) and gets the wider exclusion set.
    excluded = (
        {"assistant_id", "callback_url", "response_id", "question"}
        if "assistant_id" in request
        else {
            "model",
            "instructions",
            "vector_store_ids",
            "max_num_results",
            "temperature",
            "response_id",
            "question",
        }
    )
    return {key: value for key, value in request.items() if key not in excluded}
125+
111126

112127
def process_response(
113128
request: ResponsesAPIRequest,
@@ -249,7 +264,11 @@ def process_response(
249264
exc_info=True,
250265
)
251266
tracer.log_error(error_message, response_id=request.response_id)
252-
callback_response = ResponsesAPIResponse.failure_response(error=error_message)
267+
268+
request_dict = request.model_dump()
269+
callback_response = ResponsesAPIResponse.failure_response(
270+
error=error_message, metadata=get_additional_data(request_dict)
271+
)
253272

254273
tracer.flush()
255274

@@ -360,11 +379,13 @@ async def responses_sync(
360379
project_id=project_id,
361380
)
362381
if not credentials or "api_key" not in credentials:
382+
request_dict = request.model_dump()
363383
logger.error(
364384
f"[response_sync] OpenAI API key not configured for org_id={organization_id}, project_id={project_id}"
365385
)
366386
return APIResponse.failure_response(
367-
error="OpenAI API key not configured for this organization."
387+
error="OpenAI API key not configured for this organization.",
388+
metadata=get_additional_data(request_dict),
368389
)
369390

370391
client = OpenAI(api_key=credentials["api_key"])
@@ -457,4 +478,8 @@ async def responses_sync(
457478
)
458479
tracer.log_error(error_message, response_id=request.response_id)
459480
tracer.flush()
460-
return ResponsesAPIResponse.failure_response(error=error_message)
481+
482+
request_dict = request.model_dump()
483+
return ResponsesAPIResponse.failure_response(
484+
error=error_message, metadata=get_additional_data(request_dict)
485+
)

backend/app/utils.py

Lines changed: 4 additions & 2 deletions
Original file line numberDiff line numberDiff line change
@@ -37,13 +37,15 @@ def success_response(
3737
return cls(success=True, data=data, error=None, metadata=metadata)
3838

3939
@classmethod
40-
def failure_response(cls, error: str | list) -> "APIResponse[None]":
40+
def failure_response(
41+
cls, error: str | list, metadata: Optional[Dict[str, Any]] = None
42+
) -> "APIResponse[None]":
4143
if isinstance(error, list): # to handle cases when error is a list of errors
4244
error_message = "\n".join([f"{err['loc']}: {err['msg']}" for err in error])
4345
else:
4446
error_message = error
4547

46-
return cls(success=False, data=None, error=error_message)
48+
return cls(success=False, data=None, error=error_message, metadata=metadata)
4749

4850

4951
@dataclass

0 commit comments

Comments
 (0)