
Commit 9d371f7

Added logs for Threads and Response Endpoint (#311)
* logs for threads and response endpoint
* refactor logs level
* Add logs for openAI call
* fix logging
* refactor
1 parent 328ece7 commit 9d371f7
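Every log line added in this commit follows the same shape: an f-string that opens with the enclosing function's name in square brackets, followed by key=value pairs, with identifiers passed through mask_string before they are interpolated. A minimal sketch of that shape (hypothetical handler and values, not project code):

import logging

logger = logging.getLogger(__name__)


def handle_request(assistant_id: str, project_id: int) -> None:
    # Hypothetical handler; only the message shape matters:
    # "[function_name] <event>: key=value, ...". In the real routes the IDs are
    # additionally wrapped in mask_string(...) from app.utils before logging.
    logger.info(
        f"[handle_request] Starting processing for assistant_id={assistant_id}, project_id={project_id}"
    )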

File tree

3 files changed: +98 -24 lines changed

* backend/app/api/routes/responses.py
* backend/app/api/routes/threads.py
* backend/app/crud/thread_results.py


backend/app/api/routes/responses.py

Lines changed: 19 additions & 13 deletions
@@ -120,7 +120,7 @@ def process_response(
 ):
     """Process a response and send callback with results, with Langfuse tracing."""
     logger.info(
-        f"Starting generating response for assistant_id={mask_string(request.assistant_id)}, project_id={project_id}"
+        f"[process_response] Starting generating response for assistant_id={mask_string(request.assistant_id)}, project_id={project_id}"
     )

     tracer.start_trace(
@@ -171,7 +171,7 @@ def process_response(
         response_chunks = get_file_search_results(response)

         logger.info(
-            f"Successfully generated response: response_id={response.id}, assistant={mask_string(request.assistant_id)}, project_id={project_id}"
+            f"[process_response] Successfully generated response: response_id={response.id}, assistant={mask_string(request.assistant_id)}, project_id={project_id}"
         )

         tracer.end_generation(
@@ -245,7 +245,8 @@ def process_response(
     except openai.OpenAIError as e:
         error_message = handle_openai_error(e)
         logger.error(
-            f"OpenAI API error during response processing: {error_message}, project_id={project_id}"
+            f"[process_response] OpenAI API error during response processing: {error_message}, project_id={project_id}",
+            exc_info=True,
         )
         tracer.log_error(error_message, response_id=request.response_id)
         callback_response = ResponsesAPIResponse.failure_response(error=error_message)
@@ -254,11 +255,11 @@ def process_response(

     if request.callback_url:
         logger.info(
-            f"Sending callback to URL: {request.callback_url}, assistant={mask_string(request.assistant_id)}, project_id={project_id}"
+            f"[process_response] Sending callback to URL: {request.callback_url}, assistant={mask_string(request.assistant_id)}, project_id={project_id}"
         )
         send_callback(request.callback_url, callback_response.model_dump())
         logger.info(
-            f"Callback sent successfully, assistant={mask_string(request.assistant_id)}, project_id={project_id}"
+            f"[process_response] Callback sent successfully, assistant={mask_string(request.assistant_id)}, project_id={project_id}"
         )


@@ -276,14 +277,10 @@ async def responses(
         _current_user.organization_id,
     )

-    logger.info(
-        f"Processing response request for assistant_id={mask_string(request.assistant_id)}, project_id={project_id}, organization_id={organization_id}"
-    )
-
     assistant = get_assistant_by_id(_session, request.assistant_id, project_id)
     if not assistant:
         logger.warning(
-            f"Assistant not found: assistant_id={mask_string(request.assistant_id)}, project_id={project_id}, organization_id={organization_id}",
+            f"[response] Assistant not found: assistant_id={mask_string(request.assistant_id)}, project_id={project_id}, organization_id={organization_id}",
         )
         raise HTTPException(status_code=404, detail="Assistant not found or not active")

@@ -295,7 +292,7 @@ async def responses(
     )
     if not credentials or "api_key" not in credentials:
         logger.error(
-            f"OpenAI API key not configured for org_id={organization_id}, project_id={project_id}"
+            f"[response] OpenAI API key not configured for org_id={organization_id}, project_id={project_id}"
         )
         return {
             "success": False,
@@ -329,7 +326,7 @@ async def responses(
     )

     logger.info(
-        f"Background task scheduled for response processing: assistant_id={mask_string(request.assistant_id)}, project_id={project_id}, organization_id={organization_id}"
+        f"[response] Background task scheduled for response processing: assistant_id={mask_string(request.assistant_id)}, project_id={project_id}, organization_id={organization_id}"
     )

     return {
@@ -363,6 +360,9 @@ async def responses_sync(
         project_id=project_id,
     )
     if not credentials or "api_key" not in credentials:
+        logger.error(
+            f"[response_sync] OpenAI API key not configured for org_id={organization_id}, project_id={project_id}"
+        )
         return APIResponse.failure_response(
             error="OpenAI API key not configured for this organization."
         )
@@ -432,7 +432,9 @@ async def responses_sync(
         )

         tracer.flush()
-
+        logger.info(
+            f"[response_sync] Successfully generated response: response_id={response.id}, project_id={project_id}"
+        )
         return ResponsesAPIResponse.success_response(
             data=_APIResponse(
                 status="success",
@@ -449,6 +451,10 @@ async def responses_sync(
         )
     except openai.OpenAIError as e:
         error_message = handle_openai_error(e)
+        logger.error(
+            f"[response_sync] OpenAI API error during response processing: {error_message}, project_id={project_id}",
+            exc_info=True,
+        )
         tracer.log_error(error_message, response_id=request.response_id)
         tracer.flush()
         return ResponsesAPIResponse.failure_response(error=error_message)
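The error branches in process_response and responses_sync now pass exc_info=True. With Python's standard logging module, that flag attaches the currently handled exception's traceback to the record, so the stack is preserved even though the exception is swallowed and converted into a failure response. A standalone illustration (not project code):

import logging

logging.basicConfig(level=logging.ERROR)
logger = logging.getLogger("demo")

try:
    raise RuntimeError("simulated OpenAI failure")
except RuntimeError as e:
    # exc_info=True appends the active exception's formatted traceback after the message.
    logger.error(f"[demo] OpenAI API error during response processing: {e}", exc_info=True)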

backend/app/api/routes/threads.py

Lines changed: 69 additions & 10 deletions
@@ -13,7 +13,7 @@
 from app.core import logging, settings
 from app.models import UserOrganization, OpenAIThreadCreate, UserProjectOrg
 from app.crud import upsert_thread_result, get_thread_result
-from app.utils import APIResponse
+from app.utils import APIResponse, mask_string
 from app.crud.credentials import get_provider_credential
 from app.core.util import configure_openai
 from app.core.langfuse.langfuse import LangfuseTracer
@@ -42,9 +42,10 @@ def send_callback(callback_url: str, data: dict):
         # session.verify = False
         response = session.post(callback_url, json=data)
         response.raise_for_status()
+        logger.info(f"[send_callback] Callback sent successfully to {callback_url}")
         return True
     except requests.RequestException as e:
-        logger.error(f"Callback failed: {str(e)}", exc_info=True)
+        logger.error(f"[send_callback] Callback failed: {str(e)}", exc_info=True)
         return False


@@ -65,12 +66,19 @@ def validate_thread(client: OpenAI, thread_id: str) -> tuple[bool, str]:
         if runs.data and len(runs.data) > 0:
             latest_run = runs.data[0]
             if latest_run.status in ["queued", "in_progress", "requires_action"]:
+                logger.error(
+                    f"[validate_thread] Thread ID {mask_string(thread_id)} is currently {latest_run.status}."
+                )
                 return (
                     False,
                     f"There is an active run on this thread (status: {latest_run.status}). Please wait for it to complete.",
                 )
         return True, None
-    except openai.OpenAIError:
+    except openai.OpenAIError as e:
+        logger.error(
+            f"[validate_thread] Failed to validate thread ID {mask_string(thread_id)}: {str(e)}",
+            exc_info=True,
+        )
         return False, f"Invalid thread ID provided {thread_id}"


@@ -82,8 +90,15 @@ def setup_thread(client: OpenAI, request: dict) -> tuple[bool, str]:
             client.beta.threads.messages.create(
                 thread_id=thread_id, role="user", content=request["question"]
             )
+            logger.info(
+                f"[setup_thread] Added message to existing thread {mask_string(thread_id)}"
+            )
             return True, None
         except openai.OpenAIError as e:
+            logger.error(
+                f"[setup_thread] Failed to add message to existing thread {mask_string(thread_id)}: {str(e)}",
+                exc_info=True,
+            )
             return False, handle_openai_error(e)
     else:
         try:
@@ -92,8 +107,14 @@ def setup_thread(client: OpenAI, request: dict) -> tuple[bool, str]:
                 thread_id=thread.id, role="user", content=request["question"]
             )
             request["thread_id"] = thread.id
+            logger.info(
+                f"[setup_thread] Created new thread with ID: {mask_string(thread.id)}"
+            )
             return True, None
         except openai.OpenAIError as e:
+            logger.error(
+                f"[setup_thread] Failed to create new thread: {str(e)}", exc_info=True
+            )
             return False, handle_openai_error(e)


@@ -156,6 +177,9 @@ def process_run_core(
     )

     try:
+        logger.info(
+            f"[process_run_core] Starting run for thread ID: {mask_string(request.get('thread_id'))} with assistant ID: {mask_string(request.get('assistant_id'))}"
+        )
         run = client.beta.threads.runs.create_and_poll(
             thread_id=request["thread_id"],
             assistant_id=request["assistant_id"],
@@ -189,15 +213,25 @@ def process_run_core(
                 "model": run.model,
             }
             request = {**request, **{"diagnostics": diagnostics}}
+            logger.info(
+                f"[process_run_core] Run completed successfully for thread ID: {mask_string(request.get('thread_id'))}"
+            )
             return create_success_response(request, message).model_dump(), None
         else:
             error_msg = f"Run failed with status: {run.status}"
+            logger.error(
+                f"[process_run_core] Run failed with error: {run.last_error} for thread ID: {mask_string(request.get('thread_id'))}"
+            )
             tracer.log_error(error_msg)
             return APIResponse.failure_response(error=error_msg).model_dump(), error_msg

     except openai.OpenAIError as e:
         error_msg = handle_openai_error(e)
         tracer.log_error(error_msg)
+        logger.error(
+            f"[process_run_core] OpenAI error: {error_msg} for thread ID: {mask_string(request.get('thread_id'))}",
+            exc_info=True,
+        )
         return APIResponse.failure_response(error=error_msg).model_dump(), error_msg
     finally:
         tracer.flush()
@@ -214,9 +248,12 @@ def poll_run_and_prepare_response(request: dict, client: OpenAI, db: Session):
     thread_id = request["thread_id"]
     prompt = request["question"]

+    logger.info(
+        f"[poll_run_and_prepare_response] Starting run for thread ID: {mask_string(thread_id)}"
+    )
+
     try:
         run = run_and_poll_thread(client, thread_id, request["assistant_id"])
-
         status = run.status or "unknown"
         response = None
         error = None
@@ -225,11 +262,18 @@ def poll_run_and_prepare_response(request: dict, client: OpenAI, db: Session):
             response = extract_response_from_thread(
                 client, thread_id, request.get("remove_citation", False)
             )
+            logger.info(
+                f"[poll_run_and_prepare_response] Successfully executed run for thread ID: {mask_string(thread_id)}"
+            )

     except openai.OpenAIError as e:
         status = "failed"
         error = str(e)
         response = None
+        logger.error(
+            f"[poll_run_and_prepare_response] Run failed for thread ID {mask_string(thread_id)}: {error}",
+            exc_info=True,
+        )

     upsert_thread_result(
         db,
@@ -259,6 +303,9 @@ async def threads(
     )
     client, success = configure_openai(credentials)
     if not success:
+        logger.error(
+            f"[threads] OpenAI API key not configured for this organization. | organization_id: {_current_user.organization_id}, project_id: {request.get('project_id')}"
+        )
         return APIResponse.failure_response(
             error="OpenAI API key not configured for this organization."
         )
@@ -304,7 +351,9 @@ async def threads(
     )
     # Schedule background task
     background_tasks.add_task(process_run, request, client, tracer)
-
+    logger.info(
+        f"[threads] Background task scheduled for thread ID: {mask_string(request.get('thread_id'))} | organization_id: {_current_user.organization_id}, project_id: {request.get('project_id')}"
+    )
     return initial_response


@@ -325,6 +374,9 @@ async def threads_sync(
     # Configure OpenAI client
     client, success = configure_openai(credentials)
     if not success:
+        logger.error(
+            f"[threads_sync] OpenAI API key not configured for this organization. | organization_id: {_current_user.organization_id}, project_id: {request.get('project_id')}"
+        )
         return APIResponse.failure_response(
             error="OpenAI API key not configured for this organization."
         )
@@ -341,6 +393,7 @@ async def threads_sync(
         is_valid, error_message = validate_thread(client, request.get("thread_id"))
         if not is_valid:
             raise Exception(error_message)
+
         # Setup thread
         is_success, error_message = setup_thread(client, request)
         if not is_success:
@@ -360,11 +413,8 @@ async def threads_sync(
         metadata={"thread_id": request.get("thread_id")},
     )

-    try:
-        response, error_message = process_run_core(request, client, tracer)
-        return response
-    finally:
-        tracer.flush()
+    response, error_message = process_run_core(request, client, tracer)
+    return response


 @router.post("/threads/start")
@@ -389,6 +439,9 @@ async def start_thread(
     # Configure OpenAI client
     client, success = configure_openai(credentials)
     if not success:
+        logger.error(
+            f"[start_thread] OpenAI API key not configured for this organization. | project_id: {_current_user.project_id}"
+        )
         return APIResponse.failure_response(
             error="OpenAI API key not configured for this organization."
         )
@@ -412,6 +465,9 @@ async def start_thread(

     background_tasks.add_task(poll_run_and_prepare_response, request, client, db)

+    logger.info(
+        f"[start_thread] Background task scheduled to process response for thread ID: {mask_string(thread_id)} | project_id: {_current_user.project_id}"
+    )
     return APIResponse.success_response(
         data={
             "thread_id": thread_id,
@@ -434,6 +490,9 @@ async def get_thread(
     result = get_thread_result(db, thread_id)

     if not result:
+        logger.error(
+            f"[get_thread] Thread result not found for ID: {mask_string(thread_id)} | org_id: {_current_user.organization_id}"
+        )
         raise HTTPException(404, "thread not found")

     status = result.status or ("success" if result.response else "processing")
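threads.py now imports mask_string from app.utils so thread and assistant IDs never reach the log verbatim. The helper's implementation is not part of this diff; the sketch below is only a guess at what such a masker typically does (keep the trailing characters, hide the rest) and may not match the project's version:

def mask_string(value: str | None, visible: int = 4) -> str:
    """Hypothetical masker: expose only the last few characters of an identifier."""
    if not value:
        return "<empty>"
    if len(value) <= visible:
        return "*" * len(value)
    return "*" * (len(value) - visible) + value[-visible:]


# e.g. mask_string("thread_abc123XYZ") -> "************3XYZ"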

backend/app/crud/thread_results.py

Lines changed: 10 additions & 1 deletion
@@ -1,6 +1,10 @@
+import logging
 from sqlmodel import Session, select
 from datetime import datetime
 from app.models import OpenAIThreadCreate, OpenAI_Thread
+from app.utils import mask_string
+
+logger = logging.getLogger(__name__)


 def upsert_thread_result(session: Session, data: OpenAIThreadCreate):
@@ -13,10 +17,15 @@ def upsert_thread_result(session: Session, data: OpenAIThreadCreate):
         existing.status = data.status
         existing.error = data.error
         existing.updated_at = datetime.utcnow()
+        logger.info(
+            f"[upsert_thread_result] Updated existing thread result in the db with ID: {mask_string(data.thread_id)}"
+        )
     else:
         new_thread = OpenAI_Thread(**data.dict())
         session.add(new_thread)
-
+        logger.info(
+            f"[upsert_thread_result] Created new thread result in the db with ID: {mask_string(new_thread.thread_id)}"
+        )
     session.commit()
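For context, upsert_thread_result is the helper that poll_run_and_prepare_response in threads.py calls after each run; with this commit both its update and insert branches log a masked thread ID. A hedged usage sketch — the OpenAIThreadCreate fields shown (thread_id, status, error) are inferred from the diff, and the real model may require more (e.g. the response text):

from sqlmodel import Session

from app.crud import upsert_thread_result
from app.models import OpenAIThreadCreate


def record_outcome(session: Session, thread_id: str, status: str, error: str | None) -> None:
    # The first call inserts a row ("Created new thread result ..."); later calls
    # with the same thread_id update it in place ("Updated existing thread result ...").
    upsert_thread_result(
        session,
        OpenAIThreadCreate(thread_id=thread_id, status=status, error=error),
    )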
