Skip to content

Add Missing Logs Across Platform for Key Events and Actions #235

New issue

Have a question about this project? Sign up for a free GitHub account to open an issue and contact its maintainers and the community.

By clicking “Sign up for GitHub”, you agree to our terms of service and privacy statement. We’ll occasionally send you account related emails.

Already on GitHub? Sign in to your account

Closed
This pull request wants to merge 11 commits (source and target branch names were lost in extraction).
Closed
20 changes: 11 additions & 9 deletions backend/app/api/routes/api_keys.py
Original file line number Diff line number Diff line change
@@ -1,4 +1,4 @@
import uuid
import logging
from fastapi import APIRouter, Depends, HTTPException
from sqlmodel import Session
from app.api.deps import get_db, get_current_active_superuser
Expand All @@ -14,6 +14,7 @@
from app.utils import APIResponse
from app.core.exception_handlers import HTTPException

logger = logging.getLogger(__name__)
router = APIRouter(prefix="/apikeys", tags=["API Keys"])


Expand All @@ -27,17 +28,18 @@ def create_key(
"""
Generate a new API key for the user's organization.
"""
# Validate organization
project = validate_project(session, project_id)

existing_api_key = get_api_key_by_project_user(session, project_id, user_id)
if existing_api_key:
logger.warning(
f"[create_key] API key already exists | project_id={project_id}, user_id={user_id}"
)
raise HTTPException(
status_code=400,
detail="API Key already exists for this user and project.",
)

# Create and return API key
api_key = create_api_key(
session,
organization_id=project.organization_id,
Expand All @@ -57,21 +59,18 @@ def list_keys(
Retrieve all API keys for the given project. Superusers get all keys;
regular users get only their own.
"""
# Validate project
project = validate_project(session=session, project_id=project_id)

if current_user.is_superuser:
# Superuser: fetch all API keys for the project
api_keys = get_api_keys_by_project(session=session, project_id=project_id)
else:
# Regular user: fetch only their own API key
user_api_key = get_api_key_by_project_user(
session=session, project_id=project_id, user_id=current_user.id
)
api_keys = [user_api_key] if user_api_key else []

# Raise an exception if no API keys are found for the project
if not api_keys:
logger.warning(f"[list_keys] No API keys found | project_id={project_id}")
raise HTTPException(
status_code=404,
detail="No API keys found for this project.",
Expand All @@ -91,6 +90,7 @@ def get_key(
"""
api_key = get_api_key(session, api_key_id)
if not api_key:
logger.warning(f"[get_key] API key not found | api_key_id={api_key_id}")
raise HTTPException(404, "API Key does not exist")

return APIResponse.success_response(api_key)
Expand All @@ -106,10 +106,12 @@ def revoke_key(
Soft delete an API key (revoke access).
"""
api_key = get_api_key(session, api_key_id)

if not api_key:
logger.warning(
f"[apikey.revoke] API key not found or already deleted | api_key_id={api_key_id}"
)
raise HTTPException(404, "API key not found or already deleted")

delete_api_key(session, api_key_id)

logger.info(f"[revoke_key] API key revoked | api_key_id={api_key_id}")
return APIResponse.success_response({"message": "API key revoked successfully"})
65 changes: 47 additions & 18 deletions backend/app/api/routes/collections.py
Original file line number Diff line number Diff line change
Expand Up @@ -22,6 +22,7 @@
from app.models.collection import CollectionStatus
from app.utils import APIResponse, load_description

logger = logging.getLogger(__name__)
router = APIRouter(prefix="/collections", tags=["collections"])


Expand Down Expand Up @@ -59,6 +60,9 @@ def model_post_init(self, __context: Any):
self.documents = list(set(self.documents))

def __call__(self, crud: DocumentCrud):
logger.info(
f"[DocumentOptions.call] Starting batch iteration for documents | {{'batch_size': {self.batch_size}, 'total_documents': {len(self.documents)}}}"
)
(start, stop) = (0, self.batch_size)
while True:
view = self.documents[start:stop]
Expand Down Expand Up @@ -130,35 +134,53 @@ def success(self, body):

class SilentCallback(CallbackHandler):
    """No-op callback handler: records outcomes in the log but performs no
    side effects beyond that (no webhook post, no exception)."""

    def fail(self, body):
        # Intentionally swallow the failure; only leave a trace in the log.
        logger.info(
            f"[SilentCallback.fail] Silent callback failure | {{'body': '{body}'}}"
        )

    def success(self, body):
        # Success is likewise only logged, never forwarded anywhere.
        logger.info(
            f"[SilentCallback.success] Silent callback success | {{'body': '{body}'}}"
        )


class WebHookCallback(CallbackHandler):
    """Callback handler that POSTs the final APIResponse to a caller-supplied
    webhook URL, stamping the payload with a status and completion time.

    NOTE(review): assumes `post_callback` performs the HTTP POST and that
    `ResponsePayload.now()` yields the timestamp format callers expect —
    confirm against their definitions elsewhere in the project.
    """

    def __init__(self, url: HttpUrl, payload: ResponsePayload):
        super().__init__(payload)
        self.url = url
        logger.info(
            f"[WebHookCallback.init] Initialized webhook callback | {{'url': '{url}'}}"
        )

    def __call__(self, response: APIResponse, status: str):
        # Renamed local from `time` to `timestamp`: the old name shadowed the
        # stdlib `time` module used elsewhere in this file (time.time()).
        timestamp = ResponsePayload.now()
        payload = replace(self.payload, status=status, time=timestamp)
        response.metadata = asdict(payload)

        logger.info(
            f"[WebHookCallback.call] Posting callback | {{'url': '{self.url}', 'status': '{status}'}}"
        )
        post_callback(self.url, response)

    def fail(self, body):
        # Failures are reported to the webhook with an "incomplete" status.
        logger.error(f"[WebHookCallback.fail] Callback failed | {{'body': '{body}'}}")
        self(APIResponse.failure_response(body), "incomplete")

    def success(self, body):
        # Fixed F541: message has no placeholders, so the f-prefix was dead weight.
        logger.info("[WebHookCallback.success] Callback succeeded")
        self(APIResponse.success_response(body), "complete")


def _backout(crud: OpenAIAssistantCrud, assistant_id: str):
try:
crud.delete(assistant_id)
except OpenAIError:
except OpenAIError as err:
logger.error(
f"[backout] Failed to delete assistant | {{'assistant_id': '{assistant_id}', 'error': '{str(err)}'}}"
)
warnings.warn(
": ".join(
[
Expand Down Expand Up @@ -200,18 +222,10 @@ def do_create_collection(
storage.get_file_size_kb(doc.object_store_url) for doc in flat_docs
]

logging.info(
f"[VectorStore Update] Uploading {len(flat_docs)} documents to vector store {vector_store.id}"
)
list(vector_store_crud.update(vector_store.id, storage, docs))
logging.info(f"[VectorStore Upload] Upload completed")

assistant_options = dict(request.extract_super_type(AssistantOptions))
logging.info(
f"[Assistant Create] Creating assistant with options: {assistant_options}"
)
assistant = assistant_crud.create(vector_store.id, **assistant_options)
logging.info(f"[Assistant Create] Assistant created: {assistant.id}")

collection = collection_crud.read_one(UUID(payload.key))
collection.llm_service_id = assistant.id
Expand All @@ -220,22 +234,21 @@ def do_create_collection(
collection.updated_at = now()

if flat_docs:
logging.info(
f"[DocumentCollection] Linking {len(flat_docs)} documents to collection {collection.id}"
)
DocumentCollectionCrud(session).create(collection, flat_docs)

collection_crud._update(collection)

elapsed = time.time() - start_time
logging.info(
f"Collection created: {collection.id} | Time: {elapsed:.2f}s | "
f"[do_create_collection] Collection created: {collection.id} | Time: {elapsed:.2f}s | "
f"Files: {len(flat_docs)} | Sizes: {file_sizes_kb} KB | Types: {list(file_exts)}"
)
callback.success(collection.model_dump(mode="json"))

except Exception as err:
logging.error(f"[Collection Creation Failed] {err} ({type(err).__name__})")
logger.error(
f"[do_create_collection] Collection Creation Failed | {{'collection_id': '{payload.key}', 'error': '{str(err)}'}}"
)
if "assistant" in locals():
_backout(assistant_crud, assistant.id)
try:
Expand All @@ -244,7 +257,9 @@ def do_create_collection(
collection.updated_at = now()
collection_crud._update(collection)
except Exception as suberr:
logging.warning(f"[Collection Status Update Failed] {suberr}")
logger.warning(
f"[do_create_collection] Failed to update collection status | {{'collection_id': '{payload.key}', 'reason': '{str(suberr)}'}}"
)
callback.fail(str(err))


Expand Down Expand Up @@ -282,6 +297,10 @@ def create_collection(
payload,
)

logger.info(
f"[create_collection] Background task for collection creation scheduled | "
f"{{'collection_id': '{collection.id}'}}"
)
return APIResponse.success_response(data=None, metadata=asdict(payload))


Expand All @@ -301,12 +320,18 @@ def do_delete_collection(
collection = collection_crud.read_one(request.collection_id)
assistant = OpenAIAssistantCrud()
data = collection_crud.delete(collection, assistant)
logger.info(
f"[do_delete_collection] Collection deleted successfully | {{'collection_id': '{collection.id}'}}"
)
callback.success(data.model_dump(mode="json"))
except (ValueError, PermissionError, SQLAlchemyError) as err:
logger.warning(
f"[do_delete_collection] Failed to delete collection | {{'collection_id': '{request.collection_id}', 'error': '{str(err)}'}}"
)
callback.fail(str(err))
except Exception as err:
warnings.warn(
'Unexpected exception "{}": {}'.format(type(err).__name__, err),
logger.error(
f"[do_delete_collection] Unexpected error during deletion | {{'collection_id': '{request.collection_id}', 'error': '{str(err)}', 'error_type': '{type(err).__name__}'}}"
)
callback.fail(str(err))

Expand All @@ -333,6 +358,10 @@ def delete_collection(
payload,
)

logger.info(
f"[delete_collection] Background task for deletion scheduled | "
f"{{'collection_id': '{request.collection_id}'}}"
)
return APIResponse.success_response(data=None, metadata=asdict(payload))


Expand Down
Loading
Loading