-
Notifications
You must be signed in to change notification settings - Fork 1
Feature/auth and deployment polish #13
New issue
Have a question about this project? Sign up for a free GitHub account to open an issue and contact its maintainers and the community.
By clicking “Sign up for GitHub”, you agree to our terms of service and privacy statement. We’ll occasionally send you account related emails.
Already on GitHub? Sign in to your account
Changes from all commits
00506cd
9e9b893
4ad61c6
021855b
a64efd7
c8c06f1
File filter
Filter by extension
Conversations
Jump to
Diff view
Diff view
There are no files selected for viewing
| Original file line number | Diff line number | Diff line change |
|---|---|---|
|
|
@@ -23,6 +23,8 @@ | |
| from uuid import uuid4 | ||
| from fastapi import APIRouter, HTTPException, Header, Query, Cookie, Path, UploadFile, File | ||
| from app.exceptions import InvalidFilterError | ||
| from pydantic import BaseModel | ||
| from typing import List, Dict, Any, Optional | ||
|
|
||
| from app.models import ( | ||
| TableDataRequest, | ||
|
|
@@ -63,6 +65,7 @@ | |
| ) | ||
| from app.utils.async_utils import run_sync_in_thread | ||
| from app.utils.request_utils import TableRequestProcessor | ||
| from app.services.logger import get_memory_handler | ||
| from app.config import settings | ||
| from app.config_constants import MAX_LIMIT, DEFAULT_LIMIT | ||
| from app.utils.cache import load_cache_metadata, save_cache_metadata | ||
|
|
@@ -219,6 +222,59 @@ async def health_check(): | |
| raise HTTPException(status_code=500, detail=str(e)) | ||
|
|
||
|
|
||
| # ============================================================================= | ||
| # SYSTEM ENDPOINTS | ||
| # ============================================================================= | ||
|
|
||
class LogEntry(BaseModel):
    """Schema for one in-memory log record returned by GET /system/logs."""

    # ISO-8601 creation time of the record (string, sortable lexicographically).
    timestamp: str
    # Lower-cased level name, e.g. "info" or "error".
    level: str
    # Formatted log message text.
    message: str
    # Origin tag; the memory handler stamps records as "backend".
    source: str
    # Name of the logger that emitted the record.
    logger: str
|
|
||
# Maps accepted level names to stdlib logging severities; built once at import
# time instead of per request.
_LOG_LEVEL_SEVERITY = {
    "debug": 10,
    "info": 20,
    "warning": 30,
    "warn": 30,
    "error": 40,
    "critical": 50,
}


@router.get(
    "/system/logs",
    response_model=List[LogEntry],
    tags=["General"],
    summary="Get recent system logs",
)
async def get_system_logs(
    limit: int = Query(100, ge=1, le=1000, description="Number of logs to return"),
    level: str | None = Query(None, description="Minimum log level (debug, info, warn, error)"),
    authorization: str | None = Header(None, description="KBase authentication token")
):
    """
    Retrieve recent system logs from the in-memory buffer.

    Args:
        limit: Maximum number of entries to return (default 100, max 1000).
        level: Optional minimum severity; entries below it are dropped.
            Unknown level names disable filtering (permissive, as before).
        authorization: Accepted for API symmetry but not validated here —
            the endpoint is intentionally open for dev/debug use.

    Returns:
        Up to ``limit`` log entries, newest first.
    """
    handler = get_memory_handler()
    # Snapshot the handler's deque so sorting/filtering works on a stable list.
    logs = list(handler.log_buffer)

    # Newest first; timestamps are ISO-8601 strings, so string order is
    # chronological order.
    logs.sort(key=lambda entry: entry.get("timestamp", ""), reverse=True)

    if level:
        # Unknown names map to 0, which filters nothing; entries with a
        # missing/unknown level default to "info" severity.
        min_severity = _LOG_LEVEL_SEVERITY.get(level.lower(), 0)
        logs = [
            entry
            for entry in logs
            if _LOG_LEVEL_SEVERITY.get(entry.get("level", "info").lower(), 20) >= min_severity
        ]

    return logs[:limit]
|
|
||
|
Comment on lines
+236
to
+277
|
||
|
|
||
| # ============================================================================= | ||
| # FILE UPLOAD ENDPOINTS | ||
|
|
||
| Original file line number | Diff line number | Diff line change | ||||||||||||||||||
|---|---|---|---|---|---|---|---|---|---|---|---|---|---|---|---|---|---|---|---|---|
| @@ -0,0 +1,65 @@ | ||||||||||||||||||||
| import logging | ||||||||||||||||||||
| import collections | ||||||||||||||||||||
| from datetime import datetime | ||||||||||||||||||||
| from typing import List, Dict, Any | ||||||||||||||||||||
|
|
||||||||||||||||||||
| # Maximum number of logs to keep in memory | ||||||||||||||||||||
| MAX_LOG_ENTRIES = 1000 | ||||||||||||||||||||
|
|
||||||||||||||||||||
| class MemoryLogHandler(logging.Handler): | ||||||||||||||||||||
| """ | ||||||||||||||||||||
| Custom logging handler that stores log records in memory. | ||||||||||||||||||||
| Useful for exposing logs via API. | ||||||||||||||||||||
| """ | ||||||||||||||||||||
| def __init__(self, capacity=MAX_LOG_ENTRIES): | ||||||||||||||||||||
| super().__init__() | ||||||||||||||||||||
| self.log_buffer = collections.deque(maxlen=capacity) | ||||||||||||||||||||
|
|
||||||||||||||||||||
| def emit(self, record): | ||||||||||||||||||||
| try: | ||||||||||||||||||||
| log_entry = self.format(record) | ||||||||||||||||||||
| self.log_buffer.append({ | ||||||||||||||||||||
| "timestamp": datetime.fromtimestamp(record.created).isoformat(), | ||||||||||||||||||||
| "level": record.levelname.lower(), | ||||||||||||||||||||
| "message": log_entry, | ||||||||||||||||||||
| "source": "backend", | ||||||||||||||||||||
| "logger": record.name | ||||||||||||||||||||
| }) | ||||||||||||||||||||
| except Exception as e: | ||||||||||||||||||||
| # Log the error to stderr so it's visible during development/debugging | ||||||||||||||||||||
| import sys | ||||||||||||||||||||
| print(f"MemoryLogHandler.emit() failed: {e}", file=sys.stderr) | ||||||||||||||||||||
| self.handleError(record) | ||||||||||||||||||||
|
|
||||||||||||||||||||
| def get_logs(self, limit: int = 100, level: str = None) -> List[Dict[str, Any]]: | ||||||||||||||||||||
| """ | ||||||||||||||||||||
| Retrieve logs from buffer with optional filtering. | ||||||||||||||||||||
| """ | ||||||||||||||||||||
| logs = list(self.log_buffer) | ||||||||||||||||||||
|
|
||||||||||||||||||||
| if level: | ||||||||||||||||||||
| # Normalize level; actual filtering can be implemented here later. | ||||||||||||||||||||
| level = level.lower() | ||||||||||||||||||||
| # TODO: Implement level-based filtering (e.g., min level severity) | ||||||||||||||||||||
| # For now, return all logs and let clients filter | ||||||||||||||||||||
|
|
||||||||||||||||||||
| # Return most recent first | ||||||||||||||||||||
| return sorted(logs, key=lambda x: x['timestamp'], reverse=True)[:limit] | ||||||||||||||||||||
|
Comment on lines
+34
to
+47
|
||||||||||||||||||||
|
|
||||||||||||||||||||
# Process-wide singleton handler; created at import time so every module
# that logs before setup still lands in the same buffer.
memory_handler = MemoryLogHandler()
# Message-only format: timestamp/level/logger are stored as separate fields.
memory_handler.setFormatter(logging.Formatter("%(message)s"))


def get_memory_handler() -> MemoryLogHandler:
    """Return the module-level MemoryLogHandler singleton."""
    return memory_handler
|
|
||||||||||||||||||||
| def setup_logging(): | ||||||||||||||||||||
| """ | ||||||||||||||||||||
| Configure root logger to use memory handler. | ||||||||||||||||||||
| """ | ||||||||||||||||||||
| root_logger = logging.getLogger() | ||||||||||||||||||||
| # Add memory handler if not already present | ||||||||||||||||||||
| if not any(isinstance(h, MemoryLogHandler) for h in root_logger.handlers): | ||||||||||||||||||||
|
Comment on lines
+61
to
+62
|
||||||||||||||||||||
| # Add memory handler if not already present | |
| if not any(isinstance(h, MemoryLogHandler) for h in root_logger.handlers): | |
| # Add memory handler if not already present (handle reloads/imports robustly) | |
| if not any( | |
| isinstance(h, logging.Handler) | |
| and h.__class__.__name__ == "MemoryLogHandler" | |
| and h.__class__.__module__ == __name__ | |
| for h in root_logger.handlers | |
| ): |
There was a problem hiding this comment.
Choose a reason for hiding this comment
The reason will be displayed to describe this comment to others. Learn more.
Import of 'Dict' is not used.
Import of 'Any' is not used.
Import of 'Optional' is not used.