Revamped logging

This commit is contained in:
Alishahryar1 2026-02-14 18:55:33 -08:00
parent 64e5b10612
commit ba344f26c9
9 changed files with 139 additions and 30 deletions

View file

@@ -11,33 +11,19 @@ from contextlib import asynccontextmanager
from fastapi import FastAPI, Request
from fastapi.responses import JSONResponse
from config.settings import get_settings
from config.logging_config import configure_logging
# Configure logging first (before any module logs)
_settings = get_settings()
configure_logging(_settings.log_file)
from .routes import router
from .dependencies import cleanup_provider
from providers.exceptions import ProviderError
from config.settings import get_settings
# Configure logging (atomic - only on true fresh start)
_settings = get_settings()
LOG_FILE = _settings.log_file
# Check if logging is already configured (e.g., hot reload)
# If handlers exist, skip setup to avoid clearing logs mid-session
if not logging.root.handlers:
# Fresh start - clear log file and configure
open(LOG_FILE, "w", encoding="utf-8").close()
logging.basicConfig(
level=logging.DEBUG,
format="%(asctime)s - %(levelname)s - %(message)s",
handlers=[logging.FileHandler(LOG_FILE, encoding="utf-8", mode="a")],
)
logger = logging.getLogger(__name__)
# Suppress noisy uvicorn logs
logging.getLogger("uvicorn").setLevel(logging.WARNING)
logging.getLogger("uvicorn.access").setLevel(logging.WARNING)
logging.getLogger("uvicorn.error").setLevel(logging.WARNING)
_SHUTDOWN_TIMEOUT_S = 5.0

View file

@@ -203,7 +203,7 @@ class CLISession:
yield event
except json.JSONDecodeError:
logger.debug(f"Non-JSON output: {line_str[:100]}")
logger.debug(f"Non-JSON output: {line_str}")
yield {"type": "raw", "content": line_str}
def _extract_session_id(self, event: Any) -> Optional[str]:

62
config/logging_config.py Normal file
View file

@@ -0,0 +1,62 @@
"""Loguru-based structured logging configuration.
All logs are written to server.log as JSON lines for full traceability.
Stdlib logging is intercepted and funneled to loguru.
"""
import logging
from loguru import logger
_configured = False
class InterceptHandler(logging.Handler):
    """Redirect stdlib logging to loguru.

    Installed as the sole root-logger handler so every record emitted via the
    standard ``logging`` module is re-logged through loguru and lands in the
    same structured sink.
    """

    def emit(self, record: logging.LogRecord) -> None:
        # Map the stdlib level name to loguru's level of the same name; fall
        # back to the numeric level for custom levels loguru doesn't know.
        try:
            level = logger.level(record.levelname).name
        except ValueError:
            level = record.levelno
        # Walk up the stack past the logging module's own frames so loguru
        # attributes the message to the original caller, not this handler.
        # Starting depth of 2 accounts for this emit() and loguru's opt() hop.
        frame, depth = logging.currentframe(), 2
        while frame is not None and frame.f_code.co_filename == logging.__file__:
            frame = frame.f_back
            depth += 1
        logger.opt(depth=depth, exception=record.exc_info).log(
            level, record.getMessage()
        )
def configure_logging(log_file: str, *, force: bool = False) -> None:
    """Configure loguru with JSON output to *log_file* and intercept stdlib logging.

    Idempotent: skips if already configured (e.g. hot reload).
    Use ``force=True`` to reconfigure (e.g. in tests with a different log path).

    Args:
        log_file: Path of the file that receives JSON-line log records.
        force: Reconfigure even when logging was already set up.
    """
    global _configured
    if _configured and not force:
        return

    # Remove default loguru handler (writes to stderr).
    logger.remove()

    # Add file sink: JSON lines, DEBUG level, full traceability.
    # mode="w" truncates the file at (re)configuration time, replacing the
    # previous separate open()/close() truncate followed by an append sink.
    logger.add(
        log_file,
        level="DEBUG",
        serialize=True,
        encoding="utf-8",
        mode="w",
    )

    # Intercept stdlib logging: route all root logger output to loguru.
    logging.root.handlers = [InterceptHandler()]
    logging.root.setLevel(logging.DEBUG)

    # Mark configured only after setup succeeded, so a failed attempt
    # (e.g. unwritable log path) can be retried on the next call instead of
    # being silently skipped forever.
    _configured = True

View file

@@ -143,7 +143,7 @@ def parse_cli_event(event: Any) -> List[Dict]:
if etype == "error":
err = event.get("error")
msg = err.get("message") if isinstance(err, dict) else str(err)
logger.info(f"CLI_PARSER: Parsed error event: {msg[:100]}")
logger.info(f"CLI_PARSER: Parsed error event: {msg}")
return [{"type": "error", "message": msg}]
elif etype == "exit":
code = event.get("code", 0)
@@ -154,7 +154,7 @@ def parse_cli_event(event: Any) -> List[Dict]:
else:
# Non-zero exit is an error
error_msg = stderr if stderr else f"Process exited with code {code}"
logger.warning(f"CLI_PARSER: Error exit (code={code}): {error_msg[:100]}")
logger.warning(f"CLI_PARSER: Error exit (code={code}): {error_msg}")
return [
{"type": "error", "message": error_msg},
{"type": "complete", "status": "failed"},

View file

@@ -53,7 +53,10 @@ TRANSCRIPT_EVENT_TYPES = (
# Event types -> (emoji, label) for status updates
_EVENT_STATUS_MAP = {
("thinking_start", "thinking_delta", "thinking_chunk"): ("🧠", "Claude is thinking..."),
("thinking_start", "thinking_delta", "thinking_chunk"): (
"🧠",
"Claude is thinking...",
),
("text_start", "text_delta", "text_chunk"): ("🧠", "Claude is working..."),
("tool_result",): ("", "Executing tools..."),
}
@@ -341,7 +344,7 @@ class ClaudeMessageHandler:
self.session_store.save_tree(tree.root_id, tree.to_dict())
elif ptype == "error":
error_msg = parsed.get("message", "Unknown error")
logger.error(f"HANDLER: Error event received: {error_msg[:200]}")
logger.error(f"HANDLER: Error event received: {error_msg}")
logger.info("HANDLER: Updating UI with error status")
await update_ui(format_status("", "Error"), force=True)
if tree:

View file

@@ -16,6 +16,7 @@ dependencies = [
"python-telegram-bot>=21.0",
"pydantic-settings>=2.12.0",
"openai>=2.16.0",
"loguru>=0.7.0",
]
[dependency-groups]

View file

@@ -11,8 +11,6 @@ os.environ["MESSAGING_RATE_WINDOW"] = "0.5"
from messaging.limiter import MessagingRateLimiter
# Configure logging for tests
logging.basicConfig(level=logging.INFO)
logger = logging.getLogger(__name__)

View file

@@ -0,0 +1,61 @@
"""Tests for config/logging_config.py."""
import json
import logging
from pathlib import Path
import pytest
from config.logging_config import configure_logging
def test_configure_logging_writes_json_to_file(tmp_path):
    """configure_logging writes JSON lines to the specified file."""
    log_path = tmp_path / "test.log"
    configure_logging(str(log_path), force=True)

    # Emit a record through the stdlib logger; the intercept handler
    # funnels it into the loguru file sink.
    logging.getLogger("test.module").info("Test message for JSON")

    # loguru may buffer - drain pending messages before reading the file.
    from loguru import logger as loguru_logger
    loguru_logger.complete()

    raw_lines = [
        line
        for line in log_path.read_text(encoding="utf-8").strip().split("\n")
        if line
    ]
    assert raw_lines

    # Every emitted line must parse as JSON carrying a known payload key.
    for raw in raw_lines:
        record = json.loads(raw)
        assert "text" in record or "message" in record or "record" in record
def test_configure_logging_idempotent(tmp_path):
    """configure_logging is idempotent - safe to call twice with force."""
    target = str(tmp_path / "test.log")
    # Neither the first nor the repeated forced call may raise.
    for _ in range(2):
        configure_logging(target, force=True)
    logging.getLogger("test.idempotent").info("After second configure")
def test_configure_logging_skips_when_already_configured(tmp_path):
    """Without force, second call is a no-op (avoids reconfig on hot reload)."""
    primary = tmp_path / "test.log"
    configure_logging(str(primary), force=True)

    # A second, non-forced call must be silently ignored: no exception, and
    # the active sink stays pointed at the first file.
    configure_logging(str(tmp_path / "other.log"), force=False)

    logging.getLogger("test.skip").info("Still goes to first file")
    from loguru import logger as loguru_logger
    loguru_logger.complete()

    assert primary.exists()
    assert "Still goes to first file" in primary.read_text(encoding="utf-8")

View file

@@ -7,8 +7,6 @@ import logging
from providers.rate_limit import GlobalRateLimiter
# Configure logging for tests
logging.basicConfig(level=logging.INFO)
logger = logging.getLogger(__name__)