Files
waiting-system/routers/logs.py
Jun-dev f699a29a85 Add waiting system application files
- Add main application files (main.py, models.py, schemas.py, etc.)
- Add routers for all features (waiting, attendance, members, etc.)
- Add HTML templates for admin and user interfaces
- Add migration scripts and utility files
- Add Docker configuration
- Add documentation files
- Add .gitignore to exclude database and cache files

🤖 Generated with [Claude Code](https://claude.com/claude-code)

Co-Authored-By: Claude Sonnet 4.5 <noreply@anthropic.com>
2025-12-14 00:29:39 +09:00

71 lines
2.2 KiB
Python

from fastapi import APIRouter, Depends, HTTPException, Query, Request
from fastapi.responses import HTMLResponse
from fastapi.templating import Jinja2Templates
from typing import List, Optional
import os
import json
# Router for the log-viewing endpoints; everything here is mounted under /logs.
router = APIRouter(prefix="/logs", tags=["System Logs"])
# Jinja2 loader for the HTML dashboard page served by /logs/view.
templates = Jinja2Templates(directory="templates")
# Structured application log, one JSON object per line (JSON-lines format).
LOG_FILE_PATH = "logs/system.json.log"
@router.get("/view", response_class=HTMLResponse)
async def view_logs_page(request: Request):
    """Render the log-analysis dashboard page (HTML UI).

    The actual log data is fetched client-side from the /logs/api endpoint.
    """
    context = {"request": request}
    return templates.TemplateResponse("log_viewer.html", context)
@router.get("/api")
async def get_logs_api(
    limit: int = 100,
    level: Optional[str] = None,
    keyword: Optional[str] = None
):
    """
    API to fetch parsed logs from system.json.log

    Args:
        limit: Maximum number of entries to return (newest first).
            Values <= 0 return an empty list.
        level: If given, keep only entries whose "level" field equals this
            value (compared case-insensitively, uppercased).
        keyword: If given, keep only entries where the keyword occurs
            (case-insensitively) in any of the entry's values.

    Returns:
        {"logs": [...]}: parsed JSON log entries, newest first.

    Raises:
        HTTPException: 500 on unexpected file-read errors.
    """
    if not os.path.exists(LOG_FILE_PATH):
        return {"logs": []}
    if limit <= 0:
        # BUG FIX: previously limit=0 (or negative) still returned one entry,
        # because the limit was only checked after the first append.
        return {"logs": []}

    # Hoist filter normalization out of the per-line loop.
    level_filter = level.upper() if level else None
    keyword_filter = keyword.lower() if keyword else None

    logs = []
    # Reading the whole file and iterating in reverse is acceptable for a
    # small local log (< ~10MB). For larger files, read backwards / `tail`.
    try:
        with open(LOG_FILE_PATH, 'r', encoding='utf-8') as f:
            lines = f.readlines()
        for line in reversed(lines):  # newest entries first
            if not line.strip():
                continue
            try:
                log_entry = json.loads(line)
            except json.JSONDecodeError:
                continue  # skip malformed lines silently, as before
            # Filter by level
            if level_filter and log_entry.get("level") != level_filter:
                continue
            # Filter by keyword across all values of the entry
            if keyword_filter:
                # BUG FIX: str(dict.values()) embeds the literal text
                # "dict_values([...])" in the search blob, so keywords like
                # "dict" or "[" matched every entry and value matching was
                # distorted by repr quoting. Join the actual values instead.
                blob = " ".join(str(v) for v in log_entry.values()).lower()
                if keyword_filter not in blob:
                    continue
            logs.append(log_entry)
            if len(logs) >= limit:
                break
        return {"logs": logs}
    except Exception as e:
        raise HTTPException(status_code=500, detail=str(e))