feat: add LOG_FORMAT=json for structured JSON logging (#21747)

* feat: add LOG_FORMAT env var with JSON formatter for early logging

Introduce LOG_FORMAT environment variable (set to "json" to enable).
When active, logging.basicConfig() uses a JSONFormatter that outputs
single-line JSON objects with fields: ts, level, msg, caller, error,
stacktrace. This covers all log messages emitted during module imports
before Loguru's start_logger() takes over.

* feat: add JSON sink for Loguru when LOG_FORMAT=json

Add _json_sink() as a Loguru sink function that writes single-line JSON
to stdout. In start_logger(), conditionally use the JSON sink instead of
the plain-text stdout_format when LOG_FORMAT is set to "json".

* feat: suppress ASCII banner and fix alembic logging in JSON mode

- Wrap the ASCII art banner print in main.py with a LOG_FORMAT != "json"
  guard so JSON output stays machine-parseable.
- Re-apply the JSON formatter in migrations/env.py after alembic's
  fileConfig() call when LOG_FORMAT=json, so it does not replace the
  JSON log handlers installed during early startup.
This commit is contained in:
Andrei Efanov
2026-02-23 00:40:17 +01:00
committed by GitHub
parent 3ad2ea6f28
commit 9e81e1dda1
4 changed files with 101 additions and 11 deletions

View File

@@ -5,6 +5,9 @@ import os
import pkgutil
import sys
import shutil
import traceback
from datetime import datetime, timezone
from typing import Any
from uuid import uuid4
from pathlib import Path
from cryptography.hazmat.primitives import serialization
@@ -72,9 +75,51 @@ except Exception:
# LOGGING
####################################
# Map Python logging level names to the short, lowercase level names used
# in the structured JSON output (warn/fatal follow common JSON-log schemes).
_LEVEL_MAP = {
    "DEBUG": "debug",
    "INFO": "info",
    "WARNING": "warn",
    "ERROR": "error",
    "CRITICAL": "fatal",
}


class JSONFormatter(logging.Formatter):
    """Format log records as single-line JSON objects for structured logging.

    Emitted fields:
        ts:         UTC timestamp, millisecond precision, "Z" suffix.
        level:      lowercase level name (see _LEVEL_MAP; unknown levels
                    fall back to the lowercased stdlib level name).
        msg:        the fully formatted log message.
        caller:     the name of the logger that emitted the record.
        error:      formatted exception traceback, when present.
        stacktrace: stack info captured via stack_info=True, when present.
    """

    def format(self, record: logging.LogRecord) -> str:
        # Use a "Z" suffix instead of isoformat's "+00:00" so the timestamp
        # format matches the Loguru JSON sink and the stream stays uniform.
        ts = (
            datetime.fromtimestamp(record.created, tz=timezone.utc)
            .isoformat(timespec="milliseconds")
            .replace("+00:00", "Z")
        )
        log_entry: dict[str, Any] = {
            "ts": ts,
            "level": _LEVEL_MAP.get(record.levelname, record.levelname.lower()),
            "msg": record.getMessage(),
            "caller": record.name,
        }
        if record.exc_info and record.exc_info[0] is not None:
            log_entry["error"] = "".join(
                traceback.format_exception(*record.exc_info)
            ).rstrip()
        elif record.exc_text:
            # Exception text already formatted and cached by another formatter.
            log_entry["error"] = record.exc_text
        if record.stack_info:
            log_entry["stacktrace"] = record.stack_info
        # default=str keeps dumping robust if a non-JSON-serializable value
        # ever lands in the record.
        return json.dumps(log_entry, ensure_ascii=False, default=str)
# LOG_FORMAT=json switches the early (stdlib) logging configured here — and,
# later, the Loguru setup in start_logger() — to single-line JSON output.
LOG_FORMAT = os.environ.get("LOG_FORMAT", "").lower()

GLOBAL_LOG_LEVEL = os.environ.get("GLOBAL_LOG_LEVEL", "").upper()
if GLOBAL_LOG_LEVEL in logging.getLevelNamesMapping():
    # NOTE(review): removed the leftover unconditional
    # logging.basicConfig(stream=sys.stdout, ...) that preceded this branch;
    # it was superseded by the force=True calls below and only did redundant
    # root-handler setup before being torn down again.
    if LOG_FORMAT == "json":
        # Install a single JSON-formatting handler on the root logger so that
        # messages emitted during module imports (before Loguru's
        # start_logger() takes over) are already machine-parseable.
        _handler = logging.StreamHandler(sys.stdout)
        _handler.setFormatter(JSONFormatter())
        logging.basicConfig(handlers=[_handler], level=GLOBAL_LOG_LEVEL, force=True)
    else:
        logging.basicConfig(stream=sys.stdout, level=GLOBAL_LOG_LEVEL, force=True)
else:
    GLOBAL_LOG_LEVEL = "INFO"

View File

@@ -503,6 +503,7 @@ from open_webui.env import (
WEBUI_ADMIN_PASSWORD,
WEBUI_ADMIN_NAME,
ENABLE_EASTER_EGGS,
LOG_FORMAT,
)
@@ -581,7 +582,8 @@ class SPAStaticFiles(StaticFiles):
raise ex
print(rf"""
if LOG_FORMAT != "json":
print(rf"""
██████╗ ██████╗ ███████╗███╗ ██╗ ██╗ ██╗███████╗██████╗ ██╗ ██╗██╗
██╔═══██╗██╔══██╗██╔════╝████╗ ██║ ██║ ██║██╔════╝██╔══██╗██║ ██║██║
██║ ██║██████╔╝█████╗ ██╔██╗ ██║ ██║ █╗ ██║█████╗ ██████╔╝██║ ██║██║

View File

@@ -1,8 +1,9 @@
import logging
from logging.config import fileConfig
from alembic import context
from open_webui.models.auths import Auth
from open_webui.env import DATABASE_URL, DATABASE_PASSWORD
from open_webui.env import DATABASE_URL, DATABASE_PASSWORD, LOG_FORMAT
from sqlalchemy import engine_from_config, pool, create_engine
# this is the Alembic Config object, which provides
@@ -14,6 +15,13 @@ config = context.config
# Interpret the alembic config file for Python logging. fileConfig() replaces
# the root logger's handler formatters, so in JSON mode we immediately restore
# the structured formatter afterwards to keep output machine-parseable.
if config.config_file_name is not None:
    fileConfig(config.config_file_name, disable_existing_loggers=False)

    # Re-apply JSON formatter after fileConfig replaces handlers.
    if LOG_FORMAT == "json":
        # Imported lazily to avoid widening module import order dependencies.
        from open_webui.env import JSONFormatter

        for handler in logging.root.handlers:
            handler.setFormatter(JSONFormatter())
# add your model's MetaData object here
# for 'autogenerate' support
# from myapp import mymodel

View File

@@ -12,13 +12,15 @@ from open_webui.env import (
AUDIT_LOG_FILE_ROTATION_SIZE,
AUDIT_LOG_LEVEL,
GLOBAL_LOG_LEVEL,
LOG_FORMAT,
AUDIT_UVICORN_LOGGER_NAMES,
ENABLE_OTEL,
ENABLE_OTEL_LOGS,
_LEVEL_MAP,
)
if TYPE_CHECKING:
from loguru import Record
from loguru import Message, Record
def stdout_format(record: "Record") -> str:
@@ -43,6 +45,31 @@ def stdout_format(record: "Record") -> str:
)
def _json_sink(message: "Message") -> None:
    """Write log records as single-line JSON to stdout.

    Used as a Loguru sink when LOG_FORMAT is set to "json". The field layout
    (ts, level, msg, caller, error) mirrors the stdlib JSONFormatter used for
    early-startup logging so the whole stream stays uniform.
    """
    # Local import keeps this module's import block unchanged.
    from datetime import timezone

    record = message.record
    # Loguru's record["time"] is timezone-aware in the *local* zone; convert
    # to UTC before formatting so the trailing "Z" (Zulu/UTC) is truthful.
    utc_time = record["time"].astimezone(timezone.utc)
    log_entry = {
        "ts": utc_time.strftime("%Y-%m-%dT%H:%M:%S.%f")[:-3] + "Z",
        "level": _LEVEL_MAP.get(record["level"].name, record["level"].name.lower()),
        "msg": record["message"],
        "caller": f"{record['name']}:{record['function']}:{record['line']}",
    }
    if record["extra"]:
        log_entry["extra"] = record["extra"]
    if record["exception"] is not None:
        log_entry["error"] = "".join(
            record["exception"].format_exception()
        ).rstrip()
    sys.stdout.write(json.dumps(log_entry, ensure_ascii=False, default=str) + "\n")
    sys.stdout.flush()
class InterceptHandler(logging.Handler):
"""
Intercepts log records from Python's standard logging module
@@ -127,14 +154,22 @@ def start_logger():
"""
logger.remove()
logger.add(
sys.stdout,
level=GLOBAL_LOG_LEVEL,
format=stdout_format,
filter=lambda record: (
"auditable" not in record["extra"] if ENABLE_AUDIT_STDOUT else True
),
audit_filter = lambda record: (
"auditable" not in record["extra"] if ENABLE_AUDIT_STDOUT else True
)
if LOG_FORMAT == "json":
logger.add(
_json_sink,
level=GLOBAL_LOG_LEVEL,
filter=audit_filter,
)
else:
logger.add(
sys.stdout,
level=GLOBAL_LOG_LEVEL,
format=stdout_format,
filter=audit_filter,
)
if AUDIT_LOG_LEVEL != "NONE" and ENABLE_AUDIT_LOGS_FILE:
try:
logger.add(