2024-05-30 20:44:13 +07:00
|
|
|
import json
|
2024-08-28 00:10:27 +02:00
|
|
|
import logging
|
2024-06-21 14:58:57 +02:00
|
|
|
from contextlib import contextmanager
|
2024-08-28 00:10:27 +02:00
|
|
|
from typing import Any, Optional
|
2024-07-05 23:38:53 -07:00
|
|
|
|
2024-12-10 00:54:13 -08:00
|
|
|
from open_webui.internal.wrappers import register_connection
|
2024-10-06 16:46:35 +02:00
|
|
|
from open_webui.env import (
|
|
|
|
|
OPEN_WEBUI_DIR,
|
|
|
|
|
DATABASE_URL,
|
2024-12-10 15:18:26 +01:00
|
|
|
DATABASE_SCHEMA,
|
2024-10-06 16:46:35 +02:00
|
|
|
SRC_LOG_LEVELS,
|
|
|
|
|
DATABASE_POOL_MAX_OVERFLOW,
|
|
|
|
|
DATABASE_POOL_RECYCLE,
|
|
|
|
|
DATABASE_POOL_SIZE,
|
|
|
|
|
DATABASE_POOL_TIMEOUT,
|
|
|
|
|
)
|
2024-08-28 00:10:27 +02:00
|
|
|
from peewee_migrate import Router
|
2024-12-10 15:18:26 +01:00
|
|
|
from sqlalchemy import Dialect, create_engine, MetaData, types
|
2024-08-28 00:10:27 +02:00
|
|
|
from sqlalchemy.ext.declarative import declarative_base
|
|
|
|
|
from sqlalchemy.orm import scoped_session, sessionmaker
|
2024-10-06 16:46:35 +02:00
|
|
|
from sqlalchemy.pool import QueuePool, NullPool
|
2024-08-28 00:10:27 +02:00
|
|
|
from sqlalchemy.sql.type_api import _T
|
|
|
|
|
from typing_extensions import Self
|
2024-05-30 18:55:58 +07:00
|
|
|
|
2024-03-20 17:11:36 -06:00
|
|
|
# Module-level logger for database setup; verbosity is driven by the "DB"
# entry of SRC_LOG_LEVELS from open_webui.env.
log = logging.getLogger(__name__)
log.setLevel(SRC_LOG_LEVELS["DB"])
|
2024-01-19 17:13:09 -03:00
|
|
|
|
2024-06-20 04:53:23 -07:00
|
|
|
|
2024-06-18 15:03:31 +02:00
|
|
|
class JSONField(types.TypeDecorator):
    """Store arbitrary JSON-serializable values in a TEXT column.

    SQLAlchemy invokes ``process_bind_param`` / ``process_result_value``;
    the ``db_value`` / ``python_value`` pair exposes the same conversion
    under the names peewee expects.
    """

    impl = types.Text
    cache_ok = True

    def process_bind_param(self, value: Optional[_T], dialect: Dialect) -> Any:
        # Serialize on the way in.  Note: None is stored as the JSON
        # literal "null", not SQL NULL.
        return json.dumps(value)

    def process_result_value(self, value: Optional[_T], dialect: Dialect) -> Any:
        # Deserialize on the way out; SQL NULL passes through as None.
        return None if value is None else json.loads(value)

    def copy(self, **kw: Any) -> Self:
        # Fresh decorator instance carrying over the underlying TEXT length.
        return JSONField(self.impl.length)

    def db_value(self, value):
        # peewee-facing twin of process_bind_param.
        return json.dumps(value)

    def python_value(self, value):
        # peewee-facing twin of process_result_value.
        return None if value is None else json.loads(value)
|
|
|
|
|
|
2024-06-20 04:53:23 -07:00
|
|
|
|
2024-07-05 23:38:53 -07:00
|
|
|
# Workaround to handle the peewee migration
# This is required to ensure the peewee migration is handled before the alembic migration
def handle_peewee_migration(DATABASE_URL):
    """Run the legacy peewee migrations before alembic takes over.

    Opens a short-lived peewee connection to ``DATABASE_URL``, applies any
    pending migrations from ``internal/migrations``, and closes the
    connection again.  Re-raises on failure so startup aborts early.
    """
    # Initialize up front so the finally block can safely inspect it even
    # when register_connection() itself raises; previously `db` was unbound
    # in that path, producing an UnboundLocalError that masked the real error.
    db = None
    try:
        # Replace the postgresql:// with postgres:// to handle the peewee migration
        db = register_connection(DATABASE_URL.replace("postgresql://", "postgres://"))
        migrate_dir = OPEN_WEBUI_DIR / "internal" / "migrations"
        router = Router(db, logger=log, migrate_dir=migrate_dir)
        router.run()
        db.close()
    except Exception as e:
        log.error(f"Failed to initialize the database connection: {e}")
        log.warning(
            "Hint: If your database password contains special characters, you may need to URL-encode it."
        )
        raise
    finally:
        # Properly closing the database connection
        if db and not db.is_closed():
            db.close()

        # Assert if db connection has been closed (None-safe: db stays None
        # when the connection was never established).
        assert db is None or db.is_closed(), "Database connection is still open."
|
|
|
|
|
|
|
|
|
|
|
2024-07-08 12:42:52 -07:00
|
|
|
# Run the peewee migrations at import time, before the SQLAlchemy/alembic
# setup below touches the database.
handle_peewee_migration(DATABASE_URL)
|
2024-07-05 23:38:53 -07:00
|
|
|
|
|
|
|
|
|
2024-06-18 15:03:31 +02:00
|
|
|
# The SQLAlchemy engine is built from the same URL the peewee migration used.
SQLALCHEMY_DATABASE_URL = DATABASE_URL

if "sqlite" in SQLALCHEMY_DATABASE_URL:
    # SQLite: allow the connection to be shared across threads.
    engine = create_engine(
        SQLALCHEMY_DATABASE_URL, connect_args={"check_same_thread": False}
    )
elif DATABASE_POOL_SIZE > 0:
    # Positive pool size: bounded QueuePool with the configured limits.
    engine = create_engine(
        SQLALCHEMY_DATABASE_URL,
        pool_size=DATABASE_POOL_SIZE,
        max_overflow=DATABASE_POOL_MAX_OVERFLOW,
        pool_timeout=DATABASE_POOL_TIMEOUT,
        pool_recycle=DATABASE_POOL_RECYCLE,
        pool_pre_ping=True,
        poolclass=QueuePool,
    )
elif DATABASE_POOL_SIZE == 0:
    # Pool size of exactly zero: disable connection pooling entirely.
    engine = create_engine(
        SQLALCHEMY_DATABASE_URL, pool_pre_ping=True, poolclass=NullPool
    )
else:
    # Negative / unset: fall back to SQLAlchemy's default pooling behavior.
    engine = create_engine(SQLALCHEMY_DATABASE_URL, pool_pre_ping=True)
|
2024-07-03 23:32:46 -07:00
|
|
|
|
|
|
|
|
|
2024-06-24 09:57:08 +02:00
|
|
|
# Session factory: explicit commit/flush; expire_on_commit=False keeps ORM
# objects usable after the transaction ends.
SessionLocal = sessionmaker(
    autocommit=False, autoflush=False, bind=engine, expire_on_commit=False
)

# All models share this metadata, optionally namespaced by DATABASE_SCHEMA.
metadata_obj = MetaData(schema=DATABASE_SCHEMA)
Base = declarative_base(metadata=metadata_obj)

# Thread-local session registry for code that needs an ambient session.
Session = scoped_session(SessionLocal)
|
2024-07-03 23:32:46 -07:00
|
|
|
|
|
|
|
|
|
2024-07-03 23:39:16 -07:00
|
|
|
def get_session():
    """Yield a fresh SQLAlchemy session and guarantee it is closed.

    Written as a generator so it can serve both as a dependency provider
    and, wrapped in ``contextmanager``, as a ``with``-style helper.
    """
    session = SessionLocal()
    try:
        yield session
    finally:
        session.close()
|
2024-07-03 23:39:16 -07:00
|
|
|
|
|
|
|
|
|
|
|
|
|
# Context-manager flavor of get_session: `with get_db() as db: ...`
get_db = contextmanager(get_session)
|