open-webui/backend/open_webui/routers/webui.py

import inspect
import json
import logging
import time
from typing import AsyncGenerator, Generator, Iterator

from open_webui.socket.main import get_event_call, get_event_emitter
from open_webui.models.functions import Functions
from open_webui.models.models import Models
from open_webui.routers import (
    auths,
    chats,
    folders,
    configs,
    groups,
    files,
    functions,
    memories,
    models,
    knowledge,
    prompts,
    evaluations,
    tools,
    users,
    utils,
)
from open_webui.utils.plugin import load_function_module_by_id
from open_webui.config import (
    ADMIN_EMAIL,
    CORS_ALLOW_ORIGIN,
    DEFAULT_MODELS,
    DEFAULT_PROMPT_SUGGESTIONS,
    DEFAULT_USER_ROLE,
    MODEL_ORDER_LIST,
    ENABLE_COMMUNITY_SHARING,
    ENABLE_LOGIN_FORM,
    ENABLE_MESSAGE_RATING,
    ENABLE_SIGNUP,
    ENABLE_API_KEY,
    ENABLE_EVALUATION_ARENA_MODELS,
    EVALUATION_ARENA_MODELS,
    DEFAULT_ARENA_MODEL,
    JWT_EXPIRES_IN,
    ENABLE_OAUTH_ROLE_MANAGEMENT,
    OAUTH_ROLES_CLAIM,
    OAUTH_EMAIL_CLAIM,
    OAUTH_PICTURE_CLAIM,
    OAUTH_USERNAME_CLAIM,
    OAUTH_ALLOWED_ROLES,
    OAUTH_ADMIN_ROLES,
    SHOW_ADMIN_DETAILS,
    USER_PERMISSIONS,
    WEBHOOK_URL,
    WEBUI_AUTH,
    WEBUI_BANNERS,
    ENABLE_LDAP,
    LDAP_SERVER_LABEL,
    LDAP_SERVER_HOST,
    LDAP_SERVER_PORT,
    LDAP_ATTRIBUTE_FOR_USERNAME,
    LDAP_SEARCH_FILTERS,
    LDAP_SEARCH_BASE,
    LDAP_APP_DN,
    LDAP_APP_PASSWORD,
    LDAP_USE_TLS,
    LDAP_CA_CERT_FILE,
    LDAP_CIPHERS,
    AppConfig,
)
from open_webui.env import (
    ENV,
    SRC_LOG_LEVELS,
    WEBUI_AUTH_TRUSTED_EMAIL_HEADER,
    WEBUI_AUTH_TRUSTED_NAME_HEADER,
)
from fastapi import FastAPI
from fastapi.middleware.cors import CORSMiddleware
from fastapi.responses import StreamingResponse
from pydantic import BaseModel

from open_webui.utils.misc import (
    openai_chat_chunk_message_template,
    openai_chat_completion_message_template,
)
from open_webui.utils.payload import (
    apply_model_params_to_body_openai,
    apply_model_system_prompt_to_body,
)
from open_webui.utils.tools import get_tools


log = logging.getLogger(__name__)
log.setLevel(SRC_LOG_LEVELS["MAIN"])
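

# NOTE: the FastAPI app and its state setup are not shown in this view; the
# block below is a minimal, assumed sketch so the routes and helpers in this
# file have an `app` to attach to. The real initialization (full AppConfig
# wiring, auth/OAuth/LDAP settings, etc.) lives upstream and may differ.
app = FastAPI(
    docs_url="/docs" if ENV == "dev" else None,
    openapi_url="/openapi.json" if ENV == "dev" else None,
)

app.add_middleware(
    CORSMiddleware,
    allow_origins=CORS_ALLOW_ORIGIN,
    allow_credentials=True,
    allow_methods=["*"],
    allow_headers=["*"],
)

app.state.config = AppConfig()
# Only the config values read in this file are wired here; the rest is elided.
app.state.config.DEFAULT_MODELS = DEFAULT_MODELS
app.state.config.DEFAULT_PROMPT_SUGGESTIONS = DEFAULT_PROMPT_SUGGESTIONS
app.state.config.ENABLE_EVALUATION_ARENA_MODELS = ENABLE_EVALUATION_ARENA_MODELS
app.state.config.EVALUATION_ARENA_MODELS = EVALUATION_ARENA_MODELS
app.state.FUNCTIONS = {}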


@app.get("/")
async def get_status():
    return {
        "status": True,
        "auth": WEBUI_AUTH,
        "default_models": app.state.config.DEFAULT_MODELS,
        "default_prompt_suggestions": app.state.config.DEFAULT_PROMPT_SUGGESTIONS,
    }


async def get_all_models():
    models = []
    pipe_models = await get_pipe_models()
    models = models + pipe_models

    if app.state.config.ENABLE_EVALUATION_ARENA_MODELS:
        arena_models = []
        if len(app.state.config.EVALUATION_ARENA_MODELS) > 0:
            arena_models = [
                {
                    "id": model["id"],
                    "name": model["name"],
                    "info": {
                        "meta": model["meta"],
                    },
                    "object": "model",
                    "created": int(time.time()),
                    "owned_by": "arena",
                    "arena": True,
                }
                for model in app.state.config.EVALUATION_ARENA_MODELS
            ]
        else:
            # Add default arena model
            arena_models = [
                {
                    "id": DEFAULT_ARENA_MODEL["id"],
                    "name": DEFAULT_ARENA_MODEL["name"],
                    "info": {
                        "meta": DEFAULT_ARENA_MODEL["meta"],
                    },
                    "object": "model",
                    "created": int(time.time()),
                    "owned_by": "arena",
                    "arena": True,
                }
            ]
        models = models + arena_models

    return models


def get_function_module(pipe_id: str):
    # Check if function is already loaded
    if pipe_id not in app.state.FUNCTIONS:
        function_module, _, _ = load_function_module_by_id(pipe_id)
        app.state.FUNCTIONS[pipe_id] = function_module
    else:
        function_module = app.state.FUNCTIONS[pipe_id]

    if hasattr(function_module, "valves") and hasattr(function_module, "Valves"):
        valves = Functions.get_function_valves_by_id(pipe_id)
        function_module.valves = function_module.Valves(**(valves if valves else {}))

    return function_module


async def get_pipe_models():
    """List active pipe functions as OpenAI-style model entries, expanding
    manifolds into `pipe_id.sub_pipe_id` sub-pipes."""
    pipes = Functions.get_functions_by_type("pipe", active_only=True)
    pipe_models = []

    for pipe in pipes:
        function_module = get_function_module(pipe.id)

        # Check if function is a manifold
        if hasattr(function_module, "pipes"):
            sub_pipes = []

            # Check if pipes is a function or a list
            try:
                if callable(function_module.pipes):
                    sub_pipes = function_module.pipes()
                else:
                    sub_pipes = function_module.pipes
            except Exception as e:
                log.exception(e)
                sub_pipes = []

            log.debug(
                f"get_pipe_models: function '{pipe.id}' is a manifold of {sub_pipes}"
            )

            for p in sub_pipes:
                sub_pipe_id = f'{pipe.id}.{p["id"]}'
                sub_pipe_name = p["name"]

                if hasattr(function_module, "name"):
                    sub_pipe_name = f"{function_module.name}{sub_pipe_name}"

                pipe_flag = {"type": pipe.type}

                pipe_models.append(
                    {
                        "id": sub_pipe_id,
                        "name": sub_pipe_name,
                        "object": "model",
                        "created": pipe.created_at,
                        "owned_by": "openai",
                        "pipe": pipe_flag,
                    }
                )
        else:
            pipe_flag = {"type": "pipe"}

            log.debug(
                f"get_pipe_models: function '{pipe.id}' is a single pipe {{ 'id': {pipe.id}, 'name': {pipe.name} }}"
            )

            pipe_models.append(
                {
                    "id": pipe.id,
                    "name": pipe.name,
                    "object": "model",
                    "created": pipe.created_at,
                    "owned_by": "openai",
                    "pipe": pipe_flag,
                }
            )

    return pipe_models


async def execute_pipe(pipe, params):
    if inspect.iscoroutinefunction(pipe):
        return await pipe(**params)
    else:
        return pipe(**params)


async def get_message_content(res: str | Generator | AsyncGenerator) -> str:
    if isinstance(res, str):
        return res
    if isinstance(res, Generator):
        return "".join(map(str, res))
    if isinstance(res, AsyncGenerator):
        return "".join([str(stream) async for stream in res])


def process_line(form_data: dict, line):
    """Normalize a streamed line (str, bytes, dict, or BaseModel) into an SSE `data:` chunk."""
    if isinstance(line, BaseModel):
        line = line.model_dump_json()
        line = f"data: {line}"
    if isinstance(line, dict):
        line = f"data: {json.dumps(line)}"

    try:
        line = line.decode("utf-8")
    except Exception:
        pass

    if line.startswith("data:"):
        return f"{line}\n\n"
    else:
        line = openai_chat_chunk_message_template(form_data["model"], line)
        return f"data: {json.dumps(line)}\n\n"


def get_pipe_id(form_data: dict) -> str:
    """Return the base pipe id; manifold sub-pipes are named `pipe_id.sub_pipe_id`."""
    pipe_id = form_data["model"]
    if "." in pipe_id:
        pipe_id, _ = pipe_id.split(".", 1)
    return pipe_id


def get_function_params(function_module, form_data, user, extra_params=None):
    """Build the kwargs for a pipe call: the request body plus any extra
    params accepted by the pipe's signature, including per-user valves."""
    if extra_params is None:
        extra_params = {}

    pipe_id = get_pipe_id(form_data)

    # Get the signature of the function
    sig = inspect.signature(function_module.pipe)
    params = {"body": form_data} | {
        k: v for k, v in extra_params.items() if k in sig.parameters
    }

    if "__user__" in params and hasattr(function_module, "UserValves"):
        user_valves = Functions.get_user_valves_by_id_and_user_id(pipe_id, user.id)
        try:
            params["__user__"]["valves"] = function_module.UserValves(**user_valves)
        except Exception as e:
            log.exception(e)
            params["__user__"]["valves"] = function_module.UserValves()

    return params


async def generate_function_chat_completion(form_data, user, models: dict = {}):
    """Run a pipe function as an OpenAI-style chat completion, streaming SSE chunks when requested."""
    model_id = form_data.get("model")
    model_info = Models.get_model_by_id(model_id)

    metadata = form_data.pop("metadata", {})

    files = metadata.get("files", [])
    tool_ids = metadata.get("tool_ids", [])
    # Check if tool_ids is None
    if tool_ids is None:
        tool_ids = []

    __event_emitter__ = None
    __event_call__ = None
    __task__ = None
    __task_body__ = None

    if metadata:
        if all(k in metadata for k in ("session_id", "chat_id", "message_id")):
            __event_emitter__ = get_event_emitter(metadata)
            __event_call__ = get_event_call(metadata)
            __task__ = metadata.get("task", None)
            __task_body__ = metadata.get("task_body", None)

    extra_params = {
        "__event_emitter__": __event_emitter__,
        "__event_call__": __event_call__,
        "__task__": __task__,
        "__task_body__": __task_body__,
        "__files__": files,
        "__user__": {
            "id": user.id,
            "email": user.email,
            "name": user.name,
            "role": user.role,
        },
        "__metadata__": metadata,
    }

    extra_params["__tools__"] = get_tools(
        app,
        tool_ids,
        user,
        {
            **extra_params,
            "__model__": models.get(form_data["model"], None),
            "__messages__": form_data["messages"],
            "__files__": files,
        },
    )

    if model_info:
        if model_info.base_model_id:
            form_data["model"] = model_info.base_model_id

        params = model_info.params.model_dump()
        form_data = apply_model_params_to_body_openai(params, form_data)
        form_data = apply_model_system_prompt_to_body(params, form_data, user)

    pipe_id = get_pipe_id(form_data)
    function_module = get_function_module(pipe_id)

    pipe = function_module.pipe
    params = get_function_params(function_module, form_data, user, extra_params)

    if form_data.get("stream", False):

        async def stream_content():
            try:
                res = await execute_pipe(pipe, params)

                # Directly return if the response is a StreamingResponse
                if isinstance(res, StreamingResponse):
                    async for data in res.body_iterator:
                        yield data
                    return
                if isinstance(res, dict):
                    yield f"data: {json.dumps(res)}\n\n"
                    return

            except Exception as e:
                log.error(f"Error: {e}")
                yield f"data: {json.dumps({'error': {'detail': str(e)}})}\n\n"
                return

            if isinstance(res, str):
                message = openai_chat_chunk_message_template(form_data["model"], res)
                yield f"data: {json.dumps(message)}\n\n"

            if isinstance(res, Iterator):
                for line in res:
                    yield process_line(form_data, line)

            if isinstance(res, AsyncGenerator):
                async for line in res:
                    yield process_line(form_data, line)

            if isinstance(res, str) or isinstance(res, Generator):
                finish_message = openai_chat_chunk_message_template(
                    form_data["model"], ""
                )
                finish_message["choices"][0]["finish_reason"] = "stop"
                yield f"data: {json.dumps(finish_message)}\n\n"
                yield "data: [DONE]"

        return StreamingResponse(stream_content(), media_type="text/event-stream")
    else:
        try:
            res = await execute_pipe(pipe, params)
        except Exception as e:
            log.error(f"Error: {e}")
            return {"error": {"detail": str(e)}}

        if isinstance(res, StreamingResponse) or isinstance(res, dict):
            return res
        if isinstance(res, BaseModel):
            return res.model_dump()

        message = await get_message_content(res)
        return openai_chat_completion_message_template(form_data["model"], message)
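

# ---------------------------------------------------------------------------
# Illustrative sketch only (hypothetical, not used by the router): the
# attribute surface the helpers above expect from a loaded function module.
# The names are inferred from the accesses in this file (pipe, pipes, Valves,
# UserValves, name); real function modules are produced by
# load_function_module_by_id and may differ.
class _ExamplePipe:
    class Valves(BaseModel):
        example_setting: str = ""  # hypothetical admin-level valve field

    class UserValves(BaseModel):
        example_user_setting: str = ""  # hypothetical per-user valve field

    def __init__(self):
        self.valves = self.Valves()

    def pipe(self, body: dict, __user__: dict | None = None) -> str:
        # May also return a dict, BaseModel, Generator, AsyncGenerator, or
        # StreamingResponse; see generate_function_chat_completion above.
        # A manifold would additionally expose `pipes` returning
        # [{"id": ..., "name": ...}, ...] and optionally a `name` prefix.
        return f"echo: {body['messages'][-1]['content']}"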