Merge remote-tracking branch 'upstream/dev' into playwright

# Conflicts:
#	backend/open_webui/retrieval/web/utils.py
#	backend/open_webui/routers/retrieval.py
Author: Rory
Date:   2025-02-17 21:53:39 -06:00
226 changed files with 3402 additions and 1802 deletions


@@ -39,7 +39,10 @@ from open_webui.routers.tasks import (
 )
 from open_webui.routers.retrieval import process_web_search, SearchForm
 from open_webui.routers.images import image_generations, GenerateImageForm
-from open_webui.routers.pipelines import process_pipeline_inlet_filter
+from open_webui.routers.pipelines import (
+    process_pipeline_inlet_filter,
+    process_pipeline_outlet_filter,
+)
 from open_webui.utils.webhook import post_webhook
@@ -359,14 +362,25 @@ async def chat_web_search_handler(
             )
 
             files = form_data.get("files", [])
-            files.append(
-                {
-                    "collection_name": results["collection_name"],
-                    "name": searchQuery,
-                    "type": "web_search_results",
-                    "urls": results["filenames"],
-                }
-            )
+
+            if request.app.state.config.RAG_WEB_SEARCH_FULL_CONTEXT:
+                files.append(
+                    {
+                        "docs": results.get("docs", []),
+                        "name": searchQuery,
+                        "type": "web_search_docs",
+                        "urls": results["filenames"],
+                    }
+                )
+            else:
+                files.append(
+                    {
+                        "collection_name": results["collection_name"],
+                        "name": searchQuery,
+                        "type": "web_search_results",
+                        "urls": results["filenames"],
+                    }
+                )
             form_data["files"] = files
         else:
             await event_emitter(
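Note: the two branches above attach differently shaped file entries. A minimal standalone sketch of the two shapes (the `results` dict mirrors what `process_web_search` returns in this diff; the function name is illustrative only):

def build_web_search_file_entry(results: dict, query: str, full_context: bool) -> dict:
    # Sketch only: full-context mode forwards the raw documents to the model,
    # while the default mode references the collection the results were indexed into.
    if full_context:
        return {
            "docs": results.get("docs", []),
            "name": query,
            "type": "web_search_docs",
            "urls": results["filenames"],
        }
    return {
        "collection_name": results["collection_name"],
        "name": query,
        "type": "web_search_results",
        "urls": results["filenames"],
    }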
@@ -676,6 +690,25 @@ async def process_chat_payload(request, form_data, metadata, user, model):
     variables = form_data.pop("variables", None)
 
+    # Process the form_data through the pipeline
+    try:
+        form_data = await process_pipeline_inlet_filter(
+            request, form_data, user, models
+        )
+    except Exception as e:
+        raise e
+
+    try:
+        form_data, flags = await process_filter_functions(
+            request=request,
+            filter_ids=get_sorted_filter_ids(model),
+            filter_type="inlet",
+            form_data=form_data,
+            extra_params=extra_params,
+        )
+    except Exception as e:
+        raise Exception(f"Error: {e}")
+
     features = form_data.pop("features", None)
     if features:
         if "web_search" in features and features["web_search"]:
@@ -698,17 +731,6 @@ async def process_chat_payload(request, form_data, metadata, user, model):
                 form_data["messages"],
             )
 
-    try:
-        form_data, flags = await process_filter_functions(
-            request=request,
-            filter_ids=get_sorted_filter_ids(model),
-            filter_type="inlet",
-            form_data=form_data,
-            extra_params=extra_params,
-        )
-    except Exception as e:
-        raise Exception(f"Error: {e}")
-
     tool_ids = form_data.pop("tool_ids", None)
     files = form_data.pop("files", None)
     # Remove files duplicates
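Note: together with the hunk above, this relocation moves the inlet filter-function pass so it runs right after process_pipeline_inlet_filter and before features are consumed. A sketch of the resulting order, assuming the imports from the first hunk (error handling and unrelated steps omitted):

async def inlet_order_sketch(request, form_data, user, models, model, extra_params):
    # 1. Pipeline inlet filter runs first
    form_data = await process_pipeline_inlet_filter(request, form_data, user, models)
    # 2. Filter functions run next, before feature handling
    form_data, flags = await process_filter_functions(
        request=request,
        filter_ids=get_sorted_filter_ids(model),
        filter_type="inlet",
        form_data=form_data,
        extra_params=extra_params,
    )
    # 3. Features are popped only after both filter passes have run,
    #    so inlet filters can still inspect or modify them
    features = form_data.pop("features", None)
    return form_data, flags, features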
@@ -789,7 +811,7 @@ async def process_chat_payload(request, form_data, metadata, user, model):
         # Workaround for Ollama 2.0+ system prompt issue
         # TODO: replace with add_or_update_system_message
 
-        if model["owned_by"] == "ollama":
+        if model.get("owned_by") == "ollama":
             form_data["messages"] = prepend_to_first_user_message_content(
                 rag_template(
                     request.app.state.config.RAG_TEMPLATE, context_string, prompt
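Note: switching from model["owned_by"] to model.get("owned_by") avoids a KeyError when a model dict lacks the key; the comparison simply evaluates to False and the Ollama workaround is skipped. For example:

model = {}
model.get("owned_by") == "ollama"  # False, no exception raised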
@@ -997,6 +1019,7 @@ async def process_chat_response(
                 webhook_url = Users.get_user_webhook_url_by_id(user.id)
                 if webhook_url:
                     post_webhook(
+                        request.app.state.WEBUI_NAME,
                         webhook_url,
                         f"{title} - {request.app.state.config.WEBUI_URL}/c/{metadata['chat_id']}\n\n{content}",
                         {
@@ -1335,7 +1358,14 @@ async def process_chat_response(
             )
 
             tool_calls = []
-            content = message.get("content", "") if message else ""
+
+            last_assistant_message = get_last_assistant_message(form_data["messages"])
+            content = (
+                message.get("content", "")
+                if message
+                else last_assistant_message if last_assistant_message else ""
+            )
 
             content_blocks = [
                 {
                     "type": "text",
@@ -1862,6 +1892,7 @@ async def process_chat_response(
                 webhook_url = Users.get_user_webhook_url_by_id(user.id)
                 if webhook_url:
                     post_webhook(
+                        request.app.state.WEBUI_NAME,
                         webhook_url,
                         f"{title} - {request.app.state.config.WEBUI_URL}/c/{metadata['chat_id']}\n\n{content}",
                         {
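Note: this hunk and the one at @@ -997,6 make the same change: post_webhook now takes the instance name as its first argument. A sketch of the updated call shape, with payload standing in for the dict literal that both hunks truncate:

payload = {}  # placeholder: the real dict continues past the end of the hunk
post_webhook(
    request.app.state.WEBUI_NAME,  # newly added first argument
    webhook_url,
    f"{title} - {request.app.state.config.WEBUI_URL}/c/{metadata['chat_id']}\n\n{content}",
    payload,
)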