mirror of
https://github.com/open-webui/open-webui.git
synced 2025-12-16 20:07:49 +01:00
convert embedding function name to be more consistent
This commit is contained in:
@@ -15,7 +15,7 @@ from open_webui.routers.pipelines import process_pipeline_inlet_filter
|
||||
|
||||
|
||||
from open_webui.utils.payload import convert_embedding_payload_openai_to_ollama
|
||||
from open_webui.utils.response import convert_response_ollama_to_openai
|
||||
from open_webui.utils.response import convert_embedding_response_ollama_to_openai
|
||||
|
||||
logging.basicConfig(stream=sys.stdout, level=GLOBAL_LOG_LEVEL)
|
||||
log = logging.getLogger(__name__)
|
||||
@@ -117,7 +117,7 @@ async def generate_embeddings(
|
||||
form_data=form_obj,
|
||||
user=user,
|
||||
)
|
||||
return convert_response_ollama_to_openai(response)
|
||||
return convert_embedding_response_ollama_to_openai(response)
|
||||
|
||||
# Default: OpenAI or compatible backend
|
||||
return await openai_embeddings(
|
||||
|
||||
@@ -126,7 +126,7 @@ async def convert_streaming_response_ollama_to_openai(ollama_streaming_response)
|
||||
|
||||
yield "data: [DONE]\n\n"
|
||||
|
||||
def convert_response_ollama_to_openai(response):
|
||||
def convert_embedding_response_ollama_to_openai(response) -> dict:
|
||||
"""
|
||||
Convert the response from Ollama embeddings endpoint to the OpenAI-compatible format.
|
||||
|
||||
|
||||
Reference in New Issue
Block a user