feat: save UI config changes to config.json

author Jun Siang Cheah
date 2024-05-10 13:36:10 +08:00
parent 9a95767062
commit 058eb76568
11 changed files with 611 additions and 336 deletions

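The hunks below replace direct reads and writes of app.state.* values with config_get/config_set helpers imported from config.py, so that settings changed through the UI (for example the Ollama base URLs) are persisted to config.json instead of living only in process memory. The helpers themselves are defined in config.py and do not appear in this diff; the following is a minimal sketch of how such a pair might work, assuming each persisted setting is wrapped in a small object that carries the config.json key it belongs to. The ConfigValue class, the CONFIG_FILE path, and the key layout are illustrative assumptions, not the project's actual implementation.

# Minimal sketch (an assumption for illustration, not the project's actual
# config.py) of how config_get/config_set could persist app.state values
# to config.json.
import json
from pathlib import Path
from typing import Any

CONFIG_FILE = Path("data/config.json")  # assumed location


class ConfigValue:
    # Wraps a live setting together with the config.json key it is saved under.
    def __init__(self, key: str, value: Any):
        self.key = key
        self.value = value


def config_get(item: ConfigValue) -> Any:
    # Return the current in-memory value; callers index into it as before,
    # e.g. config_get(app.state.OLLAMA_BASE_URLS)[url_idx].
    return item.value


def config_set(item: ConfigValue, value: Any) -> None:
    # Update the in-memory value, then rewrite the whole config file so the
    # change survives a restart.
    item.value = value
    data = json.loads(CONFIG_FILE.read_text()) if CONFIG_FILE.exists() else {}
    node = data
    *parents, leaf = item.key.split(".")
    for part in parents:
        node = node.setdefault(part, {})
    node[leaf] = value
    CONFIG_FILE.parent.mkdir(parents=True, exist_ok=True)
    CONFIG_FILE.write_text(json.dumps(data, indent=4))
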

@@ -46,6 +46,8 @@ from config import (
     ENABLE_MODEL_FILTER,
     MODEL_FILTER_LIST,
     UPLOAD_DIR,
+    config_set,
+    config_get,
 )
 from utils.misc import calculate_sha256
@@ -96,7 +98,7 @@ async def get_status():
 @app.get("/urls")
 async def get_ollama_api_urls(user=Depends(get_admin_user)):
-    return {"OLLAMA_BASE_URLS": app.state.OLLAMA_BASE_URLS}
+    return {"OLLAMA_BASE_URLS": config_get(app.state.OLLAMA_BASE_URLS)}
 class UrlUpdateForm(BaseModel):
@@ -105,10 +107,10 @@ class UrlUpdateForm(BaseModel):
 @app.post("/urls/update")
 async def update_ollama_api_url(form_data: UrlUpdateForm, user=Depends(get_admin_user)):
-    app.state.OLLAMA_BASE_URLS = form_data.urls
+    config_set(app.state.OLLAMA_BASE_URLS, form_data.urls)
     log.info(f"app.state.OLLAMA_BASE_URLS: {app.state.OLLAMA_BASE_URLS}")
-    return {"OLLAMA_BASE_URLS": app.state.OLLAMA_BASE_URLS}
+    return {"OLLAMA_BASE_URLS": config_get(app.state.OLLAMA_BASE_URLS)}
 @app.get("/cancel/{request_id}")
@@ -153,7 +155,9 @@ def merge_models_lists(model_lists):
 async def get_all_models():
     log.info("get_all_models()")
-    tasks = [fetch_url(f"{url}/api/tags") for url in app.state.OLLAMA_BASE_URLS]
+    tasks = [
+        fetch_url(f"{url}/api/tags") for url in config_get(app.state.OLLAMA_BASE_URLS)
+    ]
     responses = await asyncio.gather(*tasks)
     models = {
@@ -179,14 +183,15 @@ async def get_ollama_tags(
             if user.role == "user":
                 models["models"] = list(
                     filter(
-                        lambda model: model["name"] in app.state.MODEL_FILTER_LIST,
+                        lambda model: model["name"]
+                        in config_get(app.state.MODEL_FILTER_LIST),
                         models["models"],
                     )
                 )
                 return models
         return models
     else:
-        url = app.state.OLLAMA_BASE_URLS[url_idx]
+        url = config_get(app.state.OLLAMA_BASE_URLS)[url_idx]
         try:
             r = requests.request(method="GET", url=f"{url}/api/tags")
             r.raise_for_status()
@@ -216,7 +221,10 @@ async def get_ollama_versions(url_idx: Optional[int] = None):
     if url_idx == None:
         # returns lowest version
-        tasks = [fetch_url(f"{url}/api/version") for url in app.state.OLLAMA_BASE_URLS]
+        tasks = [
+            fetch_url(f"{url}/api/version")
+            for url in config_get(app.state.OLLAMA_BASE_URLS)
+        ]
         responses = await asyncio.gather(*tasks)
         responses = list(filter(lambda x: x is not None, responses))
@@ -235,7 +243,7 @@ async def get_ollama_versions(url_idx: Optional[int] = None):
                 detail=ERROR_MESSAGES.OLLAMA_NOT_FOUND,
             )
     else:
-        url = app.state.OLLAMA_BASE_URLS[url_idx]
+        url = config_get(app.state.OLLAMA_BASE_URLS)[url_idx]
         try:
             r = requests.request(method="GET", url=f"{url}/api/version")
             r.raise_for_status()
@@ -267,7 +275,7 @@ class ModelNameForm(BaseModel):
 async def pull_model(
     form_data: ModelNameForm, url_idx: int = 0, user=Depends(get_admin_user)
 ):
-    url = app.state.OLLAMA_BASE_URLS[url_idx]
+    url = config_get(app.state.OLLAMA_BASE_URLS)[url_idx]
     log.info(f"url: {url}")
     r = None
@@ -355,7 +363,7 @@ async def push_model(
                 detail=ERROR_MESSAGES.MODEL_NOT_FOUND(form_data.name),
             )
-    url = app.state.OLLAMA_BASE_URLS[url_idx]
+    url = config_get(app.state.OLLAMA_BASE_URLS)[url_idx]
     log.debug(f"url: {url}")
     r = None
@@ -417,7 +425,7 @@ async def create_model(
     form_data: CreateModelForm, url_idx: int = 0, user=Depends(get_admin_user)
 ):
     log.debug(f"form_data: {form_data}")
-    url = app.state.OLLAMA_BASE_URLS[url_idx]
+    url = config_get(app.state.OLLAMA_BASE_URLS)[url_idx]
     log.info(f"url: {url}")
     r = None
@@ -490,7 +498,7 @@ async def copy_model(
                 detail=ERROR_MESSAGES.MODEL_NOT_FOUND(form_data.source),
             )
-    url = app.state.OLLAMA_BASE_URLS[url_idx]
+    url = config_get(app.state.OLLAMA_BASE_URLS)[url_idx]
     log.info(f"url: {url}")
     try:
@@ -537,7 +545,7 @@ async def delete_model(
                 detail=ERROR_MESSAGES.MODEL_NOT_FOUND(form_data.name),
             )
-    url = app.state.OLLAMA_BASE_URLS[url_idx]
+    url = config_get(app.state.OLLAMA_BASE_URLS)[url_idx]
     log.info(f"url: {url}")
     try:
@@ -577,7 +585,7 @@ async def show_model_info(form_data: ModelNameForm, user=Depends(get_verified_us
         )
     url_idx = random.choice(app.state.MODELS[form_data.name]["urls"])
-    url = app.state.OLLAMA_BASE_URLS[url_idx]
+    url = config_get(app.state.OLLAMA_BASE_URLS)[url_idx]
     log.info(f"url: {url}")
     try:
@@ -634,7 +642,7 @@ async def generate_embeddings(
                 detail=ERROR_MESSAGES.MODEL_NOT_FOUND(form_data.model),
             )
-    url = app.state.OLLAMA_BASE_URLS[url_idx]
+    url = config_get(app.state.OLLAMA_BASE_URLS)[url_idx]
     log.info(f"url: {url}")
     try:
@@ -684,7 +692,7 @@ def generate_ollama_embeddings(
                 detail=ERROR_MESSAGES.MODEL_NOT_FOUND(form_data.model),
             )
-    url = app.state.OLLAMA_BASE_URLS[url_idx]
+    url = config_get(app.state.OLLAMA_BASE_URLS)[url_idx]
     log.info(f"url: {url}")
     try:
@@ -753,7 +761,7 @@ async def generate_completion(
                 detail=ERROR_MESSAGES.MODEL_NOT_FOUND(form_data.model),
             )
-    url = app.state.OLLAMA_BASE_URLS[url_idx]
+    url = config_get(app.state.OLLAMA_BASE_URLS)[url_idx]
     log.info(f"url: {url}")
     r = None
@@ -856,7 +864,7 @@ async def generate_chat_completion(
                 detail=ERROR_MESSAGES.MODEL_NOT_FOUND(form_data.model),
             )
-    url = app.state.OLLAMA_BASE_URLS[url_idx]
+    url = config_get(app.state.OLLAMA_BASE_URLS)[url_idx]
     log.info(f"url: {url}")
     r = None
@@ -965,7 +973,7 @@ async def generate_openai_chat_completion(
                 detail=ERROR_MESSAGES.MODEL_NOT_FOUND(form_data.model),
             )
-    url = app.state.OLLAMA_BASE_URLS[url_idx]
+    url = config_get(app.state.OLLAMA_BASE_URLS)[url_idx]
     log.info(f"url: {url}")
     r = None
@@ -1064,7 +1072,7 @@ async def get_openai_models(
         }
     else:
-        url = app.state.OLLAMA_BASE_URLS[url_idx]
+        url = config_get(app.state.OLLAMA_BASE_URLS)[url_idx]
         try:
             r = requests.request(method="GET", url=f"{url}/api/tags")
             r.raise_for_status()
@@ -1198,7 +1206,7 @@ async def download_model(
     if url_idx == None:
         url_idx = 0
-    url = app.state.OLLAMA_BASE_URLS[url_idx]
+    url = config_get(app.state.OLLAMA_BASE_URLS)[url_idx]
     file_name = parse_huggingface_url(form_data.url)
@@ -1217,7 +1225,7 @@ async def download_model(
 def upload_model(file: UploadFile = File(...), url_idx: Optional[int] = None):
     if url_idx == None:
         url_idx = 0
-    ollama_url = app.state.OLLAMA_BASE_URLS[url_idx]
+    ollama_url = config_get(app.state.OLLAMA_BASE_URLS)[url_idx]
     file_path = f"{UPLOAD_DIR}/{file.filename}"
@@ -1282,7 +1290,7 @@ def upload_model(file: UploadFile = File(...), url_idx: Optional[int] = None):
 # async def upload_model(file: UploadFile = File(), url_idx: Optional[int] = None):
 #     if url_idx == None:
 #         url_idx = 0
-#     url = app.state.OLLAMA_BASE_URLS[url_idx]
+#     url = config_get(app.state.OLLAMA_BASE_URLS)[url_idx]
 #     file_location = os.path.join(UPLOAD_DIR, file.filename)
 #     total_size = file.size
@@ -1319,7 +1327,7 @@ def upload_model(file: UploadFile = File(...), url_idx: Optional[int] = None):
 async def deprecated_proxy(
     path: str, request: Request, user=Depends(get_verified_user)
 ):
-    url = app.state.OLLAMA_BASE_URLS[0]
+    url = config_get(app.state.OLLAMA_BASE_URLS)[0]
     target_url = f"{url}/{path}"
     body = await request.body()
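
Assuming helpers along the lines of the sketch above, the update-then-read pattern used by the /urls/update handler would behave roughly like this (names, key, and values are illustrative, not taken from the project):

# Illustrative usage only, mirroring the config_set/config_get calls in /urls/update.
OLLAMA_BASE_URLS = ConfigValue("ollama.base_urls", ["http://localhost:11434"])

config_set(OLLAMA_BASE_URLS, ["http://ollama-1:11434", "http://ollama-2:11434"])
print(config_get(OLLAMA_BASE_URLS))
# ['http://ollama-1:11434', 'http://ollama-2:11434']
# data/config.json now holds the same list, so the change survives a restart.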