refactor: refactoring refresh server api (#197)

Medcl authored 2025-02-25 16:07:41 +08:00, committed by GitHub
parent 7c88e7374b
commit 455e20f20f
3 changed files with 14 additions and 21 deletions

View File

@@ -1,7 +1,7 @@
 use serde::{Deserialize, Serialize};
 use std::collections::HashMap;
-#[derive(Debug,Clone, Serialize, Deserialize)]
+#[derive(Debug, Clone, Serialize, Deserialize)]
 #[serde(rename_all = "lowercase")]
 pub enum Status {
     Green,
@@ -9,8 +9,8 @@ pub enum Status {
     Red,
 }
-#[derive(Debug,Clone, Serialize, Deserialize)]
+#[derive(Debug, Clone, Serialize, Deserialize)]
 pub struct Health {
-    pub services: HashMap<String, Status>,
+    pub services: Option<HashMap<String, Status>>,
     pub status: Status,
 }
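
Since services is now Option<HashMap<String, Status>>, callers that previously indexed or iterated the map directly have to account for None. Below is a minimal sketch of such a caller; the count_green_services helper is illustrative and not part of this commit, serde derives are omitted, and the Status variants between Green and Red are elided.

use std::collections::HashMap;

// Status and Health as introduced above (serde derives omitted, middle variants elided).
#[derive(Debug, Clone)]
pub enum Status {
    Green,
    Red,
}

#[derive(Debug, Clone)]
pub struct Health {
    pub services: Option<HashMap<String, Status>>,
    pub status: Status,
}

// Illustrative helper: count services reporting Green, treating a missing map as empty.
pub fn count_green_services(health: &Health) -> usize {
    health
        .services
        .as_ref()
        .map(|services| {
            services
                .values()
                .filter(|status| matches!(status, Status::Green))
                .count()
        })
        .unwrap_or(0)
}

With serde's derived Deserialize, the switch to Option also means a health payload that sends "services": null, or omits the field entirely, now deserializes to None instead of failing.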

View File

@@ -41,7 +41,7 @@ pub async fn refresh_all_datasources<R: Runtime>(app_handle: &AppHandle<R>) -> R
         // Attempt to get datasources by server, and continue even if it fails
         let connectors =
-            match get_datasources_by_server(app_handle.clone(), server.id.clone()).await {
+            match get_datasources_by_server(server.id.as_str()).await {
                 Ok(connectors) => {
                     // Process connectors only after fetching them
                     let connectors_map: HashMap<String, DataSource> = connectors
@@ -85,13 +85,12 @@ pub async fn refresh_all_datasources<R: Runtime>(app_handle: &AppHandle<R>) -> R
 }
 #[tauri::command]
-pub async fn get_datasources_by_server<R: Runtime>(
-    _app_handle: AppHandle<R>,
-    id: String,
+pub async fn get_datasources_by_server(
+    id: &str,
 ) -> Result<Vec<DataSource>, String> {
     // Perform the async HTTP request outside the cache lock
-    let resp = HttpClient::get(&id, "/datasource/_search",None)
+    let resp = HttpClient::get(id, "/datasource/_search", None)
         .await
         .map_err(|e| {
             // dbg!("Error fetching datasource: {}", &e);
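
With the AppHandle parameter and the owned String id gone, callers only need a string slice for the server id. A hypothetical call site under that assumption (the load_datasources wrapper and its error handling are illustrative, and DataSource / get_datasources_by_server are assumed to be in scope):

// Illustrative wrapper: borrow the server id as &str; no AppHandle or String clone needed.
async fn load_datasources(server_id: &str) -> Vec<DataSource> {
    get_datasources_by_server(server_id)
        .await
        .unwrap_or_default() // swallow errors, matching the "continue even if it fails" comment above
}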

View File

@@ -1,7 +1,7 @@
 use crate::common::register::SearchSourceRegistry;
 use crate::common::server::{AuthProvider, Provider, Server, ServerAccessToken, Sso, Version};
-use crate::server::connector::refresh_all_connectors;
-use crate::server::datasource::refresh_all_datasources;
+use crate::server::connector::fetch_connectors_by_server;
+use crate::server::datasource::get_datasources_by_server;
 use crate::server::http_client::HttpClient;
 use crate::server::search::CocoSearchSource;
 use crate::COCO_TAURI_STORE;
@@ -302,10 +302,9 @@ pub async fn refresh_coco_server_info<R: Runtime>(
     if let Some(content_length) = response.content_length() {
         if content_length > 0 {
             let new_coco_server: Result<Server, _> = response.json().await;
             match new_coco_server {
                 Ok(mut server) => {
-                    server.id = id;
+                    server.id = id.clone();
                     server.builtin = is_builtin;
                     server.available = true;
                     server.profile = profile;
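
The added .clone() is needed because id is no longer consumed at this point: the hunk below borrows it again for fetch_connectors_by_server(&id) and get_datasources_by_server(&id), whereas the old assignment server.id = id moved it. A standalone sketch of that borrow rule, with generic names that are not from the repository:

fn main() {
    let id = String::from("server-1");

    // Cloning leaves `id` available for the later borrow, mirroring `server.id = id.clone()`.
    let assigned = id.clone();
    // let assigned = id; // moving instead would turn the borrow below into a compile error

    let later_use: &str = &id;
    println!("{assigned} {later_use}");
}
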
@@ -316,17 +315,14 @@
                         .expect("Failed to persist coco servers.");
                     //refresh connectors and datasources
-                    if let Err(err) = refresh_all_connectors(&app_handle).await {
-                        return Err(format!("Failed to load server connectors: {}", err));
-                    }
+                    let _ = fetch_connectors_by_server(&id).await;
+                    let _ = get_datasources_by_server(&id).await;
-                    if let Err(err) = refresh_all_datasources(&app_handle).await {
-                        return Err(format!("Failed to load server datasources: {}", err));
-                    }
                     Ok(server)
                 }
-                Err(e) => Err(format!("Failed to deserialize the response: {}", e)),
+                Err(e) => Err(format!("Failed to deserialize the response: {:?}", e)),
             }
         } else {
             Err("Received empty response body.".to_string())
@@ -571,8 +567,6 @@ fn test_trim_endpoint_last_forward_slash() {
             },
         },
         priority: 0,
-        enabled: true,
-        health: None,
     };
     trim_endpoint_last_forward_slash(&mut server);