feat: add support for local application search (#125)

* chore: refactoring new search interface
* feat: add support for application search
124 src-tauri/Cargo.lock generated
@@ -352,6 +352,12 @@ dependencies = [
 "windows-targets 0.52.6",
]

[[package]]
name = "base64"
version = "0.13.1"
source = "registry+https://github.com/rust-lang/crates.io-index"
checksum = "9e1b586273c5702936fe7b7d6896644d8be71e6314cfe09d3167c95f712589e8"

[[package]]
name = "base64"
version = "0.21.7"
@@ -602,12 +608,18 @@ dependencies = [
name = "coco"
version = "0.1.0"
dependencies = [
 "async-trait",
 "base64 0.13.1",
 "dirs 5.0.1",
 "futures",
 "hostname",
 "lazy_static",
 "log",
 "notify",
 "once_cell",
 "ordered-float",
 "pizza-common",
 "plist",
 "reqwest",
 "serde",
 "serde_json",
@@ -623,6 +635,7 @@ dependencies = [
 "tauri-plugin-store",
 "tauri-plugin-theme",
 "tauri-plugin-websocket",
 "thiserror 1.0.64",
 "tokio",
]

@@ -1246,6 +1259,18 @@ dependencies = [
 "rustc_version",
]

[[package]]
name = "filetime"
version = "0.2.25"
source = "registry+https://github.com/rust-lang/crates.io-index"
checksum = "35c0522e981e68cbfa8c3f978441a5f34b30b96e146b33cd3359176b50fe8586"
dependencies = [
 "cfg-if",
 "libc",
 "libredox",
 "windows-sys 0.59.0",
]

[[package]]
name = "flate2"
version = "1.0.34"
@@ -1313,6 +1338,15 @@ dependencies = [
 "percent-encoding",
]

[[package]]
name = "fsevent-sys"
version = "4.1.0"
source = "registry+https://github.com/rust-lang/crates.io-index"
checksum = "76ee7a02da4d231650c7cea31349b889be2f45ddb3ef3032d2ec8185f6313fd2"
dependencies = [
 "libc",
]

[[package]]
name = "futf"
version = "0.1.5"
@@ -1824,6 +1858,17 @@ version = "0.4.3"
source = "registry+https://github.com/rust-lang/crates.io-index"
checksum = "7f24254aa9a54b5c858eaee2f5bccdb46aaf0e486a595ed5fd8f86ba55232a70"

[[package]]
name = "hostname"
version = "0.3.1"
source = "registry+https://github.com/rust-lang/crates.io-index"
checksum = "3c731c3e10504cc8ed35cfe2f1db4c9274c3d35fa486e3b31df46f068ef3e867"
dependencies = [
 "libc",
 "match_cfg",
 "winapi",
]

[[package]]
name = "html5ever"
version = "0.26.0"
@@ -2053,6 +2098,26 @@ dependencies = [
 "cfb",
]

[[package]]
name = "inotify"
version = "0.9.6"
source = "registry+https://github.com/rust-lang/crates.io-index"
checksum = "f8069d3ec154eb856955c1c0fbffefbf5f3c40a104ec912d4797314c1801abff"
dependencies = [
 "bitflags 1.3.2",
 "inotify-sys",
 "libc",
]

[[package]]
name = "inotify-sys"
version = "0.1.5"
source = "registry+https://github.com/rust-lang/crates.io-index"
checksum = "e05c02b5e89bff3b946cedeca278abc628fe811e604f027c45a8aa3cf793d0eb"
dependencies = [
 "libc",
]

[[package]]
name = "instant"
version = "0.1.13"
@@ -2197,6 +2262,26 @@ dependencies = [
 "unicode-segmentation",
]

[[package]]
name = "kqueue"
version = "1.0.8"
source = "registry+https://github.com/rust-lang/crates.io-index"
checksum = "7447f1ca1b7b563588a205fe93dea8df60fd981423a768bc1c0ded35ed147d0c"
dependencies = [
 "kqueue-sys",
 "libc",
]

[[package]]
name = "kqueue-sys"
version = "1.0.4"
source = "registry+https://github.com/rust-lang/crates.io-index"
checksum = "ed9625ffda8729b85e45cf04090035ac368927b8cebc34898e7c120f52e4838b"
dependencies = [
 "bitflags 1.3.2",
 "libc",
]

[[package]]
name = "kuchikiki"
version = "0.8.2"
@@ -2264,6 +2349,7 @@ checksum = "c0ff37bd590ca25063e35af745c343cb7a0271906fb7b37e4813e8f79f00268d"
dependencies = [
 "bitflags 2.6.0",
 "libc",
 "redox_syscall",
]

[[package]]
@@ -2323,6 +2409,12 @@ dependencies = [
 "tendril",
]

[[package]]
name = "match_cfg"
version = "0.1.0"
source = "registry+https://github.com/rust-lang/crates.io-index"
checksum = "ffbee8634e0d45d258acb448e7eaab3fce7a0a467395d4d9f228e3c1f01fb2e4"

[[package]]
name = "matches"
version = "0.1.10"
@@ -2369,6 +2461,18 @@ dependencies = [
 "simd-adler32",
]

[[package]]
name = "mio"
version = "0.8.11"
source = "registry+https://github.com/rust-lang/crates.io-index"
checksum = "a4a650543ca06a924e8b371db273b2756685faae30f8487da1b56505a8f78b0c"
dependencies = [
 "libc",
 "log",
 "wasi 0.11.0+wasi-snapshot-preview1",
 "windows-sys 0.48.0",
]

[[package]]
name = "mio"
version = "1.0.2"
@@ -2485,6 +2589,24 @@ version = "0.1.14"
source = "registry+https://github.com/rust-lang/crates.io-index"
checksum = "72ef4a56884ca558e5ddb05a1d1e7e1bfd9a68d9ed024c21704cc98872dae1bb"

[[package]]
name = "notify"
version = "5.2.0"
source = "registry+https://github.com/rust-lang/crates.io-index"
checksum = "729f63e1ca555a43fe3efa4f3efdf4801c479da85b432242a7b726f353c88486"
dependencies = [
 "bitflags 1.3.2",
 "crossbeam-channel",
 "filetime",
 "fsevent-sys",
 "inotify",
 "kqueue",
 "libc",
 "mio 0.8.11",
 "walkdir",
 "windows-sys 0.45.0",
]

[[package]]
name = "num-conv"
version = "0.1.0"
@@ -4792,7 +4914,7 @@ dependencies = [
 "backtrace",
 "bytes",
 "libc",
 "mio",
 "mio 1.0.2",
 "pin-project-lite",
 "socket2 0.5.7",
 "tokio-macros",

@@ -39,7 +39,13 @@ lazy_static = "1.5.0"
log = "0.4.22"
tokio = "1.40.0"
once_cell = "1.20.2"

notify = "5.0"
async-trait = "0.1.82"
thiserror = "1.0.64"
dirs = "5.0.1"
hostname = "0.3"
plist = "1.7"
base64 = "0.13"

[profile.dev]
incremental = true # Compile your binary in smaller steps.

@@ -1,34 +1,34 @@
use serde::{Deserialize, Serialize};
use std::collections::HashMap;

#[derive(Debug, Serialize, Deserialize)]
#[derive(Debug, Clone, Serialize, Deserialize)]
pub struct RichLabel {
    pub label: Option<String>,
    pub key: Option<String>,
    pub icon: Option<String>,
}

#[derive(Debug, Serialize, Deserialize)]
#[derive(Debug, Clone, Serialize, Deserialize)]
pub struct DataSourceReference {
    pub r#type: Option<String>,
    pub name: Option<String>,
    pub id: Option<String>,
}

#[derive(Debug, Serialize, Deserialize)]
#[derive(Debug, Clone, Serialize, Deserialize)]
pub struct UserInfo {
    pub avatar: Option<String>,
    pub username: Option<String>,
    pub userid: Option<String>,
}

#[derive(Debug, Serialize, Deserialize)]
#[derive(Debug, Clone, Serialize, Deserialize)]
pub struct EditorInfo {
    pub user: UserInfo,
    pub timestamp: Option<String>,
}

#[derive(Debug, Serialize, Deserialize)]
#[derive(Debug, Clone, Serialize, Deserialize)]
pub struct Document {
    pub id: String,
    pub created: Option<String>,
@@ -54,3 +54,32 @@ pub struct Document {
    pub owner: Option<UserInfo>,
    pub last_updated_by: Option<EditorInfo>,
}
impl Document {
    pub fn new(source: Option<DataSourceReference>, id: String, category: String, name: String, url: String) -> Self {
        Self {
            id,
            created: None,
            updated: None,
            source,
            r#type: None,
            category: Some(category),
            subcategory: None,
            categories: None,
            rich_categories: None,
            title: Some(name),
            summary: None,
            lang: None,
            content: None,
            icon: None,
            thumbnail: None,
            cover: None,
            tags: None,
            url: Some(url),
            size: None,
            metadata: None,
            payload: None,
            owner: None,
            last_updated_by: None,
        }
    }
}
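For orientation, a minimal sketch of calling the new constructor as a downstream caller might (the Safari values are purely illustrative, not part of this diff):

// Illustrative only: build a Document for a local application hit. Only the
// id/category/title/url slots are filled; everything else defaults to None,
// matching Document::new above.
use crate::common::document::{DataSourceReference, Document};

fn example_doc() -> Document {
    Document::new(
        Some(DataSourceReference {
            r#type: Some("Local".into()),
            name: Some("/Applications".into()),
            id: Some("Safari".into()),
        }),
        "/Applications/Safari.app".into(), // id: the full bundle path
        "Application".into(),              // category
        "Safari".into(),                   // name, stored as title
        "/Applications/Safari.app".into(), // url: path used to open the app
    )
}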

@@ -4,5 +4,7 @@ pub mod server;
pub mod auth;
pub mod datasource;
pub mod connector;
pub mod search_response;
pub mod search;
pub mod document;
pub mod traits;
pub mod register;

37 src-tauri/src/common/register.rs Normal file
@@ -0,0 +1,37 @@
use crate::common::traits::SearchSource;
use std::collections::HashMap;
use std::sync::Arc;
use tokio::sync::RwLock;

// Define a shared registry for search sources
#[derive(Default)]
pub struct SearchSourceRegistry {
    sources: RwLock<HashMap<String, Arc<dyn SearchSource>>>, // Store trait objects
}

impl SearchSourceRegistry {
    pub async fn register_source<T: SearchSource + 'static>(&self, source: T) {
        let mut sources = self.sources.write().await;
        let source_id = source.get_type().id.clone();
        sources.insert(source_id, Arc::new(source));
    }

    pub async fn clear(&self) {
        let mut sources = self.sources.write().await;
        sources.clear();
    }

    pub async fn remove_source(&self, id: String) {
        let mut sources = self.sources.write().await;
        sources.remove(id.as_str());
    }

    pub async fn get_source(&self, id: &str) -> Option<Arc<dyn SearchSource>> {
        let sources = self.sources.read().await;
        sources.get(id).cloned()
    }
    pub async fn get_sources(&self) -> Vec<Arc<dyn SearchSource>> {
        let sources = self.sources.read().await;
        sources.values().cloned().collect() // Returns Vec<Arc<dyn SearchSource>>
    }
}
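The registry keys each source by the id from get_type(), so registering twice under the same id replaces the earlier entry. A minimal sketch of driving it directly, assuming the ApplicationSearchSource type introduced later in this diff:

// Illustrative usage only; in the app this happens inside init() in lib.rs.
async fn registry_demo() {
    let registry = SearchSourceRegistry::default();

    let app_dirs = vec![std::path::PathBuf::from("/Applications")];
    let app_source = ApplicationSearchSource::new(1000f64, app_dirs);

    // Stored behind Arc<dyn SearchSource>, keyed by get_type().id.
    registry.register_source(app_source).await;

    for source in registry.get_sources().await {
        println!("registered source: {}", source.get_type().id);
    }
}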

@@ -1,8 +1,9 @@
use std::collections::HashMap;
use std::error::Error;
use reqwest::Response;
use serde::{Deserialize, Serialize};
use serde_json::Value;

use crate::common::document::Document;
#[derive(Debug, Serialize, Deserialize)]
pub struct SearchResponse<T> {
    pub took: u64,
@@ -37,12 +38,12 @@ pub struct SearchHit<T> {
    pub _index: String,
    pub _type: String,
    pub _id: String,
    pub _score: Option<f32>,
    pub _score: Option<f64>,
    pub _source: T, // This will hold the type we pass in (e.g., DataSource)
}
pub async fn parse_search_hits<T>(
pub async fn parse_search_response<T>(
    response: Response,
) -> Result<Vec<SearchHit<T>>, Box<dyn Error>>
) -> Result<SearchResponse<T>, Box<dyn Error>>
where
    T: for<'de> Deserialize<'de> + std::fmt::Debug,
{
@@ -54,7 +55,18 @@ where
    let search_response: SearchResponse<T> = serde_json::from_value(body)
        .map_err(|e| format!("Failed to deserialize search response: {}", e))?;

    Ok(search_response.hits.hits)
    Ok(search_response)
}

pub async fn parse_search_hits<T>(
    response: Response,
) -> Result<Vec<SearchHit<T>>, Box<dyn Error>>
where
    T: for<'de> Deserialize<'de> + std::fmt::Debug,
{
    let response = parse_search_response(response).await?;

    Ok(response.hits.hits)
}

pub async fn parse_search_results<T>(
@@ -68,9 +80,62 @@ where

pub async fn parse_search_results_with_score<T>(
    response: Response,
) -> Result<Vec<(T, Option<f32>)>, Box<dyn Error>>
) -> Result<Vec<(T, Option<f64>)>, Box<dyn Error>>
where
    T: for<'de> Deserialize<'de> + std::fmt::Debug,
{
    Ok(parse_search_hits(response).await?.into_iter().map(|hit| (hit._source, hit._score)).collect())
}

#[derive(Debug,Clone,Serialize)]
pub struct SearchQuery {
    pub from: u64,
    pub size: u64,
    pub query_strings: HashMap<String, String>,
}

impl SearchQuery {
    pub fn new(from: u64, size: u64, query_strings: HashMap<String, String>) -> Self {
        Self {
            from,
            size,
            query_strings,
        }
    }
}

#[derive(Debug,Clone, Serialize)]
pub struct QuerySource{
    pub r#type: String, //coco-server/local/ etc.
    pub id: String, //coco server's id
    pub name: String, //coco server's name, local computer name, etc.
}

#[derive(Debug,Clone, Serialize)]
pub struct QueryHits {
    pub source: Option<QuerySource>,
    pub document: Document,
}

#[derive(Debug,Clone, Serialize)]
pub struct FailedRequest{
    pub source: QuerySource,
    pub status: u16,
    pub error: Option<String>,
    pub reason: Option<String>,
}

#[derive(Debug,Clone, Serialize)]
pub struct QueryResponse {
    pub source: QuerySource,
    pub hits: Vec<(Document,f64)>,
    pub total_hits: usize,
}

#[derive(Debug,Clone, Serialize)]
pub struct MultiSourceQueryResponse {
    pub failed: Vec<FailedRequest>,
    pub hits: Vec<QueryHits>,
    pub total_hits: usize,
}
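These are the shared types every source speaks. A short sketch of building the query that each SearchSource receives — the "query" key and values are illustrative, mirroring what query_coco_fusion forwards:

// Illustrative only: construct the SearchQuery defined above.
use std::collections::HashMap;

fn build_query() -> SearchQuery {
    let mut query_strings = HashMap::new();
    query_strings.insert("query".to_string(), "safari".to_string());
    SearchQuery::new(0, 10, query_strings) // from = 0, size = 10
}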

48 src-tauri/src/common/traits.rs Normal file
@@ -0,0 +1,48 @@
use crate::common::search::{QueryResponse, QuerySource};
use thiserror::Error;

use async_trait::async_trait;
use std::{future::Future, pin::Pin};
use serde::Serialize;
use crate::common::search::SearchQuery;

#[async_trait]
pub trait SearchSource: Send + Sync {
    fn get_type (&self) -> QuerySource;

    async fn search(
        &self,
        query: SearchQuery,
    ) -> Result<QueryResponse, SearchError>;
}


#[derive(Debug, Error,Serialize)]
pub enum SearchError {
    #[error("HTTP request failed: {0}")]
    HttpError(String),

    #[error("Invalid response format: {0}")]
    ParseError(String),

    #[error("Timeout occurred")]
    Timeout,

    #[error("Unknown error: {0}")]
    Unknown(String),

    #[error("InternalError error: {0}")]
    InternalError(String),
}

impl From<reqwest::Error> for SearchError {
    fn from(err: reqwest::Error) -> Self {
        if err.is_timeout() {
            SearchError::Timeout
        } else if err.is_decode() {
            SearchError::ParseError(err.to_string())
        } else {
            SearchError::HttpError(err.to_string())
        }
    }
}
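Any backend becomes searchable by implementing this trait. A minimal sketch of a source that returns no hits — the StaticSource name is made up purely for illustration:

// Hypothetical no-op implementation, just to show the trait surface.
use async_trait::async_trait;

struct StaticSource;

#[async_trait]
impl SearchSource for StaticSource {
    fn get_type(&self) -> QuerySource {
        QuerySource {
            r#type: "local".into(),
            id: "static_example".into(),
            name: "Static Example".into(),
        }
    }

    async fn search(&self, _query: SearchQuery) -> Result<QueryResponse, SearchError> {
        Ok(QueryResponse {
            source: self.get_type(),
            hits: Vec::new(),
            total_hits: 0,
        })
    }
}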

@@ -3,15 +3,23 @@ mod common;
mod server;
mod shortcut;
mod util;
mod local;
mod search;

use crate::common::register::SearchSourceRegistry;
use crate::common::traits::SearchSource;
use crate::server::search::CocoSearchSource;
use crate::server::servers::{load_or_insert_default_server, load_servers_token};
use autostart::{change_autostart, enable_autostart};
use reqwest::Client;
use std::path::PathBuf;
#[cfg(target_os = "macos")]
use tauri::ActivationPolicy;
use tauri::{AppHandle, Emitter, Listener, Manager, Runtime, WebviewWindow};
use tauri_plugin_autostart::MacosLauncher;
use tauri_plugin_deep_link::DeepLinkExt;
use tokio::runtime::Runtime as RT; // Add this import
use tokio::runtime::Runtime as RT;
// Add this import
// Add this import

/// Tauri store name
@@ -98,7 +106,7 @@ pub fn run() {
            server::profile::get_user_profiles,
            server::datasource::get_datasources_by_server,
            server::connector::get_connectors_by_server,
            server::search::query_coco_servers,
            search::query_coco_fusion,
            // server::get_coco_server_health_info,
            // server::get_coco_servers_health_info,
            // server::get_user_profiles,
@@ -106,7 +114,9 @@ pub fn run() {
            // server::get_coco_server_connectors
        ])
        .setup(|app| {
            let registry = SearchSourceRegistry::default();

            app.manage(registry); // Store registry in Tauri's app state

            // Get app handle
            let app_handle = app.handle().clone();
@@ -122,7 +132,6 @@ pub fn run() {
            });



            shortcut::enable_shortcut(app);
            enable_tray(app);
            enable_autostart(app);
@@ -162,6 +171,29 @@ pub async fn init<R: Runtime>(app_handle: &AppHandle<R>) {
        eprintln!("Failed to load server tokens: {}", err);
    }

    let coco_servers = server::servers::get_all_servers();

    // Get the registry from Tauri's state
    let registry = app_handle.state::<SearchSourceRegistry>();

    for server in coco_servers {
        let source = CocoSearchSource::new(server.clone(), Client::new());
        registry.register_source(source).await;
    }


    let dir = vec![
        dirs::home_dir().map(|home| home.join("Applications")), // Resolve `~/Applications`
        Some(PathBuf::from("/Applications")),
        Some(PathBuf::from("/System/Applications")),
        Some(PathBuf::from("/System/Applications/Utilities")),
    ];

    // Remove any `None` values if `home_dir()` fails
    let app_dirs: Vec<PathBuf> = dir.into_iter().flatten().collect();
    let application_search = local::application::ApplicationSearchSource::new(1000f64, app_dirs);
    registry.register_source(application_search).await;

    dbg!("Initialization completed");

    // let window: WebviewWindow = app_handle.get_webview_window("main").unwrap();

315 src-tauri/src/local/application.rs Normal file
@@ -0,0 +1,315 @@
use crate::common::document::{DataSourceReference, Document};
use crate::common::search::{QueryResponse, QuerySource, SearchQuery};
use crate::common::traits::{SearchError, SearchSource};
use async_trait::async_trait;
use base64::encode;
use dirs::data_dir;
use hostname;
use plist::Value;
use std::collections::HashMap;
use std::fs;
use std::path::{Path, PathBuf};
use std::process::{Command, Stdio};

#[derive(Clone)]
pub struct ApplicationSearchSource {
    base_score: f64,
    app_dirs: Vec<PathBuf>,
    icons: HashMap<String, PathBuf>, // Map app names to their icon paths
}

/// Extracts the app icon from the `.app` bundle or system icons and converts it to PNG format.
fn extract_icon_from_app_bundle(app_dir: &Path, app_data_folder: &Path) -> Option<PathBuf> {
    // First, check if the icon is specified in the info.plist (e.g., CFBundleIconFile)
    if let Some(icon_name) = get_icon_name_from_info_plist(app_dir) {
        let icns_path = app_dir.join(format!("Contents/Resources/{}", icon_name));

        if icns_path.exists() {
            if let Some(output_path) = convert_icns_to_png(&app_dir, &icns_path, app_data_folder) {
                return Some(output_path);
            }
        } else {
            if !icon_name.ends_with(".icns") {
                // If the icon name doesn't end with .icns, try appending it
                let icns_path = app_dir.join(format!("Contents/Resources/{}.icns", icon_name));
                if icns_path.exists() {
                    if let Some(output_path) = convert_icns_to_png(&app_dir, &icns_path, app_data_folder) {
                        return Some(output_path);
                    }
                }
            }
        }
    }

    // Attempt to get the ICNS file from the app bundle (Contents/Resources/AppIcon.icns)
    if let Some(icon_path) = get_icns_from_app_bundle(app_dir) {
        if let Some(output_path) = convert_icns_to_png(&app_dir, &icon_path, app_data_folder) {
            return Some(output_path);
        }
    }

    // Fallback: Check for PNG icon in the Resources folder
    if let Some(png_icon_path) = get_png_from_resources(app_dir) {
        if let Some(output_path) = convert_png_to_png(&png_icon_path, app_data_folder) {
            return Some(output_path);
        }
    }

    // Fallback: If no icon found, return a default system icon
    if let Some(system_icon_path) = get_system_icon(app_dir) {
        return Some(system_icon_path);
    }

    None
}

/// Reads the info.plist and extracts the icon file name if specified (CFBundleIconFile).
fn get_icon_name_from_info_plist(app_dir: &Path) -> Option<String> {
    let plist_path = app_dir.join("Contents/Info.plist");

    if plist_path.exists() {
        // Use `Value::from_file` directly, which parses the plist into a `Value` type
        if let Ok(plist_value) = Value::from_file(plist_path) {
            // Check if the plist value is a dictionary
            if let Some(icon_value) = plist_value.as_dictionary() {
                // Look for the CFBundleIconFile key in the dictionary
                if let Some(icon_file) = icon_value.get("CFBundleIconFile") {
                    // Ensure the value is a string and return it
                    if let Some(icon_name) = icon_file.as_string() {
                        return Some(icon_name.to_string());
                    }
                }
            }
        }
    }
    None
}

/// Tries to get the ICNS icon from the `.app` bundle.
fn get_icns_from_app_bundle(app_dir: &Path) -> Option<PathBuf> {
    let icns_path = app_dir.join("Contents/Resources/AppIcon.icns");
    if icns_path.exists() {
        Some(icns_path)
    } else {
        None
    }
}

/// Tries to get a PNG icon from the `.app` bundle's Resources folder.
fn get_png_from_resources(app_dir: &Path) -> Option<PathBuf> {
    let png_path = app_dir.join("Contents/Resources/Icon.png");
    if png_path.exists() {
        Some(png_path)
    } else {
        None
    }
}

/// Converts an ICNS file to PNG using macOS's `sips` command.
fn convert_icns_to_png(app_dir: &Path, icns_path: &Path, app_data_folder: &Path) -> Option<PathBuf> {
    if let Some(app_name) = app_dir.file_name().and_then(|name| name.to_str()) {
        let icon_storage_dir = app_data_folder.join("coco-appIcons");
        fs::create_dir_all(&icon_storage_dir).ok();

        // dbg!("app_name:", &app_name);

        let output_png_path = icon_storage_dir.join(format!("{}.png", app_name));

        // dbg!("Converting ICNS to PNG:", &output_png_path);

        // Run the `sips` command to convert the ICNS to PNG
        let status = Command::new("sips")
            .arg("-s")
            .arg("format")
            .arg("png")
            .arg(icns_path)
            .arg("--out")
            .arg(&output_png_path)
            .stdout(Stdio::null()) // Redirect stdout to null
            .stderr(Stdio::null()) // Redirect stderr to null
            .status();

        if let Ok(status) = status {
            if status.success() {
                return Some(output_png_path);
            }
        }
    }
    None
}

/// Converts a PNG file to PNG (essentially just copying it to a new location).
fn convert_png_to_png(png_path: &Path, app_data_folder: &Path) -> Option<PathBuf> {
    if let Some(app_name) = png_path.parent().and_then(|p| p.file_name()).and_then(|name| name.to_str()) {
        let icon_storage_dir = app_data_folder.join("coco-appIcons");
        fs::create_dir_all(&icon_storage_dir).ok();

        let output_png_path = icon_storage_dir.join(format!("{}.png", app_name));

        // Copy the PNG file to the output directory
        if let Err(e) = fs::copy(png_path, &output_png_path) {
            return None;
        }

        return Some(output_png_path);
    }
    None
}

/// Fallback function to fetch a system icon if the app doesn't have its own.
fn get_system_icon(app_dir: &Path) -> Option<PathBuf> {
    // Just a placeholder for getting a default icon if no app-specific icon is found
    let default_icon_path = Path::new("/System/Library/CoreServices/CoreTypes.bundle/Contents/Resources/GenericApplicationIcon.icns");

    if default_icon_path.exists() {
        Some(default_icon_path.to_path_buf())
    } else {
        None
    }
}


impl ApplicationSearchSource {
    pub fn new(base_score: f64, app_dirs: Vec<PathBuf>) -> Self {
        let mut icons = HashMap::new();

        // Iterate over the directories to find .app files and extract icons
        for app_dir in &app_dirs {
            if let Ok(entries) = fs::read_dir(app_dir) {
                for entry in entries.filter_map(Result::ok) {
                    let file_path = entry.path();
                    // Only process .app directories
                    if file_path.is_dir() && file_path.extension() == Some("app".as_ref()) {
                        if let Some(app_data_folder) = data_dir() {
                            if let Some(icon_path) = extract_icon_from_app_bundle(&file_path, &app_data_folder) {
                                // dbg!("Icon path:", &file_path, &icon_path);
                                if let Some(app_name) = file_path.file_name().and_then(|name| name.to_str()) {
                                    // dbg!("Save Icon path:", &file_path, &icon_path);
                                    icons.insert(file_path.to_string_lossy().to_string(), icon_path);
                                }
                            } else {
                                // dbg!("Icon not found for:");
                                // dbg!("Icon not found for:", &file_path);
                            }
                        }
                    }
                }
            }
        }

        ApplicationSearchSource {
            base_score,
            app_dirs,
            icons,
        }
    }
}


/// Extracts the clean app name by removing `.app`
fn clean_app_name(path: &Path) -> Option<String> {
    path.file_name()?
        .to_str()
        .map(|name| name.trim_end_matches(".app").to_string())
}

#[async_trait]
impl SearchSource for ApplicationSearchSource {
    fn get_type(&self) -> QuerySource {
        QuerySource {
            r#type: "Local".into(),
            name: hostname::get().unwrap_or("My Computer".into()).to_string_lossy().into(),
            id: "local_app_1".into(),
        }
    }

    // Implement the search method to return a Future
    async fn search(
        &self,
        query: SearchQuery,
    ) -> Result<QueryResponse, SearchError> {
        let mut total_hits = 0;
        let mut hits: Vec<(Document, f64)> = Vec::new();

        // Extract query string from query
        let query_string = query.query_strings.get("query").unwrap_or(&"".to_string()).to_lowercase().clone();

        // If query string is empty, return default response
        if query_string.is_empty() {
            return Ok(QueryResponse {
                source: self.get_type(),
                hits,
                total_hits,
            });
        }

        // Iterate over app directories asynchronously
        for app_dir in &self.app_dirs {
            if let Ok(entries) = fs::read_dir(app_dir) {
                // Use async iterator to process entries
                for entry in entries.filter_map(Result::ok) {
                    let full_path = entry.path().to_string_lossy().to_string();
                    let file_name_str = clean_app_name(&entry.path()).unwrap();

                    if file_name_str.starts_with('.') || !full_path.ends_with(".app") {
                        // dbg!("Skipping:", &file_name_str);
                        continue;
                    }

                    // Check if the file name contains the query string
                    if file_name_str.to_lowercase().contains(&query_string) {
                        total_hits += 1;
                        let path = entry.path().to_string_lossy().to_string();

                        let mut doc = Document::new(
                            Some(DataSourceReference {
                                r#type: Some("Local".into()),
                                name: Some(app_dir.to_string_lossy().to_string().into()),
                                id: Some(file_name_str.clone()), // Using the app name as ID
                            }),
                            path.clone(),
                            "Application".to_string(),
                            file_name_str.clone(),
                            path.clone(),
                        );
                        match self.icons.get(&path) {
                            Some(icon_path) => {
                                // dbg!("Icon path:", &path, &icon_path);
                                if let Ok(icon_data) = read_icon_and_encode(icon_path) {
                                    // Update doc.icon with the base64 encoded icon data
                                    doc.icon = Some(format!("data:image/png;base64,{}", icon_data));
                                    // dbg!("doc:",&doc.clone());
                                } else {
                                    dbg!("Failed to read or encode icon:", &icon_path);
                                }
                            }
                            None => {
                                // Log a warning if the icon path is not found for the given path
                                dbg!("Icon not found for:", &path);
                            }
                        };

                        // dbg!("Found hit:", &file_name_str);
                        hits.push((doc, self.base_score));
                    }
                }
            }
        }

        // Return the results in the QueryResponse format
        Ok(QueryResponse {
            source: self.get_type(),
            hits,
            total_hits,
        })
    }
}

// Function to read the icon file and convert it to base64
fn read_icon_and_encode(icon_path: &Path) -> Result<String, std::io::Error> {
    // Read the icon file as binary data
    let icon_data = fs::read(icon_path)?;

    // Encode the data to base64
    Ok(encode(&icon_data))
}
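Taken together, this source scans the configured folders at query time and decorates each substring match with a pre-extracted icon. A rough sketch of driving it directly in a tokio context, outside Tauri state — the directory and query values are illustrative:

// Illustrative only; mirrors what init() in lib.rs does via the registry.
use std::collections::HashMap;
use std::path::PathBuf;

async fn search_apps_demo() -> Result<(), SearchError> {
    let dirs = vec![PathBuf::from("/Applications")];
    let source = ApplicationSearchSource::new(1000f64, dirs);

    let mut query_strings = HashMap::new();
    query_strings.insert("query".into(), "safari".into());

    let response = source.search(SearchQuery::new(0, 10, query_strings)).await?;
    for (doc, score) in &response.hits {
        println!("{:?} (score {})", doc.title, score);
    }
    Ok(())
}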

0 src-tauri/src/local/file_system.rs Normal file
2 src-tauri/src/local/mod.rs Normal file
@@ -0,0 +1,2 @@
pub mod application;
pub mod file_system;

85 src-tauri/src/search/mod.rs Normal file
@@ -0,0 +1,85 @@
use crate::common::register::SearchSourceRegistry;
use crate::common::search::{FailedRequest, MultiSourceQueryResponse, QuerySource, SearchQuery};
use crate::common::traits::{SearchError, SearchSource};
use futures::stream::FuturesUnordered;
use futures::StreamExt;
use std::collections::HashMap;
use tauri::{AppHandle, Manager, Runtime};

#[tauri::command]
pub async fn query_coco_fusion<R: Runtime>(
    app_handle: AppHandle<R>,
    from: u64,
    size: u64,
    query_strings: HashMap<String, String>,
) -> Result<MultiSourceQueryResponse, SearchError> {
    let search_sources = app_handle.state::<SearchSourceRegistry>();

    let sources_future = search_sources.get_sources(); // Don't await yet
    let mut futures = FuturesUnordered::new();
    let mut sources = HashMap::new();

    let sources_list = sources_future.await; // Now we await

    for query_source in sources_list {
        let query_source_type = query_source.get_type().clone();
        sources.insert(query_source_type.id.clone(), query_source_type);

        let query = SearchQuery::new(from, size, query_strings.clone());
        let query_source_clone = query_source.clone(); // Clone Arc to avoid ownership issues

        futures.push(tokio::spawn(async move {
            query_source_clone.search(query).await
        }));
    }

    let mut docs_collector = crate::server::search::DocumentsSizedCollector::new(size);
    let mut total_hits = 0;
    let mut failed_requests = Vec::new();

    while let Some(result) = futures.next().await {
        match result {
            Ok(Ok(response)) => {
                total_hits += response.total_hits;
                for (doc, score) in response.hits {
                    // dbg!("Found hit:", &doc.title, &score);
                    docs_collector.push(response.source.id.clone(), doc, score);
                }
            }
            Ok(Err(err)) => {
                failed_requests.push(FailedRequest {
                    source: QuerySource {
                        r#type: "N/A".into(),
                        name: "N/A".into(),
                        id: "N/A".into(),
                    },
                    status: 0,
                    error: Some(err.to_string()),
                    reason: None,
                });
            }
            Err(_) => {
                failed_requests.push(FailedRequest {
                    source: QuerySource {
                        r#type: "N/A".into(),
                        name: "N/A".into(),
                        id: "N/A".into(),
                    },
                    status: 0,
                    error: Some("Task panicked".to_string()),
                    reason: None,
                });
            }
        }
    }

    let all_hits = docs_collector.documents_with_sources(&sources);

    // dbg!(&all_hits);

    Ok(MultiSourceQueryResponse {
        failed: failed_requests,
        hits: all_hits,
        total_hits,
    })
}
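The command fans every registered source out onto its own tokio task and folds results as they finish. The same pattern in isolation, stripped of Tauri state — a sketch with a toy async job standing in for a SearchSource:

// Minimal reproduction of the fan-out/merge used above, with toy tasks.
use futures::stream::{FuturesUnordered, StreamExt};

async fn fan_out_demo() {
    let mut futures = FuturesUnordered::new();
    for id in ["a", "b", "c"] {
        futures.push(tokio::spawn(async move {
            // A real source would run its search here.
            Ok::<_, String>((id, 1usize))
        }));
    }

    // Results arrive in completion order, not submission order.
    while let Some(joined) = futures.next().await {
        match joined {
            Ok(Ok((id, hits))) => println!("source {id}: {hits} hit(s)"),
            Ok(Err(e)) => eprintln!("source failed: {e}"), // search error
            Err(_) => eprintln!("task panicked"),          // join error
        }
    }
}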

@@ -1,12 +1,12 @@
use std::fmt::format;
use reqwest::StatusCode;
use tauri::{AppHandle, Runtime};
use crate::common::auth::RequestAccessTokenResponse;
use crate::common::server::{Server, ServerAccessToken};
use crate::common::register::SearchSourceRegistry;
use crate::common::server::ServerAccessToken;
use crate::server::http_client::HttpClient;
use crate::server::profile::get_user_profiles;
use crate::server::search::CocoSearchSource;
use crate::server::servers::{get_server_by_id, persist_servers, persist_servers_token, save_access_token, save_server};
use crate::util;
use reqwest::{Client, StatusCode};
use tauri::{AppHandle, Manager, Runtime};
fn request_access_token_url(request_id: &str) -> String {
    // Remove the endpoint part and keep just the path for the request
    format!("/auth/request_access_token?request_id={}", request_id)
@@ -51,6 +51,10 @@ pub async fn handle_sso_callback<R: Runtime>(
    save_access_token(server_id.clone(), access_token);
    persist_servers_token(&app_handle)?;

    let registry = app_handle.state::<SearchSourceRegistry>();
    let source = CocoSearchSource::new(server.clone(), Client::new());
    registry.register_source(source).await;

    // Update the server's profile using the util::http::HttpClient::get method
    let profile = get_user_profiles(app_handle.clone(), server_id.clone()).await;
    dbg!(&profile);
@@ -60,7 +64,7 @@ pub async fn handle_sso_callback<R: Runtime>(
            server.profile = Some(p);
            server.available = true;
            save_server(&server);
            persist_servers(&app_handle)?;
            persist_servers(&app_handle).await?;
            Ok(())
        }
        Err(e) => Err(format!("Failed to get user profile: {}", e)),

@@ -1,7 +1,7 @@
use crate::common::connector::Connector;
use crate::common::search_response::parse_search_results;
use crate::common::search::parse_search_results;
use crate::server::http_client::HttpClient;
use crate::server::servers::{get_all_servers, list_coco_servers};
use crate::server::servers::get_all_servers;
use lazy_static::lazy_static;
use std::collections::HashMap;
use std::sync::{Arc, RwLock};
@@ -30,14 +30,14 @@ pub fn get_connector_by_id(server_id: &str, connector_id: &str) -> Option<Connector> {
pub async fn refresh_all_connectors<R: Runtime>(
    app_handle: &AppHandle<R>,
) -> Result<(), String> {
    dbg!("Attempting to refresh all connectors");
    // dbg!("Attempting to refresh all connectors");

    let servers = get_all_servers();

    // Collect all the tasks for fetching and refreshing connectors
    let mut serverMap = HashMap::new();
    let mut server_map = HashMap::new();
    for server in servers {
        dbg!("start fetch connectors for server: {}", &server.id);
        // dbg!("start fetch connectors for server: {}", &server.id);
        let connectors = match get_connectors_by_server(app_handle.clone(), server.id.clone()).await {
            Ok(connectors) => {
                let connectors_map: HashMap<String, Connector> = connectors
@@ -47,13 +47,13 @@ pub async fn refresh_all_connectors<R: Runtime>(
                connectors_map
            }
            Err(e) => {
                dbg!("Failed to get connectors for server {}: {}", &server.id, e);
                // dbg!("Failed to get connectors for server {}: {}", &server.id, e);
                HashMap::new() // Return empty map on failure
            }
        };

        serverMap.insert(server.id.clone(), connectors);
        dbg!("end fetch connectors for server: {}", &server.id);
        server_map.insert(server.id.clone(), connectors);
        // dbg!("end fetch connectors for server: {}", &server.id);
    }

    // After all tasks have finished, perform a read operation on the cache
@@ -61,12 +61,12 @@ pub async fn refresh_all_connectors<R: Runtime>(
        // Insert connectors into the cache (async write lock)
        let mut cache = CONNECTOR_CACHE.write().unwrap(); // Async write lock
        cache.clear();
        cache.extend(serverMap);
        cache.extend(server_map);
        // let cache = CONNECTOR_CACHE.read().await; // Async read lock
        cache.len()
    };

    dbg!("finished refresh connectors: {:?}", cache_size);
    // dbg!("finished refresh connectors: {:?}", cache_size);

    Ok(())
}
@@ -98,19 +98,19 @@ pub async fn get_connectors_from_cache_or_remote(server_id: &str) -> Result<Vec<Connector>, String> {
}

pub async fn fetch_connectors_by_server(id: &str) -> Result<Vec<Connector>, String> {
    dbg!("start get_connectors_by_server: id =", &id);
    // dbg!("start get_connectors_by_server: id =", &id);

    // Use the generic GET method from HttpClient
    let resp = HttpClient::get(&id, "/connector/_search")
        .await
        .map_err(|e| {
            dbg!("Error fetching connector for id {}: {}", &id, &e);
            // dbg!("Error fetching connector for id {}: {}", &id, &e);
            format!("Error fetching connector: {}", e)
        })?;

    // Log the raw response status and headers
    dbg!("Response status: {:?}", resp.status());
    dbg!("Response headers: {:?}", resp.headers());
    // // Log the raw response status and headers
    // dbg!("Response status: {:?}", resp.status());
    // dbg!("Response headers: {:?}", resp.headers());

    // Ensure the response body is not empty or invalid
    if resp.status().is_success() {
@@ -121,7 +121,7 @@ pub async fn fetch_connectors_by_server(id: &str) -> Result<Vec<Connector>, String> {

        // Parse the search results directly from the response body
        let datasources: Vec<Connector> = parse_search_results(resp).await.map_err(|e| {
            dbg!("Error parsing search results for id {}: {}", &id, &e);
            // dbg!("Error parsing search results for id {}: {}", &id, &e);
            e.to_string()
        })?;

@@ -131,7 +131,7 @@ pub async fn fetch_connectors_by_server(id: &str) -> Result<Vec<Connector>, String> {
        // Save the connectors to the cache
        save_connectors_to_cache(&id, datasources.clone());

        dbg!("end get_connectors_by_server: id =", &id);
        // dbg!("end get_connectors_by_server: id =", &id);
        return Ok(datasources);
    }

@@ -1,13 +1,12 @@
use crate::common::datasource::DataSource;
use crate::common::search_response::parse_search_results;
use crate::server::connector::{fetch_connectors_by_server, get_connector_by_id, get_connectors_by_server, get_connectors_from_cache_or_remote};
use crate::common::search::parse_search_results;
use crate::server::connector::get_connector_by_id;
use crate::server::http_client::HttpClient;
use crate::server::servers::{get_all_servers, list_coco_servers};
use crate::server::servers::get_all_servers;
use lazy_static::lazy_static;
use std::collections::HashMap;
use std::sync::{Arc, RwLock};
use tauri::{AppHandle, Runtime};
use crate::common::connector::Connector;

lazy_static! {
    static ref DATASOURCE_CACHE: Arc<RwLock<HashMap<String,HashMap<String,DataSource>>>> = Arc::new(RwLock::new(HashMap::new()));
@@ -26,7 +25,7 @@ pub fn save_datasource_to_cache(server_id: &str, datasources: Vec<DataSource>) {

pub fn get_datasources_from_cache(server_id: &str) -> Option<HashMap<String, DataSource>> {
    let cache = DATASOURCE_CACHE.read().unwrap(); // Acquire read lock
    dbg!("cache: {:?}", &cache);
    // dbg!("cache: {:?}", &cache);
    let server_cache = cache.get(server_id)?; // Get the server's cache
    Some(server_cache.clone())
}
@@ -34,14 +33,14 @@ pub fn get_datasources_from_cache(server_id: &str) -> Option<HashMap<String, DataSource>> {
pub async fn refresh_all_datasources<R: Runtime>(
    app_handle: &AppHandle<R>,
) -> Result<(), String> {
    dbg!("Attempting to refresh all datasources");
    // dbg!("Attempting to refresh all datasources");

    let servers = get_all_servers();

    let mut serverMap = HashMap::new();
    let mut server_map = HashMap::new();

    for server in servers {
        dbg!("fetch datasources for server: {}", &server.id);
        // dbg!("fetch datasources for server: {}", &server.id);

        // Attempt to get datasources by server, and continue even if it fails
        let mut connectors = match get_datasources_by_server(app_handle.clone(), server.id.clone()).await {
@@ -53,38 +52,38 @@ pub async fn refresh_all_datasources<R: Runtime>(
                        (connector.id.clone(), connector)
                    })
                    .collect();
                dbg!("connectors_map: {:?}", &connectors_map);
                // dbg!("connectors_map: {:?}", &connectors_map);
                connectors_map
            }
            Err(e) => {
                dbg!("Failed to get dataSources for server {}: {}", &server.id, e);
                // dbg!("Failed to get dataSources for server {}: {}", &server.id, e);
                HashMap::new()
            }
        };

        let mut new_map = HashMap::new();
        for (id, mut datasource) in connectors.iter() {
            dbg!("connector: {:?}", &datasource);
            // dbg!("connector: {:?}", &datasource);
            if let Some(existing_connector) = get_connector_by_id(&server.id, &datasource.id) {
                // If found in cache, update the connector's info
                dbg!("Found connector in cache for {}: {:?}", &datasource.id, &existing_connector);
                // dbg!("Found connector in cache for {}: {:?}", &datasource.id, &existing_connector);
                let mut obj = datasource.clone();
                obj.connector_info = Some(existing_connector);
                new_map.insert(id.clone(), obj);
            }
        }

        serverMap.insert(server.id.clone(), new_map);
        server_map.insert(server.id.clone(), new_map);
    }

    // Perform a read operation after all writes are done
    let cache_size = {
        let mut cache = DATASOURCE_CACHE.write().unwrap();
        cache.clear();
        cache.extend(serverMap);
        cache.extend(server_map);
        cache.len()
    };
    dbg!("datasource_map size: {:?}", cache_size);
    // dbg!("datasource_map size: {:?}", cache_size);

    Ok(())
}
@@ -94,19 +93,19 @@ pub async fn get_datasources_by_server<R: Runtime>(
    app_handle: AppHandle<R>,
    id: String,
) -> Result<Vec<DataSource>, String> {
    dbg!("get_datasources_by_server: id = {}", &id);
    // dbg!("get_datasources_by_server: id = {}", &id);

    // Perform the async HTTP request outside the cache lock
    let resp = HttpClient::get(&id, "/datasource/_search")
        .await
        .map_err(|e| {
            dbg!("Error fetching datasource: {}", &e);
            // dbg!("Error fetching datasource: {}", &e);
            format!("Error fetching datasource: {}", e)
        })?;

    // Parse the search results from the response
    let mut datasources: Vec<DataSource> = parse_search_results(resp).await.map_err(|e| {
        dbg!("Error parsing search results: {}", &e);
        // dbg!("Error parsing search results: {}", &e);
        e.to_string()
    })?;

@@ -129,7 +128,7 @@ pub async fn get_datasources_by_server<R: Runtime>(
    //     }
    // }

    dbg!("Parsed datasources: {:?}", &datasources);
    // dbg!("Parsed datasources: {:?}", &datasources);

    // Save the updated datasources to cache
    save_datasource_to_cache(&id, datasources.clone());

@@ -1,19 +1,17 @@
use crate::common::document::Document;
use crate::common::server::Server;
use crate::common::traits::{SearchError, SearchSource};
use crate::server::http_client::HttpClient;
use crate::server::servers::get_server_token;
use async_trait::async_trait;
use futures::stream::StreamExt;
use ordered_float::OrderedFloat;
use reqwest::{Client, Method, RequestBuilder};
use std::collections::HashMap;
use std::hash::Hash;
use ordered_float::OrderedFloat;
use reqwest::Method;
use serde::Serialize;
use tauri::{AppHandle, Runtime};
use serde_json::Map as JsonMap;
use serde_json::Value as Json;
use crate::server::http_client::{HttpClient, HTTP_CLIENT};
use crate::server::servers::{get_all_servers, get_server_token, get_servers_as_hashmap};
use futures::stream::{FuturesUnordered, StreamExt};
use crate::common::document::Document;
use crate::common::search_response::parse_search_results_with_score;
use crate::common::server::Server;

struct DocumentsSizedCollector {
use std::pin::Pin;
use crate::common::search::{parse_search_response, QueryHits, QueryResponse, QuerySource, SearchQuery};
pub(crate) struct DocumentsSizedCollector {
    size: u64,
    /// Documents and scores
    ///
@@ -22,21 +20,21 @@ struct DocumentsSizedCollector {
}

impl DocumentsSizedCollector {
    fn new(size: u64) -> Self {
    pub(crate) fn new(size: u64) -> Self {
        // there will be size + 1 documents in docs at max
        let docs = Vec::with_capacity((size + 1) as usize);

        Self { size, docs }
    }

    fn push(&mut self, server_id: String, item: Document, score: f64) {
    pub(crate) fn push(&mut self, source: String, item: Document, score: f64) {
        let score = OrderedFloat(score);
        let insert_idx = match self.docs.binary_search_by(|(_, _, s)| score.cmp(s)) {
            Ok(idx) => idx,
            Err(idx) => idx,
        };

        self.docs.insert(insert_idx, (server_id, item, score));
        self.docs.insert(insert_idx, (source, item, score));

        // Ensure we do not exceed `size`
        if self.docs.len() as u64 > self.size {
@@ -49,16 +47,14 @@ impl DocumentsSizedCollector {
    }

    // New function to return documents grouped by server_id
    fn documents_by_server_id(self, x: &HashMap<String, Server>) -> Vec<QueryHits> {
    pub(crate) fn documents_with_sources(self, x: &HashMap<String, QuerySource>) -> Vec<QueryHits> {
        let mut grouped_docs: Vec<QueryHits> = Vec::new();

        for (server_id, doc, _) in self.docs.into_iter() {
            let source= QuerySource {
                r#type: Some("coco-server".to_string()),
                name: Some(x.get(&server_id).map(|s| s.name.clone()).unwrap_or_default()),
                id: Some(server_id.clone()),
            };
        for (source_id, doc, _) in self.docs.into_iter() {
            // Try to get the source from the hashmap
            let source = x.get(&source_id).cloned();

            // Push the document and source into the result
            grouped_docs.push(QueryHits {
                source,
                document: doc,
@@ -69,148 +65,101 @@ impl DocumentsSizedCollector {
    }
}
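The collector keeps hits sorted by descending score and bounded by the requested size, so merging many sources stays cheap. A toy equivalent of the push logic shown above, using plain (String, score) pairs in place of Documents — illustrative, not from this diff:

// Toy version of DocumentsSizedCollector::push: binary-search insert on a
// descending score order, then truncate to `size`.
use ordered_float::OrderedFloat;

struct SizedCollector {
    size: usize,
    docs: Vec<(String, OrderedFloat<f64>)>,
}

impl SizedCollector {
    fn push(&mut self, id: String, score: f64) {
        let score = OrderedFloat(score);
        // Comparison is reversed so higher scores sort first, as in the diff.
        let idx = match self.docs.binary_search_by(|(_, s)| score.cmp(s)) {
            Ok(idx) | Err(idx) => idx,
        };
        self.docs.insert(idx, (id, score));
        self.docs.truncate(self.size);
    }
}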
|
||||
|
||||
#[derive(Debug, Serialize)]
|
||||
pub struct QuerySource{
|
||||
pub r#type: Option<String>, //coco-server/local/ etc.
|
||||
pub name: Option<String>, //coco server's name, local computer name, etc.
|
||||
pub id: Option<String>, //coco server's id
|
||||
const COCO_SERVERS: &str = "coco-servers";
|
||||
|
||||
pub struct CocoSearchSource {
|
||||
server: Server,
|
||||
client: Client,
|
||||
}
|
||||
|
||||
#[derive(Debug, Serialize)]
|
||||
pub struct QueryHits {
|
||||
pub source: QuerySource,
|
||||
pub document: Document,
|
||||
impl CocoSearchSource {
|
||||
pub fn new(server: Server, client: Client) -> Self {
|
||||
CocoSearchSource { server, client }
|
||||
}
|
||||
|
||||
#[derive(Debug, Serialize)]
|
||||
pub struct FailedRequest{
|
||||
pub source: QuerySource,
|
||||
pub status: u16,
|
||||
pub error: Option<String>,
|
||||
pub reason: Option<String>,
|
||||
fn build_request_from_query(&self, query: &SearchQuery) -> RequestBuilder {
|
||||
self.build_request(query.from, query.size, &query.query_strings)
|
||||
}
|
||||
|
||||
#[derive(Debug, Serialize)]
|
||||
pub struct QueryResponse {
|
||||
failed: Vec<FailedRequest>,
|
||||
hits: Vec<QueryHits>,
|
||||
total_hits: usize,
|
||||
}
|
||||
fn build_request(&self, from: u64, size: u64, query_strings: &HashMap<String, String>) -> RequestBuilder {
|
||||
let url = HttpClient::join_url(&self.server.endpoint, "/query/_search");
|
||||
let mut request_builder = self.client.request(Method::GET, url);
|
||||
|
||||
|
||||
#[tauri::command]
|
||||
pub async fn query_coco_servers<R: Runtime>(
|
||||
app_handle: AppHandle<R>,
|
||||
from: u64,
|
||||
size: u64,
|
||||
query_strings: HashMap<String, String>,
|
||||
) -> Result<QueryResponse, ()> {
|
||||
println!(
|
||||
"DBG: query_coco_servers, from: {} size: {} query_strings {:?}",
|
||||
from, size, query_strings
|
||||
);
|
||||
|
||||
let coco_servers = get_servers_as_hashmap();
|
||||
let mut futures = FuturesUnordered::new();
|
||||
let size_for_each_request = (from + size).to_string();
|
||||
|
||||
for (_, server) in &coco_servers {
|
||||
let url = HttpClient::join_url(&server.endpoint, "/query/_search");
|
||||
let client = HTTP_CLIENT.lock().await; // Acquire the lock on HTTP_CLIENT
|
||||
let mut request_builder = client.request(Method::GET, url);
|
||||
|
||||
if !server.public {
|
||||
if let Some(token) = get_server_token(&server.id).map(|t| t.access_token) {
|
||||
if !self.server.public {
|
||||
if let Some(token) = get_server_token(&self.server.id).map(|t| t.access_token) {
|
||||
request_builder = request_builder.header("X-API-TOKEN", token);
|
||||
}
|
||||
}
|
||||
let query_strings_cloned = query_strings.clone(); // Clone for each iteration
|
||||
|
||||
let from = from.to_string();
|
||||
let size = size_for_each_request.clone();
|
||||
let future = async move {
|
||||
let response = request_builder
|
||||
.query(&[("from", from.as_str()), ("size", size.as_str())])
|
||||
.query(&query_strings_cloned) // Use cloned instance
|
||||
.send()
|
||||
.await;
|
||||
(server.id.clone(), response)
|
||||
};
|
||||
|
||||
futures.push(future);
|
||||
request_builder.query(&[("from", &from.to_string()), ("size", &size.to_string())])
|
||||
.query(query_strings)
|
||||
}
|
||||
}
|
||||
use futures::future::join_all;
|
||||
use std::sync::Arc;
|
||||
|
||||
let mut total_hits = 0;
|
||||
let mut failed_requests:Vec<FailedRequest> = Vec::new();
|
||||
let mut docs_collector = DocumentsSizedCollector::new(size);
|
||||
|
||||
// Helper function to create failed request
|
||||
fn create_failed_request(server_id: &str, coco_servers: &HashMap<String,Server>, error: &str, status: u16) -> FailedRequest {
|
||||
FailedRequest {
|
||||
source: QuerySource {
|
||||
r#type: Some("coco-server".to_string()),
|
||||
name: Some(coco_servers.get(server_id).map(|s| s.name.clone()).unwrap_or_default()),
|
||||
id: Some(server_id.to_string()),
|
||||
},
|
||||
status,
|
||||
error: Some(error.to_string()),
|
||||
reason: None,
|
||||
#[async_trait]
|
||||
impl SearchSource for CocoSearchSource {
|
||||
fn get_type(&self) -> QuerySource {
|
||||
QuerySource {
|
||||
r#type: COCO_SERVERS.into(),
|
||||
name: self.server.name.clone(),
|
||||
id: self.server.id.clone(),
|
||||
}
|
||||
}
|
||||
|
||||
// Iterate over the stream of futures
|
||||
while let Some((server_id, res_response)) = futures.next().await {
|
||||
match res_response {
|
||||
// Directly return Result<QueryResponse, SearchError> instead of Future
|
||||
async fn search(
|
||||
&self,
|
||||
query: SearchQuery,
|
||||
) -> Result<QueryResponse, SearchError> {
|
||||
let server_id = self.server.id.clone();
|
||||
let server_name = self.server.name.clone();
|
||||
let request_builder = self.build_request_from_query(&query);
|
||||
|
||||
// Send the HTTP request asynchronously
|
||||
let response = request_builder.send().await;
|
||||
|
||||
match response {
|
||||
Ok(response) => {
|
||||
let status_code = response.status().as_u16();
|
||||
|
||||
// Check if the status code indicates a successful request (2xx)
|
||||
if status_code >= 200 && status_code < 400 {
|
||||
// Parse the response only if the status code is success
|
||||
match parse_search_results_with_score(response).await {
|
||||
Ok(documents) => {
|
||||
total_hits += documents.len(); // No need for `&` here, as `len` is `usize`
|
||||
for (doc, score) in documents {
|
||||
let score = score.unwrap_or(0.0) as f64;
|
||||
docs_collector.push(server_id.clone(), doc, score);
|
||||
}
|
||||
// Parse the response only if the status code is successful
|
||||
match parse_search_response(response).await {
|
||||
Ok(response) => {
|
||||
let total_hits = response.hits.total.value as usize;
|
||||
let hits: Vec<(Document, f64)> = response.hits.hits.into_iter()
|
||||
.map(|hit| {
|
||||
// Handling Option<f64> in hit._score by defaulting to 0.0 if None
|
||||
(hit._source, hit._score.unwrap_or(0.0)) // Use 0.0 if _score is None
|
||||
})
|
||||
.collect();
|
||||
|
||||
// Return the QueryResponse with hits and total hits
|
||||
Ok(QueryResponse {
|
||||
source: self.get_type(),
|
||||
hits,
|
||||
total_hits,
|
||||
})
|
||||
}
|
||||
Err(err) => {
|
||||
failed_requests.push(create_failed_request(
|
||||
&server_id, &coco_servers, &err.to_string(), status_code,
|
||||
));
|
||||
// Parse error when response parsing fails
|
||||
Err(SearchError::ParseError(err.to_string()))
|
||||
}
|
||||
}
|
||||
} else {
|
||||
// If status code is not successful, log the failure
|
||||
failed_requests.push(create_failed_request(
|
||||
&server_id, &coco_servers, "Unsuccessful response", status_code,
|
||||
));
|
||||
// Handle unsuccessful HTTP status codes (e.g., 4xx, 5xx)
|
||||
Err(SearchError::HttpError(format!(
|
||||
"Request failed with status code: {}",
|
||||
status_code
|
||||
)))
|
||||
}
|
||||
}
|
||||
Err(err) => {
|
||||
// Handle the error from the future itself
|
||||
failed_requests.push(create_failed_request(
|
||||
&server_id, &coco_servers, &err.to_string(), 0,
|
||||
));
|
||||
// Handle error from the request itself
|
||||
Err(SearchError::HttpError(err.to_string()))
|
||||
}
|
||||
}
|
||||
}
|
||||
|
||||
let docs = docs_collector.documents_by_server_id(&coco_servers);
|
||||
|
||||
// dbg!(&total_hits);
|
||||
// dbg!(&failed_requests);
|
||||
// dbg!(&docs);
|
||||
|
||||
let query_response = QueryResponse {
|
||||
failed: failed_requests,
|
||||
hits: docs,
|
||||
total_hits,
|
||||
};
|
||||
|
||||
//print to json
|
||||
// println!("{}", serde_json::to_string_pretty(&query_response).unwrap());
|
||||
|
||||
Ok(query_response)
|
||||
}
|
||||
@@ -1,18 +1,20 @@
+use crate::common::register::SearchSourceRegistry;
use crate::common::server::{AuthProvider, Provider, Server, ServerAccessToken, Sso, Version};
+use crate::server::connector::refresh_all_connectors;
+use crate::server::datasource::refresh_all_datasources;
use crate::server::http_client::HttpClient;
+use crate::server::search::CocoSearchSource;
use crate::COCO_TAURI_STORE;
use lazy_static::lazy_static;
-use reqwest::{Method, StatusCode};
+use reqwest::{Client, Method, StatusCode};
use serde_json::from_value;
use serde_json::Value as JsonValue;
use std::collections::HashMap;
use std::sync::Arc;
use std::sync::RwLock;
-use tauri::AppHandle;
use tauri::Runtime;
+use tauri::{AppHandle, Manager};
use tauri_plugin_store::StoreExt;
-use crate::server::connector::refresh_all_connectors;
-use crate::server::datasource::refresh_all_datasources;
// Assuming you're using serde_json

lazy_static! {
@@ -58,7 +60,7 @@ fn remove_server_by_id(id: String) -> bool {
}


-pub fn persist_servers<R: Runtime>(app_handle: &AppHandle<R>) -> Result<(), String> {
+pub async fn persist_servers<R: Runtime>(app_handle: &AppHandle<R>) -> Result<(), String> {
    let cache = SERVER_CACHE.read().unwrap(); // Acquire a read lock, not a write lock, since you're not modifying the cache

    // Convert HashMap to Vec for serialization (iterating over values of HashMap)
@@ -70,8 +72,6 @@ pub fn persist_servers<R: Runtime>(app_handle: &AppHandle<R>) -> Result<(), Stri
        .map(|server| serde_json::to_value(server).expect("Failed to serialize server")) // Automatically serialize all fields
        .collect();

-   // dbg!(format!("persist servers: {:?}", &json_servers));
-
    // Save the serialized servers to Tauri's store
    app_handle
        .store(COCO_TAURI_STORE)
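With `persist_servers` now `async`, every call site in the hunks below gains an `.await`. The body itself boils down to serializing the cache into the Tauri store. A minimal sketch against the tauri-plugin-store 2.x API follows; the free-standing signature, the store path, and the `"coco_servers"` key are placeholders, not the repo's actual names (the real function reads `SERVER_CACHE` and uses the `COCO_TAURI_STORE` constant):

use serde_json::json;
use tauri::{AppHandle, Runtime};
use tauri_plugin_store::StoreExt;

// Simplified stand-in for persist_servers.
pub async fn persist_servers_sketch<R: Runtime>(
    app_handle: &AppHandle<R>,
    json_servers: Vec<serde_json::Value>,
) -> Result<(), String> {
    let store = app_handle
        .store("coco-servers.json") // placeholder path
        .map_err(|e| e.to_string())?;
    // The store lives in memory; set() updates it and save() flushes to disk.
    store.set("coco_servers", json!(json_servers));
    store.save().map_err(|e| e.to_string())
}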
@@ -143,7 +143,6 @@ fn get_default_server() -> Server {
}

pub async fn load_servers_token<R: Runtime>(app_handle: &AppHandle<R>) -> Result<Vec<ServerAccessToken>, String> {
-
    dbg!("Attempting to load servers token");

    let store = app_handle
@@ -227,7 +226,6 @@ pub async fn load_servers<R: Runtime>(app_handle: &AppHandle<R>) -> Result<Vec<S

/// Function to load servers or insert a default one if none exist
pub async fn load_or_insert_default_server<R: Runtime>(app_handle: &AppHandle<R>) -> Result<Vec<Server>, String> {
-
    dbg!("Attempting to load or insert default server");

    let exists_servers = load_servers(&app_handle).await;
@@ -300,16 +298,16 @@ pub async fn refresh_coco_server_info<R: Runtime>(
    server.profile = profile;
    trim_endpoint_last_forward_slash(&mut server);
    save_server(&server);
-   persist_servers(&app_handle).expect("Failed to persist coco servers.");
+   persist_servers(&app_handle).await.expect("Failed to persist coco servers.");


    // refresh connectors and datasources
    if let Err(err) = refresh_all_connectors(&app_handle).await {
-       return Err(format!("Failed to load server connectors: {}", err))
+       return Err(format!("Failed to load server connectors: {}", err));
    }

    if let Err(err) = refresh_all_datasources(&app_handle).await {
-       return Err(format!("Failed to load server datasources: {}", err))
+       return Err(format!("Failed to load server datasources: {}", err));
    }

    Ok(server)
@@ -380,8 +378,12 @@ pub async fn add_coco_server<R: Runtime>(
    // Save the new server to the cache
    save_server(&server);

+   let registry = app_handle.state::<SearchSourceRegistry>();
+   let source = CocoSearchSource::new(server.clone(), Client::new());
+   registry.register_source(source).await;
+
    // Persist the servers to the store
-   persist_servers(&app_handle)
+   persist_servers(&app_handle).await
        .expect("Failed to persist Coco servers.");

    dbg!(format!("Successfully registered server: {:?}", &endpoint));
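The `SearchSourceRegistry` used here lives in `crate::common::register` and is not part of this diff, so the sketch below only illustrates the register/remove pattern that this hunk and the next one depend on, assuming an async `RwLock`-guarded map keyed by source id; the trait shape is a guess:

use std::collections::HashMap;
use std::sync::Arc;

use tokio::sync::RwLock;

// Guessed minimal trait; the real SearchSource trait is not in this diff.
pub trait SearchSource: Send + Sync {
    fn get_id(&self) -> String;
}

#[derive(Default)]
pub struct SearchSourceRegistry {
    sources: RwLock<HashMap<String, Arc<dyn SearchSource>>>,
}

impl SearchSourceRegistry {
    // Adding a Coco server registers it as one more source for fused search.
    pub async fn register_source<S: SearchSource + 'static>(&self, source: S) {
        self.sources.write().await.insert(source.get_id(), Arc::new(source));
    }

    // Removing a server (next hunk) drops it from future queries.
    pub async fn remove_source(&self, id: String) {
        self.sources.write().await.remove(&id);
    }
}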
@@ -407,9 +409,13 @@ pub async fn remove_coco_server<R: Runtime>(
    app_handle: AppHandle<R>,
    id: String,
) -> Result<(), ()> {
+   let registry = app_handle.state::<SearchSourceRegistry>();
+   registry.remove_source(id.clone()).await;
+
    remove_server_token(id.as_str());
    remove_server_by_id(id);
-   persist_servers(&app_handle).expect("failed to save servers");
+
+   persist_servers(&app_handle).await.expect("failed to save servers");
    persist_servers_token(&app_handle).expect("failed to save server tokens");
    Ok(())
}
@@ -419,7 +425,6 @@ pub async fn logout_coco_server<R: Runtime>(
    app_handle: AppHandle<R>,
    id: String,
) -> Result<(), String> {
-
    dbg!("Attempting to log out server by id:", &id);

    // Check if server token exists
@@ -450,7 +455,7 @@ pub async fn logout_coco_server<R: Runtime>(
    save_server(&server);

    // Persist the updated server data
-   if let Err(e) = persist_servers(&app_handle) {
+   if let Err(e) = persist_servers(&app_handle).await {
        dbg!("Failed to save server for id: {}. Error: {:?}", &id, &e);
        return Err(format!("Failed to save server: {}", &e));
    }
@@ -1,6 +1,4 @@
-import {
-    File,
-} from "lucide-react";
+import {File,} from "lucide-react";

import IconWrapper from './IconWrapper';
import ThemedIcon from './ThemedIcon';
@@ -16,14 +14,25 @@ interface ItemIconProps {
function ItemIcon({
    item,
    className = "w-5 h-5 flex-shrink-0",
-   onClick = () => {}
+   onClick = () => {
+   }
}: ItemIconProps) {
    const endpoint_http = useAppStore((state) => state.endpoint_http);

    const connectorSource = useFindConnectorIcon(item);
    const icons = connectorSource?.assets?.icons || {};
-   const selectedIcon = icons[item?.icon];

+   // If the icon is a valid base64-encoded image
+   const isBase64 = item?.icon?.startsWith("data:image/");
+   if (isBase64) {
+       return (
+           <IconWrapper className={className} onClick={onClick}>
+               <img className={className} src={item?.icon} alt="icon"/>
+           </IconWrapper>
+       );
+   }
+
+   const selectedIcon = icons[item?.icon];
    if (!selectedIcon) {
        return (
            <IconWrapper className={className} onClick={onClick}>
@@ -1,6 +1,4 @@
-import {
-    Folder,
-} from "lucide-react";
+import {Folder,} from "lucide-react";

import IconWrapper from './IconWrapper';
import ThemedIcon from './ThemedIcon';
@@ -19,6 +17,16 @@ function RichIcon({ item, className, onClick }: RichIconProps) {
    const connectorSource = useFindConnectorIcon(item);
    const icons = connectorSource?.assets?.icons || {};

+   // If the selectedIcon is a valid base64-encoded image
+   const isBase64 = item?.rich_categories?.[0]?.icon?.startsWith("data:image/");
+   if (isBase64) {
+       return (
+           <IconWrapper className={className} onClick={onClick}>
+               <img className={className} src={item?.rich_categories?.[0]?.icon} alt="icon"/>
+           </IconWrapper>
+       );
+   }
+
    const selectedIcon = icons[item?.rich_categories?.[0]?.icon];

    if (!selectedIcon) {
@@ -15,10 +15,22 @@ interface TypeIconProps {
function TypeIcon({
    item,
    className = "w-5 h-5 flex-shrink-0",
-   onClick = () => { }
+   onClick = () => {
+   }
}: TypeIconProps) {
    const endpoint_http = useAppStore((state) => state.endpoint_http);
    const connectorSource = useFindConnectorIcon(item);

+   // If the icon is a valid base64-encoded image
+   const isBase64 = connectorSource?.icon?.startsWith("data:image/");
+   if (isBase64) {
+       return (
+           <IconWrapper className={className} onClick={onClick}>
+               <img className={className} src={connectorSource?.icon} alt="icon"/>
+           </IconWrapper>
+       );
+   }
+
    const selectedIcon = connectorSource?.icon;

    if (!selectedIcon) {
@@ -47,7 +47,7 @@ export const DocumentList: React.FC<DocumentListProps> = ({
    }

    try {
-       const response: any = await invoke("query_coco_servers", {
+       const response: any = await invoke("query_coco_fusion", {
            from: from,
            size: PAGE_SIZE,
            queryStrings,
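On the Rust side, `query_coco_fusion` is the Tauri command that this renamed `invoke` call (and the one in the next hunk) now hits. Its exact signature is not shown on this page, so the following is only a sketch with placeholder types; one detail worth noting is that Tauri maps the camelCase `queryStrings` argument from JS onto a snake_case `query_strings` parameter:

use std::collections::HashMap;

use serde::Serialize;

// Placeholder response type; the real QueryResponse is the struct assembled
// at the end of the fusion loop above (failed / hits / total_hits).
#[derive(Serialize, Default)]
pub struct QueryResponse {
    failed: Vec<serde_json::Value>,
    hits: Vec<serde_json::Value>,
    total_hits: usize,
}

#[tauri::command]
pub async fn query_coco_fusion(
    from: u64,
    size: u64,
    query_strings: HashMap<String, String>, // receives `queryStrings` from JS
) -> Result<QueryResponse, String> {
    // The real command fans out to all registered search sources and fuses
    // the results; returning an empty response keeps this sketch compilable.
    let _ = (from, size, query_strings);
    Ok(QueryResponse::default())
}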
@@ -78,7 +78,7 @@ function Search({ isChatMode, input }: SearchProps) {
    //   baseURL: appStore.endpoint_http,
    // });

-   const response: any = await invoke("query_coco_servers", { from: 0, size: 10, queryStrings: { query: input } });
+   const response: any = await invoke("query_coco_fusion", { from: 0, size: 10, queryStrings: { query: input } });
    // failed_coco_servers documents

    console.log("_suggest", input, response);