Mirror of https://github.com/infinilabs/coco-app.git (synced 2025-12-16 11:37:47 +01:00)
refactor: error handling in install_extension interfaces (#995)
* refactor: error handling in install_extension interfaces
* fmt
* fix build
* release notes
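This commit replaces `Result<_, String>` returns and ad-hoc `format!`-based `map_err` calls with dedicated error enums built on `snafu` (`HttpRequestError`, `SearchError`, `InvalidPluginJsonError`, `InstallExtensionError`), so callers propagate failures with `?` and `.context(...)` while the original cause is preserved. Below is a minimal sketch of the before/after pattern; the enum and function names are illustrative, not the exact types in this diff.

```rust
use snafu::prelude::*;

// Illustrative error type; the real commit defines HttpRequestError,
// InstallExtensionError, etc. with the same shape.
#[derive(Debug, Snafu)]
enum FetchError {
    #[snafu(display("failed to decode response"))]
    DecodeResponse { source: serde_json::Error },
}

// Before: the cause is flattened into a String and lost for callers.
fn parse_before(body: &str) -> Result<serde_json::Value, String> {
    serde_json::from_str(body).map_err(|e| format!("Error decoding response: {}", e))
}

// After: a typed error; `DecodeResponseSnafu` is the context selector
// that snafu derives for the `DecodeResponse` variant.
fn parse_after(body: &str) -> Result<serde_json::Value, FetchError> {
    serde_json::from_str(body).context(DecodeResponseSnafu)
}
```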
@@ -24,6 +24,7 @@ Information about release notes of Coco App is provided here.

### ✈️ Improvements

- chore: write panic message to stdout in panic hook #989
- refactor: error handling in install_extension interfaces #995

## 0.9.0 (2025-11-19)
src-tauri/Cargo.lock (generated): 22 changes
@@ -1186,6 +1186,7 @@ dependencies = [
 "serde",
 "serde_json",
 "serde_plain",
 "snafu",
 "strsim 0.10.0",
 "strum",
 "sys-locale",
@@ -6600,6 +6601,27 @@ version = "1.15.1"
source = "registry+https://github.com/rust-lang/crates.io-index"
checksum = "67b1b7a3b5fe4f1376887184045fcf45c69e92af734b7aaddc05fb777b6fbd03"

[[package]]
name = "snafu"
version = "0.8.9"
source = "registry+https://github.com/rust-lang/crates.io-index"
checksum = "6e84b3f4eacbf3a1ce05eac6763b4d629d60cbc94d632e4092c54ade71f1e1a2"
dependencies = [
 "snafu-derive",
]

[[package]]
name = "snafu-derive"
version = "0.8.9"
source = "registry+https://github.com/rust-lang/crates.io-index"
checksum = "c1c97747dbf44bb1ca44a561ece23508e99cb592e862f22222dcf42f51d1e451"
dependencies = [
 "heck 0.5.0",
 "proc-macro2",
 "quote",
 "syn 2.0.111",
]

[[package]]
name = "socket2"
version = "0.5.10"
@@ -121,6 +121,7 @@ actix-files = "0.6.8"
actix-web = "4.11.0"
tauri-plugin-clipboard-manager = "2"
tauri-plugin-zustand = "1"
snafu = "0.8.9"

[dev-dependencies]
tempfile = "3.23.0"
@@ -1,13 +1,14 @@
|
||||
use crate::common::assistant::ChatRequestMessage;
|
||||
use crate::common::http::convert_query_params_to_strings;
|
||||
use crate::common::register::SearchSourceRegistry;
|
||||
use crate::server::http_client::HttpClient;
|
||||
use crate::server::http_client::{DecodeResponseSnafu, HttpClient, HttpRequestError};
|
||||
use crate::{common, server::servers::COCO_SERVERS};
|
||||
use futures::StreamExt;
|
||||
use futures::stream::FuturesUnordered;
|
||||
use futures_util::TryStreamExt;
|
||||
use http::Method;
|
||||
use serde_json::Value;
|
||||
use snafu::ResultExt;
|
||||
use std::collections::HashMap;
|
||||
use tauri::{AppHandle, Emitter, Manager};
|
||||
use tokio::io::AsyncBufReadExt;
|
||||
@@ -19,7 +20,7 @@ pub async fn chat_history(
|
||||
from: u32,
|
||||
size: u32,
|
||||
query: Option<String>,
|
||||
) -> Result<String, String> {
|
||||
) -> Result<String, HttpRequestError> {
|
||||
let mut query_params = Vec::new();
|
||||
|
||||
// Add from/size as number values
|
||||
@@ -32,12 +33,7 @@ pub async fn chat_history(
|
||||
}
|
||||
}
|
||||
|
||||
let response = HttpClient::get(&server_id, "/chat/_history", Some(query_params))
|
||||
.await
|
||||
.map_err(|e| {
|
||||
dbg!("Error get history: {}", &e);
|
||||
format!("Error get history: {}", e)
|
||||
})?;
|
||||
let response = HttpClient::get(&server_id, "/chat/_history", Some(query_params)).await?;
|
||||
|
||||
common::http::get_response_body_text(response).await
|
||||
}
|
||||
@@ -49,7 +45,7 @@ pub async fn session_chat_history(
|
||||
session_id: String,
|
||||
from: u32,
|
||||
size: u32,
|
||||
) -> Result<String, String> {
|
||||
) -> Result<String, HttpRequestError> {
|
||||
let mut query_params = Vec::new();
|
||||
|
||||
// Add from/size as number values
|
||||
@@ -58,9 +54,7 @@ pub async fn session_chat_history(
|
||||
|
||||
let path = format!("/chat/{}/_history", session_id);
|
||||
|
||||
let response = HttpClient::get(&server_id, path.as_str(), Some(query_params))
|
||||
.await
|
||||
.map_err(|e| format!("Error get session message: {}", e))?;
|
||||
let response = HttpClient::get(&server_id, path.as_str(), Some(query_params)).await?;
|
||||
|
||||
common::http::get_response_body_text(response).await
|
||||
}
|
||||
@@ -70,12 +64,10 @@ pub async fn open_session_chat(
|
||||
_app_handle: AppHandle,
|
||||
server_id: String,
|
||||
session_id: String,
|
||||
) -> Result<String, String> {
|
||||
) -> Result<String, HttpRequestError> {
|
||||
let path = format!("/chat/{}/_open", session_id);
|
||||
|
||||
let response = HttpClient::post(&server_id, path.as_str(), None, None)
|
||||
.await
|
||||
.map_err(|e| format!("Error open session: {}", e))?;
|
||||
let response = HttpClient::post(&server_id, path.as_str(), None, None).await?;
|
||||
|
||||
common::http::get_response_body_text(response).await
|
||||
}
|
||||
@@ -85,12 +77,10 @@ pub async fn close_session_chat(
|
||||
_app_handle: AppHandle,
|
||||
server_id: String,
|
||||
session_id: String,
|
||||
) -> Result<String, String> {
|
||||
) -> Result<String, HttpRequestError> {
|
||||
let path = format!("/chat/{}/_close", session_id);
|
||||
|
||||
let response = HttpClient::post(&server_id, path.as_str(), None, None)
|
||||
.await
|
||||
.map_err(|e| format!("Error close session: {}", e))?;
|
||||
let response = HttpClient::post(&server_id, path.as_str(), None, None).await?;
|
||||
|
||||
common::http::get_response_body_text(response).await
|
||||
}
|
||||
@@ -100,13 +90,11 @@ pub async fn cancel_session_chat(
|
||||
server_id: String,
|
||||
session_id: String,
|
||||
query_params: Option<HashMap<String, Value>>,
|
||||
) -> Result<String, String> {
|
||||
) -> Result<String, HttpRequestError> {
|
||||
let path = format!("/chat/{}/_cancel", session_id);
|
||||
let query_params = convert_query_params_to_strings(query_params);
|
||||
|
||||
let response = HttpClient::post(&server_id, path.as_str(), query_params, None)
|
||||
.await
|
||||
.map_err(|e| format!("Error cancel session: {}", e))?;
|
||||
let response = HttpClient::post(&server_id, path.as_str(), query_params, None).await?;
|
||||
|
||||
common::http::get_response_body_text(response).await
|
||||
}
|
||||
@@ -270,14 +258,23 @@ pub async fn chat_chat(
|
||||
}
|
||||
|
||||
#[tauri::command]
|
||||
pub async fn delete_session_chat(server_id: String, session_id: String) -> Result<bool, String> {
|
||||
pub async fn delete_session_chat(
|
||||
server_id: String,
|
||||
session_id: String,
|
||||
) -> Result<bool, HttpRequestError> {
|
||||
let response =
|
||||
HttpClient::delete(&server_id, &format!("/chat/{}", session_id), None, None).await?;
|
||||
|
||||
if response.status().is_success() {
|
||||
let status = response.status();
|
||||
|
||||
if status.is_success() {
|
||||
Ok(true)
|
||||
} else {
|
||||
Err(format!("Delete failed with status: {}", response.status()))
|
||||
Err(HttpRequestError::RequestFailed {
|
||||
status: status.as_u16(),
|
||||
error_response_body_str: None,
|
||||
coco_server_api_error_response_body: None,
|
||||
})
|
||||
}
|
||||
}
|
||||
|
||||
@@ -287,7 +284,7 @@ pub async fn update_session_chat(
|
||||
session_id: String,
|
||||
title: Option<String>,
|
||||
context: Option<HashMap<String, Value>>,
|
||||
) -> Result<bool, String> {
|
||||
) -> Result<bool, HttpRequestError> {
|
||||
let mut body = HashMap::new();
|
||||
if let Some(title) = title {
|
||||
body.insert("title".to_string(), Value::String(title));
|
||||
@@ -306,8 +303,7 @@ pub async fn update_session_chat(
|
||||
None,
|
||||
Some(reqwest::Body::from(serde_json::to_string(&body).unwrap())),
|
||||
)
|
||||
.await
|
||||
.map_err(|e| format!("Error updating session: {}", e))?;
|
||||
.await?;
|
||||
|
||||
Ok(response.status().is_success())
|
||||
}
|
||||
@@ -317,15 +313,10 @@ pub async fn assistant_search(
|
||||
_app_handle: AppHandle,
|
||||
server_id: String,
|
||||
query_params: Option<Vec<String>>,
|
||||
) -> Result<Value, String> {
|
||||
let response = HttpClient::post(&server_id, "/assistant/_search", query_params, None)
|
||||
.await
|
||||
.map_err(|e| format!("Error searching assistants: {}", e))?;
|
||||
) -> Result<Value, HttpRequestError> {
|
||||
let response = HttpClient::post(&server_id, "/assistant/_search", query_params, None).await?;
|
||||
|
||||
response
|
||||
.json::<Value>()
|
||||
.await
|
||||
.map_err(|err| err.to_string())
|
||||
response.json::<Value>().await.context(DecodeResponseSnafu)
|
||||
}
|
||||
|
||||
#[tauri::command]
|
||||
@@ -333,19 +324,15 @@ pub async fn assistant_get(
|
||||
_app_handle: AppHandle,
|
||||
server_id: String,
|
||||
assistant_id: String,
|
||||
) -> Result<Value, String> {
|
||||
) -> Result<Value, HttpRequestError> {
|
||||
let response = HttpClient::get(
|
||||
&server_id,
|
||||
&format!("/assistant/{}", assistant_id),
|
||||
None, // headers
|
||||
)
|
||||
.await
|
||||
.map_err(|e| format!("Error getting assistant: {}", e))?;
|
||||
.await?;
|
||||
|
||||
response
|
||||
.json::<Value>()
|
||||
.await
|
||||
.map_err(|err| err.to_string())
|
||||
response.json::<Value>().await.context(DecodeResponseSnafu)
|
||||
}
|
||||
|
||||
/// Gets the information of the assistant specified by `assistant_id` by querying **all**
|
||||
@@ -356,7 +343,7 @@ pub async fn assistant_get(
|
||||
pub async fn assistant_get_multi(
|
||||
app_handle: AppHandle,
|
||||
assistant_id: String,
|
||||
) -> Result<Value, String> {
|
||||
) -> Result<Option<Value>, HttpRequestError> {
|
||||
let search_sources = app_handle.state::<SearchSourceRegistry>();
|
||||
let sources_future = search_sources.get_sources();
|
||||
let sources_list = sources_future.await;
|
||||
@@ -375,19 +362,17 @@ pub async fn assistant_get_multi(
|
||||
let path = format!("/assistant/{}", assistant_id);
|
||||
|
||||
let fut = async move {
|
||||
let res_response = HttpClient::get(
|
||||
let response = HttpClient::get(
|
||||
&coco_server_id,
|
||||
&path,
|
||||
None, // headers
|
||||
)
|
||||
.await;
|
||||
match res_response {
|
||||
Ok(response) => response
|
||||
.json::<serde_json::Value>()
|
||||
.await
|
||||
.map_err(|e| e.to_string()),
|
||||
Err(e) => Err(e),
|
||||
}
|
||||
.await?;
|
||||
|
||||
response
|
||||
.json::<serde_json::Value>()
|
||||
.await
|
||||
.context(DecodeResponseSnafu)
|
||||
};
|
||||
|
||||
futures.push(fut);
|
||||
@@ -419,15 +404,12 @@ pub async fn assistant_get_multi(
|
||||
// ```
|
||||
if let Some(found) = response_json.get("found") {
|
||||
if found == true {
|
||||
return Ok(response_json);
|
||||
return Ok(Some(response_json));
|
||||
}
|
||||
}
|
||||
}
|
||||
|
||||
Err(format!(
|
||||
"could not find Assistant [{}] on all the Coco servers",
|
||||
assistant_id
|
||||
))
|
||||
Ok(None)
|
||||
}
|
||||
|
||||
use regex::Regex;
|
||||
@@ -453,7 +435,7 @@ pub async fn ask_ai(
|
||||
server_id: String,
|
||||
assistant_id: String,
|
||||
client_id: String,
|
||||
) -> Result<(), String> {
|
||||
) -> Result<(), HttpRequestError> {
|
||||
let cleaned = remove_icon_fields(message.as_str());
|
||||
|
||||
let body = serde_json::json!({ "message": cleaned });
|
||||
@@ -472,13 +454,19 @@ pub async fn ask_ai(
|
||||
)
|
||||
.await?;
|
||||
|
||||
if response.status() == 429 {
|
||||
let status = response.status().as_u16();
|
||||
|
||||
if status == 429 {
|
||||
log::warn!("Rate limit exceeded for assistant: {}", &assistant_id);
|
||||
return Ok(());
|
||||
}
|
||||
|
||||
if !response.status().is_success() {
|
||||
return Err(format!("Request Failed: {}", response.status()));
|
||||
return Err(HttpRequestError::RequestFailed {
|
||||
status,
|
||||
error_response_body_str: None,
|
||||
coco_server_api_error_response_body: None,
|
||||
});
|
||||
}
|
||||
|
||||
let stream = response.bytes_stream();
|
||||
@@ -491,7 +479,7 @@ pub async fn ask_ai(
|
||||
dbg!("Received line: {}", &line);
|
||||
|
||||
let _ = app_handle.emit(&client_id, line).map_err(|err| {
|
||||
println!("Failed to emit: {:?}", err);
|
||||
log::error!("Failed to emit: {:?}", err);
|
||||
});
|
||||
}
|
||||
|
||||
|
||||
@@ -1,81 +1,162 @@
|
||||
use reqwest::StatusCode;
|
||||
use serde::{Deserialize, Serialize, Serializer};
|
||||
use thiserror::Error;
|
||||
use snafu::prelude::*;
|
||||
|
||||
fn serialize_optional_status_code<S>(
|
||||
status_code: &Option<StatusCode>,
|
||||
use crate::server::http_client::HttpRequestError;
|
||||
|
||||
#[derive(Debug, Deserialize, Serialize)]
|
||||
pub struct ApiErrorCause {
|
||||
/// Only the top-level error contains this.
|
||||
#[serde(default)]
|
||||
pub root_cause: Option<Vec<ApiErrorCause>>,
|
||||
|
||||
#[serde(default)]
|
||||
pub r#type: Option<String>,
|
||||
#[serde(default)]
|
||||
pub reason: Option<String>,
|
||||
|
||||
/// Recursion, [error A] cause by [error B] caused by [error C]
|
||||
#[serde(default)]
|
||||
pub caused_by: Option<Box<ApiErrorCause>>,
|
||||
}
|
||||
|
||||
#[derive(Debug, Deserialize, Serialize)]
|
||||
pub struct ApiError {
|
||||
#[serde(default)]
|
||||
pub error: Option<ApiErrorCause>,
|
||||
#[serde(default)]
|
||||
pub status: Option<u16>,
|
||||
}
|
||||
|
||||
#[derive(Debug, Snafu, Serialize)]
|
||||
#[snafu(visibility(pub(crate)))]
|
||||
pub enum SearchError {
|
||||
#[snafu(display("HTTP request error"))]
|
||||
HttpError { source: HttpRequestError },
|
||||
#[snafu(display("failed to decode query response"))]
|
||||
ResponseDecodeError {
|
||||
#[serde(serialize_with = "serialize_error")]
|
||||
source: serde_json::Error,
|
||||
},
|
||||
/// The search operation timed out.
|
||||
#[snafu(display("search operation timed out"))]
|
||||
SearchTimeout,
|
||||
#[snafu(display("an internal error occurred: '{}'", error))]
|
||||
InternalError { error: String },
|
||||
}
|
||||
|
||||
pub(crate) fn serialize_error<S, E: std::error::Error>(
|
||||
error: &E,
|
||||
serializer: S,
|
||||
) -> Result<S::Ok, S::Error>
|
||||
where
|
||||
S: Serializer,
|
||||
{
|
||||
match status_code {
|
||||
Some(code) => serializer.serialize_str(&format!("{:?}", code)),
|
||||
None => serializer.serialize_none(),
|
||||
}
|
||||
serializer.serialize_str(&report_error(error, ReportErrorStyle::SingleLine))
|
||||
}
|
||||
|
||||
#[allow(unused)]
|
||||
#[derive(Debug, Deserialize)]
|
||||
pub struct ErrorCause {
|
||||
#[serde(default)]
|
||||
pub r#type: Option<String>,
|
||||
#[serde(default)]
|
||||
pub reason: Option<String>,
|
||||
/// `ReportErrorStyle` controls the error reporting format.
|
||||
pub(crate) enum ReportErrorStyle {
|
||||
/// Report it in one line of message. This is suitable when you write dump
|
||||
/// errors to logs.
|
||||
///
|
||||
/// ```text
|
||||
/// 'failed to installed extension', caused by ['Json parsing error' 'I/O error: file not found']
|
||||
/// ```
|
||||
SingleLine,
|
||||
/// Allow it to span multiple lines.
|
||||
///
|
||||
/// ```text
|
||||
/// failed to installed extension
|
||||
/// Caused by:
|
||||
///
|
||||
/// 0: Json parsing error
|
||||
/// 1: I/O error: file not found
|
||||
/// ```
|
||||
MultipleLines,
|
||||
}
|
||||
|
||||
#[derive(Debug, Deserialize)]
|
||||
#[allow(unused)]
|
||||
pub struct ErrorDetail {
|
||||
#[serde(default)]
|
||||
pub root_cause: Option<Vec<ErrorCause>>,
|
||||
#[serde(default)]
|
||||
pub r#type: Option<String>,
|
||||
#[serde(default)]
|
||||
pub reason: Option<String>,
|
||||
#[serde(default)]
|
||||
pub caused_by: Option<ErrorCause>,
|
||||
}
|
||||
/// In Rust, a typical Display impl of an Error won't contain it source information[1],
|
||||
/// so we need a reporter to report the full error message.
|
||||
///
|
||||
/// [1]: https://stackoverflow.com/q/62869360/14092446
|
||||
pub(crate) fn report_error<E: std::error::Error>(e: &E, style: ReportErrorStyle) -> String {
|
||||
use std::fmt::Write;
|
||||
|
||||
#[derive(Debug, Deserialize)]
|
||||
pub struct ErrorResponse {
|
||||
#[serde(default)]
|
||||
pub error: Option<ErrorDetail>,
|
||||
#[serde(default)]
|
||||
#[allow(unused)]
|
||||
pub status: Option<u16>,
|
||||
}
|
||||
match style {
|
||||
ReportErrorStyle::SingleLine => {
|
||||
let mut error_msg = format!("'{}'", e);
|
||||
if let Some(cause) = e.source() {
|
||||
error_msg.push_str(", caused by: [");
|
||||
|
||||
#[derive(Debug, Error, Serialize)]
|
||||
pub enum SearchError {
|
||||
#[error("HttpError: status code [{status_code:?}], msg [{msg}]")]
|
||||
HttpError {
|
||||
#[serde(serialize_with = "serialize_optional_status_code")]
|
||||
status_code: Option<StatusCode>,
|
||||
msg: String,
|
||||
},
|
||||
|
||||
#[error("ParseError: {0}")]
|
||||
ParseError(String),
|
||||
|
||||
#[error("Timeout occurred")]
|
||||
Timeout,
|
||||
|
||||
#[error("InternalError: {0}")]
|
||||
InternalError(String),
|
||||
}
|
||||
|
||||
impl From<reqwest::Error> for SearchError {
|
||||
fn from(err: reqwest::Error) -> Self {
|
||||
if err.is_timeout() {
|
||||
SearchError::Timeout
|
||||
} else if err.is_decode() {
|
||||
SearchError::ParseError(err.to_string())
|
||||
} else {
|
||||
SearchError::HttpError {
|
||||
status_code: err.status(),
|
||||
msg: err.to_string(),
|
||||
for (i, e) in std::iter::successors(Some(cause), |e| e.source()).enumerate() {
|
||||
if i != 0 {
|
||||
error_msg.push(' ');
|
||||
}
|
||||
write!(&mut error_msg, "'{}'", e).expect("failed to write in-memory string");
|
||||
}
|
||||
error_msg.push(']');
|
||||
}
|
||||
|
||||
error_msg
|
||||
}
|
||||
ReportErrorStyle::MultipleLines => snafu::Report::from_error(e).to_string(),
|
||||
}
|
||||
}
|
||||
|
||||
#[cfg(test)]
|
||||
mod tests {
|
||||
use super::*;
|
||||
use std::io;
|
||||
|
||||
#[derive(Debug, Snafu)]
|
||||
enum Error {
|
||||
#[snafu(display("I/O Error"))]
|
||||
Io { source: io::Error },
|
||||
#[snafu(display("Foo"))]
|
||||
Foo,
|
||||
#[snafu(display("Nested"))]
|
||||
Nested { source: ReadError },
|
||||
}
|
||||
|
||||
#[derive(Debug, Snafu)]
|
||||
enum ReadError {
|
||||
#[snafu(display("failed to read config file"))]
|
||||
ReadConfig { source: io::Error },
|
||||
}
|
||||
|
||||
#[test]
|
||||
fn test_report_error_single_line_one_caused_by() {
|
||||
let err = Error::Io {
|
||||
source: io::Error::new(io::ErrorKind::NotFound, "file Cargo.toml not found"),
|
||||
};
|
||||
|
||||
let error_msg = report_error(&err, ReportErrorStyle::SingleLine);
|
||||
assert_eq!(
|
||||
error_msg,
|
||||
"'I/O Error', caused by: ['file Cargo.toml not found']"
|
||||
);
|
||||
}
|
||||
|
||||
#[test]
|
||||
fn test_report_error_single_line_multiple_caused_by() {
|
||||
let err = Error::Nested {
|
||||
source: ReadError::ReadConfig {
|
||||
source: io::Error::new(io::ErrorKind::NotFound, "not found"),
|
||||
},
|
||||
};
|
||||
|
||||
let error_msg = report_error(&err, ReportErrorStyle::SingleLine);
|
||||
assert_eq!(
|
||||
error_msg,
|
||||
"'Nested', caused by: ['failed to read config file' 'not found']"
|
||||
);
|
||||
}
|
||||
|
||||
#[test]
|
||||
fn test_report_error_single_line_no_caused_by() {
|
||||
let err = Error::Foo;
|
||||
|
||||
let error_msg = report_error(&err, ReportErrorStyle::SingleLine);
|
||||
assert_eq!(error_msg, "'Foo'");
|
||||
}
|
||||
}
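`report_error` exists because an error's `Display` output does not include its `source()` chain; the single-line style above is intended for log lines, while `MultipleLines` simply delegates to `snafu::Report`. For comparison, here is a small self-contained sketch of the multi-line report; the error type and messages are illustrative, not taken from this commit.

```rust
use snafu::prelude::*;
use std::io;

// Illustrative error with one level of `source`.
#[derive(Debug, Snafu)]
enum ConfigError {
    #[snafu(display("failed to read config file"))]
    ReadConfig { source: io::Error },
}

fn main() {
    let err = ConfigError::ReadConfig {
        source: io::Error::new(io::ErrorKind::NotFound, "file Cargo.toml not found"),
    };
    // snafu::Report walks the source() chain, printing the top-level message
    // followed by a numbered "caused by" list.
    println!("{}", snafu::Report::from_error(err));
}
```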
|
||||
|
||||
@@ -1,38 +1,59 @@
|
||||
use crate::common;
|
||||
use crate::{
|
||||
common,
|
||||
server::http_client::{DecodeResponseSnafu, HttpRequestError},
|
||||
};
|
||||
use reqwest::Response;
|
||||
use serde::{Deserialize, Serialize};
|
||||
use serde_json::Value;
|
||||
use snafu::ResultExt;
|
||||
use std::collections::HashMap;
|
||||
use tauri_plugin_store::JsonValue;
|
||||
|
||||
pub async fn get_response_body_text(response: Response) -> Result<String, String> {
|
||||
#[derive(Debug, Serialize, Deserialize)]
|
||||
pub struct GetResponse {
|
||||
pub _id: String,
|
||||
pub _source: Source,
|
||||
pub result: String,
|
||||
pub payload: Option<Value>,
|
||||
}
|
||||
|
||||
#[derive(Debug, Serialize, Deserialize)]
|
||||
pub struct Source {
|
||||
pub id: String,
|
||||
pub created: String,
|
||||
pub updated: String,
|
||||
pub status: String,
|
||||
}
|
||||
|
||||
pub async fn get_response_body_text(response: Response) -> Result<String, HttpRequestError> {
|
||||
let status = response.status().as_u16();
|
||||
let body = response
|
||||
.text()
|
||||
.await
|
||||
.map_err(|e| format!("Failed to read response body: {}, code: {}", e, status))?;
|
||||
.context(DecodeResponseSnafu)?
|
||||
.trim()
|
||||
.to_string();
|
||||
|
||||
log::debug!("Response status: {}, body: {}", status, &body);
|
||||
|
||||
if status < 200 || status >= 400 {
|
||||
// Try to parse the error body
|
||||
let fallback_error = "Failed to send message".to_string();
|
||||
|
||||
if body.trim().is_empty() {
|
||||
return Err(fallback_error);
|
||||
if body.is_empty() {
|
||||
return Err(HttpRequestError::RequestFailed {
|
||||
status,
|
||||
error_response_body_str: None,
|
||||
coco_server_api_error_response_body: None,
|
||||
});
|
||||
}
|
||||
|
||||
match serde_json::from_str::<common::error::ErrorResponse>(&body) {
|
||||
Ok(parsed_error) => {
|
||||
dbg!(&parsed_error);
|
||||
Err(format!(
|
||||
"Server error ({}): {:?}",
|
||||
status, parsed_error.error
|
||||
))
|
||||
}
|
||||
Err(_) => {
|
||||
log::warn!("Failed to parse error response: {}", &body);
|
||||
Err(fallback_error)
|
||||
}
|
||||
}
|
||||
// Ignore this error, including a `serde_json::Error` in `HttpRequestError::RequestFailed`
|
||||
// would be too verbose. And it is still easy to debug without this error, since we have
|
||||
// the raw error response body.
|
||||
let api_error = serde_json::from_str::<common::error::ApiError>(&body).ok();
|
||||
Err(HttpRequestError::RequestFailed {
|
||||
status,
|
||||
error_response_body_str: Some(body),
|
||||
coco_server_api_error_response_body: api_error,
|
||||
})
|
||||
} else {
|
||||
Ok(body)
|
||||
}
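When the status is not a success, the new code keeps both the raw body (`error_response_body_str`) and, when it parses, a structured `ApiError` with the `error`/`status` shape declared in `common::error` above. A hedged sketch of a body that round-trips into that shape follows; the struct is a trimmed-down stand-in and the field values are made up for illustration.

```rust
use serde::Deserialize;

// Trimmed-down stand-ins for `common::error::ApiError` / `ApiErrorCause`,
// just enough to show the shape of an error body.
#[derive(Debug, Deserialize)]
struct ApiErrorCause {
    #[serde(default)]
    r#type: Option<String>,
    #[serde(default)]
    reason: Option<String>,
}

#[derive(Debug, Deserialize)]
struct ApiError {
    #[serde(default)]
    error: Option<ApiErrorCause>,
    #[serde(default)]
    status: Option<u16>,
}

fn main() {
    let body = r#"{"error":{"type":"not_found","reason":"assistant does not exist"},"status":404}"#;
    // Parsing is best-effort: on failure the raw body is still available to the
    // caller, so `.ok()` is enough here.
    let parsed: Option<ApiError> = serde_json::from_str(body).ok();
    println!("{:?}", parsed);
}
```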
|
||||
|
||||
@@ -678,7 +678,7 @@ impl SearchSource for ApplicationSearchSource {
|
||||
.expect("tx dropped, the runtime thread is possibly dead")
|
||||
.map_err(|pizza_engine_err| {
|
||||
let err_str = pizza_engine_err.to_string();
|
||||
SearchError::InternalError(err_str)
|
||||
SearchError::InternalError { error: err_str }
|
||||
})?;
|
||||
|
||||
let total_hits = search_result.total_hits;
|
||||
|
||||
@@ -85,7 +85,7 @@ impl SearchSource for FileSearchExtensionSearchSource {
|
||||
|
||||
let hits = implementation::hits(&query_string, from, size, &config)
|
||||
.await
|
||||
.map_err(SearchError::InternalError)?;
|
||||
.map_err(|e| SearchError::InternalError { error: e })?;
|
||||
|
||||
let total_hits = hits.len();
|
||||
Ok(QueryResponse {
|
||||
|
||||
@@ -11,6 +11,7 @@ pub mod window_management;
|
||||
|
||||
use super::Extension;
|
||||
use crate::SearchSourceRegistry;
|
||||
use crate::common::error::{ReportErrorStyle, report_error};
|
||||
use crate::extension::built_in::application::{set_apps_hotkey, unset_apps_hotkey};
|
||||
use crate::extension::{
|
||||
ExtensionBundleIdBorrowed, PLUGIN_JSON_FILE_NAME, alter_extension_json_file,
|
||||
@@ -628,7 +629,9 @@ fn load_extension_from_json_file(
|
||||
)
|
||||
.map_err(|e| e.to_string())?;
|
||||
|
||||
super::canonicalize_relative_icon_path(extension_directory, &mut extension)?;
|
||||
// TODO: refactor error handling
|
||||
super::canonicalize_relative_icon_path(extension_directory, &mut extension)
|
||||
.map_err(|e| report_error(&e, ReportErrorStyle::SingleLine))?;
|
||||
|
||||
Ok(extension)
|
||||
}
|
||||
|
||||
@@ -57,7 +57,7 @@ impl SearchSource for WindowManagementSearchSource {
|
||||
&get_built_in_extension_directory(&tauri_app_handle),
|
||||
super::EXTENSION_ID,
|
||||
)
|
||||
.map_err(SearchError::InternalError)?;
|
||||
.map_err(|e| SearchError::InternalError { error: e })?;
|
||||
let commands = extension.commands.expect("this extension has commands");
|
||||
|
||||
let mut hits: Vec<(Document, f64)> = Vec::new();
|
||||
|
||||
@@ -6,6 +6,8 @@ pub(crate) mod view_extension;
|
||||
use crate::common::document::ExtensionOnOpened;
|
||||
use crate::common::document::ExtensionOnOpenedType;
|
||||
use crate::common::document::OnOpened;
|
||||
use crate::common::error::ReportErrorStyle;
|
||||
use crate::common::error::report_error;
|
||||
use crate::common::register::SearchSourceRegistry;
|
||||
use crate::util::platform::Platform;
|
||||
use crate::util::version::COCO_VERSION;
|
||||
@@ -21,6 +23,7 @@ use serde::Serialize;
|
||||
use serde_json::Value as Json;
|
||||
use std::collections::HashMap;
|
||||
use std::collections::HashSet;
|
||||
use std::io;
|
||||
use std::ops::Deref;
|
||||
use std::path::Path;
|
||||
use tauri::{AppHandle, Manager};
|
||||
@@ -405,8 +408,12 @@ where
|
||||
return Ok(None);
|
||||
};
|
||||
|
||||
let Some(semver) = parse_coco_semver(&version_str) else {
|
||||
return Err(serde::de::Error::custom("version string format is invalid"));
|
||||
let semver = match parse_coco_semver(&version_str) {
|
||||
Ok(ver) => ver,
|
||||
Err(e) => {
|
||||
let error_msg = report_error(&e, ReportErrorStyle::SingleLine);
|
||||
return Err(serde::de::Error::custom(&error_msg));
|
||||
}
|
||||
};
|
||||
|
||||
Ok(Some(semver))
|
||||
@@ -592,7 +599,7 @@ pub(crate) enum QuicklinkLinkComponent {
|
||||
},
|
||||
}
|
||||
|
||||
#[derive(Debug, PartialEq, Deserialize, Serialize, Clone, Display, Copy)]
|
||||
#[derive(Debug, PartialEq, Deserialize, Serialize, Clone, Display, Copy, Eq)]
|
||||
#[serde(rename_all(serialize = "snake_case", deserialize = "snake_case"))]
|
||||
pub enum ExtensionType {
|
||||
#[display("Group")]
|
||||
@@ -980,11 +987,11 @@ pub(crate) async fn is_extension_enabled(
|
||||
pub(crate) fn canonicalize_relative_icon_path(
|
||||
extension_dir: &Path,
|
||||
extension: &mut Extension,
|
||||
) -> Result<(), String> {
|
||||
) -> Result<(), io::Error> {
|
||||
fn _canonicalize_relative_icon_path(
|
||||
extension_dir: &Path,
|
||||
extension: &mut Extension,
|
||||
) -> Result<(), String> {
|
||||
) -> Result<(), io::Error> {
|
||||
let icon_str = &extension.icon;
|
||||
let icon_path = Path::new(icon_str);
|
||||
|
||||
@@ -1003,7 +1010,7 @@ pub(crate) fn canonicalize_relative_icon_path(
|
||||
assets_directory
|
||||
};
|
||||
|
||||
if absolute_icon_path.try_exists().map_err(|e| e.to_string())? {
|
||||
if absolute_icon_path.try_exists()? {
|
||||
extension.icon = absolute_icon_path
|
||||
.into_os_string()
|
||||
.into_string()
|
||||
@@ -1046,11 +1053,11 @@ pub(crate) fn canonicalize_relative_icon_path(
|
||||
pub(crate) fn canonicalize_relative_page_path(
|
||||
extension_dir: &Path,
|
||||
extension: &mut Extension,
|
||||
) -> Result<(), String> {
|
||||
) -> Result<(), io::Error> {
|
||||
fn _canonicalize_view_extension_page_path(
|
||||
extension_dir: &Path,
|
||||
extension: &mut Extension,
|
||||
) -> Result<(), String> {
|
||||
) -> Result<(), io::Error> {
|
||||
let page = extension
|
||||
.page
|
||||
.as_ref()
|
||||
@@ -1068,7 +1075,7 @@ pub(crate) fn canonicalize_relative_page_path(
|
||||
if page_path.is_relative() {
|
||||
let absolute_page_path = extension_dir.join(page_path);
|
||||
|
||||
if absolute_page_path.try_exists().map_err(|e| e.to_string())? {
|
||||
if absolute_page_path.try_exists()? {
|
||||
extension.page = Some(
|
||||
absolute_page_path
|
||||
.into_os_string()
|
||||
|
||||
src-tauri/src/extension/third_party/check.rs (vendored): 495 changes
@@ -14,14 +14,67 @@
|
||||
|
||||
use crate::extension::Extension;
|
||||
use crate::extension::ExtensionType;
|
||||
use crate::extension::PLUGIN_JSON_FIELD_MINIMUM_COCO_VERSION;
|
||||
use crate::util::platform::Platform;
|
||||
use derive_more::Display;
|
||||
use serde::Serialize;
|
||||
use std::collections::HashSet;
|
||||
use std::error::Error;
|
||||
use std::fmt::Display;
|
||||
|
||||
pub(crate) fn general_check(extension: &Extension) -> Result<(), String> {
|
||||
/// Errors that may be found when we check() `plugin.json`, i.e., `struct Extension`
|
||||
#[derive(Debug, Serialize)]
|
||||
pub(crate) struct InvalidPluginJsonError {
|
||||
kind: InvalidPluginJsonErrorKind,
|
||||
/// Some if it is a sub-extension rather than the main extension that is
|
||||
/// invalid
|
||||
sub_extension_id: Option<String>,
|
||||
}
|
||||
|
||||
impl Display for InvalidPluginJsonError {
|
||||
fn fmt(&self, f: &mut std::fmt::Formatter<'_>) -> std::fmt::Result {
|
||||
if let Some(ref sub_extension_id) = self.sub_extension_id {
|
||||
write!(f, "invalid sub-extension '{}'", sub_extension_id)?;
|
||||
}
|
||||
|
||||
write!(f, "{}", self.kind)
|
||||
}
|
||||
}
|
||||
|
||||
impl Error for InvalidPluginJsonError {}
|
||||
|
||||
#[derive(Debug, Display, PartialEq, Eq, Serialize)]
|
||||
pub(crate) enum InvalidPluginJsonErrorKind {
|
||||
#[display("duplicate ID, sub-extension with ID '{}' already exists", id)]
|
||||
DuplicateSubExtensionId { id: String },
|
||||
#[display(
|
||||
"fields '{:?}' are not allowed for extensions of type '{}'",
|
||||
fields,
|
||||
ty
|
||||
)]
|
||||
FieldsNotAllowed {
|
||||
fields: &'static [&'static str],
|
||||
ty: ExtensionType,
|
||||
},
|
||||
#[display("fields '{:?}' are not allowed for sub-extensions", fields)]
|
||||
FieldsNotAllowedForSubExtension { fields: &'static [&'static str] },
|
||||
#[display("sub-extensions cannot be of types {:?}", types)]
|
||||
TypesNotAllowedForSubExtension { types: &'static [ExtensionType] },
|
||||
#[display(
|
||||
"it supports platforms {:?} that are not supported by the main extension",
|
||||
extra_platforms
|
||||
)]
|
||||
SubExtensionHasMoreSupportedPlatforms { extra_platforms: Vec<String> },
|
||||
#[display("an extensions of type '{}' should have field '{}' set", ty, field)]
|
||||
FieldRequired {
|
||||
field: &'static str,
|
||||
ty: ExtensionType,
|
||||
},
|
||||
}
|
||||
|
||||
pub(crate) fn general_check(extension: &Extension) -> Result<(), InvalidPluginJsonError> {
|
||||
// Check main extension
|
||||
check_main_extension_only(extension)?;
|
||||
check_main_extension_or_sub_extension(extension, &format!("extension [{}]", extension.id))?;
|
||||
check_main_extension_or_sub_extension(extension, false)?;
|
||||
|
||||
// `None` if `extension` is compatible with all the platforms. Otherwise `Some(limited_platforms)`
|
||||
let limited_supported_platforms = match extension.platforms.as_ref() {
|
||||
@@ -56,18 +109,17 @@ pub(crate) fn general_check(extension: &Extension) -> Result<(), String> {
|
||||
let mut sub_extension_ids = HashSet::new();
|
||||
|
||||
for sub_extension in sub_extensions.iter() {
|
||||
check_sub_extension_only(&extension.id, sub_extension, limited_supported_platforms)?;
|
||||
check_main_extension_or_sub_extension(
|
||||
extension,
|
||||
&format!("sub-extension [{}-{}]", extension.id, sub_extension.id),
|
||||
)?;
|
||||
check_sub_extension_only(sub_extension, limited_supported_platforms)?;
|
||||
check_main_extension_or_sub_extension(extension, true)?;
|
||||
|
||||
if !sub_extension_ids.insert(sub_extension.id.as_str()) {
|
||||
// extension ID already exists
|
||||
return Err(format!(
|
||||
"sub-extension with ID [{}] already exists",
|
||||
sub_extension.id
|
||||
));
|
||||
return Err(InvalidPluginJsonError {
|
||||
sub_extension_id: Some(sub_extension.id.clone()),
|
||||
kind: InvalidPluginJsonErrorKind::DuplicateSubExtensionId {
|
||||
id: sub_extension.id.clone(),
|
||||
},
|
||||
});
|
||||
}
|
||||
}
|
||||
|
||||
@@ -75,27 +127,33 @@ pub(crate) fn general_check(extension: &Extension) -> Result<(), String> {
|
||||
}
|
||||
|
||||
/// This checks the main extension only, it won't check sub-extensions.
|
||||
fn check_main_extension_only(extension: &Extension) -> Result<(), String> {
|
||||
fn check_main_extension_only(extension: &Extension) -> Result<(), InvalidPluginJsonError> {
|
||||
// Helper closure to construct `InvalidPluginJsonError` easily.
|
||||
let err = |kind| InvalidPluginJsonError {
|
||||
sub_extension_id: None,
|
||||
kind,
|
||||
};
|
||||
|
||||
// Group and Extension cannot have alias
|
||||
if extension.alias.is_some() {
|
||||
if extension.r#type == ExtensionType::Group || extension.r#type == ExtensionType::Extension
|
||||
{
|
||||
return Err(format!(
|
||||
"invalid extension [{}], extension of type [{:?}] cannot have alias",
|
||||
extension.id, extension.r#type
|
||||
));
|
||||
}
|
||||
if extension.alias.is_some()
|
||||
&& (extension.r#type == ExtensionType::Group
|
||||
|| extension.r#type == ExtensionType::Extension)
|
||||
{
|
||||
return Err(err(InvalidPluginJsonErrorKind::FieldsNotAllowed {
|
||||
fields: &["alias"],
|
||||
ty: extension.r#type,
|
||||
}));
|
||||
}
|
||||
|
||||
// Group and Extension cannot have hotkey
|
||||
if extension.hotkey.is_some() {
|
||||
if extension.r#type == ExtensionType::Group || extension.r#type == ExtensionType::Extension
|
||||
{
|
||||
return Err(format!(
|
||||
"invalid extension [{}], extension of type [{:?}] cannot have hotkey",
|
||||
extension.id, extension.r#type
|
||||
));
|
||||
}
|
||||
if extension.hotkey.is_some()
|
||||
&& (extension.r#type == ExtensionType::Group
|
||||
|| extension.r#type == ExtensionType::Extension)
|
||||
{
|
||||
return Err(err(InvalidPluginJsonErrorKind::FieldsNotAllowed {
|
||||
fields: &["hotkey"],
|
||||
ty: extension.r#type,
|
||||
}));
|
||||
}
|
||||
|
||||
if extension.commands.is_some()
|
||||
@@ -105,20 +163,20 @@ fn check_main_extension_only(extension: &Extension) -> Result<(), String> {
|
||||
{
|
||||
if extension.r#type != ExtensionType::Group && extension.r#type != ExtensionType::Extension
|
||||
{
|
||||
return Err(format!(
|
||||
"invalid extension [{}], only extension of type [Group] and [Extension] can have sub-extensions",
|
||||
extension.id,
|
||||
));
|
||||
return Err(err(InvalidPluginJsonErrorKind::FieldsNotAllowed {
|
||||
fields: &["commands", "scripts", "quicklinks", "views"],
|
||||
ty: extension.r#type,
|
||||
}));
|
||||
}
|
||||
}
|
||||
|
||||
if extension.settings.is_some() {
|
||||
// Sub-extensions are all searchable, so this check is only for main extensions.
|
||||
if !extension.searchable() {
|
||||
return Err(format!(
|
||||
"invalid extension {}, field [settings] is currently only allowed in searchable extension, this type of extension is not searchable [{}]",
|
||||
extension.id, extension.r#type
|
||||
));
|
||||
return Err(err(InvalidPluginJsonErrorKind::FieldsNotAllowed {
|
||||
fields: &["settings"],
|
||||
ty: extension.r#type,
|
||||
}));
|
||||
}
|
||||
}
|
||||
|
||||
@@ -126,16 +184,21 @@ fn check_main_extension_only(extension: &Extension) -> Result<(), String> {
|
||||
}
|
||||
|
||||
fn check_sub_extension_only(
|
||||
extension_id: &str,
|
||||
sub_extension: &Extension,
|
||||
limited_platforms: Option<&HashSet<Platform>>,
|
||||
) -> Result<(), String> {
|
||||
) -> Result<(), InvalidPluginJsonError> {
|
||||
let err = |kind| InvalidPluginJsonError {
|
||||
sub_extension_id: Some(sub_extension.id.clone()),
|
||||
kind,
|
||||
};
|
||||
|
||||
if sub_extension.r#type == ExtensionType::Group
|
||||
|| sub_extension.r#type == ExtensionType::Extension
|
||||
{
|
||||
return Err(format!(
|
||||
"invalid sub-extension [{}-{}]: sub-extensions should not be of type [Group] or [Extension]",
|
||||
extension_id, sub_extension.id
|
||||
return Err(err(
|
||||
InvalidPluginJsonErrorKind::TypesNotAllowedForSubExtension {
|
||||
types: &[ExtensionType::Group, ExtensionType::Extension],
|
||||
},
|
||||
));
|
||||
}
|
||||
|
||||
@@ -144,16 +207,18 @@ fn check_sub_extension_only(
|
||||
|| sub_extension.quicklinks.is_some()
|
||||
|| sub_extension.views.is_some()
|
||||
{
|
||||
return Err(format!(
|
||||
"invalid sub-extension [{}-{}]: fields [commands/scripts/quicklinks/views] should not be set in sub-extensions",
|
||||
extension_id, sub_extension.id
|
||||
return Err(err(
|
||||
InvalidPluginJsonErrorKind::FieldsNotAllowedForSubExtension {
|
||||
fields: &["commands", "scripts", "quicklinks", "views"],
|
||||
},
|
||||
));
|
||||
}
|
||||
|
||||
if sub_extension.developer.is_some() {
|
||||
return Err(format!(
|
||||
"invalid sub-extension [{}-{}]: field [developer] should not be set in sub-extensions",
|
||||
extension_id, sub_extension.id
|
||||
return Err(err(
|
||||
InvalidPluginJsonErrorKind::FieldsNotAllowedForSubExtension {
|
||||
fields: &["developer"],
|
||||
},
|
||||
));
|
||||
}
|
||||
|
||||
@@ -167,9 +232,10 @@ fn check_sub_extension_only(
|
||||
.collect::<Vec<String>>();
|
||||
|
||||
if !diff.is_empty() {
|
||||
return Err(format!(
|
||||
"invalid sub-extension [{}-{}]: it supports platforms {:?} that are not supported by the main extension",
|
||||
extension_id, sub_extension.id, diff
|
||||
return Err(err(
|
||||
InvalidPluginJsonErrorKind::SubExtensionHasMoreSupportedPlatforms {
|
||||
extra_platforms: diff,
|
||||
},
|
||||
));
|
||||
}
|
||||
}
|
||||
@@ -181,9 +247,10 @@ fn check_sub_extension_only(
|
||||
}
|
||||
|
||||
if sub_extension.minimum_coco_version.is_some() {
|
||||
return Err(format!(
|
||||
"invalid sub-extension [{}-{}]: [{}] cannot be set for sub-extensions",
|
||||
extension_id, sub_extension.id, PLUGIN_JSON_FIELD_MINIMUM_COCO_VERSION
|
||||
return Err(err(
|
||||
InvalidPluginJsonErrorKind::FieldsNotAllowedForSubExtension {
|
||||
fields: &["minimum_coco_version"],
|
||||
},
|
||||
));
|
||||
}
|
||||
|
||||
@@ -192,59 +259,64 @@ fn check_sub_extension_only(
|
||||
|
||||
fn check_main_extension_or_sub_extension(
|
||||
extension: &Extension,
|
||||
identifier: &str,
|
||||
) -> Result<(), String> {
|
||||
is_sub_extension: bool,
|
||||
) -> Result<(), InvalidPluginJsonError> {
|
||||
let err = |kind| InvalidPluginJsonError {
|
||||
kind,
|
||||
sub_extension_id: is_sub_extension.then(|| extension.id.clone()),
|
||||
};
|
||||
|
||||
// If field `action` is Some, then it should be a Command
|
||||
if extension.action.is_some() && extension.r#type != ExtensionType::Command {
|
||||
return Err(format!(
|
||||
"invalid {}, field [action] is set for a non-Command extension",
|
||||
identifier
|
||||
));
|
||||
return Err(err(InvalidPluginJsonErrorKind::FieldsNotAllowed {
|
||||
fields: &["action"],
|
||||
ty: extension.r#type,
|
||||
}));
|
||||
}
|
||||
|
||||
if extension.r#type == ExtensionType::Command && extension.action.is_none() {
|
||||
return Err(format!(
|
||||
"invalid {}, field [action] should be set for a Command extension",
|
||||
identifier
|
||||
));
|
||||
return Err(err(InvalidPluginJsonErrorKind::FieldRequired {
|
||||
field: "action",
|
||||
ty: extension.r#type,
|
||||
}));
|
||||
}
|
||||
|
||||
// If field `quicklink` is Some, then it should be a Quicklink
|
||||
if extension.quicklink.is_some() && extension.r#type != ExtensionType::Quicklink {
|
||||
return Err(format!(
|
||||
"invalid {}, field [quicklink] is set for a non-Quicklink extension",
|
||||
identifier
|
||||
));
|
||||
return Err(err(InvalidPluginJsonErrorKind::FieldsNotAllowed {
|
||||
fields: &["quicklink"],
|
||||
ty: extension.r#type,
|
||||
}));
|
||||
}
|
||||
|
||||
if extension.r#type == ExtensionType::Quicklink && extension.quicklink.is_none() {
|
||||
return Err(format!(
|
||||
"invalid {}, field [quicklink] should be set for a Quicklink extension",
|
||||
identifier
|
||||
));
|
||||
return Err(err(InvalidPluginJsonErrorKind::FieldRequired {
|
||||
field: "quicklink",
|
||||
ty: extension.r#type,
|
||||
}));
|
||||
}
|
||||
|
||||
// If field `page` is Some, then it should be a View
|
||||
if extension.page.is_some() && extension.r#type != ExtensionType::View {
|
||||
return Err(format!(
|
||||
"invalid {}, field [page] is set for a non-View extension",
|
||||
identifier
|
||||
));
|
||||
return Err(err(InvalidPluginJsonErrorKind::FieldsNotAllowed {
|
||||
fields: &["page"],
|
||||
ty: extension.r#type,
|
||||
}));
|
||||
}
|
||||
|
||||
if extension.r#type == ExtensionType::View && extension.page.is_none() {
|
||||
return Err(format!(
|
||||
"invalid {}, field [page] should be set for a View extension",
|
||||
identifier
|
||||
));
|
||||
return Err(err(InvalidPluginJsonErrorKind::FieldRequired {
|
||||
field: "page",
|
||||
ty: extension.r#type,
|
||||
}));
|
||||
}
|
||||
|
||||
// If field `ui` is Some, then it should be a View
|
||||
if extension.ui.is_some() && extension.r#type != ExtensionType::View {
|
||||
return Err(format!(
|
||||
"invalid {}, field [ui] is set for a non-View extension",
|
||||
identifier
|
||||
));
|
||||
return Err(err(InvalidPluginJsonErrorKind::FieldsNotAllowed {
|
||||
fields: &["ui"],
|
||||
ty: extension.r#type,
|
||||
}));
|
||||
}
|
||||
|
||||
Ok(())
|
||||
@@ -313,15 +385,28 @@ mod tests {
|
||||
}
|
||||
}
|
||||
|
||||
fn expect_error_kind(
|
||||
result: Result<(), InvalidPluginJsonError>,
|
||||
expected_kind: InvalidPluginJsonErrorKind,
|
||||
) -> InvalidPluginJsonError {
|
||||
let err = result.expect_err("expected Err but got Ok");
|
||||
assert_eq!(&err.kind, &expected_kind);
|
||||
err
|
||||
}
|
||||
|
||||
/* test_check_main_extension_only */
|
||||
#[test]
|
||||
fn test_group_cannot_have_alias() {
|
||||
let mut extension = create_basic_extension("test-group", ExtensionType::Group);
|
||||
extension.alias = Some("group-alias".to_string());
|
||||
|
||||
let result = general_check(&extension);
|
||||
assert!(result.is_err());
|
||||
assert!(result.unwrap_err().contains("cannot have alias"));
|
||||
expect_error_kind(
|
||||
general_check(&extension),
|
||||
InvalidPluginJsonErrorKind::FieldsNotAllowed {
|
||||
fields: &["alias"],
|
||||
ty: ExtensionType::Group,
|
||||
},
|
||||
);
|
||||
}
|
||||
|
||||
#[test]
|
||||
@@ -329,9 +414,13 @@ mod tests {
|
||||
let mut extension = create_basic_extension("test-ext", ExtensionType::Extension);
|
||||
extension.alias = Some("ext-alias".to_string());
|
||||
|
||||
let result = general_check(&extension);
|
||||
assert!(result.is_err());
|
||||
assert!(result.unwrap_err().contains("cannot have alias"));
|
||||
expect_error_kind(
|
||||
general_check(&extension),
|
||||
InvalidPluginJsonErrorKind::FieldsNotAllowed {
|
||||
fields: &["alias"],
|
||||
ty: ExtensionType::Extension,
|
||||
},
|
||||
);
|
||||
}
|
||||
|
||||
#[test]
|
||||
@@ -339,9 +428,13 @@ mod tests {
|
||||
let mut extension = create_basic_extension("test-group", ExtensionType::Group);
|
||||
extension.hotkey = Some("cmd+g".to_string());
|
||||
|
||||
let result = general_check(&extension);
|
||||
assert!(result.is_err());
|
||||
assert!(result.unwrap_err().contains("cannot have hotkey"));
|
||||
expect_error_kind(
|
||||
general_check(&extension),
|
||||
InvalidPluginJsonErrorKind::FieldsNotAllowed {
|
||||
fields: &["hotkey"],
|
||||
ty: ExtensionType::Group,
|
||||
},
|
||||
);
|
||||
}
|
||||
|
||||
#[test]
|
||||
@@ -349,9 +442,13 @@ mod tests {
|
||||
let mut extension = create_basic_extension("test-ext", ExtensionType::Extension);
|
||||
extension.hotkey = Some("cmd+e".to_string());
|
||||
|
||||
let result = general_check(&extension);
|
||||
assert!(result.is_err());
|
||||
assert!(result.unwrap_err().contains("cannot have hotkey"));
|
||||
expect_error_kind(
|
||||
general_check(&extension),
|
||||
InvalidPluginJsonErrorKind::FieldsNotAllowed {
|
||||
fields: &["hotkey"],
|
||||
ty: ExtensionType::Extension,
|
||||
},
|
||||
);
|
||||
}
|
||||
|
||||
#[test]
|
||||
@@ -363,12 +460,12 @@ mod tests {
|
||||
ExtensionType::Command,
|
||||
)]);
|
||||
|
||||
let result = general_check(&extension);
|
||||
assert!(result.is_err());
|
||||
assert!(
|
||||
result
|
||||
.unwrap_err()
|
||||
.contains("only extension of type [Group] and [Extension] can have sub-extensions")
|
||||
expect_error_kind(
|
||||
general_check(&extension),
|
||||
InvalidPluginJsonErrorKind::FieldsNotAllowed {
|
||||
fields: &["commands", "scripts", "quicklinks", "views"],
|
||||
ty: ExtensionType::Command,
|
||||
},
|
||||
);
|
||||
}
|
||||
|
||||
@@ -378,20 +475,24 @@ mod tests {
|
||||
extension.settings = Some(ExtensionSettings {
|
||||
hide_before_open: None,
|
||||
});
|
||||
let error_msg = general_check(&extension).unwrap_err();
|
||||
assert!(
|
||||
error_msg
|
||||
.contains("field [settings] is currently only allowed in searchable extension")
|
||||
expect_error_kind(
|
||||
general_check(&extension),
|
||||
InvalidPluginJsonErrorKind::FieldsNotAllowed {
|
||||
fields: &["settings"],
|
||||
ty: ExtensionType::Group,
|
||||
},
|
||||
);
|
||||
|
||||
let mut extension = create_basic_extension("test-extension", ExtensionType::Extension);
|
||||
extension.settings = Some(ExtensionSettings {
|
||||
hide_before_open: None,
|
||||
});
|
||||
let error_msg = general_check(&extension).unwrap_err();
|
||||
assert!(
|
||||
error_msg
|
||||
.contains("field [settings] is currently only allowed in searchable extension")
|
||||
expect_error_kind(
|
||||
general_check(&extension),
|
||||
InvalidPluginJsonErrorKind::FieldsNotAllowed {
|
||||
fields: &["settings"],
|
||||
ty: ExtensionType::Extension,
|
||||
},
|
||||
);
|
||||
}
|
||||
/* test_check_main_extension_only */
|
||||
@@ -401,12 +502,12 @@ mod tests {
|
||||
fn test_command_must_have_action() {
|
||||
let extension = create_basic_extension("test-cmd", ExtensionType::Command);
|
||||
|
||||
let result = general_check(&extension);
|
||||
assert!(result.is_err());
|
||||
assert!(
|
||||
result
|
||||
.unwrap_err()
|
||||
.contains("field [action] should be set for a Command extension")
|
||||
expect_error_kind(
|
||||
general_check(&extension),
|
||||
InvalidPluginJsonErrorKind::FieldRequired {
|
||||
field: "action",
|
||||
ty: ExtensionType::Command,
|
||||
},
|
||||
);
|
||||
}
|
||||
|
||||
@@ -415,12 +516,12 @@ mod tests {
|
||||
let mut extension = create_basic_extension("test-script", ExtensionType::Script);
|
||||
extension.action = Some(create_command_action());
|
||||
|
||||
let result = general_check(&extension);
|
||||
assert!(result.is_err());
|
||||
assert!(
|
||||
result
|
||||
.unwrap_err()
|
||||
.contains("field [action] is set for a non-Command extension")
|
||||
expect_error_kind(
|
||||
general_check(&extension),
|
||||
InvalidPluginJsonErrorKind::FieldsNotAllowed {
|
||||
fields: &["action"],
|
||||
ty: ExtensionType::Script,
|
||||
},
|
||||
);
|
||||
}
|
||||
|
||||
@@ -428,12 +529,12 @@ mod tests {
|
||||
fn test_quicklink_must_have_quicklink_field() {
|
||||
let extension = create_basic_extension("test-quicklink", ExtensionType::Quicklink);
|
||||
|
||||
let result = general_check(&extension);
|
||||
assert!(result.is_err());
|
||||
assert!(
|
||||
result
|
||||
.unwrap_err()
|
||||
.contains("field [quicklink] should be set for a Quicklink extension")
|
||||
expect_error_kind(
|
||||
general_check(&extension),
|
||||
InvalidPluginJsonErrorKind::FieldRequired {
|
||||
field: "quicklink",
|
||||
ty: ExtensionType::Quicklink,
|
||||
},
|
||||
);
|
||||
}
|
||||
|
||||
@@ -443,12 +544,12 @@ mod tests {
|
||||
extension.action = Some(create_command_action());
|
||||
extension.quicklink = Some(create_quicklink());
|
||||
|
||||
let result = general_check(&extension);
|
||||
assert!(result.is_err());
|
||||
assert!(
|
||||
result
|
||||
.unwrap_err()
|
||||
.contains("field [quicklink] is set for a non-Quicklink extension")
|
||||
expect_error_kind(
|
||||
general_check(&extension),
|
||||
InvalidPluginJsonErrorKind::FieldsNotAllowed {
|
||||
fields: &["quicklink"],
|
||||
ty: ExtensionType::Command,
|
||||
},
|
||||
);
|
||||
}
|
||||
|
||||
@@ -458,12 +559,12 @@ mod tests {
|
||||
// create_basic_extension() will set its page field if type is View, clear it
|
||||
extension.page = None;
|
||||
|
||||
let result = general_check(&extension);
|
||||
assert!(result.is_err());
|
||||
assert!(
|
||||
result
|
||||
.unwrap_err()
|
||||
.contains("field [page] should be set for a View extension")
|
||||
expect_error_kind(
|
||||
general_check(&extension),
|
||||
InvalidPluginJsonErrorKind::FieldRequired {
|
||||
field: "page",
|
||||
ty: ExtensionType::View,
|
||||
},
|
||||
);
|
||||
}
|
||||
|
||||
@@ -473,12 +574,12 @@ mod tests {
|
||||
extension.action = Some(create_command_action());
|
||||
extension.page = Some("index.html".into());
|
||||
|
||||
let result = general_check(&extension);
|
||||
assert!(result.is_err());
|
||||
assert!(
|
||||
result
|
||||
.unwrap_err()
|
||||
.contains("field [page] is set for a non-View extension")
|
||||
expect_error_kind(
|
||||
general_check(&extension),
|
||||
InvalidPluginJsonErrorKind::FieldsNotAllowed {
|
||||
fields: &["page"],
|
||||
ty: ExtensionType::Command,
|
||||
},
|
||||
);
|
||||
}
|
||||
/* test check_main_extension_or_sub_extension */
|
||||
@@ -490,12 +591,11 @@ mod tests {
|
||||
let sub_group = create_basic_extension("sub-group", ExtensionType::Group);
|
||||
extension.commands = Some(vec![sub_group]);
|
||||
|
||||
let result = general_check(&extension);
|
||||
assert!(result.is_err());
|
||||
assert!(
|
||||
result
|
||||
.unwrap_err()
|
||||
.contains("sub-extensions should not be of type [Group] or [Extension]")
|
||||
expect_error_kind(
|
||||
general_check(&extension),
|
||||
InvalidPluginJsonErrorKind::TypesNotAllowedForSubExtension {
|
||||
types: &[ExtensionType::Group, ExtensionType::Extension],
|
||||
},
|
||||
);
|
||||
}
|
||||
|
||||
@@ -505,12 +605,11 @@ mod tests {
|
||||
let sub_ext = create_basic_extension("sub-ext", ExtensionType::Extension);
|
||||
extension.scripts = Some(vec![sub_ext]);
|
||||
|
||||
let result = general_check(&extension);
|
||||
assert!(result.is_err());
|
||||
assert!(
|
||||
result
|
||||
.unwrap_err()
|
||||
.contains("sub-extensions should not be of type [Group] or [Extension]")
|
||||
expect_error_kind(
|
||||
general_check(&extension),
|
||||
InvalidPluginJsonErrorKind::TypesNotAllowedForSubExtension {
|
||||
types: &[ExtensionType::Group, ExtensionType::Extension],
|
||||
},
|
||||
);
|
||||
}
|
||||
|
||||
@@ -523,12 +622,11 @@ mod tests {
|
||||
|
||||
extension.commands = Some(vec![sub_cmd]);
|
||||
|
||||
let result = general_check(&extension);
|
||||
assert!(result.is_err());
|
||||
assert!(
|
||||
result
|
||||
.unwrap_err()
|
||||
.contains("field [developer] should not be set in sub-extensions")
|
||||
expect_error_kind(
|
||||
general_check(&extension),
|
||||
InvalidPluginJsonErrorKind::FieldsNotAllowedForSubExtension {
|
||||
fields: &["developer"],
|
||||
},
|
||||
);
|
||||
}
|
||||
|
||||
@@ -544,11 +642,12 @@ mod tests {
|
||||
|
||||
extension.commands = Some(vec![sub_cmd]);
|
||||
|
||||
let result = general_check(&extension);
|
||||
assert!(result.is_err());
|
||||
assert!(result.unwrap_err().contains(
|
||||
"fields [commands/scripts/quicklinks/views] should not be set in sub-extensions"
|
||||
));
|
||||
expect_error_kind(
|
||||
general_check(&extension),
|
||||
InvalidPluginJsonErrorKind::FieldsNotAllowedForSubExtension {
|
||||
fields: &["commands", "scripts", "quicklinks", "views"],
|
||||
},
|
||||
);
|
||||
}
|
||||
|
||||
#[test]
|
||||
@@ -558,12 +657,12 @@ mod tests {
|
||||
sub_cmd.minimum_coco_version = Some(semver::Version::new(0, 8, 0));
|
||||
extension.commands = Some(vec![sub_cmd]);
|
||||
|
||||
let result = general_check(&extension);
|
||||
assert!(result.is_err());
|
||||
assert!(result.unwrap_err().contains(&format!(
|
||||
"[{}] cannot be set for sub-extensions",
|
||||
PLUGIN_JSON_FIELD_MINIMUM_COCO_VERSION
|
||||
)));
|
||||
expect_error_kind(
|
||||
general_check(&extension),
|
||||
InvalidPluginJsonErrorKind::FieldsNotAllowedForSubExtension {
|
||||
fields: &["minimum_coco_version"],
|
||||
},
|
||||
);
|
||||
}
|
||||
/* Test check_sub_extension_only */
|
||||
|
||||
@@ -579,12 +678,11 @@ mod tests {
|
||||
|
||||
extension.commands = Some(vec![cmd1, cmd2]);
|
||||
|
||||
let result = general_check(&extension);
|
||||
assert!(result.is_err());
|
||||
assert!(
|
||||
result
|
||||
.unwrap_err()
|
||||
.contains("sub-extension with ID [duplicate-id] already exists")
|
||||
expect_error_kind(
|
||||
general_check(&extension),
|
||||
InvalidPluginJsonErrorKind::DuplicateSubExtensionId {
|
||||
id: "duplicate-id".to_string(),
|
||||
},
|
||||
);
|
||||
}
|
||||
|
||||
@@ -600,12 +698,11 @@ mod tests {
|
||||
extension.commands = Some(vec![cmd]);
|
||||
extension.scripts = Some(vec![script]);
|
||||
|
||||
let result = general_check(&extension);
|
||||
assert!(result.is_err());
|
||||
assert!(
|
||||
result
|
||||
.unwrap_err()
|
||||
.contains("sub-extension with ID [same-id] already exists")
|
||||
expect_error_kind(
|
||||
general_check(&extension),
|
||||
InvalidPluginJsonErrorKind::DuplicateSubExtensionId {
|
||||
id: "same-id".to_string(),
|
||||
},
|
||||
);
|
||||
}
|
||||
|
||||
@@ -768,12 +865,12 @@ mod tests {
|
||||
|
||||
main_extension.commands = Some(vec![sub_cmd]);
|
||||
|
||||
let result = general_check(&main_extension);
|
||||
assert!(result.is_err());
|
||||
let error_msg = result.unwrap_err();
|
||||
assert!(error_msg.contains("it supports platforms"));
|
||||
assert!(error_msg.contains("that are not supported by the main extension"));
|
||||
assert!(error_msg.contains("Linux")); // Should mention the unsupported platform
|
||||
expect_error_kind(
|
||||
general_check(&main_extension),
|
||||
InvalidPluginJsonErrorKind::SubExtensionHasMoreSupportedPlatforms {
|
||||
extra_platforms: vec!["Linux".to_string()],
|
||||
},
|
||||
);
|
||||
}
|
||||
|
||||
#[test]
|
||||
@@ -789,12 +886,12 @@ mod tests {
|
||||
|
||||
main_extension.commands = Some(vec![sub_cmd]);
|
||||
|
||||
let result = general_check(&main_extension);
|
||||
assert!(result.is_err());
|
||||
let error_msg = result.unwrap_err();
|
||||
assert!(error_msg.contains("it supports platforms"));
|
||||
assert!(error_msg.contains("that are not supported by the main extension"));
|
||||
assert!(error_msg.contains("Linux")); // Should mention the unsupported platform
|
||||
expect_error_kind(
|
||||
general_check(&main_extension),
|
||||
InvalidPluginJsonErrorKind::SubExtensionHasMoreSupportedPlatforms {
|
||||
extra_platforms: vec!["Linux".to_string()],
|
||||
},
|
||||
);
|
||||
}
|
||||
|
||||
#[test]
|
||||
|
||||
src-tauri/src/extension/third_party/install/error.rs (vendored, new file): 74 changes
@@ -0,0 +1,74 @@
use super::super::check::InvalidPluginJsonError;
use crate::common::error::serialize_error;
use crate::extension::third_party::install::ParsingMinimumCocoVersionError;
use crate::server::http_client::HttpRequestError;
use crate::util::platform::Platform;
use serde::Serialize;
use snafu::prelude::*;
use std::collections::HashSet;
use std::ffi::OsString;
use std::io;
use std::path::PathBuf;

#[derive(Debug, Snafu, Serialize)]
#[snafu(visibility(pub(crate)))]
pub(crate) enum InvalidExtensionError {
    #[snafu(display("path '{}' contains no filename", path.display()))]
    NoFileName { path: PathBuf },
    #[snafu(display("'{}' is not UTF-8 encoded", os_str.display()))]
    NonUtf8Encoding { os_str: OsString },
    #[snafu(display("file 'plugin.json' does not exist"))]
    MissingPluginJson,
    #[snafu(display("failed to read 'plugin.json'"))]
    ReadPluginJson {
        #[serde(serialize_with = "serialize_error")]
        source: io::Error,
    },
    #[snafu(display("failed to decode 'plugin.json'"))]
    DecodePluginJson {
        #[serde(serialize_with = "serialize_error")]
        source: serde_json::Error,
    },
    #[snafu(display("'plugin.json' is invalid"))]
    InvalidPluginJson { source: InvalidPluginJsonError },
    #[snafu(display("failed to parse field 'minimum_coco_version'"))]
    ParseMinimumCocoVersion {
        source: ParsingMinimumCocoVersionError,
    },
}

#[derive(Debug, Snafu, Serialize)]
#[snafu(visibility(pub(crate)))]
pub(crate) enum InstallExtensionError {
    #[snafu(display("extension is invalid"))]
    InvalidExtension { source: InvalidExtensionError },
    #[snafu(display("extension '{}' does not exist", id))]
    NotFound { id: String },
    #[snafu(display("failed to download extension"))]
    DownloadFailure { source: HttpRequestError },
    #[snafu(display("failed to decode the downloaded archive"))]
    ZipArchiveDecodingError {
        #[serde(serialize_with = "serialize_error")]
        source: zip::result::ZipError,
    },
    #[snafu(display("extension is already installed"))]
    AlreadyInstalled,
    #[snafu(display(
        "extension is incompatible with your current platform '{}', it can be installed on '{:?}'",
        current_platform,
        // Use Display print instead of Debug
        compatible_platforms.into_iter().map(|p|p.to_string()).collect::<Vec<String>>(),
    ))]
    IncompatiblePlatform {
        current_platform: Platform,
        compatible_platforms: HashSet<Platform>,
    },
    #[snafu(display("extension is incompatible with your Coco AI app",))]
    // TODO: include the actual 'minimum_coco_version' in the Display impl
    IncompatibleCocoApp,
    #[snafu(display("I/O Error"))]
    IoError {
        #[serde(serialize_with = "serialize_error")]
        source: io::Error,
    },
}
|
||||
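The frontend no longer matches on magic strings such as "already imported"; it matches on the serialized shape of these enums. With serde's default externally tagged representation, a unit variant serializes to a bare string and a struct variant to a single-key object. A minimal standalone sketch of that shape (not part of this commit; it assumes only the serde, serde_json, and snafu crates):

// Sketch only: a toy enum mirroring the derive pattern used above.
use serde::Serialize;
use snafu::prelude::*;

#[derive(Debug, Snafu, Serialize)]
enum DemoInstallError {
    #[snafu(display("extension is already installed"))]
    AlreadyInstalled,
    #[snafu(display("extension '{}' does not exist", id))]
    NotFound { id: String },
}

fn main() {
    // Unit variant -> plain string: "AlreadyInstalled"
    println!("{}", serde_json::to_string(&DemoInstallError::AlreadyInstalled).unwrap());
    // Struct variant -> externally tagged object: {"NotFound":{"id":"demo"}}
    println!("{}", serde_json::to_string(&DemoInstallError::NotFound { id: "demo".into() }).unwrap());
}

This is the shape the TypeScript handling at the end of this change relies on (error == "AlreadyInstalled", "IncompatiblePlatform" in error).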
@@ -1,6 +1,15 @@
use super::check_compatibility_via_mcv;
use super::error::InstallExtensionError;
use super::error::InvalidExtensionSnafu;
use crate::common::error::ReportErrorStyle;
use crate::common::error::report_error;
use crate::extension::PLUGIN_JSON_FILE_NAME;
use crate::extension::third_party::check::general_check;
use crate::extension::third_party::install::error::DecodePluginJsonSnafu;
use crate::extension::third_party::install::error::InvalidExtensionError;
use crate::extension::third_party::install::error::InvalidPluginJsonSnafu;
use crate::extension::third_party::install::error::IoSnafu;
use crate::extension::third_party::install::error::ParseMinimumCocoVersionSnafu;
use crate::extension::third_party::install::{
filter_out_incompatible_sub_extensions, is_extension_installed,
};
@@ -12,6 +21,9 @@ use crate::extension::{
};
use crate::util::platform::Platform;
use serde_json::Value as Json;
use snafu::ResultExt;
use std::io;
use std::io::ErrorKind as IoErrorKind;
use std::path::Path;
use std::path::PathBuf;
use tauri::AppHandle;
@@ -36,52 +48,53 @@ const DEVELOPER_ID_LOCAL: &str = "__local__";
pub(crate) async fn install_local_extension(
tauri_app_handle: AppHandle,
path: PathBuf,
) -> Result<(), String> {
) -> Result<(), InstallExtensionError> {
let extension_dir_name = path
.file_name()
.ok_or_else(|| "Invalid extension: no directory name".to_string())?
.ok_or_else(|| InvalidExtensionError::NoFileName { path: path.clone() })
.context(InvalidExtensionSnafu)?
.to_str()
.ok_or_else(|| "Invalid extension: non-UTF8 extension id".to_string())?;
.ok_or_else(|| InvalidExtensionError::NonUtf8Encoding {
os_str: path.clone().into_os_string(),
})
.context(InvalidExtensionSnafu)?;

// we use extension directory name as the extension ID.
let extension_id = extension_dir_name;
if is_extension_installed(DEVELOPER_ID_LOCAL, extension_id).await {
// The frontend code uses this string to distinguish between 2 error cases:
//
// 1. This extension is already imported
// 2. This extension is incompatible with the current platform
// 3. The selected directory does not contain a valid extension
//
// do NOT edit this without updating the frontend code.
//
// ```ts
// if (errorMessage === "already imported") {
// addError(t("settings.extensions.hints.extensionAlreadyImported"));
// } else if (errorMessage === "incompatible") {
// addError(t("settings.extensions.hints.incompatibleExtension"));
// } else {
// addError(t("settings.extensions.hints.importFailed"));
// }
// ```
//
// This is definitely error-prone, but we have to do this until we have
// structured error type
return Err("already imported".into());
return Err(InstallExtensionError::AlreadyInstalled);
}

let plugin_json_path = path.join(PLUGIN_JSON_FILE_NAME);

let plugin_json_content = fs::read_to_string(&plugin_json_path)
.await
.map_err(|e| e.to_string())?;
let plugin_json_content = match fs::read_to_string(&plugin_json_path).await {
Ok(content) => content,
Err(io_err) => {
let io_err_kind = io_err.kind();

if io_err_kind == IoErrorKind::NotFound {
return Err(InstallExtensionError::InvalidExtension {
source: InvalidExtensionError::MissingPluginJson,
});
} else {
return Err(InstallExtensionError::InvalidExtension {
source: InvalidExtensionError::ReadPluginJson { source: io_err },
});
}
}
};

// Parse as JSON first as it is not valid for `struct Extension`, we need to
// correct it (set fields `id` and `developer`) before converting it to `struct Extension`:
let mut extension_json: Json =
serde_json::from_str(&plugin_json_content).map_err(|e| e.to_string())?;
let mut extension_json: Json = serde_json::from_str(&plugin_json_content)
.context(DecodePluginJsonSnafu)
.context(InvalidExtensionSnafu)?;

if !check_compatibility_via_mcv(&extension_json)? {
return Err("app_incompatible".into());
let compatible_with_app = check_compatibility_via_mcv(&extension_json)
.context(ParseMinimumCocoVersionSnafu)
.context(InvalidExtensionSnafu)?;
if !compatible_with_app {
return Err(InstallExtensionError::IncompatibleCocoApp);
}

// Set the main extension ID to the directory name
@@ -134,36 +147,22 @@ pub(crate) async fn install_local_extension(
}

// Now we can convert JSON to `struct Extension`
let mut extension: Extension =
serde_json::from_value(extension_json).map_err(|e| e.to_string())?;
let mut extension: Extension = serde_json::from_value(extension_json)
.context(DecodePluginJsonSnafu)
.context(InvalidExtensionSnafu)?;

let current_platform = Platform::current();
/* Check begins here */
general_check(&extension)?;
general_check(&extension)
.context(InvalidPluginJsonSnafu)
.context(InvalidExtensionSnafu)?;

if let Some(ref platforms) = extension.platforms {
if !platforms.contains(&current_platform) {
// The frontend code uses this string to distinguish between 3 error cases:
//
// 1. This extension is already imported
// 2. This extension is incompatible with the current platform
// 3. The selected directory does not contain a valid extension
//
// do NOT edit this without updating the frontend code.
//
// ```ts
// if (errorMessage === "already imported") {
// addError(t("settings.extensions.hints.extensionAlreadyImported"));
// } else if (errorMessage === "incompatible") {
// addError(t("settings.extensions.hints.incompatibleExtension"));
// } else {
// addError(t("settings.extensions.hints.importFailed"));
// }
// ```
//
// This is definitely error-prone, but we have to do this until we have
// structured error type
return Err("platform_incompatible".into());
return Err(InstallExtensionError::IncompatiblePlatform {
current_platform,
compatible_platforms: platforms.clone(),
});
}
}
/* Check ends here */
@@ -185,18 +184,19 @@ pub(crate) async fn install_local_extension(
.join(DEVELOPER_ID_LOCAL)
.join(extension_dir_name);

fs::create_dir_all(&dest_dir)
.await
.map_err(|e| e.to_string())?;
fs::create_dir_all(&dest_dir).await.context(IoSnafu)?;

// Copy all files except plugin.json
let mut entries = fs::read_dir(&path).await.map_err(|e| e.to_string())?;
let mut entries = fs::read_dir(&path).await.context(IoSnafu)?;

while let Some(entry) = entries.next_entry().await.map_err(|e| e.to_string())? {
while let Some(entry) = entries.next_entry().await.context(IoSnafu)? {
let file_name = entry.file_name();
let file_name_str = file_name
.to_str()
.ok_or_else(|| "Invalid filename: non-UTF8".to_string())?;
.ok_or_else(|| InvalidExtensionError::NonUtf8Encoding {
os_str: file_name.clone(),
})
.context(InvalidExtensionSnafu)?;

// plugin.json will be handled separately.
if file_name_str == PLUGIN_JSON_FILE_NAME {
@@ -208,27 +208,32 @@ pub(crate) async fn install_local_extension(

if src_path.is_dir() {
// Recursively copy directory
copy_dir_recursively(&src_path, &dest_path).await?;
copy_dir_recursively(&src_path, &dest_path)
.await
.context(IoSnafu)?;
} else {
// Copy file
fs::copy(&src_path, &dest_path)
.await
.map_err(|e| e.to_string())?;
fs::copy(&src_path, &dest_path).await.context(IoSnafu)?;
}
}

// Write the corrected plugin.json file
let corrected_plugin_json =
serde_json::to_string_pretty(&extension).map_err(|e| e.to_string())?;
let corrected_plugin_json = serde_json::to_string_pretty(&extension).unwrap_or_else(|e| {
panic!(
"failed to serialize extension {:?}, error:\n{}",
extension,
report_error(&e, ReportErrorStyle::MultipleLines)
)
});

let dest_plugin_json_path = dest_dir.join(PLUGIN_JSON_FILE_NAME);
fs::write(&dest_plugin_json_path, corrected_plugin_json)
.await
.map_err(|e| e.to_string())?;
.context(IoSnafu)?;

// Canonicalize relative icon and page paths
canonicalize_relative_icon_path(&dest_dir, &mut extension)?;
canonicalize_relative_page_path(&dest_dir, &mut extension)?;
canonicalize_relative_icon_path(&dest_dir, &mut extension).context(IoSnafu)?;
canonicalize_relative_page_path(&dest_dir, &mut extension).context(IoSnafu)?;

// Add extension to the search source
third_party_ext_list_write_lock.push(extension);
@@ -238,22 +243,18 @@ pub(crate) async fn install_local_extension(

/// Helper function to recursively copy directories.
#[async_recursion::async_recursion]
async fn copy_dir_recursively(src: &Path, dest: &Path) -> Result<(), String> {
tokio::fs::create_dir_all(dest)
.await
.map_err(|e| e.to_string())?;
let mut read_dir = tokio::fs::read_dir(src).await.map_err(|e| e.to_string())?;
async fn copy_dir_recursively(src: &Path, dest: &Path) -> Result<(), io::Error> {
tokio::fs::create_dir_all(dest).await?;
let mut read_dir = tokio::fs::read_dir(src).await?;

while let Some(entry) = read_dir.next_entry().await.map_err(|e| e.to_string())? {
while let Some(entry) = read_dir.next_entry().await? {
let src_path = entry.path();
let dest_path = dest.join(entry.file_name());

if src_path.is_dir() {
copy_dir_recursively(&src_path, &dest_path).await?;
} else {
tokio::fs::copy(&src_path, &dest_path)
.await
.map_err(|e| e.to_string())?;
tokio::fs::copy(&src_path, &dest_path).await?;
}
}

@@ -41,14 +41,18 @@
//!
//! 7. Add the extension to the in-memory extension list.

pub(crate) mod error;
pub(crate) mod local_extension;
pub(crate) mod store;

use crate::extension::Extension;
use crate::extension::PLUGIN_JSON_FIELD_MINIMUM_COCO_VERSION;
use crate::util::platform::Platform;
use crate::util::version::ParseVersionError;
use crate::util::version::{COCO_VERSION, parse_coco_semver};
use serde::Serialize;
use serde_json::Value as Json;
use snafu::prelude::*;
use std::ops::Deref;

use super::THIRD_PARTY_EXTENSIONS_SEARCH_SOURCE;
@@ -121,9 +125,17 @@ pub(crate) fn filter_out_incompatible_sub_extensions(
}
}

#[derive(Debug, Snafu, Serialize)]
pub(crate) enum ParsingMinimumCocoVersionError {
#[snafu(display("field 'minimum_coco_version' should be a string, but it is not"))]
MismatchType,
#[snafu(display("failed to parse field 'minimum_coco_version'"))]
ParsingVersionError { source: ParseVersionError },
}

/// Inspect the "minimum_coco_version" field and see if this extension is
/// compatible with the current Coco app.
fn check_compatibility_via_mcv(plugin_json: &Json) -> Result<bool, String> {
fn check_compatibility_via_mcv(plugin_json: &Json) -> Result<bool, ParsingMinimumCocoVersionError> {
let Some(mcv_json) = plugin_json.get(PLUGIN_JSON_FIELD_MINIMUM_COCO_VERSION) else {
return Ok(true);
};
@@ -132,18 +144,10 @@ fn check_compatibility_via_mcv(plugin_json: &Json) -> Result<bool, String> {
}

let Some(mcv_str) = mcv_json.as_str() else {
return Err(format!(
"invalid extension: field [{}] should be a string",
PLUGIN_JSON_FIELD_MINIMUM_COCO_VERSION
));
return Err(ParsingMinimumCocoVersionError::MismatchType);
};

let Some(mcv) = parse_coco_semver(mcv_str) else {
return Err(format!(
"invalid extension: [{}] is not a valid version string",
PLUGIN_JSON_FIELD_MINIMUM_COCO_VERSION
));
};
let mcv = parse_coco_semver(mcv_str).context(ParsingVersionSnafu)?;

Ok(COCO_VERSION.deref() >= &mcv)
}
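The conversions above lean on snafu's context selectors: for a variant named ParsingVersionError, the derive generates a ParsingVersionSnafu selector, and .context(...) wraps the inner error as that variant's source field. A minimal sketch of the pattern, assuming only snafu 0.8 (the DemoError type and parse_build_number helper are illustrative, not from the commit):

use snafu::prelude::*;

#[derive(Debug, Snafu)]
enum DemoError {
    #[snafu(display("failed to parse field 'minimum_coco_version'"))]
    ParsingVersionError { source: std::num::ParseIntError },
}

fn parse_build_number(s: &str) -> Result<u64, DemoError> {
    // ParsingVersionSnafu wraps the ParseIntError into the ParsingVersionError
    // variant, exactly like the .context(ParsingVersionSnafu) call above.
    s.parse::<u64>().context(ParsingVersionSnafu)
}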
124
src-tauri/src/extension/third_party/install/store.rs
vendored
@@ -5,7 +5,9 @@ use super::check_compatibility_via_mcv;
use super::is_extension_installed;
use crate::common::document::DataSourceReference;
use crate::common::document::Document;
use crate::common::error::ReportErrorStyle;
use crate::common::error::SearchError;
use crate::common::error::report_error;
use crate::common::search::QueryResponse;
use crate::common::search::QuerySource;
use crate::common::search::SearchQuery;
@@ -17,13 +19,24 @@ use crate::extension::canonicalize_relative_icon_path;
use crate::extension::canonicalize_relative_page_path;
use crate::extension::third_party::check::general_check;
use crate::extension::third_party::get_third_party_extension_directory;
use crate::extension::third_party::install::error::DecodePluginJsonSnafu;
use crate::extension::third_party::install::error::DownloadFailureSnafu;
use crate::extension::third_party::install::error::InstallExtensionError;
use crate::extension::third_party::install::error::InvalidExtensionError;
use crate::extension::third_party::install::error::InvalidExtensionSnafu;
use crate::extension::third_party::install::error::InvalidPluginJsonSnafu;
use crate::extension::third_party::install::error::IoSnafu;
use crate::extension::third_party::install::error::ParseMinimumCocoVersionSnafu;
use crate::extension::third_party::install::error::ZipArchiveDecodingSnafu;
use crate::extension::third_party::install::filter_out_incompatible_sub_extensions;
use crate::server::http_client::DecodeResponseSnafu;
use crate::server::http_client::HttpClient;
use crate::util::platform::Platform;
use async_trait::async_trait;
use reqwest::StatusCode;
use serde_json::Map as JsonObject;
use serde_json::Value as Json;
use snafu::ResultExt;
use std::io::Read;
use tauri::AppHandle;

@@ -233,24 +246,24 @@ pub(crate) async fn extension_detail(
pub(crate) async fn install_extension_from_store(
tauri_app_handle: AppHandle,
id: String,
) -> Result<(), String> {
) -> Result<(), InstallExtensionError> {
let path = format!("store/extension/{}/_download", id);
let response = HttpClient::get("default_coco_server", &path, None)
.await
.map_err(|e| format!("Failed to download extension: {}", e))?;
.context(DownloadFailureSnafu)?;

if response.status() == StatusCode::NOT_FOUND {
return Err(format!("extension [{}] not found", id));
return Err(InstallExtensionError::NotFound { id });
}

let bytes = response
.bytes()
.await
.map_err(|e| format!("Failed to read response bytes: {}", e))?;
.context(DecodeResponseSnafu)
.context(DownloadFailureSnafu)?;

let cursor = std::io::Cursor::new(bytes);
let mut archive =
zip::ZipArchive::new(cursor).map_err(|e| format!("Failed to read zip archive: {}", e))?;
let mut archive = zip::ZipArchive::new(cursor).context(ZipArchiveDecodingSnafu)?;

// The plugin.json sent from the server does not conform to our `struct Extension` definition:
//
@@ -260,27 +273,48 @@ pub(crate) async fn install_extension_from_store(
// we need to correct it
let mut plugin_json = archive
.by_name(PLUGIN_JSON_FILE_NAME)
.map_err(|e| e.to_string())?;
.context(ZipArchiveDecodingSnafu)?;
let mut plugin_json_content = String::new();
std::io::Read::read_to_string(&mut plugin_json, &mut plugin_json_content)
.map_err(|e| e.to_string())?;
let mut extension: Json = serde_json::from_str(&plugin_json_content)
.map_err(|e| format!("Failed to parse plugin.json: {}", e))?;

if !check_compatibility_via_mcv(&extension)? {
return Err("app_incompatible".into());
std::io::Read::read_to_string(&mut plugin_json, &mut plugin_json_content).context(IoSnafu)?;

let mut extension: Json = serde_json::from_str(&plugin_json_content)
.context(DecodePluginJsonSnafu)
.context(InvalidExtensionSnafu)?;

let compatible_with_app = check_compatibility_via_mcv(&extension)
.context(ParseMinimumCocoVersionSnafu)
.context(InvalidExtensionSnafu)?;
if !compatible_with_app {
return Err(InstallExtensionError::IncompatibleCocoApp);
}

let mut_ref_to_developer_object: &mut Json = extension
let extension_object = extension
.as_object_mut()
.expect("plugin.json should be an object")
.ok_or_else(|| InvalidExtensionError::DecodePluginJson {
source: serde::de::Error::custom("plugin.json should be an object"),
})
.context(InvalidExtensionSnafu)?;

let mut_ref_to_developer_object: &mut Json = extension_object
.get_mut("developer")
.expect("plugin.json should contain field [developer]");
.ok_or_else(|| InvalidExtensionError::DecodePluginJson {
source: serde::de::Error::missing_field("developer"),
})
.context(InvalidExtensionSnafu)?;

let developer_id = mut_ref_to_developer_object
.get("id")
.expect("plugin.json should contain [developer.id]")
.ok_or_else(|| InvalidExtensionError::DecodePluginJson {
source: serde::de::Error::missing_field("id"),
})
.context(InvalidExtensionSnafu)?
.as_str()
.expect("plugin.json field [developer.id] should be a string");
.ok_or_else(|| InvalidExtensionError::DecodePluginJson {
source: serde::de::Error::custom("field 'id' should be of type 'string'"),
})
.context(InvalidExtensionSnafu)?;

*mut_ref_to_developer_object = Json::String(developer_id.into());

// Set IDs for sub-extensions (commands, quicklinks, scripts)
@@ -305,27 +339,33 @@ pub(crate) async fn install_extension_from_store(
set_ids_for_field(&mut extension, "scripts", &mut counter);

// Now the extension JSON is valid
let mut extension: Extension = serde_json::from_value(extension).unwrap_or_else(|e| {
panic!(
"cannot parse plugin.json as struct Extension, error [{:?}]",
e
);
});
let developer_id = extension.developer.clone().expect("developer has been set");
let mut extension: Extension = serde_json::from_value(extension)
.context(DecodePluginJsonSnafu)
.context(InvalidExtensionSnafu)?;

let developer_id = extension
.developer
.clone()
.expect("we checked this field exists");

drop(plugin_json);

general_check(&extension)?;
general_check(&extension)
.context(InvalidPluginJsonSnafu)
.context(InvalidExtensionSnafu)?;

let current_platform = Platform::current();
if let Some(ref platforms) = extension.platforms {
if !platforms.contains(&current_platform) {
return Err("platform_incompatible".into());
return Err(InstallExtensionError::IncompatiblePlatform {
current_platform,
compatible_platforms: platforms.clone(),
});
}
}

if is_extension_installed(&developer_id, &id).await {
return Err("Extension already installed.".into());
return Err(InstallExtensionError::AlreadyInstalled);
}

// Extension is compatible with current platform, but it could contain sub
@@ -350,11 +390,11 @@ pub(crate) async fn install_extension_from_store(
};
tokio::fs::create_dir_all(extension_directory.as_path())
.await
.map_err(|e| e.to_string())?;
.context(IoSnafu)?;

// Extract all files except plugin.json
for i in 0..archive.len() {
let mut zip_file = archive.by_index(i).map_err(|e| e.to_string())?;
let mut zip_file = archive.by_index(i).context(ZipArchiveDecodingSnafu)?;
// `.name()` is safe to use in our cases, the cases listed in the below
// page won't happen to us.
//
@@ -382,35 +422,39 @@ pub(crate) async fn install_extension_from_store(
{
tokio::fs::create_dir_all(parent_dir)
.await
.map_err(|e| e.to_string())?;
.context(IoSnafu)?;
}

let mut dest_file = tokio::fs::File::create(&dest_file_path)
.await
.map_err(|e| e.to_string())?;
.context(IoSnafu)?;
let mut src_bytes = Vec::with_capacity(
zip_file
.size()
.try_into()
.expect("we won't have a extension file that is bigger than 4GiB"),
);
zip_file
.read_to_end(&mut src_bytes)
.map_err(|e| e.to_string())?;
zip_file.read_to_end(&mut src_bytes).context(IoSnafu)?;
tokio::io::copy(&mut src_bytes.as_slice(), &mut dest_file)
.await
.map_err(|e| e.to_string())?;
.context(IoSnafu)?;
}
// Create plugin.json from the extension variable
let plugin_json_path = extension_directory.join(PLUGIN_JSON_FILE_NAME);
let extension_json = serde_json::to_string_pretty(&extension).map_err(|e| e.to_string())?;
let extension_json = serde_json::to_string_pretty(&extension).unwrap_or_else(|e| {
panic!(
"failed to serialize extension {:?}, error:\n{}",
extension,
report_error(&e, ReportErrorStyle::MultipleLines)
)
});
tokio::fs::write(&plugin_json_path, extension_json)
.await
.map_err(|e| e.to_string())?;
.context(IoSnafu)?;

// Canonicalize relative icon and page paths
canonicalize_relative_icon_path(&extension_directory, &mut extension)?;
canonicalize_relative_page_path(&extension_directory, &mut extension)?;
canonicalize_relative_icon_path(&extension_directory, &mut extension).context(IoSnafu)?;
canonicalize_relative_page_path(&extension_directory, &mut extension).context(IoSnafu)?;

third_party_ext_list_write_lock.push(extension);

32
src-tauri/src/extension/third_party/mod.rs
vendored
@@ -9,7 +9,9 @@ use super::canonicalize_relative_icon_path;
use crate::common::document::DataSourceReference;
use crate::common::document::Document;
use crate::common::document::open;
use crate::common::error::ReportErrorStyle;
use crate::common::error::SearchError;
use crate::common::error::report_error;
use crate::common::search::QueryResponse;
use crate::common::search::QuerySource;
use crate::common::search::SearchQuery;
@@ -159,13 +161,17 @@ pub(crate) async fn load_third_party_extensions_from_directory(
continue 'extension;
};

let Some(mcv) = parse_coco_semver(mcv_str) else {
log::warn!(
"invalid extension: [{}]: field [{}] has invalid version string",
extension_dir_file_name,
PLUGIN_JSON_FIELD_MINIMUM_COCO_VERSION
);
continue 'extension;
let mcv = match parse_coco_semver(mcv_str) {
Ok(ver) => ver,
Err(e) => {
log::warn!(
"invalid extension: [{}]: field [{}] has invalid version: {} ",
extension_dir_file_name,
PLUGIN_JSON_FIELD_MINIMUM_COCO_VERSION,
report_error(&e, ReportErrorStyle::SingleLine)
);
continue 'extension;
}
};

Some(mcv)
@@ -265,10 +271,8 @@ pub(crate) async fn load_third_party_extensions_from_directory(
};

// Turn icon path into an absolute path if it is a valid relative path
canonicalize_relative_icon_path(
&extension_dir.path(),
&mut incompatible_extension,
)?;
canonicalize_relative_icon_path(&extension_dir.path(), &mut incompatible_extension)
.map_err(|e| report_error(&e, ReportErrorStyle::SingleLine))?;
// No need to canonicalize the path field as it is not set

extensions.push(incompatible_extension);
@@ -339,8 +343,10 @@ pub(crate) async fn load_third_party_extensions_from_directory(
/* Check ends here */

// Turn it into an absolute path if it is a valid relative path because frontend code needs this.
canonicalize_relative_icon_path(&extension_dir.path(), &mut extension)?;
canonicalize_relative_page_path(&extension_dir.path(), &mut extension)?;
canonicalize_relative_icon_path(&extension_dir.path(), &mut extension)
.map_err(|e| report_error(&e, ReportErrorStyle::SingleLine))?;
canonicalize_relative_page_path(&extension_dir.path(), &mut extension)
.map_err(|e| report_error(&e, ReportErrorStyle::SingleLine))?;

extensions.push(extension);
}

@@ -1,10 +1,11 @@
use crate::common::error::SearchError;
use crate::common::error::{ReportErrorStyle, SearchError, report_error};
use crate::common::register::SearchSourceRegistry;
use crate::common::search::{
FailedRequest, MultiSourceQueryResponse, QueryHits, QuerySource, SearchQuery,
};
use crate::common::traits::SearchSource;
use crate::extension::LOCAL_QUERY_SOURCE_TYPE;
use crate::server::http_client::HttpRequestError;
use crate::server::servers::logout_coco_server;
use crate::server::servers::mark_server_as_offline;
use crate::settings::get_local_query_source_weight;
@@ -507,15 +508,20 @@ async fn query_coco_fusion_handle_failed_request(

let mut status_code_num: u16 = 0;

if let SearchError::HttpError {
status_code: opt_status_code,
msg: _,
} = search_error
{
if let SearchError::HttpError { source } = &search_error {
let opt_status_code = match source {
HttpRequestError::RequestFailed {
status,
error_response_body_str: _,
coco_server_api_error_response_body: _,
} => Some(status),
_ => None,
};

if let Some(status_code) = opt_status_code {
status_code_num = status_code.as_u16();
if status_code != StatusCode::OK {
if status_code == StatusCode::UNAUTHORIZED {
status_code_num = *status_code;
if *status_code != StatusCode::OK.as_u16() {
if *status_code == StatusCode::UNAUTHORIZED {
// This Coco server is unavailable. In addition to marking it as
// unavailable, we need to log out because the status code is 401.
logout_coco_server(tauri_app_handle.clone(), query_source.id.to_string()).await.unwrap_or_else(|e| {
@@ -535,7 +541,7 @@ async fn query_coco_fusion_handle_failed_request(
failed_requests.push(FailedRequest {
source: query_source,
status: status_code_num,
error: Some(search_error.to_string()),
error: Some(report_error(&search_error, ReportErrorStyle::SingleLine)),
reason: None,
});
}

@@ -1,9 +1,14 @@
use super::servers::{get_server_by_id, get_server_token};
use crate::common::error::serialize_error;
use crate::common::http::get_response_body_text;
use crate::server::http_client::HttpClient;
use crate::server::http_client::{HttpClient, HttpRequestError, SendSnafu};
use reqwest::multipart::{Form, Part};
use serde::{Deserialize, Serialize};
use serde::Deserialize;
use serde::Serialize;
use serde_json::Value;
use snafu::prelude::*;
use std::ffi::OsString;
use std::io;
use std::{collections::HashMap, path::PathBuf};
use tauri::command;
use tokio::fs::File;
@@ -21,23 +26,66 @@ pub struct DeleteAttachmentResponse {
pub result: String,
}

#[derive(Debug, Snafu, Serialize)]
#[snafu(visibility(pub(crate)))]
pub(crate) enum AttachmentError {
#[snafu(display("attachment file '{}' does not exist", file.display()))]
FileNotFound { file: PathBuf },
#[snafu(display("I/O error"))]
Io {
#[serde(serialize_with = "serialize_error")]
source: io::Error,
},
#[snafu(display("attachment file '{}' does not have a name", file.display()))]
NoFilename { file: PathBuf },
#[snafu(display("attachment filename '{}' is not UTF-8 encoded", filename.display()))]
NonUtf8Filename { filename: OsString },
#[snafu(display("coco server with the specified ID filename '{}' does not exist", id))]
ServerNotFound { id: String },
#[snafu(display("HTTP request failed"))]
HttpRequestError { source: HttpRequestError },
#[snafu(display("decoding JSON failed"))]
JsonDecodingError {
#[serde(serialize_with = "serialize_error")]
source: serde_json::Error,
},
}

#[command]
pub async fn upload_attachment(
server_id: String,
file_paths: Vec<PathBuf>,
) -> Result<UploadAttachmentResponse, String> {
) -> Result<UploadAttachmentResponse, AttachmentError> {
let mut form = Form::new();

for file_path in file_paths {
let file = File::open(&file_path)
.await
.map_err(|err| err.to_string())?;
let file = match File::open(&file_path).await {
Ok(file) => file,
Err(io_err) => {
let io_err_kind = io_err.kind();
if io_err_kind == io::ErrorKind::NotFound {
return Err(AttachmentError::FileNotFound {
file: file_path.clone(),
});
} else {
return Err(AttachmentError::Io { source: io_err });
}
}
};

let stream = FramedRead::new(file, BytesCodec::new());
let file_name = file_path
.file_name()
.and_then(|n| n.to_str())
.ok_or("Invalid filename")?;
let file_name_os_str =
file_path
.file_name()
.ok_or_else(|| AttachmentError::NoFilename {
file: file_path.clone(),
})?;
let file_name =
file_name_os_str
.to_str()
.ok_or_else(|| AttachmentError::NonUtf8Filename {
filename: file_name_os_str.to_os_string(),
})?;

let part =
Part::stream(reqwest::Body::wrap_stream(stream)).file_name(file_name.to_string());
@@ -45,9 +93,12 @@ pub async fn upload_attachment(
form = form.part("files", part);
}

let server = get_server_by_id(&server_id)
.await
.ok_or("Server not found")?;
let server =
get_server_by_id(&server_id)
.await
.ok_or_else(|| AttachmentError::ServerNotFound {
id: server_id.clone(),
})?;
let url = HttpClient::join_url(&server.endpoint, &format!("attachment/_upload"));

let token = get_server_token(&server_id).await;
@@ -60,22 +111,24 @@ pub async fn upload_attachment(
let response = client
.post(url)
.multipart(form)
.headers((&headers).try_into().map_err(|err| format!("{}", err))?)
.headers((&headers).try_into().expect("conversion should not fail"))
.send()
.await
.map_err(|err| err.to_string())?;
.context(SendSnafu)
.context(HttpRequestSnafu)?;

let body = get_response_body_text(response).await?;
let body = get_response_body_text(response)
.await
.context(HttpRequestSnafu)?;

serde_json::from_str::<UploadAttachmentResponse>(&body)
.map_err(|e| format!("Failed to parse upload response: {}", e))
serde_json::from_str::<UploadAttachmentResponse>(&body).context(JsonDecodingSnafu)
}

#[command]
pub async fn get_attachment_by_ids(
server_id: String,
attachments: Vec<String>,
) -> Result<Value, String> {
) -> Result<Value, AttachmentError> {
println!("get_attachment_by_ids server_id: {}", server_id);
println!("get_attachment_by_ids attachments: {:?}", attachments);

@@ -86,28 +139,27 @@ pub async fn get_attachment_by_ids(

let response = HttpClient::post(&server_id, "/attachment/_search", None, Some(body))
.await
.map_err(|e| format!("Request error: {}", e))?;
.context(HttpRequestSnafu)?;

let body = get_response_body_text(response).await?;
let body = get_response_body_text(response)
.await
.context(HttpRequestSnafu)?;

serde_json::from_str::<Value>(&body)
.map_err(|e| format!("Failed to parse attachment response: {}", e))
serde_json::from_str::<Value>(&body).context(JsonDecodingSnafu)
}

#[command]
pub async fn delete_attachment(server_id: String, id: String) -> Result<bool, String> {
pub async fn delete_attachment(server_id: String, id: String) -> Result<bool, AttachmentError> {
let response = HttpClient::delete(&server_id, &format!("/attachment/{}", id), None, None)
.await
.map_err(|e| format!("Request error: {}", e))?;
.context(HttpRequestSnafu)?;

let body = get_response_body_text(response).await?;
let body = get_response_body_text(response)
.await
.context(HttpRequestSnafu)?;

let parsed: DeleteAttachmentResponse = serde_json::from_str(&body)
.map_err(|e| format!("Failed to parse delete response: {}", e))?;
let parsed: DeleteAttachmentResponse =
serde_json::from_str(&body).context(JsonDecodingSnafu)?;

parsed
.result
.eq("deleted")
.then_some(true)
.ok_or_else(|| "Delete operation was not successful".to_string())
Ok(parsed.result.eq("deleted"))
}
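These attachment commands can hand AttachmentError straight back to Tauri because command error types must implement Serialize; whatever the enum serializes to is what the rejected promise carries on the frontend. A hypothetical sketch of that wiring (the demo_delete_attachment wrapper is not in the commit):

// Hypothetical wrapper, for illustration only: #[tauri::command] accepts
// AttachmentError as the error type because it derives Serialize.
#[tauri::command]
async fn demo_delete_attachment(server_id: String, id: String) -> Result<bool, AttachmentError> {
    delete_attachment(server_id, id).await
}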
@@ -1,9 +1,13 @@
use crate::common::error::ApiError;
use crate::common::error::serialize_error;
use crate::server::servers::{get_server_by_id, get_server_token};
use crate::util::app_lang::get_app_lang;
use crate::util::platform::Platform;
use http::{HeaderName, HeaderValue, StatusCode};
use once_cell::sync::Lazy;
use reqwest::{Client, Method, RequestBuilder};
use serde::Serialize;
use snafu::prelude::*;
use std::collections::HashMap;
use std::sync::LazyLock;
use std::time::Duration;
@@ -29,6 +33,52 @@ pub static HTTP_CLIENT: Lazy<Mutex<Client>> = Lazy::new(|| {
Mutex::new(new_reqwest_http_client(allow_self_signature))
});

/// Errors that could happen when handling a HTTP request.
///
/// `reqwest` uses the same error type `reqwest::Error` for all kinds of
/// errors, it distinguishes kinds via those `is_xxx()` methods (e.g.,
/// `is_connect()` [1]). Due to this reason, both `SendError` and
/// `DecodeResponseError` use `request::Error` as the associated value.
///
/// Technically, `ServerNotFound` is not a HTTP request error, but Coco app
/// primarily send HTTP requests to Coco servers, so it is included.
///
/// [1]: https://docs.rs/reqwest/0.12.24/reqwest/struct.Error.html#method.is_connect
#[derive(Debug, Snafu, Serialize)]
#[snafu(visibility(pub(crate)))]
pub(crate) enum HttpRequestError {
#[snafu(display("failed to send HTTP request"))]
SendError {
#[serde(serialize_with = "serialize_error")]
source: reqwest::Error,
},
#[snafu(display("failed to decode HTTP response"))]
DecodeResponseError {
#[serde(serialize_with = "serialize_error")]
source: reqwest::Error,
},
#[snafu(display("connection timed out"))]
ConnectionTimeout,
#[snafu(display(
"HTTP request failed, status '{}', response body '{:?}', coco_server_api_error: '{:?}'",
status,
error_response_body_str,
coco_server_api_error_response_body,
))]
RequestFailed {
status: u16,
/// None if we do not have response body.
error_response_body_str: Option<String>,
/// Some if:
///
/// 1. This is a request sent to Coco server
/// 2. We successfully decode an `ApiError` from the `error_response_body_str`.
coco_server_api_error_response_body: Option<ApiError>,
},
#[snafu(display("no Coco server with specific ID '{}' exists", id))]
ServerNotFound { id: String },
}

/// These header values won't change during a process's lifetime.
static STATIC_HEADERS: LazyLock<HashMap<String, String>> = LazyLock::new(|| {
HashMap::from([
@@ -65,7 +115,7 @@ impl HttpClient {
query_params: Option<Vec<String>>,
headers: Option<HashMap<String, String>>,
body: Option<reqwest::Body>,
) -> Result<reqwest::Response, String> {
) -> Result<reqwest::Response, HttpRequestError> {
log::debug!(
"Sending Request: {}, query_params: {:?}, header: {:?}, body: {:?}",
&url,
@@ -77,10 +127,16 @@ impl HttpClient {
let request_builder =
Self::get_request_builder(method, url, headers, query_params, body).await;

let response = request_builder.send().await.map_err(|e| {
//dbg!("Failed to send request: {}", &e);
format!("Failed to send request: {}", e)
})?;
let response = match request_builder.send().await {
Ok(response) => response,
Err(e) => {
if e.is_timeout() {
return Err(HttpRequestError::ConnectionTimeout);
} else {
return Err(HttpRequestError::SendError { source: e });
}
}
};

log::debug!(
"Request: {}, Response status: {:?}, header: {:?}",
@@ -173,7 +229,7 @@ impl HttpClient {
custom_headers: Option<HashMap<String, String>>,
query_params: Option<Vec<String>>,
body: Option<reqwest::Body>,
) -> Result<reqwest::Response, String> {
) -> Result<reqwest::Response, HttpRequestError> {
// Fetch the server using the server_id
let server = get_server_by_id(server_id).await;
if let Some(s) = server {
@@ -205,7 +261,9 @@ impl HttpClient {

Self::send_raw_request(method, &url, query_params, Some(headers), body).await
} else {
Err(format!("Server [{}] not found", server_id))
Err(HttpRequestError::ServerNotFound {
id: server_id.to_string(),
})
}
}

@@ -214,7 +272,7 @@ impl HttpClient {
server_id: &str,
path: &str,
query_params: Option<Vec<String>>,
) -> Result<reqwest::Response, String> {
) -> Result<reqwest::Response, HttpRequestError> {
HttpClient::send_request(server_id, Method::GET, path, None, query_params, None).await
}

@@ -224,7 +282,7 @@ impl HttpClient {
path: &str,
query_params: Option<Vec<String>>,
body: Option<reqwest::Body>,
) -> Result<reqwest::Response, String> {
) -> Result<reqwest::Response, HttpRequestError> {
HttpClient::send_request(server_id, Method::POST, path, None, query_params, body).await
}

@@ -234,7 +292,7 @@ impl HttpClient {
custom_headers: Option<HashMap<String, String>>,
query_params: Option<Vec<String>>,
body: Option<reqwest::Body>,
) -> Result<reqwest::Response, String> {
) -> Result<reqwest::Response, HttpRequestError> {
HttpClient::send_request(
server_id,
Method::POST,
@@ -254,7 +312,7 @@ impl HttpClient {
custom_headers: Option<HashMap<String, String>>,
query_params: Option<Vec<String>>,
body: Option<reqwest::Body>,
) -> Result<reqwest::Response, String> {
) -> Result<reqwest::Response, HttpRequestError> {
HttpClient::send_request(
server_id,
Method::PUT,
@@ -273,7 +331,7 @@ impl HttpClient {
path: &str,
custom_headers: Option<HashMap<String, String>>,
query_params: Option<Vec<String>>,
) -> Result<reqwest::Response, String> {
) -> Result<reqwest::Response, HttpRequestError> {
HttpClient::send_request(
server_id,
Method::DELETE,
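Because the client now returns a structured HttpRequestError, callers can branch on variants instead of parsing strings. An illustrative in-crate sketch (the ping helper and the /health path are hypothetical, not part of this commit):

use crate::server::http_client::{HttpClient, HttpRequestError};

// Sketch: distinguish a timeout from a non-2xx response without string matching.
async fn ping(server_id: &str) -> Result<(), String> {
    match HttpClient::get(server_id, "/health", None).await {
        Ok(_) => Ok(()),
        Err(HttpRequestError::ConnectionTimeout) => Err("server timed out".into()),
        Err(HttpRequestError::RequestFailed { status, .. }) => {
            Err(format!("server answered with status {}", status))
        }
        Err(other) => Err(other.to_string()),
    }
}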
@@ -1,13 +1,14 @@
use crate::common::document::{Document, OnOpened};
use crate::common::error::SearchError;
use crate::common::error::{HttpSnafu, ResponseDecodeSnafu, SearchError};
use crate::common::http::get_response_body_text;
use crate::common::search::{QueryHits, QueryResponse, QuerySource, SearchQuery, SearchResponse};
use crate::common::server::Server;
use crate::common::traits::SearchSource;
use crate::server::http_client::HttpClient;
use crate::server::http_client::{HttpClient, HttpRequestError};
use async_trait::async_trait;
use ordered_float::OrderedFloat;
use reqwest::StatusCode;
use snafu::ResultExt;
use std::collections::HashMap;
use tauri::AppHandle;

@@ -112,31 +113,29 @@ impl SearchSource for CocoSearchSource {

let response = HttpClient::get(&self.server.id, &url, Some(query_params))
.await
.map_err(|e| SearchError::HttpError {
status_code: None,
msg: format!("{}", e),
})?;
.context(HttpSnafu)?;
let status_code = response.status();

if ![StatusCode::OK, StatusCode::CREATED].contains(&status_code) {
return Err(SearchError::HttpError {
status_code: Some(status_code),
msg: format!("Request failed with status code [{}]", status_code),
});
let http_err = HttpRequestError::RequestFailed {
status: status_code.as_u16(),
error_response_body_str: None,
coco_server_api_error_response_body: None,
};
let search_err = SearchError::HttpError { source: http_err };
return Err(search_err);
}

// Use the helper function to parse the response body
let response_body = get_response_body_text(response)
.await
.map_err(|e| SearchError::ParseError(e))?;
let response_body = get_response_body_text(response).await.context(HttpSnafu)?;

// Check if the response body is empty
if !response_body.is_empty() {
// log::info!("Search response body: {}", &response_body);

// Parse the search response from the body text
let parsed: SearchResponse<Document> = serde_json::from_str(&response_body)
.map_err(|e| SearchError::ParseError(format!("{}", e)))?;
let parsed: SearchResponse<Document> =
serde_json::from_str(&response_body).context(ResponseDecodeSnafu)?;

// Process the parsed response
total_hits = parsed.hits.total.value as usize;

@@ -1,4 +1,5 @@
use crate::COCO_TAURI_STORE;
use crate::common::error::{ReportErrorStyle, report_error};
use crate::common::http::get_response_body_text;
use crate::common::register::SearchSourceRegistry;
use crate::common::server::{AuthProvider, Provider, Server, ServerAccessToken, Sso, Version};
@@ -441,7 +442,7 @@ pub async fn refresh_coco_server_info(app_handle: AppHandle, id: String) -> Resu
Ok(response) => response,
Err(e) => {
mark_server_as_offline(app_handle, &id).await;
return Err(e);
return Err(report_error(&e, ReportErrorStyle::SingleLine));
}
};

@@ -451,7 +452,9 @@ pub async fn refresh_coco_server_info(app_handle: AppHandle, id: String) -> Resu
}

// Get body text via helper
let body = get_response_body_text(response).await?;
let body = get_response_body_text(response)
.await
.map_err(|e| report_error(&e, ReportErrorStyle::SingleLine))?;

// Deserialize server
let mut updated_server: Server = serde_json::from_str(&body)
@@ -520,7 +523,9 @@ pub async fn add_coco_server(app_handle: AppHandle, endpoint: String) -> Result<
return Err("This Coco server is possibly down".into());
}

let body = get_response_body_text(response).await?;
let body = get_response_body_text(response)
.await
.map_err(|e| report_error(&e, ReportErrorStyle::SingleLine))?;

let mut server: Server = serde_json::from_str(&body)
.map_err(|e| format!("Failed to deserialize the response: {}", e))?;

@@ -1,4 +1,4 @@
use crate::server::http_client::HttpClient;
use crate::server::http_client::{HttpClient, HttpRequestError};
use futures_util::StreamExt;
use http::Method;
use serde_json::json;
@@ -11,7 +11,7 @@ pub async fn synthesize(
server_id: String,
voice: String,
content: String,
) -> Result<(), String> {
) -> Result<(), HttpRequestError> {
let body = json!({
"voice": voice,
"content": content,
@@ -30,12 +30,18 @@ pub async fn synthesize(

log::info!("Synthesize response status: {}", response.status());

if response.status() == 429 {
let status_code = response.status();

if status_code == 429 {
return Ok(());
}

if !response.status().is_success() {
return Err(format!("Request Failed: {}", response.status()));
if !status_code.is_success() {
return Err(HttpRequestError::RequestFailed {
status: status_code.as_u16(),
error_response_body_str: None,
coco_server_api_error_response_body: None,
});
}

let mut stream = response.bytes_stream();

@@ -1,4 +1,9 @@
use crate::common::error::serialize_error;
use crate::common::error::{ReportErrorStyle, report_error};
use semver::{BuildMetadata, Prerelease, Version as SemVer};
use serde::Serialize;
use snafu::{ResultExt, prelude::*};
use std::num::ParseIntError;
use std::sync::LazyLock;
use tauri_plugin_updater::RemoteRelease;

@@ -12,6 +17,22 @@ pub(crate) static COCO_VERSION: LazyLock<SemVer> = LazyLock::new(|| {
parse_coco_semver(env!("CARGO_PKG_VERSION")).expect("parsing should never fail, if version format changes, then parse_coco_semver() should be updated as well")
});

#[derive(Debug, Snafu, Serialize)]
#[snafu(visibility(pub(crate)))]
pub(crate) enum ParseVersionError {
#[snafu(display("SemVer::Version::parse() failed"))]
SemVerParseError {
#[serde(serialize_with = "serialize_error")]
source: semver::Error,
},
#[snafu(display("failed to parse build number '{}'", build_number))]
ParseBuildNumberError {
build_number: String,
#[serde(serialize_with = "serialize_error")]
source: ParseIntError,
},
}

/// Coco AI app adopt SemVer but the version string format does not adhere to
/// the SemVer specification, this function does the conversion. Returns `None`
/// if the input is not in the expected format so that the conversion cannot
@@ -30,11 +51,11 @@ pub(crate) static COCO_VERSION: LazyLock<SemVer> = LazyLock::new(|| {
/// * 0.9.0-SNAPSHOT-<build num> => 0.9.0-SNAPSHOT.<build num>
///
/// A pre-release of 0.9.0
fn to_semver(version: &SemVer) -> Option<SemVer> {
fn to_semver(version: &SemVer) -> Result<SemVer, ParseVersionError> {
let pre = &version.pre;

if pre.is_empty() {
return Some(SemVer::new(version.major, version.minor, version.patch));
return Ok(SemVer::new(version.major, version.minor, version.patch));
}
let is_pre_release = pre.starts_with(SNAPSHOT_DASH);

@@ -44,19 +65,23 @@ fn to_semver(version: &SemVer) -> Option<SemVer> {
pre.as_str()
};
// Parse the build number to validate it, we do not need the actual number though.
build_number_str.parse::<usize>().ok()?;
build_number_str
.parse::<usize>()
.context(ParseBuildNumberSnafu {
build_number: build_number_str.to_string(),
})?;

// Return after checking the build number is valid
if !is_pre_release {
return Some(SemVer::new(version.major, version.minor, version.patch));
return Ok(SemVer::new(version.major, version.minor, version.patch));
}

let pre = {
let pre_str = format!("{}.{}", SNAPSHOT, build_number_str);
Prerelease::new(&pre_str).unwrap_or_else(|e| panic!("invalid Prerelease: {}", e))
Prerelease::new(&pre_str).context(SemVerParseSnafu)?
};

Some(SemVer {
Ok(SemVer {
major: version.major,
minor: version.minor,
patch: version.patch,
@@ -67,15 +92,39 @@ fn to_semver(version: &SemVer) -> Option<SemVer> {

/// Parse Coco version string to a `SemVer`. Returns `None` if it is not a valid
/// version string.
pub(crate) fn parse_coco_semver(version_str: &str) -> Option<SemVer> {
let not_semver = SemVer::parse(version_str).ok()?;
pub(crate) fn parse_coco_semver(version_str: &str) -> Result<SemVer, ParseVersionError> {
let not_semver = SemVer::parse(version_str).context(SemVerParseSnafu)?;
to_semver(&not_semver)
}

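A usage sketch of the conversion described in the doc comment above, written as a test against parse_coco_semver from this file (the expected pre-release string follows the documented 0.9.0-SNAPSHOT-<build num> => 0.9.0-SNAPSHOT.<build num> mapping):

#[test]
fn demo_parse_coco_semver_snapshot() {
    let v = parse_coco_semver("0.9.0-SNAPSHOT-123").expect("valid Coco version string");
    assert_eq!((v.major, v.minor, v.patch), (0, 9, 0));
    // "SNAPSHOT-<build num>" is rewritten to the SemVer pre-release "SNAPSHOT.<build num>"
    assert_eq!(v.pre.as_str(), "SNAPSHOT.123");
}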
pub(crate) fn custom_version_comparator(local: SemVer, remote_release: RemoteRelease) -> bool {
/// We are not allowed to populate errors in this function, so when errors
/// happen, we do not update.
const SHOULD_NOT_UPDATE_WHEN_UNEXPECTED_ERROR_HAPPEN: bool = false;

let remote = remote_release.version;
let local_semver = to_semver(&local);
let remote_semver = to_semver(&remote);
let local_semver = match to_semver(&local) {
Ok(ver) => ver,
Err(e) => {
log::error!(
"failed to parse this Coco app's version '{}', error {}",
local,
snafu::Report::from_error(e)
);
return SHOULD_NOT_UPDATE_WHEN_UNEXPECTED_ERROR_HAPPEN;
}
};
let remote_semver = match to_semver(&remote) {
Ok(ver) => ver,
Err(e) => {
log::error!(
"failed to parse the version '{}' fetch from the '.latest.json' file, error '{}'",
remote,
report_error(&e, ReportErrorStyle::SingleLine)
);
return SHOULD_NOT_UPDATE_WHEN_UNEXPECTED_ERROR_HAPPEN;
}
};

let should_update = remote_semver > local_semver;

@@ -172,14 +221,28 @@ mod tests {
fn test_try_into_semver_invalid_build_number() {
// Should panic when build number is not a valid number
let input = SemVer::parse("0.8.0-abc").unwrap();
assert!(to_semver(&input).is_none());
let err = to_semver(&input).unwrap_err();
assert!(matches!(
err,
ParseVersionError::ParseBuildNumberError {
build_number: _,
source: _
}
));
}

#[test]
fn test_try_into_semver_invalid_snapshot_build_number() {
// Should panic when SNAPSHOT build number is not a valid number
let input = SemVer::parse("0.9.0-SNAPSHOT-xyz").unwrap();
assert!(to_semver(&input).is_none());
let err = to_semver(&input).unwrap_err();
assert!(matches!(
err,
ParseVersionError::ParseBuildNumberError {
build_number: _,
source: _
}
));
}

#[test]

@@ -233,7 +233,7 @@ export const Extensions = () => {
"info"
);
} catch (error) {
installExtensionError(String(error));
installExtensionError(error);
}
}}
>

@@ -252,7 +252,21 @@
"appIncompatibleExtension": "Installation failed! Incompatible with your Coco App version. Please update and retry.",
"uninstall": "Uninstall",
"uninstallSuccess": "Uninstalled successfully",
"incompatible": "Extension cannot run on the current version. Please upgrade Coco App."
"incompatible": "Extension cannot run on the current version. Please upgrade Coco App.",
"invalidExtension": "Extension is invalid.",
"noFileName": "Path '{{path}}' contains no filename.",
"nonUtf8Encoding": "'{{os_str}}' is not UTF-8 encoded.",
"missingPluginJson": "File 'plugin.json' does not exist.",
"readPluginJson": "Failed to read 'plugin.json'.",
"decodePluginJson": "Failed to decode 'plugin.json'.",
"invalidPluginJson": "'plugin.json' is invalid.",
"parseMinimumCocoVersion": "Failed to parse field 'minimum_coco_version'.",
"duplicateSubExtensionId": "Duplicate ID, sub-extension with ID '{{id}}' already exists.",
"fieldsNotAllowed": "Fields '{{fields}}' are not allowed for extensions of type '{{ty}}'.",
"fieldsNotAllowedForSubExtension": "Fields '{{fields}}' are not allowed for sub-extensions.",
"typesNotAllowedForSubExtension": "Sub-extensions cannot be of types {{types}}.",
"subExtensionHasMoreSupportedPlatforms": "It supports platforms {{extra_platforms}} that are not supported by the main extension.",
"fieldRequired": "An extension of type '{{ty}}' should have field '{{field}}' set."
},
"application": {
"title": "Applications",

@@ -252,7 +252,21 @@
"appIncompatibleExtension": "安装失败!该插件与当前 Coco App 版本不兼容,请升级后重试。",
"uninstall": "卸载",
"uninstallSuccess": "卸载成功",
"incompatible": "扩展无法在当前版本中运行,请升级 Coco App。"
"incompatible": "扩展无法在当前版本中运行,请升级 Coco App。",
"invalidExtension": "插件无效。",
"noFileName": "路径 '{{path}}' 不包含文件名。",
"nonUtf8Encoding": "'{{os_str}}' 不是 UTF-8 编码。",
"missingPluginJson": "文件 'plugin.json' 不存在。",
"readPluginJson": "读取 'plugin.json' 失败。",
"decodePluginJson": "解码 'plugin.json' 失败。",
"invalidPluginJson": "'plugin.json' 无效。",
"parseMinimumCocoVersion": "解析字段 'minimum_coco_version' 失败。",
"duplicateSubExtensionId": "ID 重复,ID 为 '{{id}}' 的子插件已存在。",
"fieldsNotAllowed": "字段 '{{fields}}' 不允许用于类型为 '{{ty}}' 的插件。",
"fieldsNotAllowedForSubExtension": "字段 '{{fields}}' 不允许用于子插件。",
"typesNotAllowedForSubExtension": "子插件不能是类型 {{types}}。",
"subExtensionHasMoreSupportedPlatforms": "它支持主插件不支持的平台 {{extra_platforms}}。",
"fieldRequired": "类型为 '{{ty}}' 的插件应设置字段 '{{field}}'。"
},
"application": {
"title": "应用程序",

@@ -327,22 +327,76 @@ export const visibleFooterBar = () => {
return ui?.footer ?? true;
};

export const installExtensionError = (error: string) => {
export const installExtensionError = (error: any) => {
console.log(error);

const { addError } = useAppStore.getState();

let message = "settings.extensions.hints.importFailed";

if (error === "already imported") {
if (error == "AlreadyInstalled") {
message = "settings.extensions.hints.extensionAlreadyImported";
}

if (error === "platform_incompatible") {
if (isObject(error) && "IncompatiblePlatform" in error) {
message = "settings.extensions.hints.platformIncompatibleExtension";
}

if (error === "app_incompatible") {
if (error === "IncompatibleApp") {
message = "settings.extensions.hints.appIncompatibleExtension";
}

if (isObject(error) && "InvalidExtension" in error) {
const source = (error as any).InvalidExtension.source;
let options = {};

if (isObject(source)) {
if ("NoFileName" in source) {
message = "settings.extensions.hints.noFileName";
options = (source as any).NoFileName;
} else if ("NonUtf8Encoding" in source) {
message = "settings.extensions.hints.nonUtf8Encoding";
options = (source as any).NonUtf8Encoding;
} else if ("ReadPluginJson" in source) {
message = "settings.extensions.hints.readPluginJson";
} else if ("DecodePluginJson" in source) {
message = "settings.extensions.hints.decodePluginJson";
} else if ("ParseMinimumCocoVersion" in source) {
message = "settings.extensions.hints.parseMinimumCocoVersion";
} else if ("InvalidPluginJson" in source) {
const innerSource = (source as any).InvalidPluginJson.source;
const kind = innerSource.kind;

if (isObject(kind)) {
if ("DuplicateSubExtensionId" in kind) {
message = "settings.extensions.hints.duplicateSubExtensionId";
options = (kind as any).DuplicateSubExtensionId;
} else if ("FieldsNotAllowed" in kind) {
message = "settings.extensions.hints.fieldsNotAllowed";
options = (kind as any).FieldsNotAllowed;
} else if ("FieldsNotAllowedForSubExtension" in kind) {
message = "settings.extensions.hints.fieldsNotAllowedForSubExtension";
options = (kind as any).FieldsNotAllowedForSubExtension;
} else if ("TypesNotAllowedForSubExtension" in kind) {
message = "settings.extensions.hints.typesNotAllowedForSubExtension";
options = (kind as any).TypesNotAllowedForSubExtension;
} else if ("SubExtensionHasMoreSupportedPlatforms" in kind) {
message = "settings.extensions.hints.subExtensionHasMoreSupportedPlatforms";
options = (kind as any).SubExtensionHasMoreSupportedPlatforms;
} else if ("FieldRequired" in kind) {
message = "settings.extensions.hints.fieldRequired";
options = (kind as any).FieldRequired;
}
}
}
} else if (source === "MissingPluginJson") {
message = "settings.extensions.hints.missingPluginJson";
}

addError(i18next.t(message, options));
return;
}

addError(i18next.t(message));
};