refactor: execute Calculator/Extension search() in spawn_blocking (#601)

Author: SteveLauC
Date: 2025-06-04 18:45:17 +08:00
Committed by: GitHub
Parent: 9ea7dbf3aa
Commit: 9b53a026ff
3 changed files with 91 additions and 62 deletions
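
The Calculator and Extension changes below follow one pattern: the synchronous, CPU-bound body of search() is wrapped in a closure and handed to tokio::task::spawn_blocking so it no longer runs on the async executor, and a panic inside the blocking task is re-raised on the caller instead of being swallowed as a JoinError. A minimal, self-contained sketch of that pattern (the function name and payload here are illustrative, not from this repository):

    async fn search_blocking(query: String) -> usize {
        // Run the CPU-bound part of the search off the async executor.
        let closure = move || query.len();

        match tokio::task::spawn_blocking(closure).await {
            Ok(result) => result,
            // A JoinError from spawn_blocking is treated as a panic and
            // re-raised on the calling task, mirroring what the commit does.
            Err(e) => std::panic::resume_unwind(e.into_panic()),
        }
    }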


@@ -23,7 +23,7 @@ impl CalculatorSource {
     }
 }
-fn parse_query(query: String) -> Value {
+fn parse_query(query: &str) -> Value {
     let mut query_json = serde_json::Map::new();
     let operators = ["+", "-", "*", "/", "%"];
@@ -48,7 +48,7 @@ fn parse_query(query: String) -> Value {
query_json.insert("type".to_string(), Value::String("expression".to_string()));
}
query_json.insert("value".to_string(), Value::String(query));
query_json.insert("value".to_string(), Value::String(query.to_string()));
Value::Object(query_json)
}
@@ -128,42 +128,56 @@ impl SearchSource for CalculatorSource {
             });
         }
-        match meval::eval_str(query_string) {
-            Ok(num) => {
-                let mut payload: HashMap<String, Value> = HashMap::new();
+        let query_string_clone = query_string.to_string();
+        let query_source = self.get_type();
+        let base_score = self.base_score;
+        let closure = move || -> QueryResponse {
+            let res_num = meval::eval_str(&query_string_clone);
-                let payload_query = parse_query(query_string.into());
-                let payload_result = parse_result(num);
+            match res_num {
+                Ok(num) => {
+                    let mut payload: HashMap<String, Value> = HashMap::new();
-                payload.insert("query".to_string(), payload_query);
-                payload.insert("result".to_string(), payload_result);
+                    let payload_query = parse_query(&query_string_clone);
+                    let payload_result = parse_result(num);
-                let doc = Document {
-                    id: DATA_SOURCE_ID.to_string(),
-                    category: Some(DATA_SOURCE_ID.to_string()),
-                    payload: Some(payload),
-                    source: Some(DataSourceReference {
-                        r#type: Some(LOCAL_QUERY_SOURCE_TYPE.into()),
-                        name: Some(DATA_SOURCE_ID.into()),
-                        id: Some(DATA_SOURCE_ID.into()),
-                        icon: Some(String::from("font_Calculator")),
-                    }),
-                    ..Default::default()
-                };
+                    payload.insert("query".to_string(), payload_query);
+                    payload.insert("result".to_string(), payload_result);
-                return Ok(QueryResponse {
-                    source: self.get_type(),
-                    hits: vec![(doc, self.base_score)],
-                    total_hits: 1,
-                });
-            }
-            Err(_) => {
-                return Ok(QueryResponse {
-                    source: self.get_type(),
-                    hits: Vec::new(),
-                    total_hits: 0,
-                });
+                    let doc = Document {
+                        id: DATA_SOURCE_ID.to_string(),
+                        category: Some(DATA_SOURCE_ID.to_string()),
+                        payload: Some(payload),
+                        source: Some(DataSourceReference {
+                            r#type: Some(LOCAL_QUERY_SOURCE_TYPE.into()),
+                            name: Some(DATA_SOURCE_ID.into()),
+                            id: Some(DATA_SOURCE_ID.into()),
+                            icon: Some(String::from("font_Calculator")),
+                        }),
+                        ..Default::default()
+                    };
+                    QueryResponse {
+                        source: query_source,
+                        hits: vec![(doc, base_score)],
+                        total_hits: 1,
+                    }
+                }
+                Err(_) => {
+                    QueryResponse {
+                        source: query_source,
+                        hits: Vec::new(),
+                        total_hits: 0,
+                    }
+                }
             }
+        };
+        let spawn_result = tokio::task::spawn_blocking(closure).await;
+        match spawn_result {
+            Ok(response) => Ok(response),
+            Err(e) => std::panic::resume_unwind(e.into_panic()),
         }
     }
 }
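
One detail worth noting in the hunk above: spawn_blocking requires a 'static closure, so the closure cannot borrow self or query_string; the commit therefore clones the query string and copies self.get_type() and self.base_score into locals before the move. A reduced sketch of that constraint (Source, base_score, and score are illustrative stand-ins, not the project's real types):

    struct Source {
        base_score: f64,
    }

    impl Source {
        async fn score(&self, query_string: &str) -> f64 {
            // Own everything the blocking task needs before moving it in;
            // borrowing `self` or `query_string` here would not compile.
            let query_string_clone = query_string.to_string();
            let base_score = self.base_score;

            let closure = move || base_score * query_string_clone.len() as f64;

            match tokio::task::spawn_blocking(closure).await {
                Ok(score) => score,
                Err(e) => std::panic::resume_unwind(e.into_panic()),
            }
        }
    }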


@@ -492,47 +492,62 @@ impl SearchSource for ThirdPartyExtensionsSearchSource {
         let opt_data_source = query
             .query_strings
             .get("datasource")
-            .map(|owned_str| owned_str.as_str());
+            .map(|owned_str| owned_str.to_string());
-        let mut hits = Vec::new();
-        let extensions_read_lock = self.inner.extensions.read().await;
         let query_lower = query_string.to_lowercase();
+        let inner_clone = Arc::clone(&self.inner);
-        for extension in extensions_read_lock.iter().filter(|ext| ext.enabled) {
-            if extension.r#type.contains_sub_items() {
-                if let Some(ref commands) = extension.commands {
-                    for command in commands.iter().filter(|cmd| cmd.enabled) {
-                        if let Some(hit) = extension_to_hit(command, &query_lower, opt_data_source)
-                        {
-                            hits.push(hit);
+        let closure = move || {
+            let mut hits = Vec::new();
+            let extensions_read_lock = futures::executor::block_on(async { inner_clone.extensions.read().await });
+            for extension in extensions_read_lock.iter().filter(|ext| ext.enabled) {
+                if extension.r#type.contains_sub_items() {
+                    if let Some(ref commands) = extension.commands {
+                        for command in commands.iter().filter(|cmd| cmd.enabled) {
+                            if let Some(hit) =
+                                extension_to_hit(command, &query_lower, opt_data_source.as_deref())
+                            {
+                                hits.push(hit);
+                            }
                         }
                     }
-                }
-                if let Some(ref scripts) = extension.scripts {
-                    for script in scripts.iter().filter(|script| script.enabled) {
-                        if let Some(hit) = extension_to_hit(script, &query_lower, opt_data_source) {
-                            hits.push(hit);
+                    if let Some(ref scripts) = extension.scripts {
+                        for script in scripts.iter().filter(|script| script.enabled) {
+                            if let Some(hit) =
+                                extension_to_hit(script, &query_lower, opt_data_source.as_deref())
+                            {
+                                hits.push(hit);
+                            }
                         }
                     }
-                }
-                if let Some(ref quick_links) = extension.quick_links {
-                    for quick_link in quick_links.iter().filter(|link| link.enabled) {
-                        if let Some(hit) =
-                            extension_to_hit(quick_link, &query_lower, opt_data_source)
-                        {
-                            hits.push(hit);
+                    if let Some(ref quick_links) = extension.quick_links {
+                        for quick_link in quick_links.iter().filter(|link| link.enabled) {
+                            if let Some(hit) =
+                                extension_to_hit(quick_link, &query_lower, opt_data_source.as_deref())
+                            {
+                                hits.push(hit);
+                            }
                         }
                     }
-                }
-            } else {
-                if let Some(hit) = extension_to_hit(extension, &query_lower, opt_data_source) {
-                    hits.push(hit);
+                } else {
+                    if let Some(hit) = extension_to_hit(extension, &query_lower, opt_data_source.as_deref()) {
+                        hits.push(hit);
+                    }
                 }
             }
-        }
+            hits
+        };
+        let join_result = tokio::task::spawn_blocking(closure).await;
+        let hits = match join_result {
+            Ok(hits) => hits,
+            Err(e) => std::panic::resume_unwind(e.into_panic()),
+        };
         let total_hits = hits.len();
         Ok(QueryResponse {
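
The extension source has one extra wrinkle: the extension list lives behind tokio's async RwLock inside an Arc, so the blocking closure clones the Arc first and then uses futures::executor::block_on to wait for the read guard on the blocking thread, as the hunk above does. A stripped-down sketch of that combination (the Vec<bool> payload and count_enabled are placeholders, not the project's real types):

    use std::sync::Arc;
    use tokio::sync::RwLock;

    async fn count_enabled(inner: Arc<RwLock<Vec<bool>>>) -> usize {
        // Clone the Arc so the 'static closure owns its handle to the data.
        let inner_clone = Arc::clone(&inner);

        let closure = move || {
            // On the blocking thread, drive the async lock future to
            // completion synchronously to obtain the read guard.
            let guard = futures::executor::block_on(async { inner_clone.read().await });
            guard.iter().filter(|enabled| **enabled).count()
        };

        match tokio::task::spawn_blocking(closure).await {
            Ok(n) => n,
            Err(e) => std::panic::resume_unwind(e.into_panic()),
        }
    }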


@@ -32,7 +32,7 @@ fn same_type_futures(
     timeout_duration: Duration,
     search_query: SearchQuery,
 ) -> impl Future<
-    Output=(
+    Output = (
         QuerySource,
         Result<Result<QueryResponse, SearchError>, Elapsed>,
     ),
@@ -44,7 +44,7 @@ fn same_type_futures(
             timeout(timeout_duration, async {
                 query_source_trait_object.search(search_query).await
             })
-                .await,
+            .await,
         )
     }
 }