mirror of
https://github.com/infinilabs/coco-app.git
synced 2025-12-17 20:17:43 +01:00
Compare commits
115 Commits
v0.6.0
...
microphone
| Author | SHA1 | Date | |
|---|---|---|---|
|
|
7d0d11860c | ||
|
|
d48d4af7d2 | ||
|
|
876d14f9d9 | ||
|
|
a8e090c9be | ||
|
|
c30df6cee0 | ||
|
|
b833769c25 | ||
|
|
855fb2a168 | ||
|
|
d2735ec13b | ||
|
|
c40fc5818a | ||
|
|
a553ebd593 | ||
|
|
232166eb89 | ||
|
|
99144950d9 | ||
|
|
32d4f45144 | ||
|
|
6bc78b41ef | ||
|
|
cd54beee04 | ||
|
|
ee45d21bbe | ||
|
|
4709f8c660 | ||
|
|
4696aa1759 | ||
|
|
924fc09516 | ||
|
|
5a700662dd | ||
|
|
8f992bfa92 | ||
|
|
e7dd27c744 | ||
|
|
7914836c3e | ||
|
|
b37bf1f7c7 | ||
|
|
419d9d55c5 | ||
|
|
d3ed54c771 | ||
|
|
8f26dbcbe6 | ||
|
|
663873ae14 | ||
|
|
286b1be212 | ||
|
|
37221782b0 | ||
|
|
644e291105 | ||
|
|
aae6984aa7 | ||
|
|
dbd296d399 | ||
|
|
e2ad25967d | ||
|
|
21b61d80d8 | ||
|
|
9f4c693ac4 | ||
|
|
45c27cac56 | ||
|
|
e46035afd4 | ||
|
|
1004bb73f4 | ||
|
|
d664fa7271 | ||
|
|
067fb7144f | ||
|
|
579f91f3aa | ||
|
|
abe2aecedf | ||
|
|
e8f9a4e627 | ||
|
|
22b1558e8b | ||
|
|
ca3b514a65 | ||
|
|
c694c4eda9 | ||
|
|
ac835c76aa | ||
|
|
25bbab7432 | ||
|
|
cca00e944e | ||
|
|
e78fe4ac89 | ||
|
|
60fd79f1fa | ||
|
|
5c0a865822 | ||
|
|
5b50e4b51b | ||
|
|
b97386a827 | ||
|
|
29aa26af94 | ||
|
|
3650d9914c | ||
|
|
f26031047c | ||
|
|
c8719926be | ||
|
|
f1dfc5c730 | ||
|
|
74ed642a42 | ||
|
|
5a17173620 | ||
|
|
29d14ff931 | ||
|
|
ad01504766 | ||
|
|
57ab08fb6d | ||
|
|
db5c09f80c | ||
|
|
b1e2c6961d | ||
|
|
3f4abe51e5 | ||
|
|
060c09e11c | ||
|
|
657df482bf | ||
|
|
f4f7732927 | ||
|
|
5e536e1444 | ||
|
|
2b48cdf84a | ||
|
|
bc37616506 | ||
|
|
07bcd80776 | ||
|
|
7b8b396368 | ||
|
|
823a95d601 | ||
|
|
af0b98a41b | ||
|
|
7d0e7cd7dc | ||
|
|
e56d6b1b60 | ||
|
|
941cf96a07 | ||
|
|
14fbf2ac5d | ||
|
|
494e2f0d8a | ||
|
|
e3a3849fa4 | ||
|
|
0b5e31a476 | ||
|
|
c8a723ed9d | ||
|
|
aaf4bf2737 | ||
|
|
24b0123a61 | ||
|
|
e8bd970cdb | ||
|
|
dd3be3a819 | ||
|
|
5b034c28ac | ||
|
|
b17949fe29 | ||
|
|
5d37420109 | ||
|
|
1d3ceb0c70 | ||
|
|
4d11afe18e | ||
|
|
0c0291c8c0 | ||
|
|
cca672b2cb | ||
|
|
5b27488402 | ||
|
|
c1c4e0db7b | ||
|
|
074a7c8b0a | ||
|
|
bc524e19db | ||
|
|
05f70d26d9 | ||
|
|
ab26dc7c6a | ||
|
|
6ff6b46139 | ||
|
|
119fd87a25 | ||
|
|
de226a8fa4 | ||
|
|
6865957725 | ||
|
|
87818d69ed | ||
|
|
38b67d01b8 | ||
|
|
a4f4a24730 | ||
|
|
87bd3d020f | ||
|
|
825ac5d565 | ||
|
|
f21a35e15d | ||
|
|
6e90b28204 | ||
|
|
e92e5e5158 |
2
.env
2
.env
@@ -1,5 +1,3 @@
|
||||
COCO_SERVER_URL=http://localhost:9000 #https://coco.infini.cloud #http://localhost:9000
|
||||
|
||||
COCO_WEBSOCKET_URL=ws://localhost:9000/ws #wss://coco.infini.cloud/ws #ws://localhost:9000/ws
|
||||
|
||||
#TAURI_DEV_HOST=0.0.0.0
|
||||
5
.github/workflows/release.yml
vendored
5
.github/workflows/release.yml
vendored
@@ -77,7 +77,6 @@ jobs:
|
||||
target: "aarch64-unknown-linux-gnu"
|
||||
env:
|
||||
APP_VERSION: ${{ needs.create-release.outputs.APP_VERSION }}
|
||||
RELEASE_BODY: ${{ needs.create-release.outputs.RELEASE_BODY }}
|
||||
|
||||
runs-on: ${{ matrix.platform }}
|
||||
steps:
|
||||
@@ -107,7 +106,7 @@ jobs:
|
||||
sudo apt-get update
|
||||
sudo apt-get install -y libwebkit2gtk-4.1-dev libappindicator3-dev librsvg2-dev patchelf xdg-utils
|
||||
|
||||
- name: Add Rust build target at ${{ matrix.platform}} for ${{ matrix.target }}
|
||||
- name: Add Rust build target
|
||||
working-directory: src-tauri
|
||||
shell: bash
|
||||
run: |
|
||||
@@ -158,7 +157,7 @@ jobs:
|
||||
with:
|
||||
tagName: ${{ github.ref_name }}
|
||||
releaseName: Coco ${{ env.APP_VERSION }}
|
||||
releaseBody: "${{ env.RELEASE_BODY }}"
|
||||
releaseBody: "${{ needs.create-release.outputs.RELEASE_BODY }}"
|
||||
releaseDraft: true
|
||||
prerelease: false
|
||||
args: ${{ env.BUILD_ARGS }}
|
||||
|
||||
61
.github/workflows/rust_code_check.yml
vendored
Normal file
61
.github/workflows/rust_code_check.yml
vendored
Normal file
@@ -0,0 +1,61 @@
|
||||
name: Rust Code Check
|
||||
|
||||
on:
|
||||
pull_request:
|
||||
# Only run it when Rust code changes
|
||||
paths:
|
||||
- 'src-tauri/**'
|
||||
|
||||
jobs:
|
||||
check:
|
||||
strategy:
|
||||
matrix:
|
||||
platform: [ubuntu-latest, windows-latest, macos-latest]
|
||||
|
||||
runs-on: ${{ matrix.platform }}
|
||||
|
||||
steps:
|
||||
- uses: actions/checkout@v4
|
||||
|
||||
- name: Checkout dependency (pizza-engine) repository
|
||||
uses: actions/checkout@v4
|
||||
with:
|
||||
repository: 'infinilabs/pizza'
|
||||
ssh-key: ${{ secrets.SSH_PRIVATE_KEY }}
|
||||
submodules: recursive
|
||||
ref: main
|
||||
path: pizza
|
||||
|
||||
- name: Install dependencies (ubuntu only)
|
||||
if: startsWith(matrix.platform, 'ubuntu-latest')
|
||||
run: |
|
||||
sudo apt-get update
|
||||
sudo apt-get install -y libwebkit2gtk-4.1-dev libappindicator3-dev librsvg2-dev patchelf xdg-utils
|
||||
|
||||
- name: Add pizza engine as a dependency
|
||||
working-directory: src-tauri
|
||||
shell: bash
|
||||
run: cargo add --path ../pizza/lib/engine --features query_string_parser,persistence
|
||||
|
||||
- name: Format check
|
||||
working-directory: src-tauri
|
||||
shell: bash
|
||||
run: |
|
||||
rustup component add rustfmt
|
||||
cargo fmt --all --check
|
||||
|
||||
- name: Check compilation (Without Pizza engine enabled)
|
||||
working-directory: ./src-tauri
|
||||
run: cargo check
|
||||
|
||||
- name: Check compilation (With Pizza engine enabled)
|
||||
working-directory: ./src-tauri
|
||||
run: cargo check --features use_pizza_engine
|
||||
|
||||
- name: Run tests (Without Pizza engine)
|
||||
working-directory: ./src-tauri
|
||||
run: cargo test
|
||||
|
||||
- name: Run tests (With Pizza engine)
|
||||
working-directory: ./src-tauri
|
||||
run: cargo test --features use_pizza_engine
|
||||
7
.vscode/settings.json
vendored
7
.vscode/settings.json
vendored
@@ -8,6 +8,8 @@
|
||||
"clsx",
|
||||
"codegen",
|
||||
"dataurl",
|
||||
"deeplink",
|
||||
"deepthink",
|
||||
"dtolnay",
|
||||
"dyld",
|
||||
"elif",
|
||||
@@ -30,6 +32,8 @@
|
||||
"localstorage",
|
||||
"lucide",
|
||||
"maximizable",
|
||||
"mdast",
|
||||
"meval",
|
||||
"Minimizable",
|
||||
"msvc",
|
||||
"nord",
|
||||
@@ -39,9 +43,11 @@
|
||||
"overscan",
|
||||
"partialize",
|
||||
"patchelf",
|
||||
"Quicklink",
|
||||
"Raycast",
|
||||
"rehype",
|
||||
"reqwest",
|
||||
"rerank",
|
||||
"rgba",
|
||||
"rustup",
|
||||
"screenshotable",
|
||||
@@ -56,6 +62,7 @@
|
||||
"traptitech",
|
||||
"unlisten",
|
||||
"unlistener",
|
||||
"unlisteners",
|
||||
"unminimize",
|
||||
"uuidv",
|
||||
"VITE",
|
||||
|
||||
@@ -91,6 +91,8 @@ pnpm tauri build
|
||||
|
||||
- [Coco App Documentation](https://docs.infinilabs.com/coco-app/main/)
|
||||
- [Coco Server Documentation](https://docs.infinilabs.com/coco-server/main/)
|
||||
- [DeepWiki Coco App](https://deepwiki.com/infinilabs/coco-app)
|
||||
- [DeepWiki Coco Server](https://deepwiki.com/infinilabs/coco-server)
|
||||
- [Tauri Documentation](https://tauri.app/)
|
||||
|
||||
## Contributors
|
||||
|
||||
@@ -5,15 +5,104 @@ title: "Release Notes"
|
||||
|
||||
# Release Notes
|
||||
|
||||
Information about release notes of Coco Server is provided here.
|
||||
Information about release notes of Coco App is provided here.
|
||||
|
||||
## Latest (In development)
|
||||
|
||||
### ❌ Breaking changes
|
||||
|
||||
### 🚀 Features
|
||||
|
||||
- feat: enhance ui for skipped version #834
|
||||
- feat: support installing local extensions #749
|
||||
- feat: support sending files in chat messages #764
|
||||
- feat: sub extension can set 'platforms' now #847
|
||||
|
||||
### 🐛 Bug fix
|
||||
|
||||
- fix: fix issue with update check failure #833
|
||||
|
||||
### ✈️ Improvements
|
||||
|
||||
- refactor: calling service related interfaces #831
|
||||
- refactor: split query_coco_fusion() #836
|
||||
- chore: web component loading font icon #838
|
||||
- chore: delete unused code files and dependencies #841
|
||||
- chore: ignore tauri::AppHandle's generic argument R #845
|
||||
- refactor: check Extension/plugin.json from all sources #846
|
||||
|
||||
## 0.7.1 (2025-07-27)
|
||||
|
||||
### ❌ Breaking changes
|
||||
|
||||
### 🚀 Features
|
||||
|
||||
### 🐛 Bug fix
|
||||
|
||||
- fix: correct enter key behavior #828
|
||||
|
||||
### ✈️ Improvements
|
||||
|
||||
- chore: web component add notification component #825
|
||||
- refactor: collection behavior defaults to `MoveToActiveSpace`, and only use `CanJoinAllSpaces` when window is pinned #829
|
||||
|
||||
## 0.7.0 (2025-07-25)
|
||||
|
||||
### ❌ Breaking changes
|
||||
|
||||
### 🚀 Features
|
||||
|
||||
- feat: file search using spotlight #705
|
||||
- feat: voice input support in both search and chat modes #732
|
||||
- feat: text to speech now powered by LLM #750
|
||||
- feat: file search for Windows #762
|
||||
|
||||
### 🐛 Bug fix
|
||||
|
||||
- fix(file search): apply filters before from/size parameters #741
|
||||
- fix(file search): searching by name&content does not search file name #743
|
||||
- fix: prevent window from hiding when moved on Windows #748
|
||||
- fix: unregister ext hotkey when it gets deleted #770
|
||||
- fix: indexing apps does not respect search scope config #773
|
||||
- fix: restore missing category titles on subpages #772
|
||||
- fix: correct incorrect assistant display when quick ai access #779
|
||||
- fix: resolved minor issues with voice playback #780
|
||||
- fix: fixed incorrect taskbar icon display on linux #783
|
||||
- fix: fix data inconsistency issue on secondary pages #784
|
||||
- fix: incorrect status when installing extension #789
|
||||
- fix: increase read_timeout for HTTP streaming stability #798
|
||||
- fix: enter key problem #794
|
||||
- fix: fix selection issue after renaming #800
|
||||
- fix: fix shortcut issue in windows context menu #804
|
||||
- fix: panic caused by "state() called before manage()" #806
|
||||
- fix: fix multiline input issue #808
|
||||
- fix: fix ctrl+k not working #815
|
||||
- fix: fix update window config sync #818
|
||||
- fix: fix enter key on subpages #819
|
||||
- fix: panic on Ubuntu (GNOME) when opening apps #821
|
||||
|
||||
### ✈️ Improvements
|
||||
|
||||
- refactor: prioritize stat(2) when checking if a file is dir #737
|
||||
- refactor: change File Search ext type to extension #738
|
||||
- refactor: create chat & send chat api #739
|
||||
- chore: icon support for more file types #740
|
||||
- chore: replace meval-rs with our fork to clear dep warning #745
|
||||
- refactor: adjusted assistant, datasource, mcp_server interface parameters #746
|
||||
- refactor: adjust extension code hierarchy #747
|
||||
- chore: bump dep applications-rs #751
|
||||
- chore: rename QuickLink/quick_link to Quicklink/quicklink #752
|
||||
- chore: assistant params & styles #753
|
||||
- chore: make optional fields optional #758
|
||||
- chore: search-chat components add formatUrl & think data & icons url #765
|
||||
- chore: Coco app http request headers #744
|
||||
- refactor: do status code check before deserializing response #767
|
||||
- style: splash adapts to the width of mobile phones #768
|
||||
- chore: search-chat add language and formatUrl parameters #775
|
||||
- chore: not request the interface if not logged in #795
|
||||
- refactor: clean up unsupported characters from query string in Win Search #802
|
||||
- chore: display backtrace in panic log #805
|
||||
|
||||
## 0.6.0 (2025-06-29)
|
||||
|
||||
### ❌ Breaking changes
|
||||
@@ -301,4 +390,4 @@ Information about release notes of Coco Server is provided here.
|
||||
|
||||
### Bug fix
|
||||
|
||||
### Improvements
|
||||
### Improvements
|
||||
|
||||
@@ -1,7 +1,7 @@
|
||||
{
|
||||
"name": "coco",
|
||||
"private": true,
|
||||
"version": "0.6.0",
|
||||
"version": "0.7.1",
|
||||
"type": "module",
|
||||
"scripts": {
|
||||
"dev": "vite",
|
||||
@@ -18,7 +18,6 @@
|
||||
"release-beta": "release-it --preRelease=beta --preReleaseBase=1"
|
||||
},
|
||||
"dependencies": {
|
||||
"@ant-design/icons": "^6.0.0",
|
||||
"@headlessui/react": "^2.2.2",
|
||||
"@tauri-apps/api": "^2.5.0",
|
||||
"@tauri-apps/plugin-autostart": "~2.2.0",
|
||||
@@ -32,7 +31,6 @@
|
||||
"@tauri-apps/plugin-process": "^2.2.1",
|
||||
"@tauri-apps/plugin-shell": "^2.2.1",
|
||||
"@tauri-apps/plugin-updater": "github:infinilabs/tauri-plugin-updater#v2",
|
||||
"@tauri-apps/plugin-websocket": "~2.3.0",
|
||||
"@tauri-apps/plugin-window": "2.0.0-alpha.1",
|
||||
"@wavesurfer/react": "^1.0.11",
|
||||
"ahooks": "^3.8.4",
|
||||
|
||||
69
pnpm-lock.yaml
generated
69
pnpm-lock.yaml
generated
@@ -8,9 +8,6 @@ importers:
|
||||
|
||||
.:
|
||||
dependencies:
|
||||
'@ant-design/icons':
|
||||
specifier: ^6.0.0
|
||||
version: 6.0.0(react-dom@18.3.1(react@18.3.1))(react@18.3.1)
|
||||
'@headlessui/react':
|
||||
specifier: ^2.2.2
|
||||
version: 2.2.2(react-dom@18.3.1(react@18.3.1))(react@18.3.1)
|
||||
@@ -50,9 +47,6 @@ importers:
|
||||
'@tauri-apps/plugin-updater':
|
||||
specifier: github:infinilabs/tauri-plugin-updater#v2
|
||||
version: https://codeload.github.com/infinilabs/tauri-plugin-updater/tar.gz/358e689c65e9943b53eff50bcb9dfd5b1cfc4072
|
||||
'@tauri-apps/plugin-websocket':
|
||||
specifier: ~2.3.0
|
||||
version: 2.3.0
|
||||
'@tauri-apps/plugin-window':
|
||||
specifier: 2.0.0-alpha.1
|
||||
version: 2.0.0-alpha.1
|
||||
@@ -239,23 +233,6 @@ packages:
|
||||
resolution: {integrity: sha512-30iZtAPgz+LTIYoeivqYo853f02jBYSd5uGnGpkFV0M3xOt9aN73erkgYAmZU43x4VfqcnLxW9Kpg3R5LC4YYw==}
|
||||
engines: {node: '>=6.0.0'}
|
||||
|
||||
'@ant-design/colors@8.0.0':
|
||||
resolution: {integrity: sha512-6YzkKCw30EI/E9kHOIXsQDHmMvTllT8STzjMb4K2qzit33RW2pqCJP0sk+hidBntXxE+Vz4n1+RvCTfBw6OErw==}
|
||||
|
||||
'@ant-design/fast-color@3.0.0':
|
||||
resolution: {integrity: sha512-eqvpP7xEDm2S7dUzl5srEQCBTXZMmY3ekf97zI+M2DHOYyKdJGH0qua0JACHTqbkRnD/KHFQP9J1uMJ/XWVzzA==}
|
||||
engines: {node: '>=8.x'}
|
||||
|
||||
'@ant-design/icons-svg@4.4.2':
|
||||
resolution: {integrity: sha512-vHbT+zJEVzllwP+CM+ul7reTEfBR0vgxFe7+lREAsAA7YGsYpboiq2sQNeQeRvh09GfQgs/GyFEvZpJ9cLXpXA==}
|
||||
|
||||
'@ant-design/icons@6.0.0':
|
||||
resolution: {integrity: sha512-o0aCCAlHc1o4CQcapAwWzHeaW2x9F49g7P3IDtvtNXgHowtRWYb7kiubt8sQPFvfVIVU/jLw2hzeSlNt0FU+Uw==}
|
||||
engines: {node: '>=8'}
|
||||
peerDependencies:
|
||||
react: '>=16.0.0'
|
||||
react-dom: '>=16.0.0'
|
||||
|
||||
'@antfu/install-pkg@1.1.0':
|
||||
resolution: {integrity: sha512-MGQsmw10ZyI+EJo45CdSER4zEb+p31LpDAFp2Z3gkSd1yqVZGi0Ebx++YTEMonJy4oChEMLsxZ64j8FH6sSqtQ==}
|
||||
|
||||
@@ -1005,12 +982,6 @@ packages:
|
||||
resolution: {integrity: sha512-c83qWb22rNRuB0UaVCI0uRPNRr8Z0FWnEIvT47jiHAmOIUHbBOg5XvV7pM5x+rKn9HRpjxquDbXYSXr3fAKFcw==}
|
||||
engines: {node: '>=12'}
|
||||
|
||||
'@rc-component/util@1.2.1':
|
||||
resolution: {integrity: sha512-AUVu6jO+lWjQnUOOECwu8iR0EdElQgWW5NBv5vP/Uf9dWbAX3udhMutRlkVXjuac2E40ghkFy+ve00mc/3Fymg==}
|
||||
peerDependencies:
|
||||
react: '>=18.0.0'
|
||||
react-dom: '>=18.0.0'
|
||||
|
||||
'@react-aria/focus@3.20.2':
|
||||
resolution: {integrity: sha512-Q3rouk/rzoF/3TuH6FzoAIKrl+kzZi9LHmr8S5EqLAOyP9TXIKG34x2j42dZsAhrw7TbF9gA8tBKwnCNH4ZV+Q==}
|
||||
peerDependencies:
|
||||
@@ -1287,9 +1258,6 @@ packages:
|
||||
resolution: {tarball: https://codeload.github.com/infinilabs/tauri-plugin-updater/tar.gz/358e689c65e9943b53eff50bcb9dfd5b1cfc4072}
|
||||
version: 2.7.1
|
||||
|
||||
'@tauri-apps/plugin-websocket@2.3.0':
|
||||
resolution: {integrity: sha512-eAwRGe3tnqDeQYE0wq4g1PUKbam9tYvlC4uP/au12Y/z7MP4lrS4ylv+aoZ5Ly+hTlBdi7hDkhHomwF/UeBesA==}
|
||||
|
||||
'@tauri-apps/plugin-window@2.0.0-alpha.1':
|
||||
resolution: {integrity: sha512-dFOAgal/3Txz3SQ+LNQq0AK1EPC+acdaFlwPVB/6KXUZYmaFleIlzgxDVoJCQ+/xOhxvYrdQaFLefh0I/Kldbg==}
|
||||
|
||||
@@ -1679,9 +1647,6 @@ packages:
|
||||
resolution: {integrity: sha512-cYY9mypksY8NRqgDB1XD1RiJL338v/551niynFTGkZOO2LHuB2OmOYxDIe/ttN9AHwrqdum1360G3ald0W9kCg==}
|
||||
engines: {node: '>=8'}
|
||||
|
||||
classnames@2.5.1:
|
||||
resolution: {integrity: sha512-saHYOzhIQs6wy2sVxTM6bUDsQO4F50V9RQ22qBpEdCW+I+/Wmke2HOl6lS6dTpdxVhb88/I6+Hs+438c3lfUow==}
|
||||
|
||||
cli-boxes@3.0.0:
|
||||
resolution: {integrity: sha512-/lzGpEWL/8PfI0BmBOPRwp0c/wFNX1RdUML3jK/RcSBA9T8mZDdQpqYBKtCFTOfQbwPqWEOpjqW+Fnayc0969g==}
|
||||
engines: {node: '>=10'}
|
||||
@@ -3161,9 +3126,6 @@ packages:
|
||||
typescript:
|
||||
optional: true
|
||||
|
||||
react-is@18.3.1:
|
||||
resolution: {integrity: sha512-/LLMVyas0ljjAtoYiPqYiL8VWXzUUdThrmU5+n20DZv+a+ClRoevUzw5JxU+Ieh5/c87ytoTBV9G1FiKfNJdmg==}
|
||||
|
||||
react-markdown@9.1.0:
|
||||
resolution: {integrity: sha512-xaijuJB0kzGiUdG7nc2MOMDUDBWPyGAjZtUrow9XxUeua8IqeP+VlIfAZ3bphpcLTnSZXz6z9jcVC/TCwbfgdw==}
|
||||
peerDependencies:
|
||||
@@ -3809,23 +3771,6 @@ snapshots:
|
||||
'@jridgewell/gen-mapping': 0.3.8
|
||||
'@jridgewell/trace-mapping': 0.3.25
|
||||
|
||||
'@ant-design/colors@8.0.0':
|
||||
dependencies:
|
||||
'@ant-design/fast-color': 3.0.0
|
||||
|
||||
'@ant-design/fast-color@3.0.0': {}
|
||||
|
||||
'@ant-design/icons-svg@4.4.2': {}
|
||||
|
||||
'@ant-design/icons@6.0.0(react-dom@18.3.1(react@18.3.1))(react@18.3.1)':
|
||||
dependencies:
|
||||
'@ant-design/colors': 8.0.0
|
||||
'@ant-design/icons-svg': 4.4.2
|
||||
'@rc-component/util': 1.2.1(react-dom@18.3.1(react@18.3.1))(react@18.3.1)
|
||||
classnames: 2.5.1
|
||||
react: 18.3.1
|
||||
react-dom: 18.3.1(react@18.3.1)
|
||||
|
||||
'@antfu/install-pkg@1.1.0':
|
||||
dependencies:
|
||||
package-manager-detector: 1.3.0
|
||||
@@ -4468,12 +4413,6 @@ snapshots:
|
||||
'@pnpm/network.ca-file': 1.0.2
|
||||
config-chain: 1.1.13
|
||||
|
||||
'@rc-component/util@1.2.1(react-dom@18.3.1(react@18.3.1))(react@18.3.1)':
|
||||
dependencies:
|
||||
react: 18.3.1
|
||||
react-dom: 18.3.1(react@18.3.1)
|
||||
react-is: 18.3.1
|
||||
|
||||
'@react-aria/focus@3.20.2(react-dom@18.3.1(react@18.3.1))(react@18.3.1)':
|
||||
dependencies:
|
||||
'@react-aria/interactions': 3.25.0(react-dom@18.3.1(react@18.3.1))(react@18.3.1)
|
||||
@@ -4698,10 +4637,6 @@ snapshots:
|
||||
dependencies:
|
||||
'@tauri-apps/api': 2.5.0
|
||||
|
||||
'@tauri-apps/plugin-websocket@2.3.0':
|
||||
dependencies:
|
||||
'@tauri-apps/api': 2.5.0
|
||||
|
||||
'@tauri-apps/plugin-window@2.0.0-alpha.1':
|
||||
dependencies:
|
||||
'@tauri-apps/api': 2.0.0-alpha.6
|
||||
@@ -5132,8 +5067,6 @@ snapshots:
|
||||
|
||||
ci-info@4.2.0: {}
|
||||
|
||||
classnames@2.5.1: {}
|
||||
|
||||
cli-boxes@3.0.0: {}
|
||||
|
||||
cli-cursor@5.0.0:
|
||||
@@ -6881,8 +6814,6 @@ snapshots:
|
||||
react-dom: 18.3.1(react@18.3.1)
|
||||
typescript: 5.8.3
|
||||
|
||||
react-is@18.3.1: {}
|
||||
|
||||
react-markdown@9.1.0(@types/react@18.3.21)(react@18.3.1):
|
||||
dependencies:
|
||||
'@types/hast': 3.0.4
|
||||
|
||||
431
src-tauri/Cargo.lock
generated
431
src-tauri/Cargo.lock
generated
@@ -128,26 +128,21 @@ checksum = "e16d2d3311acee920a9eb8d33b8cbc1787ce4a264e85f964c2404b969bdcd487"
|
||||
[[package]]
|
||||
name = "applications"
|
||||
version = "0.3.1"
|
||||
source = "git+https://github.com/infinilabs/applications-rs?rev=7bb507e6b12f73c96f3a52f0578d0246a689f381#7bb507e6b12f73c96f3a52f0578d0246a689f381"
|
||||
source = "git+https://github.com/infinilabs/applications-rs?rev=31b0c030a0f3bc82275fe12debe526153978671d#31b0c030a0f3bc82275fe12debe526153978671d"
|
||||
dependencies = [
|
||||
"anyhow",
|
||||
"cocoa 0.25.0",
|
||||
"core-foundation 0.9.4",
|
||||
"freedesktop-file-parser",
|
||||
"glob",
|
||||
"image",
|
||||
"lnk",
|
||||
"log",
|
||||
"nix 0.30.1",
|
||||
"notify 8.0.0",
|
||||
"objc",
|
||||
"parselnk",
|
||||
"plist",
|
||||
"serde",
|
||||
"serde_derive",
|
||||
"serde_json",
|
||||
"tauri-icns",
|
||||
"thiserror 1.0.69",
|
||||
"walkdir",
|
||||
"winapi",
|
||||
"windows-icons",
|
||||
@@ -360,17 +355,6 @@ version = "1.1.2"
|
||||
source = "registry+https://github.com/rust-lang/crates.io-index"
|
||||
checksum = "1505bd5d3d116872e7271a6d4e16d81d0c8570876c8de68093a09ac269d8aac0"
|
||||
|
||||
[[package]]
|
||||
name = "atty"
|
||||
version = "0.2.14"
|
||||
source = "registry+https://github.com/rust-lang/crates.io-index"
|
||||
checksum = "d9b39be18770d11421cdb1b9947a45dd3f37e93092cbf377614828a319d5fee8"
|
||||
dependencies = [
|
||||
"hermit-abi 0.1.19",
|
||||
"libc",
|
||||
"winapi",
|
||||
]
|
||||
|
||||
[[package]]
|
||||
name = "auto-launch"
|
||||
version = "0.5.0"
|
||||
@@ -711,9 +695,9 @@ dependencies = [
|
||||
|
||||
[[package]]
|
||||
name = "camino"
|
||||
version = "1.1.9"
|
||||
version = "1.1.10"
|
||||
source = "registry+https://github.com/rust-lang/crates.io-index"
|
||||
checksum = "8b96ec4966b5813e2c0507c1f86115c8c5abaadc3980879c3424042a02fd1ad3"
|
||||
checksum = "0da45bc31171d8d6960122e222a67740df867c1dd53b4d51caa297084c185cab"
|
||||
dependencies = [
|
||||
"serde",
|
||||
]
|
||||
@@ -801,9 +785,9 @@ dependencies = [
|
||||
|
||||
[[package]]
|
||||
name = "cfg-if"
|
||||
version = "1.0.0"
|
||||
version = "1.0.1"
|
||||
source = "registry+https://github.com/rust-lang/crates.io-index"
|
||||
checksum = "baf1de4339761588bc0619e3cbc0120ee582ebb74b53b4efbf79117bd2da40fd"
|
||||
checksum = "9555578bc9e57714c812a1f84e4fc5b4d21fcb063490c624de019f7464c91268"
|
||||
|
||||
[[package]]
|
||||
name = "cfg_aliases"
|
||||
@@ -854,34 +838,9 @@ dependencies = [
|
||||
"inout",
|
||||
]
|
||||
|
||||
[[package]]
|
||||
name = "clap"
|
||||
version = "3.2.25"
|
||||
source = "registry+https://github.com/rust-lang/crates.io-index"
|
||||
checksum = "4ea181bf566f71cb9a5d17a59e1871af638180a18fb0035c92ae62b705207123"
|
||||
dependencies = [
|
||||
"atty",
|
||||
"bitflags 1.3.2",
|
||||
"clap_lex",
|
||||
"indexmap 1.9.3",
|
||||
"strsim 0.10.0",
|
||||
"termcolor",
|
||||
"textwrap",
|
||||
"yaml-rust",
|
||||
]
|
||||
|
||||
[[package]]
|
||||
name = "clap_lex"
|
||||
version = "0.2.4"
|
||||
source = "registry+https://github.com/rust-lang/crates.io-index"
|
||||
checksum = "2850f2f5a82cbf437dd5af4d49848fbdfc27c157c3d010345776f952765261c5"
|
||||
dependencies = [
|
||||
"os_str_bytes",
|
||||
]
|
||||
|
||||
[[package]]
|
||||
name = "coco"
|
||||
version = "0.6.0"
|
||||
version = "0.7.1"
|
||||
dependencies = [
|
||||
"anyhow",
|
||||
"applications",
|
||||
@@ -889,8 +848,11 @@ dependencies = [
|
||||
"async-trait",
|
||||
"base64 0.13.1",
|
||||
"borrowme",
|
||||
"camino",
|
||||
"cfg-if",
|
||||
"chinese-number",
|
||||
"chrono",
|
||||
"cocoa 0.24.1",
|
||||
"derive_more 2.0.1",
|
||||
"dirs 5.0.1",
|
||||
"enigo",
|
||||
@@ -900,6 +862,7 @@ dependencies = [
|
||||
"hostname",
|
||||
"http 1.3.1",
|
||||
"hyper 0.14.32",
|
||||
"indexmap 2.10.0",
|
||||
"lazy_static",
|
||||
"log",
|
||||
"meval",
|
||||
@@ -911,10 +874,13 @@ dependencies = [
|
||||
"plist",
|
||||
"regex",
|
||||
"reqwest",
|
||||
"semver",
|
||||
"serde",
|
||||
"serde_json",
|
||||
"serde_plain",
|
||||
"strsim 0.10.0",
|
||||
"strum",
|
||||
"sysinfo",
|
||||
"tauri",
|
||||
"tauri-build",
|
||||
"tauri-nspanel",
|
||||
@@ -936,31 +902,32 @@ dependencies = [
|
||||
"tauri-plugin-single-instance",
|
||||
"tauri-plugin-store",
|
||||
"tauri-plugin-updater",
|
||||
"tauri-plugin-websocket",
|
||||
"tauri-plugin-windows-version",
|
||||
"thiserror 1.0.69",
|
||||
"tokio",
|
||||
"tokio-native-tls",
|
||||
"tokio-tungstenite 0.20.1",
|
||||
"tokio-stream",
|
||||
"tokio-tungstenite",
|
||||
"tokio-util",
|
||||
"tungstenite 0.24.0",
|
||||
"url",
|
||||
"walkdir",
|
||||
"windows 0.61.3",
|
||||
"zip 4.0.0",
|
||||
]
|
||||
|
||||
[[package]]
|
||||
name = "cocoa"
|
||||
version = "0.25.0"
|
||||
version = "0.24.1"
|
||||
source = "registry+https://github.com/rust-lang/crates.io-index"
|
||||
checksum = "f6140449f97a6e97f9511815c5632d84c8aacf8ac271ad77c559218161a1373c"
|
||||
checksum = "f425db7937052c684daec3bd6375c8abe2d146dca4b8b143d6db777c39138f3a"
|
||||
dependencies = [
|
||||
"bitflags 1.3.2",
|
||||
"block",
|
||||
"cocoa-foundation 0.1.2",
|
||||
"core-foundation 0.9.4",
|
||||
"core-graphics 0.23.2",
|
||||
"foreign-types 0.5.0",
|
||||
"core-graphics 0.22.3",
|
||||
"foreign-types 0.3.2",
|
||||
"libc",
|
||||
"objc",
|
||||
]
|
||||
@@ -1123,14 +1090,14 @@ checksum = "773648b94d0e5d620f64f280777445740e61fe701025087ec8b57f45c791888b"
|
||||
|
||||
[[package]]
|
||||
name = "core-graphics"
|
||||
version = "0.23.2"
|
||||
version = "0.22.3"
|
||||
source = "registry+https://github.com/rust-lang/crates.io-index"
|
||||
checksum = "c07782be35f9e1140080c6b96f0d44b739e2278479f64e02fdab4e32dfd8b081"
|
||||
checksum = "2581bbab3b8ffc6fcbd550bf46c355135d16e9ff2a6ea032ad6b9bf1d7efe4fb"
|
||||
dependencies = [
|
||||
"bitflags 1.3.2",
|
||||
"core-foundation 0.9.4",
|
||||
"core-graphics-types 0.1.3",
|
||||
"foreign-types 0.5.0",
|
||||
"foreign-types 0.3.2",
|
||||
"libc",
|
||||
]
|
||||
|
||||
@@ -1907,12 +1874,26 @@ dependencies = [
|
||||
|
||||
[[package]]
|
||||
name = "freedesktop-file-parser"
|
||||
version = "0.1.3"
|
||||
version = "0.3.0"
|
||||
source = "registry+https://github.com/rust-lang/crates.io-index"
|
||||
checksum = "6059d3997cc694ec3e9a378db855866233ef7edfeafd85afcb2239fd130e6e6b"
|
||||
checksum = "896344fc2c460ea34728f8f9a5685c28762a910d310c35a5001f6a8f6e0467bb"
|
||||
dependencies = [
|
||||
"freedesktop-icons",
|
||||
"thiserror 2.0.12",
|
||||
"xdgkit",
|
||||
]
|
||||
|
||||
[[package]]
|
||||
name = "freedesktop-icons"
|
||||
version = "0.4.0"
|
||||
source = "registry+https://github.com/rust-lang/crates.io-index"
|
||||
checksum = "95f87364ea709292a3b3f74014ce3ee78412c89807eea75a358c8e029b000994"
|
||||
dependencies = [
|
||||
"dirs 5.0.1",
|
||||
"ini_core",
|
||||
"once_cell",
|
||||
"thiserror 1.0.69",
|
||||
"tracing",
|
||||
"xdg",
|
||||
]
|
||||
|
||||
[[package]]
|
||||
@@ -2507,7 +2488,7 @@ dependencies = [
|
||||
"futures-core",
|
||||
"futures-sink",
|
||||
"http 1.3.1",
|
||||
"indexmap 2.9.0",
|
||||
"indexmap 2.10.0",
|
||||
"slab",
|
||||
"tokio",
|
||||
"tokio-util",
|
||||
@@ -2561,15 +2542,6 @@ version = "0.5.0"
|
||||
source = "registry+https://github.com/rust-lang/crates.io-index"
|
||||
checksum = "2304e00983f87ffb38b55b444b5e3b60a884b5d30c0fca7d82fe33449bbe55ea"
|
||||
|
||||
[[package]]
|
||||
name = "hermit-abi"
|
||||
version = "0.1.19"
|
||||
source = "registry+https://github.com/rust-lang/crates.io-index"
|
||||
checksum = "62b467343b94ba476dcb2500d242dadbb39557df889310ac77c5d99100aaac33"
|
||||
dependencies = [
|
||||
"libc",
|
||||
]
|
||||
|
||||
[[package]]
|
||||
name = "hermit-abi"
|
||||
version = "0.4.0"
|
||||
@@ -2798,7 +2770,7 @@ dependencies = [
|
||||
"js-sys",
|
||||
"log",
|
||||
"wasm-bindgen",
|
||||
"windows-core 0.61.0",
|
||||
"windows-core 0.61.2",
|
||||
]
|
||||
|
||||
[[package]]
|
||||
@@ -2985,9 +2957,9 @@ dependencies = [
|
||||
|
||||
[[package]]
|
||||
name = "indexmap"
|
||||
version = "2.9.0"
|
||||
version = "2.10.0"
|
||||
source = "registry+https://github.com/rust-lang/crates.io-index"
|
||||
checksum = "cea70ddb795996207ad57735b50c5982d8844f38ba9ee5f1aedcfb708a2aa11e"
|
||||
checksum = "fe4cd85333e22411419a0bcae1297d25e58c9443848b11dc6a86fefe8c78a661"
|
||||
dependencies = [
|
||||
"equivalent",
|
||||
"hashbrown 0.15.3",
|
||||
@@ -3003,6 +2975,15 @@ dependencies = [
|
||||
"cfb",
|
||||
]
|
||||
|
||||
[[package]]
|
||||
name = "ini_core"
|
||||
version = "0.2.0"
|
||||
source = "registry+https://github.com/rust-lang/crates.io-index"
|
||||
checksum = "7a467a31a9f439b5262fa99c17084537bff57f24703d5a09a2b5c9657ec73a61"
|
||||
dependencies = [
|
||||
"cfg-if",
|
||||
]
|
||||
|
||||
[[package]]
|
||||
name = "inotify"
|
||||
version = "0.9.6"
|
||||
@@ -3348,12 +3329,6 @@ dependencies = [
|
||||
"zlib-rs",
|
||||
]
|
||||
|
||||
[[package]]
|
||||
name = "linked-hash-map"
|
||||
version = "0.5.6"
|
||||
source = "registry+https://github.com/rust-lang/crates.io-index"
|
||||
checksum = "0717cef1bc8b636c6e1c1bbdefc09e6322da8a9321966e8928ef80d20f7f770f"
|
||||
|
||||
[[package]]
|
||||
name = "linux-raw-sys"
|
||||
version = "0.4.15"
|
||||
@@ -3508,11 +3483,10 @@ dependencies = [
|
||||
[[package]]
|
||||
name = "meval"
|
||||
version = "0.2.0"
|
||||
source = "registry+https://github.com/rust-lang/crates.io-index"
|
||||
checksum = "f79496a5651c8d57cd033c5add8ca7ee4e3d5f7587a4777484640d9cb60392d9"
|
||||
source = "git+https://github.com/infinilabs/meval-rs#8113cdae751b7ca060d28a08ecbfbcededfd4304"
|
||||
dependencies = [
|
||||
"fnv",
|
||||
"nom 1.2.4",
|
||||
"nom 8.0.0",
|
||||
]
|
||||
|
||||
[[package]]
|
||||
@@ -3667,7 +3641,8 @@ dependencies = [
|
||||
[[package]]
|
||||
name = "nix"
|
||||
version = "0.30.1"
|
||||
source = "git+https://github.com/nix-rust/nix#3cf9007216086b17a6ef5a09fa42dc00cde5c2bf"
|
||||
source = "registry+https://github.com/rust-lang/crates.io-index"
|
||||
checksum = "74523f3a35e05aba87a1d978330aef40f67b0304ac79c1c00b294c9830543db6"
|
||||
dependencies = [
|
||||
"bitflags 2.9.0",
|
||||
"cfg-if",
|
||||
@@ -3681,12 +3656,6 @@ version = "0.1.14"
|
||||
source = "registry+https://github.com/rust-lang/crates.io-index"
|
||||
checksum = "72ef4a56884ca558e5ddb05a1d1e7e1bfd9a68d9ed024c21704cc98872dae1bb"
|
||||
|
||||
[[package]]
|
||||
name = "nom"
|
||||
version = "1.2.4"
|
||||
source = "registry+https://github.com/rust-lang/crates.io-index"
|
||||
checksum = "a5b8c256fd9471521bcb84c3cdba98921497f1a331cbc15b8030fc63b82050ce"
|
||||
|
||||
[[package]]
|
||||
name = "nom"
|
||||
version = "7.1.3"
|
||||
@@ -3697,6 +3666,15 @@ dependencies = [
|
||||
"minimal-lexical",
|
||||
]
|
||||
|
||||
[[package]]
|
||||
name = "nom"
|
||||
version = "8.0.0"
|
||||
source = "registry+https://github.com/rust-lang/crates.io-index"
|
||||
checksum = "df9761775871bdef83bee530e60050f7e54b1105350d6884eb0fb4f46c2f9405"
|
||||
dependencies = [
|
||||
"memchr",
|
||||
]
|
||||
|
||||
[[package]]
|
||||
name = "noop_proc_macro"
|
||||
version = "0.3.0"
|
||||
@@ -3746,6 +3724,15 @@ version = "2.0.0"
|
||||
source = "registry+https://github.com/rust-lang/crates.io-index"
|
||||
checksum = "5e0826a989adedc2a244799e823aece04662b66609d96af8dff7ac6df9a8925d"
|
||||
|
||||
[[package]]
|
||||
name = "ntapi"
|
||||
version = "0.4.1"
|
||||
source = "registry+https://github.com/rust-lang/crates.io-index"
|
||||
checksum = "e8a3895c6391c39d7fe7ebc444a87eb2991b2a0bc718fdabd071eec617fc68e4"
|
||||
dependencies = [
|
||||
"winapi",
|
||||
]
|
||||
|
||||
[[package]]
|
||||
name = "num-bigfloat"
|
||||
version = "1.7.2"
|
||||
@@ -4064,6 +4051,16 @@ dependencies = [
|
||||
"objc2-core-foundation",
|
||||
]
|
||||
|
||||
[[package]]
|
||||
name = "objc2-io-kit"
|
||||
version = "0.3.1"
|
||||
source = "registry+https://github.com/rust-lang/crates.io-index"
|
||||
checksum = "71c1c64d6120e51cd86033f67176b1cb66780c2efe34dec55176f77befd93c0a"
|
||||
dependencies = [
|
||||
"libc",
|
||||
"objc2-core-foundation",
|
||||
]
|
||||
|
||||
[[package]]
|
||||
name = "objc2-io-surface"
|
||||
version = "0.3.1"
|
||||
@@ -4296,12 +4293,6 @@ dependencies = [
|
||||
"windows-sys 0.59.0",
|
||||
]
|
||||
|
||||
[[package]]
|
||||
name = "os_str_bytes"
|
||||
version = "6.6.1"
|
||||
source = "registry+https://github.com/rust-lang/crates.io-index"
|
||||
checksum = "e2355d85b9a3786f481747ced0e0ff2ba35213a1f9bd406ed906554d7af805a1"
|
||||
|
||||
[[package]]
|
||||
name = "osakit"
|
||||
version = "0.3.1"
|
||||
@@ -4599,7 +4590,7 @@ source = "registry+https://github.com/rust-lang/crates.io-index"
|
||||
checksum = "eac26e981c03a6e53e0aee43c113e3202f5581d5360dae7bd2c70e800dd0451d"
|
||||
dependencies = [
|
||||
"base64 0.22.1",
|
||||
"indexmap 2.9.0",
|
||||
"indexmap 2.10.0",
|
||||
"quick-xml 0.32.0",
|
||||
"serde",
|
||||
"time",
|
||||
@@ -4626,7 +4617,7 @@ checksum = "a604568c3202727d1507653cb121dbd627a58684eb09a820fd746bee38b4442f"
|
||||
dependencies = [
|
||||
"cfg-if",
|
||||
"concurrent-queue",
|
||||
"hermit-abi 0.4.0",
|
||||
"hermit-abi",
|
||||
"pin-project-lite",
|
||||
"rustix 0.38.44",
|
||||
"tracing",
|
||||
@@ -4800,16 +4791,6 @@ version = "2.0.1"
|
||||
source = "registry+https://github.com/rust-lang/crates.io-index"
|
||||
checksum = "a993555f31e5a609f617c12db6250dedcac1b0a85076912c436e6fc9b2c8e6a3"
|
||||
|
||||
[[package]]
|
||||
name = "quick-xml"
|
||||
version = "0.21.0"
|
||||
source = "registry+https://github.com/rust-lang/crates.io-index"
|
||||
checksum = "0452695941410a58c8ce4391707ba9bad26a247173bd9886a05a5e8a8babec75"
|
||||
dependencies = [
|
||||
"memchr",
|
||||
"serde",
|
||||
]
|
||||
|
||||
[[package]]
|
||||
name = "quick-xml"
|
||||
version = "0.30.0"
|
||||
@@ -5573,7 +5554,7 @@ version = "1.0.140"
|
||||
source = "registry+https://github.com/rust-lang/crates.io-index"
|
||||
checksum = "20068b6e96dc6c9bd23e01df8827e6c7e1f2fddd43c21810382803c136b99373"
|
||||
dependencies = [
|
||||
"indexmap 2.9.0",
|
||||
"indexmap 2.10.0",
|
||||
"itoa 1.0.15",
|
||||
"memchr",
|
||||
"ryu",
|
||||
@@ -5631,7 +5612,7 @@ dependencies = [
|
||||
"chrono",
|
||||
"hex",
|
||||
"indexmap 1.9.3",
|
||||
"indexmap 2.9.0",
|
||||
"indexmap 2.10.0",
|
||||
"serde",
|
||||
"serde_derive",
|
||||
"serde_json",
|
||||
@@ -5885,6 +5866,27 @@ version = "0.11.1"
|
||||
source = "registry+https://github.com/rust-lang/crates.io-index"
|
||||
checksum = "7da8b5736845d9f2fcb837ea5d9e2628564b3b043a70948a3f0b778838c5fb4f"
|
||||
|
||||
[[package]]
|
||||
name = "strum"
|
||||
version = "0.27.2"
|
||||
source = "registry+https://github.com/rust-lang/crates.io-index"
|
||||
checksum = "af23d6f6c1a224baef9d3f61e287d2761385a5b88fdab4eb4c6f11aeb54c4bcf"
|
||||
dependencies = [
|
||||
"strum_macros",
|
||||
]
|
||||
|
||||
[[package]]
|
||||
name = "strum_macros"
|
||||
version = "0.27.2"
|
||||
source = "registry+https://github.com/rust-lang/crates.io-index"
|
||||
checksum = "7695ce3845ea4b33927c055a39dc438a45b059f7c1b3d91d38d10355fb8cbca7"
|
||||
dependencies = [
|
||||
"heck 0.5.0",
|
||||
"proc-macro2",
|
||||
"quote",
|
||||
"syn 2.0.101",
|
||||
]
|
||||
|
||||
[[package]]
|
||||
name = "subtle"
|
||||
version = "2.6.1"
|
||||
@@ -5953,6 +5955,20 @@ dependencies = [
|
||||
"libc",
|
||||
]
|
||||
|
||||
[[package]]
|
||||
name = "sysinfo"
|
||||
version = "0.35.2"
|
||||
source = "registry+https://github.com/rust-lang/crates.io-index"
|
||||
checksum = "3c3ffa3e4ff2b324a57f7aeb3c349656c7b127c3c189520251a648102a92496e"
|
||||
dependencies = [
|
||||
"libc",
|
||||
"memchr",
|
||||
"ntapi",
|
||||
"objc2-core-foundation",
|
||||
"objc2-io-kit",
|
||||
"windows 0.61.3",
|
||||
]
|
||||
|
||||
[[package]]
|
||||
name = "system-configuration"
|
||||
version = "0.6.1"
|
||||
@@ -6033,8 +6049,8 @@ dependencies = [
|
||||
"tao-macros",
|
||||
"unicode-segmentation",
|
||||
"url",
|
||||
"windows 0.61.1",
|
||||
"windows-core 0.61.0",
|
||||
"windows 0.61.3",
|
||||
"windows-core 0.61.2",
|
||||
"windows-version",
|
||||
"x11-dl",
|
||||
]
|
||||
@@ -6123,7 +6139,7 @@ dependencies = [
|
||||
"webkit2gtk",
|
||||
"webview2-com",
|
||||
"window-vibrancy",
|
||||
"windows 0.61.1",
|
||||
"windows 0.61.3",
|
||||
]
|
||||
|
||||
[[package]]
|
||||
@@ -6263,7 +6279,7 @@ dependencies = [
|
||||
"tracing",
|
||||
"url",
|
||||
"windows-registry 0.5.1",
|
||||
"windows-result 0.3.2",
|
||||
"windows-result 0.3.4",
|
||||
]
|
||||
|
||||
[[package]]
|
||||
@@ -6433,7 +6449,7 @@ dependencies = [
|
||||
"tauri-plugin",
|
||||
"thiserror 2.0.12",
|
||||
"url",
|
||||
"windows 0.61.1",
|
||||
"windows 0.61.3",
|
||||
"zbus",
|
||||
]
|
||||
|
||||
@@ -6562,25 +6578,6 @@ dependencies = [
|
||||
"zip 2.6.1",
|
||||
]
|
||||
|
||||
[[package]]
|
||||
name = "tauri-plugin-websocket"
|
||||
version = "2.3.0"
|
||||
source = "registry+https://github.com/rust-lang/crates.io-index"
|
||||
checksum = "af3ac71aec5fb0ae5441e830cd075b1cbed49ac3d39cb975a4894ea8fa2e62b9"
|
||||
dependencies = [
|
||||
"futures-util",
|
||||
"http 1.3.1",
|
||||
"log",
|
||||
"rand 0.8.5",
|
||||
"serde",
|
||||
"serde_json",
|
||||
"tauri",
|
||||
"tauri-plugin",
|
||||
"thiserror 2.0.12",
|
||||
"tokio",
|
||||
"tokio-tungstenite 0.26.2",
|
||||
]
|
||||
|
||||
[[package]]
|
||||
name = "tauri-plugin-windows-version"
|
||||
version = "2.0.0"
|
||||
@@ -6613,7 +6610,7 @@ dependencies = [
|
||||
"tauri-utils",
|
||||
"thiserror 2.0.12",
|
||||
"url",
|
||||
"windows 0.61.1",
|
||||
"windows 0.61.3",
|
||||
]
|
||||
|
||||
[[package]]
|
||||
@@ -6639,7 +6636,7 @@ dependencies = [
|
||||
"url",
|
||||
"webkit2gtk",
|
||||
"webview2-com",
|
||||
"windows 0.61.1",
|
||||
"windows 0.61.3",
|
||||
"wry",
|
||||
]
|
||||
|
||||
@@ -6688,7 +6685,7 @@ source = "registry+https://github.com/rust-lang/crates.io-index"
|
||||
checksum = "e8d321dbc6f998d825ab3f0d62673e810c861aac2d0de2cc2c395328f1d113b4"
|
||||
dependencies = [
|
||||
"embed-resource",
|
||||
"indexmap 2.9.0",
|
||||
"indexmap 2.10.0",
|
||||
"toml",
|
||||
]
|
||||
|
||||
@@ -6716,21 +6713,6 @@ dependencies = [
|
||||
"utf-8",
|
||||
]
|
||||
|
||||
[[package]]
|
||||
name = "termcolor"
|
||||
version = "1.4.1"
|
||||
source = "registry+https://github.com/rust-lang/crates.io-index"
|
||||
checksum = "06794f8f6c5c898b3275aebefa6b8a1cb24cd2c6c79397ab15774837a0bc5755"
|
||||
dependencies = [
|
||||
"winapi-util",
|
||||
]
|
||||
|
||||
[[package]]
|
||||
name = "textwrap"
|
||||
version = "0.16.2"
|
||||
source = "registry+https://github.com/rust-lang/crates.io-index"
|
||||
checksum = "c13547615a44dc9c452a8a534638acdf07120d4b6847c8178705da06306a3057"
|
||||
|
||||
[[package]]
|
||||
name = "thin-slice"
|
||||
version = "0.1.1"
|
||||
@@ -6821,12 +6803,6 @@ dependencies = [
|
||||
"time-core",
|
||||
]
|
||||
|
||||
[[package]]
|
||||
name = "tini"
|
||||
version = "1.3.0"
|
||||
source = "registry+https://github.com/rust-lang/crates.io-index"
|
||||
checksum = "e004df4c5f0805eb5f55883204a514cfa43a6d924741be29e871753a53d5565a"
|
||||
|
||||
[[package]]
|
||||
name = "tiny-keccak"
|
||||
version = "2.0.2"
|
||||
@@ -6911,6 +6887,17 @@ dependencies = [
|
||||
"tokio",
|
||||
]
|
||||
|
||||
[[package]]
|
||||
name = "tokio-stream"
|
||||
version = "0.1.17"
|
||||
source = "registry+https://github.com/rust-lang/crates.io-index"
|
||||
checksum = "eca58d7bba4a75707817a2c44174253f9236b2d5fbd055602e9d5c07c139a047"
|
||||
dependencies = [
|
||||
"futures-core",
|
||||
"pin-project-lite",
|
||||
"tokio",
|
||||
]
|
||||
|
||||
[[package]]
|
||||
name = "tokio-tungstenite"
|
||||
version = "0.20.1"
|
||||
@@ -6925,22 +6912,6 @@ dependencies = [
|
||||
"tungstenite 0.20.1",
|
||||
]
|
||||
|
||||
[[package]]
|
||||
name = "tokio-tungstenite"
|
||||
version = "0.26.2"
|
||||
source = "registry+https://github.com/rust-lang/crates.io-index"
|
||||
checksum = "7a9daff607c6d2bf6c16fd681ccb7eecc83e4e2cdc1ca067ffaadfca5de7f084"
|
||||
dependencies = [
|
||||
"futures-util",
|
||||
"log",
|
||||
"rustls",
|
||||
"rustls-pki-types",
|
||||
"tokio",
|
||||
"tokio-rustls",
|
||||
"tungstenite 0.26.2",
|
||||
"webpki-roots 0.26.11",
|
||||
]
|
||||
|
||||
[[package]]
|
||||
name = "tokio-util"
|
||||
version = "0.7.15"
|
||||
@@ -6981,7 +6952,7 @@ version = "0.19.15"
|
||||
source = "registry+https://github.com/rust-lang/crates.io-index"
|
||||
checksum = "1b5bb770da30e5cbfde35a2d7b9b8a2c4b8ef89548a7a6aeab5c9a576e3e7421"
|
||||
dependencies = [
|
||||
"indexmap 2.9.0",
|
||||
"indexmap 2.10.0",
|
||||
"toml_datetime",
|
||||
"winnow 0.5.40",
|
||||
]
|
||||
@@ -6992,7 +6963,7 @@ version = "0.20.7"
|
||||
source = "registry+https://github.com/rust-lang/crates.io-index"
|
||||
checksum = "70f427fce4d84c72b5b732388bf4a9f4531b53f74e2887e3ecb2481f68f66d81"
|
||||
dependencies = [
|
||||
"indexmap 2.9.0",
|
||||
"indexmap 2.10.0",
|
||||
"toml_datetime",
|
||||
"winnow 0.5.40",
|
||||
]
|
||||
@@ -7003,7 +6974,7 @@ version = "0.22.26"
|
||||
source = "registry+https://github.com/rust-lang/crates.io-index"
|
||||
checksum = "310068873db2c5b3e7659d2cc35d21855dbafa50d1ce336397c666e3cb08137e"
|
||||
dependencies = [
|
||||
"indexmap 2.9.0",
|
||||
"indexmap 2.10.0",
|
||||
"serde",
|
||||
"serde_spanned",
|
||||
"toml_datetime",
|
||||
@@ -7147,25 +7118,6 @@ dependencies = [
|
||||
"utf-8",
|
||||
]
|
||||
|
||||
[[package]]
|
||||
name = "tungstenite"
|
||||
version = "0.26.2"
|
||||
source = "registry+https://github.com/rust-lang/crates.io-index"
|
||||
checksum = "4793cb5e56680ecbb1d843515b23b6de9a75eb04b66643e256a396d43be33c13"
|
||||
dependencies = [
|
||||
"bytes",
|
||||
"data-encoding",
|
||||
"http 1.3.1",
|
||||
"httparse",
|
||||
"log",
|
||||
"rand 0.9.1",
|
||||
"rustls",
|
||||
"rustls-pki-types",
|
||||
"sha1",
|
||||
"thiserror 2.0.12",
|
||||
"utf-8",
|
||||
]
|
||||
|
||||
[[package]]
|
||||
name = "typeid"
|
||||
version = "1.0.3"
|
||||
@@ -7581,8 +7533,8 @@ checksum = "b542b5cfbd9618c46c2784e4d41ba218c336ac70d44c55e47b251033e7d85601"
|
||||
dependencies = [
|
||||
"webview2-com-macros",
|
||||
"webview2-com-sys",
|
||||
"windows 0.61.1",
|
||||
"windows-core 0.61.0",
|
||||
"windows 0.61.3",
|
||||
"windows-core 0.61.2",
|
||||
"windows-implement 0.60.0",
|
||||
"windows-interface 0.59.1",
|
||||
]
|
||||
@@ -7605,8 +7557,8 @@ source = "registry+https://github.com/rust-lang/crates.io-index"
|
||||
checksum = "8ae2d11c4a686e4409659d7891791254cf9286d3cfe0eef54df1523533d22295"
|
||||
dependencies = [
|
||||
"thiserror 2.0.12",
|
||||
"windows 0.61.1",
|
||||
"windows-core 0.61.0",
|
||||
"windows 0.61.3",
|
||||
"windows-core 0.61.2",
|
||||
]
|
||||
|
||||
[[package]]
|
||||
@@ -7707,12 +7659,12 @@ dependencies = [
|
||||
|
||||
[[package]]
|
||||
name = "windows"
|
||||
version = "0.61.1"
|
||||
version = "0.61.3"
|
||||
source = "registry+https://github.com/rust-lang/crates.io-index"
|
||||
checksum = "c5ee8f3d025738cb02bad7868bbb5f8a6327501e870bf51f1b455b0a2454a419"
|
||||
checksum = "9babd3a767a4c1aef6900409f85f5d53ce2544ccdfaa86dad48c91782c6d6893"
|
||||
dependencies = [
|
||||
"windows-collections",
|
||||
"windows-core 0.61.0",
|
||||
"windows-core 0.61.2",
|
||||
"windows-future",
|
||||
"windows-link",
|
||||
"windows-numerics",
|
||||
@@ -7724,7 +7676,7 @@ version = "0.2.0"
|
||||
source = "registry+https://github.com/rust-lang/crates.io-index"
|
||||
checksum = "3beeceb5e5cfd9eb1d76b381630e82c4241ccd0d27f1a39ed41b2760b255c5e8"
|
||||
dependencies = [
|
||||
"windows-core 0.61.0",
|
||||
"windows-core 0.61.2",
|
||||
]
|
||||
|
||||
[[package]]
|
||||
@@ -7757,32 +7709,33 @@ checksum = "810ce18ed2112484b0d4e15d022e5f598113e220c53e373fb31e67e21670c1ce"
|
||||
dependencies = [
|
||||
"windows-implement 0.59.0",
|
||||
"windows-interface 0.59.1",
|
||||
"windows-result 0.3.2",
|
||||
"windows-result 0.3.4",
|
||||
"windows-strings 0.3.1",
|
||||
"windows-targets 0.53.0",
|
||||
]
|
||||
|
||||
[[package]]
|
||||
name = "windows-core"
|
||||
version = "0.61.0"
|
||||
version = "0.61.2"
|
||||
source = "registry+https://github.com/rust-lang/crates.io-index"
|
||||
checksum = "4763c1de310c86d75a878046489e2e5ba02c649d185f21c67d4cf8a56d098980"
|
||||
checksum = "c0fdd3ddb90610c7638aa2b3a3ab2904fb9e5cdbecc643ddb3647212781c4ae3"
|
||||
dependencies = [
|
||||
"windows-implement 0.60.0",
|
||||
"windows-interface 0.59.1",
|
||||
"windows-link",
|
||||
"windows-result 0.3.2",
|
||||
"windows-strings 0.4.0",
|
||||
"windows-result 0.3.4",
|
||||
"windows-strings 0.4.2",
|
||||
]
|
||||
|
||||
[[package]]
|
||||
name = "windows-future"
|
||||
version = "0.2.0"
|
||||
version = "0.2.1"
|
||||
source = "registry+https://github.com/rust-lang/crates.io-index"
|
||||
checksum = "7a1d6bbefcb7b60acd19828e1bc965da6fcf18a7e39490c5f8be71e54a19ba32"
|
||||
checksum = "fc6a41e98427b19fe4b73c550f060b59fa592d7d686537eebf9385621bfbad8e"
|
||||
dependencies = [
|
||||
"windows-core 0.61.0",
|
||||
"windows-core 0.61.2",
|
||||
"windows-link",
|
||||
"windows-threading",
|
||||
]
|
||||
|
||||
[[package]]
|
||||
@@ -7877,9 +7830,9 @@ dependencies = [
|
||||
|
||||
[[package]]
|
||||
name = "windows-link"
|
||||
version = "0.1.1"
|
||||
version = "0.1.3"
|
||||
source = "registry+https://github.com/rust-lang/crates.io-index"
|
||||
checksum = "76840935b766e1b0a05c0066835fb9ec80071d4c09a16f6bd5f7e655e3c14c38"
|
||||
checksum = "5e6ad25900d524eaabdbbb96d20b4311e1e7ae1699af4fb28c17ae66c80d798a"
|
||||
|
||||
[[package]]
|
||||
name = "windows-numerics"
|
||||
@@ -7887,7 +7840,7 @@ version = "0.2.0"
|
||||
source = "registry+https://github.com/rust-lang/crates.io-index"
|
||||
checksum = "9150af68066c4c5c07ddc0ce30421554771e528bde427614c61038bc2c92c2b1"
|
||||
dependencies = [
|
||||
"windows-core 0.61.0",
|
||||
"windows-core 0.61.2",
|
||||
"windows-link",
|
||||
]
|
||||
|
||||
@@ -7897,7 +7850,7 @@ version = "0.4.0"
|
||||
source = "registry+https://github.com/rust-lang/crates.io-index"
|
||||
checksum = "4286ad90ddb45071efd1a66dfa43eb02dd0dfbae1545ad6cc3c51cf34d7e8ba3"
|
||||
dependencies = [
|
||||
"windows-result 0.3.2",
|
||||
"windows-result 0.3.4",
|
||||
"windows-strings 0.3.1",
|
||||
"windows-targets 0.53.0",
|
||||
]
|
||||
@@ -7909,8 +7862,8 @@ source = "registry+https://github.com/rust-lang/crates.io-index"
|
||||
checksum = "ad1da3e436dc7653dfdf3da67332e22bff09bb0e28b0239e1624499c7830842e"
|
||||
dependencies = [
|
||||
"windows-link",
|
||||
"windows-result 0.3.2",
|
||||
"windows-strings 0.4.0",
|
||||
"windows-result 0.3.4",
|
||||
"windows-strings 0.4.2",
|
||||
]
|
||||
|
||||
[[package]]
|
||||
@@ -7924,9 +7877,9 @@ dependencies = [
|
||||
|
||||
[[package]]
|
||||
name = "windows-result"
|
||||
version = "0.3.2"
|
||||
version = "0.3.4"
|
||||
source = "registry+https://github.com/rust-lang/crates.io-index"
|
||||
checksum = "c64fd11a4fd95df68efcfee5f44a294fe71b8bc6a91993e2791938abcc712252"
|
||||
checksum = "56f42bd332cc6c8eac5af113fc0c1fd6a8fd2aa08a0119358686e5160d0586c6"
|
||||
dependencies = [
|
||||
"windows-link",
|
||||
]
|
||||
@@ -7952,9 +7905,9 @@ dependencies = [
|
||||
|
||||
[[package]]
|
||||
name = "windows-strings"
|
||||
version = "0.4.0"
|
||||
version = "0.4.2"
|
||||
source = "registry+https://github.com/rust-lang/crates.io-index"
|
||||
checksum = "7a2ba9642430ee452d5a7aa78d72907ebe8cfda358e8cb7918a2050581322f97"
|
||||
checksum = "56e6c93f3a0c3b36176cb1327a4958a0353d5d166c2a35cb268ace15e91d3b57"
|
||||
dependencies = [
|
||||
"windows-link",
|
||||
]
|
||||
@@ -8057,6 +8010,15 @@ dependencies = [
|
||||
"windows_x86_64_msvc 0.53.0",
|
||||
]
|
||||
|
||||
[[package]]
|
||||
name = "windows-threading"
|
||||
version = "0.1.0"
|
||||
source = "registry+https://github.com/rust-lang/crates.io-index"
|
||||
checksum = "b66463ad2e0ea3bbf808b7f1d371311c80e115c0b71d60efc142cafbcfb057a6"
|
||||
dependencies = [
|
||||
"windows-link",
|
||||
]
|
||||
|
||||
[[package]]
|
||||
name = "windows-version"
|
||||
version = "0.1.4"
|
||||
@@ -8336,8 +8298,8 @@ dependencies = [
|
||||
"webkit2gtk",
|
||||
"webkit2gtk-sys",
|
||||
"webview2-com",
|
||||
"windows 0.61.1",
|
||||
"windows-core 0.61.0",
|
||||
"windows 0.61.3",
|
||||
"windows-core 0.61.2",
|
||||
"windows-version",
|
||||
"x11-dl",
|
||||
]
|
||||
@@ -8416,16 +8378,10 @@ dependencies = [
|
||||
]
|
||||
|
||||
[[package]]
|
||||
name = "xdgkit"
|
||||
version = "3.2.5"
|
||||
name = "xdg"
|
||||
version = "2.5.2"
|
||||
source = "registry+https://github.com/rust-lang/crates.io-index"
|
||||
checksum = "5aeac9c0125f3c131c6a2898d2a9f25c11b7954c3ff644a018cb9e06fa92919b"
|
||||
dependencies = [
|
||||
"clap",
|
||||
"quick-xml 0.21.0",
|
||||
"serde",
|
||||
"tini",
|
||||
]
|
||||
checksum = "213b7324336b53d2414b2db8537e56544d981803139155afa84f76eeebb7a546"
|
||||
|
||||
[[package]]
|
||||
name = "xkbcommon"
|
||||
@@ -8444,15 +8400,6 @@ version = "0.2.1"
|
||||
source = "registry+https://github.com/rust-lang/crates.io-index"
|
||||
checksum = "b9cc00251562a284751c9973bace760d86c0276c471b4be569fe6b068ee97a56"
|
||||
|
||||
[[package]]
|
||||
name = "yaml-rust"
|
||||
version = "0.4.5"
|
||||
source = "registry+https://github.com/rust-lang/crates.io-index"
|
||||
checksum = "56c1936c4cc7a1c9ab21a1ebb602eb942ba868cbd44a99cb7cdc5892335e1c85"
|
||||
dependencies = [
|
||||
"linked-hash-map",
|
||||
]
|
||||
|
||||
[[package]]
|
||||
name = "yoke"
|
||||
version = "0.8.0"
|
||||
@@ -8641,7 +8588,7 @@ dependencies = [
|
||||
"arbitrary",
|
||||
"crc32fast",
|
||||
"crossbeam-utils",
|
||||
"indexmap 2.9.0",
|
||||
"indexmap 2.10.0",
|
||||
"memchr",
|
||||
]
|
||||
|
||||
@@ -8660,7 +8607,7 @@ dependencies = [
|
||||
"flate2",
|
||||
"getrandom 0.3.2",
|
||||
"hmac",
|
||||
"indexmap 2.9.0",
|
||||
"indexmap 2.10.0",
|
||||
"liblzma",
|
||||
"memchr",
|
||||
"pbkdf2",
|
||||
|
||||
@@ -1,9 +1,9 @@
|
||||
[package]
|
||||
name = "coco"
|
||||
version = "0.6.0"
|
||||
version = "0.7.1"
|
||||
description = "Search, connect, collaborate – all in one place."
|
||||
authors = ["INFINI Labs"]
|
||||
edition = "2021"
|
||||
edition = "2024"
|
||||
|
||||
# See more keys and their definitions at https://doc.rust-lang.org/cargo/reference/manifest.html
|
||||
[lib]
|
||||
@@ -51,7 +51,6 @@ serde = { version = "1", features = ["derive"] }
|
||||
# see: https://docs.rs/serde_json/latest/serde_json/struct.Number.html#method.from_u128
|
||||
serde_json = { version = "1", features = ["arbitrary_precision", "preserve_order"] }
|
||||
tauri-plugin-http = "2"
|
||||
tauri-plugin-websocket = "2"
|
||||
tauri-plugin-deep-link = "2.0.0"
|
||||
tauri-plugin-store = "2.2.0"
|
||||
tauri-plugin-os = "2"
|
||||
@@ -62,7 +61,7 @@ tauri-plugin-drag = "2"
|
||||
tauri-plugin-macos-permissions = "2"
|
||||
tauri-plugin-fs-pro = "2"
|
||||
tauri-plugin-screenshots = "2"
|
||||
applications = { git = "https://github.com/infinilabs/applications-rs", rev = "7bb507e6b12f73c96f3a52f0578d0246a689f381" }
|
||||
applications = { git = "https://github.com/infinilabs/applications-rs", rev = "31b0c030a0f3bc82275fe12debe526153978671d" }
|
||||
tokio-native-tls = "0.3" # For wss connections
|
||||
tokio = { version = "1", features = ["full"] }
|
||||
tokio-tungstenite = { version = "0.20", features = ["native-tls"] }
|
||||
@@ -87,7 +86,7 @@ http = "1.1.0"
|
||||
tungstenite = "0.24.0"
|
||||
tokio-util = "0.7.14"
|
||||
tauri-plugin-windows-version = "2"
|
||||
meval = "0.2"
|
||||
meval = { git = "https://github.com/infinilabs/meval-rs" }
|
||||
chinese-number = "0.7"
|
||||
num2words = "1"
|
||||
tauri-plugin-log = "2"
|
||||
@@ -102,9 +101,16 @@ tauri-plugin-opener = "2"
|
||||
async-recursion = "1.1.1"
|
||||
zip = "4.0.0"
|
||||
url = "2.5.2"
|
||||
camino = "1.1.10"
|
||||
tokio-stream = { version = "0.1.17", features = ["io-util"] }
|
||||
cfg-if = "1.0.1"
|
||||
sysinfo = "0.35.2"
|
||||
indexmap = { version = "2.10.0", features = ["serde"] }
|
||||
strum = { version = "0.27.2", features = ["derive"] }
|
||||
|
||||
[target."cfg(target_os = \"macos\")".dependencies]
|
||||
tauri-nspanel = { git = "https://github.com/ahkohd/tauri-nspanel", branch = "v2" }
|
||||
cocoa = "0.24"
|
||||
|
||||
[target."cfg(any(target_os = \"macos\", windows, target_os = \"linux\"))".dependencies]
|
||||
tauri-plugin-single-instance = { version = "2.0.0", features = ["deep-link"] }
|
||||
@@ -123,6 +129,9 @@ strip = true # Ensures debug symbols are removed.
|
||||
tauri-plugin-autostart = "^2.2"
|
||||
tauri-plugin-global-shortcut = "2"
|
||||
tauri-plugin-updater = { git = "https://github.com/infinilabs/plugins-workspace", branch = "v2" }
|
||||
# This should be compatible with the semver used by `tauri-plugin-updater`
|
||||
semver = { version = "1", features = ["serde"] }
|
||||
|
||||
[target."cfg(target_os = \"windows\")".dependencies]
|
||||
enigo="0.3"
|
||||
windows = { version = "0.61.3", features = ["Win32_Foundation", "Win32_System_Com", "Win32_System_Ole", "Win32_System_Search", "Win32_UI_Shell_PropertiesSystem", "Win32_Data"] }
|
||||
|
||||
@@ -12,6 +12,8 @@
|
||||
<true/>
|
||||
<key>com.apple.security.automation.apple-events</key>
|
||||
<true/>
|
||||
<key>com.apple.security.device.microphone</key>
|
||||
<true/>
|
||||
<key>com.apple.security.device.audio-input</key>
|
||||
<true/>
|
||||
<key>com.apple.security.network.client</key>
|
||||
@@ -24,6 +26,5 @@
|
||||
<string>6GVZT94974.rs.coco.app</string>
|
||||
<key>com.apple.developer.team-identifier</key>
|
||||
<string>6GVZT94974</string>
|
||||
|
||||
</dict>
|
||||
</plist>
|
||||
</plist>
|
||||
@@ -2,11 +2,6 @@
|
||||
<!DOCTYPE plist PUBLIC "-//Apple//DTD PLIST 1.0//EN" "http://www.apple.com/DTDs/PropertyList-1.0.dtd">
|
||||
<plist version="1.0">
|
||||
<dict>
|
||||
<key>NSCameraUsageDescription</key>
|
||||
<string>Request camera access for WebRTC</string>
|
||||
<key>NSMicrophoneUsageDescription</key>
|
||||
<string>Request microphone access for WebRTC</string>
|
||||
|
||||
<key>CFBundleIdentifier</key>
|
||||
<string>rs.coco.app</string>
|
||||
<key>CFBundleExecutable</key>
|
||||
|
||||
@@ -1,3 +1,14 @@
|
||||
fn main() {
|
||||
tauri_build::build()
|
||||
tauri_build::build();
|
||||
|
||||
// If env var `GITHUB_ACTIONS` exists, we are running in CI, set up the `ci`
|
||||
// attribute
|
||||
if std::env::var("GITHUB_ACTIONS").is_ok() {
|
||||
println!("cargo:rustc-cfg=ci");
|
||||
}
|
||||
|
||||
// Notify `rustc` of this `cfg` attribute to suppress unknown attribute warnings.
|
||||
//
|
||||
// unexpected condition name: `ci`
|
||||
println!("cargo::rustc-check-cfg=cfg(ci)");
|
||||
}
|
||||
|
||||
@@ -37,9 +37,6 @@
|
||||
"http:allow-fetch-cancel",
|
||||
"http:allow-fetch-read-body",
|
||||
"http:allow-fetch-send",
|
||||
"websocket:default",
|
||||
"websocket:allow-connect",
|
||||
"websocket:allow-send",
|
||||
"autostart:allow-enable",
|
||||
"autostart:allow-disable",
|
||||
"autostart:allow-is-enabled",
|
||||
|
||||
@@ -1,2 +1,2 @@
|
||||
[toolchain]
|
||||
channel = "nightly-2025-02-28"
|
||||
channel = "nightly-2025-06-26"
|
||||
@@ -1,20 +1,20 @@
|
||||
use crate::common::assistant::ChatRequestMessage;
|
||||
use crate::common::http::{convert_query_params_to_strings, GetResponse};
|
||||
use crate::common::http::convert_query_params_to_strings;
|
||||
use crate::common::register::SearchSourceRegistry;
|
||||
use crate::server::http_client::HttpClient;
|
||||
use crate::{common, server::servers::COCO_SERVERS};
|
||||
use futures::stream::FuturesUnordered;
|
||||
use futures::StreamExt;
|
||||
use futures::stream::FuturesUnordered;
|
||||
use futures_util::TryStreamExt;
|
||||
use http::Method;
|
||||
use serde_json::Value;
|
||||
use std::collections::HashMap;
|
||||
use tauri::{AppHandle, Emitter, Manager, Runtime};
|
||||
use tauri::{AppHandle, Emitter, Manager};
|
||||
use tokio::io::AsyncBufReadExt;
|
||||
|
||||
#[tauri::command]
|
||||
pub async fn chat_history<R: Runtime>(
|
||||
_app_handle: AppHandle<R>,
|
||||
pub async fn chat_history(
|
||||
_app_handle: AppHandle,
|
||||
server_id: String,
|
||||
from: u32,
|
||||
size: u32,
|
||||
@@ -43,8 +43,8 @@ pub async fn chat_history<R: Runtime>(
|
||||
}
|
||||
|
||||
#[tauri::command]
|
||||
pub async fn session_chat_history<R: Runtime>(
|
||||
_app_handle: AppHandle<R>,
|
||||
pub async fn session_chat_history(
|
||||
_app_handle: AppHandle,
|
||||
server_id: String,
|
||||
session_id: String,
|
||||
from: u32,
|
||||
@@ -66,8 +66,8 @@ pub async fn session_chat_history<R: Runtime>(
|
||||
}
|
||||
|
||||
#[tauri::command]
|
||||
pub async fn open_session_chat<R: Runtime>(
|
||||
_app_handle: AppHandle<R>,
|
||||
pub async fn open_session_chat(
|
||||
_app_handle: AppHandle,
|
||||
server_id: String,
|
||||
session_id: String,
|
||||
) -> Result<String, String> {
|
||||
@@ -81,8 +81,8 @@ pub async fn open_session_chat<R: Runtime>(
|
||||
}
|
||||
|
||||
#[tauri::command]
|
||||
pub async fn close_session_chat<R: Runtime>(
|
||||
_app_handle: AppHandle<R>,
|
||||
pub async fn close_session_chat(
|
||||
_app_handle: AppHandle,
|
||||
server_id: String,
|
||||
session_id: String,
|
||||
) -> Result<String, String> {
|
||||
@@ -95,14 +95,16 @@ pub async fn close_session_chat<R: Runtime>(
|
||||
common::http::get_response_body_text(response).await
|
||||
}
|
||||
#[tauri::command]
|
||||
pub async fn cancel_session_chat<R: Runtime>(
|
||||
_app_handle: AppHandle<R>,
|
||||
pub async fn cancel_session_chat(
|
||||
_app_handle: AppHandle,
|
||||
server_id: String,
|
||||
session_id: String,
|
||||
query_params: Option<HashMap<String, Value>>,
|
||||
) -> Result<String, String> {
|
||||
let path = format!("/chat/{}/_cancel", session_id);
|
||||
let query_params = convert_query_params_to_strings(query_params);
|
||||
|
||||
let response = HttpClient::post(&server_id, path.as_str(), None, None)
|
||||
let response = HttpClient::post(&server_id, path.as_str(), query_params, None)
|
||||
.await
|
||||
.map_err(|e| format!("Error cancel session: {}", e))?;
|
||||
|
||||
@@ -110,82 +112,161 @@ pub async fn cancel_session_chat<R: Runtime>(
|
||||
}
|
||||
|
||||
#[tauri::command]
|
||||
pub async fn new_chat<R: Runtime>(
|
||||
_app_handle: AppHandle<R>,
|
||||
pub async fn chat_create(
|
||||
app_handle: AppHandle,
|
||||
server_id: String,
|
||||
websocket_id: String,
|
||||
message: String,
|
||||
message: Option<String>,
|
||||
attachments: Option<Vec<String>>,
|
||||
query_params: Option<HashMap<String, Value>>,
|
||||
) -> Result<GetResponse, String> {
|
||||
let body = if !message.is_empty() {
|
||||
let message = ChatRequestMessage {
|
||||
message: Some(message),
|
||||
client_id: String,
|
||||
) -> Result<(), String> {
|
||||
println!("chat_create message: {:?}", message);
|
||||
println!("chat_create attachments: {:?}", attachments);
|
||||
|
||||
let message_empty = message.as_ref().map_or(true, |m| m.is_empty());
|
||||
let attachments_empty = attachments.as_ref().map_or(true, |a| a.is_empty());
|
||||
|
||||
if message_empty && attachments_empty {
|
||||
return Err("Message and attachments are empty".to_string());
|
||||
}
|
||||
|
||||
let body = {
|
||||
let request_message: ChatRequestMessage = ChatRequestMessage {
|
||||
message,
|
||||
attachments,
|
||||
};
|
||||
|
||||
println!("chat_create body: {:?}", request_message);
|
||||
|
||||
Some(
|
||||
serde_json::to_string(&message)
|
||||
serde_json::to_string(&request_message)
|
||||
.map_err(|e| format!("Failed to serialize message: {}", e))?
|
||||
.into(),
|
||||
)
|
||||
} else {
|
||||
None
|
||||
};
|
||||
|
||||
let mut headers = HashMap::new();
|
||||
headers.insert("WEBSOCKET-SESSION-ID".to_string(), websocket_id.into());
|
||||
|
||||
let response = HttpClient::advanced_post(
|
||||
&server_id,
|
||||
"/chat/_new",
|
||||
Some(headers),
|
||||
"/chat/_create",
|
||||
None,
|
||||
convert_query_params_to_strings(query_params),
|
||||
body,
|
||||
)
|
||||
.await
|
||||
.map_err(|e| format!("Error sending message: {}", e))?;
|
||||
|
||||
let body_text = common::http::get_response_body_text(response).await?;
|
||||
|
||||
log::debug!("New chat response: {}", &body_text);
|
||||
|
||||
let chat_response: GetResponse = serde_json::from_str(&body_text)
|
||||
.map_err(|e| format!("Failed to parse response JSON: {}", e))?;
|
||||
|
||||
if chat_response.result != "created" {
|
||||
return Err(format!("Unexpected result: {}", chat_response.result));
|
||||
if response.status() == 429 {
|
||||
log::warn!("Rate limit exceeded for chat create");
|
||||
return Err("Rate limited".to_string());
|
||||
}
|
||||
|
||||
Ok(chat_response)
|
||||
if !response.status().is_success() {
|
||||
return Err(format!("Request failed with status: {}", response.status()));
|
||||
}
|
||||
|
||||
let stream = response.bytes_stream();
|
||||
let reader = tokio_util::io::StreamReader::new(
|
||||
stream.map_err(|e| std::io::Error::new(std::io::ErrorKind::Other, e)),
|
||||
);
|
||||
let mut lines = tokio::io::BufReader::new(reader).lines();
|
||||
|
||||
log::info!("client_id_create: {}", &client_id);
|
||||
|
||||
while let Ok(Some(line)) = lines.next_line().await {
|
||||
log::info!("Received chat stream line: {}", &line);
|
||||
|
||||
if let Err(err) = app_handle.emit(&client_id, line) {
|
||||
log::error!("Emit failed: {:?}", err);
|
||||
|
||||
let _ = app_handle.emit("chat-create-error", format!("Emit failed: {:?}", err));
|
||||
}
|
||||
}
|
||||
|
||||
Ok(())
|
||||
}
|
||||
|
||||
#[tauri::command]
|
||||
pub async fn send_message<R: Runtime>(
|
||||
_app_handle: AppHandle<R>,
|
||||
pub async fn chat_chat(
|
||||
app_handle: AppHandle,
|
||||
server_id: String,
|
||||
websocket_id: String,
|
||||
session_id: String,
|
||||
message: String,
|
||||
message: Option<String>,
|
||||
attachments: Option<Vec<String>>,
|
||||
query_params: Option<HashMap<String, Value>>, //search,deep_thinking
|
||||
) -> Result<String, String> {
|
||||
let path = format!("/chat/{}/_send", session_id);
|
||||
let msg = ChatRequestMessage {
|
||||
message: Some(message),
|
||||
client_id: String,
|
||||
) -> Result<(), String> {
|
||||
println!("chat_chat message: {:?}", message);
|
||||
println!("chat_chat attachments: {:?}", attachments);
|
||||
|
||||
let message_empty = message.as_ref().map_or(true, |m| m.is_empty());
|
||||
let attachments_empty = attachments.as_ref().map_or(true, |a| a.is_empty());
|
||||
|
||||
if message_empty && attachments_empty {
|
||||
return Err("Message and attachments are empty".to_string());
|
||||
}
|
||||
|
||||
let body = {
|
||||
let request_message = ChatRequestMessage {
|
||||
message,
|
||||
attachments,
|
||||
};
|
||||
|
||||
println!("chat_chat body: {:?}", request_message);
|
||||
|
||||
Some(
|
||||
serde_json::to_string(&request_message)
|
||||
.map_err(|e| format!("Failed to serialize message: {}", e))?
|
||||
.into(),
|
||||
)
|
||||
};
|
||||
|
||||
let mut headers = HashMap::new();
|
||||
headers.insert("WEBSOCKET-SESSION-ID".to_string(), websocket_id.into());
|
||||
let path = format!("/chat/{}/_chat", session_id);
|
||||
|
||||
let body = reqwest::Body::from(serde_json::to_string(&msg).unwrap());
|
||||
let response = HttpClient::advanced_post(
|
||||
&server_id,
|
||||
path.as_str(),
|
||||
Some(headers),
|
||||
None,
|
||||
convert_query_params_to_strings(query_params),
|
||||
Some(body),
|
||||
body,
|
||||
)
|
||||
.await
|
||||
.map_err(|e| format!("Error cancel session: {}", e))?;
|
||||
.map_err(|e| format!("Error sending message: {}", e))?;
|
||||
|
||||
common::http::get_response_body_text(response).await
|
||||
if response.status() == 429 {
|
||||
log::warn!("Rate limit exceeded for chat create");
|
||||
return Err("Rate limited".to_string());
|
||||
}
|
||||
|
||||
if !response.status().is_success() {
|
||||
return Err(format!("Request failed with status: {}", response.status()));
|
||||
}
|
||||
|
||||
let stream = response.bytes_stream();
|
||||
let reader = tokio_util::io::StreamReader::new(
|
||||
stream.map_err(|e| std::io::Error::new(std::io::ErrorKind::Other, e)),
|
||||
);
|
||||
let mut lines = tokio::io::BufReader::new(reader).lines();
|
||||
let mut first_log = true;
|
||||
|
||||
log::info!("client_id: {}", &client_id);
|
||||
|
||||
while let Ok(Some(line)) = lines.next_line().await {
|
||||
log::info!("Received chat stream line: {}", &line);
|
||||
if first_log {
|
||||
log::info!("first stream line: {}", &line);
|
||||
first_log = false;
|
||||
}
|
||||
|
||||
if let Err(err) = app_handle.emit(&client_id, line) {
|
||||
log::error!("Emit failed: {:?}", err);
|
||||
|
||||
print!("Error sending message: {:?}", err);
|
||||
|
||||
let _ = app_handle.emit("chat-create-error", format!("Emit failed: {:?}", err));
|
||||
}
|
||||
}
|
||||
|
||||
Ok(())
|
||||
}
|
||||
|
||||
#[tauri::command]
|
||||
@@ -232,8 +313,8 @@ pub async fn update_session_chat(
|
||||
}
|
||||
|
||||
#[tauri::command]
|
||||
pub async fn assistant_search<R: Runtime>(
|
||||
_app_handle: AppHandle<R>,
|
||||
pub async fn assistant_search(
|
||||
_app_handle: AppHandle,
|
||||
server_id: String,
|
||||
query_params: Option<Vec<String>>,
|
||||
) -> Result<Value, String> {
|
||||
@@ -248,8 +329,8 @@ pub async fn assistant_search<R: Runtime>(
|
||||
}
|
||||
|
||||
#[tauri::command]
|
||||
pub async fn assistant_get<R: Runtime>(
|
||||
_app_handle: AppHandle<R>,
|
||||
pub async fn assistant_get(
|
||||
_app_handle: AppHandle,
|
||||
server_id: String,
|
||||
assistant_id: String,
|
||||
) -> Result<Value, String> {
|
||||
@@ -272,8 +353,8 @@ pub async fn assistant_get<R: Runtime>(
|
||||
///
|
||||
/// Returns as soon as the assistant is found on any Coco server.
|
||||
#[tauri::command]
|
||||
pub async fn assistant_get_multi<R: Runtime>(
|
||||
app_handle: AppHandle<R>,
|
||||
pub async fn assistant_get_multi(
|
||||
app_handle: AppHandle,
|
||||
assistant_id: String,
|
||||
) -> Result<Value, String> {
|
||||
let search_sources = app_handle.state::<SearchSourceRegistry>();
|
||||
@@ -366,8 +447,8 @@ pub fn remove_icon_fields(json: &str) -> String {
|
||||
}
|
||||
|
||||
#[tauri::command]
|
||||
pub async fn ask_ai<R: Runtime>(
|
||||
app_handle: AppHandle<R>,
|
||||
pub async fn ask_ai(
|
||||
app_handle: AppHandle,
|
||||
message: String,
|
||||
server_id: String,
|
||||
assistant_id: String,
|
||||
|
||||
@@ -1,6 +1,6 @@
|
||||
use std::{fs::create_dir, io::Read};
|
||||
|
||||
use tauri::{Manager, Runtime};
|
||||
use tauri::Manager;
|
||||
use tauri_plugin_autostart::ManagerExt;
|
||||
|
||||
/// If the state reported from the OS and the state stored by us differ, our state is
|
||||
@@ -42,7 +42,7 @@ pub fn ensure_autostart_state_consistent(app: &mut tauri::App) -> Result<(), Str
|
||||
Ok(())
|
||||
}
|
||||
|
||||
fn current_autostart<R: Runtime>(app: &tauri::AppHandle<R>) -> Result<bool, String> {
|
||||
fn current_autostart(app: &tauri::AppHandle) -> Result<bool, String> {
|
||||
use std::fs::File;
|
||||
|
||||
let path = app.path().app_config_dir().unwrap();
|
||||
@@ -65,10 +65,7 @@ fn current_autostart<R: Runtime>(app: &tauri::AppHandle<R>) -> Result<bool, Stri
|
||||
}
|
||||
|
||||
#[tauri::command]
|
||||
pub async fn change_autostart<R: Runtime>(
|
||||
app: tauri::AppHandle<R>,
|
||||
open: bool,
|
||||
) -> Result<(), String> {
|
||||
pub async fn change_autostart(app: tauri::AppHandle, open: bool) -> Result<(), String> {
|
||||
use std::fs::File;
|
||||
use std::io::Write;
|
||||
|
||||
|
||||
@@ -3,7 +3,10 @@ use serde_json::Value;
|
||||
|
||||
#[derive(Debug, Clone, Serialize, Deserialize)]
|
||||
pub struct ChatRequestMessage {
|
||||
#[serde(skip_serializing_if = "Option::is_none")]
|
||||
pub message: Option<String>,
|
||||
#[serde(skip_serializing_if = "Option::is_none")]
|
||||
pub attachments: Option<Vec<String>>,
|
||||
}
|
||||
|
||||
#[allow(dead_code)]
|
||||
@@ -30,4 +33,4 @@ pub struct Session {
|
||||
#[derive(Debug, Serialize, Deserialize)]
|
||||
pub struct SessionContext {
|
||||
pub attachments: Option<Vec<String>>,
|
||||
}
|
||||
}
|
||||
|
||||
@@ -1,6 +1,6 @@
|
||||
use serde::{Deserialize, Serialize};
|
||||
|
||||
#[derive(Debug,Clone, Serialize, Deserialize)]
|
||||
#[derive(Debug, Clone, Serialize, Deserialize)]
|
||||
pub struct Connector {
|
||||
pub id: String,
|
||||
pub created: Option<String>,
|
||||
@@ -13,7 +13,7 @@ pub struct Connector {
|
||||
pub url: Option<String>,
|
||||
pub assets: Option<ConnectorAssets>,
|
||||
}
|
||||
#[derive(Debug,Clone, Serialize, Deserialize)]
|
||||
#[derive(Debug, Clone, Serialize, Deserialize)]
|
||||
pub struct ConnectorAssets {
|
||||
pub icons: Option<std::collections::HashMap<String, String>>,
|
||||
}
|
||||
}
|
||||
|
||||
@@ -18,4 +18,4 @@ pub struct DataSource {
|
||||
pub struct ConnectorConfig {
|
||||
pub id: Option<String>,
|
||||
pub config: Option<serde_json::Value>, // Using serde_json::Value to handle any type of config
|
||||
}
|
||||
}
|
||||
|
||||
@@ -1,5 +1,6 @@
|
||||
use serde::{Deserialize, Serialize};
|
||||
use std::collections::HashMap;
|
||||
use tauri::AppHandle;
|
||||
|
||||
#[derive(Debug, Clone, Serialize, Deserialize)]
|
||||
pub struct RichLabel {
|
||||
@@ -40,6 +41,15 @@ pub(crate) enum OnOpened {
|
||||
Command {
|
||||
action: crate::extension::CommandAction,
|
||||
},
|
||||
// NOTE that this variant has the same definition as `struct Quicklink`, but we
|
||||
// cannot use it directly, its `link` field should be deserialized/serialized
|
||||
// from/to a string, but we need a JSON object here.
|
||||
//
|
||||
// See also the comments in `struct Quicklink`.
|
||||
Quicklink {
|
||||
link: crate::extension::QuicklinkLink,
|
||||
open_with: Option<String>,
|
||||
},
|
||||
}
|
||||
|
||||
impl OnOpened {
|
||||
@@ -57,30 +67,37 @@ impl OnOpened {
|
||||
|
||||
ret
|
||||
}
|
||||
// Currently, our URL is static and does not support dynamic parameters.
|
||||
// The URL of a quicklink is nearly useless without such dynamic user
|
||||
// inputs, so until we have dynamic URL support, we just use "N/A".
|
||||
Self::Quicklink { .. } => String::from("N/A"),
|
||||
}
|
||||
}
|
||||
}
|
||||
|
||||
#[tauri::command]
|
||||
pub(crate) async fn open(on_opened: OnOpened) -> Result<(), String> {
|
||||
log::debug!("open({})", on_opened.url());
|
||||
|
||||
pub(crate) async fn open(
|
||||
tauri_app_handle: AppHandle,
|
||||
on_opened: OnOpened,
|
||||
extra_args: Option<HashMap<String, String>>,
|
||||
) -> Result<(), String> {
|
||||
use crate::util::open as homemade_tauri_shell_open;
|
||||
use crate::GLOBAL_TAURI_APP_HANDLE;
|
||||
use std::process::Command;
|
||||
|
||||
let global_tauri_app_handle = GLOBAL_TAURI_APP_HANDLE
|
||||
.get()
|
||||
.expect("global tauri app handle not set");
|
||||
|
||||
match on_opened {
|
||||
OnOpened::Application { app_path } => {
|
||||
homemade_tauri_shell_open(global_tauri_app_handle.clone(), app_path).await?
|
||||
log::debug!("open application [{}]", app_path);
|
||||
|
||||
homemade_tauri_shell_open(tauri_app_handle.clone(), app_path).await?
|
||||
}
|
||||
OnOpened::Document { url } => {
|
||||
homemade_tauri_shell_open(global_tauri_app_handle.clone(), url).await?
|
||||
log::debug!("open document [{}]", url);
|
||||
|
||||
homemade_tauri_shell_open(tauri_app_handle.clone(), url).await?
|
||||
}
|
||||
OnOpened::Command { action } => {
|
||||
log::debug!("open (execute) command [{:?}]", action);
|
||||
|
||||
let mut cmd = Command::new(action.exec);
|
||||
if let Some(args) = action.args {
|
||||
cmd.args(args);
|
||||
@@ -107,6 +124,39 @@ pub(crate) async fn open(on_opened: OnOpened) -> Result<(), String> {
|
||||
));
|
||||
}
|
||||
}
|
||||
OnOpened::Quicklink {
|
||||
link,
|
||||
open_with: opt_open_with,
|
||||
} => {
|
||||
let url = link.concatenate_url(&extra_args);
|
||||
|
||||
log::debug!("open quicklink [{}] with [{:?}]", url, opt_open_with);
|
||||
|
||||
cfg_if::cfg_if! {
|
||||
// The `open_with` functionality is only supported on macOS, provided
|
||||
// by the `open -a` command.
|
||||
if #[cfg(target_os = "macos")] {
|
||||
let mut cmd = Command::new("open");
|
||||
if let Some(ref open_with) = opt_open_with {
|
||||
cmd.arg("-a");
|
||||
cmd.arg(open_with.as_str());
|
||||
}
|
||||
cmd.arg(&url);
|
||||
|
||||
let output = cmd.output().map_err(|e| format!("failed to spawn [open] due to error [{}]", e))?;
|
||||
|
||||
if !output.status.success() {
|
||||
return Err(format!(
|
||||
"failed to open with app {:?}: {}",
|
||||
opt_open_with,
|
||||
String::from_utf8_lossy(&output.stderr)
|
||||
));
|
||||
}
|
||||
} else {
|
||||
homemade_tauri_shell_open(tauri_app_handle.clone(), url).await?
|
||||
}
|
||||
}
|
||||
}
|
||||
}
|
||||
|
||||
Ok(())
|
||||
|
||||
@@ -1,8 +1,22 @@
|
||||
use serde::{Deserialize, Serialize};
|
||||
use reqwest::StatusCode;
|
||||
use serde::{Deserialize, Serialize, Serializer};
|
||||
use thiserror::Error;
|
||||
|
||||
fn serialize_optional_status_code<S>(
|
||||
status_code: &Option<StatusCode>,
|
||||
serializer: S,
|
||||
) -> Result<S::Ok, S::Error>
|
||||
where
|
||||
S: Serializer,
|
||||
{
|
||||
match status_code {
|
||||
Some(code) => serializer.serialize_str(&format!("{:?}", code)),
|
||||
None => serializer.serialize_none(),
|
||||
}
|
||||
}
|
||||
|
||||
#[allow(unused)]
|
||||
#[derive(Debug, Deserialize)]
|
||||
#[allow(dead_code)]
|
||||
pub struct ErrorCause {
|
||||
#[serde(default)]
|
||||
pub r#type: Option<String>,
|
||||
@@ -11,7 +25,7 @@ pub struct ErrorCause {
|
||||
}
|
||||
|
||||
#[derive(Debug, Deserialize)]
|
||||
#[allow(dead_code)]
|
||||
#[allow(unused)]
|
||||
pub struct ErrorDetail {
|
||||
#[serde(default)]
|
||||
pub root_cause: Option<Vec<ErrorCause>>,
|
||||
@@ -24,18 +38,22 @@ pub struct ErrorDetail {
|
||||
}
|
||||
|
||||
#[derive(Debug, Deserialize)]
|
||||
#[allow(dead_code)]
|
||||
pub struct ErrorResponse {
|
||||
#[serde(default)]
|
||||
pub error: Option<ErrorDetail>,
|
||||
#[serde(default)]
|
||||
#[allow(unused)]
|
||||
pub status: Option<u16>,
|
||||
}
|
||||
|
||||
#[derive(Debug, Error, Serialize)]
|
||||
pub enum SearchError {
|
||||
#[error("HttpError: {0}")]
|
||||
HttpError(String),
|
||||
#[error("HttpError: status code [{status_code:?}], msg [{msg}]")]
|
||||
HttpError {
|
||||
#[serde(serialize_with = "serialize_optional_status_code")]
|
||||
status_code: Option<StatusCode>,
|
||||
msg: String,
|
||||
},
|
||||
|
||||
#[error("ParseError: {0}")]
|
||||
ParseError(String),
|
||||
@@ -43,12 +61,7 @@ pub enum SearchError {
|
||||
#[error("Timeout occurred")]
|
||||
Timeout,
|
||||
|
||||
#[error("UnknownError: {0}")]
|
||||
#[allow(dead_code)]
|
||||
Unknown(String),
|
||||
|
||||
#[error("InternalError: {0}")]
|
||||
#[allow(dead_code)]
|
||||
InternalError(String),
|
||||
}
|
||||
|
||||
@@ -59,7 +72,10 @@ impl From<reqwest::Error> for SearchError {
|
||||
} else if err.is_decode() {
|
||||
SearchError::ParseError(err.to_string())
|
||||
} else {
|
||||
SearchError::HttpError(err.to_string())
|
||||
SearchError::HttpError {
|
||||
status_code: err.status(),
|
||||
msg: err.to_string(),
|
||||
}
|
||||
}
|
||||
}
|
||||
}
|
||||
|
||||
@@ -38,7 +38,6 @@ pub async fn get_response_body_text(response: Response) -> Result<String, String
|
||||
return Err(fallback_error);
|
||||
}
|
||||
|
||||
|
||||
match serde_json::from_str::<common::error::ErrorResponse>(&body) {
|
||||
Ok(parsed_error) => {
|
||||
dbg!(&parsed_error);
|
||||
@@ -57,7 +56,6 @@ pub async fn get_response_body_text(response: Response) -> Result<String, String
|
||||
}
|
||||
}
|
||||
|
||||
|
||||
pub fn convert_query_params_to_strings(
|
||||
query_params: Option<HashMap<String, JsonValue>>,
|
||||
) -> Option<Vec<String>> {
|
||||
@@ -68,13 +66,10 @@ pub fn convert_query_params_to_strings(
|
||||
JsonValue::Number(n) => Some(format!("{}={}", k, n)),
|
||||
JsonValue::Bool(b) => Some(format!("{}={}", k, b)),
|
||||
_ => {
|
||||
eprintln!(
|
||||
"Skipping unsupported query value for key '{}': {:?}",
|
||||
k, v
|
||||
);
|
||||
eprintln!("Skipping unsupported query value for key '{}': {:?}", k, v);
|
||||
None
|
||||
}
|
||||
})
|
||||
.collect()
|
||||
})
|
||||
}
|
||||
}
|
||||
|
||||
@@ -13,4 +13,4 @@ pub struct UserProfile {
|
||||
pub email: String,
|
||||
pub avatar: Option<String>,
|
||||
pub preferences: Option<Preferences>,
|
||||
}
|
||||
}
|
||||
|
||||
@@ -7,8 +7,8 @@ use std::error::Error;
|
||||
|
||||
#[derive(Debug, Serialize, Deserialize)]
|
||||
pub struct SearchResponse<T> {
|
||||
pub took: u64,
|
||||
pub timed_out: bool,
|
||||
pub took: Option<u64>,
|
||||
pub timed_out: Option<bool>,
|
||||
pub _shards: Option<Shards>,
|
||||
pub hits: Hits<T>,
|
||||
}
|
||||
@@ -83,20 +83,6 @@ where
|
||||
.collect())
|
||||
}
|
||||
|
||||
#[allow(dead_code)]
|
||||
pub async fn parse_search_results_with_score<T>(
|
||||
response: Response,
|
||||
) -> Result<Vec<(T, Option<f64>)>, Box<dyn Error>>
|
||||
where
|
||||
T: for<'de> Deserialize<'de> + std::fmt::Debug,
|
||||
{
|
||||
Ok(parse_search_hits(response)
|
||||
.await?
|
||||
.into_iter()
|
||||
.map(|hit| (hit._source, hit._score))
|
||||
.collect())
|
||||
}
|
||||
|
||||
#[derive(Debug, Clone, Serialize)]
|
||||
pub struct SearchQuery {
|
||||
pub from: u64,
|
||||
|
||||
@@ -50,9 +50,17 @@ pub struct Server {
|
||||
pub updated: String,
|
||||
#[serde(default = "default_enabled_type")]
|
||||
pub enabled: bool,
|
||||
/// Public Coco servers can be used without signing in.
|
||||
#[serde(default = "default_bool_type")]
|
||||
pub public: bool,
|
||||
|
||||
/// A coco server is available if:
|
||||
///
|
||||
/// 1. It is still online, we check this via the `GET /base_url/provider/_info`
|
||||
/// interface.
|
||||
/// 2. A user is logged in to this Coco server, i.e., a token is stored in the
|
||||
/// `SERVER_TOKEN_LIST_CACHE`.
|
||||
/// For public Coco servers, requirement 2 is not needed.
|
||||
#[serde(default = "default_available_type")]
|
||||
pub available: bool,
|
||||
|
||||
@@ -84,7 +92,10 @@ pub struct ServerAccessToken {
|
||||
#[serde(default = "default_empty_string")] // Custom default function for empty string
|
||||
pub id: String,
|
||||
pub access_token: String,
|
||||
pub expired_at: u32, //unix timestamp in seconds
|
||||
/// Unix timestamp in seconds
|
||||
///
|
||||
/// Currently, this is UNUSED.
|
||||
pub expired_at: u32,
|
||||
}
|
||||
|
||||
impl ServerAccessToken {
|
||||
|
||||
@@ -2,10 +2,15 @@ use crate::common::error::SearchError;
|
||||
use crate::common::search::SearchQuery;
|
||||
use crate::common::search::{QueryResponse, QuerySource};
|
||||
use async_trait::async_trait;
|
||||
use tauri::AppHandle;
|
||||
|
||||
#[async_trait]
|
||||
pub trait SearchSource: Send + Sync {
|
||||
fn get_type(&self) -> QuerySource;
|
||||
|
||||
async fn search(&self, query: SearchQuery) -> Result<QueryResponse, SearchError>;
|
||||
async fn search(
|
||||
&self,
|
||||
tauri_app_handle: AppHandle,
|
||||
query: SearchQuery,
|
||||
) -> Result<QueryResponse, SearchError>;
|
||||
}
|
||||
|
||||
@@ -14,6 +14,8 @@ pub use without_feature::*;
|
||||
|
||||
#[derive(Debug, Serialize, Clone)]
|
||||
#[serde(rename_all = "camelCase")]
|
||||
#[allow(dead_code)]
|
||||
|
||||
pub struct AppEntry {
|
||||
path: String,
|
||||
name: String,
|
||||
@@ -45,4 +47,4 @@ pub(crate) const PLUGIN_JSON_FILE: &str = r#"
|
||||
"type": "group",
|
||||
"enabled": true
|
||||
}
|
||||
"#;
|
||||
"#;
|
||||
|
||||
@@ -1,8 +1,9 @@
|
||||
use super::super::Extension;
|
||||
use super::super::pizza_engine_runtime::RUNTIME_TX;
|
||||
use super::super::pizza_engine_runtime::SearchSourceState;
|
||||
use super::super::pizza_engine_runtime::Task;
|
||||
use super::super::pizza_engine_runtime::RUNTIME_TX;
|
||||
use super::super::Extension;
|
||||
use super::AppMetadata;
|
||||
use crate::GLOBAL_TAURI_APP_HANDLE;
|
||||
use crate::common::document::{DataSourceReference, Document, OnOpened};
|
||||
use crate::common::error::SearchError;
|
||||
use crate::common::search::{QueryResponse, QuerySource, SearchQuery};
|
||||
@@ -10,7 +11,6 @@ use crate::common::traits::SearchSource;
|
||||
use crate::extension::ExtensionType;
|
||||
use crate::extension::LOCAL_QUERY_SOURCE_TYPE;
|
||||
use crate::util::open;
|
||||
use crate::GLOBAL_TAURI_APP_HANDLE;
|
||||
use applications::{App, AppTrait};
|
||||
use async_trait::async_trait;
|
||||
use log::{error, warn};
|
||||
@@ -23,12 +23,12 @@ use pizza_engine::error::PizzaEngineError;
|
||||
use pizza_engine::search::{OriginalQuery, QueryContext, SearchResult, Searcher};
|
||||
use pizza_engine::store::{DiskStore, DiskStoreSnapshot};
|
||||
use pizza_engine::writer::Writer;
|
||||
use pizza_engine::{doc, Engine, EngineBuilder};
|
||||
use pizza_engine::{Engine, EngineBuilder, doc};
|
||||
use serde_json::Value as Json;
|
||||
use std::path::Path;
|
||||
use std::path::PathBuf;
|
||||
use tauri::{async_runtime, AppHandle, Manager, Runtime};
|
||||
use tauri_plugin_fs_pro::{icon, metadata, name, IconOptions};
|
||||
use tauri::{AppHandle, Manager, async_runtime};
|
||||
use tauri_plugin_fs_pro::{IconOptions, icon, metadata, name};
|
||||
use tauri_plugin_global_shortcut::GlobalShortcutExt;
|
||||
use tauri_plugin_global_shortcut::Shortcut;
|
||||
use tauri_plugin_global_shortcut::ShortcutEvent;
|
||||
@@ -131,10 +131,7 @@ async fn get_app_name(app: &App) -> String {
|
||||
/// Helper function to return an absolute path to `app`'s icon.
|
||||
///
|
||||
/// On macOS/Windows, we cache icons in our data directory using the `icon()` function.
|
||||
async fn get_app_icon_path<R: Runtime>(
|
||||
tauri_app_handle: &AppHandle<R>,
|
||||
app: &App,
|
||||
) -> Result<String, String> {
|
||||
async fn get_app_icon_path(tauri_app_handle: &AppHandle, app: &App) -> Result<String, String> {
|
||||
let res_path = if cfg!(target_os = "linux") {
|
||||
let icon_path = app
|
||||
.icon_path
|
||||
@@ -213,8 +210,8 @@ impl SearchSourceState for ApplicationSearchSourceState {
|
||||
}
|
||||
|
||||
/// Index applications if they have not been indexed (by checking if `app_index_dir` exists).
|
||||
async fn index_applications_if_not_indexed<R: Runtime>(
|
||||
tauri_app_handle: &AppHandle<R>,
|
||||
async fn index_applications_if_not_indexed(
|
||||
tauri_app_handle: &AppHandle,
|
||||
app_index_dir: &Path,
|
||||
) -> anyhow::Result<ApplicationSearchSourceState> {
|
||||
let index_exists = app_index_dir.exists();
|
||||
@@ -245,8 +242,29 @@ async fn index_applications_if_not_indexed<R: Runtime>(
|
||||
let mut writer = pizza_engine.acquire_writer();
|
||||
|
||||
if !index_exists {
|
||||
let default_search_path = get_default_search_paths();
|
||||
let apps = list_app_in(default_search_path).map_err(|str| anyhow::anyhow!(str))?;
|
||||
let search_path = {
|
||||
let disabled_app_list_and_search_path_store =
|
||||
tauri_app_handle.store(TAURI_STORE_DISABLED_APP_LIST_AND_SEARCH_PATH)?;
|
||||
let search_path_json = disabled_app_list_and_search_path_store
|
||||
.get(TAURI_STORE_KEY_SEARCH_PATH)
|
||||
.unwrap_or_else(|| {
|
||||
panic!("search path should be persisted in the store, but it is not, plz ensure that the store gets initialized before calling this function")
|
||||
});
|
||||
|
||||
let search_path: Vec<String> = match search_path_json {
|
||||
Json::Array(array) => array
|
||||
.into_iter()
|
||||
.map(|json| match json {
|
||||
Json::String(str) => str,
|
||||
_ => unreachable!("search path is stored in a string"),
|
||||
})
|
||||
.collect(),
|
||||
_ => unreachable!("search path is stored in an array"),
|
||||
};
|
||||
|
||||
search_path
|
||||
};
|
||||
let apps = list_app_in(search_path).map_err(|str| anyhow::anyhow!(str))?;
|
||||
|
||||
for app in apps.iter() {
|
||||
let app_path = get_app_path(app);
|
||||
@@ -273,8 +291,9 @@ async fn index_applications_if_not_indexed<R: Runtime>(
|
||||
// We don't error out because one failure won't break the whole thing
|
||||
if let Err(e) = writer.create_document(document).await {
|
||||
warn!(
|
||||
"failed to index application [app name: '{}', app path: '{}'] due to error [{}]", app_name, app_path, e
|
||||
)
|
||||
"failed to index application [app name: '{}', app path: '{}'] due to error [{}]",
|
||||
app_name, app_path, e
|
||||
)
|
||||
}
|
||||
}
|
||||
|
||||
@@ -293,13 +312,13 @@ async fn index_applications_if_not_indexed<R: Runtime>(
|
||||
}
|
||||
|
||||
/// Upon application start, index all the applications found in the `get_default_search_paths()`.
|
||||
struct IndexAllApplicationsTask<R: Runtime> {
|
||||
tauri_app_handle: AppHandle<R>,
|
||||
struct IndexAllApplicationsTask {
|
||||
tauri_app_handle: AppHandle,
|
||||
callback: Option<tokio::sync::oneshot::Sender<Result<(), String>>>,
|
||||
}
|
||||
|
||||
#[async_trait::async_trait(?Send)]
|
||||
impl<R: Runtime> Task for IndexAllApplicationsTask<R> {
|
||||
impl Task for IndexAllApplicationsTask {
|
||||
fn search_source_id(&self) -> &'static str {
|
||||
APPLICATION_SEARCH_SOURCE_ID
|
||||
}
|
||||
@@ -321,13 +340,13 @@ impl<R: Runtime> Task for IndexAllApplicationsTask<R> {
|
||||
}
|
||||
}
|
||||
|
||||
struct ReindexAllApplicationsTask<R: Runtime> {
|
||||
tauri_app_handle: AppHandle<R>,
|
||||
struct ReindexAllApplicationsTask {
|
||||
tauri_app_handle: AppHandle,
|
||||
callback: Option<tokio::sync::oneshot::Sender<Result<(), String>>>,
|
||||
}
|
||||
|
||||
#[async_trait::async_trait(?Send)]
|
||||
impl<R: Runtime> Task for ReindexAllApplicationsTask<R> {
|
||||
impl Task for ReindexAllApplicationsTask {
|
||||
fn search_source_id(&self) -> &'static str {
|
||||
APPLICATION_SEARCH_SOURCE_ID
|
||||
}
|
||||
@@ -355,14 +374,14 @@ impl<R: Runtime> Task for ReindexAllApplicationsTask<R> {
|
||||
}
|
||||
}
|
||||
|
||||
struct SearchApplicationsTask<R: Runtime> {
|
||||
tauri_app_handle: AppHandle<R>,
|
||||
struct SearchApplicationsTask {
|
||||
tauri_app_handle: AppHandle,
|
||||
query_string: String,
|
||||
callback: Option<OneshotSender<Result<SearchResult, PizzaEngineError>>>,
|
||||
}
|
||||
|
||||
#[async_trait::async_trait(?Send)]
|
||||
impl<R: Runtime> Task for SearchApplicationsTask<R> {
|
||||
impl Task for SearchApplicationsTask {
|
||||
fn search_source_id(&self) -> &'static str {
|
||||
APPLICATION_SEARCH_SOURCE_ID
|
||||
}
|
||||
@@ -380,7 +399,9 @@ impl<R: Runtime> Task for SearchApplicationsTask<R> {
|
||||
|
||||
let rx_dropped_error = callback.send(Ok(empty_hits)).is_err();
|
||||
if rx_dropped_error {
|
||||
warn!("failed to send local app search result back because the corresponding channel receiver end has been unexpected dropped, which could happen due to a low query timeout")
|
||||
warn!(
|
||||
"failed to send local app search result back because the corresponding channel receiver end has been unexpected dropped, which could happen due to a low query timeout"
|
||||
)
|
||||
}
|
||||
|
||||
return;
|
||||
@@ -401,7 +422,9 @@ impl<R: Runtime> Task for SearchApplicationsTask<R> {
|
||||
// It will be passed to Pizza like "Google\nChrome". Using Display impl would result
|
||||
// in an invalid query DSL and serde will complain.
|
||||
let dsl = format!(
|
||||
"{{ \"query\": {{ \"bool\": {{ \"should\": [ {{ \"match\": {{ \"{FIELD_APP_NAME}\": {:?} }} }}, {{ \"prefix\": {{ \"{FIELD_APP_NAME}\": {:?} }} }} ] }} }} }}", self.query_string, self.query_string);
|
||||
"{{ \"query\": {{ \"bool\": {{ \"should\": [ {{ \"match\": {{ \"{FIELD_APP_NAME}\": {:?} }} }}, {{ \"prefix\": {{ \"{FIELD_APP_NAME}\": {:?} }} }} ] }} }} }}",
|
||||
self.query_string, self.query_string
|
||||
);
|
||||
|
||||
let state = state
|
||||
.as_mut_any()
|
||||
@@ -432,7 +455,9 @@ impl<R: Runtime> Task for SearchApplicationsTask<R> {
|
||||
|
||||
let rx_dropped_error = callback.send(Ok(search_result)).is_err();
|
||||
if rx_dropped_error {
|
||||
warn!("failed to send local app search result back because the corresponding channel receiver end has been unexpected dropped, which could happen due to a low query timeout")
|
||||
warn!(
|
||||
"failed to send local app search result back because the corresponding channel receiver end has been unexpected dropped, which could happen due to a low query timeout"
|
||||
)
|
||||
}
|
||||
}
|
||||
}
|
||||
@@ -486,9 +511,33 @@ impl Task for IndexNewApplicationsTask {
|
||||
pub struct ApplicationSearchSource;
|
||||
|
||||
impl ApplicationSearchSource {
|
||||
pub async fn prepare_index_and_store<R: Runtime>(
|
||||
app_handle: AppHandle<R>,
|
||||
) -> Result<(), String> {
|
||||
pub async fn prepare_index_and_store(app_handle: AppHandle) -> Result<(), String> {
|
||||
app_handle
|
||||
.store(TAURI_STORE_APP_HOTKEY)
|
||||
.map_err(|e| e.to_string())?;
|
||||
let disabled_app_list_and_search_path_store = app_handle
|
||||
.store(TAURI_STORE_DISABLED_APP_LIST_AND_SEARCH_PATH)
|
||||
.map_err(|e| e.to_string())?;
|
||||
if disabled_app_list_and_search_path_store
|
||||
.get(TAURI_STORE_KEY_DISABLED_APP_LIST)
|
||||
.is_none()
|
||||
{
|
||||
disabled_app_list_and_search_path_store
|
||||
.set(TAURI_STORE_KEY_DISABLED_APP_LIST, Json::Array(Vec::new()));
|
||||
}
|
||||
|
||||
// IndexAllApplicationsTask will read the apps installed in search paths and
|
||||
// index them, so it depends on this configuration entry. Init this entry
|
||||
// before indexing apps.
|
||||
if disabled_app_list_and_search_path_store
|
||||
.get(TAURI_STORE_KEY_SEARCH_PATH)
|
||||
.is_none()
|
||||
{
|
||||
let default_search_path = get_default_search_paths();
|
||||
disabled_app_list_and_search_path_store
|
||||
.set(TAURI_STORE_KEY_SEARCH_PATH, default_search_path);
|
||||
}
|
||||
|
||||
let (tx, rx) = tokio::sync::oneshot::channel();
|
||||
let index_applications_task = IndexAllApplicationsTask {
|
||||
tauri_app_handle: app_handle.clone(),
|
||||
@@ -509,29 +558,6 @@ impl ApplicationSearchSource {
|
||||
)
|
||||
}
|
||||
|
||||
app_handle
|
||||
.store(TAURI_STORE_APP_HOTKEY)
|
||||
.map_err(|e| e.to_string())?;
|
||||
let disabled_app_list_and_search_path_store = app_handle
|
||||
.store(TAURI_STORE_DISABLED_APP_LIST_AND_SEARCH_PATH)
|
||||
.map_err(|e| e.to_string())?;
|
||||
if disabled_app_list_and_search_path_store
|
||||
.get(TAURI_STORE_KEY_DISABLED_APP_LIST)
|
||||
.is_none()
|
||||
{
|
||||
disabled_app_list_and_search_path_store
|
||||
.set(TAURI_STORE_KEY_DISABLED_APP_LIST, Json::Array(Vec::new()));
|
||||
}
|
||||
|
||||
if disabled_app_list_and_search_path_store
|
||||
.get(TAURI_STORE_KEY_SEARCH_PATH)
|
||||
.is_none()
|
||||
{
|
||||
let default_search_path = get_default_search_paths();
|
||||
disabled_app_list_and_search_path_store
|
||||
.set(TAURI_STORE_KEY_SEARCH_PATH, default_search_path);
|
||||
}
|
||||
|
||||
Ok(())
|
||||
}
|
||||
}
|
||||
@@ -549,7 +575,11 @@ impl SearchSource for ApplicationSearchSource {
|
||||
}
|
||||
}
|
||||
|
||||
async fn search(&self, query: SearchQuery) -> Result<QueryResponse, SearchError> {
|
||||
async fn search(
|
||||
&self,
|
||||
_tauri_app_handle: AppHandle,
|
||||
query: SearchQuery,
|
||||
) -> Result<QueryResponse, SearchError> {
|
||||
let query_string = query
|
||||
.query_strings
|
||||
.get("query")
|
||||
@@ -648,7 +678,7 @@ fn pizza_engine_hits_to_coco_hits(
|
||||
coco_hits
|
||||
}
|
||||
|
||||
pub fn set_app_alias<R: Runtime>(tauri_app_handle: &AppHandle<R>, app_path: &str, alias: &str) {
|
||||
pub fn set_app_alias(tauri_app_handle: &AppHandle, app_path: &str, alias: &str) {
|
||||
let store = tauri_app_handle
|
||||
.store(TAURI_STORE_APP_ALIAS)
|
||||
.unwrap_or_else(|_| panic!("store [{}] not found/loaded", TAURI_STORE_APP_ALIAS));
|
||||
@@ -661,7 +691,7 @@ pub fn set_app_alias<R: Runtime>(tauri_app_handle: &AppHandle<R>, app_path: &str
|
||||
// deleted while updating it.
|
||||
}
|
||||
|
||||
fn get_app_alias<R: Runtime>(tauri_app_handle: &AppHandle<R>, app_path: &str) -> Option<String> {
|
||||
fn get_app_alias(tauri_app_handle: &AppHandle, app_path: &str) -> Option<String> {
|
||||
let store = tauri_app_handle
|
||||
.store(TAURI_STORE_APP_ALIAS)
|
||||
.unwrap_or_else(|_| panic!("store [{}] not found/loaded", TAURI_STORE_APP_ALIAS));
|
||||
@@ -679,9 +709,9 @@ fn get_app_alias<R: Runtime>(tauri_app_handle: &AppHandle<R>, app_path: &str) ->
|
||||
/// The handler that will be invoked when an application hotkey is pressed.
|
||||
///
|
||||
/// The `app_path` argument is for logging-only.
|
||||
fn app_hotkey_handler<R: Runtime>(
|
||||
fn app_hotkey_handler(
|
||||
app_path: String,
|
||||
) -> impl Fn(&AppHandle<R>, &Shortcut, ShortcutEvent) + Send + Sync + 'static {
|
||||
) -> impl Fn(&AppHandle, &Shortcut, ShortcutEvent) + Send + Sync + 'static {
|
||||
move |tauri_app_handle, _hot_key, event| {
|
||||
if event.state() == ShortcutState::Pressed {
|
||||
let app_path_clone = app_path.clone();
|
||||
@@ -697,7 +727,7 @@ fn app_hotkey_handler<R: Runtime>(
|
||||
}
|
||||
|
||||
/// For all the applications, if it is enabled & has hotkey set, then set it up.
|
||||
pub(crate) fn set_apps_hotkey<R: Runtime>(tauri_app_handle: &AppHandle<R>) -> Result<(), String> {
|
||||
pub(crate) fn set_apps_hotkey(tauri_app_handle: &AppHandle) -> Result<(), String> {
|
||||
let app_hotkey_store = tauri_app_handle
|
||||
.store(TAURI_STORE_APP_HOTKEY)
|
||||
.unwrap_or_else(|_| panic!("store [{}] not found/loaded", TAURI_STORE_APP_HOTKEY));
|
||||
@@ -721,7 +751,7 @@ pub(crate) fn set_apps_hotkey<R: Runtime>(tauri_app_handle: &AppHandle<R>) -> Re
|
||||
}
|
||||
|
||||
/// For all the applications, if it is enabled & has hotkey set, then unset it.
|
||||
pub(crate) fn unset_apps_hotkey<R: Runtime>(tauri_app_handle: &AppHandle<R>) -> Result<(), String> {
|
||||
pub(crate) fn unset_apps_hotkey(tauri_app_handle: &AppHandle) -> Result<(), String> {
|
||||
let app_hotkey_store = tauri_app_handle
|
||||
.store(TAURI_STORE_APP_HOTKEY)
|
||||
.unwrap_or_else(|_| panic!("store [{}] not found/loaded", TAURI_STORE_APP_HOTKEY));
|
||||
@@ -748,8 +778,8 @@ pub(crate) fn unset_apps_hotkey<R: Runtime>(tauri_app_handle: &AppHandle<R>) ->
|
||||
}
|
||||
|
||||
/// Set the hotkey but won't persist this settings change.
|
||||
pub(crate) fn set_app_hotkey<R: Runtime>(
|
||||
tauri_app_handle: &AppHandle<R>,
|
||||
pub(crate) fn set_app_hotkey(
|
||||
tauri_app_handle: &AppHandle,
|
||||
app_path: &str,
|
||||
hotkey: &str,
|
||||
) -> Result<(), String> {
|
||||
@@ -759,8 +789,8 @@ pub(crate) fn set_app_hotkey<R: Runtime>(
|
||||
.map_err(|e| e.to_string())
|
||||
}
|
||||
|
||||
pub fn register_app_hotkey<R: Runtime>(
|
||||
tauri_app_handle: &AppHandle<R>,
|
||||
pub fn register_app_hotkey(
|
||||
tauri_app_handle: &AppHandle,
|
||||
app_path: &str,
|
||||
hotkey: &str,
|
||||
) -> Result<(), String> {
|
||||
@@ -777,10 +807,7 @@ pub fn register_app_hotkey<R: Runtime>(
|
||||
Ok(())
|
||||
}
|
||||
|
||||
pub fn unregister_app_hotkey<R: Runtime>(
|
||||
tauri_app_handle: &AppHandle<R>,
|
||||
app_path: &str,
|
||||
) -> Result<(), String> {
|
||||
pub fn unregister_app_hotkey(tauri_app_handle: &AppHandle, app_path: &str) -> Result<(), String> {
|
||||
let app_hotkey_store = tauri_app_handle
|
||||
.store(TAURI_STORE_APP_HOTKEY)
|
||||
.unwrap_or_else(|_| panic!("store [{}] not found/loaded", TAURI_STORE_APP_HOTKEY));
|
||||
@@ -807,7 +834,9 @@ pub fn unregister_app_hotkey<R: Runtime>(
|
||||
.global_shortcut()
|
||||
.is_registered(hotkey.as_str())
|
||||
{
|
||||
panic!("inconsistent state, tauri store a hotkey is stored in the tauri store but it is not registered");
|
||||
panic!(
|
||||
"inconsistent state, tauri store a hotkey is stored in the tauri store but it is not registered"
|
||||
);
|
||||
}
|
||||
|
||||
tauri_app_handle
|
||||
@@ -818,7 +847,7 @@ pub fn unregister_app_hotkey<R: Runtime>(
|
||||
Ok(())
|
||||
}
|
||||
|
||||
fn get_disabled_app_list<R: Runtime>(tauri_app_handle: &AppHandle<R>) -> Vec<String> {
|
||||
fn get_disabled_app_list(tauri_app_handle: &AppHandle) -> Vec<String> {
|
||||
let store = tauri_app_handle
|
||||
.store(TAURI_STORE_DISABLED_APP_LIST_AND_SEARCH_PATH)
|
||||
.unwrap_or_else(|_| {
|
||||
@@ -855,10 +884,7 @@ pub fn is_app_search_enabled(app_path: &str) -> bool {
|
||||
disabled_app_list.iter().all(|path| path != app_path)
|
||||
}
|
||||
|
||||
pub fn disable_app_search<R: Runtime>(
|
||||
tauri_app_handle: &AppHandle<R>,
|
||||
app_path: &str,
|
||||
) -> Result<(), String> {
|
||||
pub fn disable_app_search(tauri_app_handle: &AppHandle, app_path: &str) -> Result<(), String> {
|
||||
let store = tauri_app_handle
|
||||
.store(TAURI_STORE_DISABLED_APP_LIST_AND_SEARCH_PATH)
|
||||
.unwrap_or_else(|_| {
|
||||
@@ -902,10 +928,7 @@ pub fn disable_app_search<R: Runtime>(
|
||||
Ok(())
|
||||
}
|
||||
|
||||
pub fn enable_app_search<R: Runtime>(
|
||||
tauri_app_handle: &AppHandle<R>,
|
||||
app_path: &str,
|
||||
) -> Result<(), String> {
|
||||
pub fn enable_app_search(tauri_app_handle: &AppHandle, app_path: &str) -> Result<(), String> {
|
||||
let store = tauri_app_handle
|
||||
.store(TAURI_STORE_DISABLED_APP_LIST_AND_SEARCH_PATH)
|
||||
.unwrap_or_else(|_| {
|
||||
@@ -947,8 +970,8 @@ pub fn enable_app_search<R: Runtime>(
|
||||
}
|
||||
|
||||
#[tauri::command]
|
||||
pub async fn add_app_search_path<R: Runtime>(
|
||||
tauri_app_handle: AppHandle<R>,
|
||||
pub async fn add_app_search_path(
|
||||
tauri_app_handle: AppHandle,
|
||||
search_path: String,
|
||||
) -> Result<(), String> {
|
||||
let mut search_paths = get_app_search_path(tauri_app_handle.clone()).await;
|
||||
@@ -973,8 +996,8 @@ pub async fn add_app_search_path<R: Runtime>(
|
||||
}
|
||||
|
||||
#[tauri::command]
|
||||
pub async fn remove_app_search_path<R: Runtime>(
|
||||
tauri_app_handle: AppHandle<R>,
|
||||
pub async fn remove_app_search_path(
|
||||
tauri_app_handle: AppHandle,
|
||||
search_path: String,
|
||||
) -> Result<(), String> {
|
||||
let mut search_paths = get_app_search_path(tauri_app_handle.clone()).await;
|
||||
@@ -999,7 +1022,7 @@ pub async fn remove_app_search_path<R: Runtime>(
|
||||
}
|
||||
|
||||
#[tauri::command]
|
||||
pub async fn get_app_search_path<R: Runtime>(tauri_app_handle: AppHandle<R>) -> Vec<String> {
|
||||
pub async fn get_app_search_path(tauri_app_handle: AppHandle) -> Vec<String> {
|
||||
let store = tauri_app_handle
|
||||
.store(TAURI_STORE_DISABLED_APP_LIST_AND_SEARCH_PATH)
|
||||
.unwrap_or_else(|_| {
|
||||
@@ -1028,9 +1051,7 @@ pub async fn get_app_search_path<R: Runtime>(tauri_app_handle: AppHandle<R>) ->
|
||||
}
|
||||
|
||||
#[tauri::command]
|
||||
pub async fn get_app_list<R: Runtime>(
|
||||
tauri_app_handle: AppHandle<R>,
|
||||
) -> Result<Vec<Extension>, String> {
|
||||
pub async fn get_app_list(tauri_app_handle: AppHandle) -> Result<Vec<Extension>, String> {
|
||||
let search_paths = get_app_search_path(tauri_app_handle.clone()).await;
|
||||
let apps = list_app_in(search_paths)?;
|
||||
|
||||
@@ -1165,9 +1186,7 @@ pub async fn get_app_metadata(app_name: String, app_path: String) -> Result<AppM
|
||||
}
|
||||
|
||||
#[tauri::command]
|
||||
pub async fn reindex_applications<R: Runtime>(
|
||||
tauri_app_handle: AppHandle<R>,
|
||||
) -> Result<(), String> {
|
||||
pub async fn reindex_applications(tauri_app_handle: AppHandle) -> Result<(), String> {
|
||||
let (tx, rx) = tokio::sync::oneshot::channel();
|
||||
let reindex_applications_task = ReindexAllApplicationsTask {
|
||||
tauri_app_handle: tauri_app_handle.clone(),
|
||||
|
||||
@@ -5,16 +5,14 @@ use crate::common::search::{QueryResponse, QuerySource, SearchQuery};
|
||||
use crate::common::traits::SearchSource;
|
||||
use crate::extension::LOCAL_QUERY_SOURCE_TYPE;
|
||||
use async_trait::async_trait;
|
||||
use tauri::{AppHandle, Runtime};
|
||||
use tauri::AppHandle;
|
||||
|
||||
pub(crate) const QUERYSOURCE_ID_DATASOURCE_ID_DATASOURCE_NAME: &str = "Applications";
|
||||
|
||||
pub struct ApplicationSearchSource;
|
||||
|
||||
impl ApplicationSearchSource {
|
||||
pub async fn prepare_index_and_store<R: Runtime>(
|
||||
_app_handle: AppHandle<R>,
|
||||
) -> Result<(), String> {
|
||||
pub async fn prepare_index_and_store(_app_handle: AppHandle) -> Result<(), String> {
|
||||
Ok(())
|
||||
}
|
||||
}
|
||||
@@ -32,7 +30,11 @@ impl SearchSource for ApplicationSearchSource {
|
||||
}
|
||||
}
|
||||
|
||||
async fn search(&self, _query: SearchQuery) -> Result<QueryResponse, SearchError> {
|
||||
async fn search(
|
||||
&self,
|
||||
_tauri_app_handle: AppHandle,
|
||||
_query: SearchQuery,
|
||||
) -> Result<QueryResponse, SearchError> {
|
||||
Ok(QueryResponse {
|
||||
source: self.get_type(),
|
||||
hits: Vec::new(),
|
||||
@@ -41,37 +43,28 @@ impl SearchSource for ApplicationSearchSource {
|
||||
}
|
||||
}
|
||||
|
||||
pub fn set_app_alias<R: Runtime>(_tauri_app_handle: &AppHandle<R>, _app_path: &str, _alias: &str) {
|
||||
pub fn set_app_alias(_tauri_app_handle: &AppHandle, _app_path: &str, _alias: &str) {
|
||||
unreachable!("app list should be empty, there is no way this can be invoked")
|
||||
}
|
||||
|
||||
pub fn register_app_hotkey<R: Runtime>(
|
||||
_tauri_app_handle: &AppHandle<R>,
|
||||
pub fn register_app_hotkey(
|
||||
_tauri_app_handle: &AppHandle,
|
||||
_app_path: &str,
|
||||
_hotkey: &str,
|
||||
) -> Result<(), String> {
|
||||
unreachable!("app list should be empty, there is no way this can be invoked")
|
||||
}
|
||||
|
||||
pub fn unregister_app_hotkey<R: Runtime>(
|
||||
_tauri_app_handle: &AppHandle<R>,
|
||||
_app_path: &str,
|
||||
) -> Result<(), String> {
|
||||
pub fn unregister_app_hotkey(_tauri_app_handle: &AppHandle, _app_path: &str) -> Result<(), String> {
|
||||
unreachable!("app list should be empty, there is no way this can be invoked")
|
||||
}
|
||||
|
||||
pub fn disable_app_search<R: Runtime>(
|
||||
_tauri_app_handle: &AppHandle<R>,
|
||||
_app_path: &str,
|
||||
) -> Result<(), String> {
|
||||
pub fn disable_app_search(_tauri_app_handle: &AppHandle, _app_path: &str) -> Result<(), String> {
|
||||
// no-op
|
||||
Ok(())
|
||||
}
|
||||
|
||||
pub fn enable_app_search<R: Runtime>(
|
||||
_tauri_app_handle: &AppHandle<R>,
|
||||
_app_path: &str,
|
||||
) -> Result<(), String> {
|
||||
pub fn enable_app_search(_tauri_app_handle: &AppHandle, _app_path: &str) -> Result<(), String> {
|
||||
// no-op
|
||||
Ok(())
|
||||
}
|
||||
@@ -81,8 +74,8 @@ pub fn is_app_search_enabled(_app_path: &str) -> bool {
|
||||
}
|
||||
|
||||
#[tauri::command]
|
||||
pub async fn add_app_search_path<R: Runtime>(
|
||||
_tauri_app_handle: AppHandle<R>,
|
||||
pub async fn add_app_search_path(
|
||||
_tauri_app_handle: AppHandle,
|
||||
_search_path: String,
|
||||
) -> Result<(), String> {
|
||||
// no-op
|
||||
@@ -90,8 +83,8 @@ pub async fn add_app_search_path<R: Runtime>(
|
||||
}
|
||||
|
||||
#[tauri::command]
|
||||
pub async fn remove_app_search_path<R: Runtime>(
|
||||
_tauri_app_handle: AppHandle<R>,
|
||||
pub async fn remove_app_search_path(
|
||||
_tauri_app_handle: AppHandle,
|
||||
_search_path: String,
|
||||
) -> Result<(), String> {
|
||||
// no-op
|
||||
@@ -99,43 +92,37 @@ pub async fn remove_app_search_path<R: Runtime>(
|
||||
}
|
||||
|
||||
#[tauri::command]
|
||||
pub async fn get_app_search_path<R: Runtime>(_tauri_app_handle: AppHandle<R>) -> Vec<String> {
|
||||
pub async fn get_app_search_path(_tauri_app_handle: AppHandle) -> Vec<String> {
|
||||
// Return an empty list
|
||||
Vec::new()
|
||||
}
|
||||
|
||||
#[tauri::command]
|
||||
pub async fn get_app_list<R: Runtime>(
|
||||
_tauri_app_handle: AppHandle<R>,
|
||||
) -> Result<Vec<Extension>, String> {
|
||||
pub async fn get_app_list(_tauri_app_handle: AppHandle) -> Result<Vec<Extension>, String> {
|
||||
// Return an empty list
|
||||
Ok(Vec::new())
|
||||
}
|
||||
|
||||
#[tauri::command]
|
||||
pub async fn get_app_metadata<R: Runtime>(
|
||||
_tauri_app_handle: AppHandle<R>,
|
||||
pub async fn get_app_metadata(
|
||||
_tauri_app_handle: AppHandle,
|
||||
_app_path: String,
|
||||
) -> Result<AppMetadata, String> {
|
||||
unreachable!("app list should be empty, there is no way this can be invoked")
|
||||
}
|
||||
|
||||
pub(crate) fn set_apps_hotkey<R: Runtime>(_tauri_app_handle: &AppHandle<R>) -> Result<(), String> {
|
||||
pub(crate) fn set_apps_hotkey(_tauri_app_handle: &AppHandle) -> Result<(), String> {
|
||||
// no-op
|
||||
Ok(())
|
||||
}
|
||||
|
||||
pub(crate) fn unset_apps_hotkey<R: Runtime>(
|
||||
_tauri_app_handle: &AppHandle<R>,
|
||||
) -> Result<(), String> {
|
||||
pub(crate) fn unset_apps_hotkey(_tauri_app_handle: &AppHandle) -> Result<(), String> {
|
||||
// no-op
|
||||
Ok(())
|
||||
}
|
||||
|
||||
#[tauri::command]
|
||||
pub async fn reindex_applications<R: Runtime>(
|
||||
_tauri_app_handle: AppHandle<R>,
|
||||
) -> Result<(), String> {
|
||||
pub async fn reindex_applications(_tauri_app_handle: AppHandle) -> Result<(), String> {
|
||||
// no-op
|
||||
Ok(())
|
||||
}
|
||||
|
||||
@@ -10,6 +10,7 @@ use chinese_number::{ChineseCase, ChineseCountMethod, ChineseVariant, NumberToCh
|
||||
use num2words::Num2Words;
|
||||
use serde_json::Value;
|
||||
use std::collections::HashMap;
|
||||
use tauri::AppHandle;
|
||||
|
||||
pub(crate) const DATA_SOURCE_ID: &str = "Calculator";
|
||||
|
||||
@@ -120,7 +121,11 @@ impl SearchSource for CalculatorSource {
|
||||
}
|
||||
}
|
||||
|
||||
async fn search(&self, query: SearchQuery) -> Result<QueryResponse, SearchError> {
|
||||
async fn search(
|
||||
&self,
|
||||
_tauri_app_handle: AppHandle,
|
||||
query: SearchQuery,
|
||||
) -> Result<QueryResponse, SearchError> {
|
||||
let Some(query_string) = query.query_strings.get("query") else {
|
||||
return Ok(QueryResponse {
|
||||
source: self.get_type(),
|
||||
@@ -176,13 +181,11 @@ impl SearchSource for CalculatorSource {
|
||||
total_hits: 1,
|
||||
}
|
||||
}
|
||||
Err(_) => {
|
||||
QueryResponse {
|
||||
source: query_source,
|
||||
hits: Vec::new(),
|
||||
total_hits: 0,
|
||||
}
|
||||
}
|
||||
Err(_) => QueryResponse {
|
||||
source: query_source,
|
||||
hits: Vec::new(),
|
||||
total_hits: 0,
|
||||
},
|
||||
}
|
||||
};
|
||||
|
||||
|
||||
209
src-tauri/src/extension/built_in/file_search/config.rs
Normal file
209
src-tauri/src/extension/built_in/file_search/config.rs
Normal file
@@ -0,0 +1,209 @@
|
||||
//! File Search configuration entries definition and getter/setter functions.
|
||||
|
||||
use serde::Deserialize;
|
||||
use serde::Serialize;
|
||||
use serde_json::Value;
|
||||
use std::sync::LazyLock;
|
||||
use tauri::AppHandle;
|
||||
use tauri_plugin_store::StoreExt;
|
||||
|
||||
// Tauri store keys for file system configuration
const TAURI_STORE_FILE_SYSTEM_CONFIG: &str = "file_system_config";
const TAURI_STORE_KEY_SEARCH_BY: &str = "search_by";
const TAURI_STORE_KEY_SEARCH_PATHS: &str = "search_paths";
const TAURI_STORE_KEY_EXCLUDE_PATHS: &str = "exclude_paths";
const TAURI_STORE_KEY_FILE_TYPES: &str = "file_types";

/// The current user's home directory, resolved once on first use.
///
/// Panics when the home directory cannot be determined or is not valid
/// UTF-8; both are treated as unrecoverable misconfiguration.
static HOME_DIR: LazyLock<String> = LazyLock::new(|| {
    let os_string = dirs::home_dir()
        .expect("$HOME should be set")
        .into_os_string();
    os_string
        .into_string()
        .expect("User home directory should be encoded with UTF-8")
});
|
||||
|
||||
/// What a file search query is matched against.
#[derive(Debug, Clone, Serialize, Deserialize, Copy)]
pub enum SearchBy {
    /// Match file names only.
    Name,
    /// Match file names and file contents.
    NameAndContents,
}

/// User-configurable settings for the built-in file search extension.
#[derive(Debug, Clone, Serialize, Deserialize)]
pub struct FileSearchConfig {
    /// Directories to search in.
    pub search_paths: Vec<String>,
    /// Directories whose contents are dropped from the results.
    pub exclude_paths: Vec<String>,
    /// File extensions (without the leading dot) to keep; empty keeps all.
    pub file_types: Vec<String>,
    /// Whether to match names only, or names and contents.
    pub search_by: SearchBy,
}
|
||||
|
||||
impl Default for FileSearchConfig {
|
||||
fn default() -> Self {
|
||||
Self {
|
||||
search_paths: vec![
|
||||
format!("{}/Documents", HOME_DIR.as_str()),
|
||||
format!("{}/Desktop", HOME_DIR.as_str()),
|
||||
format!("{}/Downloads", HOME_DIR.as_str()),
|
||||
],
|
||||
exclude_paths: Vec::new(),
|
||||
file_types: Vec::new(),
|
||||
search_by: SearchBy::Name,
|
||||
}
|
||||
}
|
||||
}
|
||||
|
||||
impl FileSearchConfig {
|
||||
pub(crate) fn get(tauri_app_handle: &AppHandle) -> Self {
|
||||
let store = tauri_app_handle
|
||||
.store(TAURI_STORE_FILE_SYSTEM_CONFIG)
|
||||
.unwrap_or_else(|e| {
|
||||
panic!(
|
||||
"store [{}] not found/loaded, error [{}]",
|
||||
TAURI_STORE_FILE_SYSTEM_CONFIG, e
|
||||
)
|
||||
});
|
||||
|
||||
// Default value, will be used when specific config entries are not set
|
||||
let default_config = FileSearchConfig::default();
|
||||
|
||||
let search_paths = {
|
||||
if let Some(search_paths) = store.get(TAURI_STORE_KEY_SEARCH_PATHS) {
|
||||
match search_paths {
|
||||
Value::Array(arr) => {
|
||||
let mut vec = Vec::with_capacity(arr.len());
|
||||
for v in arr {
|
||||
match v {
|
||||
Value::String(s) => vec.push(s),
|
||||
other => panic!(
|
||||
"Expected all elements of 'search_paths' to be strings, but found: {:?}",
|
||||
other
|
||||
),
|
||||
}
|
||||
}
|
||||
vec
|
||||
}
|
||||
other => panic!(
|
||||
"Expected 'search_paths' to be an array of strings in the file system config store, but got: {:?}",
|
||||
other
|
||||
),
|
||||
}
|
||||
} else {
|
||||
store.set(
|
||||
TAURI_STORE_KEY_SEARCH_PATHS,
|
||||
default_config.search_paths.as_slice(),
|
||||
);
|
||||
default_config.search_paths
|
||||
}
|
||||
};
|
||||
|
||||
let exclude_paths = {
|
||||
if let Some(exclude_paths) = store.get(TAURI_STORE_KEY_EXCLUDE_PATHS) {
|
||||
match exclude_paths {
|
||||
Value::Array(arr) => {
|
||||
let mut vec = Vec::with_capacity(arr.len());
|
||||
for v in arr {
|
||||
match v {
|
||||
Value::String(s) => vec.push(s),
|
||||
other => panic!(
|
||||
"Expected all elements of 'exclude_paths' to be strings, but found: {:?}",
|
||||
other
|
||||
),
|
||||
}
|
||||
}
|
||||
vec
|
||||
}
|
||||
other => panic!(
|
||||
"Expected 'exclude_paths' to be an array of strings in the file system config store, but got: {:?}",
|
||||
other
|
||||
),
|
||||
}
|
||||
} else {
|
||||
store.set(
|
||||
TAURI_STORE_KEY_EXCLUDE_PATHS,
|
||||
default_config.exclude_paths.as_slice(),
|
||||
);
|
||||
default_config.exclude_paths
|
||||
}
|
||||
};
|
||||
|
||||
let file_types = {
|
||||
if let Some(file_types) = store.get(TAURI_STORE_KEY_FILE_TYPES) {
|
||||
match file_types {
|
||||
Value::Array(arr) => {
|
||||
let mut vec = Vec::with_capacity(arr.len());
|
||||
for v in arr {
|
||||
match v {
|
||||
Value::String(s) => vec.push(s),
|
||||
other => panic!(
|
||||
"Expected all elements of 'file_types' to be strings, but found: {:?}",
|
||||
other
|
||||
),
|
||||
}
|
||||
}
|
||||
vec
|
||||
}
|
||||
other => panic!(
|
||||
"Expected 'file_types' to be an array of strings in the file system config store, but got: {:?}",
|
||||
other
|
||||
),
|
||||
}
|
||||
} else {
|
||||
store.set(
|
||||
TAURI_STORE_KEY_FILE_TYPES,
|
||||
default_config.file_types.as_slice(),
|
||||
);
|
||||
default_config.file_types
|
||||
}
|
||||
};
|
||||
|
||||
let search_by = {
|
||||
if let Some(search_by) = store.get(TAURI_STORE_KEY_SEARCH_BY) {
|
||||
serde_json::from_value(search_by.clone()).unwrap_or_else(|e| {
|
||||
panic!(
|
||||
"Failed to deserialize 'search_by' from file system config store. Invalid JSON: {:?}, error: {}",
|
||||
search_by, e
|
||||
)
|
||||
})
|
||||
} else {
|
||||
store.set(
|
||||
TAURI_STORE_KEY_SEARCH_BY,
|
||||
serde_json::to_value(default_config.search_by).unwrap(),
|
||||
);
|
||||
default_config.search_by
|
||||
}
|
||||
};
|
||||
|
||||
Self {
|
||||
search_by,
|
||||
search_paths,
|
||||
exclude_paths,
|
||||
file_types,
|
||||
}
|
||||
}
|
||||
}
|
||||
|
||||
// Tauri commands for managing file system configuration

/// Return the current file search configuration, initializing any missing
/// store entries with their defaults (see `FileSearchConfig::get`).
#[tauri::command]
pub async fn get_file_system_config(tauri_app_handle: AppHandle) -> FileSearchConfig {
    FileSearchConfig::get(&tauri_app_handle)
}
|
||||
|
||||
#[tauri::command]
|
||||
pub async fn set_file_system_config(
|
||||
tauri_app_handle: AppHandle,
|
||||
config: FileSearchConfig,
|
||||
) -> Result<(), String> {
|
||||
let store = tauri_app_handle
|
||||
.store(TAURI_STORE_FILE_SYSTEM_CONFIG)
|
||||
.map_err(|e| e.to_string())?;
|
||||
|
||||
store.set(TAURI_STORE_KEY_SEARCH_PATHS, config.search_paths);
|
||||
store.set(TAURI_STORE_KEY_EXCLUDE_PATHS, config.exclude_paths);
|
||||
store.set(TAURI_STORE_KEY_FILE_TYPES, config.file_types);
|
||||
store.set(
|
||||
TAURI_STORE_KEY_SEARCH_BY,
|
||||
serde_json::to_value(config.search_by).unwrap(),
|
||||
);
|
||||
|
||||
Ok(())
|
||||
}
|
||||
@@ -0,0 +1,186 @@
|
||||
use super::super::EXTENSION_ID;
|
||||
use super::super::config::FileSearchConfig;
|
||||
use super::super::config::SearchBy;
|
||||
use crate::common::document::{DataSourceReference, Document};
|
||||
use crate::extension::LOCAL_QUERY_SOURCE_TYPE;
|
||||
use crate::extension::OnOpened;
|
||||
use crate::util::file::get_file_icon;
|
||||
use futures::stream::Stream;
|
||||
use futures::stream::StreamExt;
|
||||
use std::os::fd::OwnedFd;
|
||||
use std::path::Path;
|
||||
use tokio::io::AsyncBufReadExt;
|
||||
use tokio::io::BufReader;
|
||||
use tokio::process::Child;
|
||||
use tokio::process::Command;
|
||||
use tokio_stream::wrappers::LinesStream;
|
||||
|
||||
/// `mdfind` won't return scores, we use this score for all the documents.
///
/// Every macOS hit therefore carries the same constant relevance value.
const SCORE: f64 = 1.0;
|
||||
|
||||
/// Run a Spotlight (`mdfind`) query and convert each matched file path into a
/// `(Document, score)` pair.
///
/// * `query_string`: user query, matched per `config.search_by`.
/// * `from` / `size`: paging window, applied to the already-filtered stream.
/// * `config`: search roots, exclusions and file-type filter.
///
/// # Errors
///
/// Fails when the `mdfind` process cannot be spawned or killed, or when
/// reading its output fails.
pub(crate) async fn hits(
    query_string: &str,
    from: usize,
    size: usize,
    config: &FileSearchConfig,
) -> Result<Vec<(Document, f64)>, String> {
    let (mut iter, mut mdfind_child_process) =
        execute_mdfind_query(&query_string, from, size, &config)?;

    // Convert results to documents
    let mut hits: Vec<(Document, f64)> = Vec::new();
    while let Some(res_file_path) = iter.next().await {
        let file_path = res_file_path.map_err(|io_err| io_err.to_string())?;

        let icon = get_file_icon(file_path.clone()).await;
        let file_path_of_type_path = camino::Utf8Path::new(&file_path);
        // Results are expected to be full paths with both a parent directory
        // and a file name; a path without either indicates a broken result.
        let r#where = file_path_of_type_path
            .parent()
            .unwrap_or_else(|| {
                panic!(
                    "expect path [{}] to have a parent, but it does not",
                    file_path
                );
            })
            .to_string();

        let file_name = file_path_of_type_path.file_name().unwrap_or_else(|| {
            panic!(
                "expect path [{}] to have a file name, but it does not",
                file_path
            );
        });
        // Opening the hit opens the matched file itself.
        let on_opened = OnOpened::Document {
            url: file_path.clone(),
        };

        let doc = Document {
            id: file_path.clone(),
            title: Some(file_name.to_string()),
            source: Some(DataSourceReference {
                r#type: Some(LOCAL_QUERY_SOURCE_TYPE.into()),
                name: Some(EXTENSION_ID.into()),
                id: Some(EXTENSION_ID.into()),
                icon: Some(String::from("font_Filesearch")),
            }),
            // The parent directory is surfaced as the hit's category.
            category: Some(r#where),
            on_opened: Some(on_opened),
            url: Some(file_path),
            icon: Some(icon.to_string()),
            ..Default::default()
        };

        hits.push((doc, SCORE));
    }
    // Kill the mdfind process once we get the needed results to prevent zombie
    // processes.
    mdfind_child_process
        .kill()
        .await
        .map_err(|e| format!("{:?}", e))?;

    Ok(hits)
}
|
||||
|
||||
/// Return an array containing the `mdfind` command and its arguments.
|
||||
fn build_mdfind_query(query_string: &str, config: &FileSearchConfig) -> Vec<String> {
|
||||
let mut args = vec!["mdfind".to_string()];
|
||||
|
||||
match config.search_by {
|
||||
SearchBy::Name => {
|
||||
args.push(format!("kMDItemFSName == '*{}*'", query_string));
|
||||
}
|
||||
SearchBy::NameAndContents => {
|
||||
args.push(format!(
|
||||
"kMDItemFSName == '*{}*' || kMDItemTextContent == '{}'",
|
||||
query_string, query_string
|
||||
));
|
||||
}
|
||||
}
|
||||
|
||||
// Add search paths using -onlyin
|
||||
for path in &config.search_paths {
|
||||
if Path::new(path).exists() {
|
||||
args.extend_from_slice(&["-onlyin".to_string(), path.to_string()]);
|
||||
}
|
||||
}
|
||||
|
||||
args
|
||||
}
|
||||
|
||||
/// Spawn the `mdfind` child process and return an async iterator over its output,
/// allowing us to collect the results asynchronously.
///
/// # Return value:
///
/// * impl Stream: an async iterator that will yield the matched files
/// * Child: The handle to the mdfind process, we need to kill it once we
///   collect all the results to avoid zombie processes.
fn execute_mdfind_query(
    query_string: &str,
    from: usize,
    size: usize,
    config: &FileSearchConfig,
) -> Result<(impl Stream<Item = std::io::Result<String>>, Child), String> {
    let args = build_mdfind_query(query_string, &config);
    // Bridge a std pipe into tokio so the child's stdout can be consumed
    // asynchronously, line by line.
    let (rx, tx) = std::io::pipe().unwrap();
    let rx_owned = OwnedFd::from(rx);
    let async_rx = tokio::net::unix::pipe::Receiver::from_owned_fd(rx_owned).unwrap();
    let buffered_rx = BufReader::new(async_rx);
    let lines = LinesStream::new(buffered_rx.lines());

    // stderr is discarded; only the matched paths on stdout are of interest.
    let child = Command::new(&args[0])
        .args(&args[1..])
        .stdout(tx)
        .stderr(std::process::Stdio::null())
        .spawn()
        .map_err(|e| format!("Failed to spawn mdfind: {}", e))?;
    let config_clone = config.clone();
    // Filter BEFORE paging so that `from`/`size` index into the filtered
    // result stream rather than the raw mdfind output.
    let iter = lines
        .filter(move |res_path| {
            std::future::ready({
                match res_path {
                    Ok(path) => !should_be_filtered_out(&config_clone, path),
                    Err(_) => {
                        // Don't filter out Err() values
                        true
                    }
                }
            })
        })
        .skip(from)
        .take(size);

    Ok((iter, child))
}
|
||||
|
||||
/// If `file_path` should be removed from the search results given the filter
|
||||
/// conditions specified in `config`.
|
||||
fn should_be_filtered_out(config: &FileSearchConfig, file_path: &str) -> bool {
|
||||
let is_excluded = config
|
||||
.exclude_paths
|
||||
.iter()
|
||||
.any(|exclude_path| file_path.starts_with(exclude_path));
|
||||
|
||||
if is_excluded {
|
||||
return true;
|
||||
}
|
||||
|
||||
let matches_file_type = if config.file_types.is_empty() {
|
||||
true
|
||||
} else {
|
||||
let path_obj = camino::Utf8Path::new(&file_path);
|
||||
if let Some(extension) = path_obj.extension() {
|
||||
config
|
||||
.file_types
|
||||
.iter()
|
||||
.any(|file_type| file_type == extension)
|
||||
} else {
|
||||
// `config.file_types` is not empty, then the search results
|
||||
// should have extensions.
|
||||
false
|
||||
}
|
||||
};
|
||||
|
||||
!matches_file_type
|
||||
}
|
||||
@@ -0,0 +1,10 @@
|
||||
// Platform-specific backends; exactly one module is compiled per target OS.
#[cfg(target_os = "macos")]
mod macos;
#[cfg(target_os = "windows")]
mod windows;

// `hits()` function is platform-specific, export the corresponding impl.
#[cfg(target_os = "macos")]
pub(crate) use macos::hits;
#[cfg(target_os = "windows")]
pub(crate) use windows::hits;
|
||||
@@ -0,0 +1,751 @@
|
||||
//! # Credits
|
||||
//!
|
||||
//! https://github.com/IRONAGE-Park/rag-sample/blob/3f0ad8c8012026cd3a7e453d08f041609426cb91/src/native/windows.rs
|
||||
//! is the starting point of this implementation.
|
||||
|
||||
use super::super::EXTENSION_ID;
|
||||
use super::super::config::FileSearchConfig;
|
||||
use super::super::config::SearchBy;
|
||||
use crate::common::document::{DataSourceReference, Document};
|
||||
use crate::extension::LOCAL_QUERY_SOURCE_TYPE;
|
||||
use crate::extension::OnOpened;
|
||||
use crate::util::file::get_file_icon;
|
||||
use windows::{
|
||||
Win32::System::{
|
||||
Com::{CLSCTX_INPROC_SERVER, CoCreateInstance},
|
||||
Ole::{OleInitialize, OleUninitialize},
|
||||
Search::{
|
||||
DB_NULL_HCHAPTER, DBACCESSOR_ROWDATA, DBBINDING, DBMEMOWNER_CLIENTOWNED,
|
||||
DBPARAMIO_NOTPARAM, DBPART_VALUE, DBTYPE_WSTR, HACCESSOR, IAccessor, ICommand,
|
||||
ICommandText, IDBCreateCommand, IDBCreateSession, IDBInitialize, IDataInitialize,
|
||||
IRowset, MSDAINITIALIZE,
|
||||
},
|
||||
},
|
||||
core::{GUID, IUnknown, Interface, PWSTR, w},
|
||||
};
|
||||
|
||||
/// Owned version of `PWSTR` that holds the heap memory.
///
/// Use `as_pwstr()` to convert it to a raw pointer.
struct PwStrOwned(Vec<u16>);

impl PwStrOwned {
    /// # SAFETY
    ///
    /// The returned `PWSTR` is basically a raw pointer, it is only valid within the
    /// lifetime of `PwStrOwned`; it is also invalidated if the backing buffer
    /// is moved or reallocated.
    unsafe fn as_pwstr(&mut self) -> PWSTR {
        let raw_ptr = self.0.as_mut_ptr();
        PWSTR::from_raw(raw_ptr)
    }
}

/// Construct `PwStrOwned` from any `str`.
///
/// The string is re-encoded as UTF-16 and NUL-terminated, per the Win32
/// wide-string convention.
impl<S: AsRef<str> + ?Sized> From<&S> for PwStrOwned {
    fn from(value: &S) -> Self {
        let mut utf16_bytes = value.as_ref().encode_utf16().collect::<Vec<u16>>();
        utf16_bytes.push(0); // the tailing NULL

        PwStrOwned(utf16_bytes)
    }
}
|
||||
|
||||
/// Helper function to replace unsupported characters with whitespace.
///
/// Windows search will error out if it encounters these characters.
///
/// The complete list of unsupported characters is unknown and we don't know how
/// to escape them, so let's replace them.
fn query_string_cleanup(old: &str) -> String {
    // Characters Windows Search SQL cannot accept inside a string literal.
    const UNSUPPORTED_CHAR: [char; 2] = ['\'', '\n'];

    // Map each unsupported character to a space. `collect()` sizes the
    // String from the iterator's hint, so no manual pre-allocation needed.
    old.chars()
        .map(|c| {
            if UNSUPPORTED_CHAR.contains(&c) {
                ' '
            } else {
                c
            }
        })
        .collect()
}
|
||||
|
||||
/// Helper function to construct the Windows Search SQL.
|
||||
///
|
||||
/// Paging is not natively supported by windows Search SQL, it only supports `size`
|
||||
/// via the `TOP` keyword ("SELECT TOP {n} {columns}"). The SQL returned by this
|
||||
/// function will have `{n}` set to `from + size`, then we will manually implement
|
||||
/// paging.
|
||||
fn query_sql(query_string: &str, from: usize, size: usize, config: &FileSearchConfig) -> String {
|
||||
let top_n = from
|
||||
.checked_add(size)
|
||||
.expect("[from + size] cannot fit into an [usize]");
|
||||
|
||||
// System.ItemUrl is a column that contains the file path
|
||||
// example: "file:C:/Users/desktop.ini"
|
||||
//
|
||||
// System.Search.Rank is the relevance score
|
||||
let mut sql = format!(
|
||||
"SELECT TOP {} System.ItemUrl, System.Search.Rank FROM SystemIndex WHERE",
|
||||
top_n
|
||||
);
|
||||
|
||||
let query_string = query_string_cleanup(query_string);
|
||||
|
||||
let search_by_predicate = match config.search_by {
|
||||
SearchBy::Name => {
|
||||
// `contains(System.FileName, '{query_string}')` would be faster
|
||||
// because it uses inverted index, but that's not what we want
|
||||
// due to the limitation of tokenization. For example, suppose "Coco AI.rs"
|
||||
// will be tokenized to `["Coco", "AI", "rs"]`, then if users search
|
||||
// via `Co`, this file won't be returned because term `Co` does not
|
||||
// exist in the index.
|
||||
//
|
||||
// So we use wildcard instead even though it is slower.
|
||||
format!("(System.FileName LIKE '%{query_string}%')")
|
||||
}
|
||||
SearchBy::NameAndContents => {
|
||||
// Windows File Search does not support searching by file content.
|
||||
//
|
||||
// `CONTAINS('query_string')` would search all columns for `query_string`,
|
||||
// this is the closest solution we have.
|
||||
format!("((System.FileName LIKE '%{query_string}%') OR CONTAINS('{query_string}'))")
|
||||
}
|
||||
};
|
||||
|
||||
let search_paths_predicate: Option<String> = {
|
||||
if config.search_paths.is_empty() {
|
||||
None
|
||||
} else {
|
||||
let mut output = String::from("(");
|
||||
|
||||
for (idx, search_path) in config.search_paths.iter().enumerate() {
|
||||
if idx != 0 {
|
||||
output.push_str(" OR ");
|
||||
}
|
||||
|
||||
output.push_str("SCOPE = 'file:");
|
||||
output.push_str(&search_path);
|
||||
output.push('\'');
|
||||
}
|
||||
|
||||
output.push(')');
|
||||
|
||||
Some(output)
|
||||
}
|
||||
};
|
||||
|
||||
let exclude_paths_predicate: Option<String> = {
|
||||
if config.exclude_paths.is_empty() {
|
||||
None
|
||||
} else {
|
||||
let mut output = String::from("(");
|
||||
|
||||
for (idx, exclude_path) in config.exclude_paths.iter().enumerate() {
|
||||
if idx != 0 {
|
||||
output.push_str(" AND ");
|
||||
}
|
||||
|
||||
output.push_str("(NOT SCOPE = 'file:");
|
||||
output.push_str(&exclude_path);
|
||||
output.push('\'');
|
||||
output.push(')');
|
||||
}
|
||||
|
||||
output.push(')');
|
||||
|
||||
Some(output)
|
||||
}
|
||||
};
|
||||
|
||||
let file_types_predicate: Option<String> = {
|
||||
if config.file_types.is_empty() {
|
||||
None
|
||||
} else {
|
||||
let mut output = String::from("(");
|
||||
|
||||
for (idx, file_type) in config.file_types.iter().enumerate() {
|
||||
if idx != 0 {
|
||||
output.push_str(" OR ");
|
||||
}
|
||||
|
||||
// NOTE that this column contains a starting dot
|
||||
output.push_str("System.FileExtension = '.");
|
||||
output.push_str(&file_type);
|
||||
output.push('\'');
|
||||
}
|
||||
|
||||
output.push(')');
|
||||
|
||||
Some(output)
|
||||
}
|
||||
};
|
||||
|
||||
sql.push(' ');
|
||||
sql.push_str(search_by_predicate.as_str());
|
||||
if let Some(search_paths_predicate) = search_paths_predicate {
|
||||
sql.push_str(" AND ");
|
||||
sql.push_str(search_paths_predicate.as_str());
|
||||
}
|
||||
if let Some(exclude_paths_predicate) = exclude_paths_predicate {
|
||||
sql.push_str(" AND ");
|
||||
sql.push_str(exclude_paths_predicate.as_str());
|
||||
}
|
||||
if let Some(file_types_predicate) = file_types_predicate {
|
||||
sql.push_str(" AND ");
|
||||
sql.push_str(file_types_predicate.as_str());
|
||||
}
|
||||
|
||||
sql
|
||||
}
|
||||
|
||||
/// Default GUID for Search.CollatorDSO.1
///
/// This is `DBGUID_DEFAULT` ({C8B521FB-5CF3-11CE-ADE5-00AA0044773D}), the
/// dialect GUID passed to `ICommandText::SetCommandText`.
const DBGUID_DEFAULT: GUID = GUID {
    data1: 0xc8b521fb,
    data2: 0x5cf3,
    data3: 0x11ce,
    data4: [0xad, 0xe5, 0x00, 0xaa, 0x00, 0x44, 0x77, 0x3d],
};
|
||||
|
||||
/// Create an OLE DB accessor handle that reads the row column with ordinal
/// `index` as a wide string (`DBTYPE_WSTR`) into a client-owned buffer
/// (`cbMaxLen` = 512, matching the caller's read buffer).
///
/// # Safety
///
/// The returned handle must be released with `IAccessor::ReleaseAccessor`
/// after use, and must only be used with rowsets from the same provider.
unsafe fn create_accessor_handle(accessor: &IAccessor, index: usize) -> Result<HACCESSOR, String> {
    let bindings = DBBINDING {
        iOrdinal: index,
        obValue: 0,
        obStatus: 0,
        obLength: 0,
        dwPart: DBPART_VALUE.0 as u32,
        dwMemOwner: DBMEMOWNER_CLIENTOWNED.0 as u32,
        eParamIO: DBPARAMIO_NOTPARAM.0 as u32,
        cbMaxLen: 512,
        dwFlags: 0,
        wType: DBTYPE_WSTR.0 as u16,
        bPrecision: 0,
        bScale: 0,
        ..Default::default()
    };
    let mut status = 0;
    let mut accessor_handle = HACCESSOR::default();
    unsafe {
        accessor
            .CreateAccessor(
                DBACCESSOR_ROWDATA.0 as u32,
                1, // exactly one binding: the single column described above
                &bindings,
                0,
                &mut accessor_handle,
                Some(&mut status),
            )
            .map_err(|e| e.to_string())?;
    }

    Ok(accessor_handle)
}
|
||||
|
||||
/// Obtain an `IDBInitialize` for the Windows Search OLE DB data source
/// ("Search.CollatorDSO.1") via `IDataInitialize::GetDataSource`.
///
/// Errors are stringified HRESULT failures from COM.
fn create_db_initialize() -> Result<IDBInitialize, String> {
    unsafe {
        let data_init: IDataInitialize =
            CoCreateInstance(&MSDAINITIALIZE, None, CLSCTX_INPROC_SERVER)
                .map_err(|e| e.to_string())?;

        let mut unknown: Option<IUnknown> = None;
        data_init
            .GetDataSource(
                None,
                CLSCTX_INPROC_SERVER.0,
                w!("provider=Search.CollatorDSO.1;EXTENDED PROPERTIES=\"Application=Windows\""),
                &IDBInitialize::IID,
                &mut unknown as *mut _ as *mut _,
            )
            .map_err(|e| e.to_string())?;

        // GetDataSource succeeded, so `unknown` is expected to be populated.
        Ok(unknown.unwrap().cast().map_err(|e| e.to_string())?)
    }
}
|
||||
|
||||
/// Walk the OLE DB object chain (data source -> session -> command factory)
/// and create an `ICommandText` that will carry the SQL query.
///
/// `db_init` must already be initialized by the caller.
fn create_command(db_init: IDBInitialize) -> Result<ICommandText, String> {
    unsafe {
        let db_create_session: IDBCreateSession = db_init.cast().map_err(|e| e.to_string())?;
        let session: IUnknown = db_create_session
            .CreateSession(None, &IUnknown::IID)
            .map_err(|e| e.to_string())?;
        let db_create_command: IDBCreateCommand = session.cast().map_err(|e| e.to_string())?;
        Ok(db_create_command
            .CreateCommand(None, &ICommand::IID)
            .map_err(|e| e.to_string())?
            .cast()
            .map_err(|e| e.to_string())?)
    }
}
|
||||
|
||||
/// Execute `sql_query` against the Windows Search OLE DB provider and collect
/// every returned row as a pair of strings.
///
/// Each pair corresponds to the two columns selected by `query_sql()`
/// (`System.ItemUrl`, `System.Search.Rank`), in that order.
///
/// NOTE(review): assumes COM/OLE is initialized on the calling thread (see
/// the `OleInitialize` import) — confirm the caller guarantees this.
fn execute_windows_search_sql(sql_query: &str) -> Result<Vec<(String, String)>, String> {
    unsafe {
        let mut pwstr_owned_sql = PwStrOwned::from(sql_query);
        // SAFETY: pwstr_owned_sql will live for the whole lifetime of this function.
        let sql_query = pwstr_owned_sql.as_pwstr();

        let db_init = create_db_initialize()?;
        db_init.Initialize().map_err(|e| e.to_string())?;
        let command = create_command(db_init)?;

        // Set the command text
        command
            .SetCommandText(&DBGUID_DEFAULT, sql_query)
            .map_err(|e| e.to_string())?;

        // Execute the command
        let mut rowset: Option<IRowset> = None;
        command
            .Execute(
                None,
                &IRowset::IID,
                None,
                None,
                Some(&mut rowset as *mut _ as *mut _),
            )
            .map_err(|e| e.to_string())?;
        let rowset = rowset.ok_or_else(|| {
            format!(
                "No rowset returned for query: {}",
                // SAFETY: the raw pointer is not dangling
                sql_query
                    .to_string()
                    .expect("the conversion should work as `sql_query` was created from a String",)
            )
        })?;

        let accessor: IAccessor = rowset
            .cast()
            .map_err(|e| format!("Failed to cast to IAccessor: {}", e.to_string()))?;

        let mut output = Vec::new();
        let mut count = 0; // number of rows consumed so far (for error context)
        loop {
            // Fetch rows one at a time; stop on provider error or exhaustion.
            let mut rows_fetched = 0;
            let mut row_handles = [std::ptr::null_mut(); 1];
            let result = rowset.GetNextRows(
                DB_NULL_HCHAPTER as usize,
                0,
                &mut rows_fetched,
                &mut row_handles,
            );
            if result.is_err() {
                break;
            }
            if rows_fetched == 0 {
                break;
            }

            let mut data = Vec::new();

            // Read the two selected columns; accessor ordinals start at 1,
            // hence `i + 1`.
            for i in 0..2 {
                let mut item_name = [0u16; 512];

                let accessor_handle = create_accessor_handle(&accessor, i + 1)?;
                rowset
                    .GetData(
                        *row_handles[0],
                        accessor_handle,
                        item_name.as_mut_ptr() as *mut _,
                    )
                    .map_err(|e| {
                        format!(
                            "Failed to get data at count {}, index {}: {}",
                            count,
                            i,
                            e.to_string()
                        )
                    })?;
                let name = String::from_utf16_lossy(&item_name);
                // Remove null characters
                data.push(name.trim_end_matches('\u{0000}').to_string());

                accessor
                    .ReleaseAccessor(accessor_handle, None)
                    .map_err(|e| {
                        format!(
                            "Failed to release accessor at count {}, index {}: {}",
                            count,
                            i,
                            e.to_string()
                        )
                    })?;
            }

            output.push((data[0].clone(), data[1].clone()));

            count += 1;
            // Release the row handle so the provider can reclaim it.
            rowset
                .ReleaseRows(
                    1,
                    row_handles[0],
                    std::ptr::null_mut(),
                    std::ptr::null_mut(),
                    std::ptr::null_mut(),
                )
                .map_err(|e| {
                    format!(
                        "Failed to release rows at count {}: {}",
                        count,
                        e.to_string()
                    )
                })?;
        }

        Ok(output)
    }
}
|
||||
|
||||
pub(crate) async fn hits(
|
||||
query_string: &str,
|
||||
from: usize,
|
||||
size: usize,
|
||||
config: &FileSearchConfig,
|
||||
) -> Result<Vec<(Document, f64)>, String> {
|
||||
let sql = query_sql(query_string, from, size, config);
|
||||
unsafe { OleInitialize(None).map_err(|e| e.to_string())? };
|
||||
let result = execute_windows_search_sql(&sql)?;
|
||||
unsafe { OleUninitialize() };
|
||||
// .take(size) is not needed as `result` will contain `from+size` files at most
|
||||
let result_with_paging = result.into_iter().skip(from);
|
||||
// result_with_paging won't contain more than `size` entries
|
||||
let mut hits = Vec::with_capacity(size);
|
||||
|
||||
const ITEM_URL_PREFIX: &str = "file:";
|
||||
const ITEM_URL_PREFIX_LEN: usize = ITEM_URL_PREFIX.len();
|
||||
for (item_url, score_str) in result_with_paging {
|
||||
// path returned from Windows Search contains a prefix, we need to trim it.
|
||||
//
|
||||
// "file:C:/Users/desktop.ini" => "C:/Users/desktop.ini"
|
||||
let file_path = &item_url[ITEM_URL_PREFIX_LEN..];
|
||||
|
||||
let icon = get_file_icon(file_path.to_string()).await;
|
||||
let file_path_of_type_path = camino::Utf8Path::new(&file_path);
|
||||
let r#where = file_path_of_type_path
|
||||
.parent()
|
||||
.unwrap_or_else(|| {
|
||||
panic!(
|
||||
"expect path [{}] to have a parent, but it does not",
|
||||
file_path
|
||||
);
|
||||
})
|
||||
.to_string();
|
||||
|
||||
let file_name = file_path_of_type_path.file_name().unwrap_or_else(|| {
|
||||
panic!(
|
||||
"expect path [{}] to have a file name, but it does not",
|
||||
file_path
|
||||
);
|
||||
});
|
||||
let on_opened = OnOpened::Document {
|
||||
url: file_path.to_string(),
|
||||
};
|
||||
|
||||
let doc = Document {
|
||||
id: file_path.to_string(),
|
||||
title: Some(file_name.to_string()),
|
||||
source: Some(DataSourceReference {
|
||||
r#type: Some(LOCAL_QUERY_SOURCE_TYPE.into()),
|
||||
name: Some(EXTENSION_ID.into()),
|
||||
id: Some(EXTENSION_ID.into()),
|
||||
icon: Some(String::from("font_Filesearch")),
|
||||
}),
|
||||
category: Some(r#where),
|
||||
on_opened: Some(on_opened),
|
||||
url: Some(file_path.into()),
|
||||
icon: Some(icon.to_string()),
|
||||
..Default::default()
|
||||
};
|
||||
|
||||
let score: f64 = score_str.parse().expect(
|
||||
"System.Search.Rank should be in range [0, 1000], which should be valid for [f64]",
|
||||
);
|
||||
|
||||
hits.push((doc, score));
|
||||
}
|
||||
|
||||
Ok(hits)
|
||||
}
|
||||
|
||||
// Skip these tests in our CI, they fail with the following error
|
||||
// "SQL is invalid: "0x80041820""
|
||||
//
|
||||
// I have no idea about the underlying root cause
|
||||
// Skip these tests in our CI, they fail with the following error
// "SQL is invalid: "0x80041820""
//
// I have no idea about the underlying root cause
#[cfg(all(test, not(ci)))]
mod test_windows_search {
    use super::*;

    /// Helper function for ensuring `sql` is valid SQL by actually executing it.
    // Note: these tests exercise the real Windows Search service, so they are
    // integration tests that require an indexed Windows machine to pass.
    fn ensure_it_is_valid_sql(sql: &str) {
        unsafe { OleInitialize(None).unwrap() };
        execute_windows_search_sql(&sql).expect("SQL is invalid");
        unsafe { OleUninitialize() };
    }

    // Baseline: no paths/types configured, name-only search.
    #[test]
    fn test_query_sql_empty_config_search_by_name() {
        let config = FileSearchConfig {
            search_paths: Vec::new(),
            exclude_paths: Vec::new(),
            file_types: Vec::new(),
            search_by: SearchBy::Name,
        };
        let sql = query_sql("coco", 0, 10, &config);

        assert_eq!(
            sql,
            "SELECT TOP 10 System.ItemUrl, System.Search.Rank FROM SystemIndex WHERE (System.FileName LIKE '%coco%')"
        );
        ensure_it_is_valid_sql(&sql);
    }

    // NameAndContents adds a CONTAINS() full-text clause OR-ed with the name match.
    #[test]
    fn test_query_sql_empty_config_search_by_name_and_content() {
        let config = FileSearchConfig {
            search_paths: Vec::new(),
            exclude_paths: Vec::new(),
            file_types: Vec::new(),
            search_by: SearchBy::NameAndContents,
        };
        let sql = query_sql("coco", 0, 10, &config);

        assert_eq!(
            sql,
            "SELECT TOP 10 System.ItemUrl, System.Search.Rank FROM SystemIndex WHERE ((System.FileName LIKE '%coco%') OR CONTAINS('coco'))"
        );
        ensure_it_is_valid_sql(&sql);
    }

    // A single search path becomes one SCOPE predicate with a "file:" prefix.
    #[test]
    fn test_query_sql_with_search_paths() {
        let config = FileSearchConfig {
            search_paths: vec!["C:/Users/".into()],
            exclude_paths: Vec::new(),
            file_types: Vec::new(),
            search_by: SearchBy::Name,
        };
        let sql = query_sql("coco", 0, 10, &config);

        assert_eq!(
            sql,
            "SELECT TOP 10 System.ItemUrl, System.Search.Rank FROM SystemIndex WHERE (System.FileName LIKE '%coco%') AND (SCOPE = 'file:C:/Users/')"
        );
        ensure_it_is_valid_sql(&sql);
    }

    // Multiple search paths are OR-ed together.
    #[test]
    fn test_query_sql_with_multiple_search_paths() {
        let config = FileSearchConfig {
            search_paths: vec![
                "C:/Users/".into(),
                "D:/Projects/".into(),
                "E:/Documents/".into(),
            ],
            exclude_paths: Vec::new(),
            file_types: Vec::new(),
            search_by: SearchBy::Name,
        };
        let sql = query_sql("test", 0, 5, &config);

        assert_eq!(
            sql,
            "SELECT TOP 5 System.ItemUrl, System.Search.Rank FROM SystemIndex WHERE (System.FileName LIKE '%test%') AND (SCOPE = 'file:C:/Users/' OR SCOPE = 'file:D:/Projects/' OR SCOPE = 'file:E:/Documents/')"
        );
        ensure_it_is_valid_sql(&sql);
    }

    // Exclude paths become NOT SCOPE predicates.
    #[test]
    fn test_query_sql_with_exclude_paths() {
        let config = FileSearchConfig {
            search_paths: Vec::new(),
            exclude_paths: vec!["C:/Windows/".into()],
            file_types: Vec::new(),
            search_by: SearchBy::Name,
        };
        let sql = query_sql("file", 0, 20, &config);

        assert_eq!(
            sql,
            "SELECT TOP 20 System.ItemUrl, System.Search.Rank FROM SystemIndex WHERE (System.FileName LIKE '%file%') AND ((NOT SCOPE = 'file:C:/Windows/'))"
        );
        ensure_it_is_valid_sql(&sql);
    }

    // Multiple exclude paths are AND-ed (a file must be outside all of them).
    // Also checks TOP = from + size (5 + 15 = 20).
    #[test]
    fn test_query_sql_with_multiple_exclude_paths() {
        let config = FileSearchConfig {
            search_paths: Vec::new(),
            exclude_paths: vec!["C:/Windows/".into(), "C:/System/".into(), "C:/Temp/".into()],
            file_types: Vec::new(),
            search_by: SearchBy::Name,
        };
        let sql = query_sql("data", 5, 15, &config);

        assert_eq!(
            sql,
            "SELECT TOP 20 System.ItemUrl, System.Search.Rank FROM SystemIndex WHERE (System.FileName LIKE '%data%') AND ((NOT SCOPE = 'file:C:/Windows/') AND (NOT SCOPE = 'file:C:/System/') AND (NOT SCOPE = 'file:C:/Temp/'))"
        );
        ensure_it_is_valid_sql(&sql);
    }

    // File types are matched via System.FileExtension with a leading dot added.
    #[test]
    fn test_query_sql_with_file_types() {
        let config = FileSearchConfig {
            search_paths: Vec::new(),
            exclude_paths: Vec::new(),
            file_types: vec!["txt".into()],
            search_by: SearchBy::Name,
        };
        let sql = query_sql("readme", 0, 10, &config);

        assert_eq!(
            sql,
            "SELECT TOP 10 System.ItemUrl, System.Search.Rank FROM SystemIndex WHERE (System.FileName LIKE '%readme%') AND (System.FileExtension = '.txt')"
        );
        ensure_it_is_valid_sql(&sql);
    }

    // Multiple file types are OR-ed together.
    #[test]
    fn test_query_sql_with_multiple_file_types() {
        let config = FileSearchConfig {
            search_paths: Vec::new(),
            exclude_paths: Vec::new(),
            file_types: vec!["rs".into(), "toml".into(), "md".into(), "json".into()],
            search_by: SearchBy::Name,
        };
        let sql = query_sql("config", 0, 50, &config);

        assert_eq!(
            sql,
            "SELECT TOP 50 System.ItemUrl, System.Search.Rank FROM SystemIndex WHERE (System.FileName LIKE '%config%') AND (System.FileExtension = '.rs' OR System.FileExtension = '.toml' OR System.FileExtension = '.md' OR System.FileExtension = '.json')"
        );
        ensure_it_is_valid_sql(&sql);
    }

    // All clause kinds combined, in the fixed order:
    // name match, search paths, exclude paths, file types.
    #[test]
    fn test_query_sql_all_fields_combined() {
        let config = FileSearchConfig {
            search_paths: vec!["C:/Projects/".into(), "D:/Code/".into()],
            exclude_paths: vec!["C:/Projects/temp/".into()],
            file_types: vec!["rs".into(), "ts".into()],
            search_by: SearchBy::Name,
        };
        let sql = query_sql("main", 10, 25, &config);

        assert_eq!(
            sql,
            "SELECT TOP 35 System.ItemUrl, System.Search.Rank FROM SystemIndex WHERE (System.FileName LIKE '%main%') AND (SCOPE = 'file:C:/Projects/' OR SCOPE = 'file:D:/Code/') AND ((NOT SCOPE = 'file:C:/Projects/temp/')) AND (System.FileExtension = '.rs' OR System.FileExtension = '.ts')"
        );
        ensure_it_is_valid_sql(&sql);
    }

    // Spaces, hyphens and '+' pass through unescaped into the SQL.
    #[test]
    fn test_query_sql_with_special_characters() {
        let config = FileSearchConfig {
            search_paths: vec!["C:/Users/John Doe/".into()],
            exclude_paths: Vec::new(),
            file_types: vec!["c++".into()],
            search_by: SearchBy::Name,
        };
        let sql = query_sql("hello-world", 0, 10, &config);

        assert_eq!(
            sql,
            "SELECT TOP 10 System.ItemUrl, System.Search.Rank FROM SystemIndex WHERE (System.FileName LIKE '%hello-world%') AND (SCOPE = 'file:C:/Users/John Doe/') AND (System.FileExtension = '.c++')"
        );
        ensure_it_is_valid_sql(&sql);
    }

    // Large offsets simply grow TOP (100 + 50 = 150); paging is done client-side.
    #[test]
    fn test_query_sql_edge_case_large_offset() {
        let config = FileSearchConfig {
            search_paths: Vec::new(),
            exclude_paths: Vec::new(),
            file_types: Vec::new(),
            search_by: SearchBy::Name,
        };
        let sql = query_sql("test", 100, 50, &config);

        assert_eq!(
            sql,
            "SELECT TOP 150 System.ItemUrl, System.Search.Rank FROM SystemIndex WHERE (System.FileName LIKE '%test%')"
        );
        ensure_it_is_valid_sql(&sql);
    }
}
|
||||
|
||||
// Unit tests for `query_string_cleanup`: single quotes and newlines are the
// two characters Windows Search SQL cannot accept inside a literal, and the
// tests below document that each occurrence is replaced with a single space
// while all other characters (including unicode) pass through unchanged.
#[cfg(test)]
mod test {
    use super::*;

    #[test]
    fn test_query_string_cleanup_no_unsupported_chars() {
        let input = "hello world";
        let result = query_string_cleanup(input);
        assert_eq!(result, input);
    }

    #[test]
    fn test_query_string_cleanup_single_quote() {
        let input = "don't worry";
        let result = query_string_cleanup(input);
        assert_eq!(result, "don t worry");
    }

    #[test]
    fn test_query_string_cleanup_newline() {
        let input = "line1\nline2";
        let result = query_string_cleanup(input);
        assert_eq!(result, "line1 line2");
    }

    #[test]
    fn test_query_string_cleanup_both_unsupported_chars() {
        let input = "don't\nworry";
        let result = query_string_cleanup(input);
        assert_eq!(result, "don t worry");
    }

    #[test]
    fn test_query_string_cleanup_multiple_single_quotes() {
        let input = "it's a 'test' string";
        let result = query_string_cleanup(input);
        assert_eq!(result, "it s a test string");
    }

    // Consecutive unsupported characters each become a space; they are not
    // collapsed into one.
    #[test]
    fn test_query_string_cleanup_multiple_newlines() {
        let input = "line1\n\nline2\nline3";
        let result = query_string_cleanup(input);
        assert_eq!(result, "line1 line2 line3");
    }

    #[test]
    fn test_query_string_cleanup_empty_string() {
        let input = "";
        let result = query_string_cleanup(input);
        assert_eq!(result, input);
    }

    // An input made only of unsupported characters maps to spaces (one per char).
    #[test]
    fn test_query_string_cleanup_only_unsupported_chars() {
        let input = "'\n'";
        let result = query_string_cleanup(input);
        assert_eq!(result, " ");
    }

    // Non-ASCII characters must survive the cleanup untouched.
    #[test]
    fn test_query_string_cleanup_unicode_characters() {
        let input = "héllo wörld's\nfile";
        let result = query_string_cleanup(input);
        assert_eq!(result, "héllo wörld s file");
    }

    // Only ' and \n are rewritten; other punctuation is preserved verbatim.
    #[test]
    fn test_query_string_cleanup_special_chars_preserved() {
        let input = "test@file#name$with%symbols";
        let result = query_string_cleanup(input);
        assert_eq!(result, input);
    }
}
|
||||
97
src-tauri/src/extension/built_in/file_search/mod.rs
Normal file
97
src-tauri/src/extension/built_in/file_search/mod.rs
Normal file
@@ -0,0 +1,97 @@
|
||||
pub(crate) mod config;
|
||||
pub(crate) mod implementation;
|
||||
|
||||
use super::super::LOCAL_QUERY_SOURCE_TYPE;
|
||||
use crate::common::{
|
||||
error::SearchError,
|
||||
search::{QueryResponse, QuerySource, SearchQuery},
|
||||
traits::SearchSource,
|
||||
};
|
||||
use async_trait::async_trait;
|
||||
use config::FileSearchConfig;
|
||||
use hostname;
|
||||
use tauri::AppHandle;
|
||||
|
||||
// Identifier of the built-in File Search extension; also used as its
// data source name/id when building search results.
pub(crate) const EXTENSION_ID: &str = "File Search";

/// JSON file for this extension.
// Default `plugin.json` template written to disk when the on-disk copy is
// missing or invalid.
pub(crate) const PLUGIN_JSON_FILE: &str = r#"
{
"id": "File Search",
"name": "File Search",
"platforms": ["macos", "windows"],
"description": "Search files on your system",
"icon": "font_Filesearch",
"type": "extension"
}
"#;
|
||||
|
||||
pub struct FileSearchExtensionSearchSource;
|
||||
|
||||
#[async_trait]
|
||||
impl SearchSource for FileSearchExtensionSearchSource {
|
||||
fn get_type(&self) -> QuerySource {
|
||||
QuerySource {
|
||||
r#type: LOCAL_QUERY_SOURCE_TYPE.into(),
|
||||
name: hostname::get()
|
||||
.unwrap_or(EXTENSION_ID.into())
|
||||
.to_string_lossy()
|
||||
.into(),
|
||||
id: EXTENSION_ID.into(),
|
||||
}
|
||||
}
|
||||
|
||||
async fn search(
|
||||
&self,
|
||||
tauri_app_handle: AppHandle,
|
||||
query: SearchQuery,
|
||||
) -> Result<QueryResponse, SearchError> {
|
||||
let Some(query_string) = query.query_strings.get("query") else {
|
||||
return Ok(QueryResponse {
|
||||
source: self.get_type(),
|
||||
hits: Vec::new(),
|
||||
total_hits: 0,
|
||||
});
|
||||
};
|
||||
let from = usize::try_from(query.from).expect("from too big");
|
||||
let size = usize::try_from(query.size).expect("size too big");
|
||||
|
||||
let query_string = query_string.trim();
|
||||
if query_string.is_empty() {
|
||||
return Ok(QueryResponse {
|
||||
source: self.get_type(),
|
||||
hits: Vec::new(),
|
||||
total_hits: 0,
|
||||
});
|
||||
}
|
||||
|
||||
// Get configuration from tauri store
|
||||
let config = FileSearchConfig::get(&tauri_app_handle);
|
||||
|
||||
// If search paths are empty, then the hit should be empty.
|
||||
//
|
||||
// Without this, empty search paths will result in a mdfind that has no `-onlyin`
|
||||
// option, which will in turn query the whole disk volume.
|
||||
if config.search_paths.is_empty() {
|
||||
return Ok(QueryResponse {
|
||||
source: self.get_type(),
|
||||
hits: Vec::new(),
|
||||
total_hits: 0,
|
||||
});
|
||||
}
|
||||
|
||||
// Execute search in a blocking task
|
||||
let query_source = self.get_type();
|
||||
|
||||
let hits = implementation::hits(&query_string, from, size, &config)
|
||||
.await
|
||||
.map_err(SearchError::InternalError)?;
|
||||
|
||||
let total_hits = hits.len();
|
||||
Ok(QueryResponse {
|
||||
source: query_source,
|
||||
hits,
|
||||
total_hits,
|
||||
})
|
||||
}
|
||||
}
|
||||
@@ -1 +0,0 @@
|
||||
|
||||
@@ -3,34 +3,29 @@
|
||||
pub mod ai_overview;
|
||||
pub mod application;
|
||||
pub mod calculator;
|
||||
pub mod file_system;
|
||||
#[cfg(any(target_os = "macos", target_os = "windows"))]
|
||||
pub mod file_search;
|
||||
pub mod pizza_engine_runtime;
|
||||
pub mod quick_ai_access;
|
||||
|
||||
use super::Extension;
|
||||
use crate::SearchSourceRegistry;
|
||||
use crate::extension::built_in::application::{set_apps_hotkey, unset_apps_hotkey};
|
||||
use crate::extension::{
|
||||
alter_extension_json_file, ExtensionBundleIdBorrowed, PLUGIN_JSON_FILE_NAME,
|
||||
ExtensionBundleIdBorrowed, PLUGIN_JSON_FILE_NAME, alter_extension_json_file,
|
||||
};
|
||||
use crate::{SearchSourceRegistry, GLOBAL_TAURI_APP_HANDLE};
|
||||
use anyhow::Context;
|
||||
use std::path::{Path, PathBuf};
|
||||
use std::sync::LazyLock;
|
||||
use tauri::{AppHandle, Manager, Runtime};
|
||||
use tauri::{AppHandle, Manager};
|
||||
|
||||
pub(crate) static BUILT_IN_EXTENSION_DIRECTORY: LazyLock<PathBuf> = LazyLock::new(|| {
|
||||
let mut resource_dir = GLOBAL_TAURI_APP_HANDLE
|
||||
.get()
|
||||
.expect("global tauri app handle not set")
|
||||
.path()
|
||||
.app_data_dir()
|
||||
.expect(
|
||||
"User home directory not found, which should be impossible on desktop environments",
|
||||
);
|
||||
pub(crate) fn get_built_in_extension_directory(tauri_app_handle: &AppHandle) -> PathBuf {
|
||||
let mut resource_dir = tauri_app_handle.path().app_data_dir().expect(
|
||||
"User home directory not found, which should be impossible on desktop environments",
|
||||
);
|
||||
resource_dir.push("built_in_extensions");
|
||||
|
||||
resource_dir
|
||||
});
|
||||
}
|
||||
|
||||
/// Helper function to load the built-in extension specified by `extension_id`, used
|
||||
/// in `list_built_in_extensions()`.
|
||||
@@ -85,7 +80,10 @@ async fn load_built_in_extension(
|
||||
.map_err(|e| e.to_string())?;
|
||||
let res_plugin_json = serde_json::from_str::<Extension>(&plugin_json_file_content);
|
||||
let Ok(plugin_json) = res_plugin_json else {
|
||||
log::warn!("user invalidated built-in extension [{}] file, overwriting it with the default template", extension_id);
|
||||
log::warn!(
|
||||
"user invalidated built-in extension [{}] file, overwriting it with the default template",
|
||||
extension_id
|
||||
);
|
||||
|
||||
// If the JSON file cannot be parsed as `struct Extension`, overwrite it with the default template and return.
|
||||
tokio::fs::write(plugin_json_file_path, default_plugin_json_file)
|
||||
@@ -136,13 +134,15 @@ async fn load_built_in_extension(
|
||||
/// We only read alias/hotkey/enabled from the JSON file, we have ensured that if
|
||||
/// alias/hotkey is not supported, then it will be `None`. Besides that, no further
|
||||
/// validation is needed because nothing could go wrong.
|
||||
pub(crate) async fn list_built_in_extensions() -> Result<Vec<Extension>, String> {
|
||||
let dir = BUILT_IN_EXTENSION_DIRECTORY.as_path();
|
||||
pub(crate) async fn list_built_in_extensions(
|
||||
tauri_app_handle: &AppHandle,
|
||||
) -> Result<Vec<Extension>, String> {
|
||||
let dir = get_built_in_extension_directory(tauri_app_handle);
|
||||
|
||||
let mut built_in_extensions = Vec::new();
|
||||
built_in_extensions.push(
|
||||
load_built_in_extension(
|
||||
dir,
|
||||
&dir,
|
||||
application::QUERYSOURCE_ID_DATASOURCE_ID_DATASOURCE_NAME,
|
||||
application::PLUGIN_JSON_FILE,
|
||||
)
|
||||
@@ -150,7 +150,7 @@ pub(crate) async fn list_built_in_extensions() -> Result<Vec<Extension>, String>
|
||||
);
|
||||
built_in_extensions.push(
|
||||
load_built_in_extension(
|
||||
dir,
|
||||
&dir,
|
||||
calculator::DATA_SOURCE_ID,
|
||||
calculator::PLUGIN_JSON_FILE,
|
||||
)
|
||||
@@ -158,7 +158,7 @@ pub(crate) async fn list_built_in_extensions() -> Result<Vec<Extension>, String>
|
||||
);
|
||||
built_in_extensions.push(
|
||||
load_built_in_extension(
|
||||
dir,
|
||||
&dir,
|
||||
ai_overview::EXTENSION_ID,
|
||||
ai_overview::PLUGIN_JSON_FILE,
|
||||
)
|
||||
@@ -166,22 +166,35 @@ pub(crate) async fn list_built_in_extensions() -> Result<Vec<Extension>, String>
|
||||
);
|
||||
built_in_extensions.push(
|
||||
load_built_in_extension(
|
||||
dir,
|
||||
&dir,
|
||||
quick_ai_access::EXTENSION_ID,
|
||||
quick_ai_access::PLUGIN_JSON_FILE,
|
||||
)
|
||||
.await?,
|
||||
);
|
||||
|
||||
cfg_if::cfg_if! {
|
||||
if #[cfg(any(target_os = "macos", target_os = "windows"))] {
|
||||
built_in_extensions.push(
|
||||
load_built_in_extension(
|
||||
&dir,
|
||||
file_search::EXTENSION_ID,
|
||||
file_search::PLUGIN_JSON_FILE,
|
||||
)
|
||||
.await?,
|
||||
);
|
||||
}
|
||||
}
|
||||
|
||||
Ok(built_in_extensions)
|
||||
}
|
||||
|
||||
pub(super) async fn init_built_in_extension<R: Runtime>(
|
||||
tauri_app_handle: &AppHandle<R>,
|
||||
pub(super) async fn init_built_in_extension(
|
||||
tauri_app_handle: &AppHandle,
|
||||
extension: &Extension,
|
||||
search_source_registry: &SearchSourceRegistry,
|
||||
) -> Result<(), String> {
|
||||
log::trace!("initializing built-in extensions");
|
||||
log::trace!("initializing built-in extensions [{}]", extension.id);
|
||||
|
||||
if extension.id == application::QUERYSOURCE_ID_DATASOURCE_ID_DATASOURCE_NAME {
|
||||
search_source_registry
|
||||
@@ -199,6 +212,18 @@ pub(super) async fn init_built_in_extension<R: Runtime>(
|
||||
log::debug!("built-in extension [{}] initialized", extension.id);
|
||||
}
|
||||
|
||||
cfg_if::cfg_if! {
|
||||
if #[cfg(any(target_os = "macos", target_os = "windows"))] {
|
||||
if extension.id == file_search::EXTENSION_ID {
|
||||
let file_system_search = file_search::FileSearchExtensionSearchSource;
|
||||
search_source_registry
|
||||
.register_source(file_system_search)
|
||||
.await;
|
||||
log::debug!("built-in extension [{}] initialized", extension.id);
|
||||
}
|
||||
}
|
||||
}
|
||||
|
||||
Ok(())
|
||||
}
|
||||
|
||||
@@ -207,11 +232,9 @@ pub(crate) fn is_extension_built_in(bundle_id: &ExtensionBundleIdBorrowed<'_>) -
|
||||
}
|
||||
|
||||
pub(crate) async fn enable_built_in_extension(
|
||||
tauri_app_handle: &AppHandle,
|
||||
bundle_id: &ExtensionBundleIdBorrowed<'_>,
|
||||
) -> Result<(), String> {
|
||||
let tauri_app_handle = GLOBAL_TAURI_APP_HANDLE
|
||||
.get()
|
||||
.expect("global tauri app handle not set");
|
||||
let search_source_registry_tauri_state = tauri_app_handle.state::<SearchSourceRegistry>();
|
||||
|
||||
let update_extension = |extension: &mut Extension| -> Result<(), String> {
|
||||
@@ -228,7 +251,7 @@ pub(crate) async fn enable_built_in_extension(
|
||||
set_apps_hotkey(tauri_app_handle)?;
|
||||
|
||||
alter_extension_json_file(
|
||||
&BUILT_IN_EXTENSION_DIRECTORY.as_path(),
|
||||
&get_built_in_extension_directory(tauri_app_handle),
|
||||
bundle_id,
|
||||
update_extension,
|
||||
)?;
|
||||
@@ -251,7 +274,7 @@ pub(crate) async fn enable_built_in_extension(
|
||||
.register_source(calculator_search)
|
||||
.await;
|
||||
alter_extension_json_file(
|
||||
&BUILT_IN_EXTENSION_DIRECTORY.as_path(),
|
||||
&get_built_in_extension_directory(tauri_app_handle),
|
||||
bundle_id,
|
||||
update_extension,
|
||||
)?;
|
||||
@@ -260,7 +283,7 @@ pub(crate) async fn enable_built_in_extension(
|
||||
|
||||
if bundle_id.extension_id == quick_ai_access::EXTENSION_ID {
|
||||
alter_extension_json_file(
|
||||
&BUILT_IN_EXTENSION_DIRECTORY.as_path(),
|
||||
&get_built_in_extension_directory(tauri_app_handle),
|
||||
bundle_id,
|
||||
update_extension,
|
||||
)?;
|
||||
@@ -269,22 +292,37 @@ pub(crate) async fn enable_built_in_extension(
|
||||
|
||||
if bundle_id.extension_id == ai_overview::EXTENSION_ID {
|
||||
alter_extension_json_file(
|
||||
&BUILT_IN_EXTENSION_DIRECTORY.as_path(),
|
||||
&get_built_in_extension_directory(tauri_app_handle),
|
||||
bundle_id,
|
||||
update_extension,
|
||||
)?;
|
||||
return Ok(());
|
||||
}
|
||||
|
||||
cfg_if::cfg_if! {
|
||||
if #[cfg(any(target_os = "macos", target_os = "windows"))] {
|
||||
if bundle_id.extension_id == file_search::EXTENSION_ID {
|
||||
let file_system_search = file_search::FileSearchExtensionSearchSource;
|
||||
search_source_registry_tauri_state
|
||||
.register_source(file_system_search)
|
||||
.await;
|
||||
alter_extension_json_file(
|
||||
&get_built_in_extension_directory(tauri_app_handle),
|
||||
bundle_id,
|
||||
update_extension,
|
||||
)?;
|
||||
return Ok(());
|
||||
}
|
||||
}
|
||||
}
|
||||
|
||||
Ok(())
|
||||
}
|
||||
|
||||
pub(crate) async fn disable_built_in_extension(
|
||||
tauri_app_handle: &AppHandle,
|
||||
bundle_id: &ExtensionBundleIdBorrowed<'_>,
|
||||
) -> Result<(), String> {
|
||||
let tauri_app_handle = GLOBAL_TAURI_APP_HANDLE
|
||||
.get()
|
||||
.expect("global tauri app handle not set");
|
||||
let search_source_registry_tauri_state = tauri_app_handle.state::<SearchSourceRegistry>();
|
||||
|
||||
let update_extension = |extension: &mut Extension| -> Result<(), String> {
|
||||
@@ -301,7 +339,7 @@ pub(crate) async fn disable_built_in_extension(
|
||||
unset_apps_hotkey(tauri_app_handle)?;
|
||||
|
||||
alter_extension_json_file(
|
||||
&BUILT_IN_EXTENSION_DIRECTORY.as_path(),
|
||||
&get_built_in_extension_directory(tauri_app_handle),
|
||||
bundle_id,
|
||||
update_extension,
|
||||
)?;
|
||||
@@ -322,7 +360,7 @@ pub(crate) async fn disable_built_in_extension(
|
||||
.remove_source(bundle_id.extension_id)
|
||||
.await;
|
||||
alter_extension_json_file(
|
||||
&BUILT_IN_EXTENSION_DIRECTORY.as_path(),
|
||||
&get_built_in_extension_directory(tauri_app_handle),
|
||||
bundle_id,
|
||||
update_extension,
|
||||
)?;
|
||||
@@ -331,7 +369,7 @@ pub(crate) async fn disable_built_in_extension(
|
||||
|
||||
if bundle_id.extension_id == quick_ai_access::EXTENSION_ID {
|
||||
alter_extension_json_file(
|
||||
&BUILT_IN_EXTENSION_DIRECTORY.as_path(),
|
||||
&get_built_in_extension_directory(tauri_app_handle),
|
||||
bundle_id,
|
||||
update_extension,
|
||||
)?;
|
||||
@@ -341,7 +379,7 @@ pub(crate) async fn disable_built_in_extension(
|
||||
|
||||
if bundle_id.extension_id == ai_overview::EXTENSION_ID {
|
||||
alter_extension_json_file(
|
||||
&BUILT_IN_EXTENSION_DIRECTORY.as_path(),
|
||||
&get_built_in_extension_directory(tauri_app_handle),
|
||||
bundle_id,
|
||||
update_extension,
|
||||
)?;
|
||||
@@ -349,14 +387,30 @@ pub(crate) async fn disable_built_in_extension(
|
||||
return Ok(());
|
||||
}
|
||||
|
||||
cfg_if::cfg_if! {
|
||||
if #[cfg(any(target_os = "macos", target_os = "windows"))] {
|
||||
if bundle_id.extension_id == file_search::EXTENSION_ID {
|
||||
search_source_registry_tauri_state
|
||||
.remove_source(bundle_id.extension_id)
|
||||
.await;
|
||||
alter_extension_json_file(
|
||||
&get_built_in_extension_directory(tauri_app_handle),
|
||||
bundle_id,
|
||||
update_extension,
|
||||
)?;
|
||||
return Ok(());
|
||||
}
|
||||
}
|
||||
}
|
||||
|
||||
Ok(())
|
||||
}
|
||||
|
||||
pub(crate) fn set_built_in_extension_alias(bundle_id: &ExtensionBundleIdBorrowed<'_>, alias: &str) {
|
||||
let tauri_app_handle = GLOBAL_TAURI_APP_HANDLE
|
||||
.get()
|
||||
.expect("global tauri app handle not set");
|
||||
|
||||
pub(crate) fn set_built_in_extension_alias(
|
||||
tauri_app_handle: &AppHandle,
|
||||
bundle_id: &ExtensionBundleIdBorrowed<'_>,
|
||||
alias: &str,
|
||||
) {
|
||||
if bundle_id.extension_id == application::QUERYSOURCE_ID_DATASOURCE_ID_DATASOURCE_NAME {
|
||||
if let Some(app_path) = bundle_id.sub_extension_id {
|
||||
application::set_app_alias(tauri_app_handle, app_path, alias);
|
||||
@@ -365,13 +419,10 @@ pub(crate) fn set_built_in_extension_alias(bundle_id: &ExtensionBundleIdBorrowed
|
||||
}
|
||||
|
||||
pub(crate) fn register_built_in_extension_hotkey(
|
||||
tauri_app_handle: &AppHandle,
|
||||
bundle_id: &ExtensionBundleIdBorrowed<'_>,
|
||||
hotkey: &str,
|
||||
) -> Result<(), String> {
|
||||
let tauri_app_handle = GLOBAL_TAURI_APP_HANDLE
|
||||
.get()
|
||||
.expect("global tauri app handle not set");
|
||||
|
||||
if bundle_id.extension_id == application::QUERYSOURCE_ID_DATASOURCE_ID_DATASOURCE_NAME {
|
||||
if let Some(app_path) = bundle_id.sub_extension_id {
|
||||
application::register_app_hotkey(&tauri_app_handle, app_path, hotkey)?;
|
||||
@@ -381,12 +432,9 @@ pub(crate) fn register_built_in_extension_hotkey(
|
||||
}
|
||||
|
||||
pub(crate) fn unregister_built_in_extension_hotkey(
|
||||
tauri_app_handle: &AppHandle,
|
||||
bundle_id: &ExtensionBundleIdBorrowed<'_>,
|
||||
) -> Result<(), String> {
|
||||
let tauri_app_handle = GLOBAL_TAURI_APP_HANDLE
|
||||
.get()
|
||||
.expect("global tauri app handle not set");
|
||||
|
||||
if bundle_id.extension_id == application::QUERYSOURCE_ID_DATASOURCE_ID_DATASOURCE_NAME {
|
||||
if let Some(app_path) = bundle_id.sub_extension_id {
|
||||
application::unregister_app_hotkey(&tauri_app_handle, app_path)?;
|
||||
@@ -432,11 +480,9 @@ fn load_extension_from_json_file(
|
||||
}
|
||||
|
||||
pub(crate) async fn is_built_in_extension_enabled(
|
||||
tauri_app_handle: &AppHandle,
|
||||
bundle_id: &ExtensionBundleIdBorrowed<'_>,
|
||||
) -> Result<bool, String> {
|
||||
let tauri_app_handle = GLOBAL_TAURI_APP_HANDLE
|
||||
.get()
|
||||
.expect("global tauri app handle not set");
|
||||
let search_source_registry_tauri_state = tauri_app_handle.state::<SearchSourceRegistry>();
|
||||
|
||||
if bundle_id.extension_id == application::QUERYSOURCE_ID_DATASOURCE_ID_DATASOURCE_NAME
|
||||
@@ -464,7 +510,7 @@ pub(crate) async fn is_built_in_extension_enabled(
|
||||
|
||||
if bundle_id.extension_id == quick_ai_access::EXTENSION_ID {
|
||||
let extension = load_extension_from_json_file(
|
||||
&BUILT_IN_EXTENSION_DIRECTORY.as_path(),
|
||||
&get_built_in_extension_directory(tauri_app_handle),
|
||||
bundle_id.extension_id,
|
||||
)?;
|
||||
return Ok(extension.enabled);
|
||||
@@ -472,11 +518,24 @@ pub(crate) async fn is_built_in_extension_enabled(
|
||||
|
||||
if bundle_id.extension_id == ai_overview::EXTENSION_ID {
|
||||
let extension = load_extension_from_json_file(
|
||||
&BUILT_IN_EXTENSION_DIRECTORY.as_path(),
|
||||
&get_built_in_extension_directory(tauri_app_handle),
|
||||
bundle_id.extension_id,
|
||||
)?;
|
||||
return Ok(extension.enabled);
|
||||
}
|
||||
|
||||
cfg_if::cfg_if! {
|
||||
if #[cfg(any(target_os = "macos", target_os = "windows"))] {
|
||||
if bundle_id.extension_id == file_search::EXTENSION_ID
|
||||
&& bundle_id.sub_extension_id.is_none()
|
||||
{
|
||||
return Ok(search_source_registry_tauri_state
|
||||
.get_source(bundle_id.extension_id)
|
||||
.await
|
||||
.is_some());
|
||||
}
|
||||
}
|
||||
}
|
||||
|
||||
unreachable!("extension [{:?}] is not a built-in extension", bundle_id)
|
||||
}
|
||||
|
||||
@@ -8,8 +8,8 @@
|
||||
//! which forces us to create a dedicated thread/runtime to execute them.
|
||||
|
||||
use std::any::Any;
|
||||
use std::collections::hash_map::Entry;
|
||||
use std::collections::HashMap;
|
||||
use std::collections::hash_map::Entry;
|
||||
use std::sync::OnceLock;
|
||||
|
||||
pub(crate) trait SearchSourceState {
|
||||
|
||||
File diff suppressed because it is too large
Load Diff
694
src-tauri/src/extension/third_party/check.rs
vendored
Normal file
694
src-tauri/src/extension/third_party/check.rs
vendored
Normal file
@@ -0,0 +1,694 @@
|
||||
//! Coco has 4 sources of `plugin.json` to check and validate:
|
||||
//!
|
||||
//! 1. From coco-extensions repository
|
||||
//!
|
||||
//! Granted, Coco APP won't check these files directly, but the code here
|
||||
//! will run in that repository's CI to prevent errors in the first place.
|
||||
//!
|
||||
//! 2. From the "<data directory>/third_party_extensions" directory
|
||||
//! 3. Imported via "Import Local Extension"
|
||||
//! 4. Downloaded from the "store/extension/<extension ID>/_download" API
|
||||
//!
|
||||
//! This file contains the checks that are general enough to be applied to all
|
||||
//! these 4 sources
|
||||
|
||||
use crate::extension::Extension;
|
||||
use crate::extension::ExtensionType;
|
||||
use crate::util::platform::Platform;
|
||||
use std::collections::HashSet;
|
||||
|
||||
pub(crate) fn general_check(extension: &Extension) -> Result<(), String> {
|
||||
// Check main extension
|
||||
check_main_extension_only(extension)?;
|
||||
check_main_extension_or_sub_extension(extension, &format!("extension [{}]", extension.id))?;
|
||||
|
||||
// `None` if `extension` is compatible with all the platforms. Otherwise `Some(limited_platforms)`
|
||||
let limited_supported_platforms = match extension.platforms.as_ref() {
|
||||
Some(platforms) => {
|
||||
if platforms.len() == Platform::num_of_supported_platforms() {
|
||||
None
|
||||
} else {
|
||||
Some(platforms)
|
||||
}
|
||||
}
|
||||
None => None,
|
||||
};
|
||||
|
||||
// Check sub extensions
|
||||
let commands = match extension.commands {
|
||||
Some(ref v) => v.as_slice(),
|
||||
None => &[],
|
||||
};
|
||||
let scripts = match extension.scripts {
|
||||
Some(ref v) => v.as_slice(),
|
||||
None => &[],
|
||||
};
|
||||
let quicklinks = match extension.quicklinks {
|
||||
Some(ref v) => v.as_slice(),
|
||||
None => &[],
|
||||
};
|
||||
let sub_extensions = [commands, scripts, quicklinks].concat();
|
||||
let mut sub_extension_ids = HashSet::new();
|
||||
|
||||
for sub_extension in sub_extensions.iter() {
|
||||
check_sub_extension_only(&extension.id, sub_extension, limited_supported_platforms)?;
|
||||
check_main_extension_or_sub_extension(
|
||||
extension,
|
||||
&format!("sub-extension [{}-{}]", extension.id, sub_extension.id),
|
||||
)?;
|
||||
|
||||
if !sub_extension_ids.insert(sub_extension.id.as_str()) {
|
||||
// extension ID already exists
|
||||
return Err(format!(
|
||||
"sub-extension with ID [{}] already exists",
|
||||
sub_extension.id
|
||||
));
|
||||
}
|
||||
}
|
||||
|
||||
Ok(())
|
||||
}
|
||||
|
||||
/// This checks the main extension only, it won't check sub-extensions.
|
||||
fn check_main_extension_only(extension: &Extension) -> Result<(), String> {
|
||||
// Group and Extension cannot have alias
|
||||
if extension.alias.is_some() {
|
||||
if extension.r#type == ExtensionType::Group || extension.r#type == ExtensionType::Extension
|
||||
{
|
||||
return Err(format!(
|
||||
"invalid extension [{}], extension of type [{:?}] cannot have alias",
|
||||
extension.id, extension.r#type
|
||||
));
|
||||
}
|
||||
}
|
||||
|
||||
// Group and Extension cannot have hotkey
|
||||
if extension.hotkey.is_some() {
|
||||
if extension.r#type == ExtensionType::Group || extension.r#type == ExtensionType::Extension
|
||||
{
|
||||
return Err(format!(
|
||||
"invalid extension [{}], extension of type [{:?}] cannot have hotkey",
|
||||
extension.id, extension.r#type
|
||||
));
|
||||
}
|
||||
}
|
||||
|
||||
if extension.commands.is_some() || extension.scripts.is_some() || extension.quicklinks.is_some()
|
||||
{
|
||||
if extension.r#type != ExtensionType::Group && extension.r#type != ExtensionType::Extension
|
||||
{
|
||||
return Err(format!(
|
||||
"invalid extension [{}], only extension of type [Group] and [Extension] can have sub-extensions",
|
||||
extension.id,
|
||||
));
|
||||
}
|
||||
}
|
||||
|
||||
Ok(())
|
||||
}
|
||||
|
||||
fn check_sub_extension_only(
|
||||
extension_id: &str,
|
||||
sub_extension: &Extension,
|
||||
limited_platforms: Option<&HashSet<Platform>>,
|
||||
) -> Result<(), String> {
|
||||
if sub_extension.r#type == ExtensionType::Group
|
||||
|| sub_extension.r#type == ExtensionType::Extension
|
||||
{
|
||||
return Err(format!(
|
||||
"invalid sub-extension [{}-{}]: sub-extensions should not be of type [Group] or [Extension]",
|
||||
extension_id, sub_extension.id
|
||||
));
|
||||
}
|
||||
|
||||
if sub_extension.commands.is_some()
|
||||
|| sub_extension.scripts.is_some()
|
||||
|| sub_extension.quicklinks.is_some()
|
||||
{
|
||||
return Err(format!(
|
||||
"invalid sub-extension [{}-{}]: fields [commands/scripts/quicklinks] should not be set in sub-extensions",
|
||||
extension_id, sub_extension.id
|
||||
));
|
||||
}
|
||||
|
||||
if sub_extension.developer.is_some() {
|
||||
return Err(format!(
|
||||
"invalid sub-extension [{}-{}]: field [developer] should not be set in sub-extensions",
|
||||
extension_id, sub_extension.id
|
||||
));
|
||||
}
|
||||
|
||||
if let Some(platforms_supported_by_main_extension) = limited_platforms {
|
||||
match sub_extension.platforms {
|
||||
Some(ref platforms_supported_by_sub_extension) => {
|
||||
let diff = platforms_supported_by_sub_extension
|
||||
.difference(&platforms_supported_by_main_extension)
|
||||
.into_iter()
|
||||
.map(|p| p.to_string())
|
||||
.collect::<Vec<String>>();
|
||||
|
||||
if !diff.is_empty() {
|
||||
return Err(format!(
|
||||
"invalid sub-extension [{}-{}]: it supports platforms {:?} that are not supported by the main extension",
|
||||
extension_id, sub_extension.id, diff
|
||||
));
|
||||
}
|
||||
}
|
||||
None => {
|
||||
// if `sub_extension.platform` is None, it means it has the same value
|
||||
// as main extension's `platforms` field, so we don't need to check it.
|
||||
}
|
||||
}
|
||||
}
|
||||
|
||||
Ok(())
|
||||
}
|
||||
|
||||
fn check_main_extension_or_sub_extension(
|
||||
extension: &Extension,
|
||||
identifier: &str,
|
||||
) -> Result<(), String> {
|
||||
// If field `action` is Some, then it should be a Command
|
||||
if extension.action.is_some() && extension.r#type != ExtensionType::Command {
|
||||
return Err(format!(
|
||||
"invalid {}, field [action] is set for a non-Command extension",
|
||||
identifier
|
||||
));
|
||||
}
|
||||
|
||||
if extension.r#type == ExtensionType::Command && extension.action.is_none() {
|
||||
return Err(format!(
|
||||
"invalid {}, field [action] should be set for a Command extension",
|
||||
identifier
|
||||
));
|
||||
}
|
||||
|
||||
// If field `quicklink` is Some, then it should be a Quicklink
|
||||
if extension.quicklink.is_some() && extension.r#type != ExtensionType::Quicklink {
|
||||
return Err(format!(
|
||||
"invalid {}, field [quicklink] is set for a non-Quicklink extension",
|
||||
identifier
|
||||
));
|
||||
}
|
||||
|
||||
if extension.r#type == ExtensionType::Quicklink && extension.quicklink.is_none() {
|
||||
return Err(format!(
|
||||
"invalid {}, field [quicklink] should be set for a Quicklink extension",
|
||||
identifier
|
||||
));
|
||||
}
|
||||
|
||||
Ok(())
|
||||
}
|
||||
|
||||
#[cfg(test)]
|
||||
mod tests {
|
||||
use super::*;
|
||||
use crate::extension::{CommandAction, Quicklink, QuicklinkLink, QuicklinkLinkComponent};
|
||||
|
||||
/// Helper function to create a basic valid extension
|
||||
fn create_basic_extension(id: &str, extension_type: ExtensionType) -> Extension {
|
||||
Extension {
|
||||
id: id.to_string(),
|
||||
name: "Test Extension".to_string(),
|
||||
developer: None,
|
||||
platforms: None,
|
||||
description: "Test description".to_string(),
|
||||
icon: "test-icon.png".to_string(),
|
||||
r#type: extension_type,
|
||||
action: None,
|
||||
quicklink: None,
|
||||
commands: None,
|
||||
scripts: None,
|
||||
quicklinks: None,
|
||||
alias: None,
|
||||
hotkey: None,
|
||||
enabled: true,
|
||||
settings: None,
|
||||
screenshots: None,
|
||||
url: None,
|
||||
version: None,
|
||||
}
|
||||
}
|
||||
|
||||
/// Helper function to create a command action
|
||||
fn create_command_action() -> CommandAction {
|
||||
CommandAction {
|
||||
exec: "echo".to_string(),
|
||||
args: Some(vec!["test".to_string()]),
|
||||
}
|
||||
}
|
||||
|
||||
/// Helper function to create a quicklink
|
||||
fn create_quicklink() -> Quicklink {
|
||||
Quicklink {
|
||||
link: QuicklinkLink {
|
||||
components: vec![QuicklinkLinkComponent::StaticStr(
|
||||
"https://example.com".to_string(),
|
||||
)],
|
||||
},
|
||||
open_with: None,
|
||||
}
|
||||
}
|
||||
|
||||
/* test_check_main_extension_only */
|
||||
#[test]
|
||||
fn test_group_cannot_have_alias() {
|
||||
let mut extension = create_basic_extension("test-group", ExtensionType::Group);
|
||||
extension.alias = Some("group-alias".to_string());
|
||||
|
||||
let result = general_check(&extension);
|
||||
assert!(result.is_err());
|
||||
assert!(result.unwrap_err().contains("cannot have alias"));
|
||||
}
|
||||
|
||||
#[test]
|
||||
fn test_extension_cannot_have_alias() {
|
||||
let mut extension = create_basic_extension("test-ext", ExtensionType::Extension);
|
||||
extension.alias = Some("ext-alias".to_string());
|
||||
|
||||
let result = general_check(&extension);
|
||||
assert!(result.is_err());
|
||||
assert!(result.unwrap_err().contains("cannot have alias"));
|
||||
}
|
||||
|
||||
#[test]
|
||||
fn test_group_cannot_have_hotkey() {
|
||||
let mut extension = create_basic_extension("test-group", ExtensionType::Group);
|
||||
extension.hotkey = Some("cmd+g".to_string());
|
||||
|
||||
let result = general_check(&extension);
|
||||
assert!(result.is_err());
|
||||
assert!(result.unwrap_err().contains("cannot have hotkey"));
|
||||
}
|
||||
|
||||
#[test]
|
||||
fn test_extension_cannot_have_hotkey() {
|
||||
let mut extension = create_basic_extension("test-ext", ExtensionType::Extension);
|
||||
extension.hotkey = Some("cmd+e".to_string());
|
||||
|
||||
let result = general_check(&extension);
|
||||
assert!(result.is_err());
|
||||
assert!(result.unwrap_err().contains("cannot have hotkey"));
|
||||
}
|
||||
|
||||
#[test]
|
||||
fn test_non_container_types_cannot_have_sub_extensions() {
|
||||
let mut extension = create_basic_extension("test-cmd", ExtensionType::Command);
|
||||
extension.action = Some(create_command_action());
|
||||
extension.commands = Some(vec![create_basic_extension(
|
||||
"sub-cmd",
|
||||
ExtensionType::Command,
|
||||
)]);
|
||||
|
||||
let result = general_check(&extension);
|
||||
assert!(result.is_err());
|
||||
assert!(
|
||||
result
|
||||
.unwrap_err()
|
||||
.contains("only extension of type [Group] and [Extension] can have sub-extensions")
|
||||
);
|
||||
}
|
||||
/* test_check_main_extension_only */
|
||||
|
||||
/* test check_main_extension_or_sub_extension */
|
||||
#[test]
|
||||
fn test_command_must_have_action() {
|
||||
let extension = create_basic_extension("test-cmd", ExtensionType::Command);
|
||||
|
||||
let result = general_check(&extension);
|
||||
assert!(result.is_err());
|
||||
assert!(
|
||||
result
|
||||
.unwrap_err()
|
||||
.contains("field [action] should be set for a Command extension")
|
||||
);
|
||||
}
|
||||
|
||||
#[test]
|
||||
fn test_non_command_cannot_have_action() {
|
||||
let mut extension = create_basic_extension("test-script", ExtensionType::Script);
|
||||
extension.action = Some(create_command_action());
|
||||
|
||||
let result = general_check(&extension);
|
||||
assert!(result.is_err());
|
||||
assert!(
|
||||
result
|
||||
.unwrap_err()
|
||||
.contains("field [action] is set for a non-Command extension")
|
||||
);
|
||||
}
|
||||
|
||||
#[test]
|
||||
fn test_quicklink_must_have_quicklink_field() {
|
||||
let extension = create_basic_extension("test-quicklink", ExtensionType::Quicklink);
|
||||
|
||||
let result = general_check(&extension);
|
||||
assert!(result.is_err());
|
||||
assert!(
|
||||
result
|
||||
.unwrap_err()
|
||||
.contains("field [quicklink] should be set for a Quicklink extension")
|
||||
);
|
||||
}
|
||||
|
||||
#[test]
|
||||
fn test_non_quicklink_cannot_have_quicklink_field() {
|
||||
let mut extension = create_basic_extension("test-cmd", ExtensionType::Command);
|
||||
extension.action = Some(create_command_action());
|
||||
extension.quicklink = Some(create_quicklink());
|
||||
|
||||
let result = general_check(&extension);
|
||||
assert!(result.is_err());
|
||||
assert!(
|
||||
result
|
||||
.unwrap_err()
|
||||
.contains("field [quicklink] is set for a non-Quicklink extension")
|
||||
);
|
||||
}
|
||||
/* test check_main_extension_or_sub_extension */
|
||||
|
||||
/* Test check_sub_extension_only */
|
||||
#[test]
|
||||
fn test_sub_extension_cannot_be_group() {
|
||||
let mut extension = create_basic_extension("test-group", ExtensionType::Group);
|
||||
let sub_group = create_basic_extension("sub-group", ExtensionType::Group);
|
||||
extension.commands = Some(vec![sub_group]);
|
||||
|
||||
let result = general_check(&extension);
|
||||
assert!(result.is_err());
|
||||
assert!(
|
||||
result
|
||||
.unwrap_err()
|
||||
.contains("sub-extensions should not be of type [Group] or [Extension]")
|
||||
);
|
||||
}
|
||||
|
||||
#[test]
|
||||
fn test_sub_extension_cannot_be_extension() {
|
||||
let mut extension = create_basic_extension("test-ext", ExtensionType::Extension);
|
||||
let sub_ext = create_basic_extension("sub-ext", ExtensionType::Extension);
|
||||
extension.scripts = Some(vec![sub_ext]);
|
||||
|
||||
let result = general_check(&extension);
|
||||
assert!(result.is_err());
|
||||
assert!(
|
||||
result
|
||||
.unwrap_err()
|
||||
.contains("sub-extensions should not be of type [Group] or [Extension]")
|
||||
);
|
||||
}
|
||||
|
||||
#[test]
|
||||
fn test_sub_extension_cannot_have_developer() {
|
||||
let mut extension = create_basic_extension("test-group", ExtensionType::Group);
|
||||
let mut sub_cmd = create_basic_extension("sub-cmd", ExtensionType::Command);
|
||||
sub_cmd.action = Some(create_command_action());
|
||||
sub_cmd.developer = Some("test-dev".to_string());
|
||||
|
||||
extension.commands = Some(vec![sub_cmd]);
|
||||
|
||||
let result = general_check(&extension);
|
||||
assert!(result.is_err());
|
||||
assert!(
|
||||
result
|
||||
.unwrap_err()
|
||||
.contains("field [developer] should not be set in sub-extensions")
|
||||
);
|
||||
}
|
||||
|
||||
#[test]
|
||||
fn test_sub_extension_cannot_have_sub_extensions() {
|
||||
let mut extension = create_basic_extension("test-group", ExtensionType::Group);
|
||||
let mut sub_cmd = create_basic_extension("sub-cmd", ExtensionType::Command);
|
||||
sub_cmd.action = Some(create_command_action());
|
||||
sub_cmd.commands = Some(vec![create_basic_extension(
|
||||
"nested-cmd",
|
||||
ExtensionType::Command,
|
||||
)]);
|
||||
|
||||
extension.commands = Some(vec![sub_cmd]);
|
||||
|
||||
let result = general_check(&extension);
|
||||
assert!(result.is_err());
|
||||
assert!(
|
||||
result.unwrap_err().contains(
|
||||
"fields [commands/scripts/quicklinks] should not be set in sub-extensions"
|
||||
)
|
||||
);
|
||||
}
|
||||
/* Test check_sub_extension_only */
|
||||
|
||||
#[test]
|
||||
fn test_duplicate_sub_extension_ids() {
|
||||
let mut extension = create_basic_extension("test-group", ExtensionType::Group);
|
||||
|
||||
let mut cmd1 = create_basic_extension("duplicate-id", ExtensionType::Command);
|
||||
cmd1.action = Some(create_command_action());
|
||||
|
||||
let mut cmd2 = create_basic_extension("duplicate-id", ExtensionType::Command);
|
||||
cmd2.action = Some(create_command_action());
|
||||
|
||||
extension.commands = Some(vec![cmd1, cmd2]);
|
||||
|
||||
let result = general_check(&extension);
|
||||
assert!(result.is_err());
|
||||
assert!(
|
||||
result
|
||||
.unwrap_err()
|
||||
.contains("sub-extension with ID [duplicate-id] already exists")
|
||||
);
|
||||
}
|
||||
|
||||
#[test]
|
||||
fn test_duplicate_ids_across_different_sub_extension_types() {
|
||||
let mut extension = create_basic_extension("test-group", ExtensionType::Group);
|
||||
|
||||
let mut cmd = create_basic_extension("same-id", ExtensionType::Command);
|
||||
cmd.action = Some(create_command_action());
|
||||
|
||||
let script = create_basic_extension("same-id", ExtensionType::Script);
|
||||
|
||||
extension.commands = Some(vec![cmd]);
|
||||
extension.scripts = Some(vec![script]);
|
||||
|
||||
let result = general_check(&extension);
|
||||
assert!(result.is_err());
|
||||
assert!(
|
||||
result
|
||||
.unwrap_err()
|
||||
.contains("sub-extension with ID [same-id] already exists")
|
||||
);
|
||||
}
|
||||
|
||||
#[test]
|
||||
fn test_valid_group_extension() {
|
||||
let mut extension = create_basic_extension("test-group", ExtensionType::Group);
|
||||
extension.commands = Some(vec![create_basic_extension("cmd1", ExtensionType::Command)]);
|
||||
|
||||
assert!(general_check(&extension).is_ok());
|
||||
}
|
||||
|
||||
#[test]
|
||||
fn test_valid_extension_type() {
|
||||
let mut extension = create_basic_extension("test-ext", ExtensionType::Extension);
|
||||
extension.scripts = Some(vec![create_basic_extension(
|
||||
"script1",
|
||||
ExtensionType::Script,
|
||||
)]);
|
||||
|
||||
assert!(general_check(&extension).is_ok());
|
||||
}
|
||||
|
||||
#[test]
|
||||
fn test_valid_command_extension() {
|
||||
let mut extension = create_basic_extension("test-cmd", ExtensionType::Command);
|
||||
extension.action = Some(create_command_action());
|
||||
|
||||
assert!(general_check(&extension).is_ok());
|
||||
}
|
||||
|
||||
#[test]
|
||||
fn test_valid_quicklink_extension() {
|
||||
let mut extension = create_basic_extension("test-quicklink", ExtensionType::Quicklink);
|
||||
extension.quicklink = Some(create_quicklink());
|
||||
|
||||
assert!(general_check(&extension).is_ok());
|
||||
}
|
||||
|
||||
#[test]
|
||||
fn test_valid_complex_extension() {
|
||||
let mut extension = create_basic_extension("spotify-controls", ExtensionType::Extension);
|
||||
|
||||
// Add valid commands
|
||||
let mut play_pause = create_basic_extension("play-pause", ExtensionType::Command);
|
||||
play_pause.action = Some(create_command_action());
|
||||
|
||||
let mut next_track = create_basic_extension("next-track", ExtensionType::Command);
|
||||
next_track.action = Some(create_command_action());
|
||||
|
||||
let mut prev_track = create_basic_extension("prev-track", ExtensionType::Command);
|
||||
prev_track.action = Some(create_command_action());
|
||||
|
||||
extension.commands = Some(vec![play_pause, next_track, prev_track]);
|
||||
|
||||
assert!(general_check(&extension).is_ok());
|
||||
}
|
||||
|
||||
#[test]
|
||||
fn test_valid_single_layer_command() {
|
||||
let mut extension = create_basic_extension("empty-trash", ExtensionType::Command);
|
||||
extension.action = Some(create_command_action());
|
||||
|
||||
assert!(general_check(&extension).is_ok());
|
||||
}
|
||||
|
||||
#[test]
|
||||
fn test_command_alias_and_hotkey_allowed() {
|
||||
let mut extension = create_basic_extension("test-cmd", ExtensionType::Command);
|
||||
extension.action = Some(create_command_action());
|
||||
extension.alias = Some("cmd-alias".to_string());
|
||||
extension.hotkey = Some("cmd+t".to_string());
|
||||
|
||||
assert!(general_check(&extension).is_ok());
|
||||
}
|
||||
|
||||
/*
|
||||
* Tests for check that sub extension cannot support extensions that are not
|
||||
* supported by the main extension
|
||||
*
|
||||
* Start here
|
||||
*/
|
||||
#[test]
|
||||
fn test_platform_validation_both_none() {
|
||||
// Case 1: main extension's platforms = None, sub extension's platforms = None
|
||||
// Should return Ok(())
|
||||
let mut main_extension = create_basic_extension("main-ext", ExtensionType::Group);
|
||||
main_extension.platforms = None;
|
||||
|
||||
let mut sub_cmd = create_basic_extension("sub-cmd", ExtensionType::Command);
|
||||
sub_cmd.action = Some(create_command_action());
|
||||
sub_cmd.platforms = None;
|
||||
|
||||
main_extension.commands = Some(vec![sub_cmd]);
|
||||
|
||||
let result = general_check(&main_extension);
|
||||
assert!(result.is_ok());
|
||||
}
|
||||
|
||||
#[test]
|
||||
fn test_platform_validation_main_all_sub_none() {
|
||||
// Case 2: main extension's platforms = Some(all platforms), sub extension's platforms = None
|
||||
// Should return Ok(())
|
||||
let mut main_extension = create_basic_extension("main-ext", ExtensionType::Group);
|
||||
main_extension.platforms = Some(Platform::all());
|
||||
|
||||
let mut sub_cmd = create_basic_extension("sub-cmd", ExtensionType::Command);
|
||||
sub_cmd.action = Some(create_command_action());
|
||||
sub_cmd.platforms = None;
|
||||
|
||||
main_extension.commands = Some(vec![sub_cmd]);
|
||||
|
||||
let result = general_check(&main_extension);
|
||||
assert!(result.is_ok());
|
||||
}
|
||||
|
||||
#[test]
|
||||
fn test_platform_validation_main_none_sub_some() {
|
||||
// Case 3: main extension's platforms = None, sub extension's platforms = Some([Platform::Macos])
|
||||
// Should return Ok(()) because None means supports all platforms
|
||||
let mut main_extension = create_basic_extension("main-ext", ExtensionType::Group);
|
||||
main_extension.platforms = None;
|
||||
|
||||
let mut sub_cmd = create_basic_extension("sub-cmd", ExtensionType::Command);
|
||||
sub_cmd.action = Some(create_command_action());
|
||||
sub_cmd.platforms = Some(HashSet::from([Platform::Macos]));
|
||||
|
||||
main_extension.commands = Some(vec![sub_cmd]);
|
||||
|
||||
let result = general_check(&main_extension);
|
||||
assert!(result.is_ok());
|
||||
}
|
||||
|
||||
#[test]
|
||||
fn test_platform_validation_main_all_sub_subset() {
|
||||
// Case 4: main extension's platforms = Some(all platforms), sub extension's platforms = Some([Platform::Macos])
|
||||
// Should return Ok(()) because sub extension supports a subset of main extension's platforms
|
||||
let mut main_extension = create_basic_extension("main-ext", ExtensionType::Group);
|
||||
main_extension.platforms = Some(Platform::all());
|
||||
|
||||
let mut sub_cmd = create_basic_extension("sub-cmd", ExtensionType::Command);
|
||||
sub_cmd.action = Some(create_command_action());
|
||||
sub_cmd.platforms = Some(HashSet::from([Platform::Macos]));
|
||||
|
||||
main_extension.commands = Some(vec![sub_cmd]);
|
||||
|
||||
let result = general_check(&main_extension);
|
||||
assert!(result.is_ok());
|
||||
}
|
||||
|
||||
#[test]
|
||||
fn test_platform_validation_main_limited_sub_unsupported() {
|
||||
// Case 5: main extension's platforms = Some([Platform::Macos]), sub extension's platforms = Some([Platform::Linux])
|
||||
// Should return Err because sub extension supports a platform not supported by main extension
|
||||
let mut main_extension = create_basic_extension("main-ext", ExtensionType::Group);
|
||||
main_extension.platforms = Some(HashSet::from([Platform::Macos]));
|
||||
|
||||
let mut sub_cmd = create_basic_extension("sub-cmd", ExtensionType::Command);
|
||||
sub_cmd.action = Some(create_command_action());
|
||||
sub_cmd.platforms = Some(HashSet::from([Platform::Linux]));
|
||||
|
||||
main_extension.commands = Some(vec![sub_cmd]);
|
||||
|
||||
let result = general_check(&main_extension);
|
||||
assert!(result.is_err());
|
||||
let error_msg = result.unwrap_err();
|
||||
assert!(error_msg.contains("it supports platforms"));
|
||||
assert!(error_msg.contains("that are not supported by the main extension"));
|
||||
assert!(error_msg.contains("Linux")); // Should mention the unsupported platform
|
||||
}
|
||||
|
||||
#[test]
|
||||
fn test_platform_validation_main_partial_sub_unsupported() {
|
||||
// Case 6: main extension's platforms = Some([Platform::Macos, Platform::Windows]), sub extension's platforms = Some([Platform::Linux])
|
||||
// Should return Err because sub extension supports a platform not supported by main extension
|
||||
let mut main_extension = create_basic_extension("main-ext", ExtensionType::Group);
|
||||
main_extension.platforms = Some(HashSet::from([Platform::Macos, Platform::Windows]));
|
||||
|
||||
let mut sub_cmd = create_basic_extension("sub-cmd", ExtensionType::Command);
|
||||
sub_cmd.action = Some(create_command_action());
|
||||
sub_cmd.platforms = Some(HashSet::from([Platform::Linux]));
|
||||
|
||||
main_extension.commands = Some(vec![sub_cmd]);
|
||||
|
||||
let result = general_check(&main_extension);
|
||||
assert!(result.is_err());
|
||||
let error_msg = result.unwrap_err();
|
||||
assert!(error_msg.contains("it supports platforms"));
|
||||
assert!(error_msg.contains("that are not supported by the main extension"));
|
||||
assert!(error_msg.contains("Linux")); // Should mention the unsupported platform
|
||||
}
|
||||
|
||||
#[test]
|
||||
fn test_platform_validation_main_limited_sub_none() {
|
||||
// Case 7: main extension's platforms = Some([Platform::Macos]), sub extension's platforms = None
|
||||
// Should return Ok(()) because when sub extension's platforms is None, it inherits main extension's platforms
|
||||
let mut main_extension = create_basic_extension("main-ext", ExtensionType::Group);
|
||||
main_extension.platforms = Some(HashSet::from([Platform::Macos]));
|
||||
|
||||
let mut sub_cmd = create_basic_extension("sub-cmd", ExtensionType::Command);
|
||||
sub_cmd.action = Some(create_command_action());
|
||||
sub_cmd.platforms = None;
|
||||
|
||||
main_extension.commands = Some(vec![sub_cmd]);
|
||||
|
||||
let result = general_check(&main_extension);
|
||||
assert!(result.is_ok());
|
||||
}
|
||||
/*
|
||||
* Tests for check that sub extension cannot support extensions that are not
|
||||
* supported by the main extension
|
||||
*
|
||||
* End here
|
||||
*/
|
||||
}
|
||||
249
src-tauri/src/extension/third_party/install/local_extension.rs
vendored
Normal file
249
src-tauri/src/extension/third_party/install/local_extension.rs
vendored
Normal file
@@ -0,0 +1,249 @@
|
||||
use crate::extension::PLUGIN_JSON_FILE_NAME;
|
||||
use crate::extension::third_party::check::general_check;
|
||||
use crate::extension::third_party::install::{
|
||||
filter_out_incompatible_sub_extensions, is_extension_installed,
|
||||
};
|
||||
use crate::extension::third_party::{
|
||||
THIRD_PARTY_EXTENSIONS_SEARCH_SOURCE, get_third_party_extension_directory,
|
||||
};
|
||||
use crate::extension::{Extension, canonicalize_relative_icon_path};
|
||||
use crate::util::platform::Platform;
|
||||
use serde_json::Value as Json;
|
||||
use std::path::Path;
|
||||
use std::path::PathBuf;
|
||||
use tauri::AppHandle;
|
||||
use tokio::fs;
|
||||
|
||||
/// All the extensions installed from local file will belong to a special developer
|
||||
/// "__local__".
|
||||
const DEVELOPER_ID_LOCAL: &str = "__local__";
|
||||
|
||||
/// Install the extension specified by `path`.
|
||||
///
|
||||
/// `path` should point to a directory with the following structure:
|
||||
///
|
||||
/// ```text
|
||||
/// extension-directory/
|
||||
/// ├── assets/
|
||||
/// │ ├── icon.png
|
||||
/// │ └── other-assets...
|
||||
/// └── plugin.json
|
||||
/// ```
|
||||
#[tauri::command]
|
||||
pub(crate) async fn install_local_extension(
|
||||
tauri_app_handle: AppHandle,
|
||||
path: PathBuf,
|
||||
) -> Result<(), String> {
|
||||
let extension_dir_name = path
|
||||
.file_name()
|
||||
.ok_or_else(|| "Invalid extension: no directory name".to_string())?
|
||||
.to_str()
|
||||
.ok_or_else(|| "Invalid extension: non-UTF8 extension id".to_string())?;
|
||||
|
||||
// we use extension directory name as the extension ID.
|
||||
let extension_id = extension_dir_name;
|
||||
if is_extension_installed(DEVELOPER_ID_LOCAL, extension_id).await {
|
||||
// The frontend code uses this string to distinguish between 2 error cases:
|
||||
//
|
||||
// 1. This extension is already imported
|
||||
// 2. This extension is incompatible with the current platform
|
||||
// 3. The selected directory does not contain a valid extension
|
||||
//
|
||||
// do NOT edit this without updating the frontend code.
|
||||
//
|
||||
// ```ts
|
||||
// if (errorMessage === "already imported") {
|
||||
// addError(t("settings.extensions.hints.extensionAlreadyImported"));
|
||||
// } else if (errorMessage === "incompatible") {
|
||||
// addError(t("settings.extensions.hints.incompatibleExtension"));
|
||||
// } else {
|
||||
// addError(t("settings.extensions.hints.importFailed"));
|
||||
// }
|
||||
// ```
|
||||
//
|
||||
// This is definitely error-prone, but we have to do this until we have
|
||||
// structured error type
|
||||
return Err("already imported".into());
|
||||
}
|
||||
|
||||
let plugin_json_path = path.join(PLUGIN_JSON_FILE_NAME);
|
||||
|
||||
let plugin_json_content = fs::read_to_string(&plugin_json_path)
|
||||
.await
|
||||
.map_err(|e| e.to_string())?;
|
||||
|
||||
// Parse as JSON first as it is not valid for `struct Extension`, we need to
|
||||
// correct it (set fields `id` and `developer`) before converting it to `struct Extension`:
|
||||
let mut extension_json: Json =
|
||||
serde_json::from_str(&plugin_json_content).map_err(|e| e.to_string())?;
|
||||
|
||||
// Set the main extension ID to the directory name
|
||||
let extension_obj = extension_json
|
||||
.as_object_mut()
|
||||
.expect("extension_json should be an object");
|
||||
extension_obj.insert("id".to_string(), Json::String(extension_id.to_string()));
|
||||
extension_obj.insert(
|
||||
"developer".to_string(),
|
||||
Json::String(DEVELOPER_ID_LOCAL.to_string()),
|
||||
);
|
||||
|
||||
// Counter for sub-extension IDs
|
||||
let mut counter = 1u32;
|
||||
|
||||
// Set IDs for commands
|
||||
if let Some(commands) = extension_obj.get_mut("commands") {
|
||||
if let Some(commands_array) = commands.as_array_mut() {
|
||||
for command in commands_array {
|
||||
if let Some(command_obj) = command.as_object_mut() {
|
||||
command_obj.insert("id".to_string(), Json::String(counter.to_string()));
|
||||
counter += 1;
|
||||
}
|
||||
}
|
||||
}
|
||||
}
|
||||
|
||||
// Set IDs for quicklinks
|
||||
if let Some(quicklinks) = extension_obj.get_mut("quicklinks") {
|
||||
if let Some(quicklinks_array) = quicklinks.as_array_mut() {
|
||||
for quicklink in quicklinks_array {
|
||||
if let Some(quicklink_obj) = quicklink.as_object_mut() {
|
||||
quicklink_obj.insert("id".to_string(), Json::String(counter.to_string()));
|
||||
counter += 1;
|
||||
}
|
||||
}
|
||||
}
|
||||
}
|
||||
|
||||
// Set IDs for scripts
|
||||
if let Some(scripts) = extension_obj.get_mut("scripts") {
|
||||
if let Some(scripts_array) = scripts.as_array_mut() {
|
||||
for script in scripts_array {
|
||||
if let Some(script_obj) = script.as_object_mut() {
|
||||
script_obj.insert("id".to_string(), Json::String(counter.to_string()));
|
||||
counter += 1;
|
||||
}
|
||||
}
|
||||
}
|
||||
}
|
||||
|
||||
// Now we can convert JSON to `struct Extension`
|
||||
let mut extension: Extension =
|
||||
serde_json::from_value(extension_json).map_err(|e| e.to_string())?;
|
||||
|
||||
let current_platform = Platform::current();
|
||||
/* Check begins here */
|
||||
general_check(&extension)?;
|
||||
|
||||
if let Some(ref platforms) = extension.platforms {
|
||||
if !platforms.contains(¤t_platform) {
|
||||
// The frontend code uses this string to distinguish between 3 error cases:
|
||||
//
|
||||
// 1. This extension is already imported
|
||||
// 2. This extension is incompatible with the current platform
|
||||
// 3. The selected directory does not contain a valid extension
|
||||
//
|
||||
// do NOT edit this without updating the frontend code.
|
||||
//
|
||||
// ```ts
|
||||
// if (errorMessage === "already imported") {
|
||||
// addError(t("settings.extensions.hints.extensionAlreadyImported"));
|
||||
// } else if (errorMessage === "incompatible") {
|
||||
// addError(t("settings.extensions.hints.incompatibleExtension"));
|
||||
// } else {
|
||||
// addError(t("settings.extensions.hints.importFailed"));
|
||||
// }
|
||||
// ```
|
||||
//
|
||||
// This is definitely error-prone, but we have to do this until we have
|
||||
// structured error type
|
||||
return Err("incompatible".into());
|
||||
}
|
||||
}
|
||||
/* Check ends here */
|
||||
|
||||
// Extension is compatible with current platform, but it could contain sub
|
||||
// extensions that are not, filter them out.
|
||||
filter_out_incompatible_sub_extensions(&mut extension, current_platform);
|
||||
|
||||
// Create destination directory
|
||||
let dest_dir = get_third_party_extension_directory(&tauri_app_handle)
|
||||
.join(DEVELOPER_ID_LOCAL)
|
||||
.join(extension_dir_name);
|
||||
|
||||
fs::create_dir_all(&dest_dir)
|
||||
.await
|
||||
.map_err(|e| e.to_string())?;
|
||||
|
||||
// Copy all files except plugin.json
|
||||
let mut entries = fs::read_dir(&path).await.map_err(|e| e.to_string())?;
|
||||
|
||||
while let Some(entry) = entries.next_entry().await.map_err(|e| e.to_string())? {
|
||||
let file_name = entry.file_name();
|
||||
let file_name_str = file_name
|
||||
.to_str()
|
||||
.ok_or_else(|| "Invalid filename: non-UTF8".to_string())?;
|
||||
|
||||
// plugin.json will be handled separately.
|
||||
if file_name_str == PLUGIN_JSON_FILE_NAME {
|
||||
continue;
|
||||
}
|
||||
|
||||
let src_path = entry.path();
|
||||
let dest_path = dest_dir.join(&file_name);
|
||||
|
||||
if src_path.is_dir() {
|
||||
// Recursively copy directory
|
||||
copy_dir_recursively(&src_path, &dest_path).await?;
|
||||
} else {
|
||||
// Copy file
|
||||
fs::copy(&src_path, &dest_path)
|
||||
.await
|
||||
.map_err(|e| e.to_string())?;
|
||||
}
|
||||
}
|
||||
|
||||
// Write the corrected plugin.json file
|
||||
let corrected_plugin_json =
|
||||
serde_json::to_string_pretty(&extension).map_err(|e| e.to_string())?;
|
||||
|
||||
let dest_plugin_json_path = dest_dir.join(PLUGIN_JSON_FILE_NAME);
|
||||
fs::write(&dest_plugin_json_path, corrected_plugin_json)
|
||||
.await
|
||||
.map_err(|e| e.to_string())?;
|
||||
|
||||
// Canonicalize relative icon paths
|
||||
canonicalize_relative_icon_path(&dest_dir, &mut extension)?;
|
||||
|
||||
// Add extension to the search source
|
||||
THIRD_PARTY_EXTENSIONS_SEARCH_SOURCE
|
||||
.get()
|
||||
.unwrap()
|
||||
.add_extension(extension)
|
||||
.await;
|
||||
|
||||
Ok(())
|
||||
}
|
||||
|
||||
/// Helper function to recursively copy directories.
|
||||
#[async_recursion::async_recursion]
|
||||
async fn copy_dir_recursively(src: &Path, dest: &Path) -> Result<(), String> {
|
||||
tokio::fs::create_dir_all(dest)
|
||||
.await
|
||||
.map_err(|e| e.to_string())?;
|
||||
let mut read_dir = tokio::fs::read_dir(src).await.map_err(|e| e.to_string())?;
|
||||
|
||||
while let Some(entry) = read_dir.next_entry().await.map_err(|e| e.to_string())? {
|
||||
let src_path = entry.path();
|
||||
let dest_path = dest.join(entry.file_name());
|
||||
|
||||
if src_path.is_dir() {
|
||||
copy_dir_recursively(&src_path, &dest_path).await?;
|
||||
} else {
|
||||
tokio::fs::copy(&src_path, &dest_path)
|
||||
.await
|
||||
.map_err(|e| e.to_string())?;
|
||||
}
|
||||
}
|
||||
|
||||
Ok(())
|
||||
}
|
||||
224
src-tauri/src/extension/third_party/install/mod.rs
vendored
Normal file
224
src-tauri/src/extension/third_party/install/mod.rs
vendored
Normal file
@@ -0,0 +1,224 @@
|
||||
//! This module contains the code of extension installation.
|
||||
//!
|
||||
//!
|
||||
//! # How
|
||||
//!
|
||||
//! Technically, installing an extension involves the following steps:
|
||||
//!
|
||||
//! 1. Correct the `plugin.json` JSON if it does not conform to our `struct Extension`
|
||||
//! definition.
|
||||
//!
|
||||
//! 2. Write the extension files to the corresponding location
|
||||
//!
|
||||
//! * developer directory
|
||||
//! * extension directory
|
||||
//! * assets directory
|
||||
//! * various assets files, e.g., "icon.png"
|
||||
//! * plugin.json file
|
||||
//!
|
||||
//! 3. Canonicalize the `Extension.icon` fields if they are relative paths
|
||||
//! (relative to the `assets` directory)
|
||||
//!
|
||||
//! 4. Deserialize the `plugin.json` file to a `struct Extension`, and call
|
||||
//! `THIRD_PARTY_EXTENSIONS_DIRECTORY.add_extension(extension)` to add it to
|
||||
//! the in-memory extension list.
|
||||
|
||||
pub(crate) mod local_extension;
|
||||
pub(crate) mod store;
|
||||
|
||||
use crate::extension::Extension;
|
||||
use crate::util::platform::Platform;
|
||||
|
||||
use super::THIRD_PARTY_EXTENSIONS_SEARCH_SOURCE;
|
||||
|
||||
pub(crate) async fn is_extension_installed(developer: &str, extension_id: &str) -> bool {
|
||||
THIRD_PARTY_EXTENSIONS_SEARCH_SOURCE
|
||||
.get()
|
||||
.unwrap()
|
||||
.extension_exists(developer, extension_id)
|
||||
.await
|
||||
}
|
||||
|
||||
/// Filters out sub-extensions that are not compatible with the current platform.
|
||||
///
|
||||
/// We make `current_platform` an argument so that this function is testable.
|
||||
pub(crate) fn filter_out_incompatible_sub_extensions(
|
||||
extension: &mut Extension,
|
||||
current_platform: Platform,
|
||||
) {
|
||||
// Only process extensions of type Group or Extension that can have sub-extensions
|
||||
if !extension.r#type.contains_sub_items() {
|
||||
return;
|
||||
}
|
||||
|
||||
// Filter commands
|
||||
if let Some(ref mut commands) = extension.commands {
|
||||
commands.retain(|sub_ext| {
|
||||
// If platforms is None, the sub-extension is compatible with all platforms
|
||||
if let Some(ref platforms) = sub_ext.platforms {
|
||||
platforms.contains(¤t_platform)
|
||||
} else {
|
||||
true
|
||||
}
|
||||
});
|
||||
}
|
||||
|
||||
// Filter scripts
|
||||
if let Some(ref mut scripts) = extension.scripts {
|
||||
scripts.retain(|sub_ext| {
|
||||
// If platforms is None, the sub-extension is compatible with all platforms
|
||||
if let Some(ref platforms) = sub_ext.platforms {
|
||||
platforms.contains(¤t_platform)
|
||||
} else {
|
||||
true
|
||||
}
|
||||
});
|
||||
}
|
||||
|
||||
// Filter quicklinks
|
||||
if let Some(ref mut quicklinks) = extension.quicklinks {
|
||||
quicklinks.retain(|sub_ext| {
|
||||
// If platforms is None, the sub-extension is compatible with all platforms
|
||||
if let Some(ref platforms) = sub_ext.platforms {
|
||||
platforms.contains(¤t_platform)
|
||||
} else {
|
||||
true
|
||||
}
|
||||
});
|
||||
}
|
||||
}
|
||||
|
||||
#[cfg(test)]
|
||||
mod tests {
|
||||
use super::*;
|
||||
use crate::extension::ExtensionType;
|
||||
use std::collections::HashSet;
|
||||
|
||||
/// Helper function to create a basic extension for testing
|
||||
/// `filter_out_incompatible_sub_extensions`
|
||||
fn create_test_extension(
|
||||
extension_type: ExtensionType,
|
||||
platforms: Option<HashSet<Platform>>,
|
||||
) -> Extension {
|
||||
Extension {
|
||||
id: "ID".into(),
|
||||
name: "name".into(),
|
||||
developer: None,
|
||||
platforms,
|
||||
description: "Test extension".to_string(),
|
||||
icon: "test-icon".to_string(),
|
||||
r#type: extension_type,
|
||||
action: None,
|
||||
quicklink: None,
|
||||
commands: None,
|
||||
scripts: None,
|
||||
quicklinks: None,
|
||||
alias: None,
|
||||
hotkey: None,
|
||||
enabled: true,
|
||||
settings: None,
|
||||
screenshots: None,
|
||||
url: None,
|
||||
version: None,
|
||||
}
|
||||
}
|
||||
|
||||
#[test]
|
||||
fn test_filter_out_incompatible_sub_extensions_filter_non_group_extension_unchanged() {
|
||||
// Command
|
||||
let mut extension = create_test_extension(ExtensionType::Command, None);
|
||||
let clone = extension.clone();
|
||||
filter_out_incompatible_sub_extensions(&mut extension, Platform::Linux);
|
||||
assert_eq!(extension, clone);
|
||||
|
||||
// Quicklink
|
||||
let mut extension = create_test_extension(ExtensionType::Quicklink, None);
|
||||
let clone = extension.clone();
|
||||
filter_out_incompatible_sub_extensions(&mut extension, Platform::Linux);
|
||||
assert_eq!(extension, clone);
|
||||
}
|
||||
|
||||
#[test]
|
||||
fn test_filter_out_incompatible_sub_extensions() {
|
||||
let mut main_extension = create_test_extension(ExtensionType::Group, None);
|
||||
// init sub extensions, which are macOS-only
|
||||
let commands = vec![create_test_extension(
|
||||
ExtensionType::Command,
|
||||
Some(HashSet::from([Platform::Macos])),
|
||||
)];
|
||||
let quicklinks = vec![create_test_extension(
|
||||
ExtensionType::Quicklink,
|
||||
Some(HashSet::from([Platform::Macos])),
|
||||
)];
|
||||
let scripts = vec![create_test_extension(
|
||||
ExtensionType::Script,
|
||||
Some(HashSet::from([Platform::Macos])),
|
||||
)];
|
||||
// Set sub extensions
|
||||
main_extension.commands = Some(commands);
|
||||
main_extension.quicklinks = Some(quicklinks);
|
||||
main_extension.scripts = Some(scripts);
|
||||
|
||||
// Current platform is Linux, all the sub extensions should be filtered out.
|
||||
filter_out_incompatible_sub_extensions(&mut main_extension, Platform::Linux);
|
||||
|
||||
// assertions
|
||||
assert!(main_extension.commands.unwrap().is_empty());
|
||||
assert!(main_extension.quicklinks.unwrap().is_empty());
|
||||
assert!(main_extension.scripts.unwrap().is_empty());
|
||||
}
|
||||
|
||||
/// Sub extensions are compatible with all the platforms, nothing to filter out.
|
||||
#[test]
|
||||
fn test_filter_out_incompatible_sub_extensions_all_compatible() {
|
||||
{
|
||||
let mut main_extension = create_test_extension(ExtensionType::Group, None);
|
||||
// init sub extensions, which are compatible with all the platforms
|
||||
let commands = vec![create_test_extension(
|
||||
ExtensionType::Command,
|
||||
Some(Platform::all()),
|
||||
)];
|
||||
let quicklinks = vec![create_test_extension(
|
||||
ExtensionType::Quicklink,
|
||||
Some(Platform::all()),
|
||||
)];
|
||||
let scripts = vec![create_test_extension(
|
||||
ExtensionType::Script,
|
||||
Some(Platform::all()),
|
||||
)];
|
||||
// Set sub extensions
|
||||
main_extension.commands = Some(commands);
|
||||
main_extension.quicklinks = Some(quicklinks);
|
||||
main_extension.scripts = Some(scripts);
|
||||
|
||||
// Current platform is Linux, all the sub extensions should be filtered out.
|
||||
filter_out_incompatible_sub_extensions(&mut main_extension, Platform::Linux);
|
||||
|
||||
// assertions
|
||||
assert_eq!(main_extension.commands.unwrap().len(), 1);
|
||||
assert_eq!(main_extension.quicklinks.unwrap().len(), 1);
|
||||
assert_eq!(main_extension.scripts.unwrap().len(), 1);
|
||||
}
|
||||
|
||||
// `platforms: None` means all platforms as well
|
||||
{
|
||||
let mut main_extension = create_test_extension(ExtensionType::Group, None);
|
||||
// init sub extensions, which are compatible with all the platforms
|
||||
let commands = vec![create_test_extension(ExtensionType::Command, None)];
|
||||
let quicklinks = vec![create_test_extension(ExtensionType::Quicklink, None)];
|
||||
let scripts = vec![create_test_extension(ExtensionType::Script, None)];
|
||||
// Set sub extensions
|
||||
main_extension.commands = Some(commands);
|
||||
main_extension.quicklinks = Some(quicklinks);
|
||||
main_extension.scripts = Some(scripts);
|
||||
|
||||
// Current platform is Linux, all the sub extensions should be filtered out.
|
||||
filter_out_incompatible_sub_extensions(&mut main_extension, Platform::Linux);
|
||||
|
||||
// assertions
|
||||
assert_eq!(main_extension.commands.unwrap().len(), 1);
|
||||
assert_eq!(main_extension.quicklinks.unwrap().len(), 1);
|
||||
assert_eq!(main_extension.scripts.unwrap().len(), 1);
|
||||
}
|
||||
}
|
||||
}
|
||||
@@ -1,6 +1,7 @@
|
||||
//! Extension store related stuff.
|
||||
|
||||
use super::LOCAL_QUERY_SOURCE_TYPE;
|
||||
use super::super::LOCAL_QUERY_SOURCE_TYPE;
|
||||
use super::is_extension_installed;
|
||||
use crate::common::document::DataSourceReference;
|
||||
use crate::common::document::Document;
|
||||
use crate::common::error::SearchError;
|
||||
@@ -8,16 +9,21 @@ use crate::common::search::QueryResponse;
|
||||
use crate::common::search::QuerySource;
|
||||
use crate::common::search::SearchQuery;
|
||||
use crate::common::traits::SearchSource;
|
||||
use crate::extension::canonicalize_relative_icon_path;
|
||||
use crate::extension::third_party::THIRD_PARTY_EXTENSIONS_DIRECTORY;
|
||||
use crate::extension::Extension;
|
||||
use crate::extension::PLUGIN_JSON_FILE_NAME;
|
||||
use crate::extension::THIRD_PARTY_EXTENSIONS_SEARCH_SOURCE;
|
||||
use crate::extension::canonicalize_relative_icon_path;
|
||||
use crate::extension::third_party::check::general_check;
|
||||
use crate::extension::third_party::get_third_party_extension_directory;
|
||||
use crate::extension::third_party::install::filter_out_incompatible_sub_extensions;
|
||||
use crate::server::http_client::HttpClient;
|
||||
use crate::util::platform::Platform;
|
||||
use async_trait::async_trait;
|
||||
use reqwest::StatusCode;
|
||||
use serde_json::Map as JsonObject;
|
||||
use serde_json::Value as Json;
|
||||
use std::io::Read;
|
||||
use tauri::AppHandle;
|
||||
|
||||
const DATA_SOURCE_ID: &str = "Extension Store";
|
||||
|
||||
@@ -36,7 +42,11 @@ impl SearchSource for ExtensionStore {
|
||||
}
|
||||
}
|
||||
|
||||
async fn search(&self, query: SearchQuery) -> Result<QueryResponse, SearchError> {
|
||||
async fn search(
|
||||
&self,
|
||||
_tauri_app_handle: AppHandle,
|
||||
query: SearchQuery,
|
||||
) -> Result<QueryResponse, SearchError> {
|
||||
const SCORE: f64 = 2000.0;
|
||||
|
||||
let Some(query_string) = query.query_strings.get("query") else {
|
||||
@@ -146,14 +156,12 @@ pub(crate) async fn search_extension(
|
||||
.get("developer")
|
||||
.and_then(|dev| dev.get("id"))
|
||||
.and_then(|id| id.as_str())
|
||||
.expect("developer.id should exist")
|
||||
.to_string();
|
||||
.expect("developer.id should exist");
|
||||
|
||||
let extension_id = source_obj
|
||||
.get("id")
|
||||
.and_then(|id| id.as_str())
|
||||
.expect("extension id should exist")
|
||||
.to_string();
|
||||
.expect("extension id should exist");
|
||||
|
||||
let installed = is_extension_installed(developer_id, extension_id).await;
|
||||
source_obj.insert("installed".to_string(), Json::Bool(installed));
|
||||
@@ -164,16 +172,11 @@ pub(crate) async fn search_extension(
|
||||
Ok(extensions)
|
||||
}
|
||||
|
||||
async fn is_extension_installed(developer: String, extension_id: String) -> bool {
|
||||
THIRD_PARTY_EXTENSIONS_SEARCH_SOURCE
|
||||
.get()
|
||||
.unwrap()
|
||||
.extension_exists(&developer, &extension_id)
|
||||
.await
|
||||
}
|
||||
|
||||
#[tauri::command]
|
||||
pub(crate) async fn install_extension(id: String) -> Result<(), String> {
|
||||
pub(crate) async fn install_extension_from_store(
|
||||
tauri_app_handle: AppHandle,
|
||||
id: String,
|
||||
) -> Result<(), String> {
|
||||
let path = format!("store/extension/{}/_download", id);
|
||||
let response = HttpClient::get("default_coco_server", &path, None)
|
||||
.await
|
||||
@@ -192,7 +195,15 @@ pub(crate) async fn install_extension(id: String) -> Result<(), String> {
|
||||
let mut archive =
|
||||
zip::ZipArchive::new(cursor).map_err(|e| format!("Failed to read zip archive: {}", e))?;
|
||||
|
||||
let mut plugin_json = archive.by_name("plugin.json").map_err(|e| e.to_string())?;
|
||||
// The plugin.json sent from the server does not conform to our `struct Extension` definition:
|
||||
//
|
||||
// 1. Its `developer` field is a JSON object, but we need a string
|
||||
// 2. sub-extensions won't have their `id` fields set
|
||||
//
|
||||
// we need to correct it
|
||||
let mut plugin_json = archive
|
||||
.by_name(PLUGIN_JSON_FILE_NAME)
|
||||
.map_err(|e| e.to_string())?;
|
||||
let mut plugin_json_content = String::new();
|
||||
std::io::Read::read_to_string(&mut plugin_json, &mut plugin_json_content)
|
||||
.map_err(|e| e.to_string())?;
|
||||
@@ -213,7 +224,6 @@ pub(crate) async fn install_extension(id: String) -> Result<(), String> {
|
||||
|
||||
// Set IDs for sub-extensions (commands, quicklinks, scripts)
|
||||
let mut counter = 0;
|
||||
// Set IDs for commands
|
||||
// Helper function to set IDs for array fields
|
||||
fn set_ids_for_field(extension: &mut Json, field_name: &str, counter: &mut i32) {
|
||||
if let Some(field) = extension.as_object_mut().unwrap().get_mut(field_name) {
|
||||
@@ -229,12 +239,11 @@ pub(crate) async fn install_extension(id: String) -> Result<(), String> {
|
||||
}
|
||||
}
|
||||
}
|
||||
|
||||
// Set IDs for sub-extensions
|
||||
set_ids_for_field(&mut extension, "commands", &mut counter);
|
||||
set_ids_for_field(&mut extension, "quicklinks", &mut counter);
|
||||
set_ids_for_field(&mut extension, "scripts", &mut counter);
|
||||
|
||||
// Now the extension JSON is valid
|
||||
let mut extension: Extension = serde_json::from_value(extension).unwrap_or_else(|e| {
|
||||
panic!(
|
||||
"cannot parse plugin.json as struct Extension, error [{:?}]",
|
||||
@@ -244,57 +253,74 @@ pub(crate) async fn install_extension(id: String) -> Result<(), String> {
|
||||
|
||||
drop(plugin_json);
|
||||
|
||||
general_check(&extension)?;
|
||||
|
||||
// Extension is compatible with current platform, but it could contain sub
|
||||
// extensions that are not, filter them out.
|
||||
filter_out_incompatible_sub_extensions(&mut extension, Platform::current());
|
||||
|
||||
// Write extension files to the extension directory
|
||||
let developer = extension.developer.clone().unwrap_or_default();
|
||||
let extension_id = extension.id.clone();
|
||||
|
||||
// Extract the zip file
|
||||
let extension_directory = {
|
||||
let mut path = THIRD_PARTY_EXTENSIONS_DIRECTORY.to_path_buf();
|
||||
let mut path = get_third_party_extension_directory(&tauri_app_handle);
|
||||
path.push(developer);
|
||||
path.push(extension_id.as_str());
|
||||
path
|
||||
};
|
||||
|
||||
tokio::fs::create_dir_all(extension_directory.as_path())
|
||||
.await
|
||||
.map_err(|e| e.to_string())?;
|
||||
|
||||
// Extract all files except plugin.json
|
||||
for i in 0..archive.len() {
|
||||
let mut file = archive.by_index(i).map_err(|e| e.to_string())?;
|
||||
let outpath = match file.enclosed_name() {
|
||||
Some(path) => extension_directory.join(path),
|
||||
None => continue,
|
||||
};
|
||||
let mut zip_file = archive.by_index(i).map_err(|e| e.to_string())?;
|
||||
// `.name()` is safe to use in our cases, the cases listed in the below
|
||||
// page won't happen to us.
|
||||
//
|
||||
// https://docs.rs/zip/4.2.0/zip/read/struct.ZipFile.html#method.name
|
||||
//
|
||||
// Example names:
|
||||
//
|
||||
// * `assets/icon.png`
|
||||
// * `assets/screenshot.png`
|
||||
// * `plugin.json`
|
||||
//
|
||||
// Yes, the `assets` directory is not a part of it.
|
||||
let zip_file_name = zip_file.name();
|
||||
|
||||
// Skip the plugin.json file as we'll create it from the extension variable
|
||||
if file.name() == "plugin.json" {
|
||||
if zip_file_name == PLUGIN_JSON_FILE_NAME {
|
||||
continue;
|
||||
}
|
||||
|
||||
if file.name().ends_with('/') {
|
||||
tokio::fs::create_dir_all(&outpath)
|
||||
.await
|
||||
.map_err(|e| e.to_string())?;
|
||||
} else {
|
||||
if let Some(p) = outpath.parent() {
|
||||
if !p.exists() {
|
||||
tokio::fs::create_dir_all(p)
|
||||
.await
|
||||
.map_err(|e| e.to_string())?;
|
||||
}
|
||||
}
|
||||
let mut outfile = tokio::fs::File::create(&outpath)
|
||||
.await
|
||||
.map_err(|e| e.to_string())?;
|
||||
let mut content = Vec::new();
|
||||
std::io::Read::read_to_end(&mut file, &mut content).map_err(|e| e.to_string())?;
|
||||
tokio::io::AsyncWriteExt::write_all(&mut outfile, &content)
|
||||
let dest_file_path = extension_directory.join(zip_file_name);
|
||||
|
||||
// For cases like `assets/xxx.png`
|
||||
if let Some(parent_dir) = dest_file_path.parent()
|
||||
&& !parent_dir.exists()
|
||||
{
|
||||
tokio::fs::create_dir_all(parent_dir)
|
||||
.await
|
||||
.map_err(|e| e.to_string())?;
|
||||
}
|
||||
}
|
||||
|
||||
let mut dest_file = tokio::fs::File::create(&dest_file_path)
|
||||
.await
|
||||
.map_err(|e| e.to_string())?;
|
||||
let mut src_bytes = Vec::with_capacity(
|
||||
zip_file
|
||||
.size()
|
||||
.try_into()
|
||||
.expect("we won't have a extension file that is bigger than 4GiB"),
|
||||
);
|
||||
zip_file
|
||||
.read_to_end(&mut src_bytes)
|
||||
.map_err(|e| e.to_string())?;
|
||||
tokio::io::copy(&mut src_bytes.as_slice(), &mut dest_file)
|
||||
.await
|
||||
.map_err(|e| e.to_string())?;
|
||||
}
|
||||
// Create plugin.json from the extension variable
|
||||
let plugin_json_path = extension_directory.join(PLUGIN_JSON_FILE_NAME);
|
||||
let extension_json = serde_json::to_string_pretty(&extension).map_err(|e| e.to_string())?;
|
||||
@@ -313,33 +339,3 @@ pub(crate) async fn install_extension(id: String) -> Result<(), String> {
|
||||
|
||||
Ok(())
|
||||
}
|
||||
|
||||
#[tauri::command]
|
||||
pub(crate) async fn uninstall_extension(
|
||||
developer: String,
|
||||
extension_id: String,
|
||||
) -> Result<(), String> {
|
||||
let extension_dir = {
|
||||
let mut path = THIRD_PARTY_EXTENSIONS_DIRECTORY.join(developer.as_str());
|
||||
path.push(extension_id.as_str());
|
||||
|
||||
path
|
||||
};
|
||||
if !extension_dir.try_exists().map_err(|e| e.to_string())? {
|
||||
panic!(
|
||||
"we are uninstalling extension [{}/{}], but there is no such extension files on disk",
|
||||
developer, extension_id
|
||||
)
|
||||
}
|
||||
tokio::fs::remove_dir_all(extension_dir.as_path())
|
||||
.await
|
||||
.map_err(|e| e.to_string())?;
|
||||
|
||||
THIRD_PARTY_EXTENSIONS_SEARCH_SOURCE
|
||||
.get()
|
||||
.unwrap()
|
||||
.remove_extension(&developer, &extension_id)
|
||||
.await;
|
||||
|
||||
Ok(())
|
||||
}
|
||||
@@ -1,66 +1,52 @@
|
||||
use super::alter_extension_json_file;
|
||||
use super::canonicalize_relative_icon_path;
|
||||
pub(crate) mod check;
|
||||
pub(crate) mod install;
|
||||
|
||||
use super::Extension;
|
||||
use super::ExtensionType;
|
||||
use super::Platform;
|
||||
use super::LOCAL_QUERY_SOURCE_TYPE;
|
||||
use super::PLUGIN_JSON_FILE_NAME;
|
||||
use crate::common::document::open;
|
||||
use super::alter_extension_json_file;
|
||||
use super::canonicalize_relative_icon_path;
|
||||
use crate::common::document::DataSourceReference;
|
||||
use crate::common::document::Document;
|
||||
use crate::common::document::open;
|
||||
use crate::common::error::SearchError;
|
||||
use crate::common::search::QueryResponse;
|
||||
use crate::common::search::QuerySource;
|
||||
use crate::common::search::SearchQuery;
|
||||
use crate::common::traits::SearchSource;
|
||||
use crate::extension::ExtensionBundleIdBorrowed;
|
||||
use crate::GLOBAL_TAURI_APP_HANDLE;
|
||||
use crate::util::platform::Platform;
|
||||
use async_trait::async_trait;
|
||||
use borrowme::ToOwned;
|
||||
use check::general_check;
|
||||
use function_name::named;
|
||||
use std::ffi::OsStr;
|
||||
use std::path::Path;
|
||||
use std::path::PathBuf;
|
||||
use std::sync::Arc;
|
||||
use std::sync::LazyLock;
|
||||
use std::sync::OnceLock;
|
||||
use tauri::async_runtime;
|
||||
use tauri::AppHandle;
|
||||
use tauri::Manager;
|
||||
use tauri::async_runtime;
|
||||
use tauri_plugin_global_shortcut::GlobalShortcutExt;
|
||||
use tauri_plugin_global_shortcut::ShortcutState;
|
||||
use tokio::fs::read_dir;
|
||||
use tokio::sync::RwLock;
|
||||
use tokio::sync::RwLockWriteGuard;
|
||||
|
||||
pub(crate) static THIRD_PARTY_EXTENSIONS_DIRECTORY: LazyLock<PathBuf> = LazyLock::new(|| {
|
||||
let mut app_data_dir = GLOBAL_TAURI_APP_HANDLE
|
||||
.get()
|
||||
.expect("global tauri app handle not set")
|
||||
.path()
|
||||
.app_data_dir()
|
||||
.expect(
|
||||
"User home directory not found, which should be impossible on desktop environments",
|
||||
);
|
||||
pub(crate) fn get_third_party_extension_directory(tauri_app_handle: &AppHandle) -> PathBuf {
|
||||
let mut app_data_dir = tauri_app_handle.path().app_data_dir().expect(
|
||||
"User home directory not found, which should be impossible on desktop environments",
|
||||
);
|
||||
app_data_dir.push("third_party_extensions");
|
||||
|
||||
app_data_dir
|
||||
});
|
||||
|
||||
/// Helper function to determine the current platform.
|
||||
fn current_platform() -> Platform {
|
||||
let os_str = std::env::consts::OS;
|
||||
serde_plain::from_str(os_str).unwrap_or_else(|_e| {
|
||||
panic!("std::env::consts::OS is [{}], which is not a valid value for [enum Platform], valid values: ['macos', 'linux', 'windows']", os_str)
|
||||
})
|
||||
}
|
||||
|
||||
pub(crate) async fn list_third_party_extensions(
|
||||
directory: &Path,
|
||||
) -> Result<(bool, Vec<Extension>), String> {
|
||||
let mut found_invalid_extensions = false;
|
||||
|
||||
) -> Result<Vec<Extension>, String> {
|
||||
let mut extensions_dir_iter = read_dir(&directory).await.map_err(|e| e.to_string())?;
|
||||
let current_platform = current_platform();
|
||||
let current_platform = Platform::current();
|
||||
|
||||
let mut extensions = Vec::new();
|
||||
|
||||
@@ -74,7 +60,6 @@ pub(crate) async fn list_third_party_extensions(
|
||||
};
|
||||
let developer_dir_file_type = developer_dir.file_type().await.map_err(|e| e.to_string())?;
|
||||
if !developer_dir_file_type.is_dir() {
|
||||
found_invalid_extensions = true;
|
||||
log::warn!(
|
||||
"file [{}] under the third party extension directory should be a directory, but it is not",
|
||||
developer_dir.file_name().display()
|
||||
@@ -84,18 +69,6 @@ pub(crate) async fn list_third_party_extensions(
|
||||
continue 'developer;
|
||||
}
|
||||
|
||||
let Ok(developer) = developer_dir.file_name().into_string() else {
|
||||
found_invalid_extensions = true;
|
||||
|
||||
log::warn!(
|
||||
"developer [{}] ID is not UTF-8 encoded",
|
||||
developer_dir.file_name().display()
|
||||
);
|
||||
|
||||
// Skip this file
|
||||
continue 'developer;
|
||||
};
|
||||
|
||||
let mut developer_dir_iter = read_dir(&developer_dir.path())
|
||||
.await
|
||||
.map_err(|e| e.to_string())?;
|
||||
@@ -108,14 +81,17 @@ pub(crate) async fn list_third_party_extensions(
|
||||
let Some(extension_dir) = opt_extension_dir else {
|
||||
break 'extension;
|
||||
};
|
||||
let extension_dir_file_name = extension_dir
|
||||
.file_name()
|
||||
.into_string()
|
||||
.expect("extension directory name should be UTF-8 encoded");
|
||||
|
||||
let extension_dir_file_type =
|
||||
extension_dir.file_type().await.map_err(|e| e.to_string())?;
|
||||
if !extension_dir_file_type.is_dir() {
|
||||
found_invalid_extensions = true;
|
||||
log::warn!(
|
||||
"invalid extension [{}]: a valid extension should be a directory, but it is not",
|
||||
extension_dir.file_name().display()
|
||||
extension_dir_file_name
|
||||
);
|
||||
|
||||
// Skip invalid extension
|
||||
@@ -130,7 +106,6 @@ pub(crate) async fn list_third_party_extensions(
|
||||
};
|
||||
|
||||
if !plugin_json_file_path.is_file() {
|
||||
found_invalid_extensions = true;
|
||||
log::warn!(
|
||||
"invalid extension: [{}]: extension file [{}] should be a JSON file, but it is not",
|
||||
extension_dir.file_name().display(),
|
||||
@@ -147,10 +122,9 @@ pub(crate) async fn list_third_party_extensions(
|
||||
let mut extension = match serde_json::from_str::<Extension>(&plugin_json_file_content) {
|
||||
Ok(extension) => extension,
|
||||
Err(e) => {
|
||||
found_invalid_extensions = true;
|
||||
log::warn!(
|
||||
"invalid extension: [{}]: extension file [{}] is invalid, error: '{}'",
|
||||
extension_dir.file_name().display(),
|
||||
"invalid extension: [{}]: cannot parse file [{}] as a [struct Extension], error: '{}'",
|
||||
extension_dir_file_name,
|
||||
plugin_json_file_path.display(),
|
||||
e
|
||||
);
|
||||
@@ -158,22 +132,55 @@ pub(crate) async fn list_third_party_extensions(
|
||||
}
|
||||
};
|
||||
|
||||
// Turn it into an absolute path if it is a valid relative path because frontend code need this.
|
||||
canonicalize_relative_icon_path(&extension_dir.path(), &mut extension)?;
|
||||
/* Check starts here */
|
||||
if extension.id != extension_dir_file_name {
|
||||
log::warn!(
|
||||
"extension under [{}:{}] has an ID that is not same as the [{}]",
|
||||
developer_dir.file_name().display(),
|
||||
extension_dir_file_name,
|
||||
extension.id,
|
||||
);
|
||||
|
||||
continue;
|
||||
}
|
||||
|
||||
// Extension should be unique
|
||||
if extensions.iter().any(|ext: &Extension| {
|
||||
ext.id == extension.id && ext.developer == extension.developer
|
||||
}) {
|
||||
log::warn!(
|
||||
"an extension with the same bundle ID [ID {}, developer {:?}] already exists, skip this one",
|
||||
extension.id,
|
||||
extension.developer
|
||||
);
|
||||
|
||||
continue;
|
||||
}
|
||||
|
||||
if let Err(error_msg) = general_check(&extension) {
|
||||
log::warn!("{}", error_msg);
|
||||
|
||||
if !validate_extension(
|
||||
&extension,
|
||||
&extension_dir.file_name(),
|
||||
&extensions,
|
||||
current_platform,
|
||||
) {
|
||||
found_invalid_extensions = true;
|
||||
// Skip invalid extension
|
||||
continue;
|
||||
}
|
||||
|
||||
// Set extension's developer info manually.
|
||||
extension.developer = Some(developer.clone());
|
||||
if let Some(ref platforms) = extension.platforms {
|
||||
if !platforms.contains(¤t_platform) {
|
||||
log::warn!(
|
||||
"installed third-party extension [developer {}, ID {}] is not compatible with current platform, either user messes our directory or something wrong with our extension check",
|
||||
extension
|
||||
.developer
|
||||
.as_ref()
|
||||
.expect("third party extension should have [developer] set"),
|
||||
extension.id
|
||||
);
|
||||
continue;
|
||||
}
|
||||
}
|
||||
/* Check ends here */
|
||||
|
||||
// Turn it into an absolute path if it is a valid relative path because frontend code needs this.
|
||||
canonicalize_relative_icon_path(&extension_dir.path(), &mut extension)?;
|
||||
|
||||
extensions.push(extension);
|
||||
}
|
||||
@@ -187,194 +194,7 @@ pub(crate) async fn list_third_party_extensions(
|
||||
.collect::<Vec<_>>()
|
||||
);
|
||||
|
||||
Ok((found_invalid_extensions, extensions))
|
||||
}
|
||||
|
||||
/// Helper function to validate `extension`, return `true` if it is valid.
|
||||
fn validate_extension(
|
||||
extension: &Extension,
|
||||
extension_dir_name: &OsStr,
|
||||
listed_extensions: &[Extension],
|
||||
current_platform: Platform,
|
||||
) -> bool {
|
||||
if OsStr::new(&extension.id) != extension_dir_name {
|
||||
log::warn!(
|
||||
"invalid extension []: id [{}] and extension directory name [{}] do not match",
|
||||
extension.id,
|
||||
extension_dir_name.display()
|
||||
);
|
||||
return false;
|
||||
}
|
||||
|
||||
// Extension ID should be unique
|
||||
if listed_extensions.iter().any(|ext| ext.id == extension.id) {
|
||||
log::warn!(
|
||||
"invalid extension []: extension with id [{}] already exists",
|
||||
extension.id,
|
||||
);
|
||||
return false;
|
||||
}
|
||||
|
||||
if !validate_extension_or_sub_item(extension) {
|
||||
return false;
|
||||
}
|
||||
|
||||
// Extension is incompatible
|
||||
if let Some(ref platforms) = extension.platforms {
|
||||
if !platforms.contains(¤t_platform) {
|
||||
log::warn!("extension [{}] is not compatible with the current platform [{}], it is available to {:?}", extension.id, current_platform, platforms.iter().map(|os|os.to_string()).collect::<Vec<_>>());
|
||||
return false;
|
||||
}
|
||||
}
|
||||
|
||||
if let Some(ref commands) = extension.commands {
|
||||
if !validate_sub_items(&extension.id, commands) {
|
||||
return false;
|
||||
}
|
||||
}
|
||||
|
||||
if let Some(ref scripts) = extension.scripts {
|
||||
if !validate_sub_items(&extension.id, scripts) {
|
||||
return false;
|
||||
}
|
||||
}
|
||||
|
||||
if let Some(ref quick_links) = extension.quicklinks {
|
||||
if !validate_sub_items(&extension.id, quick_links) {
|
||||
return false;
|
||||
}
|
||||
}
|
||||
|
||||
true
|
||||
}
|
||||
|
||||
/// Checks that can be performed against an extension or a sub item.
|
||||
fn validate_extension_or_sub_item(extension: &Extension) -> bool {
|
||||
// If field `action` is Some, then it should be a Command
|
||||
if extension.action.is_some() && extension.r#type != ExtensionType::Command {
|
||||
log::warn!(
|
||||
"invalid extension [{}], [action] is set for a non-Command extension",
|
||||
extension.id
|
||||
);
|
||||
return false;
|
||||
}
|
||||
|
||||
if extension.r#type == ExtensionType::Command && extension.action.is_none() {
|
||||
log::warn!(
|
||||
"invalid extension [{}], [action] should be set for a Command extension",
|
||||
extension.id
|
||||
);
|
||||
return false;
|
||||
}
|
||||
|
||||
// If field `quick_link` is Some, then it should be a QuickLink
|
||||
if extension.quicklink.is_some() && extension.r#type != ExtensionType::Quicklink {
|
||||
log::warn!(
|
||||
"invalid extension [{}], [quick_link] is set for a non-QuickLink extension",
|
||||
extension.id
|
||||
);
|
||||
return false;
|
||||
}
|
||||
|
||||
if extension.r#type == ExtensionType::Quicklink && extension.quicklink.is_none() {
|
||||
log::warn!(
|
||||
"invalid extension [{}], [quick_link] should be set for a QuickLink extension",
|
||||
extension.id
|
||||
);
|
||||
return false;
|
||||
}
|
||||
|
||||
// Group and Extension cannot have alias
|
||||
if extension.alias.is_some() {
|
||||
if extension.r#type == ExtensionType::Group || extension.r#type == ExtensionType::Extension
|
||||
{
|
||||
log::warn!(
|
||||
"invalid extension [{}], extension of type [{:?}] cannot have alias",
|
||||
extension.id,
|
||||
extension.r#type
|
||||
);
|
||||
return false;
|
||||
}
|
||||
}
|
||||
|
||||
// Group and Extension cannot have hotkey
|
||||
if extension.hotkey.is_some() {
|
||||
if extension.r#type == ExtensionType::Group || extension.r#type == ExtensionType::Extension
|
||||
{
|
||||
log::warn!(
|
||||
"invalid extension [{}], extension of type [{:?}] cannot have hotkey",
|
||||
extension.id,
|
||||
extension.r#type
|
||||
);
|
||||
return false;
|
||||
}
|
||||
}
|
||||
|
||||
if extension.commands.is_some() || extension.scripts.is_some() || extension.quicklinks.is_some()
|
||||
{
|
||||
if extension.r#type != ExtensionType::Group && extension.r#type != ExtensionType::Extension
|
||||
{
|
||||
log::warn!(
|
||||
"invalid extension [{}], only extension of type [Group] and [Extension] can have sub-items",
|
||||
extension.id,
|
||||
);
|
||||
return false;
|
||||
}
|
||||
}
|
||||
|
||||
true
|
||||
}
|
||||
|
||||
/// Helper function to check sub-items.
|
||||
fn validate_sub_items(extension_id: &str, sub_items: &[Extension]) -> bool {
|
||||
for (sub_item_index, sub_item) in sub_items.iter().enumerate() {
|
||||
// If field `action` is Some, then it should be a Command
|
||||
if sub_item.action.is_some() && sub_item.r#type != ExtensionType::Command {
|
||||
log::warn!(
|
||||
"invalid extension sub-item [{}-{}]: [action] is set for a non-Command extension",
|
||||
extension_id,
|
||||
sub_item.id
|
||||
);
|
||||
return false;
|
||||
}
|
||||
|
||||
if sub_item.r#type == ExtensionType::Group || sub_item.r#type == ExtensionType::Extension {
|
||||
log::warn!(
|
||||
"invalid extension sub-item [{}-{}]: sub-item should not be of type [Group] or [Extension]",
|
||||
extension_id, sub_item.id
|
||||
);
|
||||
return false;
|
||||
}
|
||||
|
||||
let sub_item_with_same_id_count = sub_items
|
||||
.iter()
|
||||
.enumerate()
|
||||
.filter(|(_idx, ext)| ext.id == sub_item.id)
|
||||
.filter(|(idx, _ext)| *idx != sub_item_index)
|
||||
.count();
|
||||
if sub_item_with_same_id_count != 0 {
|
||||
log::warn!(
|
||||
"invalid extension [{}]: found more than one sub-items with the same ID [{}]",
|
||||
extension_id,
|
||||
sub_item.id
|
||||
);
|
||||
return false;
|
||||
}
|
||||
|
||||
if !validate_extension_or_sub_item(sub_item) {
|
||||
return false;
|
||||
}
|
||||
|
||||
if sub_item.platforms.is_some() {
|
||||
log::warn!(
|
||||
"invalid extension [{}]: key [platforms] should not be set in sub-items",
|
||||
extension_id,
|
||||
);
|
||||
return false;
|
||||
}
|
||||
}
|
||||
|
||||
true
|
||||
Ok(extensions)
|
||||
}
|
||||
|
||||
/// All the third-party extensions will be registered as one search source.
|
||||
@@ -415,11 +235,11 @@ impl ThirdPartyExtensionsSearchSource {
|
||||
/// Note that when you enable a parent extension, its **enabled** children extensions
|
||||
/// should also be enabled.
|
||||
#[async_recursion::async_recursion]
|
||||
async fn _enable_extension(extension: &Extension) -> Result<(), String> {
|
||||
async fn _enable_extension(
|
||||
tauri_app_handle: &AppHandle,
|
||||
extension: &Extension,
|
||||
) -> Result<(), String> {
|
||||
if extension.supports_alias_hotkey() {
|
||||
let tauri_app_handle = GLOBAL_TAURI_APP_HANDLE
|
||||
.get()
|
||||
.expect("global tauri app handle not set");
|
||||
if let Some(ref hotkey) = extension.hotkey {
|
||||
let on_opened = extension.on_opened().unwrap_or_else(|| panic!( "extension has hotkey, but on_open() returns None, extension ID [{}], extension type [{:?}]", extension.id, extension.r#type));
|
||||
|
||||
@@ -427,12 +247,14 @@ impl ThirdPartyExtensionsSearchSource {
|
||||
|
||||
tauri_app_handle
|
||||
.global_shortcut()
|
||||
.on_shortcut(hotkey.as_str(), move |_tauri_app_handle, _hotkey, event| {
|
||||
.on_shortcut(hotkey.as_str(), move |tauri_app_handle, _hotkey, event| {
|
||||
let on_opened_clone = on_opened.clone();
|
||||
let extension_id_clone = extension_id_clone.clone();
|
||||
let app_handle_clone = tauri_app_handle.clone();
|
||||
|
||||
if event.state() == ShortcutState::Pressed {
|
||||
async_runtime::spawn(async move {
|
||||
let result = open(on_opened_clone).await;
|
||||
let result = open(app_handle_clone, on_opened_clone, None).await;
|
||||
if let Err(msg) = result {
|
||||
log::warn!(
|
||||
"failed to open extension [{}], error [{}]",
|
||||
@@ -451,19 +273,19 @@ impl ThirdPartyExtensionsSearchSource {
|
||||
if extension.r#type.contains_sub_items() {
|
||||
if let Some(commands) = &extension.commands {
|
||||
for command in commands.iter().filter(|ext| ext.enabled) {
|
||||
Self::_enable_extension(command).await?;
|
||||
Self::_enable_extension(&tauri_app_handle, command).await?;
|
||||
}
|
||||
}
|
||||
|
||||
if let Some(scripts) = &extension.scripts {
|
||||
for script in scripts.iter().filter(|ext| ext.enabled) {
|
||||
Self::_enable_extension(script).await?;
|
||||
Self::_enable_extension(&tauri_app_handle, script).await?;
|
||||
}
|
||||
}
|
||||
|
||||
if let Some(quicklinks) = &extension.quicklinks {
|
||||
for quicklink in quicklinks.iter().filter(|ext| ext.enabled) {
|
||||
Self::_enable_extension(quicklink).await?;
|
||||
Self::_enable_extension(&tauri_app_handle, quicklink).await?;
|
||||
}
|
||||
}
|
||||
}
|
||||
@@ -478,12 +300,11 @@ impl ThirdPartyExtensionsSearchSource {
|
||||
/// Note that when you disable a parent extension, its **enabled** children extensions
|
||||
/// should also be disabled.
|
||||
#[async_recursion::async_recursion]
|
||||
async fn _disable_extension(extension: &Extension) -> Result<(), String> {
|
||||
async fn _disable_extension(
|
||||
tauri_app_handle: &AppHandle,
|
||||
extension: &Extension,
|
||||
) -> Result<(), String> {
|
||||
if let Some(ref hotkey) = extension.hotkey {
|
||||
let tauri_app_handle = GLOBAL_TAURI_APP_HANDLE
|
||||
.get()
|
||||
.expect("global tauri app handle not set");
|
||||
|
||||
tauri_app_handle
|
||||
.global_shortcut()
|
||||
.unregister(hotkey.as_str())
|
||||
@@ -494,19 +315,19 @@ impl ThirdPartyExtensionsSearchSource {
|
||||
if extension.r#type.contains_sub_items() {
|
||||
if let Some(commands) = &extension.commands {
|
||||
for command in commands.iter().filter(|ext| ext.enabled) {
|
||||
Self::_disable_extension(command).await?;
|
||||
Self::_disable_extension(tauri_app_handle, command).await?;
|
||||
}
|
||||
}
|
||||
|
||||
if let Some(scripts) = &extension.scripts {
|
||||
for script in scripts.iter().filter(|ext| ext.enabled) {
|
||||
Self::_disable_extension(script).await?;
|
||||
Self::_disable_extension(tauri_app_handle, script).await?;
|
||||
}
|
||||
}
|
||||
|
||||
if let Some(quicklinks) = &extension.quicklinks {
|
||||
for quicklink in quicklinks.iter().filter(|ext| ext.enabled) {
|
||||
Self::_disable_extension(quicklink).await?;
|
||||
Self::_disable_extension(tauri_app_handle, quicklink).await?;
|
||||
}
|
||||
}
|
||||
}
|
||||
@@ -525,6 +346,7 @@ impl ThirdPartyExtensionsSearchSource {
|
||||
#[named]
|
||||
pub(super) async fn enable_extension(
|
||||
&self,
|
||||
tauri_app_handle: &AppHandle,
|
||||
bundle_id: &ExtensionBundleIdBorrowed<'_>,
|
||||
) -> Result<(), String> {
|
||||
let mut extensions_write_lock = self.inner.extensions.write().await;
|
||||
@@ -552,11 +374,11 @@ impl ThirdPartyExtensionsSearchSource {
|
||||
|
||||
update_extension(extension)?;
|
||||
alter_extension_json_file(
|
||||
&THIRD_PARTY_EXTENSIONS_DIRECTORY,
|
||||
&get_third_party_extension_directory(tauri_app_handle),
|
||||
bundle_id,
|
||||
update_extension,
|
||||
)?;
|
||||
Self::_enable_extension(extension).await?;
|
||||
Self::_enable_extension(tauri_app_handle, extension).await?;
|
||||
|
||||
Ok(())
|
||||
}
|
||||
@@ -564,6 +386,7 @@ impl ThirdPartyExtensionsSearchSource {
|
||||
#[named]
|
||||
pub(super) async fn disable_extension(
|
||||
&self,
|
||||
tauri_app_handle: &AppHandle,
|
||||
bundle_id: &ExtensionBundleIdBorrowed<'_>,
|
||||
) -> Result<(), String> {
|
||||
let mut extensions_write_lock = self.inner.extensions.write().await;
|
||||
@@ -591,11 +414,11 @@ impl ThirdPartyExtensionsSearchSource {
|
||||
|
||||
update_extension(extension)?;
|
||||
alter_extension_json_file(
|
||||
&THIRD_PARTY_EXTENSIONS_DIRECTORY,
|
||||
&get_third_party_extension_directory(tauri_app_handle),
|
||||
bundle_id,
|
||||
update_extension,
|
||||
)?;
|
||||
Self::_disable_extension(extension).await?;
|
||||
Self::_disable_extension(tauri_app_handle, extension).await?;
|
||||
|
||||
Ok(())
|
||||
}
|
||||
@@ -603,6 +426,7 @@ impl ThirdPartyExtensionsSearchSource {
|
||||
#[named]
|
||||
pub(super) async fn set_extension_alias(
|
||||
&self,
|
||||
tauri_app_handle: &AppHandle,
|
||||
bundle_id: &ExtensionBundleIdBorrowed<'_>,
|
||||
alias: &str,
|
||||
) -> Result<(), String> {
|
||||
@@ -623,7 +447,7 @@ impl ThirdPartyExtensionsSearchSource {
|
||||
|
||||
update_extension(extension)?;
|
||||
alter_extension_json_file(
|
||||
&THIRD_PARTY_EXTENSIONS_DIRECTORY,
|
||||
&get_third_party_extension_directory(tauri_app_handle),
|
||||
bundle_id,
|
||||
update_extension,
|
||||
)?;
|
||||
@@ -633,11 +457,11 @@ impl ThirdPartyExtensionsSearchSource {
|
||||
|
||||
/// Initialize the third-party extensions, which literally means
|
||||
/// enabling/activating the enabled extensions.
|
||||
pub(super) async fn init(&self) -> Result<(), String> {
|
||||
pub(super) async fn init(&self, tauri_app_handle: &AppHandle) -> Result<(), String> {
|
||||
let extensions_read_lock = self.inner.extensions.read().await;
|
||||
|
||||
for extension in extensions_read_lock.iter().filter(|ext| ext.enabled) {
|
||||
Self::_enable_extension(extension).await?;
|
||||
Self::_enable_extension(tauri_app_handle, extension).await?;
|
||||
}
|
||||
|
||||
Ok(())
|
||||
@@ -646,10 +470,12 @@ impl ThirdPartyExtensionsSearchSource {
|
||||
#[named]
|
||||
pub(super) async fn register_extension_hotkey(
|
||||
&self,
|
||||
tauri_app_handle: &AppHandle,
|
||||
bundle_id: &ExtensionBundleIdBorrowed<'_>,
|
||||
hotkey: &str,
|
||||
) -> Result<(), String> {
|
||||
self.unregister_extension_hotkey(bundle_id).await?;
|
||||
self.unregister_extension_hotkey(tauri_app_handle, bundle_id)
|
||||
.await?;
|
||||
|
||||
let mut extensions_write_lock = self.inner.extensions.write().await;
|
||||
let extension =
|
||||
@@ -669,15 +495,12 @@ impl ThirdPartyExtensionsSearchSource {
|
||||
// Update extension (memory and file)
|
||||
update_extension(extension)?;
|
||||
alter_extension_json_file(
|
||||
&THIRD_PARTY_EXTENSIONS_DIRECTORY,
|
||||
&get_third_party_extension_directory(tauri_app_handle),
|
||||
bundle_id,
|
||||
update_extension,
|
||||
)?;
|
||||
|
||||
// Set hotkey
|
||||
let tauri_app_handle = GLOBAL_TAURI_APP_HANDLE
|
||||
.get()
|
||||
.expect("global tauri app handle not set");
|
||||
let on_opened = extension.on_opened().unwrap_or_else(|| panic!(
|
||||
"setting hotkey for an extension that cannot be opened, extension ID [{:?}], extension type [{:?}]", bundle_id, extension.r#type,
|
||||
));
|
||||
@@ -685,12 +508,14 @@ impl ThirdPartyExtensionsSearchSource {
|
||||
let bundle_id_owned = bundle_id.to_owned();
|
||||
tauri_app_handle
|
||||
.global_shortcut()
|
||||
.on_shortcut(hotkey, move |_tauri_app_handle, _hotkey, event| {
|
||||
.on_shortcut(hotkey, move |tauri_app_handle, _hotkey, event| {
|
||||
let on_opened_clone = on_opened.clone();
|
||||
let bundle_id_clone = bundle_id_owned.clone();
|
||||
let app_handle_clone = tauri_app_handle.clone();
|
||||
|
||||
if event.state() == ShortcutState::Pressed {
|
||||
async_runtime::spawn(async move {
|
||||
let result = open(on_opened_clone).await;
|
||||
let result = open(app_handle_clone, on_opened_clone, None).await;
|
||||
if let Err(msg) = result {
|
||||
log::warn!(
|
||||
"failed to open extension [{:?}], error [{}]",
|
||||
@@ -711,6 +536,7 @@ impl ThirdPartyExtensionsSearchSource {
|
||||
#[named]
|
||||
pub(super) async fn unregister_extension_hotkey(
|
||||
&self,
|
||||
tauri_app_handle: &AppHandle,
|
||||
bundle_id: &ExtensionBundleIdBorrowed<'_>,
|
||||
) -> Result<(), String> {
|
||||
let mut extensions_write_lock = self.inner.extensions.write().await;
|
||||
@@ -738,15 +564,12 @@ impl ThirdPartyExtensionsSearchSource {
|
||||
|
||||
update_extension(extension)?;
|
||||
alter_extension_json_file(
|
||||
&THIRD_PARTY_EXTENSIONS_DIRECTORY,
|
||||
&get_third_party_extension_directory(tauri_app_handle),
|
||||
bundle_id,
|
||||
update_extension,
|
||||
)?;
|
||||
|
||||
// Set hotkey
|
||||
let tauri_app_handle = GLOBAL_TAURI_APP_HANDLE
|
||||
.get()
|
||||
.expect("global tauri app handle not set");
|
||||
tauri_app_handle
|
||||
.global_shortcut()
|
||||
.unregister(hotkey.as_str())
|
||||
@@ -805,6 +628,7 @@ impl ThirdPartyExtensionsSearchSource {
|
||||
.any(|ext| ext.developer.as_deref() == Some(developer) && ext.id == extension_id)
|
||||
}
|
||||
|
||||
/// Add `extension` to the **in-memory** extension list.
|
||||
pub(crate) async fn add_extension(&self, extension: Extension) {
|
||||
assert!(
|
||||
extension.developer.is_some(),
|
||||
@@ -828,7 +652,8 @@ impl ThirdPartyExtensionsSearchSource {
|
||||
write_lock_guard.push(extension);
|
||||
}
|
||||
|
||||
pub(crate) async fn remove_extension(&self, developer: &str, extension_id: &str) {
|
||||
/// Remove `extension` from the **in-memory** extension list.
|
||||
pub(crate) async fn remove_extension(&self, developer: &str, extension_id: &str) -> Extension {
|
||||
let mut write_lock_guard = self.inner.extensions.write().await;
|
||||
let Some(index) = write_lock_guard
|
||||
.iter()
|
||||
@@ -840,7 +665,7 @@ impl ThirdPartyExtensionsSearchSource {
|
||||
);
|
||||
};
|
||||
|
||||
write_lock_guard.remove(index);
|
||||
write_lock_guard.remove(index)
|
||||
}
|
||||
}
|
||||
|
||||
@@ -865,7 +690,11 @@ impl SearchSource for ThirdPartyExtensionsSearchSource {
|
||||
}
|
||||
}
|
||||
|
||||
async fn search(&self, query: SearchQuery) -> Result<QueryResponse, SearchError> {
|
||||
async fn search(
|
||||
&self,
|
||||
_tauri_app_handle: AppHandle,
|
||||
query: SearchQuery,
|
||||
) -> Result<QueryResponse, SearchError> {
|
||||
let Some(query_string) = query.query_strings.get("query") else {
|
||||
return Ok(QueryResponse {
|
||||
source: self.get_type(),
|
||||
@@ -909,10 +738,10 @@ impl SearchSource for ThirdPartyExtensionsSearchSource {
|
||||
}
|
||||
}
|
||||
|
||||
if let Some(ref quick_links) = extension.quicklinks {
|
||||
for quick_link in quick_links.iter().filter(|link| link.enabled) {
|
||||
if let Some(ref quicklinks) = extension.quicklinks {
|
||||
for quicklink in quicklinks.iter().filter(|link| link.enabled) {
|
||||
if let Some(hit) = extension_to_hit(
|
||||
quick_link,
|
||||
quicklink,
|
||||
&query_lower,
|
||||
opt_data_source.as_deref(),
|
||||
) {
|
||||
@@ -1062,6 +891,45 @@ fn calculate_text_similarity(query: &str, text: &str) -> Option<f64> {
|
||||
}
|
||||
}
|
||||
|
||||
#[tauri::command]
|
||||
pub(crate) async fn uninstall_extension(
|
||||
tauri_app_handle: AppHandle,
|
||||
developer: String,
|
||||
extension_id: String,
|
||||
) -> Result<(), String> {
|
||||
let extension_dir = {
|
||||
let mut path = get_third_party_extension_directory(&tauri_app_handle);
|
||||
path.push(developer.as_str());
|
||||
path.push(extension_id.as_str());
|
||||
|
||||
path
|
||||
};
|
||||
if !extension_dir.try_exists().map_err(|e| e.to_string())? {
|
||||
panic!(
|
||||
"we are uninstalling extension [{}/{}], but there is no such extension files on disk",
|
||||
developer, extension_id
|
||||
)
|
||||
}
|
||||
tokio::fs::remove_dir_all(extension_dir.as_path())
|
||||
.await
|
||||
.map_err(|e| e.to_string())?;
|
||||
|
||||
let extension = THIRD_PARTY_EXTENSIONS_SEARCH_SOURCE
|
||||
.get()
|
||||
.unwrap()
|
||||
.remove_extension(&developer, &extension_id)
|
||||
.await;
|
||||
|
||||
// Unregister the extension hotkey, if set.
|
||||
//
|
||||
// Unregistering hotkey is the only thing that we will do when we disable
|
||||
// an extension, so we directly use this function here even though "disabling"
|
||||
// the extension that one is trying to uninstall does not make too much sense.
|
||||
ThirdPartyExtensionsSearchSource::_disable_extension(&tauri_app_handle, &extension).await?;
|
||||
|
||||
Ok(())
|
||||
}
|
||||
|
||||
#[cfg(test)]
|
||||
mod tests {
|
||||
use super::*;
|
||||
@@ -19,7 +19,7 @@ use std::sync::Mutex;
|
||||
use std::sync::OnceLock;
|
||||
use tauri::async_runtime::block_on;
|
||||
use tauri::plugin::TauriPlugin;
|
||||
use tauri::{AppHandle, Emitter, Manager, PhysicalPosition, Runtime, WebviewWindow, WindowEvent};
|
||||
use tauri::{AppHandle, Emitter, Manager, PhysicalPosition, WebviewWindow, WindowEvent};
|
||||
use tauri_plugin_autostart::MacosLauncher;
|
||||
|
||||
/// Tauri store name
|
||||
@@ -28,9 +28,14 @@ pub(crate) const COCO_TAURI_STORE: &str = "coco_tauri_store";
|
||||
lazy_static! {
|
||||
static ref PREVIOUS_MONITOR_NAME: Mutex<Option<String>> = Mutex::new(None);
|
||||
}
|
||||
|
||||
/// To allow us to access tauri's `AppHandle` when its context is inaccessible,
|
||||
/// store it globally. It will be set in `init()`.
|
||||
///
|
||||
/// # WARNING
|
||||
///
|
||||
/// You may find this work, but the usage is discouraged and should be generally
|
||||
/// avoided. If you do need it, always be careful that it may not be set() when
|
||||
/// you access it.
|
||||
pub(crate) static GLOBAL_TAURI_APP_HANDLE: OnceLock<AppHandle> = OnceLock::new();
|
||||
|
||||
#[tauri::command]
|
||||
@@ -85,7 +90,11 @@ pub fn run() {
|
||||
.plugin(tauri_plugin_macos_permissions::init())
|
||||
.plugin(tauri_plugin_screenshots::init())
|
||||
.plugin(tauri_plugin_process::init())
|
||||
.plugin(tauri_plugin_updater::Builder::new().build())
|
||||
.plugin(
|
||||
tauri_plugin_updater::Builder::new()
|
||||
.default_version_comparator(crate::util::updater::custom_version_comparator)
|
||||
.build(),
|
||||
)
|
||||
.plugin(tauri_plugin_windows_version::init())
|
||||
.plugin(tauri_plugin_opener::init());
|
||||
|
||||
@@ -107,7 +116,6 @@ pub fn run() {
|
||||
show_settings,
|
||||
show_check,
|
||||
hide_check,
|
||||
server::servers::get_server_token,
|
||||
server::servers::add_coco_server,
|
||||
server::servers::remove_coco_server,
|
||||
server::servers::list_coco_servers,
|
||||
@@ -122,8 +130,8 @@ pub fn run() {
|
||||
server::connector::get_connectors_by_server,
|
||||
search::query_coco_fusion,
|
||||
assistant::chat_history,
|
||||
assistant::new_chat,
|
||||
assistant::send_message,
|
||||
assistant::chat_create,
|
||||
assistant::chat_chat,
|
||||
assistant::session_chat_history,
|
||||
assistant::open_session_chat,
|
||||
assistant::close_session_chat,
|
||||
@@ -135,21 +143,19 @@ pub fn run() {
|
||||
assistant::assistant_get_multi,
|
||||
// server::get_coco_server_datasources,
|
||||
// server::get_coco_server_connectors,
|
||||
server::websocket::connect_to_server,
|
||||
server::websocket::disconnect,
|
||||
get_app_search_source,
|
||||
server::attachment::upload_attachment,
|
||||
server::attachment::get_attachment,
|
||||
server::attachment::get_attachment_by_ids,
|
||||
server::attachment::delete_attachment,
|
||||
server::transcription::transcription,
|
||||
server::system_settings::get_system_settings,
|
||||
simulate_mouse_click,
|
||||
extension::built_in::application::get_app_list,
|
||||
extension::built_in::application::get_app_search_path,
|
||||
extension::built_in::application::get_app_metadata,
|
||||
extension::built_in::application::add_app_search_path,
|
||||
extension::built_in::application::remove_app_search_path,
|
||||
extension::built_in::application::reindex_applications,
|
||||
extension::quicklink_link_arguments,
|
||||
extension::list_extensions,
|
||||
extension::enable_extension,
|
||||
extension::disable_extension,
|
||||
@@ -157,15 +163,31 @@ pub fn run() {
|
||||
extension::register_extension_hotkey,
|
||||
extension::unregister_extension_hotkey,
|
||||
extension::is_extension_enabled,
|
||||
extension::store::search_extension,
|
||||
extension::store::install_extension,
|
||||
extension::store::uninstall_extension,
|
||||
extension::third_party::install::store::search_extension,
|
||||
extension::third_party::install::store::install_extension_from_store,
|
||||
extension::third_party::install::local_extension::install_local_extension,
|
||||
extension::third_party::uninstall_extension,
|
||||
settings::set_allow_self_signature,
|
||||
settings::get_allow_self_signature,
|
||||
assistant::ask_ai,
|
||||
crate::common::document::open,
|
||||
#[cfg(any(target_os = "macos", target_os = "windows"))]
|
||||
extension::built_in::file_search::config::get_file_system_config,
|
||||
#[cfg(any(target_os = "macos", target_os = "windows"))]
|
||||
extension::built_in::file_search::config::set_file_system_config,
|
||||
server::synthesize::synthesize,
|
||||
util::file::get_file_icon,
|
||||
util::app_lang::update_app_lang,
|
||||
#[cfg(target_os = "macos")]
|
||||
setup::toggle_move_to_active_space_attribute,
|
||||
])
|
||||
.setup(|app| {
|
||||
let app_handle = app.handle().clone();
|
||||
GLOBAL_TAURI_APP_HANDLE
|
||||
.set(app_handle.clone())
|
||||
.expect("global tauri AppHandle already initialized");
|
||||
log::trace!("global Tauri AppHandle set");
|
||||
|
||||
#[cfg(target_os = "macos")]
|
||||
{
|
||||
log::trace!("hiding Dock icon on macOS");
|
||||
@@ -173,22 +195,34 @@ pub fn run() {
|
||||
log::trace!("Dock icon should be hidden now");
|
||||
}
|
||||
|
||||
let app_handle = app.handle().clone();
|
||||
GLOBAL_TAURI_APP_HANDLE
|
||||
.set(app_handle.clone())
|
||||
.expect("variable already initialized");
|
||||
log::trace!("global Tauri app handle set");
|
||||
|
||||
let registry = SearchSourceRegistry::default();
|
||||
|
||||
app.manage(registry); // Store registry in Tauri's app state
|
||||
app.manage(server::websocket::WebSocketManager::default());
|
||||
|
||||
// This has to be called before initializing extensions as doing that
|
||||
// requires access to the shortcut store, which will be set by this
|
||||
// function.
|
||||
shortcut::enable_shortcut(app);
|
||||
|
||||
block_on(async {
|
||||
init(app.handle()).await;
|
||||
});
|
||||
|
||||
shortcut::enable_shortcut(app);
|
||||
// We want all the extensions here, so no filter condition specified.
|
||||
match extension::list_extensions(app_handle.clone(), None, None, false).await {
|
||||
Ok(extensions) => {
|
||||
// Initializing extension relies on SearchSourceRegistry, so this should
|
||||
// be executed after `app.manage(registry)`
|
||||
if let Err(e) =
|
||||
extension::init_extensions(app_handle.clone(), extensions).await
|
||||
{
|
||||
log::error!("initializing extensions failed with error [{}]", e);
|
||||
}
|
||||
}
|
||||
Err(e) => {
|
||||
log::error!("listing extensions failed with error [{}]", e);
|
||||
}
|
||||
}
|
||||
});
|
||||
|
||||
ensure_autostart_state_consistent(app)?;
|
||||
|
||||
@@ -256,7 +290,7 @@ pub fn run() {
|
||||
});
|
||||
}
|
||||
|
||||
pub async fn init<R: Runtime>(app_handle: &AppHandle<R>) {
|
||||
pub async fn init(app_handle: &AppHandle) {
|
||||
// Await the async functions to load the servers and tokens
|
||||
if let Err(err) = load_or_insert_default_server(app_handle).await {
|
||||
log::error!("Failed to load servers: {}", err);
|
||||
@@ -266,7 +300,7 @@ pub async fn init<R: Runtime>(app_handle: &AppHandle<R>) {
|
||||
log::error!("Failed to load server tokens: {}", err);
|
||||
}
|
||||
|
||||
let coco_servers = server::servers::get_all_servers();
|
||||
let coco_servers = server::servers::get_all_servers().await;
|
||||
|
||||
// Get the registry from Tauri's state
|
||||
// let registry: State<SearchSourceRegistry> = app_handle.state::<SearchSourceRegistry>();
|
||||
@@ -280,7 +314,7 @@ pub async fn init<R: Runtime>(app_handle: &AppHandle<R>) {
|
||||
}
|
||||
|
||||
#[tauri::command]
|
||||
async fn show_coco<R: Runtime>(app_handle: AppHandle<R>) {
|
||||
async fn show_coco(app_handle: AppHandle) {
|
||||
if let Some(window) = app_handle.get_webview_window(MAIN_WINDOW_LABEL) {
|
||||
move_window_to_active_monitor(&window);
|
||||
|
||||
@@ -293,7 +327,7 @@ async fn show_coco<R: Runtime>(app_handle: AppHandle<R>) {
|
||||
}
|
||||
|
||||
#[tauri::command]
|
||||
async fn hide_coco<R: Runtime>(app: AppHandle<R>) {
|
||||
async fn hide_coco(app: AppHandle) {
|
||||
if let Some(window) = app.get_webview_window(MAIN_WINDOW_LABEL) {
|
||||
if let Err(err) = window.hide() {
|
||||
log::error!("Failed to hide the window: {}", err);
|
||||
@@ -305,7 +339,7 @@ async fn hide_coco<R: Runtime>(app: AppHandle<R>) {
|
||||
}
|
||||
}
|
||||
|
||||
fn move_window_to_active_monitor<R: Runtime>(window: &WebviewWindow<R>) {
|
||||
fn move_window_to_active_monitor(window: &WebviewWindow) {
|
||||
//dbg!("Moving window to active monitor");
|
||||
// Try to get the available monitors, handle failure gracefully
|
||||
let available_monitors = match window.available_monitors() {
|
||||
@@ -399,13 +433,7 @@ fn move_window_to_active_monitor<R: Runtime>(window: &WebviewWindow<R>) {
|
||||
}
|
||||
|
||||
#[tauri::command]
|
||||
async fn get_app_search_source<R: Runtime>(app_handle: AppHandle<R>) -> Result<(), String> {
|
||||
// We want all the extensions here, so no filter condition specified.
|
||||
let (_found_invalid_extensions, extensions) = extension::list_extensions(None, None, false)
|
||||
.await
|
||||
.map_err(|e| e.to_string())?;
|
||||
extension::init_extensions(extensions).await?;
|
||||
|
||||
async fn get_app_search_source(app_handle: AppHandle) -> Result<(), String> {
|
||||
let _ = server::connector::refresh_all_connectors(&app_handle).await;
|
||||
let _ = server::datasource::refresh_all_datasources(&app_handle).await;
|
||||
|
||||
@@ -446,52 +474,6 @@ async fn hide_check(app_handle: AppHandle) {
|
||||
window.hide().unwrap();
|
||||
}
|
||||
|
||||
#[tauri::command]
|
||||
async fn simulate_mouse_click<R: Runtime>(window: WebviewWindow<R>, is_chat_mode: bool) {
|
||||
#[cfg(target_os = "windows")]
|
||||
{
|
||||
use enigo::{Button, Coordinate, Direction, Enigo, Mouse, Settings};
|
||||
use std::{thread, time::Duration};
|
||||
|
||||
if let Ok(mut enigo) = Enigo::new(&Settings::default()) {
|
||||
// Save the current mouse position
|
||||
if let Ok((original_x, original_y)) = enigo.location() {
|
||||
// Retrieve the window's outer position (top-left corner)
|
||||
if let Ok(position) = window.outer_position() {
|
||||
// Retrieve the window's inner size (client area)
|
||||
if let Ok(size) = window.inner_size() {
|
||||
// Calculate the center position of the title bar
|
||||
let x = position.x + (size.width as i32 / 2);
|
||||
let y = if is_chat_mode {
|
||||
position.y + size.height as i32 - 50
|
||||
} else {
|
||||
position.y + 30
|
||||
};
|
||||
|
||||
// Move the mouse cursor to the calculated position
|
||||
if enigo.move_mouse(x, y, Coordinate::Abs).is_ok() {
|
||||
// // Simulate a left mouse click
|
||||
let _ = enigo.button(Button::Left, Direction::Click);
|
||||
// let _ = enigo.button(Button::Left, Direction::Release);
|
||||
|
||||
thread::sleep(Duration::from_millis(100));
|
||||
|
||||
// Move the mouse cursor back to the original position
|
||||
let _ = enigo.move_mouse(original_x, original_y, Coordinate::Abs);
|
||||
}
|
||||
}
|
||||
}
|
||||
}
|
||||
}
|
||||
}
|
||||
|
||||
#[cfg(not(target_os = "windows"))]
|
||||
{
|
||||
let _ = window;
|
||||
let _ = is_chat_mode;
|
||||
}
|
||||
}
|
||||
|
||||
/// Log format:
|
||||
///
|
||||
/// ```text
|
||||
@@ -601,7 +583,12 @@ fn set_up_tauri_logger() -> TauriPlugin<tauri::Wry> {
|
||||
// When running the built binary, set `COCO_LOG` to `coco_lib=trace` to capture all logs
|
||||
// that come from Coco in the log file, which helps with debugging.
|
||||
if !tauri::is_dev() {
|
||||
std::env::set_var("COCO_LOG", "coco_lib=trace");
|
||||
// We have absolutely no guarantee that we (We have control over the Rust
|
||||
// code, but definitely no idea about the libc C code, all the shared objects
|
||||
// that we will link) will not concurrently read/write `envp`, so just use unsafe.
|
||||
unsafe {
|
||||
std::env::set_var("COCO_LOG", "coco_lib=trace");
|
||||
}
|
||||
}
|
||||
|
||||
let mut builder = tauri_plugin_log::Builder::new();
|
||||
|
||||
@@ -1,5 +1,112 @@
|
||||
// Prevents additional console window on Windows in release, DO NOT REMOVE!!
|
||||
#![cfg_attr(not(debug_assertions), windows_subsystem = "windows")]
|
||||
|
||||
use std::fs::OpenOptions;
|
||||
use std::io::Write;
|
||||
use std::path::PathBuf;
|
||||
|
||||
/// Helper function to return the log directory.
|
||||
///
|
||||
/// This should return the same value as `tauri_app_handle.path().app_log_dir().unwrap()`.
|
||||
fn app_log_dir() -> PathBuf {
|
||||
// This function `app_log_dir()` is for the panic hook, which should be set
|
||||
// before Tauri performs any initialization. At that point, we do not have
|
||||
// access to the identifier provided by Tauri, so we need to define our own
|
||||
// one here.
|
||||
//
|
||||
// NOTE: If you update identifier in the following files, update this one
|
||||
// as well!
|
||||
//
|
||||
// src-tauri/tauri.linux.conf.json
|
||||
// src-tauri/Entitlements.plist
|
||||
// src-tauri/tauri.conf.json
|
||||
// src-tauri/Info.plist
|
||||
const IDENTIFIER: &str = "rs.coco.app";
|
||||
|
||||
#[cfg(target_os = "macos")]
|
||||
let path = dirs::home_dir()
|
||||
.expect("cannot find the home directory, Coco should never run in such a environment")
|
||||
.join("Library/Logs")
|
||||
.join(IDENTIFIER);
|
||||
|
||||
#[cfg(not(target_os = "macos"))]
|
||||
let path = dirs::data_local_dir()
|
||||
.expect("app local dir is None, we should not encounter this")
|
||||
.join(IDENTIFIER)
|
||||
.join("logs");
|
||||
|
||||
path
|
||||
}
|
||||
|
||||
/// Set up panic hook to log panic information to a file
|
||||
fn setup_panic_hook() {
|
||||
std::panic::set_hook(Box::new(|panic_info| {
|
||||
let timestamp = chrono::Local::now();
|
||||
// "%Y-%m-%d %H:%M:%S"
|
||||
//
|
||||
// I would like to use the above format, but Windows does not allow that
|
||||
// and complains with OS error 123.
|
||||
let datetime_str = timestamp.format("%Y-%m-%d-%H-%M-%S").to_string();
|
||||
|
||||
let log_dir = app_log_dir();
|
||||
|
||||
// Ensure the log directory exists
|
||||
if let Err(e) = std::fs::create_dir_all(&log_dir) {
|
||||
eprintln!("Panic hook error: failed to create log directory: {}", e);
|
||||
return;
|
||||
}
|
||||
|
||||
let panic_file = log_dir.join(format!("{}_rust_panic.log", datetime_str));
|
||||
|
||||
// Prepare panic information
|
||||
let panic_message = if let Some(s) = panic_info.payload().downcast_ref::<&str>() {
|
||||
s.to_string()
|
||||
} else if let Some(s) = panic_info.payload().downcast_ref::<String>() {
|
||||
s.clone()
|
||||
} else {
|
||||
"Unknown panic message".to_string()
|
||||
};
|
||||
|
||||
let location = if let Some(location) = panic_info.location() {
|
||||
format!(
|
||||
"{}:{}:{}",
|
||||
location.file(),
|
||||
location.line(),
|
||||
location.column()
|
||||
)
|
||||
} else {
|
||||
"Unknown location".to_string()
|
||||
};
|
||||
|
||||
// Use `force_capture()` instead of `capture()` as we want backtrace
|
||||
// regardless of whether the corresponding env vars are set or not.
|
||||
let backtrace = std::backtrace::Backtrace::force_capture();
|
||||
|
||||
let panic_log = format!(
|
||||
"Time: [{}]\nLocation: [{}]\nMessage: [{}]\nBacktrace: \n{}",
|
||||
datetime_str, location, panic_message, backtrace
|
||||
);
|
||||
|
||||
// Write to panic file
|
||||
match OpenOptions::new()
|
||||
.create(true)
|
||||
.append(true)
|
||||
.open(&panic_file)
|
||||
{
|
||||
Ok(mut file) => {
|
||||
if let Err(e) = writeln!(file, "{}", panic_log) {
|
||||
eprintln!("Panic hook error: Failed to write panic to file: {}", e);
|
||||
}
|
||||
}
|
||||
Err(e) => {
|
||||
eprintln!("Panic hook error: Failed to open panic log file: {}", e);
|
||||
}
|
||||
}
|
||||
}));
|
||||
}
|
||||
|
||||
fn main() {
|
||||
// Panic hook setup should be the first thing to do, everything could panic!
|
||||
setup_panic_hook();
|
||||
coco_lib::run();
|
||||
}
|
||||
|
||||
@@ -1,141 +1,210 @@
|
||||
use crate::common::error::SearchError;
|
||||
use crate::common::register::SearchSourceRegistry;
|
||||
use crate::common::search::{
|
||||
FailedRequest, MultiSourceQueryResponse, QueryHits, QueryResponse, QuerySource, SearchQuery,
|
||||
FailedRequest, MultiSourceQueryResponse, QueryHits, QuerySource, SearchQuery,
|
||||
};
|
||||
use crate::common::traits::SearchSource;
|
||||
use crate::server::servers::logout_coco_server;
|
||||
use crate::server::servers::mark_server_as_offline;
|
||||
use function_name::named;
|
||||
use futures::stream::FuturesUnordered;
|
||||
use futures::StreamExt;
|
||||
use futures::stream::FuturesUnordered;
|
||||
use reqwest::StatusCode;
|
||||
use std::cmp::Reverse;
|
||||
use std::collections::HashMap;
|
||||
use std::collections::HashSet;
|
||||
use std::future::Future;
|
||||
use std::sync::Arc;
|
||||
use tauri::{AppHandle, Manager, Runtime};
|
||||
use tokio::time::error::Elapsed;
|
||||
use tokio::time::{timeout, Duration};
|
||||
|
||||
/// Helper function to return the Future used for querying querysources.
|
||||
///
|
||||
/// It is a workaround for the limitations:
|
||||
///
|
||||
/// 1. 2 async blocks have different types in Rust's type system even though
|
||||
/// they are literally same
|
||||
/// 2. `futures::stream::FuturesUnordered` needs the `Futures` pushed to it to
|
||||
/// have only 1 type
|
||||
///
|
||||
/// Putting the async block in a function to unify the types.
|
||||
fn same_type_futures(
|
||||
query_source: QuerySource,
|
||||
query_source_trait_object: Arc<dyn SearchSource>,
|
||||
timeout_duration: Duration,
|
||||
search_query: SearchQuery,
|
||||
) -> impl Future<
|
||||
Output = (
|
||||
QuerySource,
|
||||
Result<Result<QueryResponse, SearchError>, Elapsed>,
|
||||
),
|
||||
> + 'static {
|
||||
async move {
|
||||
(
|
||||
// Store `query_source` as part of future for debugging purposes.
|
||||
query_source,
|
||||
timeout(timeout_duration, async {
|
||||
query_source_trait_object.search(search_query).await
|
||||
})
|
||||
.await,
|
||||
)
|
||||
}
|
||||
}
|
||||
use tauri::{AppHandle, Manager};
|
||||
use tokio::time::{Duration, timeout};
|
||||
|
||||
#[named]
|
||||
#[tauri::command]
|
||||
pub async fn query_coco_fusion<R: Runtime>(
|
||||
app_handle: AppHandle<R>,
|
||||
pub async fn query_coco_fusion(
|
||||
tauri_app_handle: AppHandle,
|
||||
from: u64,
|
||||
size: u64,
|
||||
query_strings: HashMap<String, String>,
|
||||
query_timeout: u64,
|
||||
) -> Result<MultiSourceQueryResponse, SearchError> {
|
||||
let query_keyword = query_strings
|
||||
.get("query")
|
||||
.unwrap_or(&"".to_string())
|
||||
.clone();
|
||||
|
||||
let opt_query_source_id = query_strings.get("querysource");
|
||||
|
||||
let search_sources = app_handle.state::<SearchSourceRegistry>();
|
||||
|
||||
let sources_future = search_sources.get_sources();
|
||||
let mut futures = FuturesUnordered::new();
|
||||
|
||||
let mut sources_list = sources_future.await;
|
||||
let sources_list_len = sources_list.len();
|
||||
|
||||
// Time limit for each query
|
||||
let search_sources = tauri_app_handle.state::<SearchSourceRegistry>();
|
||||
let query_source_list = search_sources.get_sources().await;
|
||||
let timeout_duration = Duration::from_millis(query_timeout);
|
||||
let search_query = SearchQuery::new(from, size, query_strings.clone());
|
||||
|
||||
log::debug!(
|
||||
"{}(): {:?}, timeout: {:?}",
|
||||
"{}() invoked with parameters: from: [{}], size: [{}], query_strings: [{:?}], timeout: [{:?}]",
|
||||
function_name!(),
|
||||
from,
|
||||
size,
|
||||
query_strings,
|
||||
timeout_duration
|
||||
);
|
||||
|
||||
let search_query = SearchQuery::new(from, size, query_strings.clone());
|
||||
|
||||
// Dispatch to different `query_coco_fusion_xxx()` functions.
|
||||
if let Some(query_source_id) = opt_query_source_id {
|
||||
// If this query source ID is specified, we only query this query source.
|
||||
log::debug!(
|
||||
"parameter [querysource={}] specified, will only query this querysource",
|
||||
query_source_id
|
||||
);
|
||||
|
||||
let opt_query_source_trait_object_index = sources_list
|
||||
.iter()
|
||||
.position(|query_source| &query_source.get_type().id == query_source_id);
|
||||
|
||||
let Some(query_source_trait_object_index) = opt_query_source_trait_object_index else {
|
||||
// It is possible (an edge case) that the frontend invokes `query_coco_fusion()` with a
|
||||
// datasource that does not exist in the source list:
|
||||
//
|
||||
// 1. Search applications
|
||||
// 2. Navigate to the application sub page
|
||||
// 3. Disable the application extension in settings
|
||||
// 4. hide the search window
|
||||
// 5. Re-open the search window and search for something
|
||||
//
|
||||
// The application search source is not in the source list because the extension
|
||||
// has been disabled, but the last search is indeed invoked with parameter
|
||||
// `datasource=application`.
|
||||
return Ok(MultiSourceQueryResponse {
|
||||
failed: Vec::new(),
|
||||
hits: Vec::new(),
|
||||
total_hits: 0,
|
||||
});
|
||||
};
|
||||
|
||||
let query_source_trait_object = sources_list.remove(query_source_trait_object_index);
|
||||
let query_source = query_source_trait_object.get_type();
|
||||
|
||||
futures.push(same_type_futures(
|
||||
query_source,
|
||||
query_source_trait_object,
|
||||
query_coco_fusion_single_query_source(
|
||||
tauri_app_handle,
|
||||
query_source_list,
|
||||
query_source_id.clone(),
|
||||
timeout_duration,
|
||||
search_query,
|
||||
));
|
||||
)
|
||||
.await
|
||||
} else {
|
||||
for query_source_trait_object in sources_list {
|
||||
let query_source = query_source_trait_object.get_type().clone();
|
||||
log::debug!("will query querysource [{}]", query_source.id);
|
||||
futures.push(same_type_futures(
|
||||
query_source,
|
||||
query_source_trait_object,
|
||||
timeout_duration,
|
||||
search_query.clone(),
|
||||
));
|
||||
query_coco_fusion_multi_query_sources(
|
||||
tauri_app_handle,
|
||||
query_source_list,
|
||||
timeout_duration,
|
||||
search_query,
|
||||
)
|
||||
.await
|
||||
}
|
||||
}
|
||||
|
||||
/// Query only 1 query source.
|
||||
///
|
||||
/// The logic here is much simpler than `query_coco_fusion_multi_query_sources()`
|
||||
/// as we don't need to re-rank due to fact that this does not involve multiple
|
||||
/// query sources.
|
||||
async fn query_coco_fusion_single_query_source(
|
||||
tauri_app_handle: AppHandle,
|
||||
mut query_source_list: Vec<Arc<dyn SearchSource>>,
|
||||
id_of_query_source_to_query: String,
|
||||
timeout_duration: Duration,
|
||||
search_query: SearchQuery,
|
||||
) -> Result<MultiSourceQueryResponse, SearchError> {
|
||||
// If this query source ID is specified, we only query this query source.
|
||||
log::debug!(
|
||||
"parameter [querysource={}] specified, will only query this query source",
|
||||
id_of_query_source_to_query
|
||||
);
|
||||
|
||||
let opt_query_source_trait_object_index = query_source_list
|
||||
.iter()
|
||||
.position(|query_source| query_source.get_type().id == id_of_query_source_to_query);
|
||||
|
||||
let Some(query_source_trait_object_index) = opt_query_source_trait_object_index else {
|
||||
// It is possible (an edge case) that the frontend invokes `query_coco_fusion()`
|
||||
// with a querysource that does not exist in the source list:
|
||||
//
|
||||
// 1. Search applications
|
||||
// 2. Navigate to the application sub page
|
||||
// 3. Disable the application extension in settings, which removes this
|
||||
// query source from the list
|
||||
// 4. hide the search window
|
||||
// 5. Re-open the search window, you will still be in the sub page, type to search
|
||||
// something
|
||||
//
|
||||
// The application query source is not in the source list because the extension
|
||||
// was disabled and thus removed from the query sources, but the last
|
||||
// search is indeed invoked with parameter `querysource=application`.
|
||||
return Ok(MultiSourceQueryResponse {
|
||||
failed: Vec::new(),
|
||||
hits: Vec::new(),
|
||||
total_hits: 0,
|
||||
});
|
||||
};
|
||||
|
||||
let query_source_trait_object = query_source_list.remove(query_source_trait_object_index);
|
||||
let query_source = query_source_trait_object.get_type();
|
||||
let search_fut = query_source_trait_object.search(tauri_app_handle.clone(), search_query);
|
||||
let timeout_result = timeout(timeout_duration, search_fut).await;
|
||||
|
||||
let mut failed_requests: Vec<FailedRequest> = Vec::new();
|
||||
let mut hits = Vec::new();
|
||||
let mut total_hits = 0;
|
||||
|
||||
match timeout_result {
|
||||
// Ignore the `_timeout` variable as it won't provide any useful debugging information.
|
||||
Err(_timeout) => {
|
||||
log::warn!(
|
||||
"searching query source [{}] timed out, skip this request",
|
||||
query_source.id
|
||||
);
|
||||
}
|
||||
Ok(query_result) => match query_result {
|
||||
Ok(response) => {
|
||||
total_hits = response.total_hits;
|
||||
|
||||
for (document, score) in response.hits {
|
||||
log::debug!(
|
||||
"document from query source [{}]: ID [{}], title [{:?}], score [{}]",
|
||||
response.source.id,
|
||||
document.id,
|
||||
document.title,
|
||||
score
|
||||
);
|
||||
|
||||
let query_hit = QueryHits {
|
||||
source: Some(response.source.clone()),
|
||||
score,
|
||||
document,
|
||||
};
|
||||
|
||||
hits.push(query_hit);
|
||||
}
|
||||
}
|
||||
Err(search_error) => {
|
||||
query_coco_fusion_handle_failed_request(
|
||||
tauri_app_handle.clone(),
|
||||
&mut failed_requests,
|
||||
query_source,
|
||||
search_error,
|
||||
)
|
||||
.await;
|
||||
}
|
||||
},
|
||||
}
|
||||
|
||||
Ok(MultiSourceQueryResponse {
|
||||
failed: failed_requests,
|
||||
hits,
|
||||
total_hits,
|
||||
})
|
||||
}
|
||||
|
||||
async fn query_coco_fusion_multi_query_sources(
|
||||
tauri_app_handle: AppHandle,
|
||||
query_source_trait_object_list: Vec<Arc<dyn SearchSource>>,
|
||||
timeout_duration: Duration,
|
||||
search_query: SearchQuery,
|
||||
) -> Result<MultiSourceQueryResponse, SearchError> {
|
||||
log::debug!(
|
||||
"will query query sources {:?}",
|
||||
query_source_trait_object_list
|
||||
.iter()
|
||||
.map(|search_source| search_source.get_type().id.clone())
|
||||
.collect::<Vec<String>>()
|
||||
);
|
||||
|
||||
let query_keyword = search_query
|
||||
.query_strings
|
||||
.get("query")
|
||||
.unwrap_or(&"".to_string())
|
||||
.clone();
|
||||
let size = search_query.size;
|
||||
|
||||
let mut futures = FuturesUnordered::new();
|
||||
|
||||
let query_source_list_len = query_source_trait_object_list.len();
|
||||
for query_source_trait_object in query_source_trait_object_list {
|
||||
let query_source = query_source_trait_object.get_type().clone();
|
||||
let tauri_app_handle_clone = tauri_app_handle.clone();
|
||||
let search_query_clone = search_query.clone();
|
||||
|
||||
futures.push(async move {
|
||||
(
|
||||
// Store `query_source` as part of future for debugging purposes.
|
||||
query_source,
|
||||
timeout(timeout_duration, async {
|
||||
query_source_trait_object
|
||||
.search(tauri_app_handle_clone, search_query_clone)
|
||||
.await
|
||||
})
|
||||
.await,
|
||||
)
|
||||
});
|
||||
}
|
||||
|
||||
let mut total_hits = 0;
|
||||
@@ -144,7 +213,7 @@ pub async fn query_coco_fusion<R: Runtime>(
|
||||
let mut all_hits: Vec<(String, QueryHits, f64)> = Vec::new();
|
||||
let mut hits_per_source: HashMap<String, Vec<(QueryHits, f64)>> = HashMap::new();
|
||||
|
||||
if sources_list_len > 1 {
|
||||
if query_source_list_len > 1 {
|
||||
need_rerank = true; // If we have more than one source, we need to rerank the hits
|
||||
}
|
||||
|
||||
@@ -156,25 +225,25 @@ pub async fn query_coco_fusion<R: Runtime>(
|
||||
"searching query source [{}] timed out, skip this request",
|
||||
query_source.id
|
||||
);
|
||||
// failed_requests.push(FailedRequest {
|
||||
// source: query_source,
|
||||
// status: 0,
|
||||
// error: Some("querying timed out".into()),
|
||||
// reason: None,
|
||||
// });
|
||||
}
|
||||
Ok(query_result) => match query_result {
|
||||
Ok(response) => {
|
||||
total_hits += response.total_hits;
|
||||
let source_id = response.source.id.clone();
|
||||
|
||||
for (doc, score) in response.hits {
|
||||
log::debug!("doc: {}, {:?}, {}", doc.id, doc.title, score);
|
||||
for (document, score) in response.hits {
|
||||
log::debug!(
|
||||
"document from query source [{}]: ID [{}], title [{:?}], score [{}]",
|
||||
response.source.id,
|
||||
document.id,
|
||||
document.title,
|
||||
score
|
||||
);
|
||||
|
||||
let query_hit = QueryHits {
|
||||
source: Some(response.source.clone()),
|
||||
score,
|
||||
document: doc,
|
||||
document,
|
||||
};
|
||||
|
||||
all_hits.push((source_id.clone(), query_hit.clone(), score));
|
||||
@@ -186,17 +255,13 @@ pub async fn query_coco_fusion<R: Runtime>(
|
||||
}
|
||||
}
|
||||
Err(search_error) => {
|
||||
log::error!(
|
||||
"searching query source [{}] failed, error [{}]",
|
||||
query_source.id,
|
||||
search_error
|
||||
);
|
||||
failed_requests.push(FailedRequest {
|
||||
source: query_source,
|
||||
status: 0,
|
||||
error: Some(search_error.to_string()),
|
||||
reason: None,
|
||||
});
|
||||
query_coco_fusion_handle_failed_request(
|
||||
tauri_app_handle.clone(),
|
||||
&mut failed_requests,
|
||||
query_source,
|
||||
search_error,
|
||||
)
|
||||
.await;
|
||||
}
|
||||
},
|
||||
}
|
||||
@@ -356,3 +421,54 @@ fn boosted_levenshtein_rerank(query: &str, titles: Vec<(usize, &str)>) -> Vec<(u
|
||||
})
|
||||
.collect()
|
||||
}
|
||||
|
||||
/// Helper function to handle a failed request.
|
||||
///
|
||||
/// Extracted as a function because `query_coco_fusion_single_query_source()` and
|
||||
/// `query_coco_fusion_multi_query_sources()` share the same error handling logic.
|
||||
async fn query_coco_fusion_handle_failed_request(
|
||||
tauri_app_handle: AppHandle,
|
||||
failed_requests: &mut Vec<FailedRequest>,
|
||||
query_source: QuerySource,
|
||||
search_error: SearchError,
|
||||
) {
|
||||
log::error!(
|
||||
"searching query source [{}] failed, error [{}]",
|
||||
query_source.id,
|
||||
search_error
|
||||
);
|
||||
|
||||
let mut status_code_num: u16 = 0;
|
||||
|
||||
if let SearchError::HttpError {
|
||||
status_code: opt_status_code,
|
||||
msg: _,
|
||||
} = search_error
|
||||
{
|
||||
if let Some(status_code) = opt_status_code {
|
||||
status_code_num = status_code.as_u16();
|
||||
if status_code != StatusCode::OK {
|
||||
if status_code == StatusCode::UNAUTHORIZED {
|
||||
// This Coco server is unavailable. In addition to marking it as
|
||||
// unavailable, we need to log out because the status code is 401.
|
||||
logout_coco_server(tauri_app_handle.clone(), query_source.id.to_string()).await.unwrap_or_else(|e| {
|
||||
panic!(
|
||||
"the search request to Coco server [id {}, name {}] failed with status code {}, the login token is invalid, we are trying to log out, but failed with error [{}]",
|
||||
query_source.id, query_source.name, StatusCode::UNAUTHORIZED, e
|
||||
);
|
||||
})
|
||||
} else {
|
||||
// This Coco server is unavailable
|
||||
mark_server_as_offline(tauri_app_handle.clone(), &query_source.id).await;
|
||||
}
|
||||
}
|
||||
}
|
||||
}
|
||||
|
||||
failed_requests.push(FailedRequest {
|
||||
source: query_source,
|
||||
status: status_code_num,
|
||||
error: Some(search_error.to_string()),
|
||||
reason: None,
|
||||
});
|
||||
}
|
||||
|
||||
@@ -15,42 +15,6 @@ pub struct UploadAttachmentResponse {
|
||||
pub attachments: Vec<String>,
|
||||
}
|
||||
|
||||
/// One attachment record as stored by the server.
///
/// NOTE(review): field meanings are inferred from their names; verify against
/// the server's attachment schema.
#[derive(Debug, Serialize, Deserialize)]
pub struct AttachmentSource {
    pub id: String,
    pub created: String,
    pub updated: String,
    pub session: String,
    pub name: String,
    pub icon: String,
    pub url: String,
    pub size: u64,
}

/// A single search hit wrapping an [`AttachmentSource`].
///
/// The underscore-prefixed fields mirror the wire format of the server's
/// search response — presumably an Elasticsearch-style hit envelope
/// (`_index`, `_id`, `_score`, `_source`); confirm against the API.
#[derive(Debug, Serialize, Deserialize)]
pub struct AttachmentHit {
    pub _index: String,
    pub _type: Option<String>,
    pub _id: String,
    pub _score: Option<f64>,
    pub _source: AttachmentSource,
}

/// The `hits` section of an attachment search response.
#[derive(Debug, Serialize, Deserialize)]
pub struct AttachmentHits {
    // Kept as raw JSON: the server may send either a number or an object here.
    // TODO(review): confirm the actual shape of `total`.
    pub total: Value,
    pub max_score: Option<f64>,
    pub hits: Option<Vec<AttachmentHit>>,
}

/// Top-level attachment search response.
#[derive(Debug, Serialize, Deserialize)]
pub struct GetAttachmentResponse {
    pub took: u32,
    pub timed_out: bool,
    pub _shards: Option<Value>,
    pub hits: AttachmentHits,
}
|
||||
|
||||
#[derive(Debug, Serialize, Deserialize)]
|
||||
pub struct DeleteAttachmentResponse {
|
||||
pub _id: String,
|
||||
@@ -60,7 +24,6 @@ pub struct DeleteAttachmentResponse {
|
||||
#[command]
|
||||
pub async fn upload_attachment(
|
||||
server_id: String,
|
||||
session_id: String,
|
||||
file_paths: Vec<PathBuf>,
|
||||
) -> Result<UploadAttachmentResponse, String> {
|
||||
let mut form = Form::new();
|
||||
@@ -82,10 +45,12 @@ pub async fn upload_attachment(
|
||||
form = form.part("files", part);
|
||||
}
|
||||
|
||||
let server = get_server_by_id(&server_id).ok_or("Server not found")?;
|
||||
let url = HttpClient::join_url(&server.endpoint, &format!("chat/{}/_upload", session_id));
|
||||
let server = get_server_by_id(&server_id)
|
||||
.await
|
||||
.ok_or("Server not found")?;
|
||||
let url = HttpClient::join_url(&server.endpoint, &format!("attachment/_upload"));
|
||||
|
||||
let token = get_server_token(&server_id).await?;
|
||||
let token = get_server_token(&server_id).await;
|
||||
let mut headers = HashMap::new();
|
||||
if let Some(token) = token {
|
||||
headers.insert("X-API-TOKEN".to_string(), token.access_token);
|
||||
@@ -107,20 +72,25 @@ pub async fn upload_attachment(
|
||||
}
|
||||
|
||||
#[command]
|
||||
pub async fn get_attachment(
|
||||
pub async fn get_attachment_by_ids(
|
||||
server_id: String,
|
||||
session_id: String,
|
||||
) -> Result<GetAttachmentResponse, String> {
|
||||
let mut query_params = Vec::new();
|
||||
query_params.push(format!("session={}", session_id));
|
||||
attachments: Vec<String>,
|
||||
) -> Result<Value, String> {
|
||||
println!("get_attachment_by_ids server_id: {}", server_id);
|
||||
println!("get_attachment_by_ids attachments: {:?}", attachments);
|
||||
|
||||
let response = HttpClient::get(&server_id, "/attachment/_search", Some(query_params))
|
||||
let request_body = serde_json::json!({
|
||||
"attachments": attachments
|
||||
});
|
||||
let body = reqwest::Body::from(serde_json::to_string(&request_body).unwrap());
|
||||
|
||||
let response = HttpClient::post(&server_id, "/attachment/_search", None, Some(body))
|
||||
.await
|
||||
.map_err(|e| format!("Request error: {}", e))?;
|
||||
|
||||
let body = get_response_body_text(response).await?;
|
||||
|
||||
serde_json::from_str::<GetAttachmentResponse>(&body)
|
||||
serde_json::from_str::<Value>(&body)
|
||||
.map_err(|e| format!("Failed to parse attachment response: {}", e))
|
||||
}
|
||||
|
||||
|
||||
@@ -4,7 +4,7 @@ use crate::server::servers::{
|
||||
get_server_by_id, persist_servers, persist_servers_token, save_access_token, save_server,
|
||||
try_register_server_to_search_source,
|
||||
};
|
||||
use tauri::{AppHandle, Runtime};
|
||||
use tauri::AppHandle;
|
||||
|
||||
#[allow(dead_code)]
|
||||
fn request_access_token_url(request_id: &str) -> String {
|
||||
@@ -13,22 +13,22 @@ fn request_access_token_url(request_id: &str) -> String {
|
||||
}
|
||||
|
||||
#[tauri::command]
|
||||
pub async fn handle_sso_callback<R: Runtime>(
|
||||
app_handle: AppHandle<R>,
|
||||
pub async fn handle_sso_callback(
|
||||
app_handle: AppHandle,
|
||||
server_id: String,
|
||||
request_id: String,
|
||||
code: String,
|
||||
) -> Result<(), String> {
|
||||
// Retrieve the server details using the server ID
|
||||
let server = get_server_by_id(&server_id);
|
||||
let server = get_server_by_id(&server_id).await;
|
||||
|
||||
let expire_in = 3600; // TODO, need to update to actual expire_in value
|
||||
if let Some(mut server) = server {
|
||||
// Save the access token for the server
|
||||
let access_token = ServerAccessToken::new(server_id.clone(), code.clone(), expire_in);
|
||||
// dbg!(&server_id, &request_id, &code, &token);
|
||||
save_access_token(server_id.clone(), access_token);
|
||||
persist_servers_token(&app_handle)?;
|
||||
save_access_token(server_id.clone(), access_token).await;
|
||||
persist_servers_token(&app_handle).await?;
|
||||
|
||||
// Register the server to the search source
|
||||
try_register_server_to_search_source(app_handle.clone(), &server).await;
|
||||
@@ -41,7 +41,7 @@ pub async fn handle_sso_callback<R: Runtime>(
|
||||
Ok(p) => {
|
||||
server.profile = Some(p);
|
||||
server.available = true;
|
||||
save_server(&server);
|
||||
save_server(&server).await;
|
||||
persist_servers(&app_handle).await?;
|
||||
Ok(())
|
||||
}
|
||||
|
||||
@@ -1,11 +1,12 @@
|
||||
use crate::common::connector::Connector;
|
||||
use crate::common::search::parse_search_results;
|
||||
use crate::server::http_client::HttpClient;
|
||||
use crate::server::http_client::{HttpClient, status_code_check};
|
||||
use crate::server::servers::get_all_servers;
|
||||
use http::StatusCode;
|
||||
use lazy_static::lazy_static;
|
||||
use std::collections::HashMap;
|
||||
use std::sync::{Arc, RwLock};
|
||||
use tauri::{AppHandle, Runtime};
|
||||
use tauri::AppHandle;
|
||||
|
||||
lazy_static! {
|
||||
static ref CONNECTOR_CACHE: Arc<RwLock<HashMap<String, HashMap<String, Connector>>>> =
|
||||
@@ -28,8 +29,8 @@ pub fn get_connector_by_id(server_id: &str, connector_id: &str) -> Option<Connec
|
||||
Some(connector.clone())
|
||||
}
|
||||
|
||||
pub async fn refresh_all_connectors<R: Runtime>(app_handle: &AppHandle<R>) -> Result<(), String> {
|
||||
let servers = get_all_servers();
|
||||
pub async fn refresh_all_connectors(app_handle: &AppHandle) -> Result<(), String> {
|
||||
let servers = get_all_servers().await;
|
||||
|
||||
// Collect all the tasks for fetching and refreshing connectors
|
||||
let mut server_map = HashMap::new();
|
||||
@@ -107,6 +108,7 @@ pub async fn fetch_connectors_by_server(id: &str) -> Result<Vec<Connector>, Stri
|
||||
// dbg!("Error fetching connector for id {}: {}", &id, &e);
|
||||
format!("Error fetching connector: {}", e)
|
||||
})?;
|
||||
status_code_check(&resp, &[StatusCode::OK, StatusCode::CREATED])?;
|
||||
|
||||
// Parse the search results directly from the response body
|
||||
let datasource: Vec<Connector> = parse_search_results(resp)
|
||||
@@ -120,8 +122,8 @@ pub async fn fetch_connectors_by_server(id: &str) -> Result<Vec<Connector>, Stri
|
||||
}
|
||||
|
||||
#[tauri::command]
|
||||
pub async fn get_connectors_by_server<R: Runtime>(
|
||||
_app_handle: AppHandle<R>,
|
||||
pub async fn get_connectors_by_server(
|
||||
_app_handle: AppHandle,
|
||||
id: String,
|
||||
) -> Result<Vec<Connector>, String> {
|
||||
let connectors = fetch_connectors_by_server(&id).await?;
|
||||
|
||||
@@ -1,12 +1,13 @@
|
||||
use crate::common::datasource::DataSource;
|
||||
use crate::common::search::parse_search_results;
|
||||
use crate::server::connector::get_connector_by_id;
|
||||
use crate::server::http_client::HttpClient;
|
||||
use crate::server::http_client::{HttpClient, status_code_check};
|
||||
use crate::server::servers::get_all_servers;
|
||||
use http::StatusCode;
|
||||
use lazy_static::lazy_static;
|
||||
use std::collections::HashMap;
|
||||
use std::sync::{Arc, RwLock};
|
||||
use tauri::{AppHandle, Runtime};
|
||||
use tauri::AppHandle;
|
||||
|
||||
lazy_static! {
|
||||
static ref DATASOURCE_CACHE: Arc<RwLock<HashMap<String, HashMap<String, DataSource>>>> =
|
||||
@@ -25,15 +26,15 @@ pub fn save_datasource_to_cache(server_id: &str, datasources: Vec<DataSource>) {
|
||||
#[allow(dead_code)]
|
||||
pub fn get_datasources_from_cache(server_id: &str) -> Option<HashMap<String, DataSource>> {
|
||||
let cache = DATASOURCE_CACHE.read().unwrap(); // Acquire read lock
|
||||
// dbg!("cache: {:?}", &cache);
|
||||
// dbg!("cache: {:?}", &cache);
|
||||
let server_cache = cache.get(server_id)?; // Get the server's cache
|
||||
Some(server_cache.clone())
|
||||
}
|
||||
|
||||
pub async fn refresh_all_datasources<R: Runtime>(_app_handle: &AppHandle<R>) -> Result<(), String> {
|
||||
pub async fn refresh_all_datasources(_app_handle: &AppHandle) -> Result<(), String> {
|
||||
// dbg!("Attempting to refresh all datasources");
|
||||
|
||||
let servers = get_all_servers();
|
||||
let servers = get_all_servers().await;
|
||||
|
||||
let mut server_map = HashMap::new();
|
||||
|
||||
@@ -95,6 +96,7 @@ pub async fn datasource_search(
|
||||
let resp = HttpClient::post(id, "/datasource/_search", query_params, None)
|
||||
.await
|
||||
.map_err(|e| format!("Error fetching datasource: {}", e))?;
|
||||
status_code_check(&resp, &[StatusCode::OK, StatusCode::CREATED])?;
|
||||
|
||||
// Parse the search results from the response
|
||||
let datasources: Vec<DataSource> = parse_search_results(resp).await.map_err(|e| {
|
||||
@@ -117,6 +119,7 @@ pub async fn mcp_server_search(
|
||||
let resp = HttpClient::post(id, "/mcp_server/_search", query_params, None)
|
||||
.await
|
||||
.map_err(|e| format!("Error fetching datasource: {}", e))?;
|
||||
status_code_check(&resp, &[StatusCode::OK, StatusCode::CREATED])?;
|
||||
|
||||
// Parse the search results from the response
|
||||
let mcp_server: Vec<DataSource> = parse_search_results(resp).await.map_err(|e| {
|
||||
|
||||
@@ -1,16 +1,19 @@
|
||||
use crate::server::servers::{get_server_by_id, get_server_token};
|
||||
use http::{HeaderName, HeaderValue};
|
||||
use crate::util::app_lang::get_app_lang;
|
||||
use crate::util::platform::Platform;
|
||||
use http::{HeaderName, HeaderValue, StatusCode};
|
||||
use once_cell::sync::Lazy;
|
||||
use reqwest::{Client, Method, RequestBuilder};
|
||||
use std::collections::HashMap;
|
||||
use std::sync::LazyLock;
|
||||
use std::time::Duration;
|
||||
use tokio::sync::Mutex;
|
||||
|
||||
pub(crate) fn new_reqwest_http_client(accept_invalid_certs: bool) -> Client {
|
||||
Client::builder()
|
||||
.read_timeout(Duration::from_secs(3)) // Set a timeout of 3 second
|
||||
.connect_timeout(Duration::from_secs(3)) // Set a timeout of 3 second
|
||||
.timeout(Duration::from_secs(10)) // Set a timeout of 10 seconds
|
||||
.read_timeout(Duration::from_secs(60)) // Set a timeout of 60 second
|
||||
.connect_timeout(Duration::from_secs(30)) // Set a timeout of 30 second
|
||||
.timeout(Duration::from_secs(5 * 60)) // Set a timeout of 5 minute
|
||||
.danger_accept_invalid_certs(accept_invalid_certs) // allow self-signed certificates
|
||||
.build()
|
||||
.expect("Failed to build client")
|
||||
@@ -26,6 +29,26 @@ pub static HTTP_CLIENT: Lazy<Mutex<Client>> = Lazy::new(|| {
|
||||
Mutex::new(new_reqwest_http_client(allow_self_signature))
|
||||
});
|
||||
|
||||
/// These header values won't change during a process's lifetime.
///
/// They identify the client to the Coco server: OS name/version/CPU
/// architecture plus the application name and version. Computed once on first
/// access and then reused for every request.
static STATIC_HEADERS: LazyLock<HashMap<String, String>> = LazyLock::new(|| {
    HashMap::from([
        (
            "X-OS-NAME".into(),
            Platform::current()
                .to_os_name_http_header_str()
                .into_owned(),
        ),
        (
            "X-OS-VER".into(),
            // Panics (at most once) on exotic systems where sysinfo cannot
            // determine an OS version.
            sysinfo::System::os_version()
                .expect("sysinfo::System::os_version() should be Some on major systems"),
        ),
        ("X-OS-ARCH".into(), sysinfo::System::cpu_arch()),
        ("X-APP-NAME".into(), "coco-app".into()),
        // App version is baked in at compile time from Cargo.toml.
        ("X-APP-VER".into(), env!("CARGO_PKG_VERSION").into()),
    ])
});
|
||||
|
||||
pub struct HttpClient;
|
||||
|
||||
impl HttpClient {
|
||||
@@ -81,8 +104,32 @@ impl HttpClient {
|
||||
// Build the request
|
||||
let mut request_builder = client.request(method.clone(), url);
|
||||
|
||||
// Populate the headers defined by us
|
||||
let mut req_headers = reqwest::header::HeaderMap::new();
|
||||
for (key, value) in STATIC_HEADERS.iter() {
|
||||
let key = HeaderName::from_bytes(key.as_bytes())
|
||||
.expect("headers defined by us should be valid");
|
||||
let value = HeaderValue::from_str(value.trim()).unwrap_or_else(|e| {
|
||||
panic!(
|
||||
"header value [{}] is invalid, error [{}], this should be unreachable",
|
||||
value, e
|
||||
);
|
||||
});
|
||||
req_headers.insert(key, value);
|
||||
}
|
||||
let app_lang = get_app_lang().await.to_string();
|
||||
req_headers.insert(
|
||||
"X-APP-LANG",
|
||||
HeaderValue::from_str(&app_lang).unwrap_or_else(|e| {
|
||||
panic!(
|
||||
"header value [{}] is invalid, error [{}], this should be unreachable",
|
||||
app_lang, e
|
||||
);
|
||||
}),
|
||||
);
|
||||
|
||||
// Headers from the function parameter
|
||||
if let Some(h) = headers {
|
||||
let mut req_headers = reqwest::header::HeaderMap::new();
|
||||
for (key, value) in h.into_iter() {
|
||||
match (
|
||||
HeaderName::from_bytes(key.as_bytes()),
|
||||
@@ -106,10 +153,8 @@ impl HttpClient {
|
||||
}
|
||||
|
||||
if let Some(params) = query_params {
|
||||
let query: Vec<(&str, &str)> = params
|
||||
.iter()
|
||||
.filter_map(|s| s.split_once('='))
|
||||
.collect();
|
||||
let query: Vec<(&str, &str)> =
|
||||
params.iter().filter_map(|s| s.split_once('=')).collect();
|
||||
request_builder = request_builder.query(&query);
|
||||
}
|
||||
|
||||
@@ -121,7 +166,6 @@ impl HttpClient {
|
||||
request_builder
|
||||
}
|
||||
|
||||
|
||||
pub async fn send_request(
|
||||
server_id: &str,
|
||||
method: Method,
|
||||
@@ -131,14 +175,14 @@ impl HttpClient {
|
||||
body: Option<reqwest::Body>,
|
||||
) -> Result<reqwest::Response, String> {
|
||||
// Fetch the server using the server_id
|
||||
let server = get_server_by_id(server_id);
|
||||
let server = get_server_by_id(server_id).await;
|
||||
if let Some(s) = server {
|
||||
// Construct the URL
|
||||
let url = HttpClient::join_url(&s.endpoint, path);
|
||||
|
||||
// Retrieve the token for the server (token is optional)
|
||||
let token = get_server_token(server_id)
|
||||
.await?
|
||||
.await
|
||||
.map(|t| t.access_token.clone());
|
||||
|
||||
let mut headers = if let Some(custom_headers) = custom_headers {
|
||||
@@ -161,7 +205,7 @@ impl HttpClient {
|
||||
|
||||
Self::send_raw_request(method, &url, query_params, Some(headers), body).await
|
||||
} else {
|
||||
Err("Server not found".to_string())
|
||||
Err(format!("Server [{}] not found", server_id))
|
||||
}
|
||||
}
|
||||
|
||||
@@ -171,8 +215,7 @@ impl HttpClient {
|
||||
path: &str,
|
||||
query_params: Option<Vec<String>>,
|
||||
) -> Result<reqwest::Response, String> {
|
||||
HttpClient::send_request(server_id, Method::GET, path, None, query_params,
|
||||
None).await
|
||||
HttpClient::send_request(server_id, Method::GET, path, None, query_params, None).await
|
||||
}
|
||||
|
||||
// Convenience method for POST requests
|
||||
@@ -200,7 +243,7 @@ impl HttpClient {
|
||||
query_params,
|
||||
body,
|
||||
)
|
||||
.await
|
||||
.await
|
||||
}
|
||||
|
||||
// Convenience method for PUT requests
|
||||
@@ -220,7 +263,7 @@ impl HttpClient {
|
||||
query_params,
|
||||
body,
|
||||
)
|
||||
.await
|
||||
.await
|
||||
}
|
||||
|
||||
// Convenience method for DELETE requests
|
||||
@@ -239,6 +282,33 @@ impl HttpClient {
|
||||
query_params,
|
||||
None,
|
||||
)
|
||||
.await
|
||||
.await
|
||||
}
|
||||
}
|
||||
|
||||
/// Helper function to check status code.
|
||||
///
|
||||
/// If the status code is not in the `allowed_status_codes` list, return an error.
|
||||
pub(crate) fn status_code_check(
|
||||
response: &reqwest::Response,
|
||||
allowed_status_codes: &[StatusCode],
|
||||
) -> Result<(), String> {
|
||||
let status_code = response.status();
|
||||
|
||||
if !allowed_status_codes.contains(&status_code) {
|
||||
let msg = format!(
|
||||
"Response of request [{}] status code failed: status code [{}], which is not in the 'allow' list {:?}",
|
||||
response.url(),
|
||||
status_code,
|
||||
allowed_status_codes
|
||||
.iter()
|
||||
.map(|status| status.to_string())
|
||||
.collect::<Vec<String>>()
|
||||
);
|
||||
log::warn!("{}", msg);
|
||||
|
||||
Err(msg)
|
||||
} else {
|
||||
Ok(())
|
||||
}
|
||||
}
|
||||
|
||||
@@ -8,6 +8,6 @@ pub mod http_client;
|
||||
pub mod profile;
|
||||
pub mod search;
|
||||
pub mod servers;
|
||||
pub mod synthesize;
|
||||
pub mod system_settings;
|
||||
pub mod transcription;
|
||||
pub mod websocket;
|
||||
|
||||
@@ -1,11 +1,11 @@
|
||||
use crate::common::http::get_response_body_text;
|
||||
use crate::common::profile::UserProfile;
|
||||
use crate::server::http_client::HttpClient;
|
||||
use tauri::{AppHandle, Runtime};
|
||||
use tauri::AppHandle;
|
||||
|
||||
#[tauri::command]
|
||||
pub async fn get_user_profiles<R: Runtime>(
|
||||
_app_handle: AppHandle<R>,
|
||||
pub async fn get_user_profiles(
|
||||
_app_handle: AppHandle,
|
||||
server_id: String,
|
||||
) -> Result<UserProfile, String> {
|
||||
// Use the generic GET method from HttpClient
|
||||
|
||||
@@ -6,10 +6,10 @@ use crate::common::server::Server;
|
||||
use crate::common::traits::SearchSource;
|
||||
use crate::server::http_client::HttpClient;
|
||||
use async_trait::async_trait;
|
||||
// use futures::stream::StreamExt;
|
||||
use ordered_float::OrderedFloat;
|
||||
use reqwest::StatusCode;
|
||||
use std::collections::HashMap;
|
||||
// use std::hash::Hash;
|
||||
use tauri::AppHandle;
|
||||
|
||||
#[allow(dead_code)]
|
||||
pub(crate) struct DocumentsSizedCollector {
|
||||
@@ -44,7 +44,7 @@ impl DocumentsSizedCollector {
|
||||
}
|
||||
}
|
||||
|
||||
fn documents(self) -> impl ExactSizeIterator<Item=Document> {
|
||||
fn documents(self) -> impl ExactSizeIterator<Item = Document> {
|
||||
self.docs.into_iter().map(|(_, doc, _)| doc)
|
||||
}
|
||||
|
||||
@@ -90,7 +90,11 @@ impl SearchSource for CocoSearchSource {
|
||||
}
|
||||
}
|
||||
|
||||
async fn search(&self, query: SearchQuery) -> Result<QueryResponse, SearchError> {
|
||||
async fn search(
|
||||
&self,
|
||||
_tauri_app_handle: AppHandle,
|
||||
query: SearchQuery,
|
||||
) -> Result<QueryResponse, SearchError> {
|
||||
let url = "/query/_search";
|
||||
let mut total_hits = 0;
|
||||
let mut hits: Vec<(Document, f64)> = Vec::new();
|
||||
@@ -108,7 +112,18 @@ impl SearchSource for CocoSearchSource {
|
||||
|
||||
let response = HttpClient::get(&self.server.id, &url, Some(query_params))
|
||||
.await
|
||||
.map_err(|e| SearchError::HttpError(format!("{}", e)))?;
|
||||
.map_err(|e| SearchError::HttpError {
|
||||
status_code: None,
|
||||
msg: format!("{}", e),
|
||||
})?;
|
||||
let status_code = response.status();
|
||||
|
||||
if ![StatusCode::OK, StatusCode::CREATED].contains(&status_code) {
|
||||
return Err(SearchError::HttpError {
|
||||
status_code: Some(status_code),
|
||||
msg: format!("Request failed with status code [{}]", status_code),
|
||||
});
|
||||
}
|
||||
|
||||
// Use the helper function to parse the response body
|
||||
let response_body = get_response_body_text(response)
|
||||
@@ -123,7 +138,6 @@ impl SearchSource for CocoSearchSource {
|
||||
let parsed: SearchResponse<Document> = serde_json::from_str(&response_body)
|
||||
.map_err(|e| SearchError::ParseError(format!("{}", e)))?;
|
||||
|
||||
|
||||
// Process the parsed response
|
||||
total_hits = parsed.hits.total.value as usize;
|
||||
|
||||
|
||||
@@ -1,3 +1,4 @@
|
||||
use crate::COCO_TAURI_STORE;
|
||||
use crate::common::http::get_response_body_text;
|
||||
use crate::common::register::SearchSourceRegistry;
|
||||
use crate::common::server::{AuthProvider, Provider, Server, ServerAccessToken, Sso, Version};
|
||||
@@ -5,68 +6,71 @@ use crate::server::connector::fetch_connectors_by_server;
|
||||
use crate::server::datasource::datasource_search;
|
||||
use crate::server::http_client::HttpClient;
|
||||
use crate::server::search::CocoSearchSource;
|
||||
use crate::COCO_TAURI_STORE;
|
||||
use lazy_static::lazy_static;
|
||||
use function_name;
|
||||
use http::StatusCode;
|
||||
use reqwest::Method;
|
||||
use serde_json::from_value;
|
||||
use serde_json::Value as JsonValue;
|
||||
use serde_json::from_value;
|
||||
use std::collections::HashMap;
|
||||
use std::sync::Arc;
|
||||
use std::sync::RwLock;
|
||||
use tauri::Runtime;
|
||||
use std::sync::LazyLock;
|
||||
use tauri::{AppHandle, Manager};
|
||||
use tauri_plugin_store::StoreExt;
|
||||
// Assuming you're using serde_json
|
||||
use tokio::sync::RwLock;
|
||||
|
||||
lazy_static! {
|
||||
static ref SERVER_CACHE: Arc<RwLock<HashMap<String, Server>>> =
|
||||
Arc::new(RwLock::new(HashMap::new()));
|
||||
static ref SERVER_TOKEN: Arc<RwLock<HashMap<String, ServerAccessToken>>> =
|
||||
Arc::new(RwLock::new(HashMap::new()));
|
||||
}
|
||||
/// Coco sever list
|
||||
static SERVER_LIST_CACHE: LazyLock<RwLock<HashMap<String, Server>>> =
|
||||
LazyLock::new(|| RwLock::new(HashMap::new()));
|
||||
|
||||
#[allow(dead_code)]
|
||||
fn check_server_exists(id: &str) -> bool {
|
||||
let cache = SERVER_CACHE.read().unwrap(); // Acquire read lock
|
||||
cache.contains_key(id)
|
||||
}
|
||||
/// If a server has a token stored here that has not expired, it is considered logged in.
|
||||
///
|
||||
/// Since the `expire_at` field of `struct ServerAccessToken` is currently unused,
|
||||
/// all servers stored here are treated as logged in.
|
||||
static SERVER_TOKEN_LIST_CACHE: LazyLock<RwLock<HashMap<String, ServerAccessToken>>> =
|
||||
LazyLock::new(|| RwLock::new(HashMap::new()));
|
||||
|
||||
pub fn get_server_by_id(id: &str) -> Option<Server> {
|
||||
let cache = SERVER_CACHE.read().unwrap(); // Acquire read lock
|
||||
/// `SERVER_LIST_CACHE` will be stored in KV store COCO_TAURI_STORE, under this key.
|
||||
pub const COCO_SERVERS: &str = "coco_servers";
|
||||
|
||||
/// `SERVER_TOKEN_LIST_CACHE` will be stored in KV store COCO_TAURI_STORE, under this key.
|
||||
const COCO_SERVER_TOKENS: &str = "coco_server_tokens";
|
||||
|
||||
pub async fn get_server_by_id(id: &str) -> Option<Server> {
|
||||
let cache = SERVER_LIST_CACHE.read().await;
|
||||
cache.get(id).cloned()
|
||||
}
|
||||
|
||||
#[tauri::command]
|
||||
pub async fn get_server_token(id: &str) -> Result<Option<ServerAccessToken>, String> {
|
||||
let cache = SERVER_TOKEN.read().map_err(|err| err.to_string())?;
|
||||
pub async fn get_server_token(id: &str) -> Option<ServerAccessToken> {
|
||||
let cache = SERVER_TOKEN_LIST_CACHE.read().await;
|
||||
|
||||
Ok(cache.get(id).cloned())
|
||||
cache.get(id).cloned()
|
||||
}
|
||||
|
||||
pub fn save_access_token(server_id: String, token: ServerAccessToken) -> bool {
|
||||
let mut cache = SERVER_TOKEN.write().unwrap();
|
||||
pub async fn save_access_token(server_id: String, token: ServerAccessToken) -> bool {
|
||||
let mut cache = SERVER_TOKEN_LIST_CACHE.write().await;
|
||||
cache.insert(server_id, token).is_none()
|
||||
}
|
||||
|
||||
fn check_endpoint_exists(endpoint: &str) -> bool {
|
||||
let cache = SERVER_CACHE.read().unwrap();
|
||||
async fn check_endpoint_exists(endpoint: &str) -> bool {
|
||||
let cache = SERVER_LIST_CACHE.read().await;
|
||||
cache.values().any(|server| server.endpoint == endpoint)
|
||||
}
|
||||
|
||||
pub fn save_server(server: &Server) -> bool {
|
||||
let mut cache = SERVER_CACHE.write().unwrap();
|
||||
cache.insert(server.id.clone(), server.clone()).is_none() // If the server id did not exist, `insert` will return `None`
|
||||
/// Return true if `server` does not exists in the server list, i.e., it is a newly-added
|
||||
/// server.
|
||||
pub async fn save_server(server: &Server) -> bool {
|
||||
let mut cache = SERVER_LIST_CACHE.write().await;
|
||||
cache.insert(server.id.clone(), server.clone()).is_none()
|
||||
}
|
||||
|
||||
fn remove_server_by_id(id: String) -> bool {
|
||||
/// Return the removed `Server` if it exists in the server list.
|
||||
async fn remove_server_by_id(id: &str) -> Option<Server> {
|
||||
log::debug!("remove server by id: {}", &id);
|
||||
let mut cache = SERVER_CACHE.write().unwrap();
|
||||
let deleted = cache.remove(id.as_str());
|
||||
deleted.is_some()
|
||||
let mut cache = SERVER_LIST_CACHE.write().await;
|
||||
cache.remove(id)
|
||||
}
|
||||
|
||||
pub async fn persist_servers<R: Runtime>(app_handle: &AppHandle<R>) -> Result<(), String> {
|
||||
let cache = SERVER_CACHE.read().unwrap(); // Acquire a read lock, not a write lock, since you're not modifying the cache
|
||||
pub async fn persist_servers(app_handle: &AppHandle) -> Result<(), String> {
|
||||
let cache = SERVER_LIST_CACHE.read().await;
|
||||
|
||||
// Convert HashMap to Vec for serialization (iterating over values of HashMap)
|
||||
let servers: Vec<Server> = cache.values().cloned().collect();
|
||||
@@ -86,14 +90,16 @@ pub async fn persist_servers<R: Runtime>(app_handle: &AppHandle<R>) -> Result<()
|
||||
Ok(())
|
||||
}
|
||||
|
||||
pub fn remove_server_token(id: &str) -> bool {
|
||||
/// Return true if the server token of the server specified by `id` exists in
|
||||
/// the token list and gets deleted.
|
||||
pub async fn remove_server_token(id: &str) -> bool {
|
||||
log::debug!("remove server token by id: {}", &id);
|
||||
let mut cache = SERVER_TOKEN.write().unwrap();
|
||||
let mut cache = SERVER_TOKEN_LIST_CACHE.write().await;
|
||||
cache.remove(id).is_some()
|
||||
}
|
||||
|
||||
pub fn persist_servers_token<R: Runtime>(app_handle: &AppHandle<R>) -> Result<(), String> {
|
||||
let cache = SERVER_TOKEN.read().unwrap(); // Acquire a read lock, not a write lock, since you're not modifying the cache
|
||||
pub async fn persist_servers_token(app_handle: &AppHandle) -> Result<(), String> {
|
||||
let cache = SERVER_TOKEN_LIST_CACHE.read().await;
|
||||
|
||||
// Convert HashMap to Vec for serialization (iterating over values of HashMap)
|
||||
let servers: Vec<ServerAccessToken> = cache.values().cloned().collect();
|
||||
@@ -151,9 +157,7 @@ fn get_default_server() -> Server {
|
||||
}
|
||||
}
|
||||
|
||||
pub async fn load_servers_token<R: Runtime>(
|
||||
app_handle: &AppHandle<R>,
|
||||
) -> Result<Vec<ServerAccessToken>, String> {
|
||||
pub async fn load_servers_token(app_handle: &AppHandle) -> Result<Vec<ServerAccessToken>, String> {
|
||||
log::debug!("Attempting to load servers token");
|
||||
|
||||
let store = app_handle
|
||||
@@ -173,30 +177,46 @@ pub async fn load_servers_token<R: Runtime>(
|
||||
servers.ok_or_else(|| "Failed to read servers from store: No servers found".to_string())?;
|
||||
|
||||
// Convert each item in the JsonValue array to a Server
|
||||
if let JsonValue::Array(servers_array) = servers {
|
||||
// Deserialize each JsonValue into Server, filtering out any errors
|
||||
let deserialized_tokens: Vec<ServerAccessToken> = servers_array
|
||||
.into_iter()
|
||||
.filter_map(|server_json| from_value(server_json).ok()) // Only keep valid Server instances
|
||||
.collect();
|
||||
match servers {
|
||||
JsonValue::Array(servers_array) => {
|
||||
let mut deserialized_tokens: Vec<ServerAccessToken> =
|
||||
Vec::with_capacity(servers_array.len());
|
||||
for server_json in servers_array {
|
||||
match from_value(server_json.clone()) {
|
||||
Ok(token) => {
|
||||
deserialized_tokens.push(token);
|
||||
}
|
||||
Err(e) => {
|
||||
panic!(
|
||||
"failed to deserialize JSON [{}] to [struct ServerAccessToken], error [{}], store [{}] key [{}] is possibly corrupted!",
|
||||
server_json, e, COCO_TAURI_STORE, COCO_SERVER_TOKENS
|
||||
);
|
||||
}
|
||||
}
|
||||
}
|
||||
|
||||
if deserialized_tokens.is_empty() {
|
||||
return Err("Failed to deserialize any servers from the store.".to_string());
|
||||
if deserialized_tokens.is_empty() {
|
||||
return Err("Failed to deserialize any servers from the store.".to_string());
|
||||
}
|
||||
|
||||
for server in deserialized_tokens.iter() {
|
||||
save_access_token(server.id.clone(), server.clone()).await;
|
||||
}
|
||||
|
||||
log::debug!("loaded {:?} servers's token", &deserialized_tokens.len());
|
||||
|
||||
Ok(deserialized_tokens)
|
||||
}
|
||||
|
||||
for server in deserialized_tokens.iter() {
|
||||
save_access_token(server.id.clone(), server.clone());
|
||||
_ => {
|
||||
unreachable!(
|
||||
"coco server tokens should be stored in an array under store [{}] key [{}], but it is not",
|
||||
COCO_TAURI_STORE, COCO_SERVER_TOKENS
|
||||
);
|
||||
}
|
||||
|
||||
log::debug!("loaded {:?} servers's token", &deserialized_tokens.len());
|
||||
|
||||
Ok(deserialized_tokens)
|
||||
} else {
|
||||
Err("Failed to read servers from store: Invalid format".to_string())
|
||||
}
|
||||
}
|
||||
|
||||
pub async fn load_servers<R: Runtime>(app_handle: &AppHandle<R>) -> Result<Vec<Server>, String> {
|
||||
pub async fn load_servers(app_handle: &AppHandle) -> Result<Vec<Server>, String> {
|
||||
let store = app_handle
|
||||
.store(COCO_TAURI_STORE)
|
||||
.expect("create or load a store should not fail");
|
||||
@@ -214,33 +234,46 @@ pub async fn load_servers<R: Runtime>(app_handle: &AppHandle<R>) -> Result<Vec<S
|
||||
servers.ok_or_else(|| "Failed to read servers from store: No servers found".to_string())?;
|
||||
|
||||
// Convert each item in the JsonValue array to a Server
|
||||
if let JsonValue::Array(servers_array) = servers {
|
||||
// Deserialize each JsonValue into Server, filtering out any errors
|
||||
let deserialized_servers: Vec<Server> = servers_array
|
||||
.into_iter()
|
||||
.filter_map(|server_json| from_value(server_json).ok()) // Only keep valid Server instances
|
||||
.collect();
|
||||
match servers {
|
||||
JsonValue::Array(servers_array) => {
|
||||
let mut deserialized_servers = Vec::with_capacity(servers_array.len());
|
||||
for server_json in servers_array {
|
||||
match from_value(server_json.clone()) {
|
||||
Ok(server) => {
|
||||
deserialized_servers.push(server);
|
||||
}
|
||||
Err(e) => {
|
||||
panic!(
|
||||
"failed to deserialize JSON [{}] to [struct Server], error [{}], store [{}] key [{}] is possibly corrupted!",
|
||||
server_json, e, COCO_TAURI_STORE, COCO_SERVERS
|
||||
);
|
||||
}
|
||||
}
|
||||
}
|
||||
|
||||
if deserialized_servers.is_empty() {
|
||||
return Err("Failed to deserialize any servers from the store.".to_string());
|
||||
if deserialized_servers.is_empty() {
|
||||
return Err("Failed to deserialize any servers from the store.".to_string());
|
||||
}
|
||||
|
||||
for server in deserialized_servers.iter() {
|
||||
save_server(&server).await;
|
||||
}
|
||||
|
||||
log::debug!("load servers: {:?}", &deserialized_servers);
|
||||
|
||||
Ok(deserialized_servers)
|
||||
}
|
||||
|
||||
for server in deserialized_servers.iter() {
|
||||
save_server(&server);
|
||||
_ => {
|
||||
unreachable!(
|
||||
"coco servers should be stored in an array under store [{}] key [{}], but it is not",
|
||||
COCO_TAURI_STORE, COCO_SERVERS
|
||||
);
|
||||
}
|
||||
|
||||
log::debug!("load servers: {:?}", &deserialized_servers);
|
||||
|
||||
Ok(deserialized_servers)
|
||||
} else {
|
||||
Err("Failed to read servers from store: Invalid format".to_string())
|
||||
}
|
||||
}
|
||||
|
||||
/// Function to load servers or insert a default one if none exist
|
||||
pub async fn load_or_insert_default_server<R: Runtime>(
|
||||
app_handle: &AppHandle<R>,
|
||||
) -> Result<Vec<Server>, String> {
|
||||
pub async fn load_or_insert_default_server(app_handle: &AppHandle) -> Result<Vec<Server>, String> {
|
||||
log::debug!("Attempting to load or insert default server");
|
||||
|
||||
let exists_servers = load_servers(&app_handle).await;
|
||||
@@ -250,7 +283,7 @@ pub async fn load_or_insert_default_server<R: Runtime>(
|
||||
}
|
||||
|
||||
let default = get_default_server();
|
||||
save_server(&default);
|
||||
save_server(&default).await;
|
||||
|
||||
log::debug!("loaded default servers");
|
||||
|
||||
@@ -258,47 +291,32 @@ pub async fn load_or_insert_default_server<R: Runtime>(
|
||||
}
|
||||
|
||||
#[tauri::command]
|
||||
pub async fn list_coco_servers<R: Runtime>(
|
||||
_app_handle: AppHandle<R>,
|
||||
) -> Result<Vec<Server>, String> {
|
||||
pub async fn list_coco_servers(app_handle: AppHandle) -> Result<Vec<Server>, String> {
|
||||
//hard fresh all server's info, in order to get the actual health
|
||||
refresh_all_coco_server_info(_app_handle.clone()).await;
|
||||
refresh_all_coco_server_info(app_handle.clone()).await;
|
||||
|
||||
let servers: Vec<Server> = get_all_servers().await;
|
||||
|
||||
let servers: Vec<Server> = get_all_servers();
|
||||
Ok(servers)
|
||||
}
|
||||
|
||||
#[allow(dead_code)]
|
||||
pub fn get_servers_as_hashmap() -> HashMap<String, Server> {
|
||||
let cache = SERVER_CACHE.read().unwrap();
|
||||
cache.clone()
|
||||
}
|
||||
|
||||
pub fn get_all_servers() -> Vec<Server> {
|
||||
let cache = SERVER_CACHE.read().unwrap();
|
||||
pub async fn get_all_servers() -> Vec<Server> {
|
||||
let cache = SERVER_LIST_CACHE.read().await;
|
||||
cache.values().cloned().collect()
|
||||
}
|
||||
|
||||
/// We store added Coco servers in the Tauri store using this key.
|
||||
pub const COCO_SERVERS: &str = "coco_servers";
|
||||
|
||||
const COCO_SERVER_TOKENS: &str = "coco_server_tokens";
|
||||
|
||||
pub async fn refresh_all_coco_server_info<R: Runtime>(app_handle: AppHandle<R>) {
|
||||
let servers = get_all_servers();
|
||||
pub async fn refresh_all_coco_server_info(app_handle: AppHandle) {
|
||||
let servers = get_all_servers().await;
|
||||
for server in servers {
|
||||
let _ = refresh_coco_server_info(app_handle.clone(), server.id.clone()).await;
|
||||
}
|
||||
}
|
||||
|
||||
#[tauri::command]
|
||||
pub async fn refresh_coco_server_info<R: Runtime>(
|
||||
app_handle: AppHandle<R>,
|
||||
id: String,
|
||||
) -> Result<Server, String> {
|
||||
pub async fn refresh_coco_server_info(app_handle: AppHandle, id: String) -> Result<Server, String> {
|
||||
// Retrieve the server from the cache
|
||||
let cached_server = {
|
||||
let cache = SERVER_CACHE.read().unwrap();
|
||||
let cache = SERVER_LIST_CACHE.read().await;
|
||||
cache.get(&id).cloned()
|
||||
};
|
||||
|
||||
@@ -313,19 +331,16 @@ pub async fn refresh_coco_server_info<R: Runtime>(
|
||||
let profile = server.profile;
|
||||
|
||||
// Send request to fetch updated server info
|
||||
let response = HttpClient::get(&id, "/provider/_info", None)
|
||||
.await
|
||||
.map_err(|e| format!("Failed to contact the server: {}", e));
|
||||
|
||||
if response.is_err() {
|
||||
let _ = mark_server_as_offline(app_handle, &id).await;
|
||||
return Err(response.err().unwrap());
|
||||
}
|
||||
|
||||
let response = response?;
|
||||
let response = match HttpClient::get(&id, "/provider/_info", None).await {
|
||||
Ok(response) => response,
|
||||
Err(e) => {
|
||||
mark_server_as_offline(app_handle, &id).await;
|
||||
return Err(e);
|
||||
}
|
||||
};
|
||||
|
||||
if !response.status().is_success() {
|
||||
let _ = mark_server_as_offline(app_handle, &id).await;
|
||||
mark_server_as_offline(app_handle, &id).await;
|
||||
return Err(format!("Request failed with status: {}", response.status()));
|
||||
}
|
||||
|
||||
@@ -336,19 +351,26 @@ pub async fn refresh_coco_server_info<R: Runtime>(
|
||||
let mut updated_server: Server = serde_json::from_str(&body)
|
||||
.map_err(|e| format!("Failed to deserialize the response: {}", e))?;
|
||||
|
||||
// Mark server as online
|
||||
let _ = mark_server_as_online(app_handle.clone(), &id).await;
|
||||
|
||||
// Restore local state
|
||||
updated_server.id = id.clone();
|
||||
updated_server.builtin = is_builtin;
|
||||
updated_server.enabled = is_enabled;
|
||||
updated_server.available = true;
|
||||
updated_server.available = {
|
||||
if server.public {
|
||||
// Public Coco servers are available as long as they are online.
|
||||
true
|
||||
} else {
|
||||
// For non-public Coco servers, we still need to check if it is
|
||||
// logged in, i.e., has a token stored in `SERVER_TOKEN_LIST_CACHE`.
|
||||
get_server_token(&id).await.is_some()
|
||||
}
|
||||
};
|
||||
updated_server.profile = profile;
|
||||
trim_endpoint_last_forward_slash(&mut updated_server);
|
||||
|
||||
// Save and persist
|
||||
save_server(&updated_server);
|
||||
save_server(&updated_server).await;
|
||||
try_register_server_to_search_source(app_handle.clone(), &updated_server).await;
|
||||
persist_servers(&app_handle)
|
||||
.await
|
||||
.map_err(|e| format!("Failed to persist servers: {}", e))?;
|
||||
@@ -361,20 +383,17 @@ pub async fn refresh_coco_server_info<R: Runtime>(
|
||||
}
|
||||
|
||||
#[tauri::command]
|
||||
pub async fn add_coco_server<R: Runtime>(
|
||||
app_handle: AppHandle<R>,
|
||||
endpoint: String,
|
||||
) -> Result<Server, String> {
|
||||
pub async fn add_coco_server(app_handle: AppHandle, endpoint: String) -> Result<Server, String> {
|
||||
load_or_insert_default_server(&app_handle)
|
||||
.await
|
||||
.map_err(|e| format!("Failed to load default servers: {}", e))?;
|
||||
|
||||
let endpoint = endpoint.trim_end_matches('/');
|
||||
|
||||
if check_endpoint_exists(endpoint) {
|
||||
if check_endpoint_exists(endpoint).await {
|
||||
log::debug!(
|
||||
"This Coco server has already been registered: {:?}",
|
||||
&endpoint
|
||||
"trying to register a Coco server [{}] that has already been registered",
|
||||
endpoint
|
||||
);
|
||||
return Err("This Coco server has already been registered.".into());
|
||||
}
|
||||
@@ -386,6 +405,15 @@ pub async fn add_coco_server<R: Runtime>(
|
||||
|
||||
log::debug!("Get provider info response: {:?}", &response);
|
||||
|
||||
if response.status() != StatusCode::OK {
|
||||
log::debug!(
|
||||
"trying to register a Coco server [{}] that is possibly down",
|
||||
endpoint
|
||||
);
|
||||
|
||||
return Err("This Coco server is possibly down".into());
|
||||
}
|
||||
|
||||
let body = get_response_body_text(response).await?;
|
||||
|
||||
let mut server: Server = serde_json::from_str(&body)
|
||||
@@ -393,15 +421,32 @@ pub async fn add_coco_server<R: Runtime>(
|
||||
|
||||
trim_endpoint_last_forward_slash(&mut server);
|
||||
|
||||
// The JSON returned from `provider/_info` won't have this field, serde will set
|
||||
// it to an empty string during deserialization, we need to set a valid value here.
|
||||
if server.id.is_empty() {
|
||||
server.id = pizza_common::utils::uuid::Uuid::new().to_string();
|
||||
}
|
||||
|
||||
// Use the default name, if it is not set.
|
||||
if server.name.is_empty() {
|
||||
server.name = "Coco Server".to_string();
|
||||
}
|
||||
|
||||
save_server(&server);
|
||||
// Update the `available` field
|
||||
if server.public {
|
||||
// Serde already sets this to true, but just to make the code clear, do it again.
|
||||
server.available = true;
|
||||
} else {
|
||||
let opt_token = get_server_token(&server.id).await;
|
||||
assert!(
|
||||
opt_token.is_none(),
|
||||
"this Coco server is newly-added, we should have no token stored for it!"
|
||||
);
|
||||
// This is a non-public Coco server, and it is not logged in, so it is unavailable.
|
||||
server.available = false;
|
||||
}
|
||||
|
||||
save_server(&server).await;
|
||||
try_register_server_to_search_source(app_handle.clone(), &server).await;
|
||||
|
||||
persist_servers(&app_handle)
|
||||
@@ -413,58 +458,106 @@ pub async fn add_coco_server<R: Runtime>(
|
||||
}
|
||||
|
||||
#[tauri::command]
|
||||
pub async fn remove_coco_server<R: Runtime>(
|
||||
app_handle: AppHandle<R>,
|
||||
id: String,
|
||||
) -> Result<(), ()> {
|
||||
#[function_name::named]
|
||||
pub async fn remove_coco_server(app_handle: AppHandle, id: String) -> Result<(), ()> {
|
||||
let registry = app_handle.state::<SearchSourceRegistry>();
|
||||
registry.remove_source(id.as_str()).await;
|
||||
|
||||
remove_server_token(id.as_str());
|
||||
remove_server_by_id(id);
|
||||
|
||||
let opt_server = remove_server_by_id(id.as_str()).await;
|
||||
let Some(server) = opt_server else {
|
||||
panic!(
|
||||
"[{}()] invoked with a server [{}] that does not exist! Mismatched states between frontend and backend!",
|
||||
function_name!(),
|
||||
id
|
||||
);
|
||||
};
|
||||
persist_servers(&app_handle)
|
||||
.await
|
||||
.expect("failed to save servers");
|
||||
persist_servers_token(&app_handle).expect("failed to save server tokens");
|
||||
|
||||
// Only non-public Coco servers require tokens
|
||||
if !server.public {
|
||||
// If is logged in, clear the token as well.
|
||||
let deleted = remove_server_token(id.as_str()).await;
|
||||
if deleted {
|
||||
persist_servers_token(&app_handle)
|
||||
.await
|
||||
.expect("failed to save server tokens");
|
||||
}
|
||||
}
|
||||
|
||||
Ok(())
|
||||
}
|
||||
|
||||
#[tauri::command]
|
||||
pub async fn enable_server<R: Runtime>(app_handle: AppHandle<R>, id: String) -> Result<(), ()> {
|
||||
println!("enable_server: {}", id);
|
||||
#[function_name::named]
|
||||
pub async fn enable_server(app_handle: AppHandle, id: String) -> Result<(), ()> {
|
||||
let opt_server = get_server_by_id(id.as_str()).await;
|
||||
|
||||
let server = get_server_by_id(id.as_str());
|
||||
if let Some(mut server) = server {
|
||||
server.enabled = true;
|
||||
save_server(&server);
|
||||
let Some(mut server) = opt_server else {
|
||||
panic!(
|
||||
"[{}()] invoked with a server [{}] that does not exist! Mismatched states between frontend and backend!",
|
||||
function_name!(),
|
||||
id
|
||||
);
|
||||
};
|
||||
|
||||
// Register the server to the search source
|
||||
try_register_server_to_search_source(app_handle.clone(), &server).await;
|
||||
server.enabled = true;
|
||||
save_server(&server).await;
|
||||
|
||||
persist_servers(&app_handle)
|
||||
.await
|
||||
.expect("failed to save servers");
|
||||
}
|
||||
// Register the server to the search source
|
||||
try_register_server_to_search_source(app_handle.clone(), &server).await;
|
||||
|
||||
persist_servers(&app_handle)
|
||||
.await
|
||||
.expect("failed to save servers");
|
||||
Ok(())
|
||||
}
|
||||
|
||||
pub async fn try_register_server_to_search_source(
|
||||
app_handle: AppHandle<impl Runtime>,
|
||||
server: &Server,
|
||||
) {
|
||||
#[tauri::command]
|
||||
#[function_name::named]
|
||||
pub async fn disable_server(app_handle: AppHandle, id: String) -> Result<(), ()> {
|
||||
let opt_server = get_server_by_id(id.as_str()).await;
|
||||
|
||||
let Some(mut server) = opt_server else {
|
||||
panic!(
|
||||
"[{}()] invoked with a server [{}] that does not exist! Mismatched states between frontend and backend!",
|
||||
function_name!(),
|
||||
id
|
||||
);
|
||||
};
|
||||
|
||||
server.enabled = false;
|
||||
|
||||
let registry = app_handle.state::<SearchSourceRegistry>();
|
||||
registry.remove_source(id.as_str()).await;
|
||||
|
||||
save_server(&server).await;
|
||||
persist_servers(&app_handle)
|
||||
.await
|
||||
.expect("failed to save servers");
|
||||
|
||||
Ok(())
|
||||
}
|
||||
|
||||
/// For non-public Coco servers, we add it to the search source as long as it is
|
||||
/// enabled.
|
||||
///
|
||||
/// For public Coco server, an extra token is required.
|
||||
pub async fn try_register_server_to_search_source(app_handle: AppHandle, server: &Server) {
|
||||
if server.enabled {
|
||||
log::trace!(
|
||||
"Server {} is public: {} and available: {}",
|
||||
"Server [name: {}, id: {}] is public: {} and available: {}",
|
||||
&server.name,
|
||||
&server.id,
|
||||
&server.public,
|
||||
&server.available
|
||||
);
|
||||
|
||||
if !server.public {
|
||||
let token = get_server_token(&server.id).await;
|
||||
let opt_token = get_server_token(&server.id).await;
|
||||
|
||||
if !token.is_ok() || token.is_ok() && token.unwrap().is_none() {
|
||||
if opt_token.is_none() {
|
||||
log::debug!("Server {} is not public and no token was found", &server.id);
|
||||
return;
|
||||
}
|
||||
@@ -476,113 +569,107 @@ pub async fn try_register_server_to_search_source(
|
||||
}
|
||||
}
|
||||
|
||||
#[tauri::command]
|
||||
pub async fn mark_server_as_online<R: Runtime>(
|
||||
app_handle: AppHandle<R>, id: &str) -> Result<(), ()> {
|
||||
// println!("server_is_offline: {}", id);
|
||||
let server = get_server_by_id(id);
|
||||
#[function_name::named]
|
||||
#[allow(unused)]
|
||||
async fn mark_server_as_online(app_handle: AppHandle, id: &str) {
|
||||
let server = get_server_by_id(id).await;
|
||||
if let Some(mut server) = server {
|
||||
server.available = true;
|
||||
server.health = None;
|
||||
save_server(&server);
|
||||
save_server(&server).await;
|
||||
|
||||
try_register_server_to_search_source(app_handle.clone(), &server).await;
|
||||
} else {
|
||||
log::warn!(
|
||||
"[{}()] invoked with a server [{}] that does not exist!",
|
||||
function_name!(),
|
||||
id
|
||||
);
|
||||
}
|
||||
Ok(())
|
||||
}
|
||||
|
||||
#[tauri::command]
|
||||
pub async fn mark_server_as_offline<R: Runtime>(
|
||||
app_handle: AppHandle<R>,
|
||||
id: &str,
|
||||
) -> Result<(), ()> {
|
||||
// println!("server_is_offline: {}", id);
|
||||
let server = get_server_by_id(id);
|
||||
#[function_name::named]
|
||||
pub(crate) async fn mark_server_as_offline(app_handle: AppHandle, id: &str) {
|
||||
let server = get_server_by_id(id).await;
|
||||
if let Some(mut server) = server {
|
||||
server.available = false;
|
||||
server.health = None;
|
||||
save_server(&server);
|
||||
save_server(&server).await;
|
||||
|
||||
let registry = app_handle.state::<SearchSourceRegistry>();
|
||||
registry.remove_source(id).await;
|
||||
} else {
|
||||
log::warn!(
|
||||
"[{}()] invoked with a server [{}] that does not exist!",
|
||||
function_name!(),
|
||||
id
|
||||
);
|
||||
}
|
||||
Ok(())
|
||||
}
|
||||
|
||||
#[tauri::command]
|
||||
pub async fn disable_server<R: Runtime>(app_handle: AppHandle<R>, id: String) -> Result<(), ()> {
|
||||
let server = get_server_by_id(id.as_str());
|
||||
if let Some(mut server) = server {
|
||||
server.enabled = false;
|
||||
|
||||
let registry = app_handle.state::<SearchSourceRegistry>();
|
||||
registry.remove_source(id.as_str()).await;
|
||||
|
||||
save_server(&server);
|
||||
persist_servers(&app_handle)
|
||||
.await
|
||||
.expect("failed to save servers");
|
||||
}
|
||||
Ok(())
|
||||
}
|
||||
|
||||
#[tauri::command]
|
||||
pub async fn logout_coco_server<R: Runtime>(
|
||||
app_handle: AppHandle<R>,
|
||||
id: String,
|
||||
) -> Result<(), String> {
|
||||
#[function_name::named]
|
||||
pub async fn logout_coco_server(app_handle: AppHandle, id: String) -> Result<(), String> {
|
||||
log::debug!("Attempting to log out server by id: {}", &id);
|
||||
|
||||
// Check if server token exists
|
||||
if let Some(_token) = get_server_token(id.as_str()).await? {
|
||||
log::debug!("Found server token for id: {}", &id);
|
||||
// Check if the server exists
|
||||
let Some(mut server) = get_server_by_id(id.as_str()).await else {
|
||||
panic!(
|
||||
"[{}()] invoked with a server [{}] that does not exist! Mismatched states between frontend and backend!",
|
||||
function_name!(),
|
||||
id
|
||||
);
|
||||
};
|
||||
|
||||
// Clear server profile
|
||||
server.profile = None;
|
||||
// Logging out from a non-public Coco server makes it unavailable
|
||||
if !server.public {
|
||||
server.available = false;
|
||||
}
|
||||
// Save the updated server data
|
||||
save_server(&server).await;
|
||||
// Persist the updated server data
|
||||
if let Err(e) = persist_servers(&app_handle).await {
|
||||
log::debug!("Failed to save server for id: {}. Error: {:?}", &id, &e);
|
||||
return Err(format!("Failed to save server: {}", &e));
|
||||
}
|
||||
|
||||
let has_token = get_server_token(id.as_str()).await.is_some();
|
||||
if server.public {
|
||||
if has_token {
|
||||
panic!("Public Coco server won't have token")
|
||||
}
|
||||
} else {
|
||||
assert!(
|
||||
has_token,
|
||||
"This is a non-public Coco server, and it is logged in, we should have a token"
|
||||
);
|
||||
// Remove the server token from cache
|
||||
remove_server_token(id.as_str());
|
||||
remove_server_token(id.as_str()).await;
|
||||
|
||||
// Persist the updated tokens
|
||||
if let Err(e) = persist_servers_token(&app_handle) {
|
||||
if let Err(e) = persist_servers_token(&app_handle).await {
|
||||
log::debug!("Failed to save tokens for id: {}. Error: {:?}", &id, &e);
|
||||
return Err(format!("Failed to save tokens: {}", &e));
|
||||
}
|
||||
} else {
|
||||
// Log the case where server token is not found
|
||||
log::debug!("No server token found for id: {}", &id);
|
||||
}
|
||||
|
||||
// Check if the server exists
|
||||
if let Some(mut server) = get_server_by_id(id.as_str()) {
|
||||
log::debug!("Found server for id: {}", &id);
|
||||
|
||||
// Clear server profile
|
||||
server.profile = None;
|
||||
let _ = mark_server_as_offline(app_handle.clone(), id.as_str()).await;
|
||||
|
||||
// Save the updated server data
|
||||
save_server(&server);
|
||||
|
||||
// Persist the updated server data
|
||||
if let Err(e) = persist_servers(&app_handle).await {
|
||||
log::debug!("Failed to save server for id: {}. Error: {:?}", &id, &e);
|
||||
return Err(format!("Failed to save server: {}", &e));
|
||||
}
|
||||
} else {
|
||||
// Log the case where server is not found
|
||||
log::debug!("No server found for id: {}", &id);
|
||||
return Err(format!("No server found for id: {}", id));
|
||||
// Remove it from the search source if it becomes unavailable
|
||||
if !server.available {
|
||||
let registry = app_handle.state::<SearchSourceRegistry>();
|
||||
registry.remove_source(id.as_str()).await;
|
||||
}
|
||||
|
||||
log::debug!("Successfully logged out server with id: {}", &id);
|
||||
Ok(())
|
||||
}
|
||||
|
||||
/// Removes the trailing slash from the server's endpoint if present.
|
||||
/// Helper function to remove the trailing slash from the server's endpoint if present.
|
||||
fn trim_endpoint_last_forward_slash(server: &mut Server) {
|
||||
if server.endpoint.ends_with('/') {
|
||||
server.endpoint.pop(); // Remove the last character
|
||||
while server.endpoint.ends_with('/') {
|
||||
server.endpoint.pop();
|
||||
}
|
||||
let endpoint = &mut server.endpoint;
|
||||
while endpoint.ends_with('/') {
|
||||
endpoint.pop();
|
||||
}
|
||||
}
|
||||
|
||||
@@ -591,42 +678,47 @@ fn provider_info_url(endpoint: &str) -> String {
|
||||
format!("{endpoint}/provider/_info")
|
||||
}
|
||||
|
||||
#[test]
|
||||
fn test_trim_endpoint_last_forward_slash() {
|
||||
let mut server = Server {
|
||||
id: "test".to_string(),
|
||||
builtin: false,
|
||||
enabled: true,
|
||||
name: "".to_string(),
|
||||
endpoint: "https://example.com///".to_string(),
|
||||
provider: Provider {
|
||||
#[cfg(test)]
|
||||
mod tests {
|
||||
use super::*;
|
||||
|
||||
#[test]
|
||||
fn test_trim_endpoint_last_forward_slash() {
|
||||
let mut server = Server {
|
||||
id: "test".to_string(),
|
||||
builtin: false,
|
||||
enabled: true,
|
||||
name: "".to_string(),
|
||||
icon: "".to_string(),
|
||||
website: "".to_string(),
|
||||
eula: "".to_string(),
|
||||
privacy_policy: "".to_string(),
|
||||
banner: "".to_string(),
|
||||
description: "".to_string(),
|
||||
},
|
||||
version: Version {
|
||||
number: "".to_string(),
|
||||
},
|
||||
minimal_client_version: None,
|
||||
updated: "".to_string(),
|
||||
public: false,
|
||||
available: false,
|
||||
health: None,
|
||||
profile: None,
|
||||
auth_provider: AuthProvider {
|
||||
sso: Sso {
|
||||
url: "".to_string(),
|
||||
endpoint: "https://example.com///".to_string(),
|
||||
provider: Provider {
|
||||
name: "".to_string(),
|
||||
icon: "".to_string(),
|
||||
website: "".to_string(),
|
||||
eula: "".to_string(),
|
||||
privacy_policy: "".to_string(),
|
||||
banner: "".to_string(),
|
||||
description: "".to_string(),
|
||||
},
|
||||
},
|
||||
priority: 0,
|
||||
stats: None,
|
||||
};
|
||||
version: Version {
|
||||
number: "".to_string(),
|
||||
},
|
||||
minimal_client_version: None,
|
||||
updated: "".to_string(),
|
||||
public: false,
|
||||
available: false,
|
||||
health: None,
|
||||
profile: None,
|
||||
auth_provider: AuthProvider {
|
||||
sso: Sso {
|
||||
url: "".to_string(),
|
||||
},
|
||||
},
|
||||
priority: 0,
|
||||
stats: None,
|
||||
};
|
||||
|
||||
trim_endpoint_last_forward_slash(&mut server);
|
||||
trim_endpoint_last_forward_slash(&mut server);
|
||||
|
||||
assert_eq!(server.endpoint, "https://example.com");
|
||||
assert_eq!(server.endpoint, "https://example.com");
|
||||
}
|
||||
}
|
||||
|
||||
57
src-tauri/src/server/synthesize.rs
Normal file
57
src-tauri/src/server/synthesize.rs
Normal file
@@ -0,0 +1,57 @@
|
||||
use crate::server::http_client::HttpClient;
|
||||
use futures_util::StreamExt;
|
||||
use http::Method;
|
||||
use serde_json::json;
|
||||
use tauri::{AppHandle, Emitter, command};
|
||||
|
||||
#[command]
|
||||
pub async fn synthesize(
|
||||
app_handle: AppHandle,
|
||||
client_id: String,
|
||||
server_id: String,
|
||||
voice: String,
|
||||
content: String,
|
||||
) -> Result<(), String> {
|
||||
let body = json!({
|
||||
"voice": voice,
|
||||
"content": content,
|
||||
})
|
||||
.to_string();
|
||||
|
||||
let response = HttpClient::send_request(
|
||||
server_id.as_str(),
|
||||
Method::POST,
|
||||
"/services/audio/synthesize",
|
||||
None,
|
||||
None,
|
||||
Some(reqwest::Body::from(body.to_string())),
|
||||
)
|
||||
.await?;
|
||||
|
||||
log::info!("Synthesize response status: {}", response.status());
|
||||
|
||||
if response.status() == 429 {
|
||||
return Ok(());
|
||||
}
|
||||
|
||||
if !response.status().is_success() {
|
||||
return Err(format!("Request Failed: {}", response.status()));
|
||||
}
|
||||
|
||||
let mut stream = response.bytes_stream();
|
||||
while let Some(chunk) = stream.next().await {
|
||||
match chunk {
|
||||
Ok(bytes) => {
|
||||
if let Err(err) = app_handle.emit(&client_id, bytes.to_vec()) {
|
||||
log::error!("Emit error: {:?}", err);
|
||||
}
|
||||
}
|
||||
Err(e) => {
|
||||
log::error!("Stream error: {:?}", e);
|
||||
break;
|
||||
}
|
||||
}
|
||||
}
|
||||
|
||||
Ok(())
|
||||
}
|
||||
@@ -1,41 +1,96 @@
|
||||
use crate::common::http::get_response_body_text;
|
||||
use crate::server::http_client::HttpClient;
|
||||
use serde::{Deserialize, Serialize};
|
||||
use serde_json::{Value, from_str};
|
||||
use tauri::command;
|
||||
|
||||
#[derive(Debug, Serialize, Deserialize)]
|
||||
pub struct TranscriptionResponse {
|
||||
pub text: String,
|
||||
task_id: String,
|
||||
results: Vec<Value>,
|
||||
}
|
||||
|
||||
#[command]
|
||||
pub async fn transcription(
|
||||
server_id: String,
|
||||
_audio_type: String,
|
||||
_audio_content: String,
|
||||
audio_content: String,
|
||||
) -> Result<TranscriptionResponse, String> {
|
||||
// let mut query_params = HashMap::new();
|
||||
// query_params.insert("type".to_string(), JsonValue::String(audio_type));
|
||||
// query_params.insert("content".to_string(), JsonValue::String(audio_content));
|
||||
|
||||
// Send the HTTP POST request
|
||||
let response = HttpClient::post(
|
||||
// Send request to initiate transcription task
|
||||
let init_response = HttpClient::post(
|
||||
&server_id,
|
||||
"/services/audio/transcription",
|
||||
None,
|
||||
None,
|
||||
Some(audio_content.into()),
|
||||
)
|
||||
.await
|
||||
.map_err(|e| format!("Error sending transcription request: {}", e))?;
|
||||
.await
|
||||
.map_err(|e| format!("Failed to initiate transcription: {}", e))?;
|
||||
|
||||
// Use get_response_body_text to extract the response body as text
|
||||
let response_body = get_response_body_text(response)
|
||||
// Extract response body as text
|
||||
let init_response_text = get_response_body_text(init_response)
|
||||
.await
|
||||
.map_err(|e| format!("Failed to read response body: {}", e))?;
|
||||
.map_err(|e| format!("Failed to read initial response body: {}", e))?;
|
||||
|
||||
// Deserialize the response body into TranscriptionResponse
|
||||
let transcription_response: TranscriptionResponse = serde_json::from_str(&response_body)
|
||||
.map_err(|e| format!("Failed to parse transcription response: {}", e))?;
|
||||
// Parse response JSON to extract task ID
|
||||
let init_response_json: Value = from_str(&init_response_text).map_err(|e| {
|
||||
format!(
|
||||
"Failed to parse initial response JSON: {}. Raw response: {}",
|
||||
e, init_response_text
|
||||
)
|
||||
})?;
|
||||
|
||||
let transcription_task_id = init_response_json["task_id"]
|
||||
.as_str()
|
||||
.ok_or_else(|| {
|
||||
format!(
|
||||
"Missing or invalid task_id in initial response: {}",
|
||||
init_response_text
|
||||
)
|
||||
})?
|
||||
.to_string();
|
||||
|
||||
// Set up polling with timeout
|
||||
let polling_start = std::time::Instant::now();
|
||||
const POLLING_TIMEOUT: std::time::Duration = std::time::Duration::from_secs(30);
|
||||
const POLLING_INTERVAL: std::time::Duration = std::time::Duration::from_millis(200);
|
||||
|
||||
let mut transcription_response: TranscriptionResponse;
|
||||
|
||||
loop {
|
||||
// Poll for transcription results
|
||||
let poll_response = HttpClient::get(
|
||||
&server_id,
|
||||
&format!("/services/audio/task/{}", transcription_task_id),
|
||||
None,
|
||||
)
|
||||
.await
|
||||
.map_err(|e| format!("Failed to poll transcription task: {}", e))?;
|
||||
|
||||
// Extract poll response body
|
||||
let poll_response_text = get_response_body_text(poll_response)
|
||||
.await
|
||||
.map_err(|e| format!("Failed to read poll response body: {}", e))?;
|
||||
|
||||
// Parse poll response JSON
|
||||
transcription_response = from_str(&poll_response_text).map_err(|e| {
|
||||
format!(
|
||||
"Failed to parse poll response JSON: {}. Raw response: {}",
|
||||
e, poll_response_text
|
||||
)
|
||||
})?;
|
||||
|
||||
// Check if transcription results are available
|
||||
if !transcription_response.results.is_empty() {
|
||||
break;
|
||||
}
|
||||
|
||||
// Check for timeout
|
||||
if polling_start.elapsed() >= POLLING_TIMEOUT {
|
||||
return Err("Transcription task timed out after 30 seconds".to_string());
|
||||
}
|
||||
|
||||
// Wait before next poll
|
||||
tokio::time::sleep(POLLING_INTERVAL).await;
|
||||
}
|
||||
|
||||
Ok(transcription_response)
|
||||
}
|
||||
|
||||
@@ -1,170 +0,0 @@
|
||||
use crate::server::servers::{get_server_by_id, get_server_token};
|
||||
use futures::StreamExt;
|
||||
use std::collections::HashMap;
|
||||
use std::sync::Arc;
|
||||
use tauri::{AppHandle, Emitter, Runtime};
|
||||
use tokio::net::TcpStream;
|
||||
use tokio::sync::{mpsc, Mutex};
|
||||
use tokio_tungstenite::tungstenite::handshake::client::generate_key;
|
||||
use tokio_tungstenite::tungstenite::Message;
|
||||
use tokio_tungstenite::MaybeTlsStream;
|
||||
use tokio_tungstenite::WebSocketStream;
|
||||
use tokio_tungstenite::{connect_async_tls_with_config, Connector};
|
||||
#[derive(Default)]
|
||||
pub struct WebSocketManager {
|
||||
connections: Arc<Mutex<HashMap<String, Arc<WebSocketInstance>>>>,
|
||||
}
|
||||
|
||||
struct WebSocketInstance {
|
||||
ws_connection: Mutex<WebSocketStream<MaybeTlsStream<TcpStream>>>, // No need to lock the entire map
|
||||
cancel_tx: mpsc::Sender<()>,
|
||||
}
|
||||
|
||||
fn convert_to_websocket(endpoint: &str) -> Result<String, String> {
|
||||
let url = url::Url::parse(endpoint).map_err(|e| format!("Invalid URL: {}", e))?;
|
||||
let ws_protocol = if url.scheme() == "https" {
|
||||
"wss://"
|
||||
} else {
|
||||
"ws://"
|
||||
};
|
||||
let host = url.host_str().ok_or("No host found in URL")?;
|
||||
let port = url
|
||||
.port_or_known_default()
|
||||
.unwrap_or(if url.scheme() == "https" { 443 } else { 80 });
|
||||
|
||||
let ws_endpoint = if port == 80 || port == 443 {
|
||||
format!("{}{}{}", ws_protocol, host, "/ws")
|
||||
} else {
|
||||
format!("{}{}:{}/ws", ws_protocol, host, port)
|
||||
};
|
||||
Ok(ws_endpoint)
|
||||
}
|
||||
|
||||
#[tauri::command]
|
||||
pub async fn connect_to_server<R: Runtime>(
|
||||
tauri_app_handle: AppHandle<R>,
|
||||
id: String,
|
||||
client_id: String,
|
||||
state: tauri::State<'_, WebSocketManager>,
|
||||
app_handle: AppHandle,
|
||||
) -> Result<(), String> {
|
||||
let connections_clone = state.connections.clone();
|
||||
|
||||
// Disconnect old connection first
|
||||
disconnect(client_id.clone(), state.clone()).await.ok();
|
||||
|
||||
let server = get_server_by_id(&id).ok_or(format!("Server with ID {} not found", id))?;
|
||||
let endpoint = convert_to_websocket(&server.endpoint)?;
|
||||
let token = get_server_token(&id).await?.map(|t| t.access_token.clone());
|
||||
|
||||
let mut request =
|
||||
tokio_tungstenite::tungstenite::client::IntoClientRequest::into_client_request(&endpoint)
|
||||
.map_err(|e| format!("Failed to create WebSocket request: {}", e))?;
|
||||
|
||||
request
|
||||
.headers_mut()
|
||||
.insert("Connection", "Upgrade".parse().unwrap());
|
||||
request
|
||||
.headers_mut()
|
||||
.insert("Upgrade", "websocket".parse().unwrap());
|
||||
request
|
||||
.headers_mut()
|
||||
.insert("Sec-WebSocket-Version", "13".parse().unwrap());
|
||||
request
|
||||
.headers_mut()
|
||||
.insert("Sec-WebSocket-Key", generate_key().parse().unwrap());
|
||||
|
||||
if let Some(token) = token {
|
||||
request
|
||||
.headers_mut()
|
||||
.insert("X-API-TOKEN", token.parse().unwrap());
|
||||
}
|
||||
|
||||
let allow_self_signature =
|
||||
crate::settings::get_allow_self_signature(tauri_app_handle.clone()).await;
|
||||
let tls_connector = tokio_native_tls::native_tls::TlsConnector::builder()
|
||||
.danger_accept_invalid_certs(allow_self_signature)
|
||||
.build()
|
||||
.map_err(|e| format!("TLS build error: {:?}", e))?;
|
||||
|
||||
let connector = Connector::NativeTls(tls_connector.into());
|
||||
|
||||
let (ws_stream, _) = connect_async_tls_with_config(
|
||||
request,
|
||||
None, // WebSocketConfig
|
||||
true, // disable_nagle
|
||||
Some(connector), // Connector
|
||||
)
|
||||
.await
|
||||
.map_err(|e| format!("WebSocket TLS error: {:?}", e))?;
|
||||
|
||||
let (cancel_tx, mut cancel_rx) = mpsc::channel(1);
|
||||
|
||||
let instance = Arc::new(WebSocketInstance {
|
||||
ws_connection: Mutex::new(ws_stream),
|
||||
cancel_tx,
|
||||
});
|
||||
|
||||
// Insert connection into the map (lock is held briefly)
|
||||
{
|
||||
let mut connections = connections_clone.lock().await;
|
||||
connections.insert(client_id.clone(), instance.clone());
|
||||
}
|
||||
|
||||
// Spawn WebSocket handler in a separate task
|
||||
let app_handle_clone = app_handle.clone();
|
||||
let client_id_clone = client_id.clone();
|
||||
tokio::spawn(async move {
|
||||
let ws = &mut *instance.ws_connection.lock().await;
|
||||
|
||||
loop {
|
||||
tokio::select! {
|
||||
msg = ws.next() => {
|
||||
match msg {
|
||||
Some(Ok(Message::Text(text))) => {
|
||||
let _ = app_handle_clone.emit(&format!("ws-message-{}", client_id_clone), text);
|
||||
},
|
||||
Some(Err(_)) | None => {
|
||||
log::debug!("WebSocket connection closed or error");
|
||||
let _ = app_handle_clone.emit(&format!("ws-error-{}", client_id_clone), id.clone());
|
||||
break;
|
||||
}
|
||||
_ => {}
|
||||
}
|
||||
}
|
||||
_ = cancel_rx.recv() => {
|
||||
log::debug!("WebSocket connection cancelled");
|
||||
let _ = app_handle_clone.emit(&format!("ws-cancel-{}", client_id_clone), id.clone());
|
||||
break;
|
||||
}
|
||||
}
|
||||
}
|
||||
|
||||
// Remove connection after it closes
|
||||
let mut connections = connections_clone.lock().await;
|
||||
connections.remove(&client_id_clone);
|
||||
});
|
||||
|
||||
Ok(())
|
||||
}
|
||||
|
||||
#[tauri::command]
|
||||
pub async fn disconnect(
|
||||
client_id: String,
|
||||
state: tauri::State<'_, WebSocketManager>,
|
||||
) -> Result<(), String> {
|
||||
let instance = {
|
||||
let mut connections = state.connections.lock().await;
|
||||
connections.remove(&client_id)
|
||||
};
|
||||
|
||||
if let Some(instance) = instance {
|
||||
let _ = instance.cancel_tx.send(()).await;
|
||||
|
||||
// Close WebSocket (lock only the connection, not the whole map)
|
||||
let mut ws = instance.ws_connection.lock().await;
|
||||
let _ = ws.close(None).await;
|
||||
}
|
||||
|
||||
Ok(())
|
||||
}
|
||||
@@ -1,12 +1,12 @@
|
||||
use crate::COCO_TAURI_STORE;
|
||||
use serde_json::Value as Json;
|
||||
use tauri::{AppHandle, Runtime};
|
||||
use tauri::AppHandle;
|
||||
use tauri_plugin_store::StoreExt;
|
||||
|
||||
const SETTINGS_ALLOW_SELF_SIGNATURE: &str = "settings_allow_self_signature";
|
||||
|
||||
#[tauri::command]
|
||||
pub async fn set_allow_self_signature<R: Runtime>(tauri_app_handle: AppHandle<R>, value: bool) {
|
||||
pub async fn set_allow_self_signature(tauri_app_handle: AppHandle, value: bool) {
|
||||
use crate::server::http_client;
|
||||
|
||||
let store = tauri_app_handle
|
||||
@@ -40,7 +40,7 @@ pub async fn set_allow_self_signature<R: Runtime>(tauri_app_handle: AppHandle<R>
|
||||
}
|
||||
|
||||
/// Synchronous version of `async get_allow_self_signature()`.
|
||||
pub fn _get_allow_self_signature<R: Runtime>(tauri_app_handle: AppHandle<R>) -> bool {
|
||||
pub fn _get_allow_self_signature(tauri_app_handle: AppHandle) -> bool {
|
||||
let store = tauri_app_handle
|
||||
.store(COCO_TAURI_STORE)
|
||||
.unwrap_or_else(|e| {
|
||||
@@ -67,6 +67,6 @@ pub fn _get_allow_self_signature<R: Runtime>(tauri_app_handle: AppHandle<R>) ->
|
||||
}
|
||||
|
||||
#[tauri::command]
|
||||
pub async fn get_allow_self_signature<R: Runtime>(tauri_app_handle: AppHandle<R>) -> bool {
|
||||
pub async fn get_allow_self_signature(tauri_app_handle: AppHandle) -> bool {
|
||||
_get_allow_self_signature(tauri_app_handle)
|
||||
}
|
||||
|
||||
@@ -1,6 +1,9 @@
|
||||
//credits to: https://github.com/ayangweb/ayangweb-EcoPaste/blob/169323dbe6365ffe4abb64d867439ed2ea84c6d1/src-tauri/src/core/setup/mac.rs
|
||||
use tauri::{App, Emitter, EventTarget, WebviewWindow};
|
||||
use tauri_nspanel::{cocoa::appkit::NSWindowCollectionBehavior, panel_delegate, WebviewWindowExt};
|
||||
//! credits to: https://github.com/ayangweb/ayangweb-EcoPaste/blob/169323dbe6365ffe4abb64d867439ed2ea84c6d1/src-tauri/src/core/setup/mac.rs
|
||||
|
||||
use cocoa::appkit::NSWindow;
|
||||
use tauri::Manager;
|
||||
use tauri::{App, AppHandle, Emitter, EventTarget, WebviewWindow};
|
||||
use tauri_nspanel::{WebviewWindowExt, cocoa::appkit::NSWindowCollectionBehavior, panel_delegate};
|
||||
|
||||
use crate::common::MAIN_WINDOW_LABEL;
|
||||
|
||||
@@ -29,7 +32,7 @@ pub fn platform(
|
||||
|
||||
// Share the window across all desktop spaces and full screen
|
||||
panel.set_collection_behaviour(
|
||||
NSWindowCollectionBehavior::NSWindowCollectionBehaviorCanJoinAllSpaces
|
||||
NSWindowCollectionBehavior::NSWindowCollectionBehaviorMoveToActiveSpace
|
||||
| NSWindowCollectionBehavior::NSWindowCollectionBehaviorStationary
|
||||
| NSWindowCollectionBehavior::NSWindowCollectionBehaviorFullScreenAuxiliary,
|
||||
);
|
||||
@@ -78,3 +81,50 @@ pub fn platform(
|
||||
// Set the delegate object for the window to handle window events
|
||||
panel.set_delegate(delegate);
|
||||
}
|
||||
|
||||
/// Change NS window attribute between `NSWindowCollectionBehaviorCanJoinAllSpaces`
|
||||
/// and `NSWindowCollectionBehaviorMoveToActiveSpace` accordingly.
|
||||
///
|
||||
/// NOTE: this tauri command is not async because we should run it in the main
|
||||
/// thread, or `ns_window.setCollectionBehavior_(collection_behavior)` would lead
|
||||
/// to UB.
|
||||
#[tauri::command]
|
||||
pub(crate) fn toggle_move_to_active_space_attribute(tauri_app_hanlde: AppHandle) {
|
||||
use cocoa::appkit::NSWindowCollectionBehavior;
|
||||
use cocoa::base::id;
|
||||
|
||||
let main_window = tauri_app_hanlde
|
||||
.get_webview_window(MAIN_WINDOW_LABEL)
|
||||
.unwrap();
|
||||
let ns_window = main_window.ns_window().unwrap() as id;
|
||||
let mut collection_behavior = unsafe { ns_window.collectionBehavior() };
|
||||
let join_all_spaces = collection_behavior
|
||||
.contains(NSWindowCollectionBehavior::NSWindowCollectionBehaviorCanJoinAllSpaces);
|
||||
let move_to_active_space = collection_behavior
|
||||
.contains(NSWindowCollectionBehavior::NSWindowCollectionBehaviorMoveToActiveSpace);
|
||||
|
||||
match (join_all_spaces, move_to_active_space) {
|
||||
(true, false) => {
|
||||
collection_behavior
|
||||
.remove(NSWindowCollectionBehavior::NSWindowCollectionBehaviorCanJoinAllSpaces);
|
||||
collection_behavior
|
||||
.insert(NSWindowCollectionBehavior::NSWindowCollectionBehaviorMoveToActiveSpace);
|
||||
}
|
||||
(false, true) => {
|
||||
collection_behavior
|
||||
.remove(NSWindowCollectionBehavior::NSWindowCollectionBehaviorMoveToActiveSpace);
|
||||
collection_behavior
|
||||
.insert(NSWindowCollectionBehavior::NSWindowCollectionBehaviorCanJoinAllSpaces);
|
||||
}
|
||||
_ => {
|
||||
panic!(
|
||||
"invalid NS window attribute, NSWindowCollectionBehaviorCanJoinAllSpaces is set [{}], NSWindowCollectionBehaviorMoveToActiveSpace is set [{}]",
|
||||
join_all_spaces, move_to_active_space
|
||||
);
|
||||
}
|
||||
}
|
||||
|
||||
unsafe {
|
||||
ns_window.setCollectionBehavior_(collection_behavior);
|
||||
}
|
||||
}
|
||||
|
||||
@@ -1,5 +1,5 @@
|
||||
use crate::{hide_coco, show_coco, COCO_TAURI_STORE};
|
||||
use tauri::{async_runtime, App, AppHandle, Manager, Runtime};
|
||||
use crate::{COCO_TAURI_STORE, hide_coco, show_coco};
|
||||
use tauri::{App, AppHandle, Manager, async_runtime};
|
||||
use tauri_plugin_global_shortcut::{GlobalShortcutExt, Shortcut, ShortcutState};
|
||||
use tauri_plugin_store::{JsonValue, StoreExt};
|
||||
|
||||
@@ -50,14 +50,14 @@ pub fn enable_shortcut(app: &App) {
|
||||
/// Get the stored shortcut as a string, same as [`_get_shortcut()`], except that
|
||||
/// this is a `tauri::command` interface.
|
||||
#[tauri::command]
|
||||
pub async fn get_current_shortcut<R: Runtime>(app: AppHandle<R>) -> Result<String, String> {
|
||||
pub async fn get_current_shortcut(app: AppHandle) -> Result<String, String> {
|
||||
let shortcut = _get_shortcut(&app);
|
||||
Ok(shortcut)
|
||||
}
|
||||
|
||||
/// Get the current shortcut and unregister it on the tauri side.
|
||||
#[tauri::command]
|
||||
pub async fn unregister_shortcut<R: Runtime>(app: AppHandle<R>) {
|
||||
pub async fn unregister_shortcut(app: AppHandle) {
|
||||
let shortcut_str = _get_shortcut(&app);
|
||||
let shortcut = shortcut_str
|
||||
.parse::<Shortcut>()
|
||||
@@ -70,9 +70,9 @@ pub async fn unregister_shortcut<R: Runtime>(app: AppHandle<R>) {
|
||||
|
||||
/// Change the global shortcut to `key`.
|
||||
#[tauri::command]
|
||||
pub async fn change_shortcut<R: Runtime>(
|
||||
app: AppHandle<R>,
|
||||
_window: tauri::Window<R>,
|
||||
pub async fn change_shortcut(
|
||||
app: AppHandle,
|
||||
_window: tauri::Window,
|
||||
key: String,
|
||||
) -> Result<(), String> {
|
||||
println!("key {}:", key);
|
||||
@@ -94,7 +94,7 @@ pub async fn change_shortcut<R: Runtime>(
|
||||
}
|
||||
|
||||
/// Helper function to register a shortcut, used for shortcut updates.
|
||||
fn _register_shortcut<R: Runtime>(app: &AppHandle<R>, shortcut: Shortcut) {
|
||||
fn _register_shortcut(app: &AppHandle, shortcut: Shortcut) {
|
||||
app.global_shortcut()
|
||||
.on_shortcut(shortcut, move |app, scut, event| {
|
||||
if scut == &shortcut {
|
||||
@@ -151,7 +151,7 @@ fn _register_shortcut_upon_start(app: &App, shortcut: Shortcut) {
|
||||
}
|
||||
|
||||
/// Helper function to get the stored global shortcut, as a string.
|
||||
pub fn _get_shortcut<R: Runtime>(app: &AppHandle<R>) -> String {
|
||||
pub fn _get_shortcut(app: &AppHandle) -> String {
|
||||
let store = app
|
||||
.get_store(COCO_TAURI_STORE)
|
||||
.expect("store should be loaded or created");
|
||||
|
||||
62
src-tauri/src/util/app_lang.rs
Normal file
62
src-tauri/src/util/app_lang.rs
Normal file
@@ -0,0 +1,62 @@
|
||||
//! Configuration entry App language is persisted in the frontend code, but we
|
||||
//! need to access it on the backend.
|
||||
//!
|
||||
//! So we duplicate it here **in the MEMORY** and expose a setter method to the
|
||||
//! frontend so that the value can be updated and stay update-to-date.
|
||||
|
||||
use function_name::named;
|
||||
use tokio::sync::RwLock;
|
||||
|
||||
#[derive(Debug, Clone, Copy, PartialEq)]
|
||||
#[allow(non_camel_case_types)]
|
||||
pub(crate) enum Lang {
|
||||
en_US,
|
||||
zh_CN,
|
||||
}
|
||||
|
||||
impl std::fmt::Display for Lang {
|
||||
fn fmt(&self, f: &mut std::fmt::Formatter<'_>) -> std::fmt::Result {
|
||||
match self {
|
||||
Lang::en_US => write!(f, "en_US"),
|
||||
Lang::zh_CN => write!(f, "zh_CN"),
|
||||
}
|
||||
}
|
||||
}
|
||||
|
||||
impl std::str::FromStr for Lang {
|
||||
type Err = String;
|
||||
|
||||
fn from_str(s: &str) -> Result<Self, Self::Err> {
|
||||
match s {
|
||||
"en" => Ok(Lang::en_US),
|
||||
"zh" => Ok(Lang::zh_CN),
|
||||
_ => Err(format!("Invalid language: {}", s)),
|
||||
}
|
||||
}
|
||||
}
|
||||
|
||||
/// Cache the language config in memory.
|
||||
static APP_LANG: RwLock<Option<Lang>> = RwLock::const_new(None);
|
||||
|
||||
/// Frontend code uses this interface to update the in-memory cached `APP_LANG` config.
|
||||
#[named]
|
||||
#[tauri::command]
|
||||
pub(crate) async fn update_app_lang(lang: String) {
|
||||
let app_lang = lang.parse::<Lang>().unwrap_or_else(|e| {
|
||||
panic!(
|
||||
"frontend code passes an invalid argument [{}] to interface [{}], parsing error [{}]",
|
||||
lang,
|
||||
function_name!(),
|
||||
e
|
||||
)
|
||||
});
|
||||
|
||||
let mut write_guard = APP_LANG.write().await;
|
||||
*write_guard = Some(app_lang);
|
||||
}
|
||||
|
||||
/// Helper getter method to handle the `None` case.
|
||||
pub(crate) async fn get_app_lang() -> Lang {
|
||||
let opt_lang = *APP_LANG.read().await;
|
||||
opt_lang.expect("frontend code did not invoke [update_app_lang()] to set the APP_LANG")
|
||||
}
|
||||
174
src-tauri/src/util/file.rs
Normal file
174
src-tauri/src/util/file.rs
Normal file
@@ -0,0 +1,174 @@
|
||||
#[derive(Debug, Clone, PartialEq, Copy)]
|
||||
pub(crate) enum FileType {
|
||||
Folder,
|
||||
JPEGImage,
|
||||
PNGImage,
|
||||
PDFDocument,
|
||||
PlainTextDocument,
|
||||
MicrosoftWordDocument,
|
||||
MicrosoftExcelSpreadsheet,
|
||||
AudioFile,
|
||||
VideoFile,
|
||||
CHeaderFile,
|
||||
TOMLDocument,
|
||||
RustScript,
|
||||
CSourceCode,
|
||||
MarkdownDocument,
|
||||
TerminalSettings,
|
||||
ZipArchive,
|
||||
Dmg,
|
||||
Html,
|
||||
Json,
|
||||
Xml,
|
||||
Yaml,
|
||||
Css,
|
||||
Vue,
|
||||
React,
|
||||
Sql,
|
||||
Csv,
|
||||
Javascript,
|
||||
Lnk,
|
||||
Typescript,
|
||||
Python,
|
||||
Java,
|
||||
Golang,
|
||||
Ruby,
|
||||
Php,
|
||||
Sass,
|
||||
Sketch,
|
||||
AdobeAi,
|
||||
AdobePsd,
|
||||
AdobePr,
|
||||
AdobeAu,
|
||||
AdobeAe,
|
||||
AdobeLr,
|
||||
AdobeXd,
|
||||
AdobeFl,
|
||||
AdobeId,
|
||||
Svg,
|
||||
Epub,
|
||||
Unknown,
|
||||
}
|
||||
|
||||
async fn get_file_type(path: &str) -> FileType {
|
||||
let path = camino::Utf8Path::new(path);
|
||||
|
||||
// stat() is more precise than file extension, use it if possible.
|
||||
if path.is_dir() {
|
||||
return FileType::Folder;
|
||||
}
|
||||
|
||||
let Some(ext) = path.extension() else {
|
||||
return FileType::Unknown;
|
||||
};
|
||||
|
||||
let ext = ext.to_lowercase();
|
||||
match ext.as_str() {
|
||||
"pdf" => FileType::PDFDocument,
|
||||
"txt" | "text" => FileType::PlainTextDocument,
|
||||
"doc" | "docx" => FileType::MicrosoftWordDocument,
|
||||
"xls" | "xlsx" => FileType::MicrosoftExcelSpreadsheet,
|
||||
"jpg" | "jpeg" => FileType::JPEGImage,
|
||||
"png" => FileType::PNGImage,
|
||||
"mp3" | "wav" | "flac" | "aac" | "ogg" | "m4a" => FileType::AudioFile,
|
||||
"mp4" | "avi" | "mov" | "mkv" | "wmv" | "flv" | "webm" => FileType::VideoFile,
|
||||
"h" | "hpp" => FileType::CHeaderFile,
|
||||
"c" | "cpp" | "cc" | "cxx" => FileType::CSourceCode,
|
||||
"toml" => FileType::TOMLDocument,
|
||||
"rs" => FileType::RustScript,
|
||||
"md" | "markdown" => FileType::MarkdownDocument,
|
||||
"terminal" => FileType::TerminalSettings,
|
||||
"zip" | "rar" | "7z" | "tar" | "gz" | "bz2" => FileType::ZipArchive,
|
||||
"dmg" => FileType::Dmg,
|
||||
"html" | "htm" => FileType::Html,
|
||||
"json" => FileType::Json,
|
||||
"xml" => FileType::Xml,
|
||||
"yaml" | "yml" => FileType::Yaml,
|
||||
"css" => FileType::Css,
|
||||
"vue" => FileType::Vue,
|
||||
"jsx" | "tsx" => FileType::React,
|
||||
"sql" => FileType::Sql,
|
||||
"csv" => FileType::Csv,
|
||||
"js" | "mjs" => FileType::Javascript,
|
||||
"ts" => FileType::Typescript,
|
||||
"py" | "pyw" => FileType::Python,
|
||||
"java" => FileType::Java,
|
||||
"go" => FileType::Golang,
|
||||
"rb" => FileType::Ruby,
|
||||
"php" => FileType::Php,
|
||||
"sass" | "scss" => FileType::Sass,
|
||||
"sketch" => FileType::Sketch,
|
||||
"ai" => FileType::AdobeAi,
|
||||
"psd" => FileType::AdobePsd,
|
||||
"prproj" => FileType::AdobePr,
|
||||
"aup" | "aup3" => FileType::AdobeAu,
|
||||
"aep" => FileType::AdobeAe,
|
||||
"lrcat" => FileType::AdobeLr,
|
||||
"xd" => FileType::AdobeXd,
|
||||
"fla" => FileType::AdobeFl,
|
||||
"indd" => FileType::AdobeId,
|
||||
"svg" => FileType::Svg,
|
||||
"epub" => FileType::Epub,
|
||||
"lnk" => FileType::Lnk,
|
||||
_ => FileType::Unknown,
|
||||
}
|
||||
}
|
||||
|
||||
fn type_to_icon(ty: FileType) -> &'static str {
|
||||
match ty {
|
||||
FileType::Folder => "font_file_folder",
|
||||
FileType::JPEGImage => "font_file_image",
|
||||
FileType::PNGImage => "font_file_image",
|
||||
FileType::PDFDocument => "font_file_document_pdf",
|
||||
FileType::PlainTextDocument => "font_file_txt",
|
||||
FileType::MicrosoftWordDocument => "font_file_document_word",
|
||||
FileType::MicrosoftExcelSpreadsheet => "font_file_spreadsheet_excel",
|
||||
FileType::AudioFile => "font_file_audio",
|
||||
FileType::VideoFile => "font_file_video",
|
||||
FileType::CHeaderFile => "font_file_csource",
|
||||
FileType::TOMLDocument => "font_file_toml",
|
||||
FileType::RustScript => "font_file_rustscript1",
|
||||
FileType::CSourceCode => "font_file_csource",
|
||||
FileType::MarkdownDocument => "font_file_markdown",
|
||||
FileType::TerminalSettings => "font_file_terminal1",
|
||||
FileType::ZipArchive => "font_file_zip",
|
||||
FileType::Dmg => "font_file_dmg",
|
||||
FileType::Html => "font_file_html",
|
||||
FileType::Json => "font_file_json",
|
||||
FileType::Xml => "font_file_xml",
|
||||
FileType::Yaml => "font_file_yaml",
|
||||
FileType::Css => "font_file_css",
|
||||
FileType::Vue => "font_file_vue",
|
||||
FileType::React => "font_file_react",
|
||||
FileType::Sql => "font_file_sql",
|
||||
FileType::Csv => "font_file_csv",
|
||||
FileType::Javascript => "font_file_javascript",
|
||||
FileType::Lnk => "font_file_lnk",
|
||||
FileType::Typescript => "font_file_typescript",
|
||||
FileType::Python => "font_file_python",
|
||||
FileType::Java => "font_file_java",
|
||||
FileType::Golang => "font_file_golang",
|
||||
FileType::Ruby => "font_file_ruby",
|
||||
FileType::Php => "font_file_php",
|
||||
FileType::Sass => "font_file_sass",
|
||||
FileType::Sketch => "font_file_sketch",
|
||||
FileType::AdobeAi => "font_file_adobe_ai",
|
||||
FileType::AdobePsd => "font_file_adobe_psd",
|
||||
FileType::AdobePr => "font_file_adobe_pr",
|
||||
FileType::AdobeAu => "font_file_adobe_au",
|
||||
FileType::AdobeAe => "font_file_adobe_ae",
|
||||
FileType::AdobeLr => "font_file_adobe_lr",
|
||||
FileType::AdobeXd => "font_file_adobe_xd",
|
||||
FileType::AdobeFl => "font_file_adobe_fl",
|
||||
FileType::AdobeId => "font_file_adobe_id",
|
||||
FileType::Svg => "font_file_svg",
|
||||
FileType::Epub => "font_file_epub",
|
||||
FileType::Unknown => "font_file_unknown",
|
||||
}
|
||||
}
|
||||
|
||||
#[tauri::command]
|
||||
pub(crate) async fn get_file_icon(path: String) -> &'static str {
|
||||
let ty = get_file_type(path.as_str()).await;
|
||||
type_to_icon(ty)
|
||||
}
|
||||
@@ -1,10 +1,20 @@
|
||||
pub(crate) mod app_lang;
|
||||
pub(crate) mod file;
|
||||
pub(crate) mod platform;
|
||||
pub(crate) mod updater;
|
||||
|
||||
use std::{path::Path, process::Command};
|
||||
use tauri::{AppHandle, Runtime};
|
||||
use tauri::AppHandle;
|
||||
use tauri_plugin_shell::ShellExt;
|
||||
|
||||
/// We use this env variable to determine the DE on Linux.
|
||||
const XDG_CURRENT_DESKTOP: &str = "XDG_CURRENT_DESKTOP";
|
||||
|
||||
#[derive(Debug, PartialEq)]
|
||||
enum LinuxDesktopEnvironment {
|
||||
Gnome,
|
||||
Kde,
|
||||
Unsupported { xdg_current_desktop: String },
|
||||
}
|
||||
|
||||
impl LinuxDesktopEnvironment {
|
||||
@@ -30,6 +40,14 @@ impl LinuxDesktopEnvironment {
|
||||
.arg(path)
|
||||
.output()
|
||||
.map_err(|e| e.to_string())?,
|
||||
Self::Unsupported {
|
||||
xdg_current_desktop,
|
||||
} => {
|
||||
return Err(format!(
|
||||
"Cannot open apps as this Linux desktop environment [{}] is not supported",
|
||||
xdg_current_desktop
|
||||
));
|
||||
}
|
||||
};
|
||||
|
||||
if !cmd_output.status.success() {
|
||||
@@ -44,20 +62,23 @@ impl LinuxDesktopEnvironment {
|
||||
}
|
||||
}
|
||||
|
||||
/// None means that it is likely that we do not have a desktop environment.
|
||||
fn get_linux_desktop_environment() -> Option<LinuxDesktopEnvironment> {
|
||||
let de_os_str = std::env::var_os("XDG_CURRENT_DESKTOP")?;
|
||||
let de_str = de_os_str
|
||||
.into_string()
|
||||
.expect("$XDG_CURRENT_DESKTOP should be UTF-8 encoded");
|
||||
let de_os_str = std::env::var_os(XDG_CURRENT_DESKTOP)?;
|
||||
let de_str = de_os_str.into_string().unwrap_or_else(|_os_string| {
|
||||
panic!("${} should be UTF-8 encoded", XDG_CURRENT_DESKTOP);
|
||||
});
|
||||
|
||||
let de = match de_str.as_str() {
|
||||
"GNOME" => LinuxDesktopEnvironment::Gnome,
|
||||
// Ubuntu uses "ubuntu:GNOME" instead of just "GNOME", they really love
|
||||
// their distro name.
|
||||
"ubuntu:GNOME" => LinuxDesktopEnvironment::Gnome,
|
||||
"KDE" => LinuxDesktopEnvironment::Kde,
|
||||
|
||||
unsupported_de => unimplemented!(
|
||||
"This desktop environment [{}] has not been supported yet",
|
||||
unsupported_de
|
||||
),
|
||||
_ => LinuxDesktopEnvironment::Unsupported {
|
||||
xdg_current_desktop: de_str,
|
||||
},
|
||||
};
|
||||
|
||||
Some(de)
|
||||
@@ -67,12 +88,12 @@ fn get_linux_desktop_environment() -> Option<LinuxDesktopEnvironment> {
|
||||
//
|
||||
// tauri_plugin_shell::open() is deprecated, but we still use it.
|
||||
#[allow(deprecated)]
|
||||
pub async fn open<R: Runtime>(app_handle: AppHandle<R>, path: String) -> Result<(), String> {
|
||||
pub async fn open(app_handle: AppHandle, path: String) -> Result<(), String> {
|
||||
if cfg!(target_os = "linux") {
|
||||
let borrowed_path = Path::new(&path);
|
||||
if let Some(file_extension) = borrowed_path.extension() {
|
||||
if file_extension == "desktop" {
|
||||
let desktop_environment = get_linux_desktop_environment().expect("The Linux OS is running without a desktop, Coco could never run in such a environment");
|
||||
let desktop_environment = get_linux_desktop_environment().expect("The Linux OS is running without a desktop, Coco could never run in such an environment");
|
||||
return desktop_environment.launch_app_via_desktop_file(path);
|
||||
}
|
||||
}
|
||||
@@ -83,3 +104,55 @@ pub async fn open<R: Runtime>(app_handle: AppHandle<R>, path: String) -> Result<
|
||||
.open(path, None)
|
||||
.map_err(|e| e.to_string())
|
||||
}
|
||||
|
||||
#[cfg(test)]
|
||||
mod tests {
|
||||
use super::*;
|
||||
|
||||
// This test modifies env var XDG_CURRENT_DESKTOP, which is kinda unsafe
|
||||
// but considering this is just test, it is ok to do so.
|
||||
#[test]
|
||||
fn test_get_linux_desktop_environment() {
|
||||
// SAFETY: Rust code won't modify/read XDG_CURRENT_DESKTOP concurrently, we
|
||||
// have no guarantee from the underlying C code.
|
||||
unsafe {
|
||||
// Save the original value if it exists
|
||||
let original_value = std::env::var_os(XDG_CURRENT_DESKTOP);
|
||||
|
||||
// Test when XDG_CURRENT_DESKTOP is not set
|
||||
std::env::remove_var(XDG_CURRENT_DESKTOP);
|
||||
assert!(get_linux_desktop_environment().is_none());
|
||||
|
||||
// Test GNOME
|
||||
std::env::set_var(XDG_CURRENT_DESKTOP, "GNOME");
|
||||
let result = get_linux_desktop_environment();
|
||||
assert_eq!(result.unwrap(), LinuxDesktopEnvironment::Gnome);
|
||||
|
||||
// Test ubuntu:GNOME
|
||||
std::env::set_var(XDG_CURRENT_DESKTOP, "ubuntu:GNOME");
|
||||
let result = get_linux_desktop_environment();
|
||||
assert_eq!(result.unwrap(), LinuxDesktopEnvironment::Gnome);
|
||||
|
||||
// Test KDE
|
||||
std::env::set_var(XDG_CURRENT_DESKTOP, "KDE");
|
||||
let result = get_linux_desktop_environment();
|
||||
assert_eq!(result.unwrap(), LinuxDesktopEnvironment::Kde);
|
||||
|
||||
// Test unsupported desktop environment
|
||||
std::env::set_var(XDG_CURRENT_DESKTOP, "XFCE");
|
||||
let result = get_linux_desktop_environment();
|
||||
assert_eq!(
|
||||
result.unwrap(),
|
||||
LinuxDesktopEnvironment::Unsupported {
|
||||
xdg_current_desktop: "XFCE".into()
|
||||
}
|
||||
);
|
||||
|
||||
// Restore the original value
|
||||
match original_value {
|
||||
Some(value) => std::env::set_var(XDG_CURRENT_DESKTOP, value),
|
||||
None => std::env::remove_var(XDG_CURRENT_DESKTOP),
|
||||
}
|
||||
}
|
||||
}
|
||||
}
|
||||
|
||||
61
src-tauri/src/util/platform.rs
Normal file
61
src-tauri/src/util/platform.rs
Normal file
@@ -0,0 +1,61 @@
|
||||
use derive_more::Display;
|
||||
use serde::{Deserialize, Serialize};
|
||||
use std::borrow::Cow;
|
||||
use strum::EnumCount;
|
||||
use strum::VariantArray;
|
||||
|
||||
#[derive(
|
||||
Debug,
|
||||
Deserialize,
|
||||
Serialize,
|
||||
Copy,
|
||||
Clone,
|
||||
Hash,
|
||||
PartialEq,
|
||||
Eq,
|
||||
Display,
|
||||
EnumCount,
|
||||
VariantArray,
|
||||
)]
|
||||
#[serde(rename_all(serialize = "lowercase", deserialize = "lowercase"))]
|
||||
pub(crate) enum Platform {
|
||||
#[display("macOS")]
|
||||
Macos,
|
||||
#[display("Linux")]
|
||||
Linux,
|
||||
#[display("windows")]
|
||||
Windows,
|
||||
}
|
||||
|
||||
impl Platform {
|
||||
/// Helper function to determine the current platform.
|
||||
pub(crate) fn current() -> Platform {
|
||||
let os_str = std::env::consts::OS;
|
||||
serde_plain::from_str(os_str).unwrap_or_else(|_e| {
|
||||
panic!("std::env::consts::OS is [{}], which is not a valid value for [enum Platform], valid values: {:?}", os_str, Self::VARIANTS.iter().map(|platform|platform.to_string()).collect::<Vec<String>>());
|
||||
})
|
||||
}
|
||||
|
||||
/// Return the `X-OS-NAME` HTTP request header.
|
||||
pub(crate) fn to_os_name_http_header_str(&self) -> Cow<'static, str> {
|
||||
match self {
|
||||
Self::Macos => Cow::Borrowed("macos"),
|
||||
Self::Windows => Cow::Borrowed("windows"),
|
||||
// For Linux, we need the actual distro `ID`, not just a "linux".
|
||||
Self::Linux => Cow::Owned(sysinfo::System::distribution_id()),
|
||||
}
|
||||
}
|
||||
|
||||
/// Returns the number of platforms supported by Coco.
|
||||
//
|
||||
// a.k.a., the number of this enum's variants.
|
||||
pub(crate) fn num_of_supported_platforms() -> usize {
|
||||
Platform::COUNT
|
||||
}
|
||||
|
||||
/// Returns a set that contains all the platforms.
|
||||
#[cfg(test)] // currently, only used in tests
|
||||
pub(crate) fn all() -> std::collections::HashSet<Self> {
|
||||
Platform::VARIANTS.into_iter().copied().collect()
|
||||
}
|
||||
}
|
||||
87
src-tauri/src/util/updater.rs
Normal file
87
src-tauri/src/util/updater.rs
Normal file
@@ -0,0 +1,87 @@
|
||||
use semver::Version;
|
||||
use tauri_plugin_updater::RemoteRelease;
|
||||
|
||||
/// Helper function to extract the build number out of `version`.
|
||||
///
|
||||
/// If the version string is in the `x.y.z` format and does not include a build
|
||||
/// number, we assume a build number of 0.
|
||||
fn extract_build_number(version: &Version) -> u32 {
|
||||
let pre = &version.pre;
|
||||
|
||||
if pre.is_empty() {
|
||||
// A special value for the versions that do not have array
|
||||
0
|
||||
} else {
|
||||
let pre_str = pre.as_str();
|
||||
let build_number_str = {
|
||||
match pre_str.strip_prefix("SNAPSHOT-") {
|
||||
Some(str) => str,
|
||||
None => pre_str,
|
||||
}
|
||||
};
|
||||
let build_number : u32 = build_number_str.parse().unwrap_or_else(|e| {
|
||||
panic!(
|
||||
"invalid build number, cannot parse [{}] to a valid build number, error [{}], version [{}]",
|
||||
build_number_str, e, version
|
||||
)
|
||||
});
|
||||
|
||||
build_number
|
||||
}
|
||||
}
|
||||
|
||||
/// # Local version format
|
||||
///
|
||||
/// Packages built in our CI use the following format:
|
||||
///
|
||||
/// * `x.y.z-SNAPSHOT-<build number>`
|
||||
/// * `x.y.z-<build number>`
|
||||
///
|
||||
/// If you build Coco from src, the version will be in format `x.y.z`
|
||||
///
|
||||
/// # Remote version format
|
||||
///
|
||||
/// `x.y.z-<build number>`
|
||||
///
|
||||
/// # How we compare versions
|
||||
///
|
||||
/// We compare versions based solely on the build number.
|
||||
/// If the version string is in the `x.y.z` format and does not include a build number,
|
||||
/// we assume a build number of 0. As a result, such versions are considered older
|
||||
/// than any version with an explicit build number.
|
||||
pub(crate) fn custom_version_comparator(local: Version, remote_release: RemoteRelease) -> bool {
|
||||
let remote = remote_release.version;
|
||||
|
||||
let local_build_number = extract_build_number(&local);
|
||||
let remote_build_number = extract_build_number(&remote);
|
||||
|
||||
let should_update = remote_build_number > local_build_number;
|
||||
log::debug!(
|
||||
"custom version comparator invoked, local version [{}], remote version [{}], should update [{}]",
|
||||
local,
|
||||
remote,
|
||||
should_update
|
||||
);
|
||||
|
||||
should_update
|
||||
}
|
||||
|
||||
#[cfg(test)]
|
||||
mod tests {
|
||||
use super::*;
|
||||
|
||||
#[test]
|
||||
fn test_extract_build_number() {
|
||||
// 0.6.0 => 0
|
||||
let version = Version::parse("0.6.0").unwrap();
|
||||
assert_eq!(extract_build_number(&version), 0);
|
||||
|
||||
// 0.6.0-2371 => 2371
|
||||
let version = Version::parse("0.6.0-2371").unwrap();
|
||||
assert_eq!(extract_build_number(&version), 2371);
|
||||
|
||||
// 0.6.0-SNAPSHOT-2371 => 2371
|
||||
let version = Version::parse("0.6.0-SNAPSHOT-2371").unwrap();
|
||||
assert_eq!(extract_build_number(&version), 2371);
|
||||
}
|
||||
}
|
||||
@@ -113,20 +113,6 @@
|
||||
"icons/Square310x310Logo.png",
|
||||
"icons/StoreLogo.png"
|
||||
],
|
||||
"macOS": {
|
||||
"minimumSystemVersion": "10.12",
|
||||
"hardenedRuntime": true,
|
||||
"dmg": {
|
||||
"appPosition": {
|
||||
"x": 180,
|
||||
"y": 180
|
||||
},
|
||||
"applicationFolderPosition": {
|
||||
"x": 480,
|
||||
"y": 180
|
||||
}
|
||||
}
|
||||
},
|
||||
"resources": ["assets/**/*", "icons"]
|
||||
},
|
||||
"plugins": {
|
||||
@@ -140,7 +126,6 @@
|
||||
"https://release.infinilabs.com/coco/app/.latest.json?target={{target}}&arch={{arch}}¤t_version={{current_version}}"
|
||||
]
|
||||
},
|
||||
"websocket": {},
|
||||
"shell": {},
|
||||
"globalShortcut": {},
|
||||
"deep-link": {
|
||||
|
||||
15
src-tauri/tauri.linux.conf.json
Normal file
15
src-tauri/tauri.linux.conf.json
Normal file
@@ -0,0 +1,15 @@
|
||||
{
|
||||
"identifier": "rs.coco.app",
|
||||
"bundle": {
|
||||
"linux": {
|
||||
"deb": {
|
||||
"depends": ["gstreamer1.0-plugins-good"],
|
||||
"desktopTemplate": "./Coco.desktop"
|
||||
},
|
||||
"rpm": {
|
||||
"depends": ["gstreamer1-plugins-good"],
|
||||
"desktopTemplate": "./Coco.desktop"
|
||||
}
|
||||
}
|
||||
}
|
||||
}
|
||||
@@ -86,6 +86,12 @@ export const Get = <T>(
|
||||
} else {
|
||||
res = result?.data as FcResponse<T>;
|
||||
}
|
||||
// web component log
|
||||
infoLog({
|
||||
username: "@/api/axiosRequest.ts",
|
||||
logName: url,
|
||||
})(res);
|
||||
|
||||
resolve([null, res as FcResponse<T>]);
|
||||
})
|
||||
.catch((err) => {
|
||||
@@ -96,14 +102,14 @@ export const Get = <T>(
|
||||
|
||||
export const Post = <T>(
|
||||
url: string,
|
||||
data: IAnyObj,
|
||||
data: IAnyObj | undefined,
|
||||
params: IAnyObj = {},
|
||||
headers: IAnyObj = {}
|
||||
): Promise<[any, FcResponse<T> | undefined]> => {
|
||||
return new Promise((resolve) => {
|
||||
const appStore = JSON.parse(localStorage.getItem("app-store") || "{}");
|
||||
|
||||
let baseURL = appStore.state?.endpoint_http
|
||||
let baseURL = appStore.state?.endpoint_http;
|
||||
if (!baseURL || baseURL === "undefined") {
|
||||
baseURL = "";
|
||||
}
|
||||
|
||||
63
src/api/streamFetch.ts
Normal file
63
src/api/streamFetch.ts
Normal file
@@ -0,0 +1,63 @@
|
||||
export async function streamPost({
|
||||
url,
|
||||
body,
|
||||
queryParams,
|
||||
headers,
|
||||
onMessage,
|
||||
onError,
|
||||
}: {
|
||||
url: string;
|
||||
body: any;
|
||||
queryParams?: Record<string, any>;
|
||||
headers?: Record<string, string>;
|
||||
onMessage: (chunk: string) => void;
|
||||
onError?: (err: any) => void;
|
||||
}) {
|
||||
const appStore = JSON.parse(localStorage.getItem("app-store") || "{}");
|
||||
|
||||
let baseURL = appStore.state?.endpoint_http;
|
||||
if (!baseURL || baseURL === "undefined") {
|
||||
baseURL = "";
|
||||
}
|
||||
|
||||
const headersStr = localStorage.getItem("headers") || "{}";
|
||||
const headersStorage = JSON.parse(headersStr);
|
||||
|
||||
const query = new URLSearchParams(queryParams || {}).toString();
|
||||
const fullUrl = `${baseURL}${url}?${query}`;
|
||||
|
||||
try {
|
||||
const res = await fetch(fullUrl, {
|
||||
method: "POST",
|
||||
headers: {
|
||||
"Content-Type": "application/json",
|
||||
...(headersStorage),
|
||||
...(headers || {}),
|
||||
},
|
||||
body: JSON.stringify(body),
|
||||
});
|
||||
|
||||
if (!res.ok || !res.body) throw new Error("Stream failed");
|
||||
|
||||
const reader = res.body.getReader();
|
||||
const decoder = new TextDecoder("utf-8");
|
||||
let buffer = "";
|
||||
|
||||
while (true) {
|
||||
const { done, value } = await reader.read();
|
||||
if (done) break;
|
||||
|
||||
buffer += decoder.decode(value, { stream: true });
|
||||
|
||||
const lines = buffer.split("\n");
|
||||
for (let i = 0; i < lines.length - 1; i++) {
|
||||
const line = lines[i].trim();
|
||||
if (line) onMessage(line);
|
||||
}
|
||||
buffer = lines[lines.length - 1];
|
||||
}
|
||||
} catch (err) {
|
||||
console.error("streamPost error:", err);
|
||||
onError?.(err);
|
||||
}
|
||||
}
|
||||
@@ -1,133 +0,0 @@
|
||||
import { fetch } from "@tauri-apps/plugin-http";
|
||||
|
||||
import { clientEnv } from "@/utils/env";
|
||||
import { useLogStore } from "@/stores/logStore";
|
||||
import { get_server_token } from "@/commands";
|
||||
interface FetchRequestConfig {
|
||||
url: string;
|
||||
method?: "GET" | "POST" | "PUT" | "DELETE";
|
||||
headers?: Record<string, string>;
|
||||
body?: any;
|
||||
timeout?: number;
|
||||
parseAs?: "json" | "text" | "binary";
|
||||
baseURL?: string;
|
||||
}
|
||||
|
||||
interface FetchResponse<T = any> {
|
||||
data: T;
|
||||
status: number;
|
||||
statusText: string;
|
||||
headers: Headers;
|
||||
}
|
||||
|
||||
const timeoutPromise = (ms: number) => {
|
||||
return new Promise<never>((_, reject) =>
|
||||
setTimeout(() => reject(new Error(`Request timed out after ${ms} ms`)), ms)
|
||||
);
|
||||
};
|
||||
|
||||
export const tauriFetch = async <T = any>({
|
||||
url,
|
||||
method = "GET",
|
||||
headers = {},
|
||||
body,
|
||||
timeout = 30,
|
||||
parseAs = "json",
|
||||
baseURL = clientEnv.COCO_SERVER_URL
|
||||
}: FetchRequestConfig): Promise<FetchResponse<T>> => {
|
||||
const addLog = useLogStore.getState().addLog;
|
||||
|
||||
try {
|
||||
const appStore = JSON.parse(localStorage.getItem("app-store") || "{}");
|
||||
const connectStore = JSON.parse(localStorage.getItem("connect-store") || "{}");
|
||||
console.log("baseURL", appStore.state?.endpoint_http)
|
||||
|
||||
baseURL = appStore.state?.endpoint_http || baseURL;
|
||||
|
||||
const authStore = JSON.parse(localStorage.getItem("auth-store") || "{}")
|
||||
const auth = authStore?.state?.auth
|
||||
console.log("auth", auth)
|
||||
|
||||
if (baseURL.endsWith("/")) {
|
||||
baseURL = baseURL.slice(0, -1);
|
||||
}
|
||||
|
||||
if (!url.startsWith("http://") && !url.startsWith("https://")) {
|
||||
// If not, prepend the defaultPrefix
|
||||
url = baseURL + url;
|
||||
}
|
||||
|
||||
if (method !== "GET") {
|
||||
headers["Content-Type"] = "application/json";
|
||||
}
|
||||
|
||||
const server_id = connectStore.state?.currentService?.id || "default_coco_server"
|
||||
const res: any = await get_server_token(server_id);
|
||||
|
||||
headers["X-API-TOKEN"] = headers["X-API-TOKEN"] || res?.access_token || undefined;
|
||||
|
||||
// debug API
|
||||
const requestInfo = {
|
||||
url,
|
||||
method,
|
||||
headers,
|
||||
body,
|
||||
timeout,
|
||||
parseAs,
|
||||
};
|
||||
|
||||
const fetchPromise = fetch(url, {
|
||||
method,
|
||||
headers,
|
||||
body,
|
||||
});
|
||||
|
||||
const response = await Promise.race([
|
||||
fetchPromise,
|
||||
timeoutPromise(timeout * 1000),
|
||||
]);
|
||||
|
||||
const statusText = response.ok ? "OK" : "Error";
|
||||
|
||||
let data: any;
|
||||
if (parseAs === "json") {
|
||||
data = await response.json();
|
||||
} else if (parseAs === "text") {
|
||||
data = await response.text();
|
||||
} else {
|
||||
data = await response.arrayBuffer();
|
||||
}
|
||||
|
||||
// debug API
|
||||
const log = {
|
||||
request: requestInfo,
|
||||
response: {
|
||||
data,
|
||||
status: response.status,
|
||||
statusText,
|
||||
headers: response.headers,
|
||||
},
|
||||
};
|
||||
addLog(log);
|
||||
|
||||
return log.response;
|
||||
} catch (error) {
|
||||
console.error("Request failed:", error);
|
||||
|
||||
// debug API
|
||||
const log = {
|
||||
request: {
|
||||
url,
|
||||
method,
|
||||
headers,
|
||||
body,
|
||||
timeout,
|
||||
parseAs,
|
||||
},
|
||||
error,
|
||||
};
|
||||
addLog(log);
|
||||
|
||||
throw error;
|
||||
}
|
||||
};
|
||||
@@ -1,14 +1,13 @@
|
||||
import { invoke } from "@tauri-apps/api/core";
|
||||
|
||||
import {
|
||||
ServerTokenResponse,
|
||||
Server,
|
||||
Connector,
|
||||
DataSource,
|
||||
GetResponse,
|
||||
UploadAttachmentPayload,
|
||||
UploadAttachmentResponse,
|
||||
GetAttachmentPayload,
|
||||
GetAttachmentByIdsPayload,
|
||||
GetAttachmentResponse,
|
||||
DeleteAttachmentPayload,
|
||||
TranscriptionPayload,
|
||||
@@ -17,6 +16,49 @@ import {
|
||||
} from "@/types/commands";
|
||||
import { useAppStore } from "@/stores/appStore";
|
||||
import { useAuthStore } from "@/stores/authStore";
|
||||
import { useConnectStore } from "@/stores/connectStore";
|
||||
import { SETTINGS_WINDOW_LABEL } from "@/constants";
|
||||
import platformAdapter from "@/utils/platformAdapter";
|
||||
|
||||
export async function getCurrentWindowService() {
|
||||
const currentService = useConnectStore.getState().currentService;
|
||||
const cloudSelectService = useConnectStore.getState().cloudSelectService;
|
||||
const windowLabel = await platformAdapter.getCurrentWindowLabel();
|
||||
|
||||
return windowLabel === SETTINGS_WINDOW_LABEL
|
||||
? cloudSelectService
|
||||
: currentService;
|
||||
}
|
||||
|
||||
export async function setCurrentWindowService(service: any) {
|
||||
const windowLabel = await platformAdapter.getCurrentWindowLabel();
|
||||
const { setCurrentService, setCloudSelectService } =
|
||||
useConnectStore.getState();
|
||||
|
||||
return windowLabel === SETTINGS_WINDOW_LABEL
|
||||
? setCloudSelectService(service)
|
||||
: setCurrentService(service);
|
||||
}
|
||||
|
||||
export async function handleLogout(serverId?: string) {
|
||||
const setIsCurrentLogin = useAuthStore.getState().setIsCurrentLogin;
|
||||
const { serverList, setServerList } = useConnectStore.getState();
|
||||
|
||||
const service = await getCurrentWindowService();
|
||||
|
||||
const id = serverId || service?.id;
|
||||
if (!id) return;
|
||||
|
||||
// Update the status first
|
||||
setIsCurrentLogin(false);
|
||||
if (service?.id === id) {
|
||||
await setCurrentWindowService({ ...service, profile: null });
|
||||
}
|
||||
const updatedServerList = serverList.map((server) =>
|
||||
server.id === id ? { ...server, profile: null } : server
|
||||
);
|
||||
setServerList(updatedServerList);
|
||||
}
|
||||
|
||||
// Endpoints that don't require authentication
|
||||
const WHITELIST_SERVERS = [
|
||||
@@ -37,7 +79,15 @@ async function invokeWithErrorHandler<T>(
|
||||
args?: Record<string, any>
|
||||
): Promise<T> {
|
||||
const isCurrentLogin = useAuthStore.getState().isCurrentLogin;
|
||||
if (!WHITELIST_SERVERS.includes(command) && !isCurrentLogin) {
|
||||
|
||||
const service = await getCurrentWindowService();
|
||||
|
||||
// Not logged in
|
||||
// console.log("isCurrentLogin", command, isCurrentLogin);
|
||||
if (
|
||||
!WHITELIST_SERVERS.includes(command) &&
|
||||
(!isCurrentLogin || !service?.profile)
|
||||
) {
|
||||
console.error("This command requires authentication");
|
||||
throw new Error("This command requires authentication");
|
||||
}
|
||||
@@ -64,18 +114,31 @@ async function invokeWithErrorHandler<T>(
|
||||
}
|
||||
}
|
||||
|
||||
// Server Data log
|
||||
let parsedResult = result;
|
||||
let logData = result;
|
||||
if (typeof result === "string") {
|
||||
parsedResult = JSON.parse(result);
|
||||
logData = parsedResult;
|
||||
}
|
||||
infoLog({
|
||||
username: "@/commands/servers.ts",
|
||||
logName: command,
|
||||
})(logData);
|
||||
|
||||
return result;
|
||||
} catch (error: any) {
|
||||
const errorMessage = error || "Command execution failed";
|
||||
addError(command + ":" + errorMessage, "error");
|
||||
// 401 Unauthorized
|
||||
if (errorMessage.includes("Unauthorized")) {
|
||||
handleLogout();
|
||||
} else {
|
||||
addError(command + ":" + errorMessage, "error");
|
||||
}
|
||||
throw error;
|
||||
}
|
||||
}
|
||||
|
||||
export function get_server_token(id: string): Promise<ServerTokenResponse> {
|
||||
return invokeWithErrorHandler(`get_server_token`, { id });
|
||||
}
|
||||
|
||||
export function list_coco_servers(): Promise<Server[]> {
|
||||
return invokeWithErrorHandler(`list_coco_servers`);
|
||||
}
|
||||
@@ -146,14 +209,6 @@ export function mcp_server_search({
|
||||
return invokeWithErrorHandler(`mcp_server_search`, { id, queryParams });
|
||||
}
|
||||
|
||||
export function connect_to_server(id: string, clientId: string): Promise<void> {
|
||||
return invokeWithErrorHandler(`connect_to_server`, { id, clientId });
|
||||
}
|
||||
|
||||
export function disconnect(clientId: string): Promise<void> {
|
||||
return invokeWithErrorHandler(`disconnect`, { clientId });
|
||||
}
|
||||
|
||||
export function chat_history({
|
||||
serverId,
|
||||
from = 0,
|
||||
@@ -221,54 +276,63 @@ export function open_session_chat({
|
||||
export function cancel_session_chat({
|
||||
serverId,
|
||||
sessionId,
|
||||
queryParams,
|
||||
}: {
|
||||
serverId: string;
|
||||
sessionId: string;
|
||||
queryParams?: Record<string, any>;
|
||||
}): Promise<string> {
|
||||
return invokeWithErrorHandler(`cancel_session_chat`, {
|
||||
serverId,
|
||||
sessionId,
|
||||
});
|
||||
}
|
||||
|
||||
export function new_chat({
|
||||
serverId,
|
||||
websocketId,
|
||||
message,
|
||||
queryParams,
|
||||
}: {
|
||||
serverId: string;
|
||||
websocketId: string;
|
||||
message: string;
|
||||
queryParams?: Record<string, any>;
|
||||
}): Promise<GetResponse> {
|
||||
return invokeWithErrorHandler(`new_chat`, {
|
||||
serverId,
|
||||
websocketId,
|
||||
message,
|
||||
queryParams,
|
||||
});
|
||||
}
|
||||
|
||||
export function send_message({
|
||||
export function chat_create({
|
||||
serverId,
|
||||
message,
|
||||
attachments,
|
||||
queryParams,
|
||||
clientId,
|
||||
}: {
|
||||
serverId: string;
|
||||
message: string;
|
||||
attachments: string[];
|
||||
queryParams?: Record<string, any>;
|
||||
clientId: string;
|
||||
}): Promise<GetResponse> {
|
||||
return invokeWithErrorHandler(`chat_create`, {
|
||||
serverId,
|
||||
message,
|
||||
attachments,
|
||||
queryParams,
|
||||
clientId,
|
||||
});
|
||||
}
|
||||
|
||||
export function chat_chat({
|
||||
serverId,
|
||||
websocketId,
|
||||
sessionId,
|
||||
message,
|
||||
attachments,
|
||||
queryParams,
|
||||
clientId,
|
||||
}: {
|
||||
serverId: string;
|
||||
websocketId: string;
|
||||
sessionId: string;
|
||||
message: string;
|
||||
attachments: string[];
|
||||
queryParams?: Record<string, any>;
|
||||
clientId: string;
|
||||
}): Promise<string> {
|
||||
return invokeWithErrorHandler(`send_message`, {
|
||||
return invokeWithErrorHandler(`chat_chat`, {
|
||||
serverId,
|
||||
websocketId,
|
||||
sessionId,
|
||||
message,
|
||||
attachments,
|
||||
queryParams,
|
||||
clientId,
|
||||
});
|
||||
}
|
||||
|
||||
@@ -290,9 +354,7 @@ export const update_session_chat = (payload: {
|
||||
|
||||
export const assistant_search = (payload: {
|
||||
serverId: string;
|
||||
from: number;
|
||||
size: number;
|
||||
query?: Record<string, any>;
|
||||
queryParams?: string[];
|
||||
}): Promise<boolean> => {
|
||||
return invokeWithErrorHandler<boolean>("assistant_search", payload);
|
||||
};
|
||||
@@ -323,10 +385,13 @@ export const upload_attachment = async (payload: UploadAttachmentPayload) => {
|
||||
}
|
||||
};
|
||||
|
||||
export const get_attachment = (payload: GetAttachmentPayload) => {
|
||||
return invokeWithErrorHandler<GetAttachmentResponse>("get_attachment", {
|
||||
...payload,
|
||||
});
|
||||
export const get_attachment_by_ids = (payload: GetAttachmentByIdsPayload) => {
|
||||
return invokeWithErrorHandler<GetAttachmentResponse>(
|
||||
"get_attachment_by_ids",
|
||||
{
|
||||
...payload,
|
||||
}
|
||||
);
|
||||
};
|
||||
|
||||
export const delete_attachment = (payload: DeleteAttachmentPayload) => {
|
||||
@@ -349,3 +414,7 @@ export const query_coco_fusion = (payload: {
|
||||
...payload,
|
||||
});
|
||||
};
|
||||
|
||||
export const get_app_search_source = () => {
|
||||
return invokeWithErrorHandler<void>("get_app_search_source");
|
||||
};
|
||||
|
||||
@@ -34,4 +34,8 @@ export function show_check(): Promise<void> {
|
||||
|
||||
export function hide_check(): Promise<void> {
|
||||
return invoke('hide_check');
|
||||
}
|
||||
|
||||
export function toggle_move_to_active_space_attribute(): Promise<void> {
|
||||
return invoke('toggle_move_to_active_space_attribute');
|
||||
}
|
||||
@@ -1,10 +1,8 @@
|
||||
import { useRef } from "react";
|
||||
|
||||
import { Post } from "@/api/axiosRequest";
|
||||
import platformAdapter from "@/utils/platformAdapter";
|
||||
import { useConnectStore } from "@/stores/connectStore";
|
||||
import { useAppStore } from "@/stores/appStore";
|
||||
import { parseSearchQuery, SearchQuery, unrequitable } from "@/utils";
|
||||
import { parseSearchQuery, unrequitable } from "@/utils";
|
||||
|
||||
interface AssistantFetcherProps {
|
||||
debounceKeyword?: string;
|
||||
@@ -15,8 +13,6 @@ export const AssistantFetcher = ({
|
||||
debounceKeyword = "",
|
||||
assistantIDs = [],
|
||||
}: AssistantFetcherProps) => {
|
||||
const isTauri = useAppStore((state) => state.isTauri);
|
||||
|
||||
const { currentService, currentAssistant, setCurrentAssistant } =
|
||||
useConnectStore();
|
||||
|
||||
@@ -43,7 +39,7 @@ export const AssistantFetcher = ({
|
||||
query,
|
||||
} = params;
|
||||
|
||||
const searchQuery: SearchQuery = {
|
||||
const queryParams = parseSearchQuery({
|
||||
from: (current - 1) * pageSize,
|
||||
size: pageSize,
|
||||
query: query ?? debounceKeyword,
|
||||
@@ -52,38 +48,15 @@ export const AssistantFetcher = ({
|
||||
enabled: true,
|
||||
id: assistantIDs,
|
||||
},
|
||||
};
|
||||
});
|
||||
|
||||
const queryParams = parseSearchQuery(searchQuery);
|
||||
|
||||
const body: Record<string, any> = {
|
||||
const response = await platformAdapter.fetchAssistant(
|
||||
serverId,
|
||||
queryParams,
|
||||
};
|
||||
|
||||
let response: any;
|
||||
|
||||
if (isTauri) {
|
||||
if (!currentService?.id) {
|
||||
throw new Error("currentService is undefined");
|
||||
}
|
||||
|
||||
response = await platformAdapter.commands("assistant_search", body);
|
||||
} else {
|
||||
body.serverId = undefined;
|
||||
const [error, res] = await Post(`/assistant/_search`, body);
|
||||
|
||||
if (error) {
|
||||
throw new Error(error);
|
||||
}
|
||||
|
||||
response = res;
|
||||
}
|
||||
queryParams
|
||||
);
|
||||
|
||||
let assistantList = response?.hits?.hits ?? [];
|
||||
|
||||
console.log("assistantList", assistantList);
|
||||
|
||||
if (
|
||||
!currentAssistant?._id ||
|
||||
currentService?.id !== lastServerId.current
|
||||
|
||||
@@ -17,7 +17,6 @@ import { AssistantFetcher } from "./AssistantFetcher";
|
||||
import AssistantItem from "./AssistantItem";
|
||||
import Pagination from "@/components/Common/Pagination";
|
||||
import { useSearchStore } from "@/stores/searchStore";
|
||||
import { useChatStore } from "@/stores/chatStore";
|
||||
|
||||
interface AssistantListProps {
|
||||
assistantIDs?: string[];
|
||||
@@ -44,9 +43,6 @@ export function AssistantList({ assistantIDs = [] }: AssistantListProps) {
|
||||
return state.setAskAiAssistantId;
|
||||
});
|
||||
const assistantList = useConnectStore((state) => state.assistantList);
|
||||
const connected = useChatStore((state) => {
|
||||
return state.connected;
|
||||
});
|
||||
|
||||
const { fetchAssistant } = AssistantFetcher({
|
||||
debounceKeyword,
|
||||
@@ -54,24 +50,12 @@ export function AssistantList({ assistantIDs = [] }: AssistantListProps) {
|
||||
});
|
||||
|
||||
const getAssistants = (params: { current: number; pageSize: number }) => {
|
||||
if (!connected) {
|
||||
return Promise.resolve({
|
||||
total: 0,
|
||||
list: [],
|
||||
});
|
||||
}
|
||||
|
||||
return fetchAssistant(params);
|
||||
};
|
||||
|
||||
const { pagination, runAsync } = usePagination(getAssistants, {
|
||||
defaultPageSize: 5,
|
||||
refreshDeps: [
|
||||
currentService?.id,
|
||||
debounceKeyword,
|
||||
currentService?.enabled,
|
||||
connected,
|
||||
],
|
||||
refreshDeps: [currentService?.id, debounceKeyword, currentService?.enabled],
|
||||
onSuccess(data) {
|
||||
setAssistants(data.list);
|
||||
|
||||
@@ -198,7 +182,7 @@ export function AssistantList({ assistantIDs = [] }: AssistantListProps) {
|
||||
</PopoverButton>
|
||||
|
||||
<PopoverPanel
|
||||
className="absolute z-50 top-full mt-1 left-0 w-60 rounded-xl bg-white dark:bg-[#202126] p-3 text-sm/6 text-[#333] dark:text-[#D8D8D8] shadow-lg border dark:border-white/10 focus:outline-none max-h-[calc(100vh-80px)] overflow-y-auto"
|
||||
className="absolute z-50 top-full mt-1 left-0 w-60 rounded-xl bg-white dark:bg-[#202126] p-3 text-sm/6 text-[#333] dark:text-[#D8D8D8] shadow-lg border dark:border-white/10 focus:outline-none max-h-[calc(100vh-150px)] overflow-y-auto"
|
||||
onMouseMove={handleMouseMove}
|
||||
>
|
||||
<div className="flex items-center justify-between text-sm font-bold">
|
||||
|
||||
182
src/components/Assistant/AttachmentList.tsx
Normal file
182
src/components/Assistant/AttachmentList.tsx
Normal file
@@ -0,0 +1,182 @@
|
||||
import { FC, useEffect, useMemo } from "react";
|
||||
import { X } from "lucide-react";
|
||||
import { useAsyncEffect } from "ahooks";
|
||||
import { useTranslation } from "react-i18next";
|
||||
|
||||
import { useChatStore, UploadAttachments } from "@/stores/chatStore";
|
||||
import { useConnectStore } from "@/stores/connectStore";
|
||||
import platformAdapter from "@/utils/platformAdapter";
|
||||
import Tooltip2 from "../Common/Tooltip2";
|
||||
import FileIcon from "../Common/Icons/FileIcon";
|
||||
import { filesize } from "@/utils";
|
||||
|
||||
const AttachmentList = () => {
|
||||
const { uploadAttachments, setUploadAttachments } = useChatStore();
|
||||
const { currentService } = useConnectStore();
|
||||
|
||||
const serverId = useMemo(() => {
|
||||
return currentService.id;
|
||||
}, [currentService]);
|
||||
|
||||
useEffect(() => {
|
||||
return () => {
|
||||
setUploadAttachments([]);
|
||||
};
|
||||
}, []);
|
||||
|
||||
const uploadAttachment = async (data: UploadAttachments) => {
|
||||
const { uploading, uploaded, uploadFailed, path } = data;
|
||||
|
||||
if (uploading || uploaded || uploadFailed) return;
|
||||
|
||||
const { uploadAttachments } = useChatStore.getState();
|
||||
|
||||
const matched = uploadAttachments.find((item) => item.id === data.id);
|
||||
|
||||
if (matched) {
|
||||
matched.uploading = true;
|
||||
|
||||
setUploadAttachments(uploadAttachments);
|
||||
}
|
||||
|
||||
try {
|
||||
const attachmentIds: any = await platformAdapter.commands(
|
||||
"upload_attachment",
|
||||
{
|
||||
serverId,
|
||||
filePaths: [path],
|
||||
}
|
||||
);
|
||||
|
||||
if (!attachmentIds) {
|
||||
throw new Error("Failed to get attachment id");
|
||||
} else {
|
||||
Object.assign(data, {
|
||||
uploaded: true,
|
||||
attachmentId: attachmentIds[0],
|
||||
});
|
||||
}
|
||||
} catch (error) {
|
||||
Object.assign(data, {
|
||||
uploadFailed: true,
|
||||
failedMessage: String(error),
|
||||
});
|
||||
} finally {
|
||||
Object.assign(data, {
|
||||
uploading: false,
|
||||
});
|
||||
|
||||
setUploadAttachments(uploadAttachments);
|
||||
}
|
||||
};
|
||||
|
||||
useAsyncEffect(async () => {
|
||||
if (uploadAttachments.length === 0) return;
|
||||
|
||||
for (const item of uploadAttachments) {
|
||||
uploadAttachment(item);
|
||||
}
|
||||
}, [uploadAttachments]);
|
||||
|
||||
const deleteFile = async (id: string) => {
|
||||
const { uploadAttachments } = useChatStore.getState();
|
||||
|
||||
const matched = uploadAttachments.find((item) => item.id === id);
|
||||
|
||||
if (!matched) return;
|
||||
|
||||
const { uploadFailed, attachmentId } = matched;
|
||||
|
||||
setUploadAttachments(uploadAttachments.filter((file) => file.id !== id));
|
||||
|
||||
if (uploadFailed) return;
|
||||
|
||||
platformAdapter.commands("delete_attachment", {
|
||||
serverId,
|
||||
id: attachmentId,
|
||||
});
|
||||
};
|
||||
|
||||
return (
|
||||
<div className="flex flex-wrap gap-y-2 -mx-1 text-sm">
|
||||
{uploadAttachments.map((file) => {
|
||||
return (
|
||||
<AttachmentItem
|
||||
key={file.id}
|
||||
{...file}
|
||||
deletable
|
||||
onDelete={deleteFile}
|
||||
/>
|
||||
);
|
||||
})}
|
||||
</div>
|
||||
);
|
||||
};
|
||||
|
||||
interface AttachmentItemProps extends UploadAttachments {
|
||||
deletable?: boolean;
|
||||
onDelete?: (id: string) => void;
|
||||
}
|
||||
|
||||
export const AttachmentItem: FC<AttachmentItemProps> = (props) => {
|
||||
const {
|
||||
id,
|
||||
name,
|
||||
path,
|
||||
extname,
|
||||
size,
|
||||
uploaded,
|
||||
attachmentId,
|
||||
uploadFailed,
|
||||
failedMessage,
|
||||
deletable,
|
||||
onDelete,
|
||||
} = props;
|
||||
const { t } = useTranslation();
|
||||
|
||||
return (
|
||||
<div key={id} className="w-1/3 px-1">
|
||||
<div className="relative group flex items-center gap-1 p-1 rounded-[4px] bg-[#dedede] dark:bg-[#202126]">
|
||||
{(uploadFailed || attachmentId) && deletable && (
|
||||
<div
|
||||
className="absolute flex justify-center items-center size-[14px] bg-red-600 top-0 right-0 rounded-full cursor-pointer translate-x-[5px] -translate-y-[5px] transition opacity-0 group-hover:opacity-100 "
|
||||
onClick={() => {
|
||||
onDelete?.(id);
|
||||
}}
|
||||
>
|
||||
<X className="size-[10px] text-white" />
|
||||
</div>
|
||||
)}
|
||||
|
||||
<FileIcon path={path} />
|
||||
|
||||
<div className="flex flex-col justify-between overflow-hidden">
|
||||
<div className="truncate text-sm text-[#333333] dark:text-[#D8D8D8]">
|
||||
{name}
|
||||
</div>
|
||||
|
||||
<div className="text-xs">
|
||||
{uploadFailed && failedMessage ? (
|
||||
<Tooltip2 content={failedMessage}>
|
||||
<span className="text-red-500">Upload Failed</span>
|
||||
</Tooltip2>
|
||||
) : (
|
||||
<div className="text-[#999]">
|
||||
{uploaded ? (
|
||||
<div className="flex gap-2">
|
||||
{extname && <span>{extname}</span>}
|
||||
<span>{filesize(size)}</span>
|
||||
</div>
|
||||
) : (
|
||||
<span>{t("assistant.fileList.uploading")}</span>
|
||||
)}
|
||||
</div>
|
||||
)}
|
||||
</div>
|
||||
</div>
|
||||
</div>
|
||||
</div>
|
||||
);
|
||||
};
|
||||
|
||||
export default AttachmentList;
|
||||
@@ -12,7 +12,6 @@ import { useChatStore } from "@/stores/chatStore";
|
||||
import { useConnectStore } from "@/stores/connectStore";
|
||||
import { useWindows } from "@/hooks/useWindows";
|
||||
import useMessageChunkData from "@/hooks/useMessageChunkData";
|
||||
import useWebSocket from "@/hooks/useWebSocket";
|
||||
import { useChatActions } from "@/hooks/useChatActions";
|
||||
import { useMessageHandler } from "@/hooks/useMessageHandler";
|
||||
import { ChatSidebar } from "./ChatSidebar";
|
||||
@@ -23,7 +22,6 @@ import type { Chat, StartPage } from "@/types/chat";
|
||||
import PrevSuggestion from "@/components/ChatMessage/PrevSuggestion";
|
||||
import { useAppStore } from "@/stores/appStore";
|
||||
import { useSearchStore } from "@/stores/searchStore";
|
||||
// import ReadAloud from "./ReadAloud";
|
||||
import { useAuthStore } from "@/stores/authStore";
|
||||
import Splash from "./Splash";
|
||||
|
||||
@@ -41,12 +39,18 @@ interface ChatAIProps {
|
||||
showChatHistory?: boolean;
|
||||
assistantIDs?: string[];
|
||||
startPage?: StartPage;
|
||||
formatUrl?: (data: any) => string;
|
||||
instanceId?: string;
|
||||
}
|
||||
|
||||
export interface SendMessageParams {
|
||||
message?: string;
|
||||
attachments?: string[];
|
||||
}
|
||||
|
||||
export interface ChatAIRef {
|
||||
init: (value: string) => void;
|
||||
init: (params: SendMessageParams) => void;
|
||||
cancelChat: () => void;
|
||||
reconnect: () => void;
|
||||
clearChat: () => void;
|
||||
}
|
||||
|
||||
@@ -67,18 +71,19 @@ const ChatAI = memo(
|
||||
showChatHistory,
|
||||
assistantIDs,
|
||||
startPage,
|
||||
formatUrl,
|
||||
instanceId,
|
||||
},
|
||||
ref
|
||||
) => {
|
||||
useImperativeHandle(ref, () => ({
|
||||
init: init,
|
||||
cancelChat: () => cancelChat(activeChat),
|
||||
reconnect: reconnect,
|
||||
clearChat: clearChat,
|
||||
}));
|
||||
|
||||
const { curChatEnd, setCurChatEnd, connected, setConnected } =
|
||||
useChatStore();
|
||||
const curChatEnd = useChatStore((state) => state.curChatEnd);
|
||||
const setCurChatEnd = useChatStore((state) => state.setCurChatEnd);
|
||||
|
||||
const isTauri = useAppStore((state) => state.isTauri);
|
||||
|
||||
@@ -87,9 +92,7 @@ const ChatAI = memo(
|
||||
return state.setIsCurrentLogin;
|
||||
});
|
||||
|
||||
const visibleStartPage = useConnectStore((state) => {
|
||||
return state.visibleStartPage;
|
||||
});
|
||||
const { currentService, visibleStartPage } = useConnectStore();
|
||||
|
||||
const addError = useAppStore.getState().addError;
|
||||
|
||||
@@ -97,6 +100,7 @@ const ChatAI = memo(
|
||||
const [timedoutShow, setTimedoutShow] = useState(false);
|
||||
|
||||
const curIdRef = useRef("");
|
||||
const curSessionIdRef = useRef("");
|
||||
|
||||
const [isSidebarOpenChat, setIsSidebarOpenChat] = useState(isSidebarOpen);
|
||||
const [chats, setChats] = useState<Chat[]>([]);
|
||||
@@ -107,9 +111,6 @@ const ChatAI = memo(
|
||||
const askAiServerId = useSearchStore((state) => {
|
||||
return state.askAiServerId;
|
||||
});
|
||||
const currentService = useConnectStore((state) => {
|
||||
return state.currentService;
|
||||
});
|
||||
|
||||
useEffect(() => {
|
||||
activeChatProp && setActiveChat(activeChatProp);
|
||||
@@ -123,10 +124,10 @@ const ChatAI = memo(
|
||||
setIsCurrentLogin(false);
|
||||
}
|
||||
|
||||
if (showChatHistory && connected) {
|
||||
if (showChatHistory) {
|
||||
getChatHistory();
|
||||
}
|
||||
}, [currentService?.enabled, showChatHistory, connected]);
|
||||
}, [currentService?.enabled, showChatHistory]);
|
||||
|
||||
useEffect(() => {
|
||||
if (askAiServerId || !askAiSessionId) return;
|
||||
@@ -138,12 +139,6 @@ const ChatAI = memo(
|
||||
|
||||
const [Question, setQuestion] = useState<string>("");
|
||||
|
||||
const [websocketSessionId, setWebsocketSessionId] = useState("");
|
||||
|
||||
const onWebsocketSessionId = useCallback((sessionId: string) => {
|
||||
setWebsocketSessionId(sessionId);
|
||||
}, []);
|
||||
|
||||
const {
|
||||
data: {
|
||||
query_intent,
|
||||
@@ -170,15 +165,6 @@ const ChatAI = memo(
|
||||
|
||||
const dealMsgRef = useRef<((msg: string) => void) | null>(null);
|
||||
|
||||
const clientId = isChatPage ? "standalone" : "popup";
|
||||
const { reconnect, updateDealMsg } = useWebSocket({
|
||||
clientId,
|
||||
connected,
|
||||
setConnected,
|
||||
dealMsgRef,
|
||||
onWebsocketSessionId,
|
||||
});
|
||||
|
||||
const {
|
||||
chatClose,
|
||||
cancelChat,
|
||||
@@ -198,17 +184,21 @@ const ChatAI = memo(
|
||||
clearAllChunkData,
|
||||
setQuestion,
|
||||
curIdRef,
|
||||
curSessionIdRef,
|
||||
setChats,
|
||||
dealMsgRef,
|
||||
setLoadingStep,
|
||||
isChatPage,
|
||||
isSearchActive,
|
||||
isDeepThinkActive,
|
||||
isMCPActive,
|
||||
changeInput,
|
||||
websocketSessionId,
|
||||
showChatHistory
|
||||
);
|
||||
|
||||
const { dealMsg } = useMessageHandler(
|
||||
curIdRef,
|
||||
curSessionIdRef,
|
||||
setCurChatEnd,
|
||||
setTimedoutShow,
|
||||
(chat) => cancelChat(chat || activeChat),
|
||||
@@ -216,6 +206,13 @@ const ChatAI = memo(
|
||||
handlers
|
||||
);
|
||||
|
||||
const updateDealMsg = useCallback(
|
||||
(newDealMsg: (msg: string) => void) => {
|
||||
dealMsgRef.current = newDealMsg;
|
||||
},
|
||||
[dealMsgRef]
|
||||
);
|
||||
|
||||
useEffect(() => {
|
||||
if (dealMsg) {
|
||||
dealMsgRef.current = dealMsg;
|
||||
@@ -233,7 +230,7 @@ const ChatAI = memo(
|
||||
}, [activeChat, chatClose]);
|
||||
|
||||
const init = useCallback(
|
||||
async (value: string) => {
|
||||
async (params: SendMessageParams) => {
|
||||
try {
|
||||
//console.log("init", curChatEnd, activeChat?._id);
|
||||
if (!isCurrentLogin) {
|
||||
@@ -245,9 +242,9 @@ const ChatAI = memo(
|
||||
return;
|
||||
}
|
||||
if (!activeChat?._id) {
|
||||
await createNewChat(value, activeChat, websocketSessionId);
|
||||
await createNewChat(params);
|
||||
} else {
|
||||
await handleSendMessage(value, activeChat, websocketSessionId);
|
||||
await handleSendMessage(activeChat, params);
|
||||
}
|
||||
} catch (error) {
|
||||
console.error("Failed to initialize chat:", error);
|
||||
@@ -259,7 +256,6 @@ const ChatAI = memo(
|
||||
activeChat?._id,
|
||||
createNewChat,
|
||||
handleSendMessage,
|
||||
websocketSessionId,
|
||||
]
|
||||
);
|
||||
|
||||
@@ -271,7 +267,8 @@ const ChatAI = memo(
|
||||
const onSelectChat = useCallback(
|
||||
async (chat: Chat) => {
|
||||
setTimedoutShow(false);
|
||||
clearAllChunkData();
|
||||
|
||||
await clearAllChunkData();
|
||||
await cancelChat(activeChat);
|
||||
await chatClose(activeChat);
|
||||
const response = await openSessionChat(chat);
|
||||
@@ -293,7 +290,10 @@ const ChatAI = memo(
|
||||
if (updatedChats.length > 0) {
|
||||
setActiveChat(updatedChats[0]);
|
||||
} else {
|
||||
init("");
|
||||
init({
|
||||
message: "",
|
||||
attachments: [],
|
||||
});
|
||||
}
|
||||
}
|
||||
|
||||
@@ -376,6 +376,7 @@ const ChatAI = memo(
|
||||
)}
|
||||
<div
|
||||
data-tauri-drag-region
|
||||
data-chat-instance={instanceId}
|
||||
className={`flex flex-col rounded-md h-full overflow-hidden relative`}
|
||||
>
|
||||
<ChatHeader
|
||||
@@ -384,7 +385,6 @@ const ChatAI = memo(
|
||||
setIsSidebarOpen={toggleSidebar}
|
||||
isSidebarOpen={isSidebarOpenChat}
|
||||
activeChat={activeChat}
|
||||
reconnect={reconnect}
|
||||
isChatPage={isChatPage}
|
||||
showChatHistory={showChatHistory}
|
||||
assistantIDs={assistantIDs}
|
||||
@@ -394,7 +394,6 @@ const ChatAI = memo(
|
||||
<>
|
||||
<ChatContent
|
||||
activeChat={activeChat}
|
||||
curChatEnd={curChatEnd}
|
||||
query_intent={query_intent}
|
||||
tools={tools}
|
||||
fetch_source={fetch_source}
|
||||
@@ -405,10 +404,12 @@ const ChatAI = memo(
|
||||
loadingStep={loadingStep}
|
||||
timedoutShow={timedoutShow}
|
||||
Question={Question}
|
||||
handleSendMessage={(value) =>
|
||||
handleSendMessage(value, activeChat)
|
||||
handleSendMessage={(message) =>
|
||||
handleSendMessage(activeChat, { message })
|
||||
}
|
||||
getFileUrl={getFileUrl}
|
||||
formatUrl={formatUrl}
|
||||
curIdRef={curIdRef}
|
||||
/>
|
||||
<Splash assistantIDs={assistantIDs} startPage={startPage} />
|
||||
</>
|
||||
@@ -417,10 +418,12 @@ const ChatAI = memo(
|
||||
)}
|
||||
|
||||
{!activeChat?._id && !visibleStartPage && (
|
||||
<PrevSuggestion sendMessage={init} />
|
||||
<PrevSuggestion
|
||||
sendMessage={(message) => {
|
||||
init({ message });
|
||||
}}
|
||||
/>
|
||||
)}
|
||||
|
||||
{/* <ReadAloud /> */}
|
||||
</div>
|
||||
</>
|
||||
);
|
||||
|
||||
@@ -3,17 +3,17 @@ import { useTranslation } from "react-i18next";
|
||||
|
||||
import { ChatMessage } from "@/components/ChatMessage";
|
||||
import { Greetings } from "./Greetings";
|
||||
import FileList from "@/components/Assistant/FileList";
|
||||
import AttachmentList from "@/components/Assistant/AttachmentList";
|
||||
import { useChatScroll } from "@/hooks/useChatScroll";
|
||||
import { useChatStore } from "@/stores/chatStore";
|
||||
|
||||
import type { Chat, IChunkData } from "@/types/chat";
|
||||
import { useConnectStore } from "@/stores/connectStore";
|
||||
import SessionFile from "./SessionFile";
|
||||
// import SessionFile from "./SessionFile";
|
||||
import ScrollToBottom from "@/components/Common/ScrollToBottom";
|
||||
import { useChatStore } from "@/stores/chatStore";
|
||||
|
||||
interface ChatContentProps {
|
||||
activeChat?: Chat;
|
||||
curChatEnd: boolean;
|
||||
query_intent?: IChunkData;
|
||||
tools?: IChunkData;
|
||||
fetch_source?: IChunkData;
|
||||
@@ -26,11 +26,12 @@ interface ChatContentProps {
|
||||
Question: string;
|
||||
handleSendMessage: (content: string, newChat?: Chat) => void;
|
||||
getFileUrl: (path: string) => string;
|
||||
formatUrl?: (data: any) => string;
|
||||
curIdRef: React.MutableRefObject<string>;
|
||||
}
|
||||
|
||||
export const ChatContent = ({
|
||||
activeChat,
|
||||
curChatEnd,
|
||||
query_intent,
|
||||
tools,
|
||||
fetch_source,
|
||||
@@ -42,16 +43,14 @@ export const ChatContent = ({
|
||||
timedoutShow,
|
||||
Question,
|
||||
handleSendMessage,
|
||||
getFileUrl,
|
||||
formatUrl,
|
||||
}: ChatContentProps) => {
|
||||
const sessionId = useConnectStore((state) => state.currentSessionId);
|
||||
const setCurrentSessionId = useConnectStore((state) => {
|
||||
return state.setCurrentSessionId;
|
||||
});
|
||||
const { currentSessionId, setCurrentSessionId } = useConnectStore();
|
||||
|
||||
const { t } = useTranslation();
|
||||
|
||||
const uploadFiles = useChatStore((state) => state.uploadFiles);
|
||||
const { uploadAttachments } = useChatStore();
|
||||
|
||||
const messagesEndRef = useRef<HTMLDivElement>(null);
|
||||
|
||||
const { scrollToBottom } = useChatScroll(messagesEndRef);
|
||||
@@ -59,6 +58,8 @@ export const ChatContent = ({
|
||||
const [isAtBottom, setIsAtBottom] = useState(true);
|
||||
const visibleStartPage = useConnectStore((state) => state.visibleStartPage);
|
||||
|
||||
const curChatEnd = useChatStore((state) => state.curChatEnd);
|
||||
|
||||
useEffect(() => {
|
||||
setIsAtBottom(true);
|
||||
setCurrentSessionId(activeChat?._id);
|
||||
@@ -67,7 +68,7 @@ export const ChatContent = ({
|
||||
useEffect(() => {
|
||||
scrollToBottom();
|
||||
}, [
|
||||
activeChat?.id,
|
||||
activeChat?._id,
|
||||
query_intent?.message_chunk,
|
||||
fetch_source?.message_chunk,
|
||||
pick_source?.message_chunk,
|
||||
@@ -121,7 +122,7 @@ export const ChatContent = ({
|
||||
deep_read ||
|
||||
think ||
|
||||
response) &&
|
||||
activeChat?._id ? (
|
||||
activeChat?._source?.id ? (
|
||||
<ChatMessage
|
||||
key={"current"}
|
||||
message={{
|
||||
@@ -144,6 +145,7 @@ export const ChatContent = ({
|
||||
think={think}
|
||||
response={response}
|
||||
loadingStep={loadingStep}
|
||||
formatUrl={formatUrl}
|
||||
/>
|
||||
) : null}
|
||||
|
||||
@@ -165,13 +167,13 @@ export const ChatContent = ({
|
||||
<div ref={messagesEndRef} />
|
||||
</div>
|
||||
|
||||
{sessionId && uploadFiles.length > 0 && (
|
||||
<div key={sessionId} className="max-h-[120px] overflow-auto p-2">
|
||||
<FileList sessionId={sessionId} getFileUrl={getFileUrl} />
|
||||
{uploadAttachments.length > 0 && (
|
||||
<div key={currentSessionId} className="max-h-[120px] overflow-auto p-2">
|
||||
<AttachmentList />
|
||||
</div>
|
||||
)}
|
||||
|
||||
{sessionId && <SessionFile sessionId={sessionId} />}
|
||||
{/* {currentSessionId && <SessionFile sessionId={currentSessionId} />} */}
|
||||
|
||||
<ScrollToBottom scrollRef={scrollRef} isAtBottom={isAtBottom} />
|
||||
</div>
|
||||
|
||||
@@ -7,14 +7,12 @@ import PinIcon from "@/icons/Pin";
|
||||
import WindowsFullIcon from "@/icons/WindowsFull";
|
||||
import { useAppStore } from "@/stores/appStore";
|
||||
import type { Chat } from "@/types/chat";
|
||||
import platformAdapter from "@/utils/platformAdapter";
|
||||
import VisibleKey from "../Common/VisibleKey";
|
||||
import { useShortcutsStore } from "@/stores/shortcutsStore";
|
||||
import { HISTORY_PANEL_ID } from "@/constants";
|
||||
import { AssistantList } from "./AssistantList";
|
||||
import { ServerList } from "./ServerList";
|
||||
import { Server } from "@/types/server"
|
||||
|
||||
import { useTogglePin } from "@/hooks/useTogglePin";
|
||||
|
||||
interface ChatHeaderProps {
|
||||
clearChat: () => void;
|
||||
@@ -22,7 +20,6 @@ interface ChatHeaderProps {
|
||||
setIsSidebarOpen: () => void;
|
||||
isSidebarOpen: boolean;
|
||||
activeChat: Chat | undefined;
|
||||
reconnect: (server?: Server) => void;
|
||||
isChatPage?: boolean;
|
||||
showChatHistory?: boolean;
|
||||
assistantIDs?: string[];
|
||||
@@ -34,37 +31,15 @@ export function ChatHeader({
|
||||
isSidebarOpen,
|
||||
setIsSidebarOpen,
|
||||
activeChat,
|
||||
reconnect,
|
||||
isChatPage = false,
|
||||
showChatHistory = true,
|
||||
assistantIDs,
|
||||
}: ChatHeaderProps) {
|
||||
const isPinned = useAppStore((state) => state.isPinned);
|
||||
const setIsPinned = useAppStore((state) => state.setIsPinned);
|
||||
const { isTauri } = useAppStore();
|
||||
const { isPinned, togglePin } = useTogglePin();
|
||||
|
||||
const isTauri = useAppStore((state) => state.isTauri);
|
||||
const historicalRecords = useShortcutsStore((state) => {
|
||||
return state.historicalRecords;
|
||||
});
|
||||
const newSession = useShortcutsStore((state) => {
|
||||
return state.newSession;
|
||||
});
|
||||
const fixedWindow = useShortcutsStore((state) => {
|
||||
return state.fixedWindow;
|
||||
});
|
||||
|
||||
const external = useShortcutsStore((state) => state.external);
|
||||
|
||||
const togglePin = async () => {
|
||||
try {
|
||||
const newPinned = !isPinned;
|
||||
await platformAdapter.setAlwaysOnTop(newPinned);
|
||||
setIsPinned(newPinned);
|
||||
} catch (err) {
|
||||
console.error("Failed to toggle window pin state:", err);
|
||||
setIsPinned(isPinned);
|
||||
}
|
||||
};
|
||||
const { historicalRecords, newSession, fixedWindow, external } =
|
||||
useShortcutsStore();
|
||||
|
||||
return (
|
||||
<header
|
||||
@@ -98,7 +73,11 @@ export function ChatHeader({
|
||||
onClick={clearChat}
|
||||
className="p-2 py-1 rounded-lg hover:bg-gray-100 dark:hover:bg-gray-800"
|
||||
>
|
||||
<VisibleKey shortcutClassName="top-2.5" shortcut={newSession} onKeyPress={clearChat}>
|
||||
<VisibleKey
|
||||
shortcutClassName="top-2.5"
|
||||
shortcut={newSession}
|
||||
onKeyPress={clearChat}
|
||||
>
|
||||
<MessageSquarePlus className="h-4 w-4 relative top-0.5" />
|
||||
</VisibleKey>
|
||||
</button>
|
||||
@@ -110,7 +89,7 @@ export function ChatHeader({
|
||||
activeChat?._source?.message ||
|
||||
activeChat?._id}
|
||||
</h2>
|
||||
|
||||
|
||||
{isTauri ? (
|
||||
<div className="flex items-center gap-2">
|
||||
<button
|
||||
@@ -124,10 +103,7 @@ export function ChatHeader({
|
||||
</VisibleKey>
|
||||
</button>
|
||||
|
||||
<ServerList
|
||||
reconnect={reconnect}
|
||||
clearChat={clearChat}
|
||||
/>
|
||||
<ServerList clearChat={clearChat} />
|
||||
|
||||
{isChatPage ? null : (
|
||||
<button className="inline-flex" onClick={onOpenChatAI}>
|
||||
|
||||
@@ -1,118 +0,0 @@
|
||||
import { useEffect, useMemo } from "react";
|
||||
import { filesize } from "filesize";
|
||||
import { X } from "lucide-react";
|
||||
import { useAsyncEffect } from "ahooks";
|
||||
import { useTranslation } from "react-i18next";
|
||||
|
||||
import { useChatStore } from "@/stores/chatStore";
|
||||
import { useConnectStore } from "@/stores/connectStore";
|
||||
import FileIcon from "../Common/Icons/FileIcon";
|
||||
import platformAdapter from "@/utils/platformAdapter";
|
||||
|
||||
interface FileListProps {
|
||||
sessionId: string;
|
||||
getFileUrl: (path: string) => string;
|
||||
}
|
||||
|
||||
const FileList = (props: FileListProps) => {
|
||||
const { sessionId } = props;
|
||||
const { t } = useTranslation();
|
||||
const uploadFiles = useChatStore((state) => state.uploadFiles);
|
||||
const setUploadFiles = useChatStore((state) => state.setUploadFiles);
|
||||
const currentService = useConnectStore((state) => state.currentService);
|
||||
|
||||
const serverId = useMemo(() => {
|
||||
return currentService.id;
|
||||
}, [currentService]);
|
||||
|
||||
useEffect(() => {
|
||||
return () => {
|
||||
setUploadFiles([]);
|
||||
};
|
||||
}, []);
|
||||
|
||||
useAsyncEffect(async () => {
|
||||
if (uploadFiles.length === 0) return;
|
||||
|
||||
for await (const item of uploadFiles) {
|
||||
const { uploaded, path } = item;
|
||||
|
||||
if (uploaded) continue;
|
||||
|
||||
const attachmentIds: any = await platformAdapter.commands(
|
||||
"upload_attachment",
|
||||
{
|
||||
serverId,
|
||||
sessionId,
|
||||
filePaths: [path],
|
||||
}
|
||||
);
|
||||
|
||||
if (!attachmentIds) continue;
|
||||
|
||||
Object.assign(item, {
|
||||
uploaded: true,
|
||||
attachmentId: attachmentIds[0],
|
||||
});
|
||||
|
||||
setUploadFiles(uploadFiles);
|
||||
}
|
||||
}, [uploadFiles]);
|
||||
|
||||
const deleteFile = async (id: string, attachmentId: string) => {
|
||||
setUploadFiles(uploadFiles.filter((file) => file.id !== id));
|
||||
|
||||
platformAdapter.commands("delete_attachment", {
|
||||
serverId,
|
||||
id: attachmentId,
|
||||
});
|
||||
};
|
||||
|
||||
return (
|
||||
<div className="flex flex-wrap gap-y-2 -mx-1 text-sm">
|
||||
{uploadFiles.map((file) => {
|
||||
const { id, name, extname, size, uploaded, attachmentId } = file;
|
||||
|
||||
return (
|
||||
<div key={id} className="w-1/3 px-1">
|
||||
<div className="relative group flex items-center gap-1 p-1 rounded-[4px] bg-[#dedede] dark:bg-[#202126]">
|
||||
{attachmentId && (
|
||||
<div
|
||||
className="absolute flex justify-center items-center size-[14px] bg-red-600 top-0 right-0 rounded-full cursor-pointer translate-x-[5px] -translate-y-[5px] transition opacity-0 group-hover:opacity-100 "
|
||||
onClick={() => {
|
||||
deleteFile(id, attachmentId);
|
||||
}}
|
||||
>
|
||||
<X className="size-[10px] text-white" />
|
||||
</div>
|
||||
)}
|
||||
|
||||
<FileIcon extname={extname} />
|
||||
|
||||
<div className="flex flex-col justify-between overflow-hidden">
|
||||
<div className="truncate text-[#333333] dark:text-[#D8D8D8]">
|
||||
{name}
|
||||
</div>
|
||||
|
||||
<div className="text-xs text-[#999999]">
|
||||
{uploaded ? (
|
||||
<div className="flex gap-2">
|
||||
{extname && <span>{extname}</span>}
|
||||
<span>
|
||||
{filesize(size, { standard: "jedec", spacer: "" })}
|
||||
</span>
|
||||
</div>
|
||||
) : (
|
||||
<span>{t("assistant.fileList.uploading")}</span>
|
||||
)}
|
||||
</div>
|
||||
</div>
|
||||
</div>
|
||||
</div>
|
||||
);
|
||||
})}
|
||||
</div>
|
||||
);
|
||||
};
|
||||
|
||||
export default FileList;
|
||||
@@ -17,26 +17,30 @@ import { Server as IServer } from "@/types/server";
|
||||
import StatusIndicator from "@/components/Cloud/StatusIndicator";
|
||||
import { useAuthStore } from "@/stores/authStore";
|
||||
import { useSearchStore } from "@/stores/searchStore";
|
||||
import { useServers } from "@/hooks/useServers";
|
||||
import { getCurrentWindowService, setCurrentWindowService } from "@/commands";
|
||||
|
||||
interface ServerListProps {
|
||||
reconnect: (server?: IServer) => void;
|
||||
clearChat: () => void;
|
||||
}
|
||||
|
||||
export function ServerList({ reconnect, clearChat }: ServerListProps) {
|
||||
export function ServerList({ clearChat }: ServerListProps) {
|
||||
const { t } = useTranslation();
|
||||
|
||||
const isCurrentLogin = useAuthStore((state) => state.isCurrentLogin);
|
||||
const setIsCurrentLogin = useAuthStore((state) => state.setIsCurrentLogin);
|
||||
const serviceList = useShortcutsStore((state) => state.serviceList);
|
||||
const serviceListShortcut = useShortcutsStore(
|
||||
(state) => state.serviceListShortcut
|
||||
);
|
||||
const setEndpoint = useAppStore((state) => state.setEndpoint);
|
||||
const setCurrentService = useConnectStore((state) => state.setCurrentService);
|
||||
const isTauri = useAppStore((state) => state.isTauri);
|
||||
const currentService = useConnectStore((state) => state.currentService);
|
||||
const cloudSelectService = useConnectStore((state) => {
|
||||
return state.cloudSelectService;
|
||||
});
|
||||
|
||||
const { setMessages } = useChatStore();
|
||||
|
||||
const [serverList, setServerList] = useState<IServer[]>([]);
|
||||
const [list, setList] = useState<IServer[]>([]);
|
||||
const [isRefreshing, setIsRefreshing] = useState(false);
|
||||
const [highlightId, setHighlightId] = useState<string>("");
|
||||
|
||||
@@ -50,39 +54,49 @@ export function ServerList({ reconnect, clearChat }: ServerListProps) {
|
||||
const popoverRef = useRef<HTMLDivElement>(null);
|
||||
const serverListButtonRef = useRef<HTMLButtonElement>(null);
|
||||
|
||||
const fetchServers = useCallback(
|
||||
async (resetSelection: boolean) => {
|
||||
platformAdapter
|
||||
.commands("list_coco_servers")
|
||||
.then((res: any) => {
|
||||
const enabledServers = (res as IServer[]).filter(
|
||||
(server) => server.enabled && server.available
|
||||
);
|
||||
//console.log("list_coco_servers", enabledServers);
|
||||
setServerList(enabledServers);
|
||||
const { refreshServerList } = useServers();
|
||||
const serverList = useConnectStore((state) => state.serverList);
|
||||
|
||||
if (resetSelection && enabledServers.length > 0) {
|
||||
const currentServiceExists = enabledServers.find(
|
||||
(server) => server.id === currentService?.id
|
||||
);
|
||||
const switchServer = async (server: IServer) => {
|
||||
if (!server) return;
|
||||
try {
|
||||
// Switch UI first, then switch server connection
|
||||
await setCurrentWindowService(server);
|
||||
setEndpoint(server.endpoint);
|
||||
setMessages(""); // Clear previous messages
|
||||
clearChat();
|
||||
//
|
||||
if (!server.public && !server.profile) {
|
||||
setIsCurrentLogin(false);
|
||||
return;
|
||||
}
|
||||
//
|
||||
setIsCurrentLogin(true);
|
||||
} catch (error) {
|
||||
console.error("switchServer:", error);
|
||||
}
|
||||
};
|
||||
|
||||
if (currentServiceExists) {
|
||||
switchServer(currentServiceExists);
|
||||
} else {
|
||||
switchServer(enabledServers[enabledServers.length - 1]);
|
||||
}
|
||||
}
|
||||
})
|
||||
.catch((err: any) => {
|
||||
console.error(err);
|
||||
});
|
||||
},
|
||||
[currentService?.id]
|
||||
);
|
||||
const fetchServers = useCallback(async () => {
|
||||
const service = await getCurrentWindowService();
|
||||
|
||||
useEffect(() => {
|
||||
fetchServers(true);
|
||||
}, [currentService?.enabled]);
|
||||
const enabledServers = serverList.filter(
|
||||
(server) => server.enabled && server.available
|
||||
);
|
||||
setList(enabledServers);
|
||||
|
||||
if (enabledServers.length > 0) {
|
||||
const serviceExists = enabledServers.find((server) => {
|
||||
return server.id === service?.id;
|
||||
});
|
||||
|
||||
if (serviceExists) {
|
||||
switchServer(serviceExists);
|
||||
} else {
|
||||
switchServer(enabledServers[enabledServers.length - 1]);
|
||||
}
|
||||
}
|
||||
}, [currentService?.id, cloudSelectService?.id, serverList]);
|
||||
|
||||
useEffect(() => {
|
||||
if (!askAiServerId || serverList.length === 0) return;
|
||||
@@ -100,25 +114,12 @@ export function ServerList({ reconnect, clearChat }: ServerListProps) {
|
||||
useEffect(() => {
|
||||
if (!isTauri) return;
|
||||
|
||||
fetchServers(true);
|
||||
|
||||
const unlisten = platformAdapter.listenEvent("login_or_logout", (event) => {
|
||||
//console.log("Login or Logout:", currentService, event.payload);
|
||||
if (event.payload !== isCurrentLogin) {
|
||||
setIsCurrentLogin(!!event.payload);
|
||||
}
|
||||
fetchServers(true);
|
||||
});
|
||||
|
||||
return () => {
|
||||
// Cleanup logic if needed
|
||||
unlisten.then((fn) => fn());
|
||||
};
|
||||
}, []);
|
||||
fetchServers();
|
||||
}, [serverList]);
|
||||
|
||||
const handleRefresh = async () => {
|
||||
setIsRefreshing(true);
|
||||
await fetchServers(false);
|
||||
await refreshServerList();
|
||||
setTimeout(() => setIsRefreshing(false), 1000);
|
||||
};
|
||||
|
||||
@@ -126,62 +127,43 @@ export function ServerList({ reconnect, clearChat }: ServerListProps) {
|
||||
platformAdapter.emitEvent("open_settings", "connect");
|
||||
};
|
||||
|
||||
const switchServer = async (server: IServer) => {
|
||||
if (!server) return;
|
||||
try {
|
||||
// Switch UI first, then switch server connection
|
||||
setCurrentService(server);
|
||||
setEndpoint(server.endpoint);
|
||||
setMessages(""); // Clear previous messages
|
||||
clearChat();
|
||||
//
|
||||
if (!server.public && !server.profile) {
|
||||
setIsCurrentLogin(false);
|
||||
return;
|
||||
useKeyPress(
|
||||
["uparrow", "downarrow", "enter"],
|
||||
async (event, key) => {
|
||||
const service = await getCurrentWindowService();
|
||||
const isClose = isNil(serverListButtonRef.current?.dataset["open"]);
|
||||
const length = serverList.length;
|
||||
|
||||
if (isClose || length <= 1) return;
|
||||
|
||||
event.stopPropagation();
|
||||
event.preventDefault();
|
||||
|
||||
const currentIndex = serverList.findIndex((server) => {
|
||||
return server.id === (highlightId === "" ? service?.id : highlightId);
|
||||
});
|
||||
|
||||
let nextIndex = currentIndex;
|
||||
|
||||
if (key === "uparrow") {
|
||||
nextIndex = currentIndex > 0 ? currentIndex - 1 : length - 1;
|
||||
setHighlightId(serverList[nextIndex].id);
|
||||
} else if (key === "downarrow") {
|
||||
nextIndex = currentIndex < serverList.length - 1 ? currentIndex + 1 : 0;
|
||||
setHighlightId(serverList[nextIndex].id);
|
||||
} else if (key === "enter" && currentIndex >= 0) {
|
||||
if (document.activeElement instanceof HTMLTextAreaElement) return;
|
||||
const selectedServer = serverList[currentIndex];
|
||||
if (selectedServer) {
|
||||
switchServer(selectedServer);
|
||||
serverListButtonRef.current?.click();
|
||||
}
|
||||
}
|
||||
//
|
||||
setIsCurrentLogin(true);
|
||||
// The Rust backend will automatically disconnect,
|
||||
// so we don't need to handle disconnection on the frontend
|
||||
// src-tauri/src/server/websocket.rs
|
||||
reconnect && reconnect(server);
|
||||
} catch (error) {
|
||||
console.error("switchServer:", error);
|
||||
},
|
||||
{
|
||||
target: popoverRef,
|
||||
}
|
||||
};
|
||||
|
||||
useKeyPress(["uparrow", "downarrow", "enter"], (event, key) => {
|
||||
const isClose = isNil(serverListButtonRef.current?.dataset["open"]);
|
||||
const length = serverList.length;
|
||||
|
||||
if (isClose || length <= 1) return;
|
||||
|
||||
event.stopPropagation();
|
||||
event.preventDefault();
|
||||
|
||||
const currentIndex = serverList.findIndex((server) => {
|
||||
return server.id === (highlightId === '' ? currentService?.id : highlightId);
|
||||
});
|
||||
|
||||
let nextIndex = currentIndex;
|
||||
|
||||
if (key === "uparrow") {
|
||||
nextIndex = currentIndex > 0 ? currentIndex - 1 : length - 1;
|
||||
setHighlightId(serverList[nextIndex].id);
|
||||
} else if (key === "downarrow") {
|
||||
nextIndex = currentIndex < serverList.length - 1 ? currentIndex + 1 : 0;
|
||||
setHighlightId(serverList[nextIndex].id);
|
||||
} else if (key === "enter" && currentIndex >= 0) {
|
||||
if (document.activeElement instanceof HTMLTextAreaElement) return;
|
||||
const selectedServer = serverList[currentIndex];
|
||||
if (selectedServer) {
|
||||
switchServer(selectedServer);
|
||||
serverListButtonRef.current?.click();
|
||||
}
|
||||
}
|
||||
}, {
|
||||
target: popoverRef,
|
||||
});
|
||||
);
|
||||
|
||||
const handleMouseMove = useCallback(() => {
|
||||
setHighlightId("");
|
||||
@@ -191,7 +173,7 @@ export function ServerList({ reconnect, clearChat }: ServerListProps) {
|
||||
<Popover ref={popoverRef} className="relative">
|
||||
<PopoverButton ref={serverListButtonRef} className="flex items-center">
|
||||
<VisibleKey
|
||||
shortcut={serviceList}
|
||||
shortcut={serviceListShortcut}
|
||||
onKeyPress={() => {
|
||||
serverListButtonRef.current?.click();
|
||||
}}
|
||||
@@ -202,7 +184,8 @@ export function ServerList({ reconnect, clearChat }: ServerListProps) {
|
||||
|
||||
<PopoverPanel
|
||||
onMouseMove={handleMouseMove}
|
||||
className="absolute right-0 z-10 mt-2 min-w-[240px] bg-white dark:bg-[#202126] rounded-lg shadow-lg border border-gray-200 dark:border-gray-700">
|
||||
className="absolute right-0 z-10 mt-2 min-w-[240px] bg-white dark:bg-[#202126] rounded-lg shadow-lg border border-gray-200 dark:border-gray-700"
|
||||
>
|
||||
<div className="p-3">
|
||||
<div className="flex items-center justify-between mb-3 whitespace-nowrap">
|
||||
<h3 className="text-sm font-medium text-gray-900 dark:text-gray-100">
|
||||
@@ -224,23 +207,26 @@ export function ServerList({ reconnect, clearChat }: ServerListProps) {
|
||||
>
|
||||
<VisibleKey shortcut="R" onKeyPress={handleRefresh}>
|
||||
<RefreshCw
|
||||
className={`h-4 w-4 text-[#0287FF] transition-transform duration-1000 ${isRefreshing ? "animate-spin" : ""
|
||||
}`}
|
||||
className={`h-4 w-4 text-[#0287FF] transition-transform duration-1000 ${
|
||||
isRefreshing ? "animate-spin" : ""
|
||||
}`}
|
||||
/>
|
||||
</VisibleKey>
|
||||
</button>
|
||||
</div>
|
||||
</div>
|
||||
<div className="space-y-1">
|
||||
{serverList.length > 0 ? (
|
||||
serverList.map((server) => (
|
||||
{list.length > 0 ? (
|
||||
list.map((server) => (
|
||||
<div
|
||||
key={server.id}
|
||||
onClick={() => switchServer(server)}
|
||||
className={`w-full flex items-center justify-between gap-1 p-2 rounded-lg transition-colors whitespace-nowrap
|
||||
${currentService?.id === server.id || highlightId === server.id
|
||||
? "bg-gray-100 dark:bg-gray-800"
|
||||
: "hover:bg-gray-50 dark:hover:bg-gray-800/50"
|
||||
${
|
||||
currentService?.id === server.id ||
|
||||
highlightId === server.id
|
||||
? "bg-gray-100 dark:bg-gray-800"
|
||||
: "hover:bg-gray-50 dark:hover:bg-gray-800/50"
|
||||
}`}
|
||||
>
|
||||
<div className="flex items-center gap-2 overflow-hidden min-w-0">
|
||||
|
||||
@@ -1,174 +1,176 @@
|
||||
import clsx from "clsx";
|
||||
import {filesize} from "filesize";
|
||||
import {Files, Trash2, X} from "lucide-react";
|
||||
import {useEffect, useMemo, useState} from "react";
|
||||
import {useTranslation} from "react-i18next";
|
||||
import { Files, Trash2, X } from "lucide-react";
|
||||
import { useEffect, useMemo, useState } from "react";
|
||||
import { useTranslation } from "react-i18next";
|
||||
|
||||
import {useConnectStore} from "@/stores/connectStore";
|
||||
import { useConnectStore } from "@/stores/connectStore";
|
||||
import Checkbox from "@/components/Common/Checkbox";
|
||||
import FileIcon from "@/components/Common/Icons/FileIcon";
|
||||
import {AttachmentHit} from "@/types/commands";
|
||||
import {useAppStore} from "@/stores/appStore";
|
||||
import { AttachmentHit } from "@/types/commands";
|
||||
import { useAppStore } from "@/stores/appStore";
|
||||
import platformAdapter from "@/utils/platformAdapter";
|
||||
import FileIcon from "../Common/Icons/FileIcon";
|
||||
import { filesize } from "@/utils";
|
||||
|
||||
interface SessionFileProps {
|
||||
sessionId: string;
|
||||
sessionId: string;
|
||||
}
|
||||
|
||||
const SessionFile = (props: SessionFileProps) => {
|
||||
const {sessionId} = props;
|
||||
const {t} = useTranslation();
|
||||
const { sessionId } = props;
|
||||
const { t } = useTranslation();
|
||||
|
||||
const isTauri = useAppStore((state) => state.isTauri);
|
||||
const currentService = useConnectStore((state) => state.currentService);
|
||||
const [uploadedFiles, setUploadedFiles] = useState<AttachmentHit[]>([]);
|
||||
const [visible, setVisible] = useState(false);
|
||||
const [checkList, setCheckList] = useState<string[]>([]);
|
||||
const isTauri = useAppStore((state) => state.isTauri);
|
||||
const currentService = useConnectStore((state) => state.currentService);
|
||||
const [uploadedFiles, setUploadedFiles] = useState<AttachmentHit[]>([]);
|
||||
const [visible, setVisible] = useState(false);
|
||||
const [checkList, setCheckList] = useState<string[]>([]);
|
||||
|
||||
const serverId = useMemo(() => {
|
||||
return currentService.id;
|
||||
}, [currentService]);
|
||||
const serverId = useMemo(() => {
|
||||
return currentService.id;
|
||||
}, [currentService]);
|
||||
|
||||
useEffect(() => {
|
||||
setUploadedFiles([]);
|
||||
useEffect(() => {
|
||||
setUploadedFiles([]);
|
||||
|
||||
getUploadedFiles();
|
||||
}, [sessionId]);
|
||||
getUploadedFiles();
|
||||
}, [sessionId]);
|
||||
|
||||
const getUploadedFiles = async () => {
|
||||
if (isTauri) {
|
||||
const response: any = await platformAdapter.commands("get_attachment", {
|
||||
serverId,
|
||||
sessionId,
|
||||
});
|
||||
const getUploadedFiles = async () => {
|
||||
if (isTauri) {
|
||||
console.log("sessionId", sessionId);
|
||||
|
||||
setUploadedFiles(response?.hits?.hits ?? []);
|
||||
} else {
|
||||
const response: any = await platformAdapter.commands(
|
||||
"get_attachment_by_ids",
|
||||
{
|
||||
serverId,
|
||||
sessionId,
|
||||
}
|
||||
};
|
||||
);
|
||||
|
||||
const handleDelete = async (id: string) => {
|
||||
let result;
|
||||
if (isTauri) {
|
||||
result = await platformAdapter.commands("delete_attachment", {
|
||||
serverId,
|
||||
id,
|
||||
});
|
||||
} else {
|
||||
}
|
||||
if (!result) return;
|
||||
setUploadedFiles(response?.hits?.hits ?? []);
|
||||
} else {
|
||||
}
|
||||
};
|
||||
|
||||
getUploadedFiles();
|
||||
};
|
||||
const handleDelete = async (id: string) => {
|
||||
let result;
|
||||
if (isTauri) {
|
||||
result = await platformAdapter.commands("delete_attachment", {
|
||||
serverId,
|
||||
id,
|
||||
});
|
||||
} else {
|
||||
}
|
||||
if (!result) return;
|
||||
|
||||
const handleCheckAll = (checked: boolean) => {
|
||||
if (checked) {
|
||||
setCheckList(uploadedFiles?.map((item) => item?._source?.id));
|
||||
} else {
|
||||
setCheckList([]);
|
||||
}
|
||||
};
|
||||
getUploadedFiles();
|
||||
};
|
||||
|
||||
const handleCheck = (checked: boolean, id: string) => {
|
||||
if (checked) {
|
||||
setCheckList([...checkList, id]);
|
||||
} else {
|
||||
setCheckList(checkList.filter((item) => item !== id));
|
||||
}
|
||||
};
|
||||
const handleCheckAll = (checked: boolean) => {
|
||||
if (checked) {
|
||||
setCheckList(uploadedFiles?.map((item) => item?._source?.id));
|
||||
} else {
|
||||
setCheckList([]);
|
||||
}
|
||||
};
|
||||
|
||||
return (
|
||||
<div
|
||||
className={clsx("select-none", {
|
||||
hidden: uploadedFiles?.length === 0,
|
||||
})}
|
||||
>
|
||||
<div
|
||||
className="absolute top-4 right-4 flex items-center justify-center size-8 rounded-lg bg-[#0072FF] cursor-pointer"
|
||||
onClick={() => {
|
||||
setVisible(true);
|
||||
}}
|
||||
>
|
||||
<Files className="size-5 text-white"/>
|
||||
const handleCheck = (checked: boolean, id: string) => {
|
||||
if (checked) {
|
||||
setCheckList([...checkList, id]);
|
||||
} else {
|
||||
setCheckList(checkList.filter((item) => item !== id));
|
||||
}
|
||||
};
|
||||
|
||||
<div
|
||||
className="absolute -top-2 -right-2 flex items-center justify-center min-w-4 h-4 px-1 text-white text-xs rounded-full bg-[#3DB954]">
|
||||
{uploadedFiles?.length}
|
||||
</div>
|
||||
</div>
|
||||
return (
|
||||
<div
|
||||
className={clsx("select-none", {
|
||||
hidden: uploadedFiles?.length === 0,
|
||||
})}
|
||||
>
|
||||
<div
|
||||
className="absolute top-4 right-4 flex items-center justify-center size-8 rounded-lg bg-[#0072FF] cursor-pointer"
|
||||
onClick={() => {
|
||||
setVisible(true);
|
||||
}}
|
||||
>
|
||||
<Files className="size-5 text-white" />
|
||||
|
||||
<div
|
||||
className={clsx(
|
||||
"absolute inset-0 flex flex-col p-4 bg-white dark:bg-black",
|
||||
{
|
||||
hidden: !visible,
|
||||
}
|
||||
)}
|
||||
>
|
||||
<X
|
||||
className="absolute top-4 right-4 size-5 text-[#999] cursor-pointer"
|
||||
onClick={() => {
|
||||
setVisible(false);
|
||||
}}
|
||||
/>
|
||||
<div className="absolute -top-2 -right-2 flex items-center justify-center min-w-4 h-4 px-1 text-white text-xs rounded-full bg-[#3DB954]">
|
||||
{uploadedFiles?.length}
|
||||
</div>
|
||||
</div>
|
||||
|
||||
<div className="mb-2 text-sm text-[#333] dark:text-[#D8D8D8] font-bold">
|
||||
{t("assistant.sessionFile.title")}
|
||||
</div>
|
||||
<div className="flex items-center justify-between pr-2">
|
||||
<div
|
||||
className={clsx(
|
||||
"absolute inset-0 flex flex-col p-4 bg-white dark:bg-black",
|
||||
{
|
||||
hidden: !visible,
|
||||
}
|
||||
)}
|
||||
>
|
||||
<X
|
||||
className="absolute top-4 right-4 size-5 text-[#999] cursor-pointer"
|
||||
onClick={() => {
|
||||
setVisible(false);
|
||||
}}
|
||||
/>
|
||||
|
||||
<div className="mb-2 text-sm text-[#333] dark:text-[#D8D8D8] font-bold">
|
||||
{t("assistant.sessionFile.title")}
|
||||
</div>
|
||||
<div className="flex items-center justify-between pr-2">
|
||||
<span className="text-sm text-[#999]">
|
||||
{t("assistant.sessionFile.description")}
|
||||
</span>
|
||||
|
||||
<Checkbox
|
||||
indeterminate
|
||||
checked={checkList?.length === uploadedFiles?.length}
|
||||
onChange={handleCheckAll}
|
||||
/>
|
||||
</div>
|
||||
<ul className="flex-1 overflow-auto flex flex-col gap-2 mt-6 p-0">
|
||||
{uploadedFiles?.map((item) => {
|
||||
const {id, name, icon, size} = item._source;
|
||||
|
||||
return (
|
||||
<li
|
||||
key={id}
|
||||
className="flex items-center justify-between min-h-12 px-2 rounded-[4px] bg-[#ededed] dark:bg-[#202126]"
|
||||
>
|
||||
<div className="flex items-center gap-2">
|
||||
<FileIcon extname={icon}/>
|
||||
|
||||
<div>
|
||||
<div className="text-sm leading-4 text-[#333] dark:text-[#D8D8D8]">
|
||||
{name}
|
||||
</div>
|
||||
<div className="text-xs text-[#999]">
|
||||
<span>{icon}</span>
|
||||
<span className="pl-2">
|
||||
{filesize(size, {standard: "jedec", spacer: ""})}
|
||||
</span>
|
||||
</div>
|
||||
</div>
|
||||
</div>
|
||||
|
||||
<div className="flex items-center gap-2">
|
||||
<Trash2
|
||||
className="size-4 text-[#999] cursor-pointer"
|
||||
onClick={() => handleDelete(id)}
|
||||
/>
|
||||
|
||||
<Checkbox
|
||||
checked={checkList.includes(id)}
|
||||
onChange={(checked) => handleCheck(checked, id)}
|
||||
/>
|
||||
</div>
|
||||
</li>
|
||||
);
|
||||
})}
|
||||
</ul>
|
||||
</div>
|
||||
<Checkbox
|
||||
indeterminate
|
||||
checked={checkList?.length === uploadedFiles?.length}
|
||||
onChange={handleCheckAll}
|
||||
/>
|
||||
</div>
|
||||
);
|
||||
<ul className="flex-1 overflow-auto flex flex-col gap-2 mt-6 p-0">
|
||||
{uploadedFiles?.map((item) => {
|
||||
const { id, name, icon, size } = item._source;
|
||||
|
||||
return (
|
||||
<li
|
||||
key={id}
|
||||
className="flex items-center justify-between min-h-12 px-2 rounded-[4px] bg-[#ededed] dark:bg-[#202126]"
|
||||
>
|
||||
<div className="flex items-center gap-1">
|
||||
<FileIcon path={name} />
|
||||
|
||||
<div>
|
||||
<div className="text-sm leading-4 text-[#333] dark:text-[#D8D8D8]">
|
||||
{name}
|
||||
</div>
|
||||
<div className="text-xs text-[#999]">
|
||||
{icon && <span className="pr-2">{icon}</span>}
|
||||
<span>{filesize(size)}</span>
|
||||
</div>
|
||||
</div>
|
||||
</div>
|
||||
|
||||
<div className="flex items-center gap-2">
|
||||
<Trash2
|
||||
className="size-4 text-[#999] cursor-pointer"
|
||||
onClick={() => handleDelete(id)}
|
||||
/>
|
||||
|
||||
<Checkbox
|
||||
checked={checkList.includes(id)}
|
||||
onChange={(checked) => handleCheck(checked, id)}
|
||||
/>
|
||||
</div>
|
||||
</li>
|
||||
);
|
||||
})}
|
||||
</ul>
|
||||
</div>
|
||||
</div>
|
||||
);
|
||||
};
|
||||
|
||||
export default SessionFile;
|
||||
|
||||
@@ -120,7 +120,7 @@ const Splash = ({ assistantIDs = [], startPage }: SplashProps) => {
|
||||
const { id, name, description, icon } = item._source;
|
||||
|
||||
return (
|
||||
<li key={id} className="w-1/2 p-1">
|
||||
<li key={id} className="mobile:w-full w-1/2 p-1">
|
||||
<div
|
||||
className="group h-[74px] px-3 py-2 text-sm rounded-xl border dark:border-[#262626] bg-white dark:bg-black cursor-pointer transition hover:!border-[#0087FF]"
|
||||
onClick={() => {
|
||||
|
||||
@@ -1,7 +1,7 @@
|
||||
import { useEffect, useMemo, useRef } from "react";
|
||||
import { useReactive } from "ahooks";
|
||||
import { useEffect, useRef } from "react";
|
||||
import dayjs from "dayjs";
|
||||
import durationPlugin from "dayjs/plugin/duration";
|
||||
import { nanoid } from "nanoid";
|
||||
|
||||
import { useThemeStore } from "@/stores/themeStore";
|
||||
import loadingLight from "@/assets/images/ReadAloud/loading-light.png";
|
||||
@@ -16,77 +16,82 @@ import forwardLight from "@/assets/images/ReadAloud/forward-light.png";
|
||||
import forwardDark from "@/assets/images/ReadAloud/forward-dark.png";
|
||||
import closeLight from "@/assets/images/ReadAloud/close-light.png";
|
||||
import closeDark from "@/assets/images/ReadAloud/close-dark.png";
|
||||
import { useConnectStore } from "@/stores/connectStore";
|
||||
import platformAdapter from "@/utils/platformAdapter";
|
||||
import { useStreamAudio } from "@/hooks/useStreamAudio";
|
||||
import { useChatStore } from "@/stores/chatStore";
|
||||
|
||||
dayjs.extend(durationPlugin);
|
||||
|
||||
interface State {
|
||||
loading: boolean;
|
||||
playing: boolean;
|
||||
totalDuration: number;
|
||||
currentDuration: number;
|
||||
}
|
||||
const Synthesize = () => {
|
||||
const { isDark } = useThemeStore();
|
||||
const { currentService } = useConnectStore();
|
||||
const { synthesizeItem, setSynthesizeItem } = useChatStore();
|
||||
const clientIdRef = useRef(nanoid());
|
||||
|
||||
const ReadAloud = () => {
|
||||
const isDark = useThemeStore((state) => state.isDark);
|
||||
const state = useReactive<State>({
|
||||
loading: false,
|
||||
playing: true,
|
||||
totalDuration: 300,
|
||||
currentDuration: 0,
|
||||
const {
|
||||
loading,
|
||||
playing,
|
||||
currentTime,
|
||||
totalTime,
|
||||
audioRef,
|
||||
audioUrl,
|
||||
initMediaSource,
|
||||
toggle,
|
||||
seek,
|
||||
appendBuffer,
|
||||
onCanplay,
|
||||
onTimeupdate,
|
||||
onEnded,
|
||||
} = useStreamAudio({
|
||||
onSourceopen() {
|
||||
return platformAdapter.invokeBackend("synthesize", {
|
||||
clientId: clientIdRef.current,
|
||||
serverId: currentService.id,
|
||||
content: synthesizeItem?.content,
|
||||
voice: "longwan_v2",
|
||||
});
|
||||
},
|
||||
});
|
||||
const timerRef = useRef<ReturnType<typeof setTimeout>>();
|
||||
|
||||
const formatTime = useMemo(() => {
|
||||
return dayjs.duration(state.currentDuration * 1000).format("mm:ss");
|
||||
}, [state.currentDuration]);
|
||||
|
||||
useEffect(() => {
|
||||
if (state.playing && state.currentDuration >= state.totalDuration) {
|
||||
state.currentDuration = 0;
|
||||
}
|
||||
const id = nanoid();
|
||||
|
||||
changeCurrentDuration();
|
||||
}, [state.playing]);
|
||||
clientIdRef.current = `synthesize-${id}`;
|
||||
|
||||
const changeCurrentDuration = (duration = state.currentDuration) => {
|
||||
clearTimeout(timerRef.current);
|
||||
initMediaSource();
|
||||
|
||||
let nextDuration = duration;
|
||||
const unlisten = platformAdapter.listenEvent(
|
||||
`synthesize-${id}`,
|
||||
({ payload }) => {
|
||||
appendBuffer(new Uint8Array(payload));
|
||||
}
|
||||
);
|
||||
|
||||
if (duration < 0) {
|
||||
nextDuration = 0;
|
||||
}
|
||||
|
||||
if (duration >= state.totalDuration) {
|
||||
state.currentDuration = state.totalDuration;
|
||||
|
||||
state.playing = false;
|
||||
}
|
||||
|
||||
if (!state.playing) return;
|
||||
|
||||
state.currentDuration = nextDuration;
|
||||
|
||||
timerRef.current = setTimeout(() => {
|
||||
changeCurrentDuration(duration + 1);
|
||||
}, 1000);
|
||||
};
|
||||
return () => {
|
||||
unlisten.then((unmount) => unmount());
|
||||
};
|
||||
}, [synthesizeItem?.id]);
|
||||
|
||||
return (
|
||||
<div className="fixed top-[60px] left-1/2 z-1000 w-[200px] h-12 px-4 flex items-center justify-between -translate-x-1/2 border rounded-lg text-[#333] dark:text-[#D8D8D8] bg-white dark:bg-black dark:border-[#272828] shadow-[0_4px_8px_rgba(0,0,0,0.2)] dark:shadow-[0_4px_8px_rgba(255,255,255,0.15)]">
|
||||
<audio
|
||||
ref={audioRef}
|
||||
src={audioUrl}
|
||||
onCanPlay={onCanplay}
|
||||
onTimeUpdate={onTimeupdate}
|
||||
onEnded={onEnded}
|
||||
/>
|
||||
|
||||
<div className="flex items-center gap-2">
|
||||
{state.loading ? (
|
||||
{loading ? (
|
||||
<img
|
||||
src={isDark ? loadingDark : loadingLight}
|
||||
className="size-4 animate-spin"
|
||||
/>
|
||||
) : (
|
||||
<div
|
||||
onClick={() => {
|
||||
state.playing = !state.playing;
|
||||
}}
|
||||
>
|
||||
{state.playing ? (
|
||||
<div onClick={toggle}>
|
||||
{playing ? (
|
||||
<img
|
||||
src={isDark ? playDark : playLight}
|
||||
className="size-4 cursor-pointer"
|
||||
@@ -100,16 +105,20 @@ const ReadAloud = () => {
|
||||
</div>
|
||||
)}
|
||||
|
||||
<span className="text-sm">{formatTime}</span>
|
||||
{!loading && (
|
||||
<span className="text-sm">
|
||||
{dayjs.duration(currentTime * 1000).format("mm:ss")}
|
||||
</span>
|
||||
)}
|
||||
</div>
|
||||
<div className="flex gap-3">
|
||||
{!state.loading && (
|
||||
{!loading && totalTime !== Infinity && (
|
||||
<>
|
||||
<img
|
||||
src={isDark ? backDark : backLight}
|
||||
className="size-4 cursor-pointer"
|
||||
onClick={() => {
|
||||
changeCurrentDuration(state.currentDuration - 15);
|
||||
seek(currentTime - 15);
|
||||
}}
|
||||
/>
|
||||
|
||||
@@ -117,7 +126,7 @@ const ReadAloud = () => {
|
||||
src={isDark ? forwardDark : forwardLight}
|
||||
className="size-4 cursor-pointer"
|
||||
onClick={() => {
|
||||
changeCurrentDuration(state.currentDuration + 15);
|
||||
seek(currentTime + 15);
|
||||
}}
|
||||
/>
|
||||
</>
|
||||
@@ -126,10 +135,13 @@ const ReadAloud = () => {
|
||||
<img
|
||||
src={isDark ? closeDark : closeLight}
|
||||
className="size-4 cursor-pointer"
|
||||
onClick={() => {
|
||||
setSynthesizeItem(void 0);
|
||||
}}
|
||||
/>
|
||||
</div>
|
||||
</div>
|
||||
);
|
||||
};
|
||||
|
||||
export default ReadAloud;
|
||||
export default Synthesize;
|
||||
@@ -2,16 +2,10 @@ import { useReactive } from "ahooks";
|
||||
import clsx from "clsx";
|
||||
import { Check, Loader, Mic, X } from "lucide-react";
|
||||
import { FC, useEffect, useRef } from "react";
|
||||
import {
|
||||
checkMicrophonePermission,
|
||||
requestMicrophonePermission,
|
||||
} from "tauri-plugin-macos-permissions-api";
|
||||
import { useWavesurfer } from "@wavesurfer/react";
|
||||
import RecordPlugin from "wavesurfer.js/dist/plugins/record.esm.js";
|
||||
|
||||
import { useConnectStore } from "@/stores/connectStore";
|
||||
import { useShortcutsStore } from "@/stores/shortcutsStore";
|
||||
import VisibleKey from "@/components/Common/VisibleKey";
|
||||
import { useAppStore } from "@/stores/appStore";
|
||||
import platformAdapter from "@/utils/platformAdapter";
|
||||
|
||||
@@ -40,9 +34,8 @@ const AudioRecording: FC<AudioRecordingProps> = (props) => {
|
||||
const state = useReactive({ ...INITIAL_STATE });
|
||||
const containerRef = useRef<HTMLDivElement>(null);
|
||||
const recordRef = useRef<RecordPlugin>();
|
||||
const withVisibility = useAppStore((state) => state.withVisibility);
|
||||
const currentService = useConnectStore((state) => state.currentService);
|
||||
const voiceInput = useShortcutsStore((state) => state.voiceInput);
|
||||
const { withVisibility, addError } = useAppStore();
|
||||
const { currentService } = useConnectStore();
|
||||
|
||||
const { wavesurfer } = useWavesurfer({
|
||||
container: containerRef,
|
||||
@@ -75,23 +68,32 @@ const AudioRecording: FC<AudioRecordingProps> = (props) => {
|
||||
|
||||
const reader = new FileReader();
|
||||
|
||||
reader.readAsDataURL(blob);
|
||||
|
||||
reader.onloadend = async () => {
|
||||
const base64Audio = (reader.result as string).split(",")[1];
|
||||
|
||||
const response: any = await platformAdapter.commands("transcription", {
|
||||
serverId: currentService.id,
|
||||
audioType: "mp3",
|
||||
audioContent: base64Audio,
|
||||
});
|
||||
try {
|
||||
const response: any = await platformAdapter.commands(
|
||||
"transcription",
|
||||
{
|
||||
serverId: currentService.id,
|
||||
audioContent: JSON.stringify({ content: base64Audio }),
|
||||
}
|
||||
);
|
||||
|
||||
if (!response) return;
|
||||
const text = response?.results
|
||||
.flatMap((item: any) => item?.transcription?.transcripts)
|
||||
.map((item: any) => item?.text?.replace(/<\|[\/\w]+\|>/g, ""))
|
||||
.join(" ");
|
||||
|
||||
onChange?.(response.text);
|
||||
|
||||
resetState();
|
||||
onChange?.(text);
|
||||
} catch (error) {
|
||||
addError(String(error));
|
||||
} finally {
|
||||
resetState();
|
||||
}
|
||||
};
|
||||
|
||||
reader.readAsDataURL(blob);
|
||||
});
|
||||
|
||||
recordRef.current = record;
|
||||
@@ -124,15 +126,15 @@ const AudioRecording: FC<AudioRecordingProps> = (props) => {
|
||||
};
|
||||
|
||||
const checkPermission = async () => {
|
||||
const authorized = await checkMicrophonePermission();
|
||||
const authorized = await platformAdapter.checkMicrophonePermission();
|
||||
|
||||
if (authorized) return;
|
||||
|
||||
requestMicrophonePermission();
|
||||
platformAdapter.requestMicrophonePermission();
|
||||
|
||||
return new Promise(async (resolved) => {
|
||||
const timer = setInterval(async () => {
|
||||
const authorized = await checkMicrophonePermission();
|
||||
const authorized = await platformAdapter.checkMicrophonePermission();
|
||||
|
||||
if (!authorized) return;
|
||||
|
||||
@@ -157,20 +159,21 @@ const AudioRecording: FC<AudioRecordingProps> = (props) => {
|
||||
<>
|
||||
<div
|
||||
className={clsx(
|
||||
"p-1 hover:bg-gray-50 dark:hover:bg-gray-700 rounded-full transition cursor-pointer",
|
||||
"min-w-6 h-6 flex items-center justify-center hover:bg-gray-50 dark:hover:bg-gray-700 rounded-full transition cursor-pointer",
|
||||
{
|
||||
hidden: state.audioDevices.length === 0,
|
||||
}
|
||||
)}
|
||||
onClick={startRecording}
|
||||
>
|
||||
<VisibleKey shortcut={voiceInput} onKeyPress={startRecording}>
|
||||
<Mic className="size-4 text-[#999]" onClick={startRecording} />
|
||||
</VisibleKey>
|
||||
{/* <VisibleKey shortcut={voiceInput} onKeyPress={startRecording}> */}
|
||||
<Mic className="size-4 text-[#999]" />
|
||||
{/* </VisibleKey> */}
|
||||
</div>
|
||||
|
||||
<div
|
||||
className={clsx(
|
||||
"absolute inset-0 flex items-center gap-1 px-1 rounded translate-x-full transition-all bg-[#ededed] dark:bg-[#202126]",
|
||||
"absolute -inset-2 flex items-center gap-1 px-1 rounded translate-x-full transition-all bg-[#ededed] dark:bg-[#202126]",
|
||||
{
|
||||
"!translate-x-0": state.isRecording || state.converting,
|
||||
}
|
||||
|
||||
@@ -15,6 +15,7 @@ interface FetchSourceProps {
|
||||
Detail?: any;
|
||||
ChunkData?: IChunkData;
|
||||
loading?: boolean;
|
||||
formatUrl?: (data: ISourceData) => string;
|
||||
}
|
||||
|
||||
interface ISourceData {
|
||||
@@ -38,6 +39,7 @@ export const FetchSource = ({
|
||||
Detail,
|
||||
ChunkData,
|
||||
loading,
|
||||
formatUrl,
|
||||
}: FetchSourceProps) => {
|
||||
const { t } = useTranslation();
|
||||
|
||||
@@ -78,6 +80,13 @@ export const FetchSource = ({
|
||||
}
|
||||
}, [ChunkData?.message_chunk, loading]);
|
||||
|
||||
const sourceClick = (item: ISourceData) => () => {
|
||||
const url = (formatUrl && formatUrl(item)) || item.url;
|
||||
if (url) {
|
||||
OpenURLWithBrowser(url);
|
||||
}
|
||||
};
|
||||
|
||||
// Must be after hooks !!!
|
||||
if (!ChunkData && !Detail) return null;
|
||||
|
||||
@@ -125,7 +134,7 @@ export const FetchSource = ({
|
||||
{data?.map((item, idx) => (
|
||||
<div
|
||||
key={idx}
|
||||
onClick={() => item.url && OpenURLWithBrowser(item.url)}
|
||||
onClick={sourceClick(item)}
|
||||
className="group flex items-center p-2 hover:bg-[#F7F7F7] dark:hover:bg-[#2C2C2C] border-b border-[#E6E6E6] dark:border-[#272626] last:border-b-0 cursor-pointer transition-colors"
|
||||
>
|
||||
<div className="w-full flex items-center gap-2">
|
||||
|
||||
@@ -9,7 +9,8 @@ import {
|
||||
RotateCcw,
|
||||
} from "lucide-react";
|
||||
|
||||
import { copyToClipboard } from "@/utils";
|
||||
import { copyToClipboard, isDefaultServer } from "@/utils";
|
||||
import { useChatStore } from "@/stores/chatStore";
|
||||
|
||||
interface MessageActionsProps {
|
||||
id: string;
|
||||
@@ -40,6 +41,8 @@ export const MessageActions = ({
|
||||
|
||||
const isRefreshOnly = RefreshOnlyIds.includes(id);
|
||||
|
||||
const { synthesizeItem, setSynthesizeItem } = useChatStore();
|
||||
|
||||
const handleCopy = async () => {
|
||||
try {
|
||||
await copyToClipboard(content);
|
||||
@@ -63,7 +66,11 @@ export const MessageActions = ({
|
||||
setLiked(false);
|
||||
};
|
||||
|
||||
const handleSpeak = () => {
|
||||
const handleSpeak = async () => {
|
||||
if (isDefaultServer()) {
|
||||
return setSynthesizeItem({ id, content });
|
||||
}
|
||||
|
||||
if ("speechSynthesis" in window) {
|
||||
if (isSpeaking) {
|
||||
window.speechSynthesis.cancel();
|
||||
@@ -162,22 +169,24 @@ export const MessageActions = ({
|
||||
</button>
|
||||
)}
|
||||
{!isRefreshOnly && (
|
||||
<button
|
||||
onClick={handleSpeak}
|
||||
className="p-1 hover:bg-black/5 dark:hover:bg-white/5 rounded-lg transition-colors"
|
||||
>
|
||||
<Volume2
|
||||
className={`w-4 h-4 ${
|
||||
isSpeaking
|
||||
? "text-[#1990FF] dark:text-[#1990FF]"
|
||||
: "text-[#666666] dark:text-[#A3A3A3]"
|
||||
}`}
|
||||
style={{
|
||||
width: actionIconSize,
|
||||
height: actionIconSize,
|
||||
}}
|
||||
/>
|
||||
</button>
|
||||
<>
|
||||
<button
|
||||
onClick={handleSpeak}
|
||||
className="p-1 hover:bg-black/5 dark:hover:bg-white/5 rounded-lg transition-colors"
|
||||
>
|
||||
<Volume2
|
||||
className={`w-4 h-4 ${
|
||||
isSpeaking || synthesizeItem?.id === id
|
||||
? "text-[#1990FF] dark:text-[#1990FF]"
|
||||
: "text-[#666666] dark:text-[#A3A3A3]"
|
||||
}`}
|
||||
style={{
|
||||
width: actionIconSize,
|
||||
height: actionIconSize,
|
||||
}}
|
||||
/>
|
||||
</button>
|
||||
</>
|
||||
)}
|
||||
{question && (
|
||||
<button
|
||||
|
||||
@@ -43,17 +43,21 @@ export const QueryIntent = ({
|
||||
useEffect(() => {
|
||||
if (!ChunkData?.message_chunk) return;
|
||||
if (!loading) {
|
||||
const cleanContent = ChunkData.message_chunk.replace(/^"|"$/g, "");
|
||||
const allMatches = cleanContent.match(/<JSON>([\s\S]*?)<\/JSON>/g);
|
||||
if (allMatches) {
|
||||
const lastMatch = allMatches[allMatches.length - 1];
|
||||
const jsonString = lastMatch.replace(/<JSON>|<\/JSON>/g, "");
|
||||
const data = JSON.parse(jsonString);
|
||||
//console.log("QueryIntent", data);
|
||||
if (data?.suggestion && getSuggestion) {
|
||||
getSuggestion(data?.suggestion);
|
||||
try {
|
||||
const cleanContent = ChunkData.message_chunk.replace(/^"|"$/g, "");
|
||||
const allMatches = cleanContent.match(/<JSON>([\s\S]*?)<\/JSON>/g);
|
||||
if (allMatches) {
|
||||
const lastMatch = allMatches[allMatches.length - 1];
|
||||
const jsonString = lastMatch.replace(/<JSON>|<\/JSON>/g, "");
|
||||
const data = JSON.parse(jsonString);
|
||||
//console.log("QueryIntent", data);
|
||||
if (data?.suggestion && getSuggestion) {
|
||||
getSuggestion(data?.suggestion);
|
||||
}
|
||||
setData(data);
|
||||
}
|
||||
setData(data);
|
||||
} catch (error) {
|
||||
console.error("Failed to process message chunk in QueryIntent:", error);
|
||||
}
|
||||
}
|
||||
}, [ChunkData?.message_chunk, loading]);
|
||||
@@ -79,14 +83,22 @@ export const QueryIntent = ({
|
||||
<>
|
||||
<Loader className="w-4 h-4 animate-spin text-[#1990FF]" />
|
||||
<span className="text-xs text-[#999999] italic">
|
||||
{t(`assistant.message.steps.${ChunkData?.chunk_type || Detail.type}`)}
|
||||
{t(
|
||||
`assistant.message.steps.${
|
||||
ChunkData?.chunk_type || Detail.type
|
||||
}`
|
||||
)}
|
||||
</span>
|
||||
</>
|
||||
) : (
|
||||
<>
|
||||
<UnderstandIcon className="w-4 h-4 text-[#38C200]" />
|
||||
<span className="text-xs text-[#999999]">
|
||||
{t(`assistant.message.steps.${ChunkData?.chunk_type || Detail.type}`)}
|
||||
{t(
|
||||
`assistant.message.steps.${
|
||||
ChunkData?.chunk_type || Detail.type
|
||||
}`
|
||||
)}
|
||||
</span>
|
||||
</>
|
||||
)}
|
||||
|
||||
@@ -1,17 +1,28 @@
|
||||
import { useState } from "react";
|
||||
import { FC, useState } from "react";
|
||||
import clsx from "clsx";
|
||||
|
||||
import { CopyButton } from "@/components/Common/CopyButton";
|
||||
import { useAsyncEffect } from "ahooks";
|
||||
import platformAdapter from "@/utils/platformAdapter";
|
||||
import { useConnectStore } from "@/stores/connectStore";
|
||||
import { AttachmentItem } from "../Assistant/AttachmentList";
|
||||
import { useAppStore } from "@/stores/appStore";
|
||||
|
||||
interface UserMessageProps {
|
||||
messageContent: string;
|
||||
message: string;
|
||||
attachments: string[];
|
||||
}
|
||||
|
||||
export const UserMessage = ({ messageContent }: UserMessageProps) => {
|
||||
export const UserMessage: FC<UserMessageProps> = (props) => {
|
||||
const { message, attachments } = props;
|
||||
|
||||
const [showCopyButton, setShowCopyButton] = useState(false);
|
||||
const { currentService } = useConnectStore();
|
||||
const [attachmentData, setAttachmentData] = useState<any[]>([]);
|
||||
const { addError } = useAppStore();
|
||||
|
||||
const handleDoubleClick = (e: React.MouseEvent<HTMLDivElement>) => {
|
||||
if (typeof window !== 'undefined' && typeof document !== 'undefined') {
|
||||
if (typeof window !== "undefined" && typeof document !== "undefined") {
|
||||
const selection = window.getSelection();
|
||||
const range = document.createRange();
|
||||
|
||||
@@ -21,31 +32,81 @@ export const UserMessage = ({ messageContent }: UserMessageProps) => {
|
||||
selection.removeAllRanges();
|
||||
selection.addRange(range);
|
||||
} catch (error) {
|
||||
console.error('Selection failed:', error);
|
||||
console.error("Selection failed:", error);
|
||||
}
|
||||
}
|
||||
}
|
||||
};
|
||||
|
||||
useAsyncEffect(async () => {
|
||||
try {
|
||||
if (attachments.length === 0) return;
|
||||
|
||||
const result: any = await platformAdapter.commands(
|
||||
"get_attachment_by_ids",
|
||||
{
|
||||
serverId: currentService.id,
|
||||
attachments,
|
||||
}
|
||||
);
|
||||
|
||||
setAttachmentData(result?.hits?.hits);
|
||||
} catch (error) {
|
||||
addError(String(error));
|
||||
}
|
||||
}, [attachments]);
|
||||
|
||||
return (
|
||||
<div
|
||||
className="flex gap-1 items-center justify-end"
|
||||
onMouseEnter={() => setShowCopyButton(true)}
|
||||
onMouseLeave={() => setShowCopyButton(false)}
|
||||
>
|
||||
<div
|
||||
className={clsx("size-6 transition", {
|
||||
"opacity-0": !showCopyButton,
|
||||
})}
|
||||
>
|
||||
<CopyButton textToCopy={messageContent} />
|
||||
</div>
|
||||
<div
|
||||
className="max-w-[85%] overflow-auto text-left px-3 py-2 bg-white dark:bg-[#202126] rounded-xl border border-black/12 dark:border-black/15 font-normal text-sm text-[#333333] dark:text-[#D8D8D8] cursor-pointer user-select-text whitespace-pre-wrap"
|
||||
onDoubleClick={handleDoubleClick}
|
||||
>
|
||||
{messageContent}
|
||||
</div>
|
||||
</div>
|
||||
<>
|
||||
{message && (
|
||||
<div
|
||||
className="flex gap-1 items-center justify-end"
|
||||
onMouseEnter={() => setShowCopyButton(true)}
|
||||
onMouseLeave={() => setShowCopyButton(false)}
|
||||
>
|
||||
<div
|
||||
className={clsx("size-6 transition", {
|
||||
"opacity-0": !showCopyButton,
|
||||
})}
|
||||
>
|
||||
<CopyButton textToCopy={message} />
|
||||
</div>
|
||||
<div
|
||||
className="max-w-[85%] overflow-auto text-left px-3 py-2 bg-white dark:bg-[#202126] rounded-xl border border-black/12 dark:border-black/15 font-normal text-sm text-[#333333] dark:text-[#D8D8D8] cursor-pointer user-select-text whitespace-pre-wrap"
|
||||
onDoubleClick={handleDoubleClick}
|
||||
>
|
||||
{message}
|
||||
</div>
|
||||
</div>
|
||||
)}
|
||||
|
||||
{attachmentData && (
|
||||
<div
|
||||
className={clsx("flex justify-end flex-wrap gap-y-2 w-full", {
|
||||
"mt-3": message,
|
||||
})}
|
||||
>
|
||||
{attachmentData.map((item) => {
|
||||
const { id, name, size, icon } = item._source;
|
||||
|
||||
return (
|
||||
<AttachmentItem
|
||||
{...item._source}
|
||||
key={id}
|
||||
uploading={false}
|
||||
uploaded
|
||||
id={id}
|
||||
extname={icon}
|
||||
attachmentId={id}
|
||||
name={name}
|
||||
path={name}
|
||||
size={size}
|
||||
deletable={false}
|
||||
/>
|
||||
);
|
||||
})}
|
||||
</div>
|
||||
)}
|
||||
</>
|
||||
);
|
||||
};
|
||||
|
||||
@@ -34,6 +34,7 @@ interface ChatMessageProps {
|
||||
actionClassName?: string;
|
||||
actionIconSize?: number;
|
||||
copyButtonId?: string;
|
||||
formatUrl?: (data: any) => string;
|
||||
}
|
||||
|
||||
export const ChatMessage = memo(function ChatMessage({
|
||||
@@ -53,6 +54,7 @@ export const ChatMessage = memo(function ChatMessage({
|
||||
actionClassName,
|
||||
actionIconSize,
|
||||
copyButtonId,
|
||||
formatUrl,
|
||||
}: ChatMessageProps) {
|
||||
const { t } = useTranslation();
|
||||
|
||||
@@ -87,6 +89,7 @@ export const ChatMessage = memo(function ChatMessage({
|
||||
]);
|
||||
|
||||
const messageContent = message?._source?.message || "";
|
||||
const attachments = message?._source?.attachments ?? [];
|
||||
const details = message?._source?.details || [];
|
||||
const question = message?._source?.question || "";
|
||||
|
||||
@@ -101,7 +104,7 @@ export const ChatMessage = memo(function ChatMessage({
|
||||
|
||||
const renderContent = () => {
|
||||
if (!isAssistant) {
|
||||
return <UserMessage messageContent={messageContent} />;
|
||||
return <UserMessage message={messageContent} attachments={attachments} />;
|
||||
}
|
||||
|
||||
return (
|
||||
@@ -123,6 +126,7 @@ export const ChatMessage = memo(function ChatMessage({
|
||||
Detail={details.find((item) => item.type === "fetch_source")}
|
||||
ChunkData={fetch_source}
|
||||
loading={loadingStep?.fetch_source}
|
||||
formatUrl={formatUrl}
|
||||
/>
|
||||
<PickSource
|
||||
Detail={details.find((item) => item.type === "pick_source")}
|
||||
@@ -173,13 +177,13 @@ export const ChatMessage = memo(function ChatMessage({
|
||||
return (
|
||||
<div
|
||||
className={clsx(
|
||||
"py-8 flex",
|
||||
"w-full py-8 flex",
|
||||
[isAssistant ? "justify-start" : "justify-end"],
|
||||
rootClassName
|
||||
)}
|
||||
>
|
||||
<div
|
||||
className={`px-4 flex gap-4 ${
|
||||
className={`w-full px-4 flex gap-4 ${
|
||||
isAssistant ? "w-full" : "flex-row-reverse"
|
||||
}`}
|
||||
>
|
||||
|
||||
@@ -1,18 +1,15 @@
|
||||
import { useEffect, useRef, useState, useCallback } from "react";
|
||||
import { emit } from "@tauri-apps/api/event";
|
||||
|
||||
import { DataSourcesList } from "./DataSourcesList";
|
||||
import { Sidebar } from "./Sidebar";
|
||||
import { Connect } from "./Connect";
|
||||
import { useAppStore } from "@/stores/appStore";
|
||||
import { useConnectStore } from "@/stores/connectStore";
|
||||
import {
|
||||
list_coco_servers,
|
||||
add_coco_server,
|
||||
refresh_coco_server_info,
|
||||
} from "@/commands";
|
||||
import ServiceInfo from "./ServiceInfo";
|
||||
import ServiceAuth from "./ServiceAuth";
|
||||
import platformAdapter from "@/utils/platformAdapter";
|
||||
import type { Server } from "@/types/server";
|
||||
import { useServers } from "@/hooks/useServers";
|
||||
|
||||
export default function Cloud() {
|
||||
const SidebarRef = useRef<{ refreshData: () => void }>(null);
|
||||
@@ -21,103 +18,63 @@ export default function Cloud() {
|
||||
|
||||
const [isConnect, setIsConnect] = useState(true);
|
||||
|
||||
const currentService = useConnectStore((state) => state.currentService);
|
||||
const setCurrentService = useConnectStore((state) => state.setCurrentService);
|
||||
|
||||
const serverList = useConnectStore((state) => state.serverList);
|
||||
const setServerList = useConnectStore((state) => state.setServerList);
|
||||
const {
|
||||
cloudSelectService,
|
||||
setCloudSelectService,
|
||||
serverList,
|
||||
setServerList,
|
||||
} = useConnectStore();
|
||||
|
||||
const [refreshLoading, setRefreshLoading] = useState(false);
|
||||
|
||||
const { addServer, refreshServerList } = useServers();
|
||||
|
||||
// fetch the servers
|
||||
useEffect(() => {
|
||||
fetchServers(true);
|
||||
}, []);
|
||||
fetchServers();
|
||||
}, [serverList]);
|
||||
|
||||
useEffect(() => {
|
||||
// console.log("currentService", currentService);
|
||||
setRefreshLoading(false);
|
||||
setIsConnect(true);
|
||||
}, [JSON.stringify(currentService)]);
|
||||
}, [cloudSelectService?.id]);
|
||||
|
||||
const fetchServers = async (resetSelection: boolean) => {
|
||||
list_coco_servers()
|
||||
.then((res: any) => {
|
||||
if (errors.length > 0) {
|
||||
res = (res || []).map((item: any) => {
|
||||
if (item.id === currentService?.id) {
|
||||
item.health = {
|
||||
services: null,
|
||||
status: null,
|
||||
};
|
||||
}
|
||||
return item;
|
||||
});
|
||||
const fetchServers = useCallback(async () => {
|
||||
let res = serverList;
|
||||
if (errors.length > 0) {
|
||||
res = res.map((item: Server) => {
|
||||
if (item.id === cloudSelectService?.id) {
|
||||
item.health = {
|
||||
services: item.health?.services || {},
|
||||
status: item.health?.status || "red",
|
||||
};
|
||||
}
|
||||
// console.log("list_coco_servers", res);
|
||||
setServerList(res);
|
||||
|
||||
if (resetSelection && res.length > 0) {
|
||||
const matched = res.find((server: any) => {
|
||||
return server.id === currentService?.id;
|
||||
});
|
||||
|
||||
if (matched) {
|
||||
setCurrentService(matched);
|
||||
} else {
|
||||
setCurrentService(res[res.length - 1]);
|
||||
}
|
||||
}
|
||||
})
|
||||
.catch((err: any) => {
|
||||
console.error(err);
|
||||
return item;
|
||||
});
|
||||
};
|
||||
|
||||
const addServer = (endpointLink: string) => {
|
||||
if (!endpointLink) {
|
||||
throw new Error("Endpoint is required");
|
||||
}
|
||||
if (
|
||||
!endpointLink.startsWith("http://") &&
|
||||
!endpointLink.startsWith("https://")
|
||||
) {
|
||||
throw new Error("Invalid Endpoint");
|
||||
}
|
||||
setServerList(res);
|
||||
|
||||
setRefreshLoading(true);
|
||||
|
||||
return add_coco_server(endpointLink)
|
||||
.then((res: any) => {
|
||||
// console.log("add_coco_server", res);
|
||||
fetchServers(false).then((r) => {
|
||||
console.log("fetchServers", r);
|
||||
setCurrentService(res);
|
||||
});
|
||||
})
|
||||
.finally(() => {
|
||||
setRefreshLoading(false);
|
||||
if (res.length > 0) {
|
||||
const matched = res.find((server: any) => {
|
||||
return server.id === cloudSelectService?.id;
|
||||
});
|
||||
};
|
||||
|
||||
if (matched) {
|
||||
setCloudSelectService(matched);
|
||||
} else {
|
||||
setCloudSelectService(res[res.length - 1]);
|
||||
}
|
||||
}
|
||||
}, [serverList, errors, cloudSelectService]);
|
||||
|
||||
const refreshClick = useCallback(
|
||||
(id: string) => {
|
||||
async (id: string) => {
|
||||
setRefreshLoading(true);
|
||||
refresh_coco_server_info(id)
|
||||
.then((res: any) => {
|
||||
console.log("refresh_coco_server_info", id, res);
|
||||
fetchServers(false).then((r) => {
|
||||
console.log("fetchServers", r);
|
||||
});
|
||||
// update currentService
|
||||
setCurrentService(res);
|
||||
emit("login_or_logout", true);
|
||||
})
|
||||
.finally(() => {
|
||||
setRefreshLoading(false);
|
||||
});
|
||||
await platformAdapter.commands("refresh_coco_server_info", id);
|
||||
await refreshServerList();
|
||||
setRefreshLoading(false);
|
||||
},
|
||||
[fetchServers]
|
||||
[refreshServerList]
|
||||
);
|
||||
|
||||
return (
|
||||
@@ -134,7 +91,6 @@ export default function Cloud() {
|
||||
<ServiceInfo
|
||||
refreshLoading={refreshLoading}
|
||||
refreshClick={refreshClick}
|
||||
fetchServers={fetchServers}
|
||||
/>
|
||||
|
||||
<ServiceAuth
|
||||
@@ -142,8 +98,8 @@ export default function Cloud() {
|
||||
refreshClick={refreshClick}
|
||||
/>
|
||||
|
||||
{currentService?.profile && currentService?.available ? (
|
||||
<DataSourcesList server={currentService?.id} />
|
||||
{cloudSelectService?.profile && cloudSelectService?.available ? (
|
||||
<DataSourcesList server={cloudSelectService?.id} />
|
||||
) : null}
|
||||
</div>
|
||||
) : (
|
||||
|
||||
@@ -21,7 +21,7 @@ export function Connect({ setIsConnect, onAddServer }: ConnectServiceProps) {
|
||||
};
|
||||
|
||||
const onAddServerClick = async (endpoint: string) => {
|
||||
console.log("onAddServer", endpoint);
|
||||
//console.log("onAddServer", endpoint);
|
||||
await onAddServer(endpoint);
|
||||
setIsConnect(true);
|
||||
};
|
||||
|
||||
@@ -20,8 +20,6 @@ interface DataSourceItemProps {
|
||||
}
|
||||
|
||||
export function DataSourceItem({ name, icon, connector }: DataSourceItemProps) {
|
||||
// const isConnected = true;
|
||||
|
||||
const isDark = useThemeStore((state) => state.isDark);
|
||||
|
||||
const connector_data = useConnectStore((state) => state.connector_data);
|
||||
|
||||
@@ -4,7 +4,7 @@ import { RefreshCcw } from "lucide-react";
|
||||
|
||||
import { DataSourceItem } from "./DataSourceItem";
|
||||
import { useConnectStore } from "@/stores/connectStore";
|
||||
import { get_connectors_by_server, datasource_search } from "@/commands";
|
||||
import platformAdapter from "@/utils/platformAdapter";
|
||||
|
||||
export function DataSourcesList({ server }: { server: string }) {
|
||||
const { t } = useTranslation();
|
||||
@@ -17,17 +17,17 @@ export function DataSourcesList({ server }: { server: string }) {
|
||||
function initServerAppData() {
|
||||
setRefreshLoading(true);
|
||||
// fetch connectors data
|
||||
get_connectors_by_server(server)
|
||||
platformAdapter
|
||||
.commands("get_connectors_by_server", server)
|
||||
.then((res: any) => {
|
||||
// console.log("get_connectors_by_server", res);
|
||||
setConnectorData(res, server);
|
||||
})
|
||||
.finally(() => {});
|
||||
|
||||
// fetch datasource data
|
||||
datasource_search({ id: server })
|
||||
platformAdapter
|
||||
.commands("datasource_search", { id: server })
|
||||
.then((res: any) => {
|
||||
// console.log("datasource_search", res);
|
||||
setDatasourceData(res, server);
|
||||
})
|
||||
.finally(() => {
|
||||
|
||||
@@ -2,7 +2,6 @@ import { FC, memo, useCallback, useEffect, useState } from "react";
|
||||
import { Copy } from "lucide-react";
|
||||
import { useTranslation } from "react-i18next";
|
||||
import { v4 as uuidv4 } from "uuid";
|
||||
import { emit } from "@tauri-apps/api/event";
|
||||
import {
|
||||
getCurrent as getCurrentDeepLinkUrls,
|
||||
onOpenUrl,
|
||||
@@ -13,8 +12,9 @@ import { UserProfile } from "./UserProfile";
|
||||
import { OpenURLWithBrowser } from "@/utils";
|
||||
import { useConnectStore } from "@/stores/connectStore";
|
||||
import { useAppStore } from "@/stores/appStore";
|
||||
import { logout_coco_server, handle_sso_callback } from "@/commands";
|
||||
import { copyToClipboard } from "@/utils";
|
||||
import platformAdapter from "@/utils/platformAdapter";
|
||||
import { useServers } from "@/hooks/useServers";
|
||||
|
||||
interface ServiceAuthProps {
|
||||
setRefreshLoading: (loading: boolean) => void;
|
||||
@@ -30,12 +30,9 @@ const ServiceAuth = memo(
|
||||
|
||||
const addError = useAppStore((state) => state.addError);
|
||||
|
||||
const currentService = useConnectStore((state) => state.currentService);
|
||||
const setCurrentService = useConnectStore(
|
||||
(state) => state.setCurrentService
|
||||
);
|
||||
const serverList = useConnectStore((state) => state.serverList);
|
||||
const setServerList = useConnectStore((state) => state.setServerList);
|
||||
const cloudSelectService = useConnectStore((state) => state.cloudSelectService);
|
||||
|
||||
const { logoutServer } = useServers();
|
||||
|
||||
const [loading, setLoading] = useState(false);
|
||||
|
||||
@@ -46,7 +43,7 @@ const ServiceAuth = memo(
|
||||
setSSORequestID(requestID);
|
||||
|
||||
// Generate the login URL with the current appUid
|
||||
const url = `${currentService?.auth_provider?.sso?.url}/?provider=${currentService?.id}&product=coco&request_id=${requestID}`;
|
||||
const url = `${cloudSelectService?.auth_provider?.sso?.url}/?provider=${cloudSelectService?.id}&product=coco&request_id=${requestID}`;
|
||||
|
||||
console.log("Open SSO link, requestID:", ssoRequestID, url);
|
||||
|
||||
@@ -55,28 +52,16 @@ const ServiceAuth = memo(
|
||||
|
||||
// Start loading state
|
||||
setLoading(true);
|
||||
}, [ssoRequestID, loading, currentService]);
|
||||
}, [ssoRequestID, loading, cloudSelectService]);
|
||||
|
||||
const onLogout = useCallback(
|
||||
(id: string) => {
|
||||
setRefreshLoading(true);
|
||||
logout_coco_server(id)
|
||||
.then((res: any) => {
|
||||
console.log("logout_coco_server", id, JSON.stringify(res));
|
||||
emit("login_or_logout", false);
|
||||
// update server profile
|
||||
setCurrentService({ ...currentService, profile: null });
|
||||
const updatedServerList = serverList.map((server) =>
|
||||
server.id === id ? { ...server, profile: null } : server
|
||||
);
|
||||
console.log("updatedServerList", updatedServerList);
|
||||
setServerList(updatedServerList);
|
||||
})
|
||||
.finally(() => {
|
||||
setRefreshLoading(false);
|
||||
});
|
||||
logoutServer(id).finally(() => {
|
||||
setRefreshLoading(false);
|
||||
});
|
||||
},
|
||||
[currentService, serverList]
|
||||
[logoutServer]
|
||||
);
|
||||
|
||||
const handleOAuthCallback = useCallback(
|
||||
@@ -88,7 +73,7 @@ const ServiceAuth = memo(
|
||||
|
||||
try {
|
||||
console.log("Handling OAuth callback:", { code, serverId });
|
||||
await handle_sso_callback({
|
||||
await platformAdapter.commands("handle_sso_callback", {
|
||||
serverId: serverId, // Make sure 'server_id' is the correct argument
|
||||
requestId: ssoRequestID, // Make sure 'request_id' is the correct argument
|
||||
code: code,
|
||||
@@ -123,7 +108,7 @@ const ServiceAuth = memo(
|
||||
return;
|
||||
}
|
||||
|
||||
const serverId = currentService?.id;
|
||||
const serverId = cloudSelectService?.id;
|
||||
handleOAuthCallback(code, serverId);
|
||||
} catch (err) {
|
||||
console.error("Failed to parse URL:", err);
|
||||
@@ -176,9 +161,9 @@ const ServiceAuth = memo(
|
||||
|
||||
useEffect(() => {
|
||||
setLoading(false);
|
||||
}, [currentService]);
|
||||
}, [cloudSelectService]);
|
||||
|
||||
if (!currentService?.auth_provider?.sso?.url) {
|
||||
if (!cloudSelectService?.auth_provider?.sso?.url) {
|
||||
return null;
|
||||
}
|
||||
|
||||
@@ -187,10 +172,10 @@ const ServiceAuth = memo(
|
||||
<h2 className="text-lg font-medium text-gray-900 dark:text-white mb-4">
|
||||
{t("cloud.accountInfo")}
|
||||
</h2>
|
||||
{currentService?.profile ? (
|
||||
{cloudSelectService?.profile ? (
|
||||
<UserProfile
|
||||
server={currentService?.id}
|
||||
userInfo={currentService?.profile}
|
||||
server={cloudSelectService?.id}
|
||||
userInfo={cloudSelectService?.profile}
|
||||
onLogout={onLogout}
|
||||
/>
|
||||
) : (
|
||||
@@ -204,21 +189,37 @@ const ServiceAuth = memo(
|
||||
onCancel={() => setLoading(false)}
|
||||
onCopy={() => {
|
||||
copyToClipboard(
|
||||
`${currentService?.auth_provider?.sso?.url}/?provider=${currentService?.id}&product=coco&request_id=${ssoRequestID}`
|
||||
`${cloudSelectService?.auth_provider?.sso?.url}/?provider=${cloudSelectService?.id}&product=coco&request_id=${ssoRequestID}`
|
||||
);
|
||||
}}
|
||||
/>
|
||||
)}
|
||||
|
||||
{/* Privacy Policy Link */}
|
||||
<button
|
||||
className="text-xs text-[#0096FB] dark:text-blue-400 block"
|
||||
onClick={() =>
|
||||
OpenURLWithBrowser(currentService?.provider?.privacy_policy)
|
||||
}
|
||||
>
|
||||
{t("cloud.privacyPolicy")}
|
||||
</button>
|
||||
<div className="flex items-center gap-2">
|
||||
{/* EULA Link */}
|
||||
<button
|
||||
className="text-xs text-[#0096FB] dark:text-blue-400 block"
|
||||
onClick={() =>
|
||||
OpenURLWithBrowser(cloudSelectService?.provider?.eula)
|
||||
}
|
||||
>
|
||||
{t("cloud.eula")}
|
||||
</button>
|
||||
|
||||
<span className="text-xs text-[#0096FB] dark:text-blue-400 block">
|
||||
|
|
||||
</span>
|
||||
|
||||
{/* Privacy Policy Link */}
|
||||
<button
|
||||
className="text-xs text-[#0096FB] dark:text-blue-400 block"
|
||||
onClick={() =>
|
||||
OpenURLWithBrowser(cloudSelectService?.provider?.privacy_policy)
|
||||
}
|
||||
>
|
||||
{t("cloud.privacyPolicy")}
|
||||
</button>
|
||||
</div>
|
||||
</div>
|
||||
)}
|
||||
</div>
|
||||
|
||||
@@ -6,13 +6,13 @@ import { useConnectStore } from "@/stores/connectStore";
|
||||
interface ServiceBannerProps {}
|
||||
|
||||
const ServiceBanner = memo(({}: ServiceBannerProps) => {
|
||||
const currentService = useConnectStore((state) => state.currentService);
|
||||
const cloudSelectService = useConnectStore((state) => state.cloudSelectService);
|
||||
|
||||
return (
|
||||
<div className="w-full rounded-[4px] bg-[rgba(229,229,229,1)] dark:bg-gray-800 mb-6">
|
||||
<img
|
||||
width="100%"
|
||||
src={currentService?.provider?.banner || bannerImg}
|
||||
src={cloudSelectService?.provider?.banner || bannerImg}
|
||||
alt="banner"
|
||||
onError={(e) => {
|
||||
const target = e.target as HTMLImageElement;
|
||||
|
||||
@@ -1,4 +1,4 @@
|
||||
import { memo, useCallback } from "react";
|
||||
import { memo } from "react";
|
||||
import { Globe, RefreshCcw, Trash2 } from "lucide-react";
|
||||
import { useTranslation } from "react-i18next";
|
||||
import clsx from "clsx";
|
||||
@@ -7,90 +7,64 @@ import Tooltip from "@/components/Common/Tooltip";
|
||||
import SettingsToggle from "@/components/Settings/SettingsToggle";
|
||||
import { OpenURLWithBrowser } from "@/utils";
|
||||
import { useConnectStore } from "@/stores/connectStore";
|
||||
import { enable_server, disable_server, remove_coco_server } from "@/commands";
|
||||
import { useServers } from "@/hooks/useServers";
|
||||
|
||||
interface ServiceHeaderProps {
|
||||
refreshLoading?: boolean;
|
||||
refreshClick: (id: string) => void;
|
||||
fetchServers: (force: boolean) => Promise<void>;
|
||||
}
|
||||
|
||||
const ServiceHeader = memo(
|
||||
({ refreshLoading, refreshClick, fetchServers }: ServiceHeaderProps) => {
|
||||
({ refreshLoading, refreshClick }: ServiceHeaderProps) => {
|
||||
const { t } = useTranslation();
|
||||
|
||||
const currentService = useConnectStore((state) => state.currentService);
|
||||
const setCurrentService = useConnectStore(
|
||||
(state) => state.setCurrentService
|
||||
);
|
||||
const cloudSelectService = useConnectStore((state) => state.cloudSelectService);
|
||||
|
||||
const enable_coco_server = useCallback(
|
||||
async (enabled: boolean) => {
|
||||
if (enabled) {
|
||||
await enable_server(currentService?.id);
|
||||
} else {
|
||||
await disable_server(currentService?.id);
|
||||
}
|
||||
|
||||
setCurrentService({ ...currentService, enabled });
|
||||
|
||||
await fetchServers(false);
|
||||
},
|
||||
[currentService?.id]
|
||||
);
|
||||
|
||||
const removeServer = (id: string) => {
|
||||
remove_coco_server(id).then((res: any) => {
|
||||
console.log("remove_coco_server", id, JSON.stringify(res));
|
||||
fetchServers(true).then((r) => {
|
||||
console.log("fetchServers", r);
|
||||
});
|
||||
});
|
||||
};
|
||||
const { enableServer, removeServer } = useServers();
|
||||
|
||||
return (
|
||||
<div className="flex items-center justify-between mb-4">
|
||||
<div className="flex items-center space-x-3">
|
||||
<Tooltip content={currentService?.endpoint}>
|
||||
<Tooltip content={cloudSelectService?.endpoint}>
|
||||
<div className="flex items-center text-gray-900 dark:text-white font-medium cursor-pointer">
|
||||
{currentService?.name}
|
||||
{cloudSelectService?.name}
|
||||
</div>
|
||||
</Tooltip>
|
||||
</div>
|
||||
<div className="flex items-center gap-2">
|
||||
<SettingsToggle
|
||||
checked={currentService?.enabled}
|
||||
checked={cloudSelectService?.enabled}
|
||||
className={clsx({
|
||||
"bg-red-600 focus:ring-red-500": !currentService?.enabled,
|
||||
"bg-red-600 focus:ring-red-500": !cloudSelectService?.enabled,
|
||||
})}
|
||||
label={
|
||||
currentService?.enabled
|
||||
cloudSelectService?.enabled
|
||||
? t("cloud.enable_server")
|
||||
: t("cloud.disable_server")
|
||||
}
|
||||
onChange={enable_coco_server}
|
||||
onChange={enableServer}
|
||||
/>
|
||||
|
||||
<button
|
||||
className="p-2 text-gray-500 hover:text-gray-700 dark:text-gray-400 dark:hover:text-gray-300 rounded-[6px] bg-white dark:bg-gray-800 border border-[rgba(228,229,239,1)] dark:border-gray-700"
|
||||
onClick={() =>
|
||||
OpenURLWithBrowser(currentService?.provider?.website)
|
||||
OpenURLWithBrowser(cloudSelectService?.provider?.website)
|
||||
}
|
||||
>
|
||||
<Globe className="w-3.5 h-3.5" />
|
||||
</button>
|
||||
<button
|
||||
className="p-2 text-gray-500 hover:text-gray-700 dark:text-gray-400 dark:hover:text-gray-300 rounded-[6px] bg-white dark:bg-gray-800 border border-[rgba(228,229,239,1)] dark:border-gray-700"
|
||||
onClick={() => refreshClick(currentService?.id)}
|
||||
onClick={() => refreshClick(cloudSelectService?.id)}
|
||||
>
|
||||
<RefreshCcw
|
||||
className={`w-3.5 h-3.5 ${refreshLoading ? "animate-spin" : ""}`}
|
||||
/>
|
||||
</button>
|
||||
{!currentService?.builtin && (
|
||||
{!cloudSelectService?.builtin && (
|
||||
<button
|
||||
className="p-2 text-gray-500 hover:text-gray-700 dark:text-gray-400 dark:hover:text-gray-300 rounded-[6px] bg-white dark:bg-gray-800 border border-[rgba(228,229,239,1)] dark:border-gray-700"
|
||||
onClick={() => removeServer(currentService?.id)}
|
||||
onClick={() => removeServer(cloudSelectService?.id)}
|
||||
>
|
||||
<Trash2 className="w-3.5 h-3.5 text-[#ff4747]" />
|
||||
</button>
|
||||
|
||||
@@ -7,11 +7,10 @@ import ServiceMetadata from "./ServiceMetadata";
|
||||
interface ServiceInfoProps {
|
||||
refreshLoading?: boolean;
|
||||
refreshClick: (id: string) => void;
|
||||
fetchServers: (force: boolean) => Promise<void>;
|
||||
}
|
||||
|
||||
const ServiceInfo = memo(
|
||||
({ refreshLoading, refreshClick, fetchServers }: ServiceInfoProps) => {
|
||||
({ refreshLoading, refreshClick }: ServiceInfoProps) => {
|
||||
return (
|
||||
<>
|
||||
<ServiceBanner />
|
||||
@@ -19,7 +18,6 @@ const ServiceInfo = memo(
|
||||
<ServiceHeader
|
||||
refreshLoading={refreshLoading}
|
||||
refreshClick={refreshClick}
|
||||
fetchServers={fetchServers}
|
||||
/>
|
||||
|
||||
<ServiceMetadata />
|
||||
|
||||
Some files were not shown because too many files have changed in this diff Show More
Reference in New Issue
Block a user