Compare commits
399 Commits
| Author | SHA1 | Date | |
|---|---|---|---|
|
|
2ac81566c6 | ||
|
|
b004670dec | ||
|
|
a426e33e6b | ||
|
|
bb7dd6bf7c | ||
|
|
37c5f2de24 | ||
|
|
ab6c25fe96 | ||
|
|
1fb464df09 | ||
|
|
65aa75043f | ||
|
|
79dcc7b4ec | ||
|
|
3d29cfe235 | ||
|
|
aea3a7ba98 | ||
|
|
190dfc6ecd | ||
|
|
316a7940d6 | ||
|
|
acfc1bb32d | ||
|
|
c4d178dc2d | ||
|
|
6333c697d5 | ||
|
|
810541494f | ||
|
|
e45dc2acbe | ||
|
|
2d1ccb9744 | ||
|
|
406f3b31e9 | ||
|
|
f51dd81014 | ||
|
|
3b38cbfb6c | ||
|
|
a4483ba277 | ||
|
|
bf46979b80 | ||
|
|
070f171ad4 | ||
|
|
3180704a0d | ||
|
|
b3f68697ce | ||
|
|
69d2b4b834 | ||
|
|
6837286061 | ||
|
|
a431ead22a | ||
|
|
7ec41dfe80 | ||
|
|
06053e9fd9 | ||
|
|
70b048fba3 | ||
|
|
45083f829b | ||
|
|
e4f6fb8e98 | ||
|
|
ee182b22da | ||
|
|
a37e22c227 | ||
|
|
d75ab1018d | ||
|
|
40ad066e69 | ||
|
|
a2a5a9f8fe | ||
|
|
5fd9339e56 | ||
|
|
a8a9208b1f | ||
|
|
8c9a2ff441 | ||
|
|
2251b0af95 | ||
|
|
560a12ab93 | ||
|
|
2ff66c0b91 | ||
|
|
ef4a184233 | ||
|
|
8422bc03e7 | ||
|
|
370113129c | ||
|
|
cb758ef452 | ||
|
|
12b9b4bb81 | ||
|
|
562db19f16 | ||
|
|
dc5cd9aecb | ||
|
|
0b018cd24f | ||
|
|
2ed22d3d7c | ||
|
|
4ce9561eb7 | ||
|
|
3aeb39b3af | ||
|
|
27e99d4629 | ||
|
|
df70276a54 | ||
|
|
6553a8f5d3 | ||
|
|
4ebbc9ec6e | ||
|
|
4208633556 | ||
|
|
fc43fbe798 | ||
|
|
b5bb9105d4 | ||
|
|
b6ebd6e5f8 | ||
|
|
22216491b6 | ||
|
|
44ca66259c | ||
|
|
be3cae36e2 | ||
|
|
35ea30626f | ||
|
|
4bcae5cffb | ||
|
|
76458db8ab | ||
|
|
5b41e190d3 | ||
|
|
43ac9a054c | ||
|
|
ac485a32cc | ||
|
|
e10908a095 | ||
|
|
78b8908ac8 | ||
|
|
3c54cb84a8 | ||
|
|
8ed808c591 | ||
|
|
7a2dde7448 | ||
|
|
65451fc63e | ||
|
|
5d108a46d3 | ||
|
|
f9567c2d46 | ||
|
|
da917e6012 | ||
|
|
335a906674 | ||
|
|
a50a636d59 | ||
|
|
2dd3f776e6 | ||
|
|
40f6aa0ccd | ||
|
|
4da9e024e0 | ||
|
|
c20bba51f5 | ||
|
|
0a62a2095b | ||
|
|
5677995185 | ||
|
|
ec4e5e7d1d | ||
|
|
1df5265b1a | ||
|
|
fb8a4684dc | ||
|
|
0b609e570d | ||
|
|
f91f6bdc17 | ||
|
|
57590f3b57 | ||
|
|
c18f9ea154 | ||
|
|
441875d9b4 | ||
|
|
eddf9075bb | ||
|
|
9eac8f8a8e | ||
|
|
515260c43f | ||
|
|
118de0e80b | ||
|
|
19ce896fdc | ||
|
|
4a41ea5d8b | ||
|
|
880e1206ce | ||
|
|
1e6d9f9550 | ||
|
|
ff0faf425f | ||
|
|
1fbf5d6552 | ||
|
|
db41e817c3 | ||
|
|
1296755bc5 | ||
|
|
d410f20864 | ||
|
|
61d0a3b79a | ||
|
|
b24319b649 | ||
|
|
3c0fb24548 | ||
|
|
2fcbed0381 | ||
|
|
7444347e0c | ||
|
|
725ce042de | ||
|
|
3b67de5387 | ||
|
|
9b53a026ff | ||
|
|
9ea7dbf3aa | ||
|
|
55622911ac | ||
|
|
92f78ad08c | ||
|
|
f690dbaab2 | ||
|
|
210efe763d | ||
|
|
f23498afa0 | ||
|
|
a80a5d928f | ||
|
|
b733bb5516 | ||
|
|
5046754534 | ||
|
|
f557f7e780 | ||
|
|
18feb2d690 | ||
|
|
af59f2fe9f | ||
|
|
5e1bb54d5e | ||
|
|
33fa516aad | ||
|
|
d2c1cf513d | ||
|
|
f81bec8403 | ||
|
|
cce956ac15 | ||
|
|
0d1174c8dd | ||
|
|
e0258dc2fa | ||
|
|
310a70838b | ||
|
|
94d7f809d2 | ||
|
|
e1d1bc2684 | ||
|
|
a9e3bb3eee | ||
|
|
d184851e3b | ||
|
|
c9b785ccf3 | ||
|
|
4c5ae8c718 | ||
|
|
8a7f7bc708 | ||
|
|
3d44d10048 | ||
|
|
97d880ea27 | ||
|
|
6c53056edd | ||
|
|
a6fd2ebd16 | ||
|
|
b509176572 | ||
|
|
17f2bcf7a8 | ||
|
|
c471a83821 | ||
|
|
51b0a2a545 | ||
|
|
baded2af1e | ||
|
|
2b21426355 | ||
|
|
8edc938426 | ||
|
|
fa919bee11 | ||
|
|
50f1e611c3 | ||
|
|
4c3cf28012 | ||
|
|
89fcc67222 | ||
|
|
33c9ce67df | ||
|
|
c6dadfd83e | ||
|
|
e707a8b5c7 | ||
|
|
5c5364974a | ||
|
|
9d3e3e8dde | ||
|
|
e065ba749f | ||
|
|
2dd8e3160c | ||
|
|
6aeecfe3ac | ||
|
|
334e29d69b | ||
|
|
382f89ace0 | ||
|
|
32c7cc5060 | ||
|
|
c13151d69e | ||
|
|
07c4ab03b5 | ||
|
|
cf3f2affa5 | ||
|
|
401832ad43 | ||
|
|
6a6f48d2fc | ||
|
|
8a6c90d124 | ||
|
|
34acecbcb0 | ||
|
|
4474212b7d | ||
|
|
1187b641d4 | ||
|
|
ef8cd569e4 | ||
|
|
5ef06bfc95 | ||
|
|
2b59addb08 | ||
|
|
ee750620f2 | ||
|
|
acc3b1a0d2 | ||
|
|
4372747014 | ||
|
|
ee531209aa | ||
|
|
ee0bbce3e2 | ||
|
|
7eccf99f92 | ||
|
|
5044a98bb7 | ||
|
|
72165812bf | ||
|
|
f9c1be8517 | ||
|
|
71ce23ef21 | ||
|
|
3e6041cbd8 | ||
|
|
0b9e158b55 | ||
|
|
688ced3fc3 | ||
|
|
16e0382a8b | ||
|
|
91c9cd5725 | ||
|
|
7f3e602bb3 | ||
|
|
5e9d41ea5c | ||
|
|
8bdb93d813 | ||
|
|
690e6a3225 | ||
|
|
111d9bddca | ||
|
|
7645b3e736 | ||
|
|
ac21074db6 | ||
|
|
496ae025d8 | ||
|
|
ac5a196746 | ||
|
|
aa99588001 | ||
|
|
163df77e8a | ||
|
|
21509f35e5 | ||
|
|
7bf59aa259 | ||
|
|
4aa377e486 | ||
|
|
feb716039c | ||
|
|
448d2a6069 | ||
|
|
c31a4aa52a | ||
|
|
73ac29ef3b | ||
|
|
3cd73f13ab | ||
|
|
95ccbaec3e | ||
|
|
d52ce481f9 | ||
|
|
573e1cf038 | ||
|
|
5162604cfd | ||
|
|
e38053682d | ||
|
|
018ec9e4ed | ||
|
|
f9e5c6cc28 | ||
|
|
6bb64e92d9 | ||
|
|
7962c329c7 | ||
|
|
dd6bd2093d | ||
|
|
25d998a41c | ||
|
|
3cfb03dd49 | ||
|
|
386b9cc48b | ||
|
|
006b679386 | ||
|
|
d47fb3cbc6 | ||
|
|
26f71cff08 | ||
|
|
ae8f95e19c | ||
|
|
4c49daf510 | ||
|
|
8d2528e521 | ||
|
|
4895322397 | ||
|
|
a8a4d435fc | ||
|
|
1c0335feb4 | ||
|
|
8498578425 | ||
|
|
326e161505 | ||
|
|
e96e6b4a89 | ||
|
|
853ea38058 | ||
|
|
4e127f8cdc | ||
|
|
51ada19d42 | ||
|
|
86f3741302 | ||
|
|
bb50b150c0 | ||
|
|
a092354fee | ||
|
|
2ffbb79358 | ||
|
|
661b5d1b77 | ||
|
|
47d2e46b72 | ||
|
|
414bc78aaf | ||
|
|
9fd4a16df3 | ||
|
|
0e9e8bf653 | ||
|
|
c14b9fa0be | ||
|
|
8477c7ce95 | ||
|
|
3e48eae749 | ||
|
|
5764b72f1e | ||
|
|
bff86c327a | ||
|
|
e60915443a | ||
|
|
c86c768960 | ||
|
|
a6a84f3df5 | ||
|
|
0a231b80d0 | ||
|
|
5272c3dab9 | ||
|
|
256262ec2e | ||
|
|
4508c292eb | ||
|
|
f4a3838844 | ||
|
|
6e07cacae2 | ||
|
|
191f34905e | ||
|
|
f876fc24f2 | ||
|
|
05f1459f8d | ||
|
|
78a7bfb4c4 | ||
|
|
9078c99e25 | ||
|
|
a044642636 | ||
|
|
0f18c0a597 | ||
|
|
86836bf756 | ||
|
|
70f876fd4a | ||
|
|
3826346fdf | ||
|
|
79b998da1b | ||
|
|
839a51bb3c | ||
|
|
f7c7c0cc1e | ||
|
|
61e253ca2c | ||
|
|
ab16543e65 | ||
|
|
c095ad4d29 | ||
|
|
af63bab7bd | ||
|
|
80ac8baca5 | ||
|
|
bde658b981 | ||
|
|
4380b56a30 | ||
|
|
54364565e2 | ||
|
|
ee4a06b6de | ||
|
|
9715a92f36 | ||
|
|
2caeb4090a | ||
|
|
983e65ee61 | ||
|
|
ec37cfe68f | ||
|
|
db66d81bd0 | ||
|
|
5b0fdbcb2c | ||
|
|
88955e0b95 | ||
|
|
aee7df608f | ||
|
|
6d8fa81141 | ||
|
|
d67d6645fe | ||
|
|
6329354243 | ||
|
|
3ef5226e11 | ||
|
|
eebf49d7e0 | ||
|
|
04903a09cd | ||
|
|
44b5f8400e | ||
|
|
77e6b58381 | ||
|
|
f6e5e826fd | ||
|
|
886400bcbc | ||
|
|
53258ee834 | ||
|
|
e8d197fb32 | ||
|
|
195b6e7af1 | ||
|
|
6f08d1e934 | ||
|
|
de89ad8d9a | ||
|
|
a5657e61c0 | ||
|
|
20e8658da8 | ||
|
|
caf9f0238f | ||
|
|
f18f94ea6d | ||
|
|
bbb517237f | ||
|
|
0bf6686494 | ||
|
|
9f04fb1e0f | ||
|
|
542fd5b233 | ||
|
|
26bf391937 | ||
|
|
20b653391c | ||
|
|
a9aab4e4d5 | ||
|
|
b25f820288 | ||
|
|
a6205eff1b | ||
|
|
af70639eb3 | ||
|
|
bd5015efeb | ||
|
|
1c59a88a38 | ||
|
|
8fef0a5d8b | ||
|
|
4eed4cb1d9 | ||
|
|
eff37d6764 | ||
|
|
a22024f640 | ||
|
|
c3bef7e46b | ||
|
|
0703808009 | ||
|
|
23ae478e47 | ||
|
|
6ecb232685 | ||
|
|
e4785f0654 | ||
|
|
fc2c311624 | ||
|
|
0d15b3b6be | ||
|
|
689631cde2 | ||
|
|
326b1f5bff | ||
|
|
0a7b445661 | ||
|
|
62cbb95000 | ||
|
|
2b11d4a2a8 | ||
|
|
2cc3bf55c7 | ||
|
|
76880460c5 | ||
|
|
42fb9563a7 | ||
|
|
e088f5dcbe | ||
|
|
024dc3155d | ||
|
|
0948ab1035 | ||
|
|
19e2f5eb4f | ||
|
|
935cdef391 | ||
|
|
7e4f4b5303 | ||
|
|
c053b55759 | ||
|
|
7fa56cfc7d | ||
|
|
c15fd2ce73 | ||
|
|
6c90f42da0 | ||
|
|
72e5224e39 | ||
|
|
b602121cd3 | ||
|
|
211ba463d0 | ||
|
|
b45eb0b91d | ||
|
|
57b2a20c56 | ||
|
|
59622a768b | ||
|
|
1cace28760 | ||
|
|
eb32b03b48 | ||
|
|
04d00c808d | ||
|
|
73a65718ef | ||
|
|
e15baef8f9 | ||
|
|
7225635f08 | ||
|
|
ecc5757af6 | ||
|
|
6a9b1b53b9 | ||
|
|
a3663703e4 | ||
|
|
3aed3a0df4 | ||
|
|
569a61841c | ||
|
|
8b2fc07519 | ||
|
|
bf145c8697 | ||
|
|
0c3606820c | ||
|
|
3df86fc1c4 | ||
|
|
d01cbe1541 | ||
|
|
89a763dff7 | ||
|
|
0c42a51cb5 | ||
|
|
f514e5a5c9 | ||
|
|
b3aff2b353 | ||
|
|
bcb92bfd49 | ||
|
|
d9dea0ea38 | ||
|
|
d2eed4a1c4 | ||
|
|
c7e547b5fa | ||
|
|
eadd0988ba | ||
|
|
78bc83f38a | ||
|
|
84d9c6cdf0 | ||
|
|
0769545a92 | ||
|
|
118eaa55e3 | ||
|
|
ef1304ce5e | ||
|
|
51d3a9d090 | ||
|
|
7d0eced55a | ||
|
|
e81c5bbb6e |
6
.env
@@ -1,3 +1,5 @@
|
||||
COCO_SERVER_URL=https://coco.infini.cloud #http://localhost:9000
|
||||
COCO_SERVER_URL=http://localhost:9000 #https://coco.infini.cloud #http://localhost:9000
|
||||
|
||||
COCO_WEBSOCKET_URL=wss://coco.infini.cloud/ws #ws://localhost:9000/ws
|
||||
COCO_WEBSOCKET_URL=ws://localhost:9000/ws #wss://coco.infini.cloud/ws #ws://localhost:9000/ws
|
||||
|
||||
#TAURI_DEV_HOST=0.0.0.0
|
||||
18
.github/workflows/enforce-no-dep-pizza-engine.yml
vendored
Normal file
@@ -0,0 +1,18 @@
|
||||
name: Enforce no dependency pizza-engine
|
||||
|
||||
on:
|
||||
pull_request:
|
||||
|
||||
jobs:
|
||||
main:
|
||||
runs-on: ubuntu-latest
|
||||
|
||||
steps:
|
||||
- name: Checkout
|
||||
uses: actions/checkout@v4
|
||||
|
||||
- name:
|
||||
working-directory: ./src-tauri
|
||||
run: |
|
||||
# if cargo remove pizza-engine succeeds, then it is in our dependency list, fail the CI pipeline.
|
||||
if cargo remove pizza-engine; then exit 1; fi
|
||||
82
.github/workflows/release.yml
vendored
@@ -9,10 +9,16 @@ on:
|
||||
jobs:
|
||||
create-release:
|
||||
runs-on: ubuntu-latest
|
||||
|
||||
outputs:
|
||||
APP_VERSION: ${{ steps.get-version.outputs.APP_VERSION }}
|
||||
RELEASE_BODY: ${{ steps.get-changelog.outputs.RELEASE_BODY }}
|
||||
|
||||
steps:
|
||||
- uses: actions/checkout@v4
|
||||
with:
|
||||
fetch-depth: 0
|
||||
|
||||
- name: Set output
|
||||
id: vars
|
||||
run: echo "tag=${GITHUB_REF#refs/*/}" >> $GITHUB_OUTPUT
|
||||
@@ -22,11 +28,28 @@ jobs:
|
||||
with:
|
||||
node-version: 20
|
||||
|
||||
- name: Get build version
|
||||
shell: bash
|
||||
id: get-version
|
||||
run: |
|
||||
PACKAGE_VERSION=$(jq -r '.version' package.json)
|
||||
CARGO_VERSION=$(grep -m 1 '^version =' src-tauri/Cargo.toml | sed -E 's/.*"([^"]+)".*/\1/')
|
||||
if [ "$PACKAGE_VERSION" != "$CARGO_VERSION" ]; then
|
||||
echo "::error::Version mismatch!"
|
||||
else
|
||||
echo "Version match: $PACKAGE_VERSION"
|
||||
fi
|
||||
echo "APP_VERSION=$PACKAGE_VERSION" >> $GITHUB_OUTPUT
|
||||
|
||||
- name: Generate changelog
|
||||
id: create_release
|
||||
run: npx changelogithub --draft --name ${{ steps.vars.outputs.tag }}
|
||||
id: get-changelog
|
||||
run: |
|
||||
CHANGELOG_BODY=$(npx changelogithub --draft --name ${{ steps.vars.outputs.tag }})
|
||||
echo "RELEASE_BODY<<EOF" >> $GITHUB_OUTPUT
|
||||
echo "$CHANGELOG_BODY" >> $GITHUB_OUTPUT
|
||||
echo "EOF" >> $GITHUB_OUTPUT
|
||||
env:
|
||||
GITHUB_TOKEN: ${{ secrets.RELEASE_TOKEN }}
|
||||
GITHUB_TOKEN: ${{ secrets.GITHUB_TOKEN }}
|
||||
|
||||
build-app:
|
||||
needs: create-release
|
||||
@@ -50,11 +73,26 @@ jobs:
|
||||
|
||||
- platform: "ubuntu-22.04"
|
||||
target: "x86_64-unknown-linux-gnu"
|
||||
- platform: "ubuntu-22.04-arm"
|
||||
target: "aarch64-unknown-linux-gnu"
|
||||
env:
|
||||
APP_VERSION: ${{ needs.create-release.outputs.APP_VERSION }}
|
||||
RELEASE_BODY: ${{ needs.create-release.outputs.RELEASE_BODY }}
|
||||
|
||||
runs-on: ${{ matrix.platform }}
|
||||
steps:
|
||||
- name: Checkout repository
|
||||
uses: actions/checkout@v4
|
||||
|
||||
- name: Checkout dependency repository
|
||||
uses: actions/checkout@v4
|
||||
with:
|
||||
repository: 'infinilabs/pizza'
|
||||
ssh-key: ${{ secrets.SSH_PRIVATE_KEY }}
|
||||
submodules: recursive
|
||||
ref: main
|
||||
path: pizza
|
||||
|
||||
- name: Setup node
|
||||
uses: actions/setup-node@v4
|
||||
with:
|
||||
@@ -63,17 +101,31 @@ jobs:
|
||||
with:
|
||||
version: latest
|
||||
|
||||
- name: Install rust target
|
||||
run: rustup target add ${{ matrix.target }}
|
||||
|
||||
- name: Install dependencies (ubuntu only)
|
||||
if: matrix.platform == 'ubuntu-22.04'
|
||||
if: startsWith(matrix.platform, 'ubuntu-22.04')
|
||||
run: |
|
||||
sudo apt-get update
|
||||
sudo apt-get install -y libwebkit2gtk-4.1-dev libappindicator3-dev librsvg2-dev patchelf
|
||||
sudo apt-get install -y libwebkit2gtk-4.1-dev libappindicator3-dev librsvg2-dev patchelf xdg-utils
|
||||
|
||||
- name: Install Rust stable
|
||||
run: rustup toolchain install stable
|
||||
- name: Add Rust build target at ${{ matrix.platform}} for ${{ matrix.target }}
|
||||
working-directory: src-tauri
|
||||
shell: bash
|
||||
run: |
|
||||
rustup target add ${{ matrix.target }} || true
|
||||
|
||||
- name: Add pizza engine as a dependency
|
||||
working-directory: src-tauri
|
||||
shell: bash
|
||||
run: |
|
||||
BUILD_ARGS="--target ${{ matrix.target }}"
|
||||
if [[ "${{matrix.target }}" != "i686-pc-windows-msvc" ]]; then
|
||||
echo "Adding pizza engine as a dependency for ${{matrix.platform }}-${{matrix.target }}"
|
||||
( cargo add --path ../pizza/lib/engine --features query_string_parser,persistence )
|
||||
BUILD_ARGS+=" --features use_pizza_engine"
|
||||
else
|
||||
echo "Skipping pizza engine dependency for ${{matrix.platform }}-${{matrix.target }}"
|
||||
fi
|
||||
echo "BUILD_ARGS=${BUILD_ARGS}" >> $GITHUB_ENV
|
||||
|
||||
- name: Rust cache
|
||||
uses: swatinem/rust-cache@v2
|
||||
@@ -88,8 +140,8 @@ jobs:
|
||||
|
||||
- name: Install app dependencies and build web
|
||||
run: pnpm install --frozen-lockfile
|
||||
|
||||
- name: Build the app
|
||||
|
||||
- name: Build the coco at ${{ matrix.platform}} for ${{ matrix.target }} @ ${{ env.APP_VERSION }}
|
||||
uses: tauri-apps/tauri-action@v0
|
||||
env:
|
||||
CI: false
|
||||
@@ -105,8 +157,8 @@ jobs:
|
||||
APPLE_TEAM_ID: ${{ secrets.APPLE_TEAM_ID }}
|
||||
with:
|
||||
tagName: ${{ github.ref_name }}
|
||||
releaseName: Coco ${{ needs.create-release.outputs.APP_VERSION }}
|
||||
releaseBody: ""
|
||||
releaseName: Coco ${{ env.APP_VERSION }}
|
||||
releaseBody: "${{ env.RELEASE_BODY }}"
|
||||
releaseDraft: true
|
||||
prerelease: false
|
||||
args: --target ${{ matrix.target }}
|
||||
args: ${{ env.BUILD_ARGS }}
|
||||
|
||||
3
.gitignore
vendored
@@ -11,6 +11,8 @@ node_modules
|
||||
dist
|
||||
dist-ssr
|
||||
*.local
|
||||
out
|
||||
src/components/web
|
||||
|
||||
# Editor directories and files
|
||||
# .vscode/*
|
||||
@@ -23,3 +25,4 @@ dist-ssr
|
||||
*.njsproj
|
||||
*.sln
|
||||
*.sw?
|
||||
.env
|
||||
|
||||
5
.vscode/settings.json
vendored
@@ -7,13 +7,17 @@
|
||||
"changelogithub",
|
||||
"clsx",
|
||||
"codegen",
|
||||
"dataurl",
|
||||
"dtolnay",
|
||||
"dyld",
|
||||
"elif",
|
||||
"errmsg",
|
||||
"fullscreen",
|
||||
"fulltext",
|
||||
"headlessui",
|
||||
"Icdbb",
|
||||
"icns",
|
||||
"iconfont",
|
||||
"INFINI",
|
||||
"infinilabs",
|
||||
"inputbox",
|
||||
@@ -56,6 +60,7 @@
|
||||
"uuidv",
|
||||
"VITE",
|
||||
"walkdir",
|
||||
"wavesurfer",
|
||||
"webviews",
|
||||
"xzvf",
|
||||
"yuque",
|
||||
|
||||
7
Makefile
@@ -76,3 +76,10 @@ clean-rebuild:
|
||||
@echo "Cleaning up and rebuilding..."
|
||||
rm -rf node_modules
|
||||
$(MAKE) dev-build
|
||||
|
||||
add-dep-pizza-engine:
|
||||
cd src-tauri && cargo add --git ssh://git@github.com/infinilabs/pizza.git pizza-engine --features query_string_parser,persistence
|
||||
|
||||
dev-build-with-pizza: add-dep-pizza-engine
|
||||
@echo "Starting desktop development with Pizza Engine pulled in..."
|
||||
RUST_BACKTRACE=1 pnpm tauri dev --features use_pizza_engine
|
||||
89
README.md
@@ -1,7 +1,15 @@
|
||||
# Coco AI - Connect & Collaborate
|
||||
|
||||
<div align="center">
|
||||
|
||||
**Tagline**: _"Coco AI - search, connect, collaborate – all in one place."_
|
||||
|
||||
Visit our website: [https://coco.rs](https://coco.rs)
|
||||
|
||||
[](LICENSE) [](https://tauri.app/) [](https://react.dev/) [](https://www.typescriptlang.org/) [](https://www.rust-lang.org/) [](https://nodejs.org/) [](https://github.com/infinilabs/coco-app/pulls) [](https://github.com/infinilabs/coco-app/releases) [](https://github.com/infinilabs/coco-app/actions) [](https://discord.com/invite/4tKTMkkvVX)
|
||||
|
||||
</div>
|
||||
|
||||
Coco AI is a unified search platform that connects all your enterprise applications and data—Google Workspace, Dropbox,
|
||||
Confluent Wiki, GitHub, and more—into a single, powerful search interface. This repository contains the **Coco App**,
|
||||
built for both **desktop and mobile**. The app allows users to search and interact with their enterprise data across
|
||||
@@ -12,16 +20,15 @@ and internal resources. Coco enhances collaboration by making information instan
|
||||
insights based on your enterprise's specific data.
|
||||
|
||||
> **Note**: Backend services, including data indexing and search functionality, are handled in a
|
||||
> separate [repository](https://github.com/infinilabs/coco-server).
|
||||
separate [repository](https://github.com/infinilabs/coco-server).
|
||||
|
||||
## Vision
|
||||

|
||||
|
||||
At Coco AI, we aim to streamline workplace collaboration by centralizing access to enterprise data. The Coco
|
||||
App
|
||||
provides a seamless, cross-platform experience, enabling teams to easily search, connect, and collaborate within their
|
||||
workspace.
|
||||
## 🚀 Vision
|
||||
|
||||
## Use Cases
|
||||
At Coco AI, we aim to streamline workplace collaboration by centralizing access to enterprise data. The Coco App provides a seamless, cross-platform experience, enabling teams to easily search, connect, and collaborate within their workspace.
|
||||
|
||||
## 💡 Use Cases
|
||||
|
||||
- **Unified Search Across Platforms**: Coco integrates with all your enterprise apps, letting you search documents,
|
||||
conversations, and files across Google Workspace, Dropbox, GitHub, etc.
|
||||
@@ -32,37 +39,73 @@ workspace.
|
||||
- **Simplified Data Access**: By removing the friction between various tools, Coco enhances your workflow and increases
|
||||
productivity.
|
||||
|
||||
## Getting Started
|
||||
## ✨ Key Features
|
||||
|
||||
### Initial Setup
|
||||
- 🔍 **Unified Search**: One-stop enterprise search with multi-platform integration
|
||||
- Supports major collaboration platforms: Google Workspace, Dropbox, Confluence Wiki, GitHub, etc.
|
||||
- Real-time search across documents, conversations, and files
|
||||
- Smart search intent understanding with relevance ranking
|
||||
- Cross-platform data correlation and context display
|
||||
- 🤖 **AI-Powered Chat**: Team-specific ChatGPT-like assistant trained on your enterprise data
|
||||
- 🌐 **Cross-Platform**: Available for Windows, macOS, Linux and Web
|
||||
- 🔒 **Security-First**: Support for private deployment and data sovereignty
|
||||
- ⚡ **High Performance**: Built with Rust and Tauri 2.0
|
||||
- 🎨 **Modern UI**: Sleek interface designed for productivity
|
||||
|
||||
**This version of pnpm requires at least Node.js v18.12**
|
||||
## 🛠️ Technology Stack
|
||||
|
||||
To set up the Coco App for development:
|
||||
- **Frontend**: React + TypeScript
|
||||
- **Desktop Framework**: Tauri 2.0
|
||||
- **Styling**: Tailwind CSS
|
||||
- **State Management**: Zustand
|
||||
- **Build Tool**: Vite
|
||||
|
||||
## 🚀 Getting Started
|
||||
|
||||
### Prerequisites
|
||||
|
||||
- Node.js >= 18.12
|
||||
- Rust (latest stable)
|
||||
- pnpm (package manager)
|
||||
|
||||
### Development Setup
|
||||
|
||||
```bash
|
||||
cd coco-app
|
||||
# Install pnpm
|
||||
npm install -g pnpm
|
||||
|
||||
# Install dependencies
|
||||
pnpm install
|
||||
|
||||
# Start development server
|
||||
pnpm tauri dev
|
||||
```
|
||||
|
||||
#### Desktop Development:
|
||||
|
||||
To start desktop development, run:
|
||||
### Production Build
|
||||
|
||||
```bash
|
||||
pnpm tauri dev
|
||||
pnpm tauri build
|
||||
```
|
||||
|
||||
## Documentation
|
||||
## 📚 Documentation
|
||||
|
||||
For full documentation on Coco AI, please visit the [Coco AI Documentation](https://docs.infinilabs.com/coco-app/main/).
|
||||
- [Coco App Documentation](https://docs.infinilabs.com/coco-app/main/)
|
||||
- [Coco Server Documentation](https://docs.infinilabs.com/coco-server/main/)
|
||||
- [Tauri Documentation](https://tauri.app/)
|
||||
|
||||
## License
|
||||
## Contributors
|
||||
|
||||
Coco AI is an open-source project licensed under
|
||||
the [MIT License](https://github.com/infinilabs/coco-app/blob/main/LICENSE).
|
||||
<a href="https://github.com/infinilabs/coco-app/graphs/contributors">
|
||||
<img src="https://contrib.rocks/image?repo=infinilabs/coco-app" />
|
||||
</a>
|
||||
|
||||
This means that you can freely use, modify, and
|
||||
distribute the software for both personal and commercial purposes, including hosting it on your own servers.
|
||||
## 📄 License
|
||||
|
||||
Coco AI is an open-source project licensed under the [MIT License](LICENSE). You can freely use, modify, and
|
||||
distribute the software for both personal and commercial purposes, including hosting it on your own servers.
|
||||
|
||||
---
|
||||
|
||||
<div align="center">
|
||||
Built with ❤️ by <a href="https://infinilabs.com">INFINI Labs</a>
|
||||
</div>
|
||||
|
||||
@@ -7,8 +7,7 @@ type: docs
|
||||
|
||||
Coco AI is a fully open-source, cross-platform unified search and productivity tool that connects and searches across various data sources, including applications, files, Google Drive, Notion, Yuque, Hugo, and more, both local and cloud-based. By integrating with large models like DeepSeek, Coco AI enables intelligent personal knowledge management, emphasizing privacy and supporting private deployment, helping users quickly and intelligently access their information.
|
||||
|
||||
{{% load-img "/img/screenshot/fusion-search-across-datasources.png" "" %}}
|
||||
{{% load-img "/img/screenshot/coco-chat.png" "" %}}
|
||||
{{% load-img "/img/coco-preview.gif" "" %}}
|
||||
|
||||
For more details on Coco Server, visit: [https://docs.infinilabs.com/coco-app/](https://docs.infinilabs.com/coco-app/).
|
||||
|
||||
|
||||
@@ -7,8 +7,7 @@ type: docs
|
||||
|
||||
Coco AI is a fully open-source, cross-platform unified search and productivity tool that connects and searches across various data sources, including applications, files, Google Drive, Notion, Yuque, Hugo, and more, both local and cloud-based. By integrating with large models like DeepSeek, Coco AI enables intelligent personal knowledge management, emphasizing privacy and supporting private deployment, helping users quickly and intelligently access their information.
|
||||
|
||||
{{% load-img "/img/screenshot/fusion-search-across-datasources.png" "" %}}
|
||||
{{% load-img "/img/screenshot/coco-chat.png" "" %}}
|
||||
{{% load-img "/img/coco-preview.gif" "" %}}
|
||||
|
||||
For more details on Coco Server, visit: [https://docs.infinilabs.com/coco-app/](https://docs.infinilabs.com/coco-app/).
|
||||
|
||||
|
||||
@@ -1,21 +1,35 @@
|
||||
---
|
||||
weight: 10
|
||||
title: "Mac OS"
|
||||
title: "macOS"
|
||||
asciinema: true
|
||||
---
|
||||
|
||||
# Mac OS
|
||||
# macOS
|
||||
|
||||
## Download Coco AI
|
||||
|
||||
Goto [https://coco.rs/](https://coco.rs/)
|
||||
Go to [coco.rs](https://coco.rs/) and download the package of your architecture:
|
||||
|
||||
{{% load-img "/img/download-mac-app.png" "" %}}
|
||||
{{% load-img "/img/macos/mac-download-app.png" "" %}}
|
||||
|
||||
It should be placed in your "Downloads" folder:
|
||||
|
||||
{{% load-img "/img/macos/mac-zip-file.png" "" %}}
|
||||
|
||||
## Unzip DMG file
|
||||
|
||||
{{% load-img "/img/unzip-dmg-file.png" "" %}}
|
||||
Unzip the file:
|
||||
|
||||
{{% load-img "/img/macos/mac-unzip-zip-file.png" "" %}}
|
||||
|
||||
You will get a `dmg` file:
|
||||
|
||||
{{% load-img "/img/macos/mac-dmg.png" "" %}}
|
||||
|
||||
## Drag to Application Folder
|
||||
|
||||
{{% load-img "/img/drag-to-application-folder.png" "" %}}
|
||||
Double click the `dmg` file, a window will pop up. Then drag the "Coco-AI" app to
|
||||
your "Applications" folder:
|
||||
|
||||
{{% load-img "/img/macos/drag-to-app-folder.png" "" %}}
|
||||
|
||||
|
||||
40
docs/content.en/docs/getting-started/installation/ubuntu.md
Normal file
@@ -0,0 +1,40 @@
|
||||
---
|
||||
weight: 10
|
||||
title: "Ubuntu"
|
||||
asciinema: true
|
||||
---
|
||||
|
||||
# Ubuntu
|
||||
|
||||
> NOTE: Coco app only works fully under [X11][x11_protocol].
|
||||
>
|
||||
> Don't know if you running X11 or not? take a look at this [question][if_x11]!
|
||||
|
||||
[x11_protocol]: https://en.wikipedia.org/wiki/X_Window_System
|
||||
[if_x11]: https://unix.stackexchange.com/q/202891/498440
|
||||
|
||||
|
||||
## Go to the download page
|
||||
|
||||
Download page: [link](https://coco.rs/#install)
|
||||
|
||||
## Download the package
|
||||
|
||||
Download the package of your architecture, it should be put in your `Downloads` directory
|
||||
and look like this:
|
||||
|
||||
```sh
|
||||
$ cd ~/Downloads
|
||||
$ ls
|
||||
Coco-AI-x.y.z-bbbb-deb-linux-amd64.zip
|
||||
# or Coco-AI-x.y.z-bbbb-deb-linux-arm64.zip depending on your architecture
|
||||
```
|
||||
|
||||
## Install it
|
||||
|
||||
Unzip and install it
|
||||
|
||||
```
|
||||
$ unzip Coco-AI-x.y.z-bbbb-deb-linux-amd64.zip
|
||||
$ sudo dpkg -i Coco-AI-x.y.z-bbbb-deb-linux-amd64.deb
|
||||
```
|
||||
@@ -9,14 +9,218 @@ Information about release notes of Coco Server is provided here.
|
||||
|
||||
## Latest (In development)
|
||||
|
||||
### ❌ Breaking changes
|
||||
### 🚀 Features
|
||||
### 🐛 Bug fix
|
||||
### ✈️ Improvements
|
||||
|
||||
## 0.6.0 (2025-06-29)
|
||||
|
||||
### ❌ Breaking changes
|
||||
|
||||
### 🚀 Features
|
||||
|
||||
- feat: support `Tab` and `Enter` for delete dialog buttons #700
|
||||
- feat: add check for updates #701
|
||||
- feat: impl extension store #699
|
||||
- feat: support back navigation via delete key #717
|
||||
|
||||
### 🐛 Bug fix
|
||||
|
||||
- fix: quick ai state synchronous #693
|
||||
- fix: toggle extension should register/unregister hotkey #691
|
||||
- fix: take coco server back on refresh #696
|
||||
- fix: some input fields couldn’t accept spaces #709
|
||||
- fix: context menu search not working #713
|
||||
- fix: open extension store display #724
|
||||
|
||||
### ✈️ Improvements
|
||||
|
||||
- refactor: use author/ext_id as extension unique identifier #643
|
||||
- refactor: refactoring search api #679
|
||||
- chore: continue to chat page display #690
|
||||
- chore: improve server list selection with enter key #692
|
||||
- chore: add message for latest version check #703
|
||||
- chore: log command execution results #718
|
||||
- chore: adjust styles and add button reindex #719
|
||||
|
||||
## 0.5.0 (2025-06-13)
|
||||
|
||||
### ❌ Breaking changes
|
||||
|
||||
### 🚀 Features
|
||||
|
||||
- feat: check or enter to close the list of assistants #469
|
||||
- feat: add dimness settings for pinned window #470
|
||||
- feat: supports Shift + Enter input box line feeds #472
|
||||
- feat: support for snapshot version updates #480
|
||||
- feat: history list add put away button #482
|
||||
- feat: the chat input box supports multi-line input #490
|
||||
- feat: add `~/Applications` to the search path #493
|
||||
- feat: the chat content has added a button to return to the bottom #495
|
||||
- feat: the search input box supports multi-line input #501
|
||||
- feat: websocket support self-signed TLS #504
|
||||
- feat: add option to allow self-signed certificates #509
|
||||
- feat: add AI summary component #518
|
||||
- feat: dynamic log level via env var COCO_LOG #535
|
||||
- feat: add quick AI access to search mode #556
|
||||
- feat: rerank search results #561
|
||||
- feat: ai overview support is enabled with shortcut #597
|
||||
- feat: add key monitoring during reset #615
|
||||
- feat: calculator extension add description #623
|
||||
- feat: support right-click actions after text selection #624
|
||||
- feat: add ai overview minimum number of search results configuration #625
|
||||
- feat: add internationalized translations of AI-related extensions #632
|
||||
- feat: context menu support for secondary pages #680
|
||||
|
||||
### 🐛 Bug fix
|
||||
|
||||
- fix: solve the problem of modifying the assistant in the chat #476
|
||||
- fix: several issues around search #502
|
||||
- fix: fixed the newly created session has no title when it is deleted #511
|
||||
- fix: loading chat history for potential empty attachments
|
||||
- fix: datasource & MCP list synchronization update #521
|
||||
- fix: app icon & category icon #529
|
||||
- fix: show only enabled datasource & MCP list
|
||||
- fix: server image loading failure #534
|
||||
- fix: panic when fetching app metadata on Windows #538
|
||||
- fix: service switching error #539
|
||||
- fix: switch server assistant and session unchanged #540
|
||||
- fix: history list height #550
|
||||
- fix: secondary page cannot be searched #551
|
||||
- fix: the scroll button is not displayed by default #552
|
||||
- fix: suggestion list position #553
|
||||
- fix: independent chat window has no data #554
|
||||
- fix: resolved navigation error on continue chat action #558
|
||||
- fix: make extension search source respect parameter datasource #576
|
||||
- fix: fixed issue with incorrect login status #600
|
||||
- fix: new chat assistant id not found #603
|
||||
- fix: resolve regex error on older macOS versions #605
|
||||
- fix: fix chat log update and sorting issues #612
|
||||
- fix: resolved an issue where number keys were not working on the web #616
|
||||
- fix: do not panic when the datasource specified does not exist #618
|
||||
- fix: fixed modifier keys not working with continue chat #619
|
||||
- fix: invalid DSL error if input contains multiple lines #620
|
||||
- fix: fix ai overview hidden height before message #622
|
||||
- fix: tab key hides window in chat mode #641
|
||||
- fix: arrow keys still navigated search when menu opened with Cmd+K #642
|
||||
- fix: input lost when reopening dialog after search #644
|
||||
- fix: web page unmount event #645
|
||||
- fix: fix the problem of local path not opening #650
|
||||
- fix: number keys not following settings #661
|
||||
- fix: fix problem with up and down key indexing #676
|
||||
- fix: arrow inserting escape sequences #683
|
||||
|
||||
### ✈️ Improvements
|
||||
|
||||
- chore: adjust list error message #475
|
||||
- chore: refine wording on search failure
|
||||
- chore:search and MCP show hidden logic #494
|
||||
- chore: greetings show hidden logic #496
|
||||
- refactor: fetch app list in settings in real time #498
|
||||
- chore: UpdateApp component loading location #499
|
||||
- chore: add clear monitoring & cache calculation to optimize performance #500
|
||||
- refactor: optimizing the code #505
|
||||
- refactor: optimized the modification operation of the numeric input box #508
|
||||
- style: modify the style of the search input box #513
|
||||
- style: chat input icons show #515
|
||||
- refactor: refactoring icon component #514
|
||||
- refactor: optimizing list styles in markdown content #520
|
||||
- feat: add a component for text reading aloud #522
|
||||
- style: history component styles #528
|
||||
- style: search error styles #533
|
||||
- chore: skip register server that not logged in #536
|
||||
- refactor: service info related components #537
|
||||
- chore: chat content can be copied #539
|
||||
- refactor: refactoring search error #541
|
||||
- chore: add assistant count #542
|
||||
- chore: add global login judgment #544
|
||||
- chore: mark server offline on user logout #546
|
||||
- chore: logout update server profile #549
|
||||
- chore: assistant keyboard events and mouse events #559
|
||||
- chore: web component start page config #560
|
||||
- chore: assistant chat placeholder & refactor input box components #566
|
||||
- refactor: input box related components #568
|
||||
- chore: mark unavailable server to offline on refresh info #569
|
||||
- chore: only show available servers in chat #570
|
||||
- refactor: search result related components #571
|
||||
- chore: initialize current assistant from history #606
|
||||
- chore: add onContextMenu event #629
|
||||
- chore: more logs for the setup process #634
|
||||
- chore: copy supports http protocol #639
|
||||
- refactor: use author/ext_id as extension unique identifier #643
|
||||
- chore: add special character filtering #668
|
||||
|
||||
## 0.4.0 (2025-04-27)
|
||||
|
||||
### Breaking changes
|
||||
|
||||
### Features
|
||||
|
||||
- feat: history support for searching, renaming and deleting #322
|
||||
- feat: linux support for application search #330
|
||||
- feat: add shortcuts to most icon buttons #334
|
||||
- feat: add font icon for search list #342
|
||||
- feat: add a border to the main window in Windows 10 #343
|
||||
- feat: mobile terminal adaptation about style #348
|
||||
- feat: service list popup box supports keyboard-only operation #359
|
||||
- feat: networked search data sources support search and keyboard-only operation #367
|
||||
- feat: add application management to the plugin #374
|
||||
- feat: add keyboard-only operation to history list #385
|
||||
- feat: add error notification #386
|
||||
- feat: add support for AI assistant #394
|
||||
- feat: add support for calculator function #399
|
||||
- feat: auto selects the first item after searching #411
|
||||
- feat: web components assistant #422
|
||||
- feat: right-click menu support for search #423
|
||||
- feat: add chat mode launch page #424
|
||||
- feat: add MCP & call LLM tools #430
|
||||
- feat: ai assistant supports search and paging #431
|
||||
- feat: data sources support displaying customized icons #432
|
||||
- feat: add shortcut key conflict hint and reset function #442
|
||||
- feat: updated to include error message #465
|
||||
- feat: support third party extensions #572
|
||||
- feat: support ai overview #572
|
||||
|
||||
### Bug fix
|
||||
|
||||
- fix: fixed the problem of not being able to search in secondary directories #338
|
||||
- fix: active shadow setting #354
|
||||
- fix: chat history was not show up #377
|
||||
- fix: get attachments in chat sessions
|
||||
- fix: filter http query_args and convert only supported values
|
||||
- fix:fixed several search & chat bugs #412
|
||||
- fix: fixed carriage return problem with chinese input method #464
|
||||
|
||||
### Improvements
|
||||
|
||||
- refactor: web components #331
|
||||
- refactor: refactoring login callback, receive access_token from coco-server
|
||||
- chore: adjust web component styles #362
|
||||
- style: modify the style #370
|
||||
- style: search list details display #378
|
||||
- refactor: refactoring api error handling #382
|
||||
- chore: update assistant icon & think mode #397
|
||||
- build: build web components and publish #404
|
||||
|
||||
## 0.3.0 (2025-03-31)
|
||||
|
||||
### Breaking changes
|
||||
|
||||
### Features
|
||||
|
||||
- feat: add web pages components #277
|
||||
- feat: support for customizing some of the preset shortcuts #316
|
||||
- feat: support multi websocket connections #314
|
||||
- feat: add support for embeddable web widget #277
|
||||
|
||||
### Bug fix
|
||||
|
||||
### Improvements
|
||||
|
||||
- refactor: refactor invoke related code #309
|
||||
- refactor: hide apps without icon #312
|
||||
|
||||
## 0.2.1 (2025-03-14)
|
||||
|
||||
### Features
|
||||
@@ -36,10 +240,10 @@ Information about release notes of Coco Server is provided here.
|
||||
### Improvements
|
||||
|
||||
- Refactor: chat components #273
|
||||
- Feat:add endpoint display #282
|
||||
- Feat: add endpoint display #282
|
||||
- Chore: chat window min width & remove input bg #284
|
||||
- Chore: remove selected function & add hide_coco #286
|
||||
- Chore:websocket timeout increased to 2 minutes #289
|
||||
- Chore: websocket timeout increased to 2 minutes #289
|
||||
- Chore: remove chat input border & clear input #295
|
||||
|
||||
## 0.2.0 (2025-03-07)
|
||||
@@ -97,4 +301,4 @@ Information about release notes of Coco Server is provided here.
|
||||
|
||||
### Bug fix
|
||||
|
||||
### Improvements
|
||||
### Improvements
|
||||
BIN
docs/static/img/coco-preview.gif
vendored
Normal file
|
After Width: | Height: | Size: 4.8 MiB |
BIN
docs/static/img/download-mac-app.png
vendored
|
Before Width: | Height: | Size: 155 KiB |
BIN
docs/static/img/drag-to-application-folder.png
vendored
|
Before Width: | Height: | Size: 69 KiB |
BIN
docs/static/img/macos/drag-to-app-folder.png
vendored
Normal file
|
After Width: | Height: | Size: 239 KiB |
BIN
docs/static/img/macos/mac-dmg.png
vendored
Normal file
|
After Width: | Height: | Size: 586 KiB |
BIN
docs/static/img/macos/mac-download-app.png
vendored
Normal file
|
After Width: | Height: | Size: 299 KiB |
BIN
docs/static/img/macos/mac-unzip-zip-file.png
vendored
Normal file
|
After Width: | Height: | Size: 650 KiB |
BIN
docs/static/img/macos/mac-zip-file.png
vendored
Normal file
|
After Width: | Height: | Size: 441 KiB |
BIN
docs/static/img/unzip-dmg-file.png
vendored
|
Before Width: | Height: | Size: 121 KiB |
@@ -7,7 +7,7 @@
|
||||
<title>Coco</title>
|
||||
</head>
|
||||
|
||||
<body>
|
||||
<body class="coco-container">
|
||||
<div id="root"></div>
|
||||
<script type="module" src="/src/main.tsx"></script>
|
||||
</body>
|
||||
|
||||
77
package.json
@@ -1,11 +1,16 @@
|
||||
{
|
||||
"name": "coco",
|
||||
"private": true,
|
||||
"version": "0.2.1",
|
||||
"version": "0.6.0",
|
||||
"type": "module",
|
||||
"scripts": {
|
||||
"dev": "vite",
|
||||
"build": "tsc && vite build",
|
||||
"build:web": "cross-env BUILD_TARGET=web tsc && cross-env BUILD_TARGET=web tsup --format esm",
|
||||
"publish:web": "cd out/search-chat && npm publish",
|
||||
"publish:web:beta": "cd dist/search-chat && npm publish --tag beta",
|
||||
"publish:web:alpha": "cd dist/search-chat && npm publish --tag alpha",
|
||||
"publish:web:rc": "cd dist/search-chat && npm publish --tag rc",
|
||||
"preview": "vite preview",
|
||||
"tauri": "tauri",
|
||||
"release": "release-it",
|
||||
@@ -13,33 +18,40 @@
|
||||
"release-beta": "release-it --preRelease=beta --preReleaseBase=1"
|
||||
},
|
||||
"dependencies": {
|
||||
"@headlessui/react": "^2.2.0",
|
||||
"@tauri-apps/api": "^2.3.0",
|
||||
"@ant-design/icons": "^6.0.0",
|
||||
"@headlessui/react": "^2.2.2",
|
||||
"@tauri-apps/api": "^2.5.0",
|
||||
"@tauri-apps/plugin-autostart": "~2.2.0",
|
||||
"@tauri-apps/plugin-deep-link": "^2.2.0",
|
||||
"@tauri-apps/plugin-dialog": "^2.2.0",
|
||||
"@tauri-apps/plugin-deep-link": "^2.2.1",
|
||||
"@tauri-apps/plugin-dialog": "^2.2.1",
|
||||
"@tauri-apps/plugin-global-shortcut": "~2.0.0",
|
||||
"@tauri-apps/plugin-http": "~2.0.2",
|
||||
"@tauri-apps/plugin-log": "~2.4.0",
|
||||
"@tauri-apps/plugin-opener": "^2.2.7",
|
||||
"@tauri-apps/plugin-os": "^2.2.1",
|
||||
"@tauri-apps/plugin-process": "^2.2.0",
|
||||
"@tauri-apps/plugin-shell": "^2.2.0",
|
||||
"@tauri-apps/plugin-updater": "^2.6.0",
|
||||
"@tauri-apps/plugin-process": "^2.2.1",
|
||||
"@tauri-apps/plugin-shell": "^2.2.1",
|
||||
"@tauri-apps/plugin-updater": "github:infinilabs/tauri-plugin-updater#v2",
|
||||
"@tauri-apps/plugin-websocket": "~2.3.0",
|
||||
"@tauri-apps/plugin-window": "2.0.0-alpha.1",
|
||||
"@wavesurfer/react": "^1.0.11",
|
||||
"ahooks": "^3.8.4",
|
||||
"axios": "^1.9.0",
|
||||
"clsx": "^2.1.1",
|
||||
"dotenv": "^16.4.7",
|
||||
"dayjs": "^1.11.13",
|
||||
"dotenv": "^16.5.0",
|
||||
"filesize": "^10.1.6",
|
||||
"i18next": "^23.16.8",
|
||||
"i18next-browser-languagedetector": "^8.0.4",
|
||||
"i18next-browser-languagedetector": "^8.1.0",
|
||||
"lodash-es": "^4.17.21",
|
||||
"lucide-react": "^0.461.0",
|
||||
"mermaid": "^11.4.1",
|
||||
"nanoid": "^5.1.3",
|
||||
"mdast-util-gfm-autolink-literal": "2.0.0",
|
||||
"mermaid": "^11.6.0",
|
||||
"nanoid": "^5.1.5",
|
||||
"react": "^18.3.1",
|
||||
"react-dom": "^18.3.1",
|
||||
"react-hotkeys-hook": "^4.6.1",
|
||||
"react-i18next": "^15.4.1",
|
||||
"react-hotkeys-hook": "^4.6.2",
|
||||
"react-i18next": "^15.5.1",
|
||||
"react-markdown": "^9.1.0",
|
||||
"react-router-dom": "^6.30.0",
|
||||
"react-window": "^1.8.11",
|
||||
@@ -48,32 +60,39 @@
|
||||
"remark-breaks": "^4.0.0",
|
||||
"remark-gfm": "^4.0.1",
|
||||
"remark-math": "^6.0.0",
|
||||
"tauri-plugin-fs-pro-api": "^2.3.1",
|
||||
"tauri-plugin-macos-permissions-api": "^2.1.1",
|
||||
"tauri-plugin-screenshots-api": "^2.1.0",
|
||||
"tailwind-merge": "^3.3.1",
|
||||
"tauri-plugin-fs-pro-api": "^2.4.0",
|
||||
"tauri-plugin-macos-permissions-api": "^2.3.0",
|
||||
"tauri-plugin-screenshots-api": "^2.2.0",
|
||||
"tauri-plugin-windows-version-api": "^2.0.0",
|
||||
"type-fest": "^4.41.0",
|
||||
"use-debounce": "^10.0.4",
|
||||
"uuid": "^11.1.0",
|
||||
"zustand": "^5.0.3"
|
||||
"wavesurfer.js": "^7.9.5",
|
||||
"zustand": "^5.0.4"
|
||||
},
|
||||
"devDependencies": {
|
||||
"@tauri-apps/cli": "^2.3.1",
|
||||
"@tauri-apps/cli": "^2.5.0",
|
||||
"@types/dom-speech-recognition": "^0.0.4",
|
||||
"@types/lodash-es": "^4.17.12",
|
||||
"@types/markdown-it": "^14.1.2",
|
||||
"@types/node": "^22.13.10",
|
||||
"@types/react": "^18.3.18",
|
||||
"@types/react-dom": "^18.3.5",
|
||||
"@types/react-i18next": "^8.1.0",
|
||||
"@types/node": "^22.15.17",
|
||||
"@types/react": "^18.3.21",
|
||||
"@types/react-dom": "^18.3.7",
|
||||
"@types/react-katex": "^3.0.4",
|
||||
"@types/react-window": "^1.8.8",
|
||||
"@vitejs/plugin-react": "^4.3.4",
|
||||
"@vitejs/plugin-react": "^4.4.1",
|
||||
"autoprefixer": "^10.4.21",
|
||||
"cross-env": "^7.0.3",
|
||||
"immer": "^10.1.1",
|
||||
"postcss": "^8.5.3",
|
||||
"release-it": "^18.1.2",
|
||||
"sass": "^1.87.0",
|
||||
"tailwindcss": "^3.4.17",
|
||||
"tsx": "^4.19.3",
|
||||
"typescript": "^5.8.2",
|
||||
"vite": "^5.4.14"
|
||||
}
|
||||
}
|
||||
"tsup": "^8.4.0",
|
||||
"tsx": "^4.19.4",
|
||||
"typescript": "^5.8.3",
|
||||
"vite": "^5.4.19"
|
||||
},
|
||||
"packageManager": "pnpm@10.11.0+sha512.6540583f41cc5f628eb3d9773ecee802f4f9ef9923cc45b69890fb47991d4b092964694ec3a4f738a420c918a333062c8b925d312f42e4f0c263eb603551f977"
|
||||
}
|
||||
2488
pnpm-lock.yaml
generated
BIN
public/assets/calculator.png
Normal file
|
After Width: | Height: | Size: 845 B |
1
scripts/devWeb.ts
Normal file
@@ -0,0 +1 @@
|
||||
(() => {})();
|
||||
2535
src-tauri/Cargo.lock
generated
@@ -1,6 +1,6 @@
|
||||
[package]
|
||||
name = "coco"
|
||||
version = "0.2.1"
|
||||
version = "0.6.0"
|
||||
description = "Search, connect, collaborate – all in one place."
|
||||
authors = ["INFINI Labs"]
|
||||
edition = "2021"
|
||||
@@ -20,14 +20,36 @@ tauri-build = { version = "2", features = ["default"] }
|
||||
default = ["desktop"]
|
||||
desktop = []
|
||||
cargo-clippy = []
|
||||
# If enabled, code that relies on pizza_engine will be activated.
|
||||
#
|
||||
# Only do this if:
|
||||
# 1. Pizza engine is listed in the `dependencies` section
|
||||
#
|
||||
# ```toml
|
||||
# [dependencies]
|
||||
# pizza-engine = { git = "ssh://git@github.com/infinilabs/pizza.git", features = ["query_string_parser", "persistence"] }
|
||||
# ```
|
||||
#
|
||||
# 2. It is a private repo, you have access to it.
|
||||
#
|
||||
# So, for external contributors, do NOT enable this feature.
|
||||
#
|
||||
# Previously, We listed it in the dependencies and marked it optional, but cargo
|
||||
# would fetch all the dependencies regardless of wheterh they are optional or not,
|
||||
# so we removed it.
|
||||
#
|
||||
# https://github.com/rust-lang/cargo/issues/4544#issuecomment-1906902755
|
||||
use_pizza_engine = []
|
||||
|
||||
[dependencies]
|
||||
pizza-common = { git = "https://github.com/infinilabs/pizza-common", branch = "main" }
|
||||
|
||||
tauri = { version = "2", features = ["protocol-asset", "macos-private-api", "tray-icon", "image-ico", "image-png", "unstable"] }
|
||||
tauri = { version = "2", features = ["protocol-asset", "macos-private-api", "tray-icon", "image-ico", "image-png"] }
|
||||
tauri-plugin-shell = "2"
|
||||
serde = { version = "1", features = ["derive"] }
|
||||
serde_json = "1"
|
||||
# Need `arbitrary_precision` feature to support storing u128
|
||||
# see: https://docs.rs/serde_json/latest/serde_json/struct.Number.html#method.from_u128
|
||||
serde_json = { version = "1", features = ["arbitrary_precision", "preserve_order"] }
|
||||
tauri-plugin-http = "2"
|
||||
tauri-plugin-websocket = "2"
|
||||
tauri-plugin-deep-link = "2.0.0"
|
||||
@@ -35,19 +57,17 @@ tauri-plugin-store = "2.2.0"
|
||||
tauri-plugin-os = "2"
|
||||
tauri-plugin-dialog = "2"
|
||||
tauri-plugin-fs = "2"
|
||||
tauri-plugin-updater = "2"
|
||||
tauri-plugin-process = "2"
|
||||
tauri-plugin-drag = "2"
|
||||
tauri-plugin-macos-permissions = "2"
|
||||
tauri-plugin-fs-pro = "2"
|
||||
tauri-plugin-screenshots = "2"
|
||||
applications = "0.3.0"
|
||||
|
||||
applications = { git = "https://github.com/infinilabs/applications-rs", rev = "7bb507e6b12f73c96f3a52f0578d0246a689f381" }
|
||||
tokio-native-tls = "0.3" # For wss connections
|
||||
tokio = { version = "1", features = ["full"] }
|
||||
tokio-tungstenite = { version = "0.20", features = ["rustls-tls-webpki-roots"] }
|
||||
tokio-tungstenite = { version = "0.20", features = ["native-tls"] }
|
||||
hyper = { version = "0.14", features = ["client"] }
|
||||
reqwest = "0.12.12"
|
||||
reqwest = { version = "0.12", features = ["json", "multipart"] }
|
||||
futures = "0.3.31"
|
||||
ordered-float = { version = "4.6.0", default-features = false }
|
||||
lazy_static = "1.5.0"
|
||||
@@ -60,14 +80,28 @@ hostname = "0.3"
|
||||
plist = "1.7"
|
||||
base64 = "0.13"
|
||||
walkdir = "2"
|
||||
fuzzy_prefix_search = "0.2"
|
||||
log = "0.4"
|
||||
|
||||
strsim = "0.10"
|
||||
futures-util = "0.3.31"
|
||||
url = "2.5.2"
|
||||
http = "1.1.0"
|
||||
tungstenite = "0.24.0"
|
||||
env_logger = "0.11.5"
|
||||
tokio-util = "0.7.14"
|
||||
tauri-plugin-windows-version = "2"
|
||||
meval = "0.2"
|
||||
chinese-number = "0.7"
|
||||
num2words = "1"
|
||||
tauri-plugin-log = "2"
|
||||
chrono = "0.4.41"
|
||||
serde_plain = "1.0.2"
|
||||
derive_more = { version = "2.0.1", features = ["display"] }
|
||||
anyhow = "1.0.98"
|
||||
function_name = "0.3.0"
|
||||
regex = "1.11.1"
|
||||
borrowme = "0.0.15"
|
||||
tauri-plugin-opener = "2"
|
||||
async-recursion = "1.1.1"
|
||||
zip = "4.0.0"
|
||||
url = "2.5.2"
|
||||
|
||||
[target."cfg(target_os = \"macos\")".dependencies]
|
||||
tauri-nspanel = { git = "https://github.com/ahkohd/tauri-nspanel", branch = "v2" }
|
||||
@@ -75,7 +109,6 @@ tauri-nspanel = { git = "https://github.com/ahkohd/tauri-nspanel", branch = "v2"
|
||||
[target."cfg(any(target_os = \"macos\", windows, target_os = \"linux\"))".dependencies]
|
||||
tauri-plugin-single-instance = { version = "2.0.0", features = ["deep-link"] }
|
||||
|
||||
|
||||
[profile.dev]
|
||||
incremental = true # Compile your binary in smaller steps.
|
||||
|
||||
@@ -89,4 +122,7 @@ strip = true # Ensures debug symbols are removed.
|
||||
[target.'cfg(not(any(target_os = "android", target_os = "ios")))'.dependencies]
|
||||
tauri-plugin-autostart = "^2.2"
|
||||
tauri-plugin-global-shortcut = "2"
|
||||
tauri-plugin-updater = "2"
|
||||
tauri-plugin-updater = { git = "https://github.com/infinilabs/plugins-workspace", branch = "v2" }
|
||||
|
||||
[target."cfg(target_os = \"windows\")".dependencies]
|
||||
enigo="0.3"
|
||||
|
||||
|
Before Width: | Height: | Size: 36 KiB |
@@ -2,7 +2,7 @@
|
||||
"$schema": "../gen/schemas/desktop-schema.json",
|
||||
"identifier": "default",
|
||||
"description": "Capability for the main window",
|
||||
"windows": ["main", "chat", "settings"],
|
||||
"windows": ["main", "chat", "settings", "check"],
|
||||
"permissions": [
|
||||
"core:default",
|
||||
"core:event:allow-emit",
|
||||
@@ -29,6 +29,7 @@
|
||||
"core:window:allow-set-focus",
|
||||
"core:window:allow-set-always-on-top",
|
||||
"core:window:deny-internal-toggle-maximize",
|
||||
"core:window:allow-set-shadow",
|
||||
"core:app:allow-set-app-theme",
|
||||
"shell:default",
|
||||
"http:default",
|
||||
@@ -68,6 +69,9 @@
|
||||
"screenshots:default",
|
||||
"core:window:allow-set-theme",
|
||||
"process:default",
|
||||
"updater:default"
|
||||
"updater:default",
|
||||
"windows-version:default",
|
||||
"log:default",
|
||||
"opener:default"
|
||||
]
|
||||
}
|
||||
|
||||
2
src-tauri/rust-toolchain.toml
Normal file
@@ -0,0 +1,2 @@
|
||||
[toolchain]
|
||||
channel = "nightly-2025-02-28"
|
||||
@@ -1,62 +1,60 @@
|
||||
use crate::common::assistant::ChatRequestMessage;
|
||||
use crate::common::http::GetResponse;
|
||||
use crate::common::http::{convert_query_params_to_strings, GetResponse};
|
||||
use crate::common::register::SearchSourceRegistry;
|
||||
use crate::server::http_client::HttpClient;
|
||||
use reqwest::Response;
|
||||
use crate::{common, server::servers::COCO_SERVERS};
|
||||
use futures::stream::FuturesUnordered;
|
||||
use futures::StreamExt;
|
||||
use futures_util::TryStreamExt;
|
||||
use http::Method;
|
||||
use serde_json::Value;
|
||||
use std::collections::HashMap;
|
||||
use tauri::{AppHandle, Runtime};
|
||||
use tauri::{AppHandle, Emitter, Manager, Runtime};
|
||||
use tokio::io::AsyncBufReadExt;
|
||||
|
||||
#[tauri::command]
|
||||
pub async fn chat_history<R: Runtime>(
|
||||
app_handle: AppHandle<R>,
|
||||
_app_handle: AppHandle<R>,
|
||||
server_id: String,
|
||||
from: u32,
|
||||
size: u32,
|
||||
query: Option<String>,
|
||||
) -> Result<String, String> {
|
||||
let mut query_params: HashMap<String, Value> = HashMap::new();
|
||||
if from > 0 {
|
||||
query_params.insert("from".to_string(), from.into());
|
||||
}
|
||||
if size > 0 {
|
||||
query_params.insert("size".to_string(), size.into());
|
||||
let mut query_params = Vec::new();
|
||||
|
||||
// Add from/size as number values
|
||||
query_params.push(format!("from={}", from));
|
||||
query_params.push(format!("size={}", size));
|
||||
|
||||
if let Some(query) = query {
|
||||
if !query.is_empty() {
|
||||
query_params.push(format!("query={}", query.to_string()));
|
||||
}
|
||||
}
|
||||
|
||||
let response = HttpClient::get(&server_id, "/chat/_history", Some(query_params))
|
||||
.await
|
||||
.map_err(|e| format!("Error get sessions: {}", e))?;
|
||||
.map_err(|e| {
|
||||
dbg!("Error get history: {}", &e);
|
||||
format!("Error get history: {}", e)
|
||||
})?;
|
||||
|
||||
handle_raw_response(response).await?
|
||||
}
|
||||
|
||||
async fn handle_raw_response(response: Response) -> Result<Result<String, String>, String> {
|
||||
Ok(
|
||||
if response.status().as_u16() < 200 || response.status().as_u16() >= 400 {
|
||||
Err("Failed to send message".to_string())
|
||||
} else {
|
||||
let body = response
|
||||
.text()
|
||||
.await
|
||||
.map_err(|e| format!("Failed to parse response JSON: {}", e))?;
|
||||
Ok(body)
|
||||
},
|
||||
)
|
||||
common::http::get_response_body_text(response).await
|
||||
}
|
||||
|
||||
#[tauri::command]
|
||||
pub async fn session_chat_history<R: Runtime>(
|
||||
app_handle: AppHandle<R>,
|
||||
_app_handle: AppHandle<R>,
|
||||
server_id: String,
|
||||
session_id: String,
|
||||
from: u32,
|
||||
size: u32,
|
||||
) -> Result<String, String> {
|
||||
let mut query_params: HashMap<String, Value> = HashMap::new();
|
||||
if from > 0 {
|
||||
query_params.insert("from".to_string(), from.into());
|
||||
}
|
||||
if size > 0 {
|
||||
query_params.insert("size".to_string(), size.into());
|
||||
}
|
||||
let mut query_params = Vec::new();
|
||||
|
||||
// Add from/size as number values
|
||||
query_params.push(format!("from={}", from));
|
||||
query_params.push(format!("size={}", size));
|
||||
|
||||
let path = format!("/chat/{}/_history", session_id);
|
||||
|
||||
@@ -64,87 +62,94 @@ pub async fn session_chat_history<R: Runtime>(
|
||||
.await
|
||||
.map_err(|e| format!("Error get session message: {}", e))?;
|
||||
|
||||
handle_raw_response(response).await?
|
||||
common::http::get_response_body_text(response).await
|
||||
}
|
||||
|
||||
#[tauri::command]
|
||||
pub async fn open_session_chat<R: Runtime>(
|
||||
app_handle: AppHandle<R>,
|
||||
_app_handle: AppHandle<R>,
|
||||
server_id: String,
|
||||
session_id: String,
|
||||
) -> Result<String, String> {
|
||||
let mut query_params = HashMap::new();
|
||||
let path = format!("/chat/{}/_open", session_id);
|
||||
|
||||
let response = HttpClient::post(&server_id, path.as_str(), Some(query_params), None)
|
||||
let response = HttpClient::post(&server_id, path.as_str(), None, None)
|
||||
.await
|
||||
.map_err(|e| format!("Error open session: {}", e))?;
|
||||
|
||||
handle_raw_response(response).await?
|
||||
common::http::get_response_body_text(response).await
|
||||
}
|
||||
|
||||
#[tauri::command]
|
||||
pub async fn close_session_chat<R: Runtime>(
|
||||
app_handle: AppHandle<R>,
|
||||
_app_handle: AppHandle<R>,
|
||||
server_id: String,
|
||||
session_id: String,
|
||||
) -> Result<String, String> {
|
||||
let mut query_params = HashMap::new();
|
||||
let path = format!("/chat/{}/_close", session_id);
|
||||
|
||||
let response = HttpClient::post(&server_id, path.as_str(), Some(query_params), None)
|
||||
let response = HttpClient::post(&server_id, path.as_str(), None, None)
|
||||
.await
|
||||
.map_err(|e| format!("Error close session: {}", e))?;
|
||||
|
||||
handle_raw_response(response).await?
|
||||
common::http::get_response_body_text(response).await
|
||||
}
|
||||
#[tauri::command]
|
||||
pub async fn cancel_session_chat<R: Runtime>(
|
||||
app_handle: AppHandle<R>,
|
||||
_app_handle: AppHandle<R>,
|
||||
server_id: String,
|
||||
session_id: String,
|
||||
) -> Result<String, String> {
|
||||
let mut query_params = HashMap::new();
|
||||
let path = format!("/chat/{}/_cancel", session_id);
|
||||
|
||||
let response = HttpClient::post(&server_id, path.as_str(), Some(query_params), None)
|
||||
let response = HttpClient::post(&server_id, path.as_str(), None, None)
|
||||
.await
|
||||
.map_err(|e| format!("Error cancel session: {}", e))?;
|
||||
|
||||
handle_raw_response(response).await?
|
||||
common::http::get_response_body_text(response).await
|
||||
}
|
||||
|
||||
#[tauri::command]
|
||||
pub async fn new_chat<R: Runtime>(
|
||||
app_handle: AppHandle<R>,
|
||||
_app_handle: AppHandle<R>,
|
||||
server_id: String,
|
||||
websocket_id: String,
|
||||
message: String,
|
||||
query_params: Option<HashMap<String, Value>>, //search,deep_thinking
|
||||
query_params: Option<HashMap<String, Value>>,
|
||||
) -> Result<GetResponse, String> {
|
||||
let body = if !message.is_empty() {
|
||||
let message = ChatRequestMessage {
|
||||
message: Some(message),
|
||||
};
|
||||
let body = reqwest::Body::from(serde_json::to_string(&message).unwrap());
|
||||
Some(body)
|
||||
Some(
|
||||
serde_json::to_string(&message)
|
||||
.map_err(|e| format!("Failed to serialize message: {}", e))?
|
||||
.into(),
|
||||
)
|
||||
} else {
|
||||
None
|
||||
};
|
||||
|
||||
let response = HttpClient::post(&server_id, "/chat/_new", query_params, body)
|
||||
.await
|
||||
.map_err(|e| format!("Error sending message: {}", e))?;
|
||||
let mut headers = HashMap::new();
|
||||
headers.insert("WEBSOCKET-SESSION-ID".to_string(), websocket_id.into());
|
||||
|
||||
if response.status().as_u16() < 200 || response.status().as_u16() >= 400 {
|
||||
return Err("Failed to send message".to_string());
|
||||
}
|
||||
let response = HttpClient::advanced_post(
|
||||
&server_id,
|
||||
"/chat/_new",
|
||||
Some(headers),
|
||||
convert_query_params_to_strings(query_params),
|
||||
body,
|
||||
)
|
||||
.await
|
||||
.map_err(|e| format!("Error sending message: {}", e))?;
|
||||
|
||||
let chat_response: GetResponse = response
|
||||
.json()
|
||||
.await
|
||||
let body_text = common::http::get_response_body_text(response).await?;
|
||||
|
||||
log::debug!("New chat response: {}", &body_text);
|
||||
|
||||
let chat_response: GetResponse = serde_json::from_str(&body_text)
|
||||
.map_err(|e| format!("Failed to parse response JSON: {}", e))?;
|
||||
|
||||
// Check the result and status fields
|
||||
if chat_response.result != "created" {
|
||||
return Err(format!("Unexpected result: {}", chat_response.result));
|
||||
}
|
||||
@@ -154,8 +159,9 @@ pub async fn new_chat<R: Runtime>(
|
||||
|
||||
#[tauri::command]
|
||||
pub async fn send_message<R: Runtime>(
|
||||
app_handle: AppHandle<R>,
|
||||
_app_handle: AppHandle<R>,
|
||||
server_id: String,
|
||||
websocket_id: String,
|
||||
session_id: String,
|
||||
message: String,
|
||||
query_params: Option<HashMap<String, Value>>, //search,deep_thinking
|
||||
@@ -165,11 +171,248 @@ pub async fn send_message<R: Runtime>(
|
||||
message: Some(message),
|
||||
};
|
||||
|
||||
let body = reqwest::Body::from(serde_json::to_string(&msg).unwrap());
|
||||
let response =
|
||||
HttpClient::advanced_post(&server_id, path.as_str(), None, query_params, Some(body))
|
||||
.await
|
||||
.map_err(|e| format!("Error cancel session: {}", e))?;
|
||||
let mut headers = HashMap::new();
|
||||
headers.insert("WEBSOCKET-SESSION-ID".to_string(), websocket_id.into());
|
||||
|
||||
handle_raw_response(response).await?
|
||||
let body = reqwest::Body::from(serde_json::to_string(&msg).unwrap());
|
||||
let response = HttpClient::advanced_post(
|
||||
&server_id,
|
||||
path.as_str(),
|
||||
Some(headers),
|
||||
convert_query_params_to_strings(query_params),
|
||||
Some(body),
|
||||
)
|
||||
.await
|
||||
.map_err(|e| format!("Error cancel session: {}", e))?;
|
||||
|
||||
common::http::get_response_body_text(response).await
|
||||
}
|
||||
|
||||
#[tauri::command]
|
||||
pub async fn delete_session_chat(server_id: String, session_id: String) -> Result<bool, String> {
|
||||
let response =
|
||||
HttpClient::delete(&server_id, &format!("/chat/{}", session_id), None, None).await?;
|
||||
|
||||
if response.status().is_success() {
|
||||
Ok(true)
|
||||
} else {
|
||||
Err(format!("Delete failed with status: {}", response.status()))
|
||||
}
|
||||
}
|
||||
|
||||
#[tauri::command]
|
||||
pub async fn update_session_chat(
|
||||
server_id: String,
|
||||
session_id: String,
|
||||
title: Option<String>,
|
||||
context: Option<HashMap<String, Value>>,
|
||||
) -> Result<bool, String> {
|
||||
let mut body = HashMap::new();
|
||||
if let Some(title) = title {
|
||||
body.insert("title".to_string(), Value::String(title));
|
||||
}
|
||||
if let Some(context) = context {
|
||||
body.insert(
|
||||
"context".to_string(),
|
||||
Value::Object(context.into_iter().collect()),
|
||||
);
|
||||
}
|
||||
|
||||
let response = HttpClient::put(
|
||||
&server_id,
|
||||
&format!("/chat/{}", session_id),
|
||||
None,
|
||||
None,
|
||||
Some(reqwest::Body::from(serde_json::to_string(&body).unwrap())),
|
||||
)
|
||||
.await
|
||||
.map_err(|e| format!("Error updating session: {}", e))?;
|
||||
|
||||
Ok(response.status().is_success())
|
||||
}
|
||||
|
||||
#[tauri::command]
|
||||
pub async fn assistant_search<R: Runtime>(
|
||||
_app_handle: AppHandle<R>,
|
||||
server_id: String,
|
||||
query_params: Option<Vec<String>>,
|
||||
) -> Result<Value, String> {
|
||||
let response = HttpClient::post(&server_id, "/assistant/_search", query_params, None)
|
||||
.await
|
||||
.map_err(|e| format!("Error searching assistants: {}", e))?;
|
||||
|
||||
response
|
||||
.json::<Value>()
|
||||
.await
|
||||
.map_err(|err| err.to_string())
|
||||
}
|
||||
|
||||
#[tauri::command]
|
||||
pub async fn assistant_get<R: Runtime>(
|
||||
_app_handle: AppHandle<R>,
|
||||
server_id: String,
|
||||
assistant_id: String,
|
||||
) -> Result<Value, String> {
|
||||
let response = HttpClient::get(
|
||||
&server_id,
|
||||
&format!("/assistant/{}", assistant_id),
|
||||
None, // headers
|
||||
)
|
||||
.await
|
||||
.map_err(|e| format!("Error getting assistant: {}", e))?;
|
||||
|
||||
response
|
||||
.json::<Value>()
|
||||
.await
|
||||
.map_err(|err| err.to_string())
|
||||
}
|
||||
|
||||
/// Gets the information of the assistant specified by `assistant_id` by querying **all**
|
||||
/// Coco servers.
|
||||
///
|
||||
/// Returns as soon as the assistant is found on any Coco server.
|
||||
#[tauri::command]
|
||||
pub async fn assistant_get_multi<R: Runtime>(
|
||||
app_handle: AppHandle<R>,
|
||||
assistant_id: String,
|
||||
) -> Result<Value, String> {
|
||||
let search_sources = app_handle.state::<SearchSourceRegistry>();
|
||||
let sources_future = search_sources.get_sources();
|
||||
let sources_list = sources_future.await;
|
||||
|
||||
let mut futures = FuturesUnordered::new();
|
||||
|
||||
for query_source in &sources_list {
|
||||
let query_source_type = query_source.get_type();
|
||||
if query_source_type.r#type != COCO_SERVERS {
|
||||
// Assistants only exists on Coco servers.
|
||||
continue;
|
||||
}
|
||||
|
||||
let coco_server_id = query_source_type.id.clone();
|
||||
|
||||
let path = format!("/assistant/{}", assistant_id);
|
||||
|
||||
let fut = async move {
|
||||
let res_response = HttpClient::get(
|
||||
&coco_server_id,
|
||||
&path,
|
||||
None, // headers
|
||||
)
|
||||
.await;
|
||||
match res_response {
|
||||
Ok(response) => response
|
||||
.json::<serde_json::Value>()
|
||||
.await
|
||||
.map_err(|e| e.to_string()),
|
||||
Err(e) => Err(e),
|
||||
}
|
||||
};
|
||||
|
||||
futures.push(fut);
|
||||
}
|
||||
|
||||
while let Some(res_response_json) = futures.next().await {
|
||||
let response_json = match res_response_json {
|
||||
Ok(json) => json,
|
||||
Err(e) => return Err(e),
|
||||
};
|
||||
|
||||
// Example response JSON
|
||||
//
|
||||
// When assistant is not found:
|
||||
// ```json
|
||||
// {
|
||||
// "_id": "ID",
|
||||
// "result": "not_found"
|
||||
// }
|
||||
// ```
|
||||
//
|
||||
// When assistant is found:
|
||||
// ```json
|
||||
// {
|
||||
// "_id": "ID",
|
||||
// "_source": {...}
|
||||
// "found": true
|
||||
// }
|
||||
// ```
|
||||
if let Some(found) = response_json.get("found") {
|
||||
if found == true {
|
||||
return Ok(response_json);
|
||||
}
|
||||
}
|
||||
}
|
||||
|
||||
Err(format!(
|
||||
"could not find Assistant [{}] on all the Coco servers",
|
||||
assistant_id
|
||||
))
|
||||
}
|
||||
|
||||
use regex::Regex;
|
||||
/// Remove all `"icon": "..."` fields from a JSON string
|
||||
pub fn remove_icon_fields(json: &str) -> String {
|
||||
// Regex to match `"icon": "..."` fields, including base64 or escaped strings
|
||||
let re = Regex::new(r#""icon"\s*:\s*"[^"]*"(,?)"#).unwrap();
|
||||
// Replace with empty string, or just remove trailing comma if needed
|
||||
re.replace_all(json, |caps: ®ex::Captures| {
|
||||
if &caps[1] == "," {
|
||||
"".to_string() // keep comma removal logic safe
|
||||
} else {
|
||||
"".to_string()
|
||||
}
|
||||
})
|
||||
.to_string()
|
||||
}
|
||||
|
||||
#[tauri::command]
|
||||
pub async fn ask_ai<R: Runtime>(
|
||||
app_handle: AppHandle<R>,
|
||||
message: String,
|
||||
server_id: String,
|
||||
assistant_id: String,
|
||||
client_id: String,
|
||||
) -> Result<(), String> {
|
||||
let cleaned = remove_icon_fields(message.as_str());
|
||||
|
||||
let body = serde_json::json!({ "message": cleaned });
|
||||
|
||||
let path = format!("/assistant/{}/_ask", assistant_id);
|
||||
|
||||
println!("Sending request to {}", &path);
|
||||
|
||||
let response = HttpClient::send_request(
|
||||
server_id.as_str(),
|
||||
Method::POST,
|
||||
path.as_str(),
|
||||
None,
|
||||
None,
|
||||
Some(reqwest::Body::from(body.to_string())),
|
||||
)
|
||||
.await?;
|
||||
|
||||
if response.status() == 429 {
|
||||
log::warn!("Rate limit exceeded for assistant: {}", &assistant_id);
|
||||
return Ok(());
|
||||
}
|
||||
|
||||
if !response.status().is_success() {
|
||||
return Err(format!("Request Failed: {}", response.status()));
|
||||
}
|
||||
|
||||
let stream = response.bytes_stream();
|
||||
let reader = tokio_util::io::StreamReader::new(
|
||||
stream.map_err(|e| std::io::Error::new(std::io::ErrorKind::Other, e)),
|
||||
);
|
||||
let mut lines = tokio::io::BufReader::new(reader).lines();
|
||||
|
||||
while let Ok(Some(line)) = lines.next_line().await {
|
||||
dbg!("Received line: {}", &line);
|
||||
|
||||
let _ = app_handle.emit(&client_id, line).map_err(|err| {
|
||||
println!("Failed to emit: {:?}", err);
|
||||
});
|
||||
}
|
||||
|
||||
Ok(())
|
||||
}
|
||||
|
||||
@@ -3,38 +3,43 @@ use std::{fs::create_dir, io::Read};
|
||||
use tauri::{Manager, Runtime};
|
||||
use tauri_plugin_autostart::ManagerExt;
|
||||
|
||||
// Start or stop according to configuration
|
||||
pub fn enable_autostart(app: &mut tauri::App) {
|
||||
use tauri_plugin_autostart::MacosLauncher;
|
||||
use tauri_plugin_autostart::ManagerExt;
|
||||
|
||||
app.handle()
|
||||
.plugin(tauri_plugin_autostart::init(
|
||||
MacosLauncher::AppleScript,
|
||||
None,
|
||||
))
|
||||
.unwrap();
|
||||
|
||||
/// If the state reported from the OS and the state stored by us differ, our state is
|
||||
/// prioritized and seen as the correct one. Update the OS state to make them consistent.
|
||||
pub fn ensure_autostart_state_consistent(app: &mut tauri::App) -> Result<(), String> {
|
||||
let autostart_manager = app.autolaunch();
|
||||
|
||||
// close autostart
|
||||
// autostart_manager.disable().unwrap();
|
||||
// return;
|
||||
let os_state = autostart_manager.is_enabled().map_err(|e| e.to_string())?;
|
||||
let coco_stored_state = current_autostart(app.app_handle()).map_err(|e| e.to_string())?;
|
||||
|
||||
match (
|
||||
autostart_manager.is_enabled(),
|
||||
current_autostart(app.app_handle()),
|
||||
) {
|
||||
(Ok(false), Ok(true)) => match autostart_manager.enable() {
|
||||
Ok(_) => println!("Autostart enabled successfully."),
|
||||
Err(err) => eprintln!("Failed to enable autostart: {}", err),
|
||||
},
|
||||
(Ok(true), Ok(false)) => match autostart_manager.disable() {
|
||||
Ok(_) => println!("Autostart disable successfully."),
|
||||
Err(err) => eprintln!("Failed to disable autostart: {}", err),
|
||||
},
|
||||
_ => (),
|
||||
if os_state != coco_stored_state {
|
||||
log::warn!(
|
||||
"autostart inconsistent states, OS state [{}], Coco state [{}], config file could be deleted or corrupted",
|
||||
os_state,
|
||||
coco_stored_state
|
||||
);
|
||||
log::info!("trying to correct the inconsistent states");
|
||||
|
||||
let result = if coco_stored_state {
|
||||
autostart_manager.enable()
|
||||
} else {
|
||||
autostart_manager.disable()
|
||||
};
|
||||
|
||||
match result {
|
||||
Ok(_) => {
|
||||
log::info!("inconsistent autostart states fixed");
|
||||
}
|
||||
Err(e) => {
|
||||
log::error!(
|
||||
"failed to fix inconsistent autostart state due to error [{}]",
|
||||
e
|
||||
);
|
||||
return Err(e.to_string());
|
||||
}
|
||||
}
|
||||
}
|
||||
|
||||
Ok(())
|
||||
}
|
||||
|
||||
fn current_autostart<R: Runtime>(app: &tauri::AppHandle<R>) -> Result<bool, String> {
|
||||
@@ -60,7 +65,10 @@ fn current_autostart<R: Runtime>(app: &tauri::AppHandle<R>) -> Result<bool, Stri
|
||||
}
|
||||
|
||||
#[tauri::command]
|
||||
pub fn change_autostart<R: Runtime>(app: tauri::AppHandle<R>, open: bool) -> Result<(), String> {
|
||||
pub async fn change_autostart<R: Runtime>(
|
||||
app: tauri::AppHandle<R>,
|
||||
open: bool,
|
||||
) -> Result<(), String> {
|
||||
use std::fs::File;
|
||||
use std::io::Write;
|
||||
|
||||
|
||||
@@ -6,15 +6,16 @@ pub struct ChatRequestMessage {
|
||||
pub message: Option<String>,
|
||||
}
|
||||
|
||||
#[allow(dead_code)]
|
||||
pub struct NewChatResponse {
|
||||
pub _id: String,
|
||||
pub _source: Source,
|
||||
pub _source: Session,
|
||||
pub result: String,
|
||||
pub payload: Option<Value>,
|
||||
}
|
||||
|
||||
#[derive(Debug, Serialize, Deserialize)]
|
||||
pub struct Source {
|
||||
pub struct Session {
|
||||
pub id: String,
|
||||
pub created: String,
|
||||
pub updated: String,
|
||||
@@ -22,4 +23,11 @@ pub struct Source {
|
||||
pub title: Option<String>,
|
||||
pub summary: Option<String>,
|
||||
pub manually_renamed_title: bool,
|
||||
pub visible: Option<bool>,
|
||||
pub context: Option<SessionContext>,
|
||||
}
|
||||
|
||||
#[derive(Debug, Serialize, Deserialize)]
|
||||
pub struct SessionContext {
|
||||
pub attachments: Option<Vec<String>>,
|
||||
}
|
||||
@@ -13,6 +13,7 @@ pub struct DataSourceReference {
|
||||
pub r#type: Option<String>,
|
||||
pub name: Option<String>,
|
||||
pub id: Option<String>,
|
||||
pub icon: Option<String>,
|
||||
}
|
||||
|
||||
#[derive(Debug, Clone, Serialize, Deserialize)]
|
||||
@@ -28,7 +29,90 @@ pub struct EditorInfo {
|
||||
pub timestamp: Option<String>,
|
||||
}
|
||||
|
||||
/// Defines the action that would be performed when a document gets opened.
|
||||
#[derive(Debug, Clone, Serialize, Deserialize)]
|
||||
pub(crate) enum OnOpened {
|
||||
/// Launch the application
|
||||
Application { app_path: String },
|
||||
/// Open the URL.
|
||||
Document { url: String },
|
||||
/// Spawn a child process to run the `CommandAction`.
|
||||
Command {
|
||||
action: crate::extension::CommandAction,
|
||||
},
|
||||
}
|
||||
|
||||
impl OnOpened {
|
||||
pub(crate) fn url(&self) -> String {
|
||||
match self {
|
||||
Self::Application { app_path } => app_path.clone(),
|
||||
Self::Document { url } => url.clone(),
|
||||
Self::Command { action } => {
|
||||
const WHITESPACE: &str = " ";
|
||||
let mut ret = action.exec.clone();
|
||||
ret.push_str(WHITESPACE);
|
||||
if let Some(ref args) = action.args {
|
||||
ret.push_str(args.join(WHITESPACE).as_str());
|
||||
}
|
||||
|
||||
ret
|
||||
}
|
||||
}
|
||||
}
|
||||
}
|
||||
|
||||
#[tauri::command]
|
||||
pub(crate) async fn open(on_opened: OnOpened) -> Result<(), String> {
|
||||
log::debug!("open({})", on_opened.url());
|
||||
|
||||
use crate::util::open as homemade_tauri_shell_open;
|
||||
use crate::GLOBAL_TAURI_APP_HANDLE;
|
||||
use std::process::Command;
|
||||
|
||||
let global_tauri_app_handle = GLOBAL_TAURI_APP_HANDLE
|
||||
.get()
|
||||
.expect("global tauri app handle not set");
|
||||
|
||||
match on_opened {
|
||||
OnOpened::Application { app_path } => {
|
||||
homemade_tauri_shell_open(global_tauri_app_handle.clone(), app_path).await?
|
||||
}
|
||||
OnOpened::Document { url } => {
|
||||
homemade_tauri_shell_open(global_tauri_app_handle.clone(), url).await?
|
||||
}
|
||||
OnOpened::Command { action } => {
|
||||
let mut cmd = Command::new(action.exec);
|
||||
if let Some(args) = action.args {
|
||||
cmd.args(args);
|
||||
}
|
||||
let output = cmd.output().map_err(|e| e.to_string())?;
|
||||
// Sometimes, we wanna see the result in logs even though it doesn't fail.
|
||||
log::debug!(
|
||||
"executing open(Command) result, exit code: [{}], stdout: [{}], stderr: [{}]",
|
||||
output.status,
|
||||
String::from_utf8_lossy(&output.stdout),
|
||||
String::from_utf8_lossy(&output.stderr)
|
||||
);
|
||||
if !output.status.success() {
|
||||
log::warn!(
|
||||
"executing open(Command) failed, exit code: [{}], stdout: [{}], stderr: [{}]",
|
||||
output.status,
|
||||
String::from_utf8_lossy(&output.stdout),
|
||||
String::from_utf8_lossy(&output.stderr)
|
||||
);
|
||||
|
||||
return Err(format!(
|
||||
"Command failed, stderr [{}]",
|
||||
String::from_utf8_lossy(&output.stderr)
|
||||
));
|
||||
}
|
||||
}
|
||||
}
|
||||
|
||||
Ok(())
|
||||
}
|
||||
|
||||
#[derive(Debug, Clone, Serialize, Deserialize, Default)]
|
||||
pub struct Document {
|
||||
pub id: String,
|
||||
pub created: Option<String>,
|
||||
@@ -47,6 +131,8 @@ pub struct Document {
|
||||
pub thumbnail: Option<String>,
|
||||
pub cover: Option<String>,
|
||||
pub tags: Option<Vec<String>>,
|
||||
/// What will happen if we open this document.
|
||||
pub on_opened: Option<OnOpened>,
|
||||
pub url: Option<String>,
|
||||
pub size: Option<i64>,
|
||||
pub metadata: Option<HashMap<String, serde_json::Value>>,
|
||||
@@ -54,32 +140,3 @@ pub struct Document {
|
||||
pub owner: Option<UserInfo>,
|
||||
pub last_updated_by: Option<EditorInfo>,
|
||||
}
|
||||
impl Document {
|
||||
pub fn new(source: Option<DataSourceReference>, id: String, category: String, name: String, url: String) -> Self {
|
||||
Self {
|
||||
id,
|
||||
created: None,
|
||||
updated: None,
|
||||
source,
|
||||
r#type: None,
|
||||
category: Some(category),
|
||||
subcategory: None,
|
||||
categories: None,
|
||||
rich_categories: None,
|
||||
title: Some(name),
|
||||
summary: None,
|
||||
lang: None,
|
||||
content: None,
|
||||
icon: None,
|
||||
thumbnail: None,
|
||||
cover: None,
|
||||
tags: None,
|
||||
url: Some(url),
|
||||
size: None,
|
||||
metadata: None,
|
||||
payload: None,
|
||||
owner: None,
|
||||
last_updated_by: None,
|
||||
}
|
||||
}
|
||||
}
|
||||
|
||||
65
src-tauri/src/common/error.rs
Normal file
@@ -0,0 +1,65 @@
|
||||
use serde::{Deserialize, Serialize};
|
||||
use thiserror::Error;
|
||||
|
||||
#[derive(Debug, Deserialize)]
|
||||
#[allow(dead_code)]
|
||||
pub struct ErrorCause {
|
||||
#[serde(default)]
|
||||
pub r#type: Option<String>,
|
||||
#[serde(default)]
|
||||
pub reason: Option<String>,
|
||||
}
|
||||
|
||||
#[derive(Debug, Deserialize)]
|
||||
#[allow(dead_code)]
|
||||
pub struct ErrorDetail {
|
||||
#[serde(default)]
|
||||
pub root_cause: Option<Vec<ErrorCause>>,
|
||||
#[serde(default)]
|
||||
pub r#type: Option<String>,
|
||||
#[serde(default)]
|
||||
pub reason: Option<String>,
|
||||
#[serde(default)]
|
||||
pub caused_by: Option<ErrorCause>,
|
||||
}
|
||||
|
||||
#[derive(Debug, Deserialize)]
|
||||
#[allow(dead_code)]
|
||||
pub struct ErrorResponse {
|
||||
#[serde(default)]
|
||||
pub error: Option<ErrorDetail>,
|
||||
#[serde(default)]
|
||||
pub status: Option<u16>,
|
||||
}
|
||||
|
||||
#[derive(Debug, Error, Serialize)]
|
||||
pub enum SearchError {
|
||||
#[error("HttpError: {0}")]
|
||||
HttpError(String),
|
||||
|
||||
#[error("ParseError: {0}")]
|
||||
ParseError(String),
|
||||
|
||||
#[error("Timeout occurred")]
|
||||
Timeout,
|
||||
|
||||
#[error("UnknownError: {0}")]
|
||||
#[allow(dead_code)]
|
||||
Unknown(String),
|
||||
|
||||
#[error("InternalError: {0}")]
|
||||
#[allow(dead_code)]
|
||||
InternalError(String),
|
||||
}
|
||||
|
||||
impl From<reqwest::Error> for SearchError {
|
||||
fn from(err: reqwest::Error) -> Self {
|
||||
if err.is_timeout() {
|
||||
SearchError::Timeout
|
||||
} else if err.is_decode() {
|
||||
SearchError::ParseError(err.to_string())
|
||||
} else {
|
||||
SearchError::HttpError(err.to_string())
|
||||
}
|
||||
}
|
||||
}
|
||||
@@ -1,5 +1,9 @@
|
||||
use crate::common;
|
||||
use reqwest::Response;
|
||||
use serde::{Deserialize, Serialize};
|
||||
use serde_json::Value;
|
||||
use std::collections::HashMap;
|
||||
use tauri_plugin_store::JsonValue;
|
||||
|
||||
#[derive(Debug, Serialize, Deserialize)]
|
||||
pub struct GetResponse {
|
||||
@@ -15,4 +19,62 @@ pub struct Source {
|
||||
pub created: String,
|
||||
pub updated: String,
|
||||
pub status: String,
|
||||
}
|
||||
|
||||
pub async fn get_response_body_text(response: Response) -> Result<String, String> {
|
||||
let status = response.status().as_u16();
|
||||
let body = response
|
||||
.text()
|
||||
.await
|
||||
.map_err(|e| format!("Failed to read response body: {}, code: {}", e, status))?;
|
||||
|
||||
log::debug!("Response status: {}, body: {}", status, &body);
|
||||
|
||||
if status < 200 || status >= 400 {
|
||||
// Try to parse the error body
|
||||
let fallback_error = "Failed to send message".to_string();
|
||||
|
||||
if body.trim().is_empty() {
|
||||
return Err(fallback_error);
|
||||
}
|
||||
|
||||
|
||||
match serde_json::from_str::<common::error::ErrorResponse>(&body) {
|
||||
Ok(parsed_error) => {
|
||||
dbg!(&parsed_error);
|
||||
Err(format!(
|
||||
"Server error ({}): {:?}",
|
||||
status, parsed_error.error
|
||||
))
|
||||
}
|
||||
Err(_) => {
|
||||
log::warn!("Failed to parse error response: {}", &body);
|
||||
Err(fallback_error)
|
||||
}
|
||||
}
|
||||
} else {
|
||||
Ok(body)
|
||||
}
|
||||
}
|
||||
|
||||
|
||||
pub fn convert_query_params_to_strings(
|
||||
query_params: Option<HashMap<String, JsonValue>>,
|
||||
) -> Option<Vec<String>> {
|
||||
query_params.map(|map| {
|
||||
map.into_iter()
|
||||
.filter_map(|(k, v)| match v {
|
||||
JsonValue::String(s) => Some(format!("{}={}", k, s)),
|
||||
JsonValue::Number(n) => Some(format!("{}={}", k, n)),
|
||||
JsonValue::Bool(b) => Some(format!("{}={}", k, b)),
|
||||
_ => {
|
||||
eprintln!(
|
||||
"Skipping unsupported query value for key '{}': {:?}",
|
||||
k, v
|
||||
);
|
||||
None
|
||||
}
|
||||
})
|
||||
.collect()
|
||||
})
|
||||
}
|
||||
@@ -1,15 +1,17 @@
|
||||
pub mod health;
|
||||
pub mod profile;
|
||||
pub mod server;
|
||||
pub mod auth;
|
||||
pub mod datasource;
|
||||
pub mod connector;
|
||||
pub mod search;
|
||||
pub mod document;
|
||||
pub mod traits;
|
||||
pub mod register;
|
||||
pub mod assistant;
|
||||
pub mod auth;
|
||||
pub mod connector;
|
||||
pub mod datasource;
|
||||
pub mod document;
|
||||
pub mod error;
|
||||
pub mod health;
|
||||
pub mod http;
|
||||
pub mod profile;
|
||||
pub mod register;
|
||||
pub mod search;
|
||||
pub mod server;
|
||||
pub mod traits;
|
||||
|
||||
pub static MAIN_WINDOW_LABEL: &str = "main";
|
||||
pub static SETTINGS_WINDOW_LABEL: &str = "settings";
|
||||
pub static CHECK_WINDOW_LABEL: &str = "check";
|
||||
|
||||
@@ -1,15 +1,16 @@
|
||||
use serde::{Serialize, Deserialize};
|
||||
use serde::{Deserialize, Serialize};
|
||||
|
||||
#[derive(Debug,Clone, Serialize, Deserialize)]
|
||||
#[derive(Debug, Clone, Serialize, Deserialize)]
|
||||
pub struct Preferences {
|
||||
pub theme: String,
|
||||
pub language: String,
|
||||
pub theme: Option<String>,
|
||||
pub language: Option<String>,
|
||||
}
|
||||
|
||||
#[derive(Debug,Clone, Serialize, Deserialize)]
|
||||
#[derive(Debug, Clone, Serialize, Deserialize)]
|
||||
pub struct UserProfile {
|
||||
pub id: String,
|
||||
pub name: String,
|
||||
pub email: String,
|
||||
pub avatar: String,
|
||||
pub preferences: Preferences,
|
||||
pub avatar: Option<String>,
|
||||
pub preferences: Option<Preferences>,
|
||||
}
|
||||
@@ -16,6 +16,7 @@ impl SearchSourceRegistry {
|
||||
sources.insert(source_id, Arc::new(source));
|
||||
}
|
||||
|
||||
#[allow(dead_code)]
|
||||
pub async fn clear(&self) {
|
||||
let mut sources = self.sources.write().await;
|
||||
sources.clear();
|
||||
@@ -26,6 +27,7 @@ impl SearchSourceRegistry {
|
||||
sources.remove(id);
|
||||
}
|
||||
|
||||
#[allow(dead_code)]
|
||||
pub async fn get_source(&self, id: &str) -> Option<Arc<dyn SearchSource>> {
|
||||
let sources = self.sources.read().await;
|
||||
sources.get(id).cloned()
|
||||
@@ -34,4 +36,4 @@ impl SearchSourceRegistry {
|
||||
let sources = self.sources.read().await;
|
||||
sources.values().cloned().collect() // Returns Vec<Arc<dyn SearchSource>>
|
||||
}
|
||||
}
|
||||
}
|
||||
|
||||
@@ -1,14 +1,15 @@
|
||||
use crate::common::document::Document;
|
||||
use crate::common::http::get_response_body_text;
|
||||
use reqwest::Response;
|
||||
use serde::{Deserialize, Serialize};
|
||||
use serde_json::Value;
|
||||
use std::collections::HashMap;
|
||||
use std::error::Error;
|
||||
|
||||
#[derive(Debug, Serialize, Deserialize)]
|
||||
pub struct SearchResponse<T> {
|
||||
pub took: u64,
|
||||
pub timed_out: bool,
|
||||
pub _shards: Shards,
|
||||
pub _shards: Option<Shards>,
|
||||
pub hits: Hits<T>,
|
||||
}
|
||||
|
||||
@@ -24,7 +25,7 @@ pub struct Shards {
|
||||
pub struct Hits<T> {
|
||||
pub total: Total,
|
||||
pub max_score: Option<f32>,
|
||||
pub hits: Vec<SearchHit<T>>,
|
||||
pub hits: Option<Vec<SearchHit<T>>>,
|
||||
}
|
||||
|
||||
#[derive(Debug, Serialize, Deserialize)]
|
||||
@@ -35,9 +36,9 @@ pub struct Total {
|
||||
|
||||
#[derive(Debug, Serialize, Deserialize)]
|
||||
pub struct SearchHit<T> {
|
||||
pub _index: String,
|
||||
pub _type: String,
|
||||
pub _id: String,
|
||||
pub _index: Option<String>,
|
||||
pub _type: Option<String>,
|
||||
pub _id: Option<String>,
|
||||
pub _score: Option<f64>,
|
||||
pub _source: T, // This will hold the type we pass in (e.g., DataSource)
|
||||
}
|
||||
@@ -47,26 +48,28 @@ pub async fn parse_search_response<T>(
|
||||
where
|
||||
T: for<'de> Deserialize<'de> + std::fmt::Debug,
|
||||
{
|
||||
let body = response
|
||||
.json::<Value>()
|
||||
.await
|
||||
.map_err(|e| format!("Failed to parse JSON: {}", e))?;
|
||||
let body_text = get_response_body_text(response).await?;
|
||||
|
||||
// dbg!(&body);
|
||||
// dbg!(&body_text);
|
||||
|
||||
let search_response: SearchResponse<T> = serde_json::from_value(body)
|
||||
let search_response: SearchResponse<T> = serde_json::from_str(&body_text)
|
||||
.map_err(|e| format!("Failed to deserialize search response: {}", e))?;
|
||||
|
||||
Ok(search_response)
|
||||
}
|
||||
|
||||
use serde::de::DeserializeOwned;
|
||||
|
||||
pub async fn parse_search_hits<T>(response: Response) -> Result<Vec<SearchHit<T>>, Box<dyn Error>>
|
||||
where
|
||||
T: for<'de> Deserialize<'de> + std::fmt::Debug,
|
||||
T: DeserializeOwned + std::fmt::Debug,
|
||||
{
|
||||
let response = parse_search_response(response).await?;
|
||||
|
||||
Ok(response.hits.hits)
|
||||
match response.hits.hits {
|
||||
Some(hits) => Ok(hits),
|
||||
None => Ok(Vec::new()),
|
||||
}
|
||||
}
|
||||
|
||||
pub async fn parse_search_results<T>(response: Response) -> Result<Vec<T>, Box<dyn Error>>
|
||||
@@ -80,6 +83,7 @@ where
|
||||
.collect())
|
||||
}
|
||||
|
||||
#[allow(dead_code)]
|
||||
pub async fn parse_search_results_with_score<T>(
|
||||
response: Response,
|
||||
) -> Result<Vec<(T, Option<f64>)>, Box<dyn Error>>
|
||||
|
||||
@@ -1,6 +1,8 @@
|
||||
use crate::common::health::Health;
|
||||
use crate::common::profile::UserProfile;
|
||||
use serde::{Deserialize, Serialize};
|
||||
use serde_json::Value;
|
||||
use std::collections::HashMap;
|
||||
use std::hash::{Hash, Hasher};
|
||||
|
||||
#[derive(Debug, Clone, Serialize, Deserialize)]
|
||||
@@ -60,6 +62,7 @@ pub struct Server {
|
||||
pub auth_provider: AuthProvider,
|
||||
#[serde(default = "default_priority_type")]
|
||||
pub priority: u32,
|
||||
pub stats: Option<HashMap<String, Value>>,
|
||||
}
|
||||
|
||||
impl PartialEq for Server {
|
||||
|
||||
@@ -1,10 +1,7 @@
|
||||
use crate::common::search::{QueryResponse, QuerySource};
|
||||
use thiserror::Error;
|
||||
|
||||
use async_trait::async_trait;
|
||||
// use std::{future::Future, pin::Pin};
|
||||
use crate::common::error::SearchError;
|
||||
use crate::common::search::SearchQuery;
|
||||
use serde::Serialize;
|
||||
use crate::common::search::{QueryResponse, QuerySource};
|
||||
use async_trait::async_trait;
|
||||
|
||||
#[async_trait]
|
||||
pub trait SearchSource: Send + Sync {
|
||||
@@ -12,33 +9,3 @@ pub trait SearchSource: Send + Sync {
|
||||
|
||||
async fn search(&self, query: SearchQuery) -> Result<QueryResponse, SearchError>;
|
||||
}
|
||||
|
||||
#[derive(Debug, Error, Serialize)]
|
||||
pub enum SearchError {
|
||||
#[error("HTTP request failed: {0}")]
|
||||
HttpError(String),
|
||||
|
||||
#[error("Invalid response format: {0}")]
|
||||
ParseError(String),
|
||||
|
||||
#[error("Timeout occurred")]
|
||||
Timeout,
|
||||
|
||||
#[error("Unknown error: {0}")]
|
||||
Unknown(String),
|
||||
|
||||
#[error("InternalError error: {0}")]
|
||||
InternalError(String),
|
||||
}
|
||||
|
||||
impl From<reqwest::Error> for SearchError {
|
||||
fn from(err: reqwest::Error) -> Self {
|
||||
if err.is_timeout() {
|
||||
SearchError::Timeout
|
||||
} else if err.is_decode() {
|
||||
SearchError::ParseError(err.to_string())
|
||||
} else {
|
||||
SearchError::HttpError(err.to_string())
|
||||
}
|
||||
}
|
||||
}
|
||||
|
||||
13
src-tauri/src/extension/built_in/ai_overview.rs
Normal file
@@ -0,0 +1,13 @@
|
||||
pub(super) const EXTENSION_ID: &str = "AIOverview";
|
||||
|
||||
/// JSON file for this extension.
|
||||
pub(crate) const PLUGIN_JSON_FILE: &str = r#"
|
||||
{
|
||||
"id": "AIOverview",
|
||||
"name": "AI Overview",
|
||||
"description": "...",
|
||||
"icon": "font_a-AIOverview",
|
||||
"type": "ai_extension",
|
||||
"enabled": true
|
||||
}
|
||||
"#;
|
||||
48
src-tauri/src/extension/built_in/application/mod.rs
Normal file
@@ -0,0 +1,48 @@
|
||||
use serde::Serialize;
|
||||
|
||||
#[cfg(feature = "use_pizza_engine")]
|
||||
mod with_feature;
|
||||
|
||||
#[cfg(not(feature = "use_pizza_engine"))]
|
||||
mod without_feature;
|
||||
|
||||
#[cfg(feature = "use_pizza_engine")]
|
||||
pub use with_feature::*;
|
||||
|
||||
#[cfg(not(feature = "use_pizza_engine"))]
|
||||
pub use without_feature::*;
|
||||
|
||||
#[derive(Debug, Serialize, Clone)]
|
||||
#[serde(rename_all = "camelCase")]
|
||||
pub struct AppEntry {
|
||||
path: String,
|
||||
name: String,
|
||||
icon_path: String,
|
||||
alias: String,
|
||||
hotkey: String,
|
||||
is_disabled: bool,
|
||||
}
|
||||
|
||||
#[derive(serde::Serialize)]
|
||||
#[serde(rename_all = "camelCase")]
|
||||
pub struct AppMetadata {
|
||||
name: String,
|
||||
r#where: String,
|
||||
size: u64,
|
||||
created: u128,
|
||||
modified: u128,
|
||||
last_opened: u128,
|
||||
}
|
||||
|
||||
/// JSON file for this extension.
|
||||
pub(crate) const PLUGIN_JSON_FILE: &str = r#"
|
||||
{
|
||||
"id": "Applications",
|
||||
"platforms": ["macos", "linux", "windows"],
|
||||
"name": "Applications",
|
||||
"description": "Application search",
|
||||
"icon": "font_Application",
|
||||
"type": "group",
|
||||
"enabled": true
|
||||
}
|
||||
"#;
|
||||
1192
src-tauri/src/extension/built_in/application/with_feature.rs
Normal file
141
src-tauri/src/extension/built_in/application/without_feature.rs
Normal file
@@ -0,0 +1,141 @@
|
||||
use super::super::Extension;
|
||||
use super::AppMetadata;
|
||||
use crate::common::error::SearchError;
|
||||
use crate::common::search::{QueryResponse, QuerySource, SearchQuery};
|
||||
use crate::common::traits::SearchSource;
|
||||
use crate::extension::LOCAL_QUERY_SOURCE_TYPE;
|
||||
use async_trait::async_trait;
|
||||
use tauri::{AppHandle, Runtime};
|
||||
|
||||
pub(crate) const QUERYSOURCE_ID_DATASOURCE_ID_DATASOURCE_NAME: &str = "Applications";
|
||||
|
||||
pub struct ApplicationSearchSource;
|
||||
|
||||
impl ApplicationSearchSource {
|
||||
pub async fn prepare_index_and_store<R: Runtime>(
|
||||
_app_handle: AppHandle<R>,
|
||||
) -> Result<(), String> {
|
||||
Ok(())
|
||||
}
|
||||
}
|
||||
|
||||
#[async_trait]
|
||||
impl SearchSource for ApplicationSearchSource {
|
||||
fn get_type(&self) -> QuerySource {
|
||||
QuerySource {
|
||||
r#type: LOCAL_QUERY_SOURCE_TYPE.into(),
|
||||
name: hostname::get()
|
||||
.unwrap_or("My Computer".into())
|
||||
.to_string_lossy()
|
||||
.into(),
|
||||
id: QUERYSOURCE_ID_DATASOURCE_ID_DATASOURCE_NAME.into(),
|
||||
}
|
||||
}
|
||||
|
||||
async fn search(&self, _query: SearchQuery) -> Result<QueryResponse, SearchError> {
|
||||
Ok(QueryResponse {
|
||||
source: self.get_type(),
|
||||
hits: Vec::new(),
|
||||
total_hits: 0,
|
||||
})
|
||||
}
|
||||
}
|
||||
|
||||
pub fn set_app_alias<R: Runtime>(_tauri_app_handle: &AppHandle<R>, _app_path: &str, _alias: &str) {
|
||||
unreachable!("app list should be empty, there is no way this can be invoked")
|
||||
}
|
||||
|
||||
pub fn register_app_hotkey<R: Runtime>(
|
||||
_tauri_app_handle: &AppHandle<R>,
|
||||
_app_path: &str,
|
||||
_hotkey: &str,
|
||||
) -> Result<(), String> {
|
||||
unreachable!("app list should be empty, there is no way this can be invoked")
|
||||
}
|
||||
|
||||
pub fn unregister_app_hotkey<R: Runtime>(
|
||||
_tauri_app_handle: &AppHandle<R>,
|
||||
_app_path: &str,
|
||||
) -> Result<(), String> {
|
||||
unreachable!("app list should be empty, there is no way this can be invoked")
|
||||
}
|
||||
|
||||
pub fn disable_app_search<R: Runtime>(
|
||||
_tauri_app_handle: &AppHandle<R>,
|
||||
_app_path: &str,
|
||||
) -> Result<(), String> {
|
||||
// no-op
|
||||
Ok(())
|
||||
}
|
||||
|
||||
pub fn enable_app_search<R: Runtime>(
|
||||
_tauri_app_handle: &AppHandle<R>,
|
||||
_app_path: &str,
|
||||
) -> Result<(), String> {
|
||||
// no-op
|
||||
Ok(())
|
||||
}
|
||||
|
||||
pub fn is_app_search_enabled(_app_path: &str) -> bool {
|
||||
false
|
||||
}
|
||||
|
||||
#[tauri::command]
|
||||
pub async fn add_app_search_path<R: Runtime>(
|
||||
_tauri_app_handle: AppHandle<R>,
|
||||
_search_path: String,
|
||||
) -> Result<(), String> {
|
||||
// no-op
|
||||
Ok(())
|
||||
}
|
||||
|
||||
#[tauri::command]
|
||||
pub async fn remove_app_search_path<R: Runtime>(
|
||||
_tauri_app_handle: AppHandle<R>,
|
||||
_search_path: String,
|
||||
) -> Result<(), String> {
|
||||
// no-op
|
||||
Ok(())
|
||||
}
|
||||
|
||||
#[tauri::command]
|
||||
pub async fn get_app_search_path<R: Runtime>(_tauri_app_handle: AppHandle<R>) -> Vec<String> {
|
||||
// Return an empty list
|
||||
Vec::new()
|
||||
}
|
||||
|
||||
#[tauri::command]
|
||||
pub async fn get_app_list<R: Runtime>(
|
||||
_tauri_app_handle: AppHandle<R>,
|
||||
) -> Result<Vec<Extension>, String> {
|
||||
// Return an empty list
|
||||
Ok(Vec::new())
|
||||
}
|
||||
|
||||
#[tauri::command]
|
||||
pub async fn get_app_metadata<R: Runtime>(
|
||||
_tauri_app_handle: AppHandle<R>,
|
||||
_app_path: String,
|
||||
) -> Result<AppMetadata, String> {
|
||||
unreachable!("app list should be empty, there is no way this can be invoked")
|
||||
}
|
||||
|
||||
pub(crate) fn set_apps_hotkey<R: Runtime>(_tauri_app_handle: &AppHandle<R>) -> Result<(), String> {
|
||||
// no-op
|
||||
Ok(())
|
||||
}
|
||||
|
||||
pub(crate) fn unset_apps_hotkey<R: Runtime>(
|
||||
_tauri_app_handle: &AppHandle<R>,
|
||||
) -> Result<(), String> {
|
||||
// no-op
|
||||
Ok(())
|
||||
}
|
||||
|
||||
#[tauri::command]
|
||||
pub async fn reindex_applications<R: Runtime>(
|
||||
_tauri_app_handle: AppHandle<R>,
|
||||
) -> Result<(), String> {
|
||||
// no-op
|
||||
Ok(())
|
||||
}
|
||||
196
src-tauri/src/extension/built_in/calculator.rs
Normal file
@@ -0,0 +1,196 @@
|
||||
use super::super::LOCAL_QUERY_SOURCE_TYPE;
|
||||
use crate::common::{
|
||||
document::{DataSourceReference, Document},
|
||||
error::SearchError,
|
||||
search::{QueryResponse, QuerySource, SearchQuery},
|
||||
traits::SearchSource,
|
||||
};
|
||||
use async_trait::async_trait;
|
||||
use chinese_number::{ChineseCase, ChineseCountMethod, ChineseVariant, NumberToChinese};
|
||||
use num2words::Num2Words;
|
||||
use serde_json::Value;
|
||||
use std::collections::HashMap;
|
||||
|
||||
pub(crate) const DATA_SOURCE_ID: &str = "Calculator";
|
||||
|
||||
/// JSON file for this extension.
|
||||
pub(crate) const PLUGIN_JSON_FILE: &str = r#"
|
||||
{
|
||||
"id": "Calculator",
|
||||
"name": "Calculator",
|
||||
"platforms": ["macos", "linux", "windows"],
|
||||
"description": "...",
|
||||
"icon": "font_Calculator",
|
||||
"type": "calculator",
|
||||
"enabled": true
|
||||
}
|
||||
"#;
|
||||
|
||||
pub struct CalculatorSource {
|
||||
base_score: f64,
|
||||
}
|
||||
|
||||
impl CalculatorSource {
|
||||
pub fn new(base_score: f64) -> Self {
|
||||
CalculatorSource { base_score }
|
||||
}
|
||||
}
|
||||
|
||||
fn parse_query(query: &str) -> Value {
|
||||
let mut query_json = serde_json::Map::new();
|
||||
|
||||
let operators = ["+", "-", "*", "/", "%"];
|
||||
|
||||
let found_operators: Vec<_> = query
|
||||
.chars()
|
||||
.filter(|c| operators.contains(&c.to_string().as_str()))
|
||||
.collect();
|
||||
|
||||
if found_operators.len() == 1 {
|
||||
let operation = match found_operators[0] {
|
||||
'+' => "sum",
|
||||
'-' => "subtract",
|
||||
'*' => "multiply",
|
||||
'/' => "divide",
|
||||
'%' => "remainder",
|
||||
_ => "expression",
|
||||
};
|
||||
|
||||
query_json.insert("type".to_string(), Value::String(operation.to_string()));
|
||||
} else {
|
||||
query_json.insert("type".to_string(), Value::String("expression".to_string()));
|
||||
}
|
||||
|
||||
query_json.insert("value".to_string(), Value::String(query.to_string()));
|
||||
|
||||
Value::Object(query_json)
|
||||
}
|
||||
|
||||
fn parse_result(num: f64) -> Value {
|
||||
let mut result_json = serde_json::Map::new();
|
||||
|
||||
let to_zh = num
|
||||
.to_chinese(
|
||||
ChineseVariant::Simple,
|
||||
ChineseCase::Upper,
|
||||
ChineseCountMethod::TenThousand,
|
||||
)
|
||||
.unwrap_or(num.to_string());
|
||||
|
||||
let to_en = Num2Words::new(num)
|
||||
.to_words()
|
||||
.map(|s| {
|
||||
let mut chars = s.chars();
|
||||
let mut result = String::new();
|
||||
let mut capitalize = true;
|
||||
|
||||
while let Some(c) = chars.next() {
|
||||
if c == ' ' || c == '-' {
|
||||
result.push(c);
|
||||
capitalize = true;
|
||||
} else if capitalize {
|
||||
result.extend(c.to_uppercase());
|
||||
capitalize = false;
|
||||
} else {
|
||||
result.push(c);
|
||||
}
|
||||
}
|
||||
|
||||
result
|
||||
})
|
||||
.unwrap_or(num.to_string());
|
||||
|
||||
result_json.insert("value".to_string(), Value::String(num.to_string()));
|
||||
result_json.insert("toZh".to_string(), Value::String(to_zh));
|
||||
result_json.insert("toEn".to_string(), Value::String(to_en));
|
||||
|
||||
Value::Object(result_json)
|
||||
}
|
||||
|
||||
#[async_trait]
|
||||
impl SearchSource for CalculatorSource {
|
||||
fn get_type(&self) -> QuerySource {
|
||||
QuerySource {
|
||||
r#type: LOCAL_QUERY_SOURCE_TYPE.into(),
|
||||
name: hostname::get()
|
||||
.unwrap_or(DATA_SOURCE_ID.into())
|
||||
.to_string_lossy()
|
||||
.into(),
|
||||
id: DATA_SOURCE_ID.into(),
|
||||
}
|
||||
}
|
||||
|
||||
async fn search(&self, query: SearchQuery) -> Result<QueryResponse, SearchError> {
|
||||
let Some(query_string) = query.query_strings.get("query") else {
|
||||
return Ok(QueryResponse {
|
||||
source: self.get_type(),
|
||||
hits: Vec::new(),
|
||||
total_hits: 0,
|
||||
});
|
||||
};
|
||||
|
||||
// Trim the leading and tailing whitespace so that our later if condition
|
||||
// will only be evaluated against non-whitespace characters.
|
||||
let query_string = query_string.trim();
|
||||
|
||||
if query_string.is_empty() || query_string.len() == 1 {
|
||||
return Ok(QueryResponse {
|
||||
source: self.get_type(),
|
||||
hits: Vec::new(),
|
||||
total_hits: 0,
|
||||
});
|
||||
}
|
||||
|
||||
let query_string_clone = query_string.to_string();
|
||||
let query_source = self.get_type();
|
||||
let base_score = self.base_score;
|
||||
let closure = move || -> QueryResponse {
|
||||
let res_num = meval::eval_str(&query_string_clone);
|
||||
|
||||
match res_num {
|
||||
Ok(num) => {
|
||||
let mut payload: HashMap<String, Value> = HashMap::new();
|
||||
|
||||
let payload_query = parse_query(&query_string_clone);
|
||||
let payload_result = parse_result(num);
|
||||
|
||||
payload.insert("query".to_string(), payload_query);
|
||||
payload.insert("result".to_string(), payload_result);
|
||||
|
||||
let doc = Document {
|
||||
id: DATA_SOURCE_ID.to_string(),
|
||||
category: Some(DATA_SOURCE_ID.to_string()),
|
||||
payload: Some(payload),
|
||||
source: Some(DataSourceReference {
|
||||
r#type: Some(LOCAL_QUERY_SOURCE_TYPE.into()),
|
||||
name: Some(DATA_SOURCE_ID.into()),
|
||||
id: Some(DATA_SOURCE_ID.into()),
|
||||
icon: Some(String::from("font_Calculator")),
|
||||
}),
|
||||
..Default::default()
|
||||
};
|
||||
|
||||
QueryResponse {
|
||||
source: query_source,
|
||||
hits: vec![(doc, base_score)],
|
||||
total_hits: 1,
|
||||
}
|
||||
}
|
||||
Err(_) => {
|
||||
QueryResponse {
|
||||
source: query_source,
|
||||
hits: Vec::new(),
|
||||
total_hits: 0,
|
||||
}
|
||||
}
|
||||
}
|
||||
};
|
||||
|
||||
let spawn_result = tokio::task::spawn_blocking(closure).await;
|
||||
|
||||
match spawn_result {
|
||||
Ok(response) => Ok(response),
|
||||
Err(e) => std::panic::resume_unwind(e.into_panic()),
|
||||
}
|
||||
}
|
||||
}
|
||||
1
src-tauri/src/extension/built_in/file_system.rs
Normal file
@@ -0,0 +1 @@
|
||||
|
||||
482
src-tauri/src/extension/built_in/mod.rs
Normal file
@@ -0,0 +1,482 @@
|
||||
//! Built-in extensions and related stuff.
|
||||
|
||||
pub mod ai_overview;
|
||||
pub mod application;
|
||||
pub mod calculator;
|
||||
pub mod file_system;
|
||||
pub mod pizza_engine_runtime;
|
||||
pub mod quick_ai_access;
|
||||
|
||||
use super::Extension;
|
||||
use crate::extension::built_in::application::{set_apps_hotkey, unset_apps_hotkey};
|
||||
use crate::extension::{
|
||||
alter_extension_json_file, ExtensionBundleIdBorrowed, PLUGIN_JSON_FILE_NAME,
|
||||
};
|
||||
use crate::{SearchSourceRegistry, GLOBAL_TAURI_APP_HANDLE};
|
||||
use anyhow::Context;
|
||||
use std::path::{Path, PathBuf};
|
||||
use std::sync::LazyLock;
|
||||
use tauri::{AppHandle, Manager, Runtime};
|
||||
|
||||
pub(crate) static BUILT_IN_EXTENSION_DIRECTORY: LazyLock<PathBuf> = LazyLock::new(|| {
|
||||
let mut resource_dir = GLOBAL_TAURI_APP_HANDLE
|
||||
.get()
|
||||
.expect("global tauri app handle not set")
|
||||
.path()
|
||||
.app_data_dir()
|
||||
.expect(
|
||||
"User home directory not found, which should be impossible on desktop environments",
|
||||
);
|
||||
resource_dir.push("built_in_extensions");
|
||||
|
||||
resource_dir
|
||||
});
|
||||
|
||||
/// Helper function to load the built-in extension specified by `extension_id`, used
|
||||
/// in `list_built_in_extensions()`.
|
||||
///
|
||||
/// For built-in extensions, users are only allowed to edit these fields:
|
||||
///
|
||||
/// 1. alias (if this extension supports alias)
|
||||
/// 2. hotkey (if this extension supports hotkey)
|
||||
/// 3. enabled
|
||||
///
|
||||
/// If
|
||||
///
|
||||
/// 1. The above fields have invalid value
|
||||
/// 2. Other fields are modified
|
||||
///
|
||||
/// we ignore and reset them to the default value.
|
||||
async fn load_built_in_extension(
|
||||
built_in_extensions_dir: &Path,
|
||||
extension_id: &str,
|
||||
default_plugin_json_file: &str,
|
||||
) -> Result<Extension, String> {
|
||||
let mut extension_dir = built_in_extensions_dir.join(extension_id);
|
||||
let mut default_plugin_json = serde_json::from_str::<Extension>(&default_plugin_json_file).unwrap_or_else( |e| {
|
||||
panic!("the default extension {} file of built-in extension [{}] cannot be parsed as a valid [struct Extension], error [{}]", PLUGIN_JSON_FILE_NAME, extension_id, e);
|
||||
});
|
||||
|
||||
if !extension_dir.try_exists().map_err(|e| e.to_string())? {
|
||||
tokio::fs::create_dir_all(extension_dir.as_path())
|
||||
.await
|
||||
.map_err(|e| e.to_string())?;
|
||||
}
|
||||
|
||||
let plugin_json_file_path = {
|
||||
extension_dir.push(PLUGIN_JSON_FILE_NAME);
|
||||
extension_dir
|
||||
};
|
||||
|
||||
// If the JSON file does not exist, create a file with the default template and return.
|
||||
if !plugin_json_file_path
|
||||
.try_exists()
|
||||
.map_err(|e| e.to_string())?
|
||||
{
|
||||
tokio::fs::write(plugin_json_file_path, default_plugin_json_file)
|
||||
.await
|
||||
.map_err(|e| e.to_string())?;
|
||||
|
||||
return Ok(default_plugin_json);
|
||||
}
|
||||
|
||||
let plugin_json_file_content = tokio::fs::read_to_string(plugin_json_file_path.as_path())
|
||||
.await
|
||||
.map_err(|e| e.to_string())?;
|
||||
let res_plugin_json = serde_json::from_str::<Extension>(&plugin_json_file_content);
|
||||
let Ok(plugin_json) = res_plugin_json else {
|
||||
log::warn!("user invalidated built-in extension [{}] file, overwriting it with the default template", extension_id);
|
||||
|
||||
// If the JSON file cannot be parsed as `struct Extension`, overwrite it with the default template and return.
|
||||
tokio::fs::write(plugin_json_file_path, default_plugin_json_file)
|
||||
.await
|
||||
.map_err(|e| e.to_string())?;
|
||||
|
||||
return Ok(default_plugin_json);
|
||||
};
|
||||
|
||||
// Users are only allowed to edit the below fields
|
||||
// 1. alias (if this extension supports alias)
|
||||
// 2. hotkey (if this extension supports hotkey)
|
||||
// 3. enabled
|
||||
// so we ignore all other fields.
|
||||
let alias = if default_plugin_json.supports_alias_hotkey() {
|
||||
plugin_json.alias.clone()
|
||||
} else {
|
||||
None
|
||||
};
|
||||
let hotkey = if default_plugin_json.supports_alias_hotkey() {
|
||||
plugin_json.hotkey.clone()
|
||||
} else {
|
||||
None
|
||||
};
|
||||
|
||||
let enabled = plugin_json.enabled;
|
||||
|
||||
default_plugin_json.alias = alias;
|
||||
default_plugin_json.hotkey = hotkey;
|
||||
default_plugin_json.enabled = enabled;
|
||||
|
||||
let final_plugin_json_file_content = serde_json::to_string_pretty(&default_plugin_json)
|
||||
.expect("failed to serialize `struct Extension`");
|
||||
tokio::fs::write(plugin_json_file_path, final_plugin_json_file_content)
|
||||
.await
|
||||
.map_err(|e| e.to_string())?;
|
||||
|
||||
Ok(default_plugin_json)
|
||||
}
|
||||
|
||||
/// Return the built-in extension list.
|
||||
///
|
||||
/// Will create extension files when they are not found.
|
||||
///
|
||||
/// Users may put extension files in the built-in extension directory, but
|
||||
/// we do not care and will ignore them.
|
||||
///
|
||||
/// We only read alias/hotkey/enabled from the JSON file, we have ensured that if
|
||||
/// alias/hotkey is not supported, then it will be `None`. Besides that, no further
|
||||
/// validation is needed because nothing could go wrong.
|
||||
pub(crate) async fn list_built_in_extensions() -> Result<Vec<Extension>, String> {
|
||||
let dir = BUILT_IN_EXTENSION_DIRECTORY.as_path();
|
||||
|
||||
let mut built_in_extensions = Vec::new();
|
||||
built_in_extensions.push(
|
||||
load_built_in_extension(
|
||||
dir,
|
||||
application::QUERYSOURCE_ID_DATASOURCE_ID_DATASOURCE_NAME,
|
||||
application::PLUGIN_JSON_FILE,
|
||||
)
|
||||
.await?,
|
||||
);
|
||||
built_in_extensions.push(
|
||||
load_built_in_extension(
|
||||
dir,
|
||||
calculator::DATA_SOURCE_ID,
|
||||
calculator::PLUGIN_JSON_FILE,
|
||||
)
|
||||
.await?,
|
||||
);
|
||||
built_in_extensions.push(
|
||||
load_built_in_extension(
|
||||
dir,
|
||||
ai_overview::EXTENSION_ID,
|
||||
ai_overview::PLUGIN_JSON_FILE,
|
||||
)
|
||||
.await?,
|
||||
);
|
||||
built_in_extensions.push(
|
||||
load_built_in_extension(
|
||||
dir,
|
||||
quick_ai_access::EXTENSION_ID,
|
||||
quick_ai_access::PLUGIN_JSON_FILE,
|
||||
)
|
||||
.await?,
|
||||
);
|
||||
|
||||
Ok(built_in_extensions)
|
||||
}
|
||||
|
||||
pub(super) async fn init_built_in_extension<R: Runtime>(
|
||||
tauri_app_handle: &AppHandle<R>,
|
||||
extension: &Extension,
|
||||
search_source_registry: &SearchSourceRegistry,
|
||||
) -> Result<(), String> {
|
||||
log::trace!("initializing built-in extensions");
|
||||
|
||||
if extension.id == application::QUERYSOURCE_ID_DATASOURCE_ID_DATASOURCE_NAME {
|
||||
search_source_registry
|
||||
.register_source(application::ApplicationSearchSource)
|
||||
.await;
|
||||
set_apps_hotkey(&tauri_app_handle)?;
|
||||
log::debug!("built-in extension [{}] initialized", extension.id);
|
||||
}
|
||||
|
||||
if extension.id == calculator::DATA_SOURCE_ID {
|
||||
let calculator_search = calculator::CalculatorSource::new(2000f64);
|
||||
search_source_registry
|
||||
.register_source(calculator_search)
|
||||
.await;
|
||||
log::debug!("built-in extension [{}] initialized", extension.id);
|
||||
}
|
||||
|
||||
Ok(())
|
||||
}
|
||||
|
||||
pub(crate) fn is_extension_built_in(bundle_id: &ExtensionBundleIdBorrowed<'_>) -> bool {
|
||||
bundle_id.developer.is_none()
|
||||
}
|
||||
|
||||
pub(crate) async fn enable_built_in_extension(
|
||||
bundle_id: &ExtensionBundleIdBorrowed<'_>,
|
||||
) -> Result<(), String> {
|
||||
let tauri_app_handle = GLOBAL_TAURI_APP_HANDLE
|
||||
.get()
|
||||
.expect("global tauri app handle not set");
|
||||
let search_source_registry_tauri_state = tauri_app_handle.state::<SearchSourceRegistry>();
|
||||
|
||||
let update_extension = |extension: &mut Extension| -> Result<(), String> {
|
||||
extension.enabled = true;
|
||||
Ok(())
|
||||
};
|
||||
|
||||
if bundle_id.extension_id == application::QUERYSOURCE_ID_DATASOURCE_ID_DATASOURCE_NAME
|
||||
&& bundle_id.sub_extension_id.is_none()
|
||||
{
|
||||
search_source_registry_tauri_state
|
||||
.register_source(application::ApplicationSearchSource)
|
||||
.await;
|
||||
set_apps_hotkey(tauri_app_handle)?;
|
||||
|
||||
alter_extension_json_file(
|
||||
&BUILT_IN_EXTENSION_DIRECTORY.as_path(),
|
||||
bundle_id,
|
||||
update_extension,
|
||||
)?;
|
||||
|
||||
return Ok(());
|
||||
}
|
||||
|
||||
// Check if this is an application
|
||||
if bundle_id.extension_id == application::QUERYSOURCE_ID_DATASOURCE_ID_DATASOURCE_NAME
|
||||
&& bundle_id.sub_extension_id.is_some()
|
||||
{
|
||||
let app_path = bundle_id.sub_extension_id.expect("just checked it is Some");
|
||||
application::enable_app_search(tauri_app_handle, app_path)?;
|
||||
return Ok(());
|
||||
}
|
||||
|
||||
if bundle_id.extension_id == calculator::DATA_SOURCE_ID {
|
||||
let calculator_search = calculator::CalculatorSource::new(2000f64);
|
||||
search_source_registry_tauri_state
|
||||
.register_source(calculator_search)
|
||||
.await;
|
||||
alter_extension_json_file(
|
||||
&BUILT_IN_EXTENSION_DIRECTORY.as_path(),
|
||||
bundle_id,
|
||||
update_extension,
|
||||
)?;
|
||||
return Ok(());
|
||||
}
|
||||
|
||||
if bundle_id.extension_id == quick_ai_access::EXTENSION_ID {
|
||||
alter_extension_json_file(
|
||||
&BUILT_IN_EXTENSION_DIRECTORY.as_path(),
|
||||
bundle_id,
|
||||
update_extension,
|
||||
)?;
|
||||
return Ok(());
|
||||
}
|
||||
|
||||
if bundle_id.extension_id == ai_overview::EXTENSION_ID {
|
||||
alter_extension_json_file(
|
||||
&BUILT_IN_EXTENSION_DIRECTORY.as_path(),
|
||||
bundle_id,
|
||||
update_extension,
|
||||
)?;
|
||||
return Ok(());
|
||||
}
|
||||
|
||||
Ok(())
|
||||
}
|
||||
|
||||
pub(crate) async fn disable_built_in_extension(
|
||||
bundle_id: &ExtensionBundleIdBorrowed<'_>,
|
||||
) -> Result<(), String> {
|
||||
let tauri_app_handle = GLOBAL_TAURI_APP_HANDLE
|
||||
.get()
|
||||
.expect("global tauri app handle not set");
|
||||
let search_source_registry_tauri_state = tauri_app_handle.state::<SearchSourceRegistry>();
|
||||
|
||||
let update_extension = |extension: &mut Extension| -> Result<(), String> {
|
||||
extension.enabled = false;
|
||||
Ok(())
|
||||
};
|
||||
|
||||
if bundle_id.extension_id == application::QUERYSOURCE_ID_DATASOURCE_ID_DATASOURCE_NAME
|
||||
&& bundle_id.sub_extension_id.is_none()
|
||||
{
|
||||
search_source_registry_tauri_state
|
||||
.remove_source(bundle_id.extension_id)
|
||||
.await;
|
||||
unset_apps_hotkey(tauri_app_handle)?;
|
||||
|
||||
alter_extension_json_file(
|
||||
&BUILT_IN_EXTENSION_DIRECTORY.as_path(),
|
||||
bundle_id,
|
||||
update_extension,
|
||||
)?;
|
||||
return Ok(());
|
||||
}
|
||||
|
||||
// Check if this is an application
|
||||
if bundle_id.extension_id == application::QUERYSOURCE_ID_DATASOURCE_ID_DATASOURCE_NAME
|
||||
&& bundle_id.sub_extension_id.is_some()
|
||||
{
|
||||
let app_path = bundle_id.sub_extension_id.expect("just checked it is Some");
|
||||
application::disable_app_search(tauri_app_handle, app_path)?;
|
||||
return Ok(());
|
||||
}
|
||||
|
||||
if bundle_id.extension_id == calculator::DATA_SOURCE_ID {
|
||||
search_source_registry_tauri_state
|
||||
.remove_source(bundle_id.extension_id)
|
||||
.await;
|
||||
alter_extension_json_file(
|
||||
&BUILT_IN_EXTENSION_DIRECTORY.as_path(),
|
||||
bundle_id,
|
||||
update_extension,
|
||||
)?;
|
||||
return Ok(());
|
||||
}
|
||||
|
||||
if bundle_id.extension_id == quick_ai_access::EXTENSION_ID {
|
||||
alter_extension_json_file(
|
||||
&BUILT_IN_EXTENSION_DIRECTORY.as_path(),
|
||||
bundle_id,
|
||||
update_extension,
|
||||
)?;
|
||||
|
||||
return Ok(());
|
||||
}
|
||||
|
||||
if bundle_id.extension_id == ai_overview::EXTENSION_ID {
|
||||
alter_extension_json_file(
|
||||
&BUILT_IN_EXTENSION_DIRECTORY.as_path(),
|
||||
bundle_id,
|
||||
update_extension,
|
||||
)?;
|
||||
|
||||
return Ok(());
|
||||
}
|
||||
|
||||
Ok(())
|
||||
}
|
||||
|
||||
pub(crate) fn set_built_in_extension_alias(bundle_id: &ExtensionBundleIdBorrowed<'_>, alias: &str) {
|
||||
let tauri_app_handle = GLOBAL_TAURI_APP_HANDLE
|
||||
.get()
|
||||
.expect("global tauri app handle not set");
|
||||
|
||||
if bundle_id.extension_id == application::QUERYSOURCE_ID_DATASOURCE_ID_DATASOURCE_NAME {
|
||||
if let Some(app_path) = bundle_id.sub_extension_id {
|
||||
application::set_app_alias(tauri_app_handle, app_path, alias);
|
||||
}
|
||||
}
|
||||
}
|
||||
|
||||
pub(crate) fn register_built_in_extension_hotkey(
|
||||
bundle_id: &ExtensionBundleIdBorrowed<'_>,
|
||||
hotkey: &str,
|
||||
) -> Result<(), String> {
|
||||
let tauri_app_handle = GLOBAL_TAURI_APP_HANDLE
|
||||
.get()
|
||||
.expect("global tauri app handle not set");
|
||||
|
||||
if bundle_id.extension_id == application::QUERYSOURCE_ID_DATASOURCE_ID_DATASOURCE_NAME {
|
||||
if let Some(app_path) = bundle_id.sub_extension_id {
|
||||
application::register_app_hotkey(&tauri_app_handle, app_path, hotkey)?;
|
||||
}
|
||||
}
|
||||
Ok(())
|
||||
}
|
||||
|
||||
pub(crate) fn unregister_built_in_extension_hotkey(
|
||||
bundle_id: &ExtensionBundleIdBorrowed<'_>,
|
||||
) -> Result<(), String> {
|
||||
let tauri_app_handle = GLOBAL_TAURI_APP_HANDLE
|
||||
.get()
|
||||
.expect("global tauri app handle not set");
|
||||
|
||||
if bundle_id.extension_id == application::QUERYSOURCE_ID_DATASOURCE_ID_DATASOURCE_NAME {
|
||||
if let Some(app_path) = bundle_id.sub_extension_id {
|
||||
application::unregister_app_hotkey(&tauri_app_handle, app_path)?;
|
||||
}
|
||||
}
|
||||
Ok(())
|
||||
}
|
||||
|
||||
/// Splits an extension ID into `(parent, sub)` at the first `.`.
///
/// The sub-extension part is `None` when the ID contains no dot.
fn split_extension_id(extension_id: &str) -> (&str, Option<&str>) {
    match extension_id.split_once('.') {
        Some((parent, sub)) => (parent, Some(sub)),
        None => (extension_id, None),
    }
}
|
||||
|
||||
fn load_extension_from_json_file(
|
||||
extension_directory: &Path,
|
||||
extension_id: &str,
|
||||
) -> Result<Extension, String> {
|
||||
let (parent_extension_id, _opt_sub_extension_id) = split_extension_id(extension_id);
|
||||
let json_file_path = {
|
||||
let mut extension_directory_path = extension_directory.join(parent_extension_id);
|
||||
extension_directory_path.push(PLUGIN_JSON_FILE_NAME);
|
||||
|
||||
extension_directory_path
|
||||
};
|
||||
|
||||
let mut extension = serde_json::from_reader::<_, Extension>(
|
||||
std::fs::File::open(&json_file_path)
|
||||
.with_context(|| {
|
||||
format!(
|
||||
"the [{}] file for extension [{}] is missing or broken",
|
||||
PLUGIN_JSON_FILE_NAME, parent_extension_id
|
||||
)
|
||||
})
|
||||
.map_err(|e| e.to_string())?,
|
||||
)
|
||||
.map_err(|e| e.to_string())?;
|
||||
|
||||
super::canonicalize_relative_icon_path(extension_directory, &mut extension)?;
|
||||
|
||||
Ok(extension)
|
||||
}
|
||||
|
||||
pub(crate) async fn is_built_in_extension_enabled(
|
||||
bundle_id: &ExtensionBundleIdBorrowed<'_>,
|
||||
) -> Result<bool, String> {
|
||||
let tauri_app_handle = GLOBAL_TAURI_APP_HANDLE
|
||||
.get()
|
||||
.expect("global tauri app handle not set");
|
||||
let search_source_registry_tauri_state = tauri_app_handle.state::<SearchSourceRegistry>();
|
||||
|
||||
if bundle_id.extension_id == application::QUERYSOURCE_ID_DATASOURCE_ID_DATASOURCE_NAME
|
||||
&& bundle_id.sub_extension_id.is_none()
|
||||
{
|
||||
return Ok(search_source_registry_tauri_state
|
||||
.get_source(bundle_id.extension_id)
|
||||
.await
|
||||
.is_some());
|
||||
}
|
||||
|
||||
// Check if this is an application
|
||||
if bundle_id.extension_id == application::QUERYSOURCE_ID_DATASOURCE_ID_DATASOURCE_NAME {
|
||||
if let Some(app_path) = bundle_id.sub_extension_id {
|
||||
return Ok(application::is_app_search_enabled(app_path));
|
||||
}
|
||||
}
|
||||
|
||||
if bundle_id.extension_id == calculator::DATA_SOURCE_ID {
|
||||
return Ok(search_source_registry_tauri_state
|
||||
.get_source(bundle_id.extension_id)
|
||||
.await
|
||||
.is_some());
|
||||
}
|
||||
|
||||
if bundle_id.extension_id == quick_ai_access::EXTENSION_ID {
|
||||
let extension = load_extension_from_json_file(
|
||||
&BUILT_IN_EXTENSION_DIRECTORY.as_path(),
|
||||
bundle_id.extension_id,
|
||||
)?;
|
||||
return Ok(extension.enabled);
|
||||
}
|
||||
|
||||
if bundle_id.extension_id == ai_overview::EXTENSION_ID {
|
||||
let extension = load_extension_from_json_file(
|
||||
&BUILT_IN_EXTENSION_DIRECTORY.as_path(),
|
||||
bundle_id.extension_id,
|
||||
)?;
|
||||
return Ok(extension.enabled);
|
||||
}
|
||||
|
||||
unreachable!("extension [{:?}] is not a built-in extension", bundle_id)
|
||||
}
|
||||
76
src-tauri/src/extension/built_in/pizza_engine_runtime.rs
Normal file
@@ -0,0 +1,76 @@
|
||||
//! We use Pizza Engine to index applications and local files. The engine will be
|
||||
//! run in the thread/runtime defined in this file.
|
||||
//!
|
||||
//! # Why such a thread/runtime is needed
|
||||
//!
|
||||
//! Generally, Tokio async runtime requires all the async tasks running on it to be
|
||||
//! `Send` and `Sync`, but the async tasks created by Pizza Engine are not,
|
||||
//! which forces us to create a dedicated thread/runtime to execute them.
|
||||
|
||||
use std::any::Any;
|
||||
use std::collections::hash_map::Entry;
|
||||
use std::collections::HashMap;
|
||||
use std::sync::OnceLock;
|
||||
|
||||
/// State owned by a search source that runs on the Pizza engine runtime.
///
/// Stored type-erased in the runtime's per-source map; implementations
/// expose themselves as `&mut dyn Any` so a `Task` can downcast the stored
/// state back to its concrete type.
pub(crate) trait SearchSourceState {
    #[cfg_attr(not(feature = "use_pizza_engine"), allow(unused))]
    fn as_mut_any(&mut self) -> &mut dyn Any;
}
|
||||
|
||||
/// A unit of work executed on the Pizza engine runtime thread.
///
/// `?Send` because the futures produced by Pizza Engine are not `Send`; the
/// trait object itself is still `Send + Sync` so tasks can be shipped to the
/// runtime thread over the channel.
#[async_trait::async_trait(?Send)]
pub(crate) trait Task: Send + Sync {
    /// ID of the search source this task belongs to; used as the key into
    /// the runtime's per-source state map.
    fn search_source_id(&self) -> &'static str;

    /// Execute the task. `state` is this source's state slot; a task may
    /// initialize, inspect, or replace it.
    async fn exec(&mut self, state: &mut Option<Box<dyn SearchSourceState>>);
}
|
||||
|
||||
/// Channel for submitting tasks to the Pizza engine runtime thread.
/// Set exactly once by `start_pizza_engine_runtime()`.
pub(crate) static RUNTIME_TX: OnceLock<tokio::sync::mpsc::UnboundedSender<Box<dyn Task>>> =
    OnceLock::new();
|
||||
|
||||
/// This function blocks until the runtime thread is ready for accepting tasks.
pub(crate) async fn start_pizza_engine_runtime() {
    const THREAD_NAME: &str = "Pizza engine runtime thread";

    log::trace!("starting Pizza engine runtime");
    // One-shot handshake: the runtime thread signals once `RUNTIME_TX` is
    // installed, so callers can safely submit tasks after this function returns.
    let (engine_start_signal_tx, engine_start_signal_rx) = tokio::sync::oneshot::channel();

    // Dedicated OS thread with its own Tokio runtime, because the engine's
    // futures are not `Send` and cannot run on the main multi-threaded runtime
    // (see the module docs).
    std::thread::Builder::new()
        .name(THREAD_NAME.into())
        .spawn(move || {
            let rt = tokio::runtime::Runtime::new().unwrap();

            let main = async {
                // Per-search-source state, keyed by `Task::search_source_id()`.
                // Slots start as `None`; tasks lazily initialize them.
                let mut states: HashMap<String, Option<Box<dyn SearchSourceState>>> =
                    HashMap::new();

                let (tx, mut rx) = tokio::sync::mpsc::unbounded_channel();
                RUNTIME_TX.set(tx).unwrap();

                // `RUNTIME_TX` is live — tell the caller we are ready.
                engine_start_signal_tx
                    .send(())
                    .expect("engine_start_signal_rx dropped");

                // Serially drain tasks until every sender is dropped.
                while let Some(mut task) = rx.recv().await {
                    let opt_search_source_state = match states.entry(task.search_source_id().into())
                    {
                        Entry::Occupied(o) => o.into_mut(),
                        Entry::Vacant(v) => v.insert(None),
                    };
                    task.exec(opt_search_source_state).await;
                }
            };

            rt.block_on(main);
        })
        .unwrap_or_else(|e| {
            panic!(
                "failed to start thread [{}] due to error [{}]",
                THREAD_NAME, e
            );
        });

    // Block until the handshake above completes.
    engine_start_signal_rx
        .await
        .expect("engine_start_signal_tx dropped, the runtime thread could be dead");
    log::trace!("Pizza engine runtime started");
}
|
||||
12
src-tauri/src/extension/built_in/quick_ai_access.rs
Normal file
@@ -0,0 +1,12 @@
|
||||
/// Extension ID of the built-in "Quick AI Access" extension.
pub(super) const EXTENSION_ID: &str = "QuickAIAccess";

/// Default `plugin.json` content, used to (re)create this built-in
/// extension's on-disk file when it is missing or invalid.
pub(crate) const PLUGIN_JSON_FILE: &str = r#"
{
  "id": "QuickAIAccess",
  "name": "Quick AI Access",
  "description": "...",
  "icon": "font_a-QuickAIAccess",
  "type": "ai_extension",
  "enabled": true
}
"#;
|
||||
757
src-tauri/src/extension/mod.rs
Normal file
@@ -0,0 +1,757 @@
|
||||
pub(crate) mod built_in;
|
||||
pub(crate) mod store;
|
||||
mod third_party;
|
||||
|
||||
use crate::common::document::OnOpened;
|
||||
use crate::{common::register::SearchSourceRegistry, GLOBAL_TAURI_APP_HANDLE};
|
||||
use anyhow::Context;
|
||||
use borrowme::{Borrow, ToOwned};
|
||||
use derive_more::Display;
|
||||
use serde::Deserialize;
|
||||
use serde::Serialize;
|
||||
use serde_json::Value as Json;
|
||||
use std::collections::HashSet;
|
||||
use std::path::Path;
|
||||
use tauri::Manager;
|
||||
use third_party::THIRD_PARTY_EXTENSIONS_SEARCH_SOURCE;
|
||||
|
||||
pub const LOCAL_QUERY_SOURCE_TYPE: &str = "local";
|
||||
const PLUGIN_JSON_FILE_NAME: &str = "plugin.json";
|
||||
const ASSETS_DIRECTORY_FILE_NAME: &str = "assets";
|
||||
|
||||
/// Serde default for `Extension::enabled`: extensions are enabled unless
/// their JSON explicitly says otherwise.
fn default_true() -> bool {
    true
}
|
||||
|
||||
#[derive(Debug, Deserialize, Serialize, Copy, Clone, Hash, PartialEq, Eq, Display)]
|
||||
#[serde(rename_all(serialize = "lowercase", deserialize = "lowercase"))]
|
||||
enum Platform {
|
||||
#[display("macOS")]
|
||||
Macos,
|
||||
#[display("Linux")]
|
||||
Linux,
|
||||
#[display("windows")]
|
||||
Windows,
|
||||
}
|
||||
|
||||
/// An extension as described by its `plugin.json` file (built-in or
/// third-party), plus fields Coco fills in after loading.
#[derive(Debug, Deserialize, Serialize, Clone)]
pub struct Extension {
    /// Extension ID.
    ///
    /// The ID doesn't uniquely identify an extension; its bundle ID (ID & developer) does.
    id: String,
    /// Extension name.
    name: String,
    /// ID of the developer.
    ///
    /// * For built-in extensions, this will always be None.
    /// * For third-party first-layer extensions, the on-disk plugin.json file
    ///   won't contain this field, but we will set this field for them after reading them into the memory.
    /// * For third-party sub extensions, this field will be None.
    developer: Option<String>,
    /// Platforms supported by this extension.
    ///
    /// If `None`, then this extension can be used on all the platforms.
    #[serde(skip_serializing_if = "Option::is_none")]
    platforms: Option<HashSet<Platform>>,
    /// Extension description.
    description: String,
    /// Specify the icon for this extension, multiple options are available:
    ///
    /// 1. It can be a path to the icon file, the path can be
    ///
    ///    * relative (relative to the "assets" directory)
    ///    * absolute
    /// 2. It can be a font class code, e.g., 'font_coco', if you want to use
    ///    Coco's built-in icons.
    ///
    /// In cases where your icon file is named similarly to a font class code, Coco
    /// will treat it as an icon file if it exists, i.e., if file `<extension>/assets/font_coco`
    /// exists, then Coco will use this file rather than the built-in 'font_coco' icon.
    icon: String,
    /// The kind of extension; determines which of the optional fields below apply.
    r#type: ExtensionType,
    /// If this is a Command extension, then action defines the operation to execute
    /// when it is triggered.
    #[serde(skip_serializing_if = "Option::is_none")]
    action: Option<CommandAction>,
    /// The link to open if this is a QuickLink extension.
    #[serde(skip_serializing_if = "Option::is_none")]
    quicklink: Option<QuickLink>,

    // If this extension is of type Group or Extension, then it behaves like a
    // directory, i.e., it could contain sub items. Each list holds the sub
    // extensions of the corresponding type.
    commands: Option<Vec<Extension>>,
    scripts: Option<Vec<Extension>>,
    quicklinks: Option<Vec<Extension>>,

    /// The alias of the extension.
    ///
    /// Extensions of type Group and Extension cannot have an alias.
    #[serde(skip_serializing_if = "Option::is_none")]
    alias: Option<String>,
    /// The hotkey of the extension.
    ///
    /// Extensions of type Group and Extension cannot have a hotkey.
    #[serde(skip_serializing_if = "Option::is_none")]
    hotkey: Option<String>,

    /// Is this extension enabled. Defaults to `true` when absent from the JSON.
    #[serde(default = "default_true")]
    enabled: bool,

    /// Extension settings
    #[serde(skip_serializing_if = "Option::is_none")]
    settings: Option<Json>,

    // We do not care about these fields, just take them regardless of what they are.
    screenshots: Option<Json>,
    url: Option<Json>,
    version: Option<Json>,
}
|
||||
|
||||
/// Bundle ID uniquely identifies an extension.
///
/// Owned counterpart of `ExtensionBundleIdBorrowed`. `developer` is `None`
/// for built-in extensions; `sub_extension_id` is `Some` when the ID refers
/// to an item nested inside a Group/Extension.
#[derive(Debug, Deserialize, Serialize, PartialEq, Clone)]
pub(crate) struct ExtensionBundleId {
    developer: Option<String>,
    extension_id: String,
    sub_extension_id: Option<String>,
}
|
||||
|
||||
/// Cheap owned-to-borrowed conversion (borrowme's `Borrow`); no allocation.
impl Borrow for ExtensionBundleId {
    type Target<'a> = ExtensionBundleIdBorrowed<'a>;

    fn borrow(&self) -> Self::Target<'_> {
        ExtensionBundleIdBorrowed {
            developer: self.developer.as_deref(),
            extension_id: &self.extension_id,
            sub_extension_id: self.sub_extension_id.as_deref(),
        }
    }
}
|
||||
|
||||
/// Reference version of `ExtensionBundleId`.
///
/// Used at API boundaries to avoid cloning the three ID strings.
#[derive(Debug, Serialize, PartialEq)]
pub(crate) struct ExtensionBundleIdBorrowed<'ext> {
    developer: Option<&'ext str>,
    extension_id: &'ext str,
    sub_extension_id: Option<&'ext str>,
}
|
||||
|
||||
/// Borrowed-to-owned conversion (borrowme's `ToOwned`); clones the ID strings.
impl ToOwned for ExtensionBundleIdBorrowed<'_> {
    type Owned = ExtensionBundleId;

    fn to_owned(&self) -> Self::Owned {
        ExtensionBundleId {
            developer: self.developer.map(|s| s.to_string()),
            extension_id: self.extension_id.to_string(),
            sub_extension_id: self.sub_extension_id.map(|s| s.to_string()),
        }
    }
}
|
||||
|
||||
/// Allow comparing an owned bundle ID directly against a borrowed one,
/// field by field, without converting either side.
impl<'ext> PartialEq<ExtensionBundleIdBorrowed<'ext>> for ExtensionBundleId {
    fn eq(&self, other: &ExtensionBundleIdBorrowed<'ext>) -> bool {
        self.developer.as_deref() == other.developer
            && self.extension_id == other.extension_id
            && self.sub_extension_id.as_deref() == other.sub_extension_id
    }
}
|
||||
|
||||
/// Mirror of the impl above so the comparison works in both directions.
impl<'ext> PartialEq<ExtensionBundleId> for ExtensionBundleIdBorrowed<'ext> {
    fn eq(&self, other: &ExtensionBundleId) -> bool {
        self.developer == other.developer.as_deref()
            && self.extension_id == other.extension_id
            && self.sub_extension_id == other.sub_extension_id.as_deref()
    }
}
|
||||
|
||||
impl Extension {
|
||||
/// WARNING: the bundle ID returned from this function always has its `sub_extension_id`
|
||||
/// set to `None`, this may not be what you want.
|
||||
pub(crate) fn bundle_id_borrowed(&self) -> ExtensionBundleIdBorrowed<'_> {
|
||||
ExtensionBundleIdBorrowed {
|
||||
developer: self.developer.as_deref(),
|
||||
extension_id: &self.id,
|
||||
sub_extension_id: None,
|
||||
}
|
||||
}
|
||||
|
||||
/// Whether this extension could be searched.
|
||||
pub(crate) fn searchable(&self) -> bool {
|
||||
self.on_opened().is_some()
|
||||
}
|
||||
/// Return what will happen when we open this extension.
|
||||
///
|
||||
/// `None` if it cannot be opened.
|
||||
pub(crate) fn on_opened(&self) -> Option<OnOpened> {
|
||||
match self.r#type {
|
||||
ExtensionType::Group => None,
|
||||
ExtensionType::Extension => None,
|
||||
ExtensionType::Command => Some(OnOpened::Command {
|
||||
action: self.action.clone().unwrap_or_else(|| {
|
||||
panic!(
|
||||
"Command extension [{}]'s [action] field is not set, something wrong with your extension validity check", self.id
|
||||
)
|
||||
}),
|
||||
}),
|
||||
ExtensionType::Application => Some(OnOpened::Application {
|
||||
app_path: self.id.clone(),
|
||||
}),
|
||||
ExtensionType::Script => todo!("not supported yet"),
|
||||
ExtensionType::Quicklink => todo!("not supported yet"),
|
||||
ExtensionType::Setting => todo!("not supported yet"),
|
||||
ExtensionType::Calculator => None,
|
||||
ExtensionType::AiExtension => None,
|
||||
}
|
||||
}
|
||||
|
||||
pub(crate) fn get_sub_extension(&self, sub_extension_id: &str) -> Option<&Self> {
|
||||
if !self.r#type.contains_sub_items() {
|
||||
return None;
|
||||
}
|
||||
|
||||
if let Some(ref commands) = self.commands {
|
||||
if let Some(sub_ext) = commands.iter().find(|cmd| cmd.id == sub_extension_id) {
|
||||
return Some(sub_ext);
|
||||
}
|
||||
}
|
||||
if let Some(ref scripts) = self.scripts {
|
||||
if let Some(sub_ext) = scripts.iter().find(|script| script.id == sub_extension_id) {
|
||||
return Some(sub_ext);
|
||||
}
|
||||
}
|
||||
if let Some(ref quick_links) = self.quicklinks {
|
||||
if let Some(sub_ext) = quick_links.iter().find(|link| link.id == sub_extension_id) {
|
||||
return Some(sub_ext);
|
||||
}
|
||||
}
|
||||
|
||||
None
|
||||
}
|
||||
|
||||
pub(crate) fn get_sub_extension_mut(&mut self, sub_extension_id: &str) -> Option<&mut Self> {
|
||||
if !self.r#type.contains_sub_items() {
|
||||
return None;
|
||||
}
|
||||
|
||||
if let Some(ref mut commands) = self.commands {
|
||||
if let Some(sub_ext) = commands.iter_mut().find(|cmd| cmd.id == sub_extension_id) {
|
||||
return Some(sub_ext);
|
||||
}
|
||||
}
|
||||
if let Some(ref mut scripts) = self.scripts {
|
||||
if let Some(sub_ext) = scripts
|
||||
.iter_mut()
|
||||
.find(|script| script.id == sub_extension_id)
|
||||
{
|
||||
return Some(sub_ext);
|
||||
}
|
||||
}
|
||||
if let Some(ref mut quick_links) = self.quicklinks {
|
||||
if let Some(sub_ext) = quick_links
|
||||
.iter_mut()
|
||||
.find(|link| link.id == sub_extension_id)
|
||||
{
|
||||
return Some(sub_ext);
|
||||
}
|
||||
}
|
||||
|
||||
None
|
||||
}
|
||||
|
||||
pub(crate) fn supports_alias_hotkey(&self) -> bool {
|
||||
let ty = self.r#type;
|
||||
|
||||
ty != ExtensionType::Group && ty != ExtensionType::Extension
|
||||
}
|
||||
}
|
||||
|
||||
/// What a Command extension runs when triggered: an executable plus its
/// optional argument list.
#[derive(Debug, Deserialize, Serialize, Clone)]
pub(crate) struct CommandAction {
    pub(crate) exec: String,
    pub(crate) args: Option<Vec<String>>,
}
|
||||
|
||||
/// Payload of a QuickLink extension: the link it opens.
#[derive(Debug, Deserialize, Serialize, Clone)]
pub struct QuickLink {
    link: String,
}
|
||||
|
||||
/// The kind of an extension; serialized in snake_case in plugin.json.
///
/// Group and Extension are container types (they hold sub items); the rest
/// are leaf types.
#[derive(Debug, PartialEq, Deserialize, Serialize, Clone, Display, Copy)]
#[serde(rename_all(serialize = "snake_case", deserialize = "snake_case"))]
pub enum ExtensionType {
    #[display("Group")]
    Group,
    #[display("Extension")]
    Extension,
    #[display("Command")]
    Command,
    #[display("Application")]
    Application,
    #[display("Script")]
    Script,
    #[display("Quicklink")]
    Quicklink,
    #[display("Setting")]
    Setting,
    #[display("Calculator")]
    Calculator,
    #[display("AI Extension")]
    AiExtension,
}
|
||||
|
||||
impl ExtensionType {
|
||||
pub(crate) fn contains_sub_items(&self) -> bool {
|
||||
self == &Self::Group || self == &Self::Extension
|
||||
}
|
||||
}
|
||||
|
||||
/// Helper function to filter out the extensions that do not satisfy the specified conditions.
///
/// Used in `list_extensions()`. The passes run in order: enabled filter,
/// type filter, query filter, and finally removal of emptied containers —
/// each pass prunes both top-level extensions and their sub items.
fn filter_out_extensions(
    extensions: &mut Vec<Extension>,
    query: Option<&str>,
    extension_type: Option<ExtensionType>,
    list_enabled: bool,
) {
    // apply `list_enabled`: drop disabled extensions and disabled sub items
    if list_enabled {
        extensions.retain(|ext| ext.enabled);
        for extension in extensions.iter_mut() {
            if extension.r#type.contains_sub_items() {
                if let Some(ref mut commands) = extension.commands {
                    commands.retain(|cmd| cmd.enabled);
                }
                if let Some(ref mut scripts) = extension.scripts {
                    scripts.retain(|script| script.enabled);
                }
                if let Some(ref mut quicklinks) = extension.quicklinks {
                    quicklinks.retain(|link| link.enabled);
                }
            }
        }
    }

    // apply extension type filter to non-group/extension extensions
    // (containers are kept so their matching sub items remain reachable)
    if let Some(extension_type) = extension_type {
        assert!(
            extension_type != ExtensionType::Group && extension_type != ExtensionType::Extension,
            "filtering in folder extensions is pointless"
        );

        extensions.retain(|ext| {
            let ty = ext.r#type;
            ty == ExtensionType::Group || ty == ExtensionType::Extension || ty == extension_type
        });

        // Filter sub-extensions to only include the requested type
        for extension in extensions.iter_mut() {
            if extension.r#type.contains_sub_items() {
                if let Some(ref mut commands) = extension.commands {
                    commands.retain(|cmd| cmd.r#type == extension_type);
                }
                if let Some(ref mut scripts) = extension.scripts {
                    scripts.retain(|script| script.r#type == extension_type);
                }
                if let Some(ref mut quicklinks) = extension.quicklinks {
                    quicklinks.retain(|link| link.r#type == extension_type);
                }
            }
        }

        // The built-in Application extension is special: technically it should
        // never be filtered out by the condition above, but users would be
        // surprised to choose a non-Application type and still see it in the
        // results. Remove it explicitly in that case.
        if let Some(idx) = extensions.iter().position(|ext| {
            ext.developer.is_none()
                && ext.id == built_in::application::QUERYSOURCE_ID_DATASOURCE_ID_DATASOURCE_NAME
        }) {
            if extension_type != ExtensionType::Application {
                extensions.remove(idx);
            }
        }
    }

    // apply query filter: case-insensitive substring match on name or alias
    if let Some(query) = query {
        let match_closure = |ext: &Extension| {
            let lowercase_title = ext.name.to_lowercase();
            let lowercase_alias = ext.alias.as_ref().map(|alias| alias.to_lowercase());
            let lowercase_query = query.to_lowercase();

            lowercase_title.contains(&lowercase_query)
                || lowercase_alias.map_or(false, |alias| alias.contains(&lowercase_query))
        };

        extensions.retain(|ext| {
            if ext.r#type.contains_sub_items() {
                // Keep all group/extension types
                true
            } else {
                // Apply filter to non-group/extension types
                match_closure(ext)
            }
        });

        // Filter sub-extensions in groups and extensions
        for extension in extensions.iter_mut() {
            if extension.r#type.contains_sub_items() {
                if let Some(ref mut commands) = extension.commands {
                    commands.retain(&match_closure);
                }
                if let Some(ref mut scripts) = extension.scripts {
                    scripts.retain(&match_closure);
                }
                if let Some(ref mut quicklinks) = extension.quicklinks {
                    quicklinks.retain(&match_closure);
                }
            }
        }
    }

    // Remove parent extensions (Group/Extension types) that have no sub-items after filtering
    extensions.retain(|ext| {
        if !ext.r#type.contains_sub_items() {
            return true;
        }

        // We don't apply this filter to the Application extension since its
        // sub-item list is always empty here — applications load at runtime.
        if ext.developer.is_none()
            && ext.id == built_in::application::QUERYSOURCE_ID_DATASOURCE_ID_DATASOURCE_NAME
        {
            return true;
        }

        let has_commands = ext
            .commands
            .as_ref()
            .map_or(false, |commands| !commands.is_empty());
        let has_scripts = ext
            .scripts
            .as_ref()
            .map_or(false, |scripts| !scripts.is_empty());
        let has_quicklinks = ext
            .quicklinks
            .as_ref()
            .map_or(false, |quicklinks| !quicklinks.is_empty());

        has_commands || has_scripts || has_quicklinks
    });
}
|
||||
|
||||
/// Return value:
|
||||
///
|
||||
/// * boolean: indicates if we found any invalid extensions
|
||||
/// * Vec<Extension>: loaded extensions
|
||||
#[tauri::command]
|
||||
pub(crate) async fn list_extensions(
|
||||
query: Option<String>,
|
||||
extension_type: Option<ExtensionType>,
|
||||
list_enabled: bool,
|
||||
) -> Result<(bool, Vec<Extension>), String> {
|
||||
log::trace!("loading extensions");
|
||||
|
||||
let third_party_dir = third_party::THIRD_PARTY_EXTENSIONS_DIRECTORY.as_path();
|
||||
if !third_party_dir.try_exists().map_err(|e| e.to_string())? {
|
||||
tokio::fs::create_dir_all(third_party_dir)
|
||||
.await
|
||||
.map_err(|e| e.to_string())?;
|
||||
}
|
||||
let (third_party_found_invalid_extension, mut third_party_extensions) =
|
||||
third_party::list_third_party_extensions(third_party_dir).await?;
|
||||
|
||||
let built_in_extensions = built_in::list_built_in_extensions().await?;
|
||||
|
||||
let found_invalid_extension = third_party_found_invalid_extension;
|
||||
let mut extensions = {
|
||||
third_party_extensions.extend(built_in_extensions);
|
||||
|
||||
third_party_extensions
|
||||
};
|
||||
|
||||
filter_out_extensions(
|
||||
&mut extensions,
|
||||
query.as_deref(),
|
||||
extension_type,
|
||||
list_enabled,
|
||||
);
|
||||
|
||||
Ok((found_invalid_extension, extensions))
|
||||
}
|
||||
|
||||
/// Initialize the given extensions and register their search sources.
///
/// Built-in extensions are drained out of `extensions` and initialized
/// individually; whatever remains is handed to the third-party search source.
///
/// NOTE(review): relies on `GLOBAL_TAURI_APP_HANDLE` being set before this runs
/// — it panics otherwise.
pub(crate) async fn init_extensions(mut extensions: Vec<Extension>) -> Result<(), String> {
    log::trace!("initializing extensions");

    let tauri_app_handle = GLOBAL_TAURI_APP_HANDLE
        .get()
        .expect("global tauri app handle not set");
    let search_source_registry_tauri_state = tauri_app_handle.state::<SearchSourceRegistry>();

    // Prepare the application index/store before any source registration.
    built_in::application::ApplicationSearchSource::prepare_index_and_store(
        tauri_app_handle.clone(),
    )
    .await?;

    // extension store
    search_source_registry_tauri_state.register_source(store::ExtensionStore).await;

    // Init the built-in enabled extensions.
    // `extract_if` REMOVES the matching (built-in) extensions from `extensions`,
    // so only third-party ones are left afterwards. Disabled built-ins are
    // extracted too (and thus dropped), but not initialized.
    for built_in_extension in extensions
        .extract_if(.., |ext| {
            built_in::is_extension_built_in(&ext.bundle_id_borrowed())
        })
        .filter(|ext| ext.enabled)
    {
        built_in::init_built_in_extension(
            tauri_app_handle,
            &built_in_extension,
            &search_source_registry_tauri_state,
        )
        .await?;
    }

    // Now the third-party extensions
    let third_party_search_source = third_party::ThirdPartyExtensionsSearchSource::new(extensions);
    third_party_search_source.init().await?;
    let third_party_search_source_clone = third_party_search_source.clone();
    // Set the global search source so that we can access it in `#[tauri::command]`s
    // ignore the result because this function will be invoked twice, which
    // means this global variable will be set twice.
    let _ = THIRD_PARTY_EXTENSIONS_SEARCH_SOURCE.set(third_party_search_source_clone);
    search_source_registry_tauri_state
        .register_source(third_party_search_source)
        .await;

    Ok(())
}
|
||||
|
||||
#[tauri::command]
|
||||
pub(crate) async fn enable_extension(bundle_id: ExtensionBundleId) -> Result<(), String> {
|
||||
let bundle_id_borrowed = bundle_id.borrow();
|
||||
|
||||
if built_in::is_extension_built_in(&bundle_id_borrowed) {
|
||||
built_in::enable_built_in_extension(&bundle_id_borrowed).await?;
|
||||
return Ok(());
|
||||
}
|
||||
|
||||
third_party::THIRD_PARTY_EXTENSIONS_SEARCH_SOURCE.get().expect("global third party search source not set, looks like init_extensions() has not been executed").enable_extension(&bundle_id_borrowed).await
|
||||
}
|
||||
|
||||
#[tauri::command]
|
||||
pub(crate) async fn disable_extension(bundle_id: ExtensionBundleId) -> Result<(), String> {
|
||||
let bundle_id_borrowed = bundle_id.borrow();
|
||||
|
||||
if built_in::is_extension_built_in(&bundle_id_borrowed) {
|
||||
built_in::disable_built_in_extension(&bundle_id_borrowed).await?;
|
||||
return Ok(());
|
||||
}
|
||||
third_party::THIRD_PARTY_EXTENSIONS_SEARCH_SOURCE.get().expect("global third party search source not set, looks like init_extensions() has not been executed").disable_extension(&bundle_id_borrowed).await
|
||||
}
|
||||
|
||||
#[tauri::command]
|
||||
pub(crate) async fn set_extension_alias(
|
||||
bundle_id: ExtensionBundleId,
|
||||
alias: String,
|
||||
) -> Result<(), String> {
|
||||
let bundle_id_borrowed = bundle_id.borrow();
|
||||
|
||||
if built_in::is_extension_built_in(&bundle_id_borrowed) {
|
||||
built_in::set_built_in_extension_alias(&bundle_id_borrowed, &alias);
|
||||
return Ok(());
|
||||
}
|
||||
third_party::THIRD_PARTY_EXTENSIONS_SEARCH_SOURCE.get().expect("global third party search source not set, looks like init_extensions() has not been executed").set_extension_alias(&bundle_id_borrowed, &alias).await
|
||||
}
|
||||
|
||||
#[tauri::command]
|
||||
pub(crate) async fn register_extension_hotkey(
|
||||
bundle_id: ExtensionBundleId,
|
||||
hotkey: String,
|
||||
) -> Result<(), String> {
|
||||
let bundle_id_borrowed = bundle_id.borrow();
|
||||
|
||||
if built_in::is_extension_built_in(&bundle_id_borrowed) {
|
||||
built_in::register_built_in_extension_hotkey(&bundle_id_borrowed, &hotkey)?;
|
||||
return Ok(());
|
||||
}
|
||||
third_party::THIRD_PARTY_EXTENSIONS_SEARCH_SOURCE.get().expect("global third party search source not set, looks like init_extensions() has not been executed").register_extension_hotkey(&bundle_id_borrowed, &hotkey).await
|
||||
}
|
||||
|
||||
/// NOTE: this function won't error out if the extension specified by `extension_id`
|
||||
/// has no hotkey set because we need it to behave like this.
|
||||
#[tauri::command]
|
||||
pub(crate) async fn unregister_extension_hotkey(
|
||||
bundle_id: ExtensionBundleId,
|
||||
) -> Result<(), String> {
|
||||
let bundle_id_borrowed = bundle_id.borrow();
|
||||
|
||||
if built_in::is_extension_built_in(&bundle_id_borrowed) {
|
||||
built_in::unregister_built_in_extension_hotkey(&bundle_id_borrowed)?;
|
||||
return Ok(());
|
||||
}
|
||||
third_party::THIRD_PARTY_EXTENSIONS_SEARCH_SOURCE.get().expect("global third party search source not set, looks like init_extensions() has not been executed").unregister_extension_hotkey(&bundle_id_borrowed).await?;
|
||||
|
||||
Ok(())
|
||||
}
|
||||
|
||||
#[tauri::command]
|
||||
pub(crate) async fn is_extension_enabled(bundle_id: ExtensionBundleId) -> Result<bool, String> {
|
||||
let bundle_id_borrowed = bundle_id.borrow();
|
||||
|
||||
if built_in::is_extension_built_in(&bundle_id_borrowed) {
|
||||
return built_in::is_built_in_extension_enabled(&bundle_id_borrowed).await;
|
||||
}
|
||||
third_party::THIRD_PARTY_EXTENSIONS_SEARCH_SOURCE.get().expect("global third party search source not set, looks like init_extensions() has not been executed").is_extension_enabled(&bundle_id_borrowed).await
|
||||
}
|
||||
|
||||
pub(crate) fn canonicalize_relative_icon_path(
|
||||
extension_dir: &Path,
|
||||
extension: &mut Extension,
|
||||
) -> Result<(), String> {
|
||||
fn _canonicalize_relative_icon_path(
|
||||
extension_dir: &Path,
|
||||
extension: &mut Extension,
|
||||
) -> Result<(), String> {
|
||||
let icon_str = &extension.icon;
|
||||
let icon_path = Path::new(icon_str);
|
||||
|
||||
if icon_path.is_relative() {
|
||||
let absolute_icon_path = {
|
||||
let mut assets_directory = extension_dir.join(ASSETS_DIRECTORY_FILE_NAME);
|
||||
assets_directory.push(icon_path);
|
||||
|
||||
assets_directory
|
||||
};
|
||||
|
||||
if absolute_icon_path.try_exists().map_err(|e| e.to_string())? {
|
||||
extension.icon = absolute_icon_path
|
||||
.into_os_string()
|
||||
.into_string()
|
||||
.expect("path should be UTF-8 encoded");
|
||||
}
|
||||
}
|
||||
|
||||
Ok(())
|
||||
}
|
||||
|
||||
_canonicalize_relative_icon_path(extension_dir, extension)?;
|
||||
|
||||
if let Some(commands) = &mut extension.commands {
|
||||
for command in commands {
|
||||
_canonicalize_relative_icon_path(extension_dir, command)?;
|
||||
}
|
||||
}
|
||||
|
||||
if let Some(scripts) = &mut extension.scripts {
|
||||
for script in scripts {
|
||||
_canonicalize_relative_icon_path(extension_dir, script)?;
|
||||
}
|
||||
}
|
||||
|
||||
if let Some(quick_links) = &mut extension.quicklinks {
|
||||
for quick_link in quick_links {
|
||||
_canonicalize_relative_icon_path(extension_dir, quick_link)?;
|
||||
}
|
||||
}
|
||||
|
||||
Ok(())
|
||||
}
|
||||
|
||||
/// Apply `how` to the extension identified by `bundle_id` inside its on-disk
/// `plugin.json`, then write the (pretty-printed) JSON back to the same file.
///
/// `extension_directory` is the root that holds the extension, optionally
/// nested under a developer directory when `bundle_id.developer` is `Some`.
fn alter_extension_json_file(
    extension_directory: &Path,
    bundle_id: &ExtensionBundleIdBorrowed<'_>,
    how: impl Fn(&mut Extension) -> Result<(), String>,
) -> Result<(), String> {
    /// Perform `how` against the extension specified by `bundle_id`.
    ///
    /// Please note that `bundle_id` could point to a sub extension if its
    /// `sub_extension_id` is Some; in that case the sub extension is searched
    /// in commands, scripts, then quicklinks.
    pub(crate) fn modify(
        root_extension: &mut Extension,
        bundle_id: &ExtensionBundleIdBorrowed<'_>,
        how: impl FnOnce(&mut Extension) -> Result<(), String>,
    ) -> Result<(), String> {
        let (parent_extension_id, opt_sub_extension_id) =
            (bundle_id.extension_id, bundle_id.sub_extension_id);
        assert_eq!(
            parent_extension_id, root_extension.id,
            "modify() should be invoked against a parent extension"
        );

        // No sub extension ID: `how` applies to the root extension itself.
        let Some(sub_extension_id) = opt_sub_extension_id else {
            how(root_extension)?;
            return Ok(());
        };

        // Search in commands
        if let Some(ref mut commands) = root_extension.commands {
            if let Some(command) = commands.iter_mut().find(|cmd| cmd.id == sub_extension_id) {
                how(command)?;
                return Ok(());
            }
        }

        // Search in scripts
        if let Some(ref mut scripts) = root_extension.scripts {
            if let Some(script) = scripts.iter_mut().find(|scr| scr.id == sub_extension_id) {
                how(script)?;
                return Ok(());
            }
        }

        // Search in quick_links
        if let Some(ref mut quick_links) = root_extension.quicklinks {
            if let Some(link) = quick_links
                .iter_mut()
                .find(|lnk| lnk.id == sub_extension_id)
            {
                how(link)?;
                return Ok(());
            }
        }

        Err(format!(
            "extension [{:?}] not found in {:?}",
            bundle_id, root_extension
        ))
    }

    log::debug!(
        "altering extension JSON file for extension [{:?}]",
        bundle_id
    );

    // <extension_directory>[/<developer>]/<extension_id>/plugin.json
    let json_file_path = {
        let mut path = extension_directory.to_path_buf();

        if let Some(developer) = bundle_id.developer {
            path.push(developer);
        }
        path.push(bundle_id.extension_id);
        path.push(PLUGIN_JSON_FILE_NAME);

        path
    };

    let mut extension = serde_json::from_reader::<_, Extension>(
        std::fs::File::open(&json_file_path)
            .with_context(|| {
                format!(
                    "the [{}] file for extension [{:?}] is missing or broken",
                    PLUGIN_JSON_FILE_NAME, bundle_id
                )
            })
            .map_err(|e| e.to_string())?,
    )
    .map_err(|e| e.to_string())?;

    modify(&mut extension, bundle_id, how)?;

    // Persist the mutated extension back to disk, overwriting plugin.json.
    std::fs::write(
        &json_file_path,
        serde_json::to_string_pretty(&extension).map_err(|e| e.to_string())?,
    )
    .map_err(|e| e.to_string())?;

    Ok(())
}
|
||||
345
src-tauri/src/extension/store.rs
Normal file
@@ -0,0 +1,345 @@
|
||||
//! Extension store related stuff.
|
||||
|
||||
use super::LOCAL_QUERY_SOURCE_TYPE;
|
||||
use crate::common::document::DataSourceReference;
|
||||
use crate::common::document::Document;
|
||||
use crate::common::error::SearchError;
|
||||
use crate::common::search::QueryResponse;
|
||||
use crate::common::search::QuerySource;
|
||||
use crate::common::search::SearchQuery;
|
||||
use crate::common::traits::SearchSource;
|
||||
use crate::extension::canonicalize_relative_icon_path;
|
||||
use crate::extension::third_party::THIRD_PARTY_EXTENSIONS_DIRECTORY;
|
||||
use crate::extension::Extension;
|
||||
use crate::extension::PLUGIN_JSON_FILE_NAME;
|
||||
use crate::extension::THIRD_PARTY_EXTENSIONS_SEARCH_SOURCE;
|
||||
use crate::server::http_client::HttpClient;
|
||||
use async_trait::async_trait;
|
||||
use reqwest::StatusCode;
|
||||
use serde_json::Map as JsonObject;
|
||||
use serde_json::Value as Json;
|
||||
|
||||
/// Identifier used both as the query-source ID and as the document ID/title.
const DATA_SOURCE_ID: &str = "Extension Store";

/// Search source that surfaces a single "Extension Store" entry in local search.
pub(crate) struct ExtensionStore;
|
||||
|
||||
#[async_trait]
|
||||
impl SearchSource for ExtensionStore {
|
||||
fn get_type(&self) -> QuerySource {
|
||||
QuerySource {
|
||||
r#type: LOCAL_QUERY_SOURCE_TYPE.into(),
|
||||
name: hostname::get()
|
||||
.unwrap_or(DATA_SOURCE_ID.into())
|
||||
.to_string_lossy()
|
||||
.into(),
|
||||
id: DATA_SOURCE_ID.into(),
|
||||
}
|
||||
}
|
||||
|
||||
async fn search(&self, query: SearchQuery) -> Result<QueryResponse, SearchError> {
|
||||
const SCORE: f64 = 2000.0;
|
||||
|
||||
let Some(query_string) = query.query_strings.get("query") else {
|
||||
return Ok(QueryResponse {
|
||||
source: self.get_type(),
|
||||
hits: Vec::new(),
|
||||
total_hits: 0,
|
||||
});
|
||||
};
|
||||
|
||||
let lowercase_query_string = query_string.to_lowercase();
|
||||
let expected_str = "extension store";
|
||||
|
||||
if expected_str.contains(&lowercase_query_string) {
|
||||
let doc = Document {
|
||||
id: DATA_SOURCE_ID.to_string(),
|
||||
category: Some(DATA_SOURCE_ID.to_string()),
|
||||
title: Some(DATA_SOURCE_ID.to_string()),
|
||||
icon: Some("font_Store".to_string()),
|
||||
source: Some(DataSourceReference {
|
||||
r#type: Some(LOCAL_QUERY_SOURCE_TYPE.into()),
|
||||
name: Some(DATA_SOURCE_ID.into()),
|
||||
id: Some(DATA_SOURCE_ID.into()),
|
||||
icon: Some("font_Store".to_string()),
|
||||
}),
|
||||
..Default::default()
|
||||
};
|
||||
|
||||
Ok(QueryResponse {
|
||||
source: self.get_type(),
|
||||
hits: vec![(doc, SCORE)],
|
||||
total_hits: 1,
|
||||
})
|
||||
} else {
|
||||
Ok(QueryResponse {
|
||||
source: self.get_type(),
|
||||
hits: Vec::new(),
|
||||
total_hits: 0,
|
||||
})
|
||||
}
|
||||
}
|
||||
}
|
||||
|
||||
/// Search the default Coco server's extension store.
///
/// Sends an Elasticsearch-style request to `store/extension/_search` and
/// returns each hit's `_source` object, augmented with a boolean field
/// `installed` that tells whether the extension already exists locally.
///
/// NOTE(review): this panics (`expect`/`panic!`) when the response does not
/// have the expected ES shape — presumably the server is considered trusted.
#[tauri::command]
pub(crate) async fn search_extension(
    query_params: Option<Vec<String>>,
) -> Result<Vec<Json>, String> {
    let response = HttpClient::get(
        "default_coco_server",
        "store/extension/_search",
        query_params,
    )
    .await
    .map_err(|e| format!("Failed to send request: {:?}", e))?;

    // The response of a ES style search request
    let mut response: JsonObject<String, Json> = response
        .json()
        .await
        .map_err(|e| format!("Failed to parse response: {:?}", e))?;

    let hits_json = response
        .remove("hits")
        .expect("the JSON response should contain field [hits]");
    let mut hits = match hits_json {
        Json::Object(obj) => obj,
        _ => panic!(
            "field [hits] should be a JSON object, but it is not, value: [{}]",
            hits_json
        ),
    };

    // A missing "hits.hits" array means no results.
    let Some(hits_hits_json) = hits.remove("hits") else {
        return Ok(Vec::new());
    };

    let hits_hits = match hits_hits_json {
        Json::Array(arr) => arr,
        _ => panic!(
            "field [hits.hits] should be an array, but it is not, value: [{}]",
            hits_hits_json
        ),
    };

    let mut extensions = Vec::with_capacity(hits_hits.len());
    for hit in hits_hits {
        let mut hit_obj = match hit {
            Json::Object(obj) => obj,
            _ => panic!(
                "each hit in [hits.hits] should be a JSON object, but it is not, value: [{}]",
                hit
            ),
        };
        let source = hit_obj
            .remove("_source")
            .expect("each hit should contain field [_source]");

        let mut source_obj = match source {
            Json::Object(obj) => obj,
            _ => panic!(
                "field [_source] should be a JSON object, but it is not, value: [{}]",
                source
            ),
        };

        let developer_id = source_obj
            .get("developer")
            .and_then(|dev| dev.get("id"))
            .and_then(|id| id.as_str())
            .expect("developer.id should exist")
            .to_string();

        let extension_id = source_obj
            .get("id")
            .and_then(|id| id.as_str())
            .expect("extension id should exist")
            .to_string();

        // Annotate each hit so the frontend can render install/uninstall state.
        let installed = is_extension_installed(developer_id, extension_id).await;
        source_obj.insert("installed".to_string(), Json::Bool(installed));

        extensions.push(Json::Object(source_obj));
    }

    Ok(extensions)
}
|
||||
|
||||
async fn is_extension_installed(developer: String, extension_id: String) -> bool {
|
||||
THIRD_PARTY_EXTENSIONS_SEARCH_SOURCE
|
||||
.get()
|
||||
.unwrap()
|
||||
.extension_exists(&developer, &extension_id)
|
||||
.await
|
||||
}
|
||||
|
||||
/// Download and install the extension `id` from the default Coco server's store.
///
/// Steps:
/// 1. Download the extension bundle (a zip archive) into memory.
/// 2. Parse its `plugin.json`, replace the `developer` object with the plain
///    developer-ID string, and assign sequential IDs to sub-extensions
///    (commands, quicklinks, scripts) that lack one.
/// 3. Extract the archive under the third-party extensions directory.
/// 4. Write the normalized `plugin.json` back and register the extension with
///    the in-memory third-party search source.
#[tauri::command]
pub(crate) async fn install_extension(id: String) -> Result<(), String> {
    let path = format!("store/extension/{}/_download", id);
    let response = HttpClient::get("default_coco_server", &path, None)
        .await
        .map_err(|e| format!("Failed to download extension: {}", e))?;

    if response.status() == StatusCode::NOT_FOUND {
        return Err(format!("extension [{}] not found", id));
    }

    let bytes = response
        .bytes()
        .await
        .map_err(|e| format!("Failed to read response bytes: {}", e))?;

    // The whole bundle is buffered in memory and read as a zip archive.
    let cursor = std::io::Cursor::new(bytes);
    let mut archive =
        zip::ZipArchive::new(cursor).map_err(|e| format!("Failed to read zip archive: {}", e))?;

    let mut plugin_json = archive.by_name("plugin.json").map_err(|e| e.to_string())?;
    let mut plugin_json_content = String::new();
    std::io::Read::read_to_string(&mut plugin_json, &mut plugin_json_content)
        .map_err(|e| e.to_string())?;
    let mut extension: Json = serde_json::from_str(&plugin_json_content)
        .map_err(|e| format!("Failed to parse plugin.json: {}", e))?;

    // The store serves `developer` as an object; locally only its ID is kept,
    // so the object is replaced with the plain ID string.
    let mut_ref_to_developer_object: &mut Json = extension
        .as_object_mut()
        .expect("plugin.json should be an object")
        .get_mut("developer")
        .expect("plugin.json should contain field [developer]");
    let developer_id = mut_ref_to_developer_object
        .get("id")
        .expect("plugin.json should contain [developer.id]")
        .as_str()
        .expect("plugin.json field [developer.id] should be a string");
    *mut_ref_to_developer_object = Json::String(developer_id.into());

    // Set IDs for sub-extensions (commands, quicklinks, scripts)
    let mut counter = 0;
    // Set IDs for commands
    // Helper function to set IDs for array fields: every item in the named
    // array that has no "id" key receives the next counter value (as a string).
    fn set_ids_for_field(extension: &mut Json, field_name: &str, counter: &mut i32) {
        if let Some(field) = extension.as_object_mut().unwrap().get_mut(field_name) {
            if let Some(array) = field.as_array_mut() {
                for item in array {
                    if let Some(item_obj) = item.as_object_mut() {
                        if !item_obj.contains_key("id") {
                            item_obj.insert("id".to_string(), Json::String(counter.to_string()));
                            *counter += 1;
                        }
                    }
                }
            }
        }
    }

    // Set IDs for sub-extensions
    set_ids_for_field(&mut extension, "commands", &mut counter);
    set_ids_for_field(&mut extension, "quicklinks", &mut counter);
    set_ids_for_field(&mut extension, "scripts", &mut counter);

    let mut extension: Extension = serde_json::from_value(extension).unwrap_or_else(|e| {
        panic!(
            "cannot parse plugin.json as struct Extension, error [{:?}]",
            e
        );
    });

    // Release the `by_name` borrow so `archive` can be iterated below.
    drop(plugin_json);

    let developer = extension.developer.clone().unwrap_or_default();
    let extension_id = extension.id.clone();

    // Extract the zip file
    let extension_directory = {
        let mut path = THIRD_PARTY_EXTENSIONS_DIRECTORY.to_path_buf();
        path.push(developer);
        path.push(extension_id.as_str());
        path
    };

    tokio::fs::create_dir_all(extension_directory.as_path())
        .await
        .map_err(|e| e.to_string())?;

    // Extract all files except plugin.json
    for i in 0..archive.len() {
        let mut file = archive.by_index(i).map_err(|e| e.to_string())?;
        // `enclosed_name()` rejects entries that would escape the target
        // directory (e.g. containing ".."); such entries are skipped.
        let outpath = match file.enclosed_name() {
            Some(path) => extension_directory.join(path),
            None => continue,
        };

        // Skip the plugin.json file as we'll create it from the extension variable
        if file.name() == "plugin.json" {
            continue;
        }

        if file.name().ends_with('/') {
            tokio::fs::create_dir_all(&outpath)
                .await
                .map_err(|e| e.to_string())?;
        } else {
            if let Some(p) = outpath.parent() {
                if !p.exists() {
                    tokio::fs::create_dir_all(p)
                        .await
                        .map_err(|e| e.to_string())?;
                }
            }
            let mut outfile = tokio::fs::File::create(&outpath)
                .await
                .map_err(|e| e.to_string())?;
            // The zip reader is synchronous while the writer is async, so each
            // entry is fully buffered before being written out.
            let mut content = Vec::new();
            std::io::Read::read_to_end(&mut file, &mut content).map_err(|e| e.to_string())?;
            tokio::io::AsyncWriteExt::write_all(&mut outfile, &content)
                .await
                .map_err(|e| e.to_string())?;
        }
    }

    // Create plugin.json from the extension variable
    let plugin_json_path = extension_directory.join(PLUGIN_JSON_FILE_NAME);
    let extension_json = serde_json::to_string_pretty(&extension).map_err(|e| e.to_string())?;
    tokio::fs::write(&plugin_json_path, extension_json)
        .await
        .map_err(|e| e.to_string())?;

    // Turn it into an absolute path if it is a valid relative path because frontend code need this.
    canonicalize_relative_icon_path(&extension_directory, &mut extension)?;

    THIRD_PARTY_EXTENSIONS_SEARCH_SOURCE
        .get()
        .unwrap()
        .add_extension(extension)
        .await;

    Ok(())
}
|
||||
|
||||
#[tauri::command]
|
||||
pub(crate) async fn uninstall_extension(
|
||||
developer: String,
|
||||
extension_id: String,
|
||||
) -> Result<(), String> {
|
||||
let extension_dir = {
|
||||
let mut path = THIRD_PARTY_EXTENSIONS_DIRECTORY.join(developer.as_str());
|
||||
path.push(extension_id.as_str());
|
||||
|
||||
path
|
||||
};
|
||||
if !extension_dir.try_exists().map_err(|e| e.to_string())? {
|
||||
panic!(
|
||||
"we are uninstalling extension [{}/{}], but there is no such extension files on disk",
|
||||
developer, extension_id
|
||||
)
|
||||
}
|
||||
tokio::fs::remove_dir_all(extension_dir.as_path())
|
||||
.await
|
||||
.map_err(|e| e.to_string())?;
|
||||
|
||||
THIRD_PARTY_EXTENSIONS_SEARCH_SOURCE
|
||||
.get()
|
||||
.unwrap()
|
||||
.remove_extension(&developer, &extension_id)
|
||||
.await;
|
||||
|
||||
Ok(())
|
||||
}
|
||||
1160
src-tauri/src/extension/third_party.rs
Normal file
@@ -1,28 +1,26 @@
|
||||
mod assistant;
|
||||
mod autostart;
|
||||
mod common;
|
||||
mod local;
|
||||
mod extension;
|
||||
mod search;
|
||||
mod server;
|
||||
mod settings;
|
||||
mod setup;
|
||||
mod shortcut;
|
||||
mod util;
|
||||
|
||||
use crate::common::register::SearchSourceRegistry;
|
||||
// use crate::common::traits::SearchSource;
|
||||
use crate::common::{MAIN_WINDOW_LABEL, SETTINGS_WINDOW_LABEL};
|
||||
use crate::common::{CHECK_WINDOW_LABEL, MAIN_WINDOW_LABEL, SETTINGS_WINDOW_LABEL};
|
||||
use crate::server::servers::{load_or_insert_default_server, load_servers_token};
|
||||
use autostart::{change_autostart, enable_autostart};
|
||||
use autostart::{change_autostart, ensure_autostart_state_consistent};
|
||||
use lazy_static::lazy_static;
|
||||
use std::sync::Mutex;
|
||||
#[cfg(target_os = "macos")]
|
||||
use tauri::ActivationPolicy;
|
||||
use tauri::{
|
||||
AppHandle, Emitter, Manager, PhysicalPosition, Runtime, State, WebviewWindow, Window,
|
||||
WindowEvent,
|
||||
};
|
||||
use std::sync::OnceLock;
|
||||
use tauri::async_runtime::block_on;
|
||||
use tauri::plugin::TauriPlugin;
|
||||
use tauri::{AppHandle, Emitter, Manager, PhysicalPosition, Runtime, WebviewWindow, WindowEvent};
|
||||
use tauri_plugin_autostart::MacosLauncher;
|
||||
use tokio::runtime::Runtime as RT;
|
||||
|
||||
/// Tauri store name
|
||||
pub(crate) const COCO_TAURI_STORE: &str = "coco_tauri_store";
|
||||
@@ -31,8 +29,12 @@ lazy_static! {
|
||||
static ref PREVIOUS_MONITOR_NAME: Mutex<Option<String>> = Mutex::new(None);
|
||||
}
|
||||
|
||||
/// To allow us to access tauri's `AppHandle` when its context is inaccessible,
|
||||
/// store it globally. It will be set in `init()`.
|
||||
pub(crate) static GLOBAL_TAURI_APP_HANDLE: OnceLock<AppHandle> = OnceLock::new();
|
||||
|
||||
#[tauri::command]
|
||||
fn change_window_height(handle: AppHandle, height: u32) {
|
||||
async fn change_window_height(handle: AppHandle, height: u32) {
|
||||
let window: WebviewWindow = handle.get_webview_window(MAIN_WINDOW_LABEL).unwrap();
|
||||
|
||||
let mut size = window.outer_size().unwrap();
|
||||
@@ -42,10 +44,12 @@ fn change_window_height(handle: AppHandle, height: u32) {
|
||||
|
||||
#[derive(serde::Deserialize)]
|
||||
struct ThemeChangedPayload {
|
||||
#[allow(dead_code)]
|
||||
is_dark_mode: bool,
|
||||
}
|
||||
|
||||
#[derive(Clone, serde::Serialize)]
|
||||
#[allow(dead_code)]
|
||||
struct Payload {
|
||||
args: Vec<String>,
|
||||
cwd: String,
|
||||
@@ -53,16 +57,16 @@ struct Payload {
|
||||
|
||||
#[cfg_attr(mobile, tauri::mobile_entry_point)]
|
||||
pub fn run() {
|
||||
let mut ctx = tauri::generate_context!();
|
||||
// Initialize logger
|
||||
env_logger::init();
|
||||
let ctx = tauri::generate_context!();
|
||||
|
||||
let mut app_builder = tauri::Builder::default();
|
||||
// Set up logger first
|
||||
app_builder = app_builder.plugin(set_up_tauri_logger());
|
||||
|
||||
#[cfg(desktop)]
|
||||
{
|
||||
app_builder = app_builder.plugin(tauri_plugin_single_instance::init(|_app, argv, _cwd| {
|
||||
println!("a new app instance was opened with {argv:?} and the deep link event was already triggered");
|
||||
log::debug!("a new app instance was opened with {argv:?} and the deep link event was already triggered");
|
||||
// when defining deep link schemes at runtime, you must also check `argv` here
|
||||
}));
|
||||
}
|
||||
@@ -71,7 +75,7 @@ pub fn run() {
|
||||
.plugin(tauri_plugin_http::init())
|
||||
.plugin(tauri_plugin_shell::init())
|
||||
.plugin(tauri_plugin_autostart::init(
|
||||
MacosLauncher::AppleScript,
|
||||
MacosLauncher::LaunchAgent,
|
||||
None,
|
||||
))
|
||||
.plugin(tauri_plugin_deep_link::init())
|
||||
@@ -81,7 +85,9 @@ pub fn run() {
|
||||
.plugin(tauri_plugin_macos_permissions::init())
|
||||
.plugin(tauri_plugin_screenshots::init())
|
||||
.plugin(tauri_plugin_process::init())
|
||||
.plugin(tauri_plugin_updater::Builder::new().build());
|
||||
.plugin(tauri_plugin_updater::Builder::new().build())
|
||||
.plugin(tauri_plugin_windows_version::init())
|
||||
.plugin(tauri_plugin_opener::init());
|
||||
|
||||
// Conditional compilation for macOS
|
||||
#[cfg(target_os = "macos")]
|
||||
@@ -99,6 +105,8 @@ pub fn run() {
|
||||
show_coco,
|
||||
hide_coco,
|
||||
show_settings,
|
||||
show_check,
|
||||
hide_check,
|
||||
server::servers::get_server_token,
|
||||
server::servers::add_coco_server,
|
||||
server::servers::remove_coco_server,
|
||||
@@ -109,7 +117,8 @@ pub fn run() {
|
||||
server::servers::disable_server,
|
||||
server::auth::handle_sso_callback,
|
||||
server::profile::get_user_profiles,
|
||||
server::datasource::get_datasources_by_server,
|
||||
server::datasource::datasource_search,
|
||||
server::datasource::mcp_server_search,
|
||||
server::connector::get_connectors_by_server,
|
||||
search::query_coco_fusion,
|
||||
assistant::chat_history,
|
||||
@@ -119,41 +128,74 @@ pub fn run() {
|
||||
assistant::open_session_chat,
|
||||
assistant::close_session_chat,
|
||||
assistant::cancel_session_chat,
|
||||
assistant::delete_session_chat,
|
||||
assistant::update_session_chat,
|
||||
assistant::assistant_search,
|
||||
assistant::assistant_get,
|
||||
assistant::assistant_get_multi,
|
||||
// server::get_coco_server_datasources,
|
||||
// server::get_coco_server_connectors,
|
||||
server::websocket::connect_to_server,
|
||||
server::websocket::disconnect,
|
||||
get_app_search_source
|
||||
get_app_search_source,
|
||||
server::attachment::upload_attachment,
|
||||
server::attachment::get_attachment,
|
||||
server::attachment::delete_attachment,
|
||||
server::transcription::transcription,
|
||||
server::system_settings::get_system_settings,
|
||||
simulate_mouse_click,
|
||||
extension::built_in::application::get_app_list,
|
||||
extension::built_in::application::get_app_search_path,
|
||||
extension::built_in::application::get_app_metadata,
|
||||
extension::built_in::application::add_app_search_path,
|
||||
extension::built_in::application::remove_app_search_path,
|
||||
extension::built_in::application::reindex_applications,
|
||||
extension::list_extensions,
|
||||
extension::enable_extension,
|
||||
extension::disable_extension,
|
||||
extension::set_extension_alias,
|
||||
extension::register_extension_hotkey,
|
||||
extension::unregister_extension_hotkey,
|
||||
extension::is_extension_enabled,
|
||||
extension::store::search_extension,
|
||||
extension::store::install_extension,
|
||||
extension::store::uninstall_extension,
|
||||
settings::set_allow_self_signature,
|
||||
settings::get_allow_self_signature,
|
||||
assistant::ask_ai,
|
||||
crate::common::document::open,
|
||||
])
|
||||
.setup(|app| {
|
||||
#[cfg(target_os = "macos")]
|
||||
{
|
||||
log::trace!("hiding Dock icon on macOS");
|
||||
app.set_activation_policy(tauri::ActivationPolicy::Accessory);
|
||||
log::trace!("Dock icon should be hidden now");
|
||||
}
|
||||
|
||||
let app_handle = app.handle().clone();
|
||||
GLOBAL_TAURI_APP_HANDLE
|
||||
.set(app_handle.clone())
|
||||
.expect("variable already initialized");
|
||||
log::trace!("global Tauri app handle set");
|
||||
|
||||
let registry = SearchSourceRegistry::default();
|
||||
|
||||
app.manage(registry); // Store registry in Tauri's app state
|
||||
app.manage(server::websocket::WebSocketManager::default());
|
||||
|
||||
// Get app handle
|
||||
let app_handle = app.handle().clone();
|
||||
|
||||
// Create a single Tokio runtime instance
|
||||
let rt = RT::new().expect("Failed to create Tokio runtime");
|
||||
|
||||
// Use the runtime to spawn the async initialization tasks
|
||||
let init_app_handle = app.handle().clone();
|
||||
rt.spawn(async move {
|
||||
init(&init_app_handle).await; // Pass a reference to `app_handle`
|
||||
block_on(async {
|
||||
init(app.handle()).await;
|
||||
});
|
||||
|
||||
shortcut::enable_shortcut(&app);
|
||||
// enable_tray(app);
|
||||
enable_autostart(app);
|
||||
shortcut::enable_shortcut(app);
|
||||
|
||||
#[cfg(target_os = "macos")]
|
||||
app.set_activation_policy(ActivationPolicy::Accessory);
|
||||
ensure_autostart_state_consistent(app)?;
|
||||
|
||||
// app.listen("theme-changed", move |event| {
|
||||
// if let Ok(payload) = serde_json::from_str::<ThemeChangedPayload>(event.payload()) {
|
||||
// // switch_tray_icon(app.app_handle(), payload.is_dark_mode);
|
||||
// println!("Theme changed: is_dark_mode = {}", payload.is_dark_mode);
|
||||
// log::debug!("Theme changed: is_dark_mode = {}", payload.is_dark_mode);
|
||||
// }
|
||||
// });
|
||||
|
||||
@@ -173,13 +215,19 @@ pub fn run() {
|
||||
|
||||
let main_window = app.get_webview_window(MAIN_WINDOW_LABEL).unwrap();
|
||||
let settings_window = app.get_webview_window(SETTINGS_WINDOW_LABEL).unwrap();
|
||||
setup::default(app, main_window.clone(), settings_window.clone());
|
||||
let check_window = app.get_webview_window(CHECK_WINDOW_LABEL).unwrap();
|
||||
setup::default(
|
||||
app,
|
||||
main_window.clone(),
|
||||
settings_window.clone(),
|
||||
check_window.clone(),
|
||||
);
|
||||
|
||||
Ok(())
|
||||
})
|
||||
.on_window_event(|window, event| match event {
|
||||
WindowEvent::CloseRequested { api, .. } => {
|
||||
dbg!("Close requested event received");
|
||||
//dbg!("Close requested event received");
|
||||
window.hide().unwrap();
|
||||
api.prevent_close();
|
||||
}
|
||||
@@ -194,10 +242,10 @@ pub fn run() {
|
||||
has_visible_windows,
|
||||
..
|
||||
} => {
|
||||
dbg!(
|
||||
"Reopen event received: has_visible_windows = {}",
|
||||
has_visible_windows
|
||||
);
|
||||
// dbg!(
|
||||
// "Reopen event received: has_visible_windows = {}",
|
||||
// has_visible_windows
|
||||
// );
|
||||
if has_visible_windows {
|
||||
return;
|
||||
}
|
||||
@@ -211,77 +259,59 @@ pub fn run() {
|
||||
pub async fn init<R: Runtime>(app_handle: &AppHandle<R>) {
|
||||
// Await the async functions to load the servers and tokens
|
||||
if let Err(err) = load_or_insert_default_server(app_handle).await {
|
||||
eprintln!("Failed to load servers: {}", err);
|
||||
log::error!("Failed to load servers: {}", err);
|
||||
}
|
||||
|
||||
if let Err(err) = load_servers_token(app_handle).await {
|
||||
eprintln!("Failed to load server tokens: {}", err);
|
||||
log::error!("Failed to load server tokens: {}", err);
|
||||
}
|
||||
|
||||
let coco_servers = server::servers::get_all_servers();
|
||||
|
||||
// Get the registry from Tauri's state
|
||||
let registry: State<SearchSourceRegistry> = app_handle.state::<SearchSourceRegistry>();
|
||||
// let registry: State<SearchSourceRegistry> = app_handle.state::<SearchSourceRegistry>();
|
||||
|
||||
for server in coco_servers {
|
||||
crate::server::servers::try_register_server_to_search_source(app_handle.clone(), &server)
|
||||
.await;
|
||||
}
|
||||
}
|
||||
|
||||
async fn init_app_search_source<R: Runtime>(app_handle: &AppHandle<R>) -> Result<(), String> {
|
||||
let application_search =
|
||||
local::application::ApplicationSearchSource::new(app_handle.clone(), 1000f64).await?;
|
||||
|
||||
// Register the application search source
|
||||
let registry = app_handle.state::<SearchSourceRegistry>();
|
||||
registry.register_source(application_search).await;
|
||||
|
||||
Ok(())
|
||||
extension::built_in::pizza_engine_runtime::start_pizza_engine_runtime().await;
|
||||
}
|
||||
|
||||
#[tauri::command]
|
||||
async fn show_coco(app_handle: AppHandle) {
|
||||
handle_open_coco(&app_handle);
|
||||
}
|
||||
|
||||
#[tauri::command]
|
||||
fn hide_coco(app: tauri::AppHandle) {
|
||||
if let Some(window) = app.get_window(MAIN_WINDOW_LABEL) {
|
||||
match window.is_visible() {
|
||||
Ok(true) => {
|
||||
if let Err(err) = window.hide() {
|
||||
eprintln!("Failed to hide the window: {}", err);
|
||||
}
|
||||
}
|
||||
Ok(false) => {
|
||||
println!("Window is already hidden.");
|
||||
}
|
||||
Err(err) => {
|
||||
eprintln!("Failed to check window visibility: {}", err);
|
||||
}
|
||||
}
|
||||
}
|
||||
}
|
||||
|
||||
fn handle_open_coco(app: &AppHandle) {
|
||||
if let Some(window) = app.get_window(MAIN_WINDOW_LABEL) {
|
||||
async fn show_coco<R: Runtime>(app_handle: AppHandle<R>) {
|
||||
if let Some(window) = app_handle.get_webview_window(MAIN_WINDOW_LABEL) {
|
||||
move_window_to_active_monitor(&window);
|
||||
|
||||
window.show().unwrap();
|
||||
window.set_visible_on_all_workspaces(true).unwrap();
|
||||
window.set_always_on_top(true).unwrap();
|
||||
window.set_focus().unwrap();
|
||||
let _ = window.show();
|
||||
let _ = window.unminimize();
|
||||
let _ = window.set_focus();
|
||||
|
||||
let _ = app_handle.emit("show-coco", ());
|
||||
}
|
||||
}
|
||||
|
||||
fn move_window_to_active_monitor<R: Runtime>(window: &Window<R>) {
|
||||
dbg!("Moving window to active monitor");
|
||||
#[tauri::command]
|
||||
async fn hide_coco<R: Runtime>(app: AppHandle<R>) {
|
||||
if let Some(window) = app.get_webview_window(MAIN_WINDOW_LABEL) {
|
||||
if let Err(err) = window.hide() {
|
||||
log::error!("Failed to hide the window: {}", err);
|
||||
} else {
|
||||
log::debug!("Window successfully hidden.");
|
||||
}
|
||||
} else {
|
||||
log::error!("Main window not found.");
|
||||
}
|
||||
}
|
||||
|
||||
fn move_window_to_active_monitor<R: Runtime>(window: &WebviewWindow<R>) {
|
||||
//dbg!("Moving window to active monitor");
|
||||
// Try to get the available monitors, handle failure gracefully
|
||||
let available_monitors = match window.available_monitors() {
|
||||
Ok(monitors) => monitors,
|
||||
Err(e) => {
|
||||
eprintln!("Failed to get monitors: {}", e);
|
||||
log::error!("Failed to get monitors: {}", e);
|
||||
return;
|
||||
}
|
||||
};
|
||||
@@ -290,7 +320,7 @@ fn move_window_to_active_monitor<R: Runtime>(window: &Window<R>) {
|
||||
let cursor_position = match window.cursor_position() {
|
||||
Ok(pos) => Some(pos),
|
||||
Err(e) => {
|
||||
eprintln!("Failed to get cursor position: {}", e);
|
||||
log::error!("Failed to get cursor position: {}", e);
|
||||
None
|
||||
}
|
||||
};
|
||||
@@ -319,7 +349,7 @@ fn move_window_to_active_monitor<R: Runtime>(window: &Window<R>) {
|
||||
let monitor = match target_monitor.or_else(|| window.primary_monitor().ok().flatten()) {
|
||||
Some(monitor) => monitor,
|
||||
None => {
|
||||
eprintln!("No monitor found!");
|
||||
log::error!("No monitor found!");
|
||||
return;
|
||||
}
|
||||
};
|
||||
@@ -329,7 +359,7 @@ fn move_window_to_active_monitor<R: Runtime>(window: &Window<R>) {
|
||||
|
||||
if let Some(ref prev_name) = *previous_monitor_name {
|
||||
if name.to_string() == *prev_name {
|
||||
println!("Currently on the same monitor");
|
||||
log::debug!("Currently on the same monitor");
|
||||
|
||||
return;
|
||||
}
|
||||
@@ -343,7 +373,7 @@ fn move_window_to_active_monitor<R: Runtime>(window: &Window<R>) {
|
||||
let window_size = match window.inner_size() {
|
||||
Ok(size) => size,
|
||||
Err(e) => {
|
||||
eprintln!("Failed to get window size: {}", e);
|
||||
log::error!("Failed to get window size: {}", e);
|
||||
return;
|
||||
}
|
||||
};
|
||||
@@ -357,125 +387,25 @@ fn move_window_to_active_monitor<R: Runtime>(window: &Window<R>) {
|
||||
|
||||
// Move the window to the new position
|
||||
if let Err(e) = window.set_position(PhysicalPosition::new(window_x, window_y)) {
|
||||
eprintln!("Failed to move window: {}", e);
|
||||
log::error!("Failed to move window: {}", e);
|
||||
}
|
||||
|
||||
if let Some(name) = monitor.name() {
|
||||
println!("Window moved to monitor: {}", name);
|
||||
log::debug!("Window moved to monitor: {}", name);
|
||||
|
||||
let mut previous_monitor = PREVIOUS_MONITOR_NAME.lock().unwrap();
|
||||
*previous_monitor = Some(name.to_string());
|
||||
}
|
||||
}
|
||||
|
||||
fn handle_hide_coco(app: &AppHandle) {
|
||||
if let Some(window) = app.get_window(MAIN_WINDOW_LABEL) {
|
||||
if let Err(err) = window.hide() {
|
||||
eprintln!("Failed to hide the window: {}", err);
|
||||
} else {
|
||||
println!("Window successfully hidden.");
|
||||
}
|
||||
} else {
|
||||
eprintln!("Main window not found.");
|
||||
}
|
||||
}
|
||||
|
||||
fn enable_tray(app: &mut tauri::App) {
|
||||
use tauri::{
|
||||
image::Image,
|
||||
menu::{MenuBuilder, MenuItem},
|
||||
tray::TrayIconBuilder,
|
||||
};
|
||||
|
||||
let quit_i = MenuItem::with_id(app, "quit", "Quit Coco", true, None::<&str>).unwrap();
|
||||
let settings_i = MenuItem::with_id(app, "settings", "Settings...", true, None::<&str>).unwrap();
|
||||
let open_i = MenuItem::with_id(app, "open", "Show Coco", true, None::<&str>).unwrap();
|
||||
// let about_i = MenuItem::with_id(app, "about", "About Coco", true, None::<&str>).unwrap();
|
||||
// let hide_i = MenuItem::with_id(app, "hide", "Hide Coco", true, None::<&str>).unwrap();
|
||||
|
||||
let menu = MenuBuilder::new(app)
|
||||
.item(&open_i)
|
||||
.separator()
|
||||
// .item(&hide_i)
|
||||
// .item(&about_i)
|
||||
.item(&settings_i)
|
||||
.separator()
|
||||
.item(&quit_i)
|
||||
.build()
|
||||
.unwrap();
|
||||
|
||||
let _tray = TrayIconBuilder::with_id("tray")
|
||||
.icon_as_template(true)
|
||||
// .icon(app.default_window_icon().unwrap().clone())
|
||||
.icon(
|
||||
Image::from_bytes(include_bytes!("../assets/tray-mac.ico"))
|
||||
.expect("Failed to load icon"),
|
||||
)
|
||||
.menu(&menu)
|
||||
.on_menu_event(|app, event| match event.id.as_ref() {
|
||||
"open" => {
|
||||
handle_open_coco(app);
|
||||
}
|
||||
"hide" => {
|
||||
handle_hide_coco(app);
|
||||
}
|
||||
"about" => {
|
||||
let _ = app.emit("open_settings", "about");
|
||||
}
|
||||
"settings" => {
|
||||
// windows failed to open second window, issue: https://github.com/tauri-apps/tauri/issues/11144 https://github.com/tauri-apps/tauri/issues/8196
|
||||
//#[cfg(windows)]
|
||||
let _ = app.emit("open_settings", "settings");
|
||||
|
||||
// #[cfg(not(windows))]
|
||||
// open_settings(&app);
|
||||
}
|
||||
"quit" => {
|
||||
println!("quit menu item was clicked");
|
||||
app.exit(0);
|
||||
}
|
||||
_ => {
|
||||
println!("menu item {:?} not handled", event.id);
|
||||
}
|
||||
})
|
||||
.build(app)
|
||||
.unwrap();
|
||||
}
|
||||
|
||||
#[allow(dead_code)]
|
||||
fn open_settings(app: &tauri::AppHandle) {
|
||||
use tauri::webview::WebviewBuilder;
|
||||
println!("settings menu item was clicked");
|
||||
let window = app.get_webview_window("settings");
|
||||
if let Some(window) = window {
|
||||
window.show().unwrap();
|
||||
window.set_focus().unwrap();
|
||||
} else {
|
||||
let window = tauri::window::WindowBuilder::new(app, "settings")
|
||||
.title("Settings Window")
|
||||
.fullscreen(false)
|
||||
.resizable(false)
|
||||
.minimizable(false)
|
||||
.maximizable(false)
|
||||
.inner_size(800.0, 600.0)
|
||||
.build()
|
||||
.unwrap();
|
||||
|
||||
let webview_builder =
|
||||
WebviewBuilder::new("settings", tauri::WebviewUrl::App("/ui/settings".into()));
|
||||
let _webview = window
|
||||
.add_child(
|
||||
webview_builder,
|
||||
tauri::LogicalPosition::new(0, 0),
|
||||
window.inner_size().unwrap(),
|
||||
)
|
||||
.unwrap();
|
||||
}
|
||||
}
|
||||
|
||||
#[tauri::command]
|
||||
async fn get_app_search_source<R: Runtime>(app_handle: AppHandle<R>) -> Result<(), String> {
|
||||
init_app_search_source(&app_handle).await?;
|
||||
// We want all the extensions here, so no filter condition specified.
|
||||
let (_found_invalid_extensions, extensions) = extension::list_extensions(None, None, false)
|
||||
.await
|
||||
.map_err(|e| e.to_string())?;
|
||||
extension::init_extensions(extensions).await?;
|
||||
|
||||
let _ = server::connector::refresh_all_connectors(&app_handle).await;
|
||||
let _ = server::datasource::refresh_all_datasources(&app_handle).await;
|
||||
|
||||
@@ -484,5 +414,207 @@ async fn get_app_search_source<R: Runtime>(app_handle: AppHandle<R>) -> Result<(
|
||||
|
||||
#[tauri::command]
|
||||
async fn show_settings(app_handle: AppHandle) {
|
||||
open_settings(&app_handle);
|
||||
log::debug!("settings menu item was clicked");
|
||||
let window = app_handle
|
||||
.get_webview_window(SETTINGS_WINDOW_LABEL)
|
||||
.expect("we have a settings window");
|
||||
|
||||
window.show().unwrap();
|
||||
window.unminimize().unwrap();
|
||||
window.set_focus().unwrap();
|
||||
}
|
||||
|
||||
#[tauri::command]
|
||||
async fn show_check(app_handle: AppHandle) {
|
||||
log::debug!("check menu item was clicked");
|
||||
let window = app_handle
|
||||
.get_webview_window(CHECK_WINDOW_LABEL)
|
||||
.expect("we have a check window");
|
||||
|
||||
window.show().unwrap();
|
||||
window.unminimize().unwrap();
|
||||
window.set_focus().unwrap();
|
||||
}
|
||||
|
||||
#[tauri::command]
|
||||
async fn hide_check(app_handle: AppHandle) {
|
||||
log::debug!("check window was closed");
|
||||
let window = &app_handle
|
||||
.get_webview_window(CHECK_WINDOW_LABEL)
|
||||
.expect("we have a check window");
|
||||
|
||||
window.hide().unwrap();
|
||||
}
|
||||
|
||||
#[tauri::command]
|
||||
async fn simulate_mouse_click<R: Runtime>(window: WebviewWindow<R>, is_chat_mode: bool) {
|
||||
#[cfg(target_os = "windows")]
|
||||
{
|
||||
use enigo::{Button, Coordinate, Direction, Enigo, Mouse, Settings};
|
||||
use std::{thread, time::Duration};
|
||||
|
||||
if let Ok(mut enigo) = Enigo::new(&Settings::default()) {
|
||||
// Save the current mouse position
|
||||
if let Ok((original_x, original_y)) = enigo.location() {
|
||||
// Retrieve the window's outer position (top-left corner)
|
||||
if let Ok(position) = window.outer_position() {
|
||||
// Retrieve the window's inner size (client area)
|
||||
if let Ok(size) = window.inner_size() {
|
||||
// Calculate the center position of the title bar
|
||||
let x = position.x + (size.width as i32 / 2);
|
||||
let y = if is_chat_mode {
|
||||
position.y + size.height as i32 - 50
|
||||
} else {
|
||||
position.y + 30
|
||||
};
|
||||
|
||||
// Move the mouse cursor to the calculated position
|
||||
if enigo.move_mouse(x, y, Coordinate::Abs).is_ok() {
|
||||
// // Simulate a left mouse click
|
||||
let _ = enigo.button(Button::Left, Direction::Click);
|
||||
// let _ = enigo.button(Button::Left, Direction::Release);
|
||||
|
||||
thread::sleep(Duration::from_millis(100));
|
||||
|
||||
// Move the mouse cursor back to the original position
|
||||
let _ = enigo.move_mouse(original_x, original_y, Coordinate::Abs);
|
||||
}
|
||||
}
|
||||
}
|
||||
}
|
||||
}
|
||||
}
|
||||
|
||||
#[cfg(not(target_os = "windows"))]
|
||||
{
|
||||
let _ = window;
|
||||
let _ = is_chat_mode;
|
||||
}
|
||||
}
|
||||
|
||||
/// Log format:
|
||||
///
|
||||
/// ```text
|
||||
/// [time] [log level] [file module:line] message
|
||||
/// ```
|
||||
///
|
||||
/// Example:
|
||||
///
|
||||
///
|
||||
/// ```text
|
||||
/// [05-11 17:00:00] [INF] [coco_lib:625] Coco-AI started
|
||||
/// ```
|
||||
fn set_up_tauri_logger() -> TauriPlugin<tauri::Wry> {
|
||||
use log::Level;
|
||||
use log::LevelFilter;
|
||||
use tauri_plugin_log::Builder;
|
||||
|
||||
/// Coco-AI app's default log level.
|
||||
const DEFAULT_LOG_LEVEL: LevelFilter = LevelFilter::Info;
|
||||
const LOG_LEVEL_ENV_VAR: &str = "COCO_LOG";
|
||||
|
||||
fn format_log_level(level: Level) -> &'static str {
|
||||
match level {
|
||||
Level::Trace => "TRC",
|
||||
Level::Debug => "DBG",
|
||||
Level::Info => "INF",
|
||||
Level::Warn => "WAR",
|
||||
Level::Error => "ERR",
|
||||
}
|
||||
}
|
||||
|
||||
fn format_target_and_line(record: &log::Record) -> String {
|
||||
let mut str = record.target().to_string();
|
||||
if let Some(line) = record.line() {
|
||||
str.push(':');
|
||||
str.push_str(&line.to_string());
|
||||
}
|
||||
|
||||
str
|
||||
}
|
||||
|
||||
/// Allow us to configure dynamic log levels via environment variable `COCO_LOG`.
|
||||
///
|
||||
/// Generally, it mirros the behavior of `env_logger`. Syntax: `COCO_LOG=[target][=][level][,...]`
|
||||
///
|
||||
/// * If this environment variable is not set, use the default log level.
|
||||
/// * If it is set, respect it:
|
||||
///
|
||||
/// * `COCO_LOG=coco_lib` turns on all logging for the `coco_lib` module, which is
|
||||
/// equivalent to `COCO_LOG=coco_lib=trace`
|
||||
/// * `COCO_LOG=trace` turns on all logging for the application, regardless of its name
|
||||
/// * `COCO_LOG=TRACE` turns on all logging for the application, regardless of its name (same as previous)
|
||||
/// * `COCO_LOG=reqwest=debug` turns on debug logging for `reqwest`
|
||||
/// * `COCO_LOG=trace,tauri=off` turns on all the logging except for the logs come from `tauri`
|
||||
/// * `COCO_LOG=off` turns off all logging for the application
|
||||
/// * `COCO_LOG=` Since the value is empty, turns off all logging for the application as well
|
||||
fn dynamic_log_level(mut builder: Builder) -> Builder {
|
||||
let Some(log_levels) = std::env::var_os(LOG_LEVEL_ENV_VAR) else {
|
||||
return builder.level(DEFAULT_LOG_LEVEL);
|
||||
};
|
||||
|
||||
builder = builder.level(LevelFilter::Off);
|
||||
|
||||
let log_levels = log_levels.into_string().unwrap_or_else(|e| {
|
||||
panic!(
|
||||
"The value '{}' set in environment varaible '{}' is not UTF-8 encoded",
|
||||
// Cannot use `.display()` here becuase that requires MSRV 1.87.0
|
||||
e.to_string_lossy(),
|
||||
LOG_LEVEL_ENV_VAR
|
||||
)
|
||||
});
|
||||
|
||||
// COCO_LOG=[target][=][level][,...]
|
||||
let target_log_levels = log_levels.split(',');
|
||||
for target_log_level in target_log_levels {
|
||||
#[allow(clippy::collapsible_else_if)]
|
||||
if let Some(char_index) = target_log_level.chars().position(|c| c == '=') {
|
||||
let (target, equal_sign_and_level) = target_log_level.split_at(char_index);
|
||||
// Remove the equal sign, we know it takes 1 byte
|
||||
let level = &equal_sign_and_level[1..];
|
||||
|
||||
if let Ok(level) = level.parse::<LevelFilter>() {
|
||||
// Here we have to call `.to_string()` because `Cow<'static, str>` requires `&'static str`
|
||||
builder = builder.level_for(target.to_string(), level);
|
||||
} else {
|
||||
panic!(
|
||||
"log level '{}' set in '{}={}' is invalid",
|
||||
level, target, level
|
||||
);
|
||||
}
|
||||
} else {
|
||||
if let Ok(level) = target_log_level.parse::<LevelFilter>() {
|
||||
// This is a level
|
||||
builder = builder.level(level);
|
||||
} else {
|
||||
// This is a target, enable all the logging
|
||||
//
|
||||
// Here we have to call `.to_string()` because `Cow<'static, str>` requires `&'static str`
|
||||
builder = builder.level_for(target_log_level.to_string(), LevelFilter::Trace);
|
||||
}
|
||||
}
|
||||
}
|
||||
|
||||
builder
|
||||
}
|
||||
|
||||
// When running the built binary, set `COCO_LOG` to `coco_lib=trace` to capture all logs
|
||||
// that come from Coco in the log file, which helps with debugging.
|
||||
if !tauri::is_dev() {
|
||||
std::env::set_var("COCO_LOG", "coco_lib=trace");
|
||||
}
|
||||
|
||||
let mut builder = tauri_plugin_log::Builder::new();
|
||||
builder = builder.format(|out, message, record| {
|
||||
let now = chrono::Local::now().format("%m-%d %H:%M:%S");
|
||||
let level = format_log_level(record.level());
|
||||
let target_and_line = format_target_and_line(record);
|
||||
out.finish(format_args!(
|
||||
"[{}] [{}] [{}] {}",
|
||||
now, level, target_and_line, message
|
||||
));
|
||||
});
|
||||
builder = dynamic_log_level(builder);
|
||||
|
||||
builder.build()
|
||||
}
|
||||
|
||||
@@ -1,158 +0,0 @@
|
||||
use crate::common::document::{DataSourceReference, Document};
|
||||
use crate::common::search::{QueryResponse, QuerySource, SearchQuery};
|
||||
use crate::common::traits::{SearchError, SearchSource};
|
||||
use crate::local::LOCAL_QUERY_SOURCE_TYPE;
|
||||
use applications::{AppInfo, AppInfoContext};
|
||||
use async_trait::async_trait;
|
||||
use base64::encode;
|
||||
use fuzzy_prefix_search::Trie;
|
||||
use std::collections::HashMap;
|
||||
use std::fs;
|
||||
use std::path::{Path, PathBuf};
|
||||
use tauri::{AppHandle, Runtime};
|
||||
use tauri_plugin_fs_pro::{icon, name};
|
||||
|
||||
pub struct ApplicationSearchSource {
|
||||
base_score: f64,
|
||||
icons: HashMap<String, PathBuf>,
|
||||
application_paths: Trie<String>,
|
||||
}
|
||||
|
||||
impl ApplicationSearchSource {
|
||||
pub async fn new<R: Runtime>(
|
||||
app_handle: AppHandle<R>,
|
||||
base_score: f64,
|
||||
) -> Result<Self, String> {
|
||||
let application_paths = Trie::new();
|
||||
let mut icons = HashMap::new();
|
||||
|
||||
let mut ctx = AppInfoContext::new(vec![]);
|
||||
ctx.refresh_apps().map_err(|err| err.to_string())?; // must refresh apps before getting them
|
||||
let apps = ctx.get_all_apps();
|
||||
|
||||
for app in &apps {
|
||||
let path = if cfg!(target_os = "macos") {
|
||||
app.app_desktop_path.clone()
|
||||
} else {
|
||||
app.app_path_exe
|
||||
.clone()
|
||||
.unwrap_or(PathBuf::from("Path not found"))
|
||||
};
|
||||
let search_word = name(path.clone()).await;
|
||||
let icon = icon(app_handle.clone(), path.clone(), Some(256))
|
||||
.await
|
||||
.map_err(|err| err.to_string())?;
|
||||
let path_string = path.to_string_lossy().into_owned();
|
||||
|
||||
if search_word.is_empty() || search_word.eq("coco-ai") {
|
||||
continue;
|
||||
}
|
||||
|
||||
application_paths.insert(&search_word, path_string.clone());
|
||||
icons.insert(path_string, icon);
|
||||
}
|
||||
|
||||
Ok(ApplicationSearchSource {
|
||||
base_score,
|
||||
icons,
|
||||
application_paths,
|
||||
})
|
||||
}
|
||||
}
|
||||
|
||||
#[async_trait]
|
||||
impl SearchSource for ApplicationSearchSource {
|
||||
fn get_type(&self) -> QuerySource {
|
||||
QuerySource {
|
||||
r#type: LOCAL_QUERY_SOURCE_TYPE.into(),
|
||||
name: hostname::get()
|
||||
.unwrap_or("My Computer".into())
|
||||
.to_string_lossy()
|
||||
.into(),
|
||||
id: "local_applications".into(),
|
||||
}
|
||||
}
|
||||
|
||||
async fn search(&self, query: SearchQuery) -> Result<QueryResponse, SearchError> {
|
||||
let query_string = query
|
||||
.query_strings
|
||||
.get("query")
|
||||
.unwrap_or(&"".to_string())
|
||||
.to_lowercase();
|
||||
|
||||
if query_string.is_empty() {
|
||||
return Ok(QueryResponse {
|
||||
source: self.get_type(),
|
||||
hits: Vec::new(),
|
||||
total_hits: 0,
|
||||
});
|
||||
}
|
||||
|
||||
let mut total_hits = 0;
|
||||
let mut hits = Vec::new();
|
||||
|
||||
let mut results = self
|
||||
.application_paths
|
||||
.search_within_distance_scored(&query_string, 3);
|
||||
|
||||
// Check for NaN or extreme score values and handle them properly
|
||||
results.sort_by(|a, b| {
|
||||
// If either score is NaN, consider them equal (you can customize this logic as needed)
|
||||
if a.score.is_nan() || b.score.is_nan() {
|
||||
std::cmp::Ordering::Equal
|
||||
} else {
|
||||
// Otherwise, compare the scores as usual
|
||||
b.score
|
||||
.partial_cmp(&a.score)
|
||||
.unwrap_or(std::cmp::Ordering::Equal)
|
||||
}
|
||||
});
|
||||
|
||||
if !results.is_empty() {
|
||||
for result in results {
|
||||
let file_name_str = result.word;
|
||||
let file_path_str = result.data.get(0).unwrap().to_string();
|
||||
let file_path = PathBuf::from(file_path_str.clone());
|
||||
let cleaned_file_name = name(file_path).await;
|
||||
total_hits += 1;
|
||||
let mut doc = Document::new(
|
||||
Some(DataSourceReference {
|
||||
r#type: Some(LOCAL_QUERY_SOURCE_TYPE.into()),
|
||||
name: Some("Applications".into()),
|
||||
id: Some(file_name_str.clone()),
|
||||
}),
|
||||
file_path_str.clone(),
|
||||
"Application".to_string(),
|
||||
cleaned_file_name,
|
||||
file_path_str.clone(),
|
||||
);
|
||||
|
||||
// Attach icon if available
|
||||
if let Some(icon_path) = self.icons.get(file_path_str.as_str()) {
|
||||
// doc.icon = Some(format!("file://{}", icon_path.to_string_lossy()));
|
||||
// dbg!(&doc.icon);
|
||||
if let Ok(icon_data) = read_icon_and_encode(icon_path) {
|
||||
doc.icon = Some(format!("data:image/png;base64,{}", icon_data));
|
||||
}
|
||||
}
|
||||
|
||||
hits.push((doc, self.base_score + result.score as f64));
|
||||
}
|
||||
}
|
||||
|
||||
Ok(QueryResponse {
|
||||
source: self.get_type(),
|
||||
hits,
|
||||
total_hits,
|
||||
})
|
||||
}
|
||||
}
|
||||
|
||||
// Function to read the icon file and convert it to base64
|
||||
fn read_icon_and_encode(icon_path: &Path) -> Result<String, std::io::Error> {
|
||||
// Read the icon file as binary data
|
||||
let icon_data = fs::read(icon_path)?;
|
||||
|
||||
// Encode the data to base64
|
||||
Ok(encode(&icon_data))
|
||||
}
|
||||
@@ -1,4 +0,0 @@
|
||||
pub mod application;
|
||||
pub mod file_system;
|
||||
|
||||
pub const LOCAL_QUERY_SOURCE_TYPE: &str = "local";
|
||||
@@ -1,119 +1,210 @@
|
||||
use crate::common::error::SearchError;
|
||||
use crate::common::register::SearchSourceRegistry;
|
||||
use crate::common::search::{
|
||||
FailedRequest, MultiSourceQueryResponse, QueryHits, QuerySource, SearchQuery,
|
||||
FailedRequest, MultiSourceQueryResponse, QueryHits, QueryResponse, QuerySource, SearchQuery,
|
||||
};
|
||||
use crate::common::traits::SearchError;
|
||||
use crate::common::traits::SearchSource;
|
||||
use function_name::named;
|
||||
use futures::stream::FuturesUnordered;
|
||||
use futures::StreamExt;
|
||||
use std::cmp::Reverse;
|
||||
use std::collections::HashMap;
|
||||
use std::collections::HashSet;
|
||||
use std::future::Future;
|
||||
use std::sync::Arc;
|
||||
use tauri::{AppHandle, Manager, Runtime};
|
||||
use tokio::time::error::Elapsed;
|
||||
use tokio::time::{timeout, Duration};
|
||||
|
||||
/// Helper function to return the Future used for querying querysources.
|
||||
///
|
||||
/// It is a workaround for the limitations:
|
||||
///
|
||||
/// 1. 2 async blocks have different types in Rust's type system even though
|
||||
/// they are literally same
|
||||
/// 2. `futures::stream::FuturesUnordered` needs the `Futures` pushed to it to
|
||||
/// have only 1 type
|
||||
///
|
||||
/// Putting the async block in a function to unify the types.
|
||||
fn same_type_futures(
|
||||
query_source: QuerySource,
|
||||
query_source_trait_object: Arc<dyn SearchSource>,
|
||||
timeout_duration: Duration,
|
||||
search_query: SearchQuery,
|
||||
) -> impl Future<
|
||||
Output = (
|
||||
QuerySource,
|
||||
Result<Result<QueryResponse, SearchError>, Elapsed>,
|
||||
),
|
||||
> + 'static {
|
||||
async move {
|
||||
(
|
||||
// Store `query_source` as part of future for debugging purposes.
|
||||
query_source,
|
||||
timeout(timeout_duration, async {
|
||||
query_source_trait_object.search(search_query).await
|
||||
})
|
||||
.await,
|
||||
)
|
||||
}
|
||||
}
|
||||
|
||||
#[named]
|
||||
#[tauri::command]
|
||||
pub async fn query_coco_fusion<R: Runtime>(
|
||||
app_handle: AppHandle<R>,
|
||||
from: u64,
|
||||
size: u64,
|
||||
query_strings: HashMap<String, String>,
|
||||
query_timeout: u64,
|
||||
) -> Result<MultiSourceQueryResponse, SearchError> {
|
||||
let query_keyword = query_strings
|
||||
.get("query")
|
||||
.unwrap_or(&"".to_string())
|
||||
.clone();
|
||||
|
||||
let opt_query_source_id = query_strings.get("querysource");
|
||||
|
||||
let search_sources = app_handle.state::<SearchSourceRegistry>();
|
||||
|
||||
let sources_future = search_sources.get_sources();
|
||||
let mut futures = FuturesUnordered::new();
|
||||
let mut sources = HashMap::new();
|
||||
|
||||
let sources_list = sources_future.await;
|
||||
let mut sources_list = sources_future.await;
|
||||
let sources_list_len = sources_list.len();
|
||||
|
||||
// Time limit for each query
|
||||
let timeout_duration = Duration::from_millis(500); //TODO, settings
|
||||
let timeout_duration = Duration::from_millis(query_timeout);
|
||||
|
||||
// Push all queries into futures
|
||||
for query_source in sources_list {
|
||||
let query_source_type = query_source.get_type().clone();
|
||||
sources.insert(query_source_type.id.clone(), query_source_type);
|
||||
log::debug!(
|
||||
"{}(): {:?}, timeout: {:?}",
|
||||
function_name!(),
|
||||
query_strings,
|
||||
timeout_duration
|
||||
);
|
||||
|
||||
let query = SearchQuery::new(from, size, query_strings.clone());
|
||||
let query_source_clone = query_source.clone(); // Clone Arc to avoid ownership issues
|
||||
let search_query = SearchQuery::new(from, size, query_strings.clone());
|
||||
|
||||
futures.push(tokio::spawn(async move {
|
||||
// Timeout each query execution
|
||||
timeout(timeout_duration, async {
|
||||
query_source_clone.search(query).await
|
||||
})
|
||||
.await
|
||||
}));
|
||||
if let Some(query_source_id) = opt_query_source_id {
|
||||
// If this query source ID is specified, we only query this query source.
|
||||
log::debug!(
|
||||
"parameter [querysource={}] specified, will only query this querysource",
|
||||
query_source_id
|
||||
);
|
||||
|
||||
let opt_query_source_trait_object_index = sources_list
|
||||
.iter()
|
||||
.position(|query_source| &query_source.get_type().id == query_source_id);
|
||||
|
||||
let Some(query_source_trait_object_index) = opt_query_source_trait_object_index else {
|
||||
// It is possible (an edge case) that the frontend invokes `query_coco_fusion()` with a
|
||||
// datasource that does not exist in the source list:
|
||||
//
|
||||
// 1. Search applications
|
||||
// 2. Navigate to the application sub page
|
||||
// 3. Disable the application extension in settings
|
||||
// 4. hide the search window
|
||||
// 5. Re-open the search window and search for something
|
||||
//
|
||||
// The application search source is not in the source list because the extension
|
||||
// has been disabled, but the last search is indeed invoked with parameter
|
||||
// `datasource=application`.
|
||||
return Ok(MultiSourceQueryResponse {
|
||||
failed: Vec::new(),
|
||||
hits: Vec::new(),
|
||||
total_hits: 0,
|
||||
});
|
||||
};
|
||||
|
||||
let query_source_trait_object = sources_list.remove(query_source_trait_object_index);
|
||||
let query_source = query_source_trait_object.get_type();
|
||||
|
||||
futures.push(same_type_futures(
|
||||
query_source,
|
||||
query_source_trait_object,
|
||||
timeout_duration,
|
||||
search_query,
|
||||
));
|
||||
} else {
|
||||
for query_source_trait_object in sources_list {
|
||||
let query_source = query_source_trait_object.get_type().clone();
|
||||
log::debug!("will query querysource [{}]", query_source.id);
|
||||
futures.push(same_type_futures(
|
||||
query_source,
|
||||
query_source_trait_object,
|
||||
timeout_duration,
|
||||
search_query.clone(),
|
||||
));
|
||||
}
|
||||
}
|
||||
|
||||
let mut total_hits = 0;
|
||||
let mut need_rerank = true; //TODO set default to false when boost supported in Pizza
|
||||
let mut failed_requests = Vec::new();
|
||||
let mut all_hits: Vec<(String, QueryHits, f64)> = Vec::new();
|
||||
let mut hits_per_source: HashMap<String, Vec<(QueryHits, f64)>> = HashMap::new();
|
||||
|
||||
while let Some(result) = futures.next().await {
|
||||
match result {
|
||||
Ok(Ok(Ok(response))) => {
|
||||
total_hits += response.total_hits;
|
||||
let source_id = response.source.id.clone();
|
||||
if sources_list_len > 1 {
|
||||
need_rerank = true; // If we have more than one source, we need to rerank the hits
|
||||
}
|
||||
|
||||
for (doc, score) in response.hits {
|
||||
let query_hit = QueryHits {
|
||||
source: Some(response.source.clone()),
|
||||
score,
|
||||
document: doc,
|
||||
};
|
||||
while let Some((query_source, timeout_result)) = futures.next().await {
|
||||
match timeout_result {
|
||||
// Ignore the `_timeout` variable as it won't provide any useful debugging information.
|
||||
Err(_timeout) => {
|
||||
log::warn!(
|
||||
"searching query source [{}] timed out, skip this request",
|
||||
query_source.id
|
||||
);
|
||||
// failed_requests.push(FailedRequest {
|
||||
// source: query_source,
|
||||
// status: 0,
|
||||
// error: Some("querying timed out".into()),
|
||||
// reason: None,
|
||||
// });
|
||||
}
|
||||
Ok(query_result) => match query_result {
|
||||
Ok(response) => {
|
||||
total_hits += response.total_hits;
|
||||
let source_id = response.source.id.clone();
|
||||
|
||||
all_hits.push((source_id.clone(), query_hit.clone(), score));
|
||||
for (doc, score) in response.hits {
|
||||
log::debug!("doc: {}, {:?}, {}", doc.id, doc.title, score);
|
||||
|
||||
hits_per_source
|
||||
.entry(source_id.clone())
|
||||
.or_insert_with(Vec::new)
|
||||
.push((query_hit, score));
|
||||
let query_hit = QueryHits {
|
||||
source: Some(response.source.clone()),
|
||||
score,
|
||||
document: doc,
|
||||
};
|
||||
|
||||
all_hits.push((source_id.clone(), query_hit.clone(), score));
|
||||
|
||||
hits_per_source
|
||||
.entry(source_id.clone())
|
||||
.or_insert_with(Vec::new)
|
||||
.push((query_hit, score));
|
||||
}
|
||||
}
|
||||
}
|
||||
Ok(Err(err)) => {
|
||||
failed_requests.push(FailedRequest {
|
||||
source: QuerySource {
|
||||
r#type: "N/A".into(),
|
||||
name: "N/A".into(),
|
||||
id: "N/A".into(),
|
||||
},
|
||||
status: 0,
|
||||
error: Some(err.to_string()),
|
||||
reason: None,
|
||||
});
|
||||
}
|
||||
// Timeout reached, skip this request
|
||||
Ok(_) => {
|
||||
failed_requests.push(FailedRequest {
|
||||
source: QuerySource {
|
||||
r#type: "N/A".into(),
|
||||
name: "N/A".into(),
|
||||
id: "N/A".into(),
|
||||
},
|
||||
status: 0,
|
||||
error: Some("Query source timed out".to_string()),
|
||||
reason: None,
|
||||
});
|
||||
}
|
||||
Err(_) => {
|
||||
failed_requests.push(FailedRequest {
|
||||
source: QuerySource {
|
||||
r#type: "N/A".into(),
|
||||
name: "N/A".into(),
|
||||
id: "N/A".into(),
|
||||
},
|
||||
status: 0,
|
||||
error: Some("Task panicked".to_string()),
|
||||
reason: None,
|
||||
});
|
||||
}
|
||||
Err(search_error) => {
|
||||
log::error!(
|
||||
"searching query source [{}] failed, error [{}]",
|
||||
query_source.id,
|
||||
search_error
|
||||
);
|
||||
failed_requests.push(FailedRequest {
|
||||
source: query_source,
|
||||
status: 0,
|
||||
error: Some(search_error.to_string()),
|
||||
reason: None,
|
||||
});
|
||||
}
|
||||
},
|
||||
}
|
||||
}
|
||||
|
||||
// Sort hits within each source by score (descending)
|
||||
for hits in hits_per_source.values_mut() {
|
||||
hits.sort_by(|a, b| b.1.partial_cmp(&a.1).unwrap_or(std::cmp::Ordering::Equal));
|
||||
hits.sort_by(|a, b| b.1.partial_cmp(&a.1).unwrap_or(std::cmp::Ordering::Greater));
|
||||
}
|
||||
|
||||
let total_sources = hits_per_source.len();
|
||||
@@ -129,16 +220,71 @@ pub async fn query_coco_fusion<R: Runtime>(
|
||||
// Distribute hits fairly across sources
|
||||
for (_source_id, hits) in &mut hits_per_source {
|
||||
let take_count = hits.len().min(max_hits_per_source);
|
||||
for (doc, _) in hits.drain(0..take_count) {
|
||||
for (doc, score) in hits.drain(0..take_count) {
|
||||
if !seen_docs.contains(&doc.document.id) {
|
||||
seen_docs.insert(doc.document.id.clone());
|
||||
log::debug!(
|
||||
"collect doc: {}, {:?}, {}",
|
||||
doc.document.id,
|
||||
doc.document.title,
|
||||
score
|
||||
);
|
||||
final_hits.push(doc);
|
||||
}
|
||||
}
|
||||
}
|
||||
|
||||
// If we still need more hits, take the highest-scoring remaining ones
|
||||
if final_hits.len() < size as usize {
|
||||
log::debug!("final hits: {:?}", final_hits.len());
|
||||
|
||||
let mut unique_sources = HashSet::new();
|
||||
for hit in &final_hits {
|
||||
if let Some(source) = &hit.source {
|
||||
if source.id != crate::extension::built_in::calculator::DATA_SOURCE_ID {
|
||||
unique_sources.insert(&source.id);
|
||||
}
|
||||
}
|
||||
}
|
||||
|
||||
log::debug!(
|
||||
"Multiple sources found: {:?}, no rerank needed",
|
||||
unique_sources
|
||||
);
|
||||
|
||||
if unique_sources.len() < 1 {
|
||||
need_rerank = false; // If we have hits from multiple sources, we don't need to rerank
|
||||
}
|
||||
|
||||
if need_rerank && final_hits.len() > 1 {
|
||||
// Precollect (index, title)
|
||||
let titles_to_score: Vec<(usize, &str)> = final_hits
|
||||
.iter()
|
||||
.enumerate()
|
||||
.filter_map(|(idx, hit)| {
|
||||
let source = hit.source.as_ref()?;
|
||||
let title = hit.document.title.as_deref()?;
|
||||
|
||||
if source.id != crate::extension::built_in::calculator::DATA_SOURCE_ID {
|
||||
Some((idx, title))
|
||||
} else {
|
||||
None
|
||||
}
|
||||
})
|
||||
.collect();
|
||||
|
||||
// Score them
|
||||
let scored_hits = boosted_levenshtein_rerank(query_keyword.as_str(), titles_to_score);
|
||||
|
||||
// Sort descending by score
|
||||
let mut scored_hits = scored_hits;
|
||||
scored_hits.sort_by_key(|&(_, score)| Reverse((score * 1000.0) as u64));
|
||||
|
||||
// Apply new scores to final_hits
|
||||
for (idx, score) in scored_hits.into_iter().take(size as usize) {
|
||||
final_hits[idx].score = score;
|
||||
}
|
||||
} else if final_hits.len() < size as usize {
|
||||
// If we still need more hits, take the highest-scoring remaining ones
|
||||
|
||||
let remaining_needed = size as usize - final_hits.len();
|
||||
|
||||
// Sort all hits by score descending, removing duplicates by document ID
|
||||
@@ -168,9 +314,45 @@ pub async fn query_coco_fusion<R: Runtime>(
|
||||
.unwrap_or(std::cmp::Ordering::Equal)
|
||||
});
|
||||
|
||||
if final_hits.len() < 5 {
|
||||
//TODO: Add a recommendation system to suggest more sources
|
||||
log::info!(
|
||||
"Less than 5 hits found, consider using recommendation to find more suggestions."
|
||||
);
|
||||
//local: recent history, local extensions
|
||||
//remote: ai agents, quick links, other tasks, managed by server
|
||||
}
|
||||
|
||||
Ok(MultiSourceQueryResponse {
|
||||
failed: failed_requests,
|
||||
hits: final_hits,
|
||||
total_hits,
|
||||
})
|
||||
}
|
||||
|
||||
fn boosted_levenshtein_rerank(query: &str, titles: Vec<(usize, &str)>) -> Vec<(usize, f64)> {
|
||||
use strsim::levenshtein;
|
||||
|
||||
let query_lower = query.to_lowercase();
|
||||
|
||||
titles
|
||||
.into_iter()
|
||||
.map(|(idx, title)| {
|
||||
let mut score = 0.0;
|
||||
|
||||
if title.contains(query) {
|
||||
score += 0.4;
|
||||
} else if title.to_lowercase().contains(&query_lower) {
|
||||
score += 0.2;
|
||||
}
|
||||
|
||||
let dist = levenshtein(&query_lower, &title.to_lowercase());
|
||||
let max_len = query_lower.len().max(title.len());
|
||||
if max_len > 0 {
|
||||
score += (1.0 - (dist as f64 / max_len as f64)) as f32;
|
||||
}
|
||||
|
||||
(idx, score.min(1.0) as f64)
|
||||
})
|
||||
.collect()
|
||||
}
|
||||
|
||||
143
src-tauri/src/server/attachment.rs
Normal file
@@ -0,0 +1,143 @@
|
||||
use super::servers::{get_server_by_id, get_server_token};
|
||||
use crate::common::http::get_response_body_text;
|
||||
use crate::server::http_client::HttpClient;
|
||||
use reqwest::multipart::{Form, Part};
|
||||
use serde::{Deserialize, Serialize};
|
||||
use serde_json::Value;
|
||||
use std::{collections::HashMap, path::PathBuf};
|
||||
use tauri::command;
|
||||
use tokio::fs::File;
|
||||
use tokio_util::codec::{BytesCodec, FramedRead};
|
||||
|
||||
#[derive(Debug, Serialize, Deserialize)]
|
||||
pub struct UploadAttachmentResponse {
|
||||
pub acknowledged: bool,
|
||||
pub attachments: Vec<String>,
|
||||
}
|
||||
|
||||
#[derive(Debug, Serialize, Deserialize)]
|
||||
pub struct AttachmentSource {
|
||||
pub id: String,
|
||||
pub created: String,
|
||||
pub updated: String,
|
||||
pub session: String,
|
||||
pub name: String,
|
||||
pub icon: String,
|
||||
pub url: String,
|
||||
pub size: u64,
|
||||
}
|
||||
|
||||
#[derive(Debug, Serialize, Deserialize)]
|
||||
pub struct AttachmentHit {
|
||||
pub _index: String,
|
||||
pub _type: Option<String>,
|
||||
pub _id: String,
|
||||
pub _score: Option<f64>,
|
||||
pub _source: AttachmentSource,
|
||||
}
|
||||
|
||||
#[derive(Debug, Serialize, Deserialize)]
|
||||
pub struct AttachmentHits {
|
||||
pub total: Value,
|
||||
pub max_score: Option<f64>,
|
||||
pub hits: Option<Vec<AttachmentHit>>,
|
||||
}
|
||||
|
||||
#[derive(Debug, Serialize, Deserialize)]
|
||||
pub struct GetAttachmentResponse {
|
||||
pub took: u32,
|
||||
pub timed_out: bool,
|
||||
pub _shards: Option<Value>,
|
||||
pub hits: AttachmentHits,
|
||||
}
|
||||
|
||||
#[derive(Debug, Serialize, Deserialize)]
|
||||
pub struct DeleteAttachmentResponse {
|
||||
pub _id: String,
|
||||
pub result: String,
|
||||
}
|
||||
|
||||
#[command]
|
||||
pub async fn upload_attachment(
|
||||
server_id: String,
|
||||
session_id: String,
|
||||
file_paths: Vec<PathBuf>,
|
||||
) -> Result<UploadAttachmentResponse, String> {
|
||||
let mut form = Form::new();
|
||||
|
||||
for file_path in file_paths {
|
||||
let file = File::open(&file_path)
|
||||
.await
|
||||
.map_err(|err| err.to_string())?;
|
||||
|
||||
let stream = FramedRead::new(file, BytesCodec::new());
|
||||
let file_name = file_path
|
||||
.file_name()
|
||||
.and_then(|n| n.to_str())
|
||||
.ok_or("Invalid filename")?;
|
||||
|
||||
let part =
|
||||
Part::stream(reqwest::Body::wrap_stream(stream)).file_name(file_name.to_string());
|
||||
|
||||
form = form.part("files", part);
|
||||
}
|
||||
|
||||
let server = get_server_by_id(&server_id).ok_or("Server not found")?;
|
||||
let url = HttpClient::join_url(&server.endpoint, &format!("chat/{}/_upload", session_id));
|
||||
|
||||
let token = get_server_token(&server_id).await?;
|
||||
let mut headers = HashMap::new();
|
||||
if let Some(token) = token {
|
||||
headers.insert("X-API-TOKEN".to_string(), token.access_token);
|
||||
}
|
||||
|
||||
let client = reqwest::Client::new();
|
||||
let response = client
|
||||
.post(url)
|
||||
.multipart(form)
|
||||
.headers((&headers).try_into().map_err(|err| format!("{}", err))?)
|
||||
.send()
|
||||
.await
|
||||
.map_err(|err| err.to_string())?;
|
||||
|
||||
let body = get_response_body_text(response).await?;
|
||||
|
||||
serde_json::from_str::<UploadAttachmentResponse>(&body)
|
||||
.map_err(|e| format!("Failed to parse upload response: {}", e))
|
||||
}
|
||||
|
||||
#[command]
|
||||
pub async fn get_attachment(
|
||||
server_id: String,
|
||||
session_id: String,
|
||||
) -> Result<GetAttachmentResponse, String> {
|
||||
let mut query_params = Vec::new();
|
||||
query_params.push(format!("session={}", session_id));
|
||||
|
||||
let response = HttpClient::get(&server_id, "/attachment/_search", Some(query_params))
|
||||
.await
|
||||
.map_err(|e| format!("Request error: {}", e))?;
|
||||
|
||||
let body = get_response_body_text(response).await?;
|
||||
|
||||
serde_json::from_str::<GetAttachmentResponse>(&body)
|
||||
.map_err(|e| format!("Failed to parse attachment response: {}", e))
|
||||
}
|
||||
|
||||
#[command]
|
||||
pub async fn delete_attachment(server_id: String, id: String) -> Result<bool, String> {
|
||||
let response = HttpClient::delete(&server_id, &format!("/attachment/{}", id), None, None)
|
||||
.await
|
||||
.map_err(|e| format!("Request error: {}", e))?;
|
||||
|
||||
let body = get_response_body_text(response).await?;
|
||||
|
||||
let parsed: DeleteAttachmentResponse = serde_json::from_str(&body)
|
||||
.map_err(|e| format!("Failed to parse delete response: {}", e))?;
|
||||
|
||||
parsed
|
||||
.result
|
||||
.eq("deleted")
|
||||
.then_some(true)
|
||||
.ok_or_else(|| "Delete operation was not successful".to_string())
|
||||
}
|
||||
@@ -1,11 +1,12 @@
|
||||
use crate::common::auth::RequestAccessTokenResponse;
|
||||
use crate::common::server::ServerAccessToken;
|
||||
use crate::server::http_client::HttpClient;
|
||||
use crate::server::profile::get_user_profiles;
|
||||
use crate::server::servers::{get_server_by_id, persist_servers, persist_servers_token, save_access_token, save_server, try_register_server_to_search_source};
|
||||
use reqwest::StatusCode;
|
||||
use std::collections::HashMap;
|
||||
use crate::server::servers::{
|
||||
get_server_by_id, persist_servers, persist_servers_token, save_access_token, save_server,
|
||||
try_register_server_to_search_source,
|
||||
};
|
||||
use tauri::{AppHandle, Runtime};
|
||||
|
||||
#[allow(dead_code)]
|
||||
fn request_access_token_url(request_id: &str) -> String {
|
||||
// Remove the endpoint part and keep just the path for the request
|
||||
format!("/auth/request_access_token?request_id={}", request_id)
|
||||
@@ -21,71 +22,30 @@ pub async fn handle_sso_callback<R: Runtime>(
|
||||
// Retrieve the server details using the server ID
|
||||
let server = get_server_by_id(&server_id);
|
||||
|
||||
let expire_in = 3600; // TODO, need to update to actual expire_in value
|
||||
if let Some(mut server) = server {
|
||||
// Prepare the URL for requesting the access token (endpoint is base URL, path is relative)
|
||||
// save_access_token(server_id.clone(), ServerAccessToken::new(server_id.clone(), code.clone(), 60 * 15));
|
||||
let path = request_access_token_url(&request_id);
|
||||
// Save the access token for the server
|
||||
let access_token = ServerAccessToken::new(server_id.clone(), code.clone(), expire_in);
|
||||
// dbg!(&server_id, &request_id, &code, &token);
|
||||
save_access_token(server_id.clone(), access_token);
|
||||
persist_servers_token(&app_handle)?;
|
||||
|
||||
// Send the request for the access token using the util::http::HttpClient::get method
|
||||
let mut header = HashMap::new();
|
||||
header.insert("Authorization".to_string(), format!("Bearer {}", code).to_string());
|
||||
let response = HttpClient::advanced_post(&server_id, &path, Some(header), None, None)
|
||||
.await
|
||||
.map_err(|e| format!("Failed to send request to the server: {}", e))?;
|
||||
// Register the server to the search source
|
||||
try_register_server_to_search_source(app_handle.clone(), &server).await;
|
||||
|
||||
if response.status() == StatusCode::OK {
|
||||
// Check if the response has a valid content length
|
||||
if let Some(content_length) = response.content_length() {
|
||||
if content_length > 0 {
|
||||
// Deserialize the response body to get the access token
|
||||
let token_result: Result<RequestAccessTokenResponse, _> = response.json().await;
|
||||
// Update the server's profile using the util::http::HttpClient::get method
|
||||
let profile = get_user_profiles(app_handle.clone(), server_id.clone()).await;
|
||||
dbg!(&profile);
|
||||
|
||||
match token_result {
|
||||
Ok(token) => {
|
||||
// Save the access token for the server
|
||||
let access_token = ServerAccessToken::new(
|
||||
server_id.clone(),
|
||||
token.access_token.clone(),
|
||||
token.expire_in,
|
||||
);
|
||||
// dbg!(&server_id, &request_id, &code, &token);
|
||||
save_access_token(server_id.clone(), access_token);
|
||||
persist_servers_token(&app_handle)?;
|
||||
|
||||
// Register the server to the search source
|
||||
try_register_server_to_search_source(app_handle.clone(), &server).await;
|
||||
|
||||
// Update the server's profile using the util::http::HttpClient::get method
|
||||
let profile = get_user_profiles(app_handle.clone(), server_id.clone()).await;
|
||||
dbg!(&profile);
|
||||
|
||||
match profile {
|
||||
Ok(p) => {
|
||||
server.profile = Some(p);
|
||||
server.available = true;
|
||||
save_server(&server);
|
||||
persist_servers(&app_handle).await?;
|
||||
Ok(())
|
||||
}
|
||||
Err(e) => Err(format!("Failed to get user profile: {}", e)),
|
||||
}
|
||||
}
|
||||
Err(e) => Err(format!("Failed to deserialize the token response: {}", e)),
|
||||
}
|
||||
} else {
|
||||
Err("Received empty response body.".to_string())
|
||||
}
|
||||
} else {
|
||||
Err("Could not determine the content length.".to_string())
|
||||
match profile {
|
||||
Ok(p) => {
|
||||
server.profile = Some(p);
|
||||
server.available = true;
|
||||
save_server(&server);
|
||||
persist_servers(&app_handle).await?;
|
||||
Ok(())
|
||||
}
|
||||
} else {
|
||||
Err(format!(
|
||||
"Request failed with status: {}, URL: {}, Code: {}, Response: {:?}",
|
||||
response.status(),
|
||||
path,
|
||||
code,
|
||||
response
|
||||
))
|
||||
Err(e) => Err(format!("Failed to get user profile: {}", e)),
|
||||
}
|
||||
} else {
|
||||
Err(format!(
|
||||
@@ -93,4 +53,4 @@ pub async fn handle_sso_callback<R: Runtime>(
|
||||
server_id, request_id, code
|
||||
))
|
||||
}
|
||||
}
|
||||
}
|
||||
|
||||
@@ -34,6 +34,10 @@ pub async fn refresh_all_connectors<R: Runtime>(app_handle: &AppHandle<R>) -> Re
|
||||
// Collect all the tasks for fetching and refreshing connectors
|
||||
let mut server_map = HashMap::new();
|
||||
for server in servers {
|
||||
if !server.enabled {
|
||||
continue;
|
||||
}
|
||||
|
||||
// dbg!("start fetch connectors for server: {}", &server.id);
|
||||
let connectors = match get_connectors_by_server(app_handle.clone(), server.id.clone()).await
|
||||
{
|
||||
@@ -65,6 +69,7 @@ pub async fn refresh_all_connectors<R: Runtime>(app_handle: &AppHandle<R>) -> Re
|
||||
Ok(())
|
||||
}
|
||||
|
||||
#[allow(dead_code)]
|
||||
pub async fn get_connectors_from_cache_or_remote(
|
||||
server_id: &str,
|
||||
) -> Result<Vec<Connector>, String> {
|
||||
@@ -96,7 +101,7 @@ pub async fn get_connectors_from_cache_or_remote(
|
||||
|
||||
pub async fn fetch_connectors_by_server(id: &str) -> Result<Vec<Connector>, String> {
|
||||
// Use the generic GET method from HttpClient
|
||||
let resp = HttpClient::get(&id, "/connector/_search",None)
|
||||
let resp = HttpClient::get(&id, "/connector/_search", None)
|
||||
.await
|
||||
.map_err(|e| {
|
||||
// dbg!("Error fetching connector for id {}: {}", &id, &e);
|
||||
@@ -104,9 +109,9 @@ pub async fn fetch_connectors_by_server(id: &str) -> Result<Vec<Connector>, Stri
|
||||
})?;
|
||||
|
||||
// Parse the search results directly from the response body
|
||||
let datasource: Vec<Connector> = parse_search_results(resp).await.map_err(|e| {
|
||||
e.to_string()
|
||||
})?;
|
||||
let datasource: Vec<Connector> = parse_search_results(resp)
|
||||
.await
|
||||
.map_err(|e| e.to_string())?;
|
||||
|
||||
// Save the connectors to the cache
|
||||
save_connectors_to_cache(&id, datasource.clone());
|
||||
|
||||
@@ -22,14 +22,15 @@ pub fn save_datasource_to_cache(server_id: &str, datasources: Vec<DataSource>) {
|
||||
cache.insert(server_id.to_string(), datasources_map);
|
||||
}
|
||||
|
||||
#[allow(dead_code)]
|
||||
pub fn get_datasources_from_cache(server_id: &str) -> Option<HashMap<String, DataSource>> {
|
||||
let cache = DATASOURCE_CACHE.read().unwrap(); // Acquire read lock
|
||||
// dbg!("cache: {:?}", &cache);
|
||||
// dbg!("cache: {:?}", &cache);
|
||||
let server_cache = cache.get(server_id)?; // Get the server's cache
|
||||
Some(server_cache.clone())
|
||||
}
|
||||
|
||||
pub async fn refresh_all_datasources<R: Runtime>(app_handle: &AppHandle<R>) -> Result<(), String> {
|
||||
pub async fn refresh_all_datasources<R: Runtime>(_app_handle: &AppHandle<R>) -> Result<(), String> {
|
||||
// dbg!("Attempting to refresh all datasources");
|
||||
|
||||
let servers = get_all_servers();
|
||||
@@ -39,23 +40,26 @@ pub async fn refresh_all_datasources<R: Runtime>(app_handle: &AppHandle<R>) -> R
|
||||
for server in servers {
|
||||
// dbg!("fetch datasources for server: {}", &server.id);
|
||||
|
||||
if !server.enabled {
|
||||
continue;
|
||||
}
|
||||
|
||||
// Attempt to get datasources by server, and continue even if it fails
|
||||
let connectors =
|
||||
match get_datasources_by_server(server.id.as_str()).await {
|
||||
Ok(connectors) => {
|
||||
// Process connectors only after fetching them
|
||||
let connectors_map: HashMap<String, DataSource> = connectors
|
||||
.into_iter()
|
||||
.map(|connector| (connector.id.clone(), connector))
|
||||
.collect();
|
||||
// dbg!("connectors_map: {:?}", &connectors_map);
|
||||
connectors_map
|
||||
}
|
||||
Err(_e) => {
|
||||
// dbg!("Failed to get dataSources for server {}: {}", &server.id, e);
|
||||
HashMap::new()
|
||||
}
|
||||
};
|
||||
let connectors = match datasource_search(server.id.as_str(), None).await {
|
||||
Ok(connectors) => {
|
||||
// Process connectors only after fetching them
|
||||
let connectors_map: HashMap<String, DataSource> = connectors
|
||||
.into_iter()
|
||||
.map(|connector| (connector.id.clone(), connector))
|
||||
.collect();
|
||||
// dbg!("connectors_map: {:?}", &connectors_map);
|
||||
connectors_map
|
||||
}
|
||||
Err(_e) => {
|
||||
// dbg!("Failed to get dataSources for server {}: {}", &server.id, e);
|
||||
HashMap::new()
|
||||
}
|
||||
};
|
||||
|
||||
let mut new_map = HashMap::new();
|
||||
for (id, datasource) in connectors.iter() {
|
||||
@@ -79,27 +83,22 @@ pub async fn refresh_all_datasources<R: Runtime>(app_handle: &AppHandle<R>) -> R
|
||||
cache.extend(server_map);
|
||||
cache.len()
|
||||
};
|
||||
// dbg!("datasource_map size: {:?}", cache_size);
|
||||
|
||||
Ok(())
|
||||
}
|
||||
|
||||
#[tauri::command]
|
||||
pub async fn get_datasources_by_server(
|
||||
pub async fn datasource_search(
|
||||
id: &str,
|
||||
query_params: Option<Vec<String>>, //["query=abc", "filter=er", "filter=efg", "from=0", "size=5"],
|
||||
) -> Result<Vec<DataSource>, String> {
|
||||
|
||||
// Perform the async HTTP request outside the cache lock
|
||||
let resp = HttpClient::get(id, "/datasource/_search", None)
|
||||
let resp = HttpClient::post(id, "/datasource/_search", query_params, None)
|
||||
.await
|
||||
.map_err(|e| {
|
||||
// dbg!("Error fetching datasource: {}", &e);
|
||||
format!("Error fetching datasource: {}", e)
|
||||
})?;
|
||||
.map_err(|e| format!("Error fetching datasource: {}", e))?;
|
||||
|
||||
// Parse the search results from the response
|
||||
let datasources: Vec<DataSource> = parse_search_results(resp).await.map_err(|e| {
|
||||
dbg!("Error parsing search results: {}", &e);
|
||||
//dbg!("Error parsing search results: {}", &e);
|
||||
e.to_string()
|
||||
})?;
|
||||
|
||||
@@ -108,3 +107,25 @@ pub async fn get_datasources_by_server(
|
||||
|
||||
Ok(datasources)
|
||||
}
|
||||
|
||||
#[tauri::command]
|
||||
pub async fn mcp_server_search(
|
||||
id: &str,
|
||||
query_params: Option<Vec<String>>,
|
||||
) -> Result<Vec<DataSource>, String> {
|
||||
// Perform the async HTTP request outside the cache lock
|
||||
let resp = HttpClient::post(id, "/mcp_server/_search", query_params, None)
|
||||
.await
|
||||
.map_err(|e| format!("Error fetching datasource: {}", e))?;
|
||||
|
||||
// Parse the search results from the response
|
||||
let mcp_server: Vec<DataSource> = parse_search_results(resp).await.map_err(|e| {
|
||||
//dbg!("Error parsing search results: {}", &e);
|
||||
e.to_string()
|
||||
})?;
|
||||
|
||||
// Save the updated mcp_server to cache
|
||||
// save_datasource_to_cache(&id, mcp_server.clone());
|
||||
|
||||
Ok(mcp_server)
|
||||
}
|
||||
|
||||
@@ -1,22 +1,29 @@
|
||||
use crate::server::servers::{get_server_by_id, get_server_token};
|
||||
use http::HeaderName;
|
||||
use http::{HeaderName, HeaderValue};
|
||||
use once_cell::sync::Lazy;
|
||||
use reqwest::{Client, Method, RequestBuilder};
|
||||
use serde_json::Value;
|
||||
use std::collections::HashMap;
|
||||
use std::time::Duration;
|
||||
use tauri::ipc::RuntimeCapability;
|
||||
use tokio::sync::Mutex;
|
||||
|
||||
pub static HTTP_CLIENT: Lazy<Mutex<Client>> = Lazy::new(|| {
|
||||
let client = Client::builder()
|
||||
pub(crate) fn new_reqwest_http_client(accept_invalid_certs: bool) -> Client {
|
||||
Client::builder()
|
||||
.read_timeout(Duration::from_secs(3)) // Set a timeout of 3 second
|
||||
.connect_timeout(Duration::from_secs(3)) // Set a timeout of 3 second
|
||||
.timeout(Duration::from_secs(10)) // Set a timeout of 10 seconds
|
||||
.danger_accept_invalid_certs(true) // example for self-signed certificates
|
||||
.danger_accept_invalid_certs(accept_invalid_certs) // allow self-signed certificates
|
||||
.build()
|
||||
.expect("Failed to build client");
|
||||
Mutex::new(client)
|
||||
.expect("Failed to build client")
|
||||
}
|
||||
|
||||
pub static HTTP_CLIENT: Lazy<Mutex<Client>> = Lazy::new(|| {
|
||||
let allow_self_signature = crate::settings::_get_allow_self_signature(
|
||||
crate::GLOBAL_TAURI_APP_HANDLE
|
||||
.get()
|
||||
.expect("global tauri app store not set")
|
||||
.clone(),
|
||||
);
|
||||
Mutex::new(new_reqwest_http_client(allow_self_signature))
|
||||
});
|
||||
|
||||
pub struct HttpClient;
|
||||
@@ -32,14 +39,33 @@ impl HttpClient {
|
||||
pub async fn send_raw_request(
|
||||
method: Method,
|
||||
url: &str,
|
||||
query_params: Option<HashMap<String, Value>>,
|
||||
query_params: Option<Vec<String>>,
|
||||
headers: Option<HashMap<String, String>>,
|
||||
body: Option<reqwest::Body>,
|
||||
) -> Result<reqwest::Response, String> {
|
||||
let mut request_builder = Self::get_request_builder(method, url, headers, query_params, body).await;
|
||||
log::debug!(
|
||||
"Sending Request: {}, query_params: {:?}, header: {:?}, body: {:?}",
|
||||
&url,
|
||||
&query_params,
|
||||
&headers,
|
||||
&body
|
||||
);
|
||||
|
||||
let request_builder =
|
||||
Self::get_request_builder(method, url, headers, query_params, body).await;
|
||||
|
||||
let response = request_builder.send().await.map_err(|e| {
|
||||
//dbg!("Failed to send request: {}", &e);
|
||||
format!("Failed to send request: {}", e)
|
||||
})?;
|
||||
|
||||
log::debug!(
|
||||
"Request: {}, Response status: {:?}, header: {:?}",
|
||||
&url,
|
||||
&response.status(),
|
||||
&response.headers()
|
||||
);
|
||||
|
||||
let response = request_builder.send().await
|
||||
.map_err(|e| format!("Failed to send request: {}", e))?;
|
||||
Ok(response)
|
||||
}
|
||||
|
||||
@@ -47,7 +73,7 @@ impl HttpClient {
|
||||
method: Method,
|
||||
url: &str,
|
||||
headers: Option<HashMap<String, String>>,
|
||||
query_params: Option<HashMap<String, Value>>, // Add query parameters
|
||||
query_params: Option<Vec<String>>, // Add query parameters
|
||||
body: Option<reqwest::Body>,
|
||||
) -> RequestBuilder {
|
||||
let client = HTTP_CLIENT.lock().await; // Acquire the lock on HTTP_CLIENT
|
||||
@@ -55,21 +81,38 @@ impl HttpClient {
|
||||
// Build the request
|
||||
let mut request_builder = client.request(method.clone(), url);
|
||||
|
||||
|
||||
if let Some(h) = headers {
|
||||
let mut req_headers = reqwest::header::HeaderMap::new();
|
||||
for (key, value) in h.into_iter() {
|
||||
let _ = req_headers.insert(
|
||||
HeaderName::from_bytes(key.as_bytes()).unwrap(),
|
||||
reqwest::header::HeaderValue::from_str(&value).unwrap(),
|
||||
);
|
||||
match (
|
||||
HeaderName::from_bytes(key.as_bytes()),
|
||||
HeaderValue::from_str(value.trim()),
|
||||
) {
|
||||
(Ok(name), Ok(val)) => {
|
||||
req_headers.insert(name, val);
|
||||
}
|
||||
(Err(e), _) => {
|
||||
eprintln!("Invalid header name: {:?}, error: {}", key, e);
|
||||
}
|
||||
(_, Err(e)) => {
|
||||
eprintln!(
|
||||
"Invalid header value for {}: {:?}, error: {}",
|
||||
key, value, e
|
||||
);
|
||||
}
|
||||
}
|
||||
}
|
||||
request_builder = request_builder.headers(req_headers);
|
||||
}
|
||||
|
||||
if let Some(query) = query_params {
|
||||
if let Some(params) = query_params {
|
||||
let query: Vec<(&str, &str)> = params
|
||||
.iter()
|
||||
.filter_map(|s| s.split_once('='))
|
||||
.collect();
|
||||
request_builder = request_builder.query(&query);
|
||||
}
|
||||
|
||||
// Add body if present
|
||||
if let Some(b) = body {
|
||||
request_builder = request_builder.body(b);
|
||||
@@ -78,12 +121,13 @@ impl HttpClient {
|
||||
request_builder
|
||||
}
|
||||
|
||||
|
||||
pub async fn send_request(
|
||||
server_id: &str,
|
||||
method: Method,
|
||||
path: &str,
|
||||
custom_headers: Option<HashMap<String, String>>,
|
||||
query_params: Option<HashMap<String, Value>>,
|
||||
query_params: Option<Vec<String>>,
|
||||
body: Option<reqwest::Body>,
|
||||
) -> Result<reqwest::Response, String> {
|
||||
// Fetch the server using the server_id
|
||||
@@ -93,26 +137,27 @@ impl HttpClient {
|
||||
let url = HttpClient::join_url(&s.endpoint, path);
|
||||
|
||||
// Retrieve the token for the server (token is optional)
|
||||
let token = get_server_token(server_id).map(|t| t.access_token.clone());
|
||||
let token = get_server_token(server_id)
|
||||
.await?
|
||||
.map(|t| t.access_token.clone());
|
||||
|
||||
let mut headers = if let Some(custom_headers) = custom_headers {
|
||||
custom_headers
|
||||
} else {
|
||||
let mut headers = HashMap::new();
|
||||
let headers = HashMap::new();
|
||||
headers
|
||||
};
|
||||
|
||||
if let Some(t) = token {
|
||||
headers.insert(
|
||||
"X-API-TOKEN".to_string(),
|
||||
t,
|
||||
);
|
||||
headers.insert("X-API-TOKEN".to_string(), t);
|
||||
}
|
||||
|
||||
|
||||
// dbg!(&server_id);
|
||||
// dbg!(&url);
|
||||
// dbg!(&headers);
|
||||
// log::debug!(
|
||||
// "Sending request to server: {}, url: {}, headers: {:?}",
|
||||
// &server_id,
|
||||
// &url,
|
||||
// &headers
|
||||
// );
|
||||
|
||||
Self::send_raw_request(method, &url, query_params, Some(headers), body).await
|
||||
} else {
|
||||
@@ -121,16 +166,20 @@ impl HttpClient {
|
||||
}
|
||||
|
||||
// Convenience method for GET requests (as it's the most common)
|
||||
pub async fn get(server_id: &str, path: &str, query_params: Option<HashMap<String, Value>>, // Add query parameters
|
||||
pub async fn get(
|
||||
server_id: &str,
|
||||
path: &str,
|
||||
query_params: Option<Vec<String>>,
|
||||
) -> Result<reqwest::Response, String> {
|
||||
HttpClient::send_request(server_id, Method::GET, path, None, query_params, None).await
|
||||
HttpClient::send_request(server_id, Method::GET, path, None, query_params,
|
||||
None).await
|
||||
}
|
||||
|
||||
// Convenience method for POST requests
|
||||
pub async fn post(
|
||||
server_id: &str,
|
||||
path: &str,
|
||||
query_params: Option<HashMap<String, Value>>, // Add query parameters
|
||||
query_params: Option<Vec<String>>,
|
||||
body: Option<reqwest::Body>,
|
||||
) -> Result<reqwest::Response, String> {
|
||||
HttpClient::send_request(server_id, Method::POST, path, None, query_params, body).await
|
||||
@@ -140,27 +189,56 @@ impl HttpClient {
|
||||
server_id: &str,
|
||||
path: &str,
|
||||
custom_headers: Option<HashMap<String, String>>,
|
||||
query_params: Option<HashMap<String, Value>>, // Add query parameters
|
||||
query_params: Option<Vec<String>>,
|
||||
body: Option<reqwest::Body>,
|
||||
) -> Result<reqwest::Response, String> {
|
||||
HttpClient::send_request(server_id, Method::POST, path, custom_headers, query_params, body).await
|
||||
HttpClient::send_request(
|
||||
server_id,
|
||||
Method::POST,
|
||||
path,
|
||||
custom_headers,
|
||||
query_params,
|
||||
body,
|
||||
)
|
||||
.await
|
||||
}
|
||||
|
||||
// Convenience method for PUT requests
|
||||
#[allow(dead_code)]
|
||||
pub async fn put(
|
||||
server_id: &str,
|
||||
path: &str,
|
||||
custom_headers: Option<HashMap<String, String>>,
|
||||
query_params: Option<HashMap<String, Value>>, // Add query parameters
|
||||
query_params: Option<Vec<String>>,
|
||||
body: Option<reqwest::Body>,
|
||||
) -> Result<reqwest::Response, String> {
|
||||
HttpClient::send_request(server_id, Method::PUT, path, custom_headers, query_params, body).await
|
||||
HttpClient::send_request(
|
||||
server_id,
|
||||
Method::PUT,
|
||||
path,
|
||||
custom_headers,
|
||||
query_params,
|
||||
body,
|
||||
)
|
||||
.await
|
||||
}
|
||||
|
||||
// Convenience method for DELETE requests
|
||||
pub async fn delete(server_id: &str, path: &str, custom_headers: Option<HashMap<String, String>>,
|
||||
query_params: Option<HashMap<String, Value>>, // Add query parameters
|
||||
#[allow(dead_code)]
|
||||
pub async fn delete(
|
||||
server_id: &str,
|
||||
path: &str,
|
||||
custom_headers: Option<HashMap<String, String>>,
|
||||
query_params: Option<Vec<String>>,
|
||||
) -> Result<reqwest::Response, String> {
|
||||
HttpClient::send_request(server_id, Method::DELETE, path, custom_headers, query_params, None).await
|
||||
HttpClient::send_request(
|
||||
server_id,
|
||||
Method::DELETE,
|
||||
path,
|
||||
custom_headers,
|
||||
query_params,
|
||||
None,
|
||||
)
|
||||
.await
|
||||
}
|
||||
}
|
||||
|
||||
@@ -1,10 +1,13 @@
|
||||
//! This file contains Rust APIs related to Coco Server management.
|
||||
|
||||
pub mod attachment;
|
||||
pub mod auth;
|
||||
pub mod servers;
|
||||
pub mod connector;
|
||||
pub mod datasource;
|
||||
pub mod http_client;
|
||||
pub mod profile;
|
||||
pub mod search;
|
||||
pub mod servers;
|
||||
pub mod system_settings;
|
||||
pub mod transcription;
|
||||
pub mod websocket;
|
||||
|
||||
@@ -1,3 +1,4 @@
|
||||
use crate::common::http::get_response_body_text;
|
||||
use crate::common::profile::UserProfile;
|
||||
use crate::server::http_client::HttpClient;
|
||||
use tauri::{AppHandle, Runtime};
|
||||
@@ -12,14 +13,16 @@ pub async fn get_user_profiles<R: Runtime>(
|
||||
.await
|
||||
.map_err(|e| format!("Error fetching profile: {}", e))?;
|
||||
|
||||
if let Some(content_length) = response.content_length() {
|
||||
if content_length > 0 {
|
||||
let profile: UserProfile = response
|
||||
.json()
|
||||
.await
|
||||
.map_err(|e| format!("Failed to parse response: {}", e))?;
|
||||
return Ok(profile);
|
||||
}
|
||||
// Use get_response_body_text to extract the body content
|
||||
let response_body = get_response_body_text(response)
|
||||
.await
|
||||
.map_err(|e| format!("Failed to read response body: {}", e))?;
|
||||
|
||||
// Check if the response body is not empty before deserializing
|
||||
if !response_body.is_empty() {
|
||||
let profile: UserProfile = serde_json::from_str(&response_body)
|
||||
.map_err(|e| format!("Failed to parse response: {}", e))?;
|
||||
return Ok(profile);
|
||||
}
|
||||
|
||||
Err("Profile not found or empty response".to_string())
|
||||
|
||||
@@ -1,17 +1,17 @@
|
||||
use crate::common::document::Document;
|
||||
use crate::common::search::{
|
||||
parse_search_response, QueryHits, QueryResponse, QuerySource, SearchQuery,
|
||||
};
|
||||
use crate::common::document::{Document, OnOpened};
|
||||
use crate::common::error::SearchError;
|
||||
use crate::common::http::get_response_body_text;
|
||||
use crate::common::search::{QueryHits, QueryResponse, QuerySource, SearchQuery, SearchResponse};
|
||||
use crate::common::server::Server;
|
||||
use crate::common::traits::{SearchError, SearchSource};
|
||||
use crate::common::traits::SearchSource;
|
||||
use crate::server::http_client::HttpClient;
|
||||
use crate::server::servers::get_server_token;
|
||||
use async_trait::async_trait;
|
||||
// use futures::stream::StreamExt;
|
||||
use ordered_float::OrderedFloat;
|
||||
use reqwest::{Client, Method, RequestBuilder};
|
||||
use std::collections::HashMap;
|
||||
// use std::hash::Hash;
|
||||
|
||||
#[allow(dead_code)]
|
||||
pub(crate) struct DocumentsSizedCollector {
|
||||
size: u64,
|
||||
/// Documents and scores
|
||||
@@ -20,6 +20,7 @@ pub(crate) struct DocumentsSizedCollector {
|
||||
docs: Vec<(String, Document, OrderedFloat<f64>)>,
|
||||
}
|
||||
|
||||
#[allow(dead_code)]
|
||||
impl DocumentsSizedCollector {
|
||||
pub(crate) fn new(size: u64) -> Self {
|
||||
// there will be size + 1 documents in docs at max
|
||||
@@ -43,7 +44,7 @@ impl DocumentsSizedCollector {
|
||||
}
|
||||
}
|
||||
|
||||
fn documents(self) -> impl ExactSizeIterator<Item = Document> {
|
||||
fn documents(self) -> impl ExactSizeIterator<Item=Document> {
|
||||
self.docs.into_iter().map(|(_, doc, _)| doc)
|
||||
}
|
||||
|
||||
@@ -71,36 +72,11 @@ const COCO_SERVERS: &str = "coco-servers";
|
||||
|
||||
pub struct CocoSearchSource {
|
||||
server: Server,
|
||||
client: Client,
|
||||
}
|
||||
|
||||
impl CocoSearchSource {
|
||||
pub fn new(server: Server, client: Client) -> Self {
|
||||
CocoSearchSource { server, client }
|
||||
}
|
||||
|
||||
fn build_request_from_query(&self, query: &SearchQuery) -> RequestBuilder {
|
||||
self.build_request(query.from, query.size, &query.query_strings)
|
||||
}
|
||||
|
||||
fn build_request(
|
||||
&self,
|
||||
from: u64,
|
||||
size: u64,
|
||||
query_strings: &HashMap<String, String>,
|
||||
) -> RequestBuilder {
|
||||
let url = HttpClient::join_url(&self.server.endpoint, "/query/_search");
|
||||
let mut request_builder = self.client.request(Method::GET, url);
|
||||
|
||||
if !self.server.public {
|
||||
if let Some(token) = get_server_token(&self.server.id).map(|t| t.access_token) {
|
||||
request_builder = request_builder.header("X-API-TOKEN", token);
|
||||
}
|
||||
}
|
||||
|
||||
request_builder
|
||||
.query(&[("from", &from.to_string()), ("size", &size.to_string())])
|
||||
.query(query_strings)
|
||||
pub fn new(server: Server) -> Self {
|
||||
CocoSearchSource { server }
|
||||
}
|
||||
}
|
||||
|
||||
@@ -114,58 +90,66 @@ impl SearchSource for CocoSearchSource {
|
||||
}
|
||||
}
|
||||
|
||||
// Directly return Result<QueryResponse, SearchError> instead of Future
|
||||
async fn search(&self, query: SearchQuery) -> Result<QueryResponse, SearchError> {
|
||||
let _server_id = self.server.id.clone();
|
||||
let _server_name = self.server.name.clone();
|
||||
let request_builder = self.build_request_from_query(&query);
|
||||
let url = "/query/_search";
|
||||
let mut total_hits = 0;
|
||||
let mut hits: Vec<(Document, f64)> = Vec::new();
|
||||
|
||||
// Send the HTTP request asynchronously
|
||||
let response = request_builder.send().await;
|
||||
let mut query_params = Vec::new();
|
||||
|
||||
match response {
|
||||
Ok(response) => {
|
||||
let status_code = response.status().as_u16();
|
||||
// Add from/size as number values
|
||||
query_params.push(format!("from={}", query.from));
|
||||
query_params.push(format!("size={}", query.size));
|
||||
|
||||
if status_code >= 200 && status_code < 400 {
|
||||
// Parse the response only if the status code is successful
|
||||
match parse_search_response(response).await {
|
||||
Ok(response) => {
|
||||
let total_hits = response.hits.total.value as usize;
|
||||
let hits: Vec<(Document, f64)> = response
|
||||
.hits
|
||||
.hits
|
||||
.into_iter()
|
||||
.map(|hit| {
|
||||
// Handling Option<f64> in hit._score by defaulting to 0.0 if None
|
||||
(hit._source, hit._score.unwrap_or(0.0)) // Use 0.0 if _score is None
|
||||
})
|
||||
.collect();
|
||||
// Add query strings
|
||||
for (key, value) in query.query_strings {
|
||||
query_params.push(format!("{}={}", key, value));
|
||||
}
|
||||
|
||||
// Return the QueryResponse with hits and total hits
|
||||
Ok(QueryResponse {
|
||||
source: self.get_type(),
|
||||
hits,
|
||||
total_hits,
|
||||
})
|
||||
}
|
||||
Err(err) => {
|
||||
// Parse error when response parsing fails
|
||||
Err(SearchError::ParseError(err.to_string()))
|
||||
}
|
||||
}
|
||||
} else {
|
||||
// Handle unsuccessful HTTP status codes (e.g., 4xx, 5xx)
|
||||
Err(SearchError::HttpError(format!(
|
||||
"Request failed with status code: {}",
|
||||
status_code
|
||||
)))
|
||||
let response = HttpClient::get(&self.server.id, &url, Some(query_params))
|
||||
.await
|
||||
.map_err(|e| SearchError::HttpError(format!("{}", e)))?;
|
||||
|
||||
// Use the helper function to parse the response body
|
||||
let response_body = get_response_body_text(response)
|
||||
.await
|
||||
.map_err(|e| SearchError::ParseError(e))?;
|
||||
|
||||
// Check if the response body is empty
|
||||
if !response_body.is_empty() {
|
||||
// log::info!("Search response body: {}", &response_body);
|
||||
|
||||
// Parse the search response from the body text
|
||||
let parsed: SearchResponse<Document> = serde_json::from_str(&response_body)
|
||||
.map_err(|e| SearchError::ParseError(format!("{}", e)))?;
|
||||
|
||||
|
||||
// Process the parsed response
|
||||
total_hits = parsed.hits.total.value as usize;
|
||||
|
||||
if let Some(items) = parsed.hits.hits {
|
||||
for hit in items {
|
||||
let mut document = hit._source;
|
||||
// Default _score to 0.0 if None
|
||||
let score = hit._score.unwrap_or(0.0);
|
||||
|
||||
let on_opened = document
|
||||
.url
|
||||
.as_ref()
|
||||
.map(|url| OnOpened::Document { url: url.clone() });
|
||||
// Set the `on_opened` field as it won't be returned from Coco server
|
||||
document.on_opened = on_opened;
|
||||
|
||||
hits.push((document, score));
|
||||
}
|
||||
}
|
||||
Err(err) => {
|
||||
// Handle error from the request itself
|
||||
Err(SearchError::HttpError(err.to_string()))
|
||||
}
|
||||
}
|
||||
|
||||
// Return the final result
|
||||
Ok(QueryResponse {
|
||||
source: self.get_type(),
|
||||
hits,
|
||||
total_hits,
|
||||
})
|
||||
}
|
||||
}
|
||||
|
||||
@@ -1,12 +1,13 @@
|
||||
use crate::common::http::get_response_body_text;
|
||||
use crate::common::register::SearchSourceRegistry;
|
||||
use crate::common::server::{AuthProvider, Provider, Server, ServerAccessToken, Sso, Version};
|
||||
use crate::server::connector::fetch_connectors_by_server;
|
||||
use crate::server::datasource::get_datasources_by_server;
|
||||
use crate::server::datasource::datasource_search;
|
||||
use crate::server::http_client::HttpClient;
|
||||
use crate::server::search::CocoSearchSource;
|
||||
use crate::COCO_TAURI_STORE;
|
||||
use lazy_static::lazy_static;
|
||||
use reqwest::{Client, Method, StatusCode};
|
||||
use reqwest::Method;
|
||||
use serde_json::from_value;
|
||||
use serde_json::Value as JsonValue;
|
||||
use std::collections::HashMap;
|
||||
@@ -24,6 +25,7 @@ lazy_static! {
|
||||
Arc::new(RwLock::new(HashMap::new()));
|
||||
}
|
||||
|
||||
#[allow(dead_code)]
|
||||
fn check_server_exists(id: &str) -> bool {
|
||||
let cache = SERVER_CACHE.read().unwrap(); // Acquire read lock
|
||||
cache.contains_key(id)
|
||||
@@ -35,9 +37,10 @@ pub fn get_server_by_id(id: &str) -> Option<Server> {
|
||||
}
|
||||
|
||||
#[tauri::command]
|
||||
pub fn get_server_token(id: &str) -> Option<ServerAccessToken> {
|
||||
let cache = SERVER_TOKEN.read().unwrap(); // Acquire read lock
|
||||
cache.get(id).cloned()
|
||||
pub async fn get_server_token(id: &str) -> Result<Option<ServerAccessToken>, String> {
|
||||
let cache = SERVER_TOKEN.read().map_err(|err| err.to_string())?;
|
||||
|
||||
Ok(cache.get(id).cloned())
|
||||
}
|
||||
|
||||
pub fn save_access_token(server_id: String, token: ServerAccessToken) -> bool {
|
||||
@@ -56,7 +59,7 @@ pub fn save_server(server: &Server) -> bool {
|
||||
}
|
||||
|
||||
fn remove_server_by_id(id: String) -> bool {
|
||||
dbg!("remove server by id:", &id);
|
||||
log::debug!("remove server by id: {}", &id);
|
||||
let mut cache = SERVER_CACHE.write().unwrap();
|
||||
let deleted = cache.remove(id.as_str());
|
||||
deleted.is_some()
|
||||
@@ -84,7 +87,7 @@ pub async fn persist_servers<R: Runtime>(app_handle: &AppHandle<R>) -> Result<()
|
||||
}
|
||||
|
||||
pub fn remove_server_token(id: &str) -> bool {
|
||||
dbg!("remove server token by id:", &id);
|
||||
log::debug!("remove server token by id: {}", &id);
|
||||
let mut cache = SERVER_TOKEN.write().unwrap();
|
||||
cache.remove(id).is_some()
|
||||
}
|
||||
@@ -101,7 +104,7 @@ pub fn persist_servers_token<R: Runtime>(app_handle: &AppHandle<R>) -> Result<()
|
||||
.map(|server| serde_json::to_value(server).expect("Failed to serialize access_tokens")) // Automatically serialize all fields
|
||||
.collect();
|
||||
|
||||
dbg!(format!("persist servers token: {:?}", &json_servers));
|
||||
log::debug!("persist servers token: {:?}", &json_servers);
|
||||
|
||||
// Save the serialized servers to Tauri's store
|
||||
app_handle
|
||||
@@ -140,17 +143,18 @@ fn get_default_server() -> Server {
|
||||
profile: None,
|
||||
auth_provider: AuthProvider {
|
||||
sso: Sso {
|
||||
url: "https://coco.infini.cloud/sso/login/".to_string(),
|
||||
url: "https://coco.infini.cloud/sso/login/cloud?provider=coco-cloud&product=coco".to_string(),
|
||||
},
|
||||
},
|
||||
priority: 0,
|
||||
stats: None,
|
||||
}
|
||||
}
|
||||
|
||||
pub async fn load_servers_token<R: Runtime>(
|
||||
app_handle: &AppHandle<R>,
|
||||
) -> Result<Vec<ServerAccessToken>, String> {
|
||||
dbg!("Attempting to load servers token");
|
||||
log::debug!("Attempting to load servers token");
|
||||
|
||||
let store = app_handle
|
||||
.store(COCO_TAURI_STORE)
|
||||
@@ -184,10 +188,7 @@ pub async fn load_servers_token<R: Runtime>(
|
||||
save_access_token(server.id.clone(), server.clone());
|
||||
}
|
||||
|
||||
dbg!(format!(
|
||||
"loaded {:?} servers's token",
|
||||
&deserialized_tokens.len()
|
||||
));
|
||||
log::debug!("loaded {:?} servers's token", &deserialized_tokens.len());
|
||||
|
||||
Ok(deserialized_tokens)
|
||||
} else {
|
||||
@@ -228,7 +229,7 @@ pub async fn load_servers<R: Runtime>(app_handle: &AppHandle<R>) -> Result<Vec<S
|
||||
save_server(&server);
|
||||
}
|
||||
|
||||
// dbg!(format!("load servers: {:?}", &deserialized_servers));
|
||||
log::debug!("load servers: {:?}", &deserialized_servers);
|
||||
|
||||
Ok(deserialized_servers)
|
||||
} else {
|
||||
@@ -240,18 +241,18 @@ pub async fn load_servers<R: Runtime>(app_handle: &AppHandle<R>) -> Result<Vec<S
|
||||
pub async fn load_or_insert_default_server<R: Runtime>(
|
||||
app_handle: &AppHandle<R>,
|
||||
) -> Result<Vec<Server>, String> {
|
||||
dbg!("Attempting to load or insert default server");
|
||||
log::debug!("Attempting to load or insert default server");
|
||||
|
||||
let exists_servers = load_servers(&app_handle).await;
|
||||
if exists_servers.is_ok() && !exists_servers.as_ref()?.is_empty() {
|
||||
dbg!(format!("loaded {} servers", &exists_servers.clone()?.len()));
|
||||
log::debug!("loaded {} servers", &exists_servers.clone()?.len());
|
||||
return exists_servers;
|
||||
}
|
||||
|
||||
let default = get_default_server();
|
||||
save_server(&default);
|
||||
|
||||
dbg!("loaded default servers");
|
||||
log::debug!("loaded default servers");
|
||||
|
||||
Ok(vec![default])
|
||||
}
|
||||
@@ -267,6 +268,7 @@ pub async fn list_coco_servers<R: Runtime>(
|
||||
Ok(servers)
|
||||
}
|
||||
|
||||
#[allow(dead_code)]
|
||||
pub fn get_servers_as_hashmap() -> HashMap<String, Server> {
|
||||
let cache = SERVER_CACHE.read().unwrap();
|
||||
cache.clone()
|
||||
@@ -295,61 +297,67 @@ pub async fn refresh_coco_server_info<R: Runtime>(
|
||||
id: String,
|
||||
) -> Result<Server, String> {
|
||||
// Retrieve the server from the cache
|
||||
let server = {
|
||||
let cached_server = {
|
||||
let cache = SERVER_CACHE.read().unwrap();
|
||||
cache.get(&id).cloned()
|
||||
};
|
||||
|
||||
if let Some(server) = server {
|
||||
let is_enabled = server.enabled;
|
||||
let is_builtin = server.builtin;
|
||||
let profile = server.profile;
|
||||
let server = match cached_server {
|
||||
Some(server) => server,
|
||||
None => return Err("Server not found.".into()),
|
||||
};
|
||||
|
||||
// Use the HttpClient to send the request
|
||||
let response = HttpClient::get(&id, "/provider/_info", None) // Assuming "/provider-info" is the endpoint
|
||||
.await
|
||||
.map_err(|e| format!("Failed to send request to the server: {}", e))?;
|
||||
// Preserve important local state
|
||||
let is_enabled = server.enabled;
|
||||
let is_builtin = server.builtin;
|
||||
let profile = server.profile;
|
||||
|
||||
if response.status() == StatusCode::OK {
|
||||
if let Some(content_length) = response.content_length() {
|
||||
if content_length > 0 {
|
||||
let new_coco_server: Result<Server, _> = response.json().await;
|
||||
match new_coco_server {
|
||||
Ok(mut server) => {
|
||||
server.id = id.clone();
|
||||
server.builtin = is_builtin;
|
||||
server.enabled = is_enabled;
|
||||
server.available = true;
|
||||
server.profile = profile;
|
||||
trim_endpoint_last_forward_slash(&mut server);
|
||||
save_server(&server);
|
||||
persist_servers(&app_handle)
|
||||
.await
|
||||
.expect("Failed to persist coco servers.");
|
||||
// Send request to fetch updated server info
|
||||
let response = HttpClient::get(&id, "/provider/_info", None)
|
||||
.await
|
||||
.map_err(|e| format!("Failed to contact the server: {}", e));
|
||||
|
||||
//refresh connectors and datasources
|
||||
let _ = fetch_connectors_by_server(&id).await;
|
||||
|
||||
let _ = get_datasources_by_server(&id).await;
|
||||
|
||||
Ok(server)
|
||||
}
|
||||
Err(e) => Err(format!("Failed to deserialize the response: {:?}", e)),
|
||||
}
|
||||
} else {
|
||||
Err("Received empty response body.".to_string())
|
||||
}
|
||||
} else {
|
||||
mark_server_as_offline(id.as_str()).await;
|
||||
Err("Could not determine the content length.".to_string())
|
||||
}
|
||||
} else {
|
||||
mark_server_as_offline(id.as_str()).await;
|
||||
Err(format!("Request failed with status: {}", response.status()))
|
||||
}
|
||||
} else {
|
||||
Err("Server not found.".to_string())
|
||||
if response.is_err() {
|
||||
let _ = mark_server_as_offline(app_handle, &id).await;
|
||||
return Err(response.err().unwrap());
|
||||
}
|
||||
|
||||
let response = response?;
|
||||
|
||||
if !response.status().is_success() {
|
||||
let _ = mark_server_as_offline(app_handle, &id).await;
|
||||
return Err(format!("Request failed with status: {}", response.status()));
|
||||
}
|
||||
|
||||
// Get body text via helper
|
||||
let body = get_response_body_text(response).await?;
|
||||
|
||||
// Deserialize server
|
||||
let mut updated_server: Server = serde_json::from_str(&body)
|
||||
.map_err(|e| format!("Failed to deserialize the response: {}", e))?;
|
||||
|
||||
// Mark server as online
|
||||
let _ = mark_server_as_online(app_handle.clone(), &id).await;
|
||||
|
||||
// Restore local state
|
||||
updated_server.id = id.clone();
|
||||
updated_server.builtin = is_builtin;
|
||||
updated_server.enabled = is_enabled;
|
||||
updated_server.available = true;
|
||||
updated_server.profile = profile;
|
||||
trim_endpoint_last_forward_slash(&mut updated_server);
|
||||
|
||||
// Save and persist
|
||||
save_server(&updated_server);
|
||||
persist_servers(&app_handle)
|
||||
.await
|
||||
.map_err(|e| format!("Failed to persist servers: {}", e))?;
|
||||
|
||||
// Refresh connectors and datasources (best effort)
|
||||
let _ = fetch_connectors_by_server(&id).await;
|
||||
let _ = datasource_search(&id, None).await;
|
||||
|
||||
Ok(updated_server)
|
||||
}
|
||||
|
||||
#[tauri::command]
|
||||
@@ -359,73 +367,49 @@ pub async fn add_coco_server<R: Runtime>(
|
||||
) -> Result<Server, String> {
|
||||
load_or_insert_default_server(&app_handle)
|
||||
.await
|
||||
.expect("Failed to load default servers");
|
||||
.map_err(|e| format!("Failed to load default servers: {}", e))?;
|
||||
|
||||
// Remove the trailing '/' from the endpoint to ensure correct URL construction
|
||||
let endpoint = endpoint.trim_end_matches('/');
|
||||
|
||||
// Check if the server with this endpoint already exists
|
||||
if check_endpoint_exists(endpoint) {
|
||||
dbg!(format!(
|
||||
log::debug!(
|
||||
"This Coco server has already been registered: {:?}",
|
||||
&endpoint
|
||||
));
|
||||
);
|
||||
return Err("This Coco server has already been registered.".into());
|
||||
}
|
||||
|
||||
let url = provider_info_url(&endpoint);
|
||||
|
||||
// Use the HttpClient to fetch provider information
|
||||
let url = provider_info_url(endpoint);
|
||||
let response = HttpClient::send_raw_request(Method::GET, url.as_str(), None, None, None)
|
||||
.await
|
||||
.map_err(|e| format!("Failed to send request to the server: {}", e))?;
|
||||
|
||||
dbg!(format!("Get provider info response: {:?}", &response));
|
||||
log::debug!("Get provider info response: {:?}", &response);
|
||||
|
||||
// Check if the response status is OK (200)
|
||||
if response.status() == StatusCode::OK {
|
||||
if let Some(content_length) = response.content_length() {
|
||||
if content_length > 0 {
|
||||
let new_coco_server: Result<Server, _> = response.json().await;
|
||||
let body = get_response_body_text(response).await?;
|
||||
|
||||
match new_coco_server {
|
||||
Ok(mut server) => {
|
||||
// Perform necessary checks and adjustments on the server data
|
||||
trim_endpoint_last_forward_slash(&mut server);
|
||||
let mut server: Server = serde_json::from_str(&body)
|
||||
.map_err(|e| format!("Failed to deserialize the response: {}", e))?;
|
||||
|
||||
if server.id.is_empty() {
|
||||
server.id = pizza_common::utils::uuid::Uuid::new().to_string();
|
||||
}
|
||||
trim_endpoint_last_forward_slash(&mut server);
|
||||
|
||||
if server.name.is_empty() {
|
||||
server.name = "Coco Cloud".to_string();
|
||||
}
|
||||
|
||||
// Save the new server to the cache
|
||||
save_server(&server);
|
||||
|
||||
// Register the server to the search source
|
||||
try_register_server_to_search_source(app_handle.clone(), &server).await;
|
||||
|
||||
// Persist the servers to the store
|
||||
persist_servers(&app_handle)
|
||||
.await
|
||||
.expect("Failed to persist Coco servers.");
|
||||
|
||||
dbg!(format!("Successfully registered server: {:?}", &endpoint));
|
||||
Ok(server)
|
||||
}
|
||||
Err(e) => Err(format!("Failed to deserialize the response: {}", e)),
|
||||
}
|
||||
} else {
|
||||
Err("Received empty response body.".to_string())
|
||||
}
|
||||
} else {
|
||||
Err("Could not determine the content length.".to_string())
|
||||
}
|
||||
} else {
|
||||
Err(format!("Request failed with status: {}", response.status()))
|
||||
if server.id.is_empty() {
|
||||
server.id = pizza_common::utils::uuid::Uuid::new().to_string();
|
||||
}
|
||||
|
||||
if server.name.is_empty() {
|
||||
server.name = "Coco Server".to_string();
|
||||
}
|
||||
|
||||
save_server(&server);
|
||||
try_register_server_to_search_source(app_handle.clone(), &server).await;
|
||||
|
||||
persist_servers(&app_handle)
|
||||
.await
|
||||
.map_err(|e| format!("Failed to persist Coco servers: {}", e))?;
|
||||
|
||||
log::debug!("Successfully registered server: {:?}", &endpoint);
|
||||
Ok(server)
|
||||
}
|
||||
|
||||
#[tauri::command]
|
||||
@@ -470,26 +454,63 @@ pub async fn try_register_server_to_search_source(
|
||||
server: &Server,
|
||||
) {
|
||||
if server.enabled {
|
||||
log::trace!(
|
||||
"Server {} is public: {} and available: {}",
|
||||
&server.name,
|
||||
&server.public,
|
||||
&server.available
|
||||
);
|
||||
|
||||
if !server.public {
|
||||
let token = get_server_token(&server.id).await;
|
||||
|
||||
if !token.is_ok() || token.is_ok() && token.unwrap().is_none() {
|
||||
log::debug!("Server {} is not public and no token was found", &server.id);
|
||||
return;
|
||||
}
|
||||
}
|
||||
|
||||
let registry = app_handle.state::<SearchSourceRegistry>();
|
||||
let source = CocoSearchSource::new(server.clone(), Client::new());
|
||||
let source = CocoSearchSource::new(server.clone());
|
||||
registry.register_source(source).await;
|
||||
}
|
||||
}
|
||||
|
||||
pub async fn mark_server_as_offline(id: &str) {
|
||||
#[tauri::command]
|
||||
pub async fn mark_server_as_online<R: Runtime>(
|
||||
app_handle: AppHandle<R>, id: &str) -> Result<(), ()> {
|
||||
// println!("server_is_offline: {}", id);
|
||||
let server = get_server_by_id(id);
|
||||
if let Some(mut server) = server {
|
||||
server.available = true;
|
||||
server.health = None;
|
||||
save_server(&server);
|
||||
|
||||
try_register_server_to_search_source(app_handle.clone(), &server).await;
|
||||
}
|
||||
Ok(())
|
||||
}
|
||||
|
||||
#[tauri::command]
|
||||
pub async fn mark_server_as_offline<R: Runtime>(
|
||||
app_handle: AppHandle<R>,
|
||||
id: &str,
|
||||
) -> Result<(), ()> {
|
||||
// println!("server_is_offline: {}", id);
|
||||
let server = get_server_by_id(id);
|
||||
if let Some(mut server) = server {
|
||||
server.available = false;
|
||||
server.health = None;
|
||||
save_server(&server);
|
||||
|
||||
let registry = app_handle.state::<SearchSourceRegistry>();
|
||||
registry.remove_source(id).await;
|
||||
}
|
||||
Ok(())
|
||||
}
|
||||
|
||||
#[tauri::command]
|
||||
pub async fn disable_server<R: Runtime>(app_handle: AppHandle<R>, id: String) -> Result<(), ()> {
|
||||
println!("disable_server: {}", id);
|
||||
|
||||
let server = get_server_by_id(id.as_str());
|
||||
if let Some(mut server) = server {
|
||||
server.enabled = false;
|
||||
@@ -510,47 +531,48 @@ pub async fn logout_coco_server<R: Runtime>(
|
||||
app_handle: AppHandle<R>,
|
||||
id: String,
|
||||
) -> Result<(), String> {
|
||||
dbg!("Attempting to log out server by id:", &id);
|
||||
log::debug!("Attempting to log out server by id: {}", &id);
|
||||
|
||||
// Check if server token exists
|
||||
if let Some(_token) = get_server_token(id.as_str()) {
|
||||
dbg!("Found server token for id:", &id);
|
||||
if let Some(_token) = get_server_token(id.as_str()).await? {
|
||||
log::debug!("Found server token for id: {}", &id);
|
||||
|
||||
// Remove the server token from cache
|
||||
remove_server_token(id.as_str());
|
||||
|
||||
// Persist the updated tokens
|
||||
if let Err(e) = persist_servers_token(&app_handle) {
|
||||
dbg!("Failed to save tokens for id: {}. Error: {:?}", &id, &e);
|
||||
log::debug!("Failed to save tokens for id: {}. Error: {:?}", &id, &e);
|
||||
return Err(format!("Failed to save tokens: {}", &e));
|
||||
}
|
||||
} else {
|
||||
// Log the case where server token is not found
|
||||
dbg!("No server token found for id: {}", &id);
|
||||
log::debug!("No server token found for id: {}", &id);
|
||||
}
|
||||
|
||||
// Check if the server exists
|
||||
if let Some(mut server) = get_server_by_id(id.as_str()) {
|
||||
dbg!("Found server for id:", &id);
|
||||
log::debug!("Found server for id: {}", &id);
|
||||
|
||||
// Clear server profile
|
||||
server.profile = None;
|
||||
let _ = mark_server_as_offline(app_handle.clone(), id.as_str()).await;
|
||||
|
||||
// Save the updated server data
|
||||
save_server(&server);
|
||||
|
||||
// Persist the updated server data
|
||||
if let Err(e) = persist_servers(&app_handle).await {
|
||||
dbg!("Failed to save server for id: {}. Error: {:?}", &id, &e);
|
||||
log::debug!("Failed to save server for id: {}. Error: {:?}", &id, &e);
|
||||
return Err(format!("Failed to save server: {}", &e));
|
||||
}
|
||||
} else {
|
||||
// Log the case where server is not found
|
||||
dbg!("No server found for id: {}", &id);
|
||||
log::debug!("No server found for id: {}", &id);
|
||||
return Err(format!("No server found for id: {}", id));
|
||||
}
|
||||
|
||||
dbg!("Successfully logged out server with id:", &id);
|
||||
log::debug!("Successfully logged out server with id: {}", &id);
|
||||
Ok(())
|
||||
}
|
||||
|
||||
@@ -601,6 +623,7 @@ fn test_trim_endpoint_last_forward_slash() {
|
||||
},
|
||||
},
|
||||
priority: 0,
|
||||
stats: None,
|
||||
};
|
||||
|
||||
trim_endpoint_last_forward_slash(&mut server);
|
||||
|
||||
15
src-tauri/src/server/system_settings.rs
Normal file
@@ -0,0 +1,15 @@
|
||||
use crate::server::http_client::HttpClient;
|
||||
use serde_json::Value;
|
||||
use tauri::command;
|
||||
|
||||
#[command]
|
||||
pub async fn get_system_settings(server_id: String) -> Result<Value, String> {
|
||||
let response = HttpClient::get(&server_id, "/settings", None)
|
||||
.await
|
||||
.map_err(|err| err.to_string())?;
|
||||
|
||||
response
|
||||
.json::<Value>()
|
||||
.await
|
||||
.map_err(|err| err.to_string())
|
||||
}
|
||||
41
src-tauri/src/server/transcription.rs
Normal file
@@ -0,0 +1,41 @@
|
||||
use crate::common::http::get_response_body_text;
|
||||
use crate::server::http_client::HttpClient;
|
||||
use serde::{Deserialize, Serialize};
|
||||
use tauri::command;
|
||||
|
||||
#[derive(Debug, Serialize, Deserialize)]
|
||||
pub struct TranscriptionResponse {
|
||||
pub text: String,
|
||||
}
|
||||
|
||||
#[command]
|
||||
pub async fn transcription(
|
||||
server_id: String,
|
||||
_audio_type: String,
|
||||
_audio_content: String,
|
||||
) -> Result<TranscriptionResponse, String> {
|
||||
// let mut query_params = HashMap::new();
|
||||
// query_params.insert("type".to_string(), JsonValue::String(audio_type));
|
||||
// query_params.insert("content".to_string(), JsonValue::String(audio_content));
|
||||
|
||||
// Send the HTTP POST request
|
||||
let response = HttpClient::post(
|
||||
&server_id,
|
||||
"/services/audio/transcription",
|
||||
None,
|
||||
None,
|
||||
)
|
||||
.await
|
||||
.map_err(|e| format!("Error sending transcription request: {}", e))?;
|
||||
|
||||
// Use get_response_body_text to extract the response body as text
|
||||
let response_body = get_response_body_text(response)
|
||||
.await
|
||||
.map_err(|e| format!("Failed to read response body: {}", e))?;
|
||||
|
||||
// Deserialize the response body into TranscriptionResponse
|
||||
let transcription_response: TranscriptionResponse = serde_json::from_str(&response_body)
|
||||
.map_err(|e| format!("Failed to parse transcription response: {}", e))?;
|
||||
|
||||
Ok(transcription_response)
|
||||
}
|
||||
@@ -1,87 +1,66 @@
|
||||
use crate::server::servers::{get_server_by_id, get_server_token};
|
||||
use futures_util::{SinkExt, StreamExt};
|
||||
use http::{HeaderMap, HeaderName, HeaderValue};
|
||||
use futures::StreamExt;
|
||||
use std::collections::HashMap;
|
||||
use std::sync::Arc;
|
||||
use tauri::Emitter;
|
||||
use tauri::{AppHandle, Emitter, Runtime};
|
||||
use tokio::net::TcpStream;
|
||||
use tokio::sync::{mpsc, Mutex};
|
||||
use tokio_tungstenite::tungstenite::client::IntoClientRequest;
|
||||
use tokio_tungstenite::tungstenite::Error;
|
||||
use tokio_tungstenite::tungstenite::Error as WsError;
|
||||
use tokio_tungstenite::{
|
||||
connect_async, tungstenite::protocol::Message, MaybeTlsStream, WebSocketStream,
|
||||
};
|
||||
use tungstenite::handshake::client::generate_key;
|
||||
|
||||
use tokio_tungstenite::tungstenite::handshake::client::generate_key;
|
||||
use tokio_tungstenite::tungstenite::Message;
|
||||
use tokio_tungstenite::MaybeTlsStream;
|
||||
use tokio_tungstenite::WebSocketStream;
|
||||
use tokio_tungstenite::{connect_async_tls_with_config, Connector};
|
||||
#[derive(Default)]
|
||||
pub struct WebSocketManager {
|
||||
ws_connection: Arc<Mutex<Option<WebSocketStream<MaybeTlsStream<TcpStream>>>>>,
|
||||
cancel_tx: Arc<Mutex<Option<mpsc::Sender<()>>>>,
|
||||
connections: Arc<Mutex<HashMap<String, Arc<WebSocketInstance>>>>,
|
||||
}
|
||||
|
||||
struct WebSocketInstance {
|
||||
ws_connection: Mutex<WebSocketStream<MaybeTlsStream<TcpStream>>>, // No need to lock the entire map
|
||||
cancel_tx: mpsc::Sender<()>,
|
||||
}
|
||||
|
||||
// Function to convert the HTTP endpoint to WebSocket endpoint
|
||||
fn convert_to_websocket(endpoint: &str) -> Result<String, String> {
|
||||
let url = url::Url::parse(endpoint).map_err(|e| format!("Invalid URL: {}", e))?;
|
||||
|
||||
// Determine WebSocket protocol based on the scheme
|
||||
let ws_protocol = if url.scheme() == "https" {
|
||||
"wss://"
|
||||
} else {
|
||||
"ws://"
|
||||
};
|
||||
|
||||
// Extract host and port (if present)
|
||||
let host = url.host_str().ok_or_else(|| "No host found in URL")?;
|
||||
let host = url.host_str().ok_or("No host found in URL")?;
|
||||
let port = url
|
||||
.port_or_known_default()
|
||||
.unwrap_or(if url.scheme() == "https" { 443 } else { 80 });
|
||||
|
||||
// Build WebSocket URL, include the port if not the default
|
||||
let ws_endpoint = if port == 80 || port == 443 {
|
||||
format!("{}{}{}", ws_protocol, host, "/ws")
|
||||
} else {
|
||||
format!("{}{}:{}/ws", ws_protocol, host, port)
|
||||
};
|
||||
|
||||
Ok(ws_endpoint)
|
||||
}
|
||||
|
||||
// Function to build a HeaderMap from a vector of key-value pairs
|
||||
fn build_header_map(headers: Vec<(String, String)>) -> Result<HeaderMap, String> {
|
||||
let mut header_map = HeaderMap::new();
|
||||
for (key, value) in headers {
|
||||
let header_name = HeaderName::from_bytes(key.as_bytes())
|
||||
.map_err(|e| format!("Invalid header name: {}", e))?;
|
||||
let header_value =
|
||||
HeaderValue::from_str(&value).map_err(|e| format!("Invalid header value: {}", e))?;
|
||||
header_map.insert(header_name, header_value);
|
||||
}
|
||||
Ok(header_map)
|
||||
}
|
||||
|
||||
#[tauri::command]
|
||||
pub async fn connect_to_server(
|
||||
pub async fn connect_to_server<R: Runtime>(
|
||||
tauri_app_handle: AppHandle<R>,
|
||||
id: String,
|
||||
client_id: String,
|
||||
state: tauri::State<'_, WebSocketManager>,
|
||||
app_handle: tauri::AppHandle,
|
||||
app_handle: AppHandle,
|
||||
) -> Result<(), String> {
|
||||
// Disconnect any existing connection first
|
||||
disconnect(state.clone()).await?;
|
||||
let connections_clone = state.connections.clone();
|
||||
|
||||
// Retrieve server details
|
||||
let server =
|
||||
get_server_by_id(id.as_str()).ok_or_else(|| format!("Server with ID {} not found", id))?;
|
||||
let endpoint = convert_to_websocket(server.endpoint.as_str())?;
|
||||
// Disconnect old connection first
|
||||
disconnect(client_id.clone(), state.clone()).await.ok();
|
||||
|
||||
// Retrieve the token for the server (token is optional)
|
||||
let token = get_server_token(id.as_str()).map(|t| t.access_token.clone());
|
||||
let server = get_server_by_id(&id).ok_or(format!("Server with ID {} not found", id))?;
|
||||
let endpoint = convert_to_websocket(&server.endpoint)?;
|
||||
let token = get_server_token(&id).await?.map(|t| t.access_token.clone());
|
||||
|
||||
// Create the WebSocket request
|
||||
let mut request =
|
||||
tokio_tungstenite::tungstenite::client::IntoClientRequest::into_client_request(&endpoint)
|
||||
.map_err(|e| format!("Failed to create WebSocket request: {}", e))?;
|
||||
|
||||
// Add necessary headers
|
||||
request
|
||||
.headers_mut()
|
||||
.insert("Connection", "Upgrade".parse().unwrap());
|
||||
@@ -95,88 +74,95 @@ pub async fn connect_to_server(
|
||||
.headers_mut()
|
||||
.insert("Sec-WebSocket-Key", generate_key().parse().unwrap());
|
||||
|
||||
// If a token exists, add it to the headers
|
||||
if let Some(token) = token {
|
||||
request
|
||||
.headers_mut()
|
||||
.insert("X-API-TOKEN", token.parse().unwrap());
|
||||
}
|
||||
|
||||
// Establish the WebSocket connection
|
||||
// dbg!(&request);
|
||||
let (mut ws_remote, _) = connect_async(request).await.map_err(|e| match e {
|
||||
Error::ConnectionClosed => "WebSocket connection was closed".to_string(),
|
||||
Error::Protocol(protocol_error) => format!("Protocol error: {}", protocol_error),
|
||||
Error::Utf8 => "UTF-8 error in WebSocket data".to_string(),
|
||||
_ => format!("Unknown error: {:?}", e),
|
||||
})?;
|
||||
let allow_self_signature =
|
||||
crate::settings::get_allow_self_signature(tauri_app_handle.clone()).await;
|
||||
let tls_connector = tokio_native_tls::native_tls::TlsConnector::builder()
|
||||
.danger_accept_invalid_certs(allow_self_signature)
|
||||
.build()
|
||||
.map_err(|e| format!("TLS build error: {:?}", e))?;
|
||||
|
||||
let connector = Connector::NativeTls(tls_connector.into());
|
||||
|
||||
let (ws_stream, _) = connect_async_tls_with_config(
|
||||
request,
|
||||
None, // WebSocketConfig
|
||||
true, // disable_nagle
|
||||
Some(connector), // Connector
|
||||
)
|
||||
.await
|
||||
.map_err(|e| format!("WebSocket TLS error: {:?}", e))?;
|
||||
|
||||
// Create cancellation channel
|
||||
let (cancel_tx, mut cancel_rx) = mpsc::channel(1);
|
||||
|
||||
// Store connection and cancellation sender
|
||||
*state.ws_connection.lock().await = Some(ws_remote);
|
||||
*state.cancel_tx.lock().await = Some(cancel_tx);
|
||||
// Spawn listener task with cancellation
|
||||
let instance = Arc::new(WebSocketInstance {
|
||||
ws_connection: Mutex::new(ws_stream),
|
||||
cancel_tx,
|
||||
});
|
||||
|
||||
// Insert connection into the map (lock is held briefly)
|
||||
{
|
||||
let mut connections = connections_clone.lock().await;
|
||||
connections.insert(client_id.clone(), instance.clone());
|
||||
}
|
||||
|
||||
// Spawn WebSocket handler in a separate task
|
||||
let app_handle_clone = app_handle.clone();
|
||||
let connection_clone = state.ws_connection.clone();
|
||||
let client_id_clone = client_id.clone();
|
||||
tokio::spawn(async move {
|
||||
let mut connection = connection_clone.lock().await;
|
||||
if let Some(ws) = connection.as_mut() {
|
||||
loop {
|
||||
tokio::select! {
|
||||
msg = ws.next() => {
|
||||
match msg {
|
||||
Some(Ok(Message::Text(text))) => {
|
||||
//println!("Received message: {}", text);
|
||||
let _ = app_handle_clone.emit("ws-message", text);
|
||||
},
|
||||
Some(Err(WsError::ConnectionClosed)) => {
|
||||
let _ = app_handle_clone.emit("ws-error", id);
|
||||
eprintln!("WebSocket connection closed by the server.");
|
||||
break;
|
||||
},
|
||||
Some(Err(WsError::Protocol(e))) => {
|
||||
let _ = app_handle_clone.emit("ws-error", id);
|
||||
eprintln!("Protocol error: {}", e);
|
||||
break;
|
||||
},
|
||||
Some(Err(WsError::Utf8)) => {
|
||||
let _ = app_handle_clone.emit("ws-error", id);
|
||||
eprintln!("Received invalid UTF-8 data.");
|
||||
break;
|
||||
},
|
||||
Some(Err(_)) => {
|
||||
let _ = app_handle_clone.emit("ws-error", id);
|
||||
eprintln!("WebSocket error encountered.");
|
||||
break;
|
||||
},
|
||||
_ => continue,
|
||||
let ws = &mut *instance.ws_connection.lock().await;
|
||||
|
||||
loop {
|
||||
tokio::select! {
|
||||
msg = ws.next() => {
|
||||
match msg {
|
||||
Some(Ok(Message::Text(text))) => {
|
||||
let _ = app_handle_clone.emit(&format!("ws-message-{}", client_id_clone), text);
|
||||
},
|
||||
Some(Err(_)) | None => {
|
||||
log::debug!("WebSocket connection closed or error");
|
||||
let _ = app_handle_clone.emit(&format!("ws-error-{}", client_id_clone), id.clone());
|
||||
break;
|
||||
}
|
||||
_ => {}
|
||||
}
|
||||
_ = cancel_rx.recv() => {
|
||||
let _ = app_handle_clone.emit("ws-error", id);
|
||||
dbg!("Cancelling WebSocket connection");
|
||||
break;
|
||||
}
|
||||
}
|
||||
_ = cancel_rx.recv() => {
|
||||
log::debug!("WebSocket connection cancelled");
|
||||
let _ = app_handle_clone.emit(&format!("ws-cancel-{}", client_id_clone), id.clone());
|
||||
break;
|
||||
}
|
||||
}
|
||||
}
|
||||
|
||||
// Remove connection after it closes
|
||||
let mut connections = connections_clone.lock().await;
|
||||
connections.remove(&client_id_clone);
|
||||
});
|
||||
|
||||
Ok(())
|
||||
}
|
||||
|
||||
#[tauri::command]
|
||||
pub async fn disconnect(state: tauri::State<'_, WebSocketManager>) -> Result<(), String> {
|
||||
// Send cancellation signal
|
||||
if let Some(cancel_tx) = state.cancel_tx.lock().await.take() {
|
||||
let _ = cancel_tx.send(()).await;
|
||||
}
|
||||
pub async fn disconnect(
|
||||
client_id: String,
|
||||
state: tauri::State<'_, WebSocketManager>,
|
||||
) -> Result<(), String> {
|
||||
let instance = {
|
||||
let mut connections = state.connections.lock().await;
|
||||
connections.remove(&client_id)
|
||||
};
|
||||
|
||||
// Close connection
|
||||
let mut connection = state.ws_connection.lock().await;
|
||||
if let Some(mut ws) = connection.take() {
|
||||
if let Some(instance) = instance {
|
||||
let _ = instance.cancel_tx.send(()).await;
|
||||
|
||||
// Close WebSocket (lock only the connection, not the whole map)
|
||||
let mut ws = instance.ws_connection.lock().await;
|
||||
let _ = ws.close(None).await;
|
||||
}
|
||||
|
||||
|
||||
72
src-tauri/src/settings.rs
Normal file
@@ -0,0 +1,72 @@
|
||||
use crate::COCO_TAURI_STORE;
|
||||
use serde_json::Value as Json;
|
||||
use tauri::{AppHandle, Runtime};
|
||||
use tauri_plugin_store::StoreExt;
|
||||
|
||||
const SETTINGS_ALLOW_SELF_SIGNATURE: &str = "settings_allow_self_signature";
|
||||
|
||||
#[tauri::command]
|
||||
pub async fn set_allow_self_signature<R: Runtime>(tauri_app_handle: AppHandle<R>, value: bool) {
|
||||
use crate::server::http_client;
|
||||
|
||||
let store = tauri_app_handle
|
||||
.store(COCO_TAURI_STORE)
|
||||
.unwrap_or_else(|e| {
|
||||
panic!(
|
||||
"store [{}] not found/loaded, error [{}]",
|
||||
COCO_TAURI_STORE, e
|
||||
)
|
||||
});
|
||||
|
||||
let old_value = match store
|
||||
.get(SETTINGS_ALLOW_SELF_SIGNATURE)
|
||||
.expect("should be initialized upon first get call")
|
||||
{
|
||||
Json::Bool(b) => b,
|
||||
_ => unreachable!(
|
||||
"{} should be stored in a boolean",
|
||||
SETTINGS_ALLOW_SELF_SIGNATURE
|
||||
),
|
||||
};
|
||||
|
||||
if old_value == value {
|
||||
return;
|
||||
}
|
||||
|
||||
store.set(SETTINGS_ALLOW_SELF_SIGNATURE, value);
|
||||
|
||||
let mut guard = http_client::HTTP_CLIENT.lock().await;
|
||||
*guard = http_client::new_reqwest_http_client(value)
|
||||
}
|
||||
|
||||
/// Synchronous version of `async get_allow_self_signature()`.
|
||||
pub fn _get_allow_self_signature<R: Runtime>(tauri_app_handle: AppHandle<R>) -> bool {
|
||||
let store = tauri_app_handle
|
||||
.store(COCO_TAURI_STORE)
|
||||
.unwrap_or_else(|e| {
|
||||
panic!(
|
||||
"store [{}] not found/loaded, error [{}]",
|
||||
COCO_TAURI_STORE, e
|
||||
)
|
||||
});
|
||||
if !store.has(SETTINGS_ALLOW_SELF_SIGNATURE) {
|
||||
// default to false
|
||||
store.set(SETTINGS_ALLOW_SELF_SIGNATURE, false);
|
||||
}
|
||||
|
||||
match store
|
||||
.get(SETTINGS_ALLOW_SELF_SIGNATURE)
|
||||
.expect("should be Some")
|
||||
{
|
||||
Json::Bool(b) => b,
|
||||
_ => unreachable!(
|
||||
"{} should be stored in a boolean",
|
||||
SETTINGS_ALLOW_SELF_SIGNATURE
|
||||
),
|
||||
}
|
||||
}
|
||||
|
||||
#[tauri::command]
|
||||
pub async fn get_allow_self_signature<R: Runtime>(tauri_app_handle: AppHandle<R>) -> bool {
|
||||
_get_allow_self_signature(tauri_app_handle)
|
||||
}
|
||||
@@ -1,3 +1,9 @@
|
||||
use tauri::{App, WebviewWindow};
|
||||
|
||||
pub fn platform(_app: &mut App, _main_window: WebviewWindow, _settings_window: WebviewWindow) {}
|
||||
pub fn platform(
|
||||
_app: &mut App,
|
||||
_main_window: WebviewWindow,
|
||||
_settings_window: WebviewWindow,
|
||||
_check_window: WebviewWindow,
|
||||
) {
|
||||
}
|
||||
|
||||
@@ -1,9 +1,6 @@
|
||||
//credits to: https://github.com/ayangweb/ayangweb-EcoPaste/blob/169323dbe6365ffe4abb64d867439ed2ea84c6d1/src-tauri/src/core/setup/mac.rs
|
||||
use tauri::{ActivationPolicy, App, Emitter, EventTarget, WebviewWindow};
|
||||
use tauri_nspanel::{
|
||||
cocoa::appkit::{NSMainMenuWindowLevel, NSWindowCollectionBehavior},
|
||||
panel_delegate, WebviewWindowExt,
|
||||
};
|
||||
use tauri::{App, Emitter, EventTarget, WebviewWindow};
|
||||
use tauri_nspanel::{cocoa::appkit::NSWindowCollectionBehavior, panel_delegate, WebviewWindowExt};
|
||||
|
||||
use crate::common::MAIN_WINDOW_LABEL;
|
||||
|
||||
@@ -15,14 +12,17 @@ const WINDOW_BLUR_EVENT: &str = "tauri://blur";
|
||||
const WINDOW_MOVED_EVENT: &str = "tauri://move";
|
||||
const WINDOW_RESIZED_EVENT: &str = "tauri://resize";
|
||||
|
||||
pub fn platform(app: &mut App, main_window: WebviewWindow, _settings_window: WebviewWindow) {
|
||||
app.set_activation_policy(ActivationPolicy::Accessory);
|
||||
|
||||
pub fn platform(
|
||||
_app: &mut App,
|
||||
main_window: WebviewWindow,
|
||||
_settings_window: WebviewWindow,
|
||||
_check_window: WebviewWindow,
|
||||
) {
|
||||
// Convert ns_window to ns_panel
|
||||
let panel = main_window.to_panel().unwrap();
|
||||
|
||||
// Make the window above the dock
|
||||
panel.set_level(NSMainMenuWindowLevel + 1);
|
||||
panel.set_level(20);
|
||||
|
||||
// Do not steal focus from other windows
|
||||
panel.set_style_mask(NSWindowStyleMaskNonActivatingPanel);
|
||||
|
||||
@@ -18,10 +18,20 @@ pub use windows::*;
|
||||
#[cfg(target_os = "linux")]
|
||||
pub use linux::*;
|
||||
|
||||
pub fn default(app: &mut App, main_window: WebviewWindow, settings_window: WebviewWindow) {
|
||||
pub fn default(
|
||||
app: &mut App,
|
||||
main_window: WebviewWindow,
|
||||
settings_window: WebviewWindow,
|
||||
check_window: WebviewWindow,
|
||||
) {
|
||||
// Development mode automatically opens the console: https://tauri.app/develop/debug
|
||||
#[cfg(any(dev, debug_assertions))]
|
||||
#[cfg(debug_assertions)]
|
||||
main_window.open_devtools();
|
||||
|
||||
platform(app, main_window.clone(), settings_window.clone());
|
||||
platform(
|
||||
app,
|
||||
main_window.clone(),
|
||||
settings_window.clone(),
|
||||
check_window.clone(),
|
||||
);
|
||||
}
|
||||
|
||||
@@ -1,3 +1,9 @@
|
||||
use tauri::{App, WebviewWindow};
|
||||
|
||||
pub fn platform(_app: &mut App, _main_window: WebviewWindow, _settings_window: WebviewWindow) {}
|
||||
pub fn platform(
|
||||
_app: &mut App,
|
||||
_main_window: WebviewWindow,
|
||||
_settings_window: WebviewWindow,
|
||||
_check_window: WebviewWindow,
|
||||
) {
|
||||
}
|
||||
|
||||
@@ -1,13 +1,7 @@
|
||||
use crate::{move_window_to_active_monitor, COCO_TAURI_STORE};
|
||||
use tauri::App;
|
||||
use tauri::AppHandle;
|
||||
use tauri::Manager;
|
||||
use tauri::Runtime;
|
||||
use tauri_plugin_global_shortcut::GlobalShortcutExt;
|
||||
use tauri_plugin_global_shortcut::Shortcut;
|
||||
use tauri_plugin_global_shortcut::ShortcutState;
|
||||
use tauri_plugin_store::JsonValue;
|
||||
use tauri_plugin_store::StoreExt;
|
||||
use crate::{hide_coco, show_coco, COCO_TAURI_STORE};
|
||||
use tauri::{async_runtime, App, AppHandle, Manager, Runtime};
|
||||
use tauri_plugin_global_shortcut::{GlobalShortcutExt, Shortcut, ShortcutState};
|
||||
use tauri_plugin_store::{JsonValue, StoreExt};
|
||||
|
||||
/// Tauri's store is a key-value database, we use it to store our registered
|
||||
/// global shortcut.
|
||||
@@ -23,6 +17,7 @@ const DEFAULT_SHORTCUT: &str = "ctrl+shift+space";
|
||||
|
||||
/// Set up the shortcut upon app start.
|
||||
pub fn enable_shortcut(app: &App) {
|
||||
log::trace!("setting up Coco hotkey");
|
||||
let store = app
|
||||
.store(COCO_TAURI_STORE)
|
||||
.expect("creating a store should not fail");
|
||||
@@ -49,19 +44,20 @@ pub fn enable_shortcut(app: &App) {
|
||||
.expect("default shortcut should never be invalid");
|
||||
_register_shortcut_upon_start(app, default_shortcut);
|
||||
}
|
||||
log::trace!("Coco hotkey has been set");
|
||||
}
|
||||
|
||||
/// Get the stored shortcut as a string, same as [`_get_shortcut()`], except that
|
||||
/// this is a `tauri::command` interface.
|
||||
#[tauri::command]
|
||||
pub fn get_current_shortcut<R: Runtime>(app: AppHandle<R>) -> Result<String, String> {
|
||||
pub async fn get_current_shortcut<R: Runtime>(app: AppHandle<R>) -> Result<String, String> {
|
||||
let shortcut = _get_shortcut(&app);
|
||||
Ok(shortcut)
|
||||
}
|
||||
|
||||
/// Get the current shortcut and unregister it on the tauri side.
|
||||
#[tauri::command]
|
||||
pub fn unregister_shortcut<R: Runtime>(app: AppHandle<R>) {
|
||||
pub async fn unregister_shortcut<R: Runtime>(app: AppHandle<R>) {
|
||||
let shortcut_str = _get_shortcut(&app);
|
||||
let shortcut = shortcut_str
|
||||
.parse::<Shortcut>()
|
||||
@@ -74,7 +70,7 @@ pub fn unregister_shortcut<R: Runtime>(app: AppHandle<R>) {
|
||||
|
||||
/// Change the global shortcut to `key`.
|
||||
#[tauri::command]
|
||||
pub fn change_shortcut<R: Runtime>(
|
||||
pub async fn change_shortcut<R: Runtime>(
|
||||
app: AppHandle<R>,
|
||||
_window: tauri::Window<R>,
|
||||
key: String,
|
||||
@@ -103,18 +99,17 @@ fn _register_shortcut<R: Runtime>(app: &AppHandle<R>, shortcut: Shortcut) {
|
||||
.on_shortcut(shortcut, move |app, scut, event| {
|
||||
if scut == &shortcut {
|
||||
dbg!("shortcut pressed");
|
||||
let main_window = app.get_window(MAIN_WINDOW_LABEL).unwrap();
|
||||
let main_window = app.get_webview_window(MAIN_WINDOW_LABEL).unwrap();
|
||||
if let ShortcutState::Pressed = event.state() {
|
||||
let app_handle = app.clone();
|
||||
if main_window.is_visible().unwrap() {
|
||||
dbg!("hiding window");
|
||||
main_window.hide().unwrap();
|
||||
async_runtime::spawn(async move {
|
||||
hide_coco(app_handle).await;
|
||||
});
|
||||
} else {
|
||||
dbg!("showing window");
|
||||
move_window_to_active_monitor(&main_window);
|
||||
main_window.set_visible_on_all_workspaces(true).unwrap();
|
||||
main_window.set_always_on_top(true).unwrap();
|
||||
main_window.set_focus().unwrap();
|
||||
main_window.show().unwrap();
|
||||
async_runtime::spawn(async move {
|
||||
show_coco(app_handle).await;
|
||||
});
|
||||
}
|
||||
}
|
||||
}
|
||||
@@ -133,17 +128,18 @@ fn _register_shortcut_upon_start(app: &App, shortcut: Shortcut) {
|
||||
tauri_plugin_global_shortcut::Builder::new()
|
||||
.with_handler(move |app, scut, event| {
|
||||
if scut == &shortcut {
|
||||
let window = app.get_window(MAIN_WINDOW_LABEL).unwrap();
|
||||
let window = app.get_webview_window(MAIN_WINDOW_LABEL).unwrap();
|
||||
if let ShortcutState::Pressed = event.state() {
|
||||
let app_handle = app.clone();
|
||||
|
||||
if window.is_visible().unwrap() {
|
||||
window.hide().unwrap();
|
||||
async_runtime::spawn(async move {
|
||||
hide_coco(app_handle).await;
|
||||
});
|
||||
} else {
|
||||
// dbg!("showing window");
|
||||
move_window_to_active_monitor(&window);
|
||||
window.set_visible_on_all_workspaces(true).unwrap();
|
||||
window.set_always_on_top(true).unwrap();
|
||||
window.set_focus().unwrap();
|
||||
window.show().unwrap();
|
||||
async_runtime::spawn(async move {
|
||||
show_coco(app_handle).await;
|
||||
});
|
||||
}
|
||||
}
|
||||
}
|
||||
|
||||
@@ -0,0 +1,85 @@
|
||||
use std::{path::Path, process::Command};
|
||||
use tauri::{AppHandle, Runtime};
|
||||
use tauri_plugin_shell::ShellExt;
|
||||
|
||||
enum LinuxDesktopEnvironment {
|
||||
Gnome,
|
||||
Kde,
|
||||
}
|
||||
|
||||
impl LinuxDesktopEnvironment {
|
||||
// This impl is based on: https://wiki.archlinux.org/title/Desktop_entries#Usage
|
||||
fn launch_app_via_desktop_file<P: AsRef<Path>>(&self, file: P) -> Result<(), String> {
|
||||
let path = file.as_ref();
|
||||
if !path.try_exists().map_err(|e| e.to_string())? {
|
||||
return Err(format!("desktop file [{}] does not exist", path.display()));
|
||||
}
|
||||
|
||||
let cmd_output = match self {
|
||||
Self::Gnome => {
|
||||
let uri = path
|
||||
.file_stem()
|
||||
.expect("the desktop file should contain a file stem part");
|
||||
|
||||
Command::new("gtk-launch")
|
||||
.arg(uri)
|
||||
.output()
|
||||
.map_err(|e| e.to_string())?
|
||||
}
|
||||
Self::Kde => Command::new("kde-open")
|
||||
.arg(path)
|
||||
.output()
|
||||
.map_err(|e| e.to_string())?,
|
||||
};
|
||||
|
||||
if !cmd_output.status.success() {
|
||||
return Err(format!(
|
||||
"failed to launch app via desktop file [{}], underlying command stderr [{}]",
|
||||
path.display(),
|
||||
String::from_utf8_lossy(&cmd_output.stderr)
|
||||
));
|
||||
}
|
||||
|
||||
Ok(())
|
||||
}
|
||||
}
|
||||
|
||||
fn get_linux_desktop_environment() -> Option<LinuxDesktopEnvironment> {
|
||||
let de_os_str = std::env::var_os("XDG_CURRENT_DESKTOP")?;
|
||||
let de_str = de_os_str
|
||||
.into_string()
|
||||
.expect("$XDG_CURRENT_DESKTOP should be UTF-8 encoded");
|
||||
|
||||
let de = match de_str.as_str() {
|
||||
"GNOME" => LinuxDesktopEnvironment::Gnome,
|
||||
"KDE" => LinuxDesktopEnvironment::Kde,
|
||||
|
||||
unsupported_de => unimplemented!(
|
||||
"This desktop environment [{}] has not been supported yet",
|
||||
unsupported_de
|
||||
),
|
||||
};
|
||||
|
||||
Some(de)
|
||||
}
|
||||
|
||||
/// Homemade open() function to support open Linux applications via the `.desktop` file.
|
||||
//
|
||||
// tauri_plugin_shell::open() is deprecated, but we still use it.
|
||||
#[allow(deprecated)]
|
||||
pub async fn open<R: Runtime>(app_handle: AppHandle<R>, path: String) -> Result<(), String> {
|
||||
if cfg!(target_os = "linux") {
|
||||
let borrowed_path = Path::new(&path);
|
||||
if let Some(file_extension) = borrowed_path.extension() {
|
||||
if file_extension == "desktop" {
|
||||
let desktop_environment = get_linux_desktop_environment().expect("The Linux OS is running without a desktop, Coco could never run in such a environment");
|
||||
return desktop_environment.launch_app_via_desktop_file(path);
|
||||
}
|
||||
}
|
||||
}
|
||||
|
||||
app_handle
|
||||
.shell()
|
||||
.open(path, None)
|
||||
.map_err(|e| e.to_string())
|
||||
}
|
||||
|
||||
@@ -31,15 +31,19 @@
|
||||
"visible": false,
|
||||
"windowEffects": {
|
||||
"effects": [],
|
||||
"radius": 12
|
||||
}
|
||||
"radius": 6
|
||||
},
|
||||
"visibleOnAllWorkspaces": true,
|
||||
"alwaysOnTop": true
|
||||
},
|
||||
{
|
||||
"label": "settings",
|
||||
"title": "Coco AI Settings",
|
||||
"url": "/ui/settings",
|
||||
"width": 1000,
|
||||
"minWidth": 1000,
|
||||
"height": 700,
|
||||
"minHeight": 700,
|
||||
"center": true,
|
||||
"transparent": true,
|
||||
"maximizable": false,
|
||||
@@ -51,6 +55,26 @@
|
||||
"effects": ["sidebar"],
|
||||
"state": "active"
|
||||
}
|
||||
},
|
||||
{
|
||||
"label": "check",
|
||||
"title": "Coco AI Update",
|
||||
"url": "/ui/check",
|
||||
"width": 340,
|
||||
"minWidth": 340,
|
||||
"height": 260,
|
||||
"minHeight": 260,
|
||||
"center": false,
|
||||
"transparent": true,
|
||||
"maximizable": false,
|
||||
"skipTaskbar": false,
|
||||
"dragDropEnabled": false,
|
||||
"hiddenTitle": true,
|
||||
"visible": false,
|
||||
"windowEffects": {
|
||||
"effects": ["sidebar"],
|
||||
"state": "active"
|
||||
}
|
||||
}
|
||||
],
|
||||
"security": {
|
||||
@@ -90,7 +114,7 @@
|
||||
"icons/StoreLogo.png"
|
||||
],
|
||||
"macOS": {
|
||||
"minimumSystemVersion": "12.0",
|
||||
"minimumSystemVersion": "10.12",
|
||||
"hardenedRuntime": true,
|
||||
"dmg": {
|
||||
"appPosition": {
|
||||
@@ -103,7 +127,7 @@
|
||||
}
|
||||
}
|
||||
},
|
||||
"resources": ["assets", "icons"]
|
||||
"resources": ["assets/**/*", "icons"]
|
||||
},
|
||||
"plugins": {
|
||||
"features": {
|
||||
|
||||
124
src/api/axiosRequest.ts
Normal file
@@ -0,0 +1,124 @@
|
||||
import axios from "axios";
|
||||
|
||||
import { useAppStore } from "@/stores/appStore";
|
||||
|
||||
import {
|
||||
handleChangeRequestHeader,
|
||||
handleConfigureAuth,
|
||||
// handleAuthError,
|
||||
// handleGeneralError,
|
||||
handleNetworkError,
|
||||
} from "./tools";
|
||||
|
||||
type Fn = (data: FcResponse<any>) => unknown;
|
||||
|
||||
interface IAnyObj {
|
||||
[index: string]: unknown;
|
||||
}
|
||||
|
||||
interface FcResponse<T> {
|
||||
errno: string;
|
||||
errmsg: string;
|
||||
data: T;
|
||||
}
|
||||
|
||||
axios.interceptors.request.use((config) => {
|
||||
config = handleChangeRequestHeader(config);
|
||||
config = handleConfigureAuth(config);
|
||||
// console.log("config", config);
|
||||
return config;
|
||||
});
|
||||
|
||||
axios.interceptors.response.use(
|
||||
(response) => {
|
||||
if (response.status !== 200) return Promise.reject(response.data);
|
||||
// handleAuthError(response.data.errno);
|
||||
// handleGeneralError(response.data.errno, response.data.errmsg);
|
||||
return response;
|
||||
},
|
||||
(err) => {
|
||||
handleNetworkError(err?.response?.status);
|
||||
return Promise.reject(err?.response);
|
||||
}
|
||||
);
|
||||
|
||||
export const handleApiError = (error: any) => {
|
||||
const addError = useAppStore.getState().addError;
|
||||
|
||||
let message = "Request failed";
|
||||
|
||||
if (error.response) {
|
||||
// Server error response
|
||||
message =
|
||||
error.response.data?.message || `Error (${error.response.status})`;
|
||||
} else if (error.request) {
|
||||
// Request failed to send
|
||||
message = "Network connection failed";
|
||||
} else {
|
||||
// Other errors
|
||||
message = error.message;
|
||||
}
|
||||
|
||||
console.error(error);
|
||||
addError(message, "error");
|
||||
return error;
|
||||
};
|
||||
|
||||
export const Get = <T>(
|
||||
url: string,
|
||||
params: IAnyObj = {},
|
||||
clearFn?: Fn
|
||||
): Promise<[any, FcResponse<T> | undefined]> =>
|
||||
new Promise((resolve) => {
|
||||
const appStore = JSON.parse(localStorage.getItem("app-store") || "{}");
|
||||
|
||||
let baseURL = appStore.state?.endpoint_http;
|
||||
if (!baseURL || baseURL === "undefined") {
|
||||
baseURL = "";
|
||||
}
|
||||
|
||||
axios
|
||||
.get(baseURL + url, { params })
|
||||
.then((result) => {
|
||||
let res: FcResponse<T>;
|
||||
if (clearFn !== undefined) {
|
||||
res = clearFn(result?.data) as unknown as FcResponse<T>;
|
||||
} else {
|
||||
res = result?.data as FcResponse<T>;
|
||||
}
|
||||
resolve([null, res as FcResponse<T>]);
|
||||
})
|
||||
.catch((err) => {
|
||||
handleApiError(err);
|
||||
resolve([err, undefined]);
|
||||
});
|
||||
});
|
||||
|
||||
export const Post = <T>(
|
||||
url: string,
|
||||
data: IAnyObj,
|
||||
params: IAnyObj = {},
|
||||
headers: IAnyObj = {}
|
||||
): Promise<[any, FcResponse<T> | undefined]> => {
|
||||
return new Promise((resolve) => {
|
||||
const appStore = JSON.parse(localStorage.getItem("app-store") || "{}");
|
||||
|
||||
let baseURL = appStore.state?.endpoint_http
|
||||
if (!baseURL || baseURL === "undefined") {
|
||||
baseURL = "";
|
||||
}
|
||||
|
||||
axios
|
||||
.post(baseURL + url, data, {
|
||||
params,
|
||||
headers,
|
||||
} as any)
|
||||
.then((result) => {
|
||||
resolve([null, result.data as FcResponse<T>]);
|
||||
})
|
||||
.catch((err) => {
|
||||
handleApiError(err);
|
||||
resolve([err, undefined]);
|
||||
});
|
||||
});
|
||||
};
|
||||
@@ -1,9 +1,8 @@
|
||||
import { fetch } from "@tauri-apps/plugin-http";
|
||||
import { invoke } from "@tauri-apps/api/core";
|
||||
|
||||
import { clientEnv } from "@/utils/env";
|
||||
import { useLogStore } from "@/stores/logStore";
|
||||
|
||||
import { get_server_token } from "@/commands";
|
||||
interface FetchRequestConfig {
|
||||
url: string;
|
||||
method?: "GET" | "POST" | "PUT" | "DELETE";
|
||||
@@ -63,8 +62,8 @@ export const tauriFetch = async <T = any>({
|
||||
}
|
||||
|
||||
const server_id = connectStore.state?.currentService?.id || "default_coco_server"
|
||||
const res: any = await invoke("get_server_token", {id: server_id});
|
||||
|
||||
const res: any = await get_server_token(server_id);
|
||||
|
||||
headers["X-API-TOKEN"] = headers["X-API-TOKEN"] || res?.access_token || undefined;
|
||||
|
||||
// debug API
|
||||
|
||||
73
src/api/tools.ts
Normal file
@@ -0,0 +1,73 @@
|
||||
export const handleChangeRequestHeader = (config: any) => {
|
||||
config["xxxx"] = "xxx";
|
||||
return config;
|
||||
};
|
||||
|
||||
export const handleConfigureAuth = (config: any) => {
|
||||
// config.headers["X-API-TOKEN"] = localStorage.getItem("token") || "";
|
||||
|
||||
const headersStr = localStorage.getItem("headers") || "{}";
|
||||
const headers = JSON.parse(headersStr);
|
||||
// console.log("headers:", headers);
|
||||
|
||||
config.headers = {
|
||||
...config.headers,
|
||||
...headers,
|
||||
}
|
||||
// console.log("config.headers", config.headers)
|
||||
return config;
|
||||
};
|
||||
|
||||
export const handleNetworkError = (errStatus?: number): void => {
|
||||
const networkErrMap: any = {
|
||||
"400": "Bad Request", // token invalid
|
||||
"401": "Unauthorized, please login again",
|
||||
"403": "Access Denied",
|
||||
"404": "Resource Not Found",
|
||||
"405": "Method Not Allowed",
|
||||
"408": "Request Timeout",
|
||||
"500": "Internal Server Error",
|
||||
"501": "Not Implemented",
|
||||
"502": "Bad Gateway",
|
||||
"503": "Service Unavailable",
|
||||
"504": "Gateway Timeout",
|
||||
"505": "HTTP Version Not Supported",
|
||||
};
|
||||
if (errStatus) {
|
||||
console.error(networkErrMap[errStatus] ?? `Other Connection Error --${errStatus}`);
|
||||
return;
|
||||
}
|
||||
|
||||
console.error("Unable to connect to server!");
|
||||
};
|
||||
|
||||
export const handleAuthError = (errno: string): boolean => {
|
||||
const authErrMap: any = {
|
||||
"10031": "Login expired, please login again", // token invalid
|
||||
"10032": "Session timeout, please login again", // token expired
|
||||
"10033": "Account not bound to role, please contact administrator",
|
||||
"10034": "User not registered, please contact administrator",
|
||||
"10035": "Unable to get third-party platform user with code",
|
||||
"10036": "Account not linked to employee, please contact administrator",
|
||||
"10037": "Account is invalid",
|
||||
"10038": "Account not found",
|
||||
};
|
||||
|
||||
if (authErrMap.hasOwnProperty(errno)) {
|
||||
console.error(authErrMap[errno]);
|
||||
// Authorization error, logout account
|
||||
// logout();
|
||||
return false;
|
||||
}
|
||||
|
||||
return true;
|
||||
};
|
||||
|
||||
export const handleGeneralError = (errno: string, errmsg: string): boolean => {
|
||||
if (errno !== "0") {
|
||||
console.error(errmsg);
|
||||
return false;
|
||||
}
|
||||
|
||||
return true;
|
||||
};
|
||||
BIN
src/assets/images/ReadAloud/back-dark.png
Executable file
|
After Width: | Height: | Size: 1.3 KiB |
BIN
src/assets/images/ReadAloud/back-light.png
Executable file
|
After Width: | Height: | Size: 1.3 KiB |
BIN
src/assets/images/ReadAloud/close-dark.png
Executable file
|
After Width: | Height: | Size: 346 B |
BIN
src/assets/images/ReadAloud/close-light.png
Executable file
|
After Width: | Height: | Size: 347 B |
BIN
src/assets/images/ReadAloud/forward-dark.png
Executable file
|
After Width: | Height: | Size: 1.2 KiB |
BIN
src/assets/images/ReadAloud/forward-light.png
Executable file
|
After Width: | Height: | Size: 1.2 KiB |
BIN
src/assets/images/ReadAloud/loading-dark.png
Executable file
|
After Width: | Height: | Size: 485 B |
BIN
src/assets/images/ReadAloud/loading-light.png
Executable file
|
After Width: | Height: | Size: 491 B |
BIN
src/assets/images/ReadAloud/pause-dark.png
Executable file
|
After Width: | Height: | Size: 504 B |
BIN
src/assets/images/ReadAloud/pause-light.png
Executable file
|
After Width: | Height: | Size: 500 B |
BIN
src/assets/images/ReadAloud/play-dark.png
Executable file
|
After Width: | Height: | Size: 203 B |
BIN
src/assets/images/ReadAloud/play-light.png
Executable file
|
After Width: | Height: | Size: 196 B |
|
Before Width: | Height: | Size: 1.8 KiB After Width: | Height: | Size: 1.8 KiB |
|
Before Width: | Height: | Size: 1.8 KiB After Width: | Height: | Size: 1.8 KiB |
BIN
src/assets/images/logo-dark.png
Normal file
|
After Width: | Height: | Size: 3.4 KiB |
BIN
src/assets/images/logo-light.png
Normal file
|
After Width: | Height: | Size: 3.4 KiB |
2
src/commands/index.ts
Normal file
@@ -0,0 +1,2 @@
|
||||
export * from './servers';
|
||||
export * from './system';
|
||||