Compare commits
230 Commits
Commit SHA1s:

8f992bfa92  e7dd27c744  7914836c3e  b37bf1f7c7  419d9d55c5
d3ed54c771  8f26dbcbe6  663873ae14  286b1be212  37221782b0
644e291105  aae6984aa7  dbd296d399  e2ad25967d  21b61d80d8
9f4c693ac4  45c27cac56  e46035afd4  1004bb73f4  d664fa7271
067fb7144f  579f91f3aa  abe2aecedf  e8f9a4e627  22b1558e8b
ca3b514a65  c694c4eda9  ac835c76aa  25bbab7432  cca00e944e
e78fe4ac89  60fd79f1fa  5c0a865822  5b50e4b51b  b97386a827
29aa26af94  3650d9914c  f26031047c  c8719926be  f1dfc5c730
74ed642a42  5a17173620  29d14ff931  ad01504766  57ab08fb6d
db5c09f80c  b1e2c6961d  3f4abe51e5  060c09e11c  657df482bf
f4f7732927  5e536e1444  2b48cdf84a  bc37616506  07bcd80776
7b8b396368  823a95d601  af0b98a41b  7d0e7cd7dc  e56d6b1b60
941cf96a07  14fbf2ac5d  494e2f0d8a  e3a3849fa4  0b5e31a476
c8a723ed9d  aaf4bf2737  24b0123a61  e8bd970cdb  dd3be3a819
5b034c28ac  b17949fe29  5d37420109  1d3ceb0c70  4d11afe18e
0c0291c8c0  cca672b2cb  5b27488402  c1c4e0db7b  074a7c8b0a
bc524e19db  05f70d26d9  ab26dc7c6a  6ff6b46139  119fd87a25
de226a8fa4  6865957725  87818d69ed  38b67d01b8  a4f4a24730
87bd3d020f  825ac5d565  f21a35e15d  6e90b28204  e92e5e5158
2ac81566c6  b004670dec  a426e33e6b  bb7dd6bf7c  37c5f2de24
ab6c25fe96  1fb464df09  65aa75043f  79dcc7b4ec  3d29cfe235
aea3a7ba98  190dfc6ecd  316a7940d6  acfc1bb32d  c4d178dc2d
6333c697d5  810541494f  e45dc2acbe  2d1ccb9744  406f3b31e9
f51dd81014  3b38cbfb6c  a4483ba277  bf46979b80  070f171ad4
3180704a0d  b3f68697ce  69d2b4b834  6837286061  a431ead22a
7ec41dfe80  06053e9fd9  70b048fba3  45083f829b  e4f6fb8e98
ee182b22da  a37e22c227  d75ab1018d  40ad066e69  a2a5a9f8fe
5fd9339e56  a8a9208b1f  8c9a2ff441  2251b0af95  560a12ab93
2ff66c0b91  ef4a184233  8422bc03e7  370113129c  cb758ef452
12b9b4bb81  562db19f16  dc5cd9aecb  0b018cd24f  2ed22d3d7c
4ce9561eb7  3aeb39b3af  27e99d4629  df70276a54  6553a8f5d3
4ebbc9ec6e  4208633556  fc43fbe798  b5bb9105d4  b6ebd6e5f8
22216491b6  44ca66259c  be3cae36e2  35ea30626f  4bcae5cffb
76458db8ab  5b41e190d3  43ac9a054c  ac485a32cc  e10908a095
78b8908ac8  3c54cb84a8  8ed808c591  7a2dde7448  65451fc63e
5d108a46d3  f9567c2d46  da917e6012  335a906674  a50a636d59
2dd3f776e6  40f6aa0ccd  4da9e024e0  c20bba51f5  0a62a2095b
5677995185  ec4e5e7d1d  1df5265b1a  fb8a4684dc  0b609e570d
f91f6bdc17  57590f3b57  c18f9ea154  441875d9b4  eddf9075bb
9eac8f8a8e  515260c43f  118de0e80b  19ce896fdc  4a41ea5d8b
880e1206ce  1e6d9f9550  ff0faf425f  1fbf5d6552  db41e817c3
1296755bc5  d410f20864  61d0a3b79a  b24319b649  3c0fb24548
2fcbed0381  7444347e0c  725ce042de  3b67de5387  9b53a026ff
9ea7dbf3aa  55622911ac  92f78ad08c  f690dbaab2  210efe763d
f23498afa0  a80a5d928f  b733bb5516  5046754534  f557f7e780
18feb2d690  af59f2fe9f  5e1bb54d5e  33fa516aad  d2c1cf513d
.github/workflows/release.yml (116 changed lines)

@@ -9,10 +9,16 @@ on:
 jobs:
   create-release:
     runs-on: ubuntu-latest
+    outputs:
+      APP_VERSION: ${{ steps.get-version.outputs.APP_VERSION }}
+      RELEASE_BODY: ${{ steps.get-changelog.outputs.RELEASE_BODY }}
+
     steps:
       - uses: actions/checkout@v4
         with:
          fetch-depth: 0

       - name: Set output
         id: vars
         run: echo "tag=${GITHUB_REF#refs/*/}" >> $GITHUB_OUTPUT
@@ -22,11 +28,28 @@ jobs:
         with:
           node-version: 20

+      - name: Get build version
+        shell: bash
+        id: get-version
+        run: |
+          PACKAGE_VERSION=$(jq -r '.version' package.json)
+          CARGO_VERSION=$(grep -m 1 '^version =' src-tauri/Cargo.toml | sed -E 's/.*"([^"]+)".*/\1/')
+          if [ "$PACKAGE_VERSION" != "$CARGO_VERSION" ]; then
+            echo "::error::Version mismatch!"
+          else
+            echo "Version match: $PACKAGE_VERSION"
+          fi
+          echo "APP_VERSION=$PACKAGE_VERSION" >> $GITHUB_OUTPUT
+
       - name: Generate changelog
-        id: create_release
-        run: npx changelogithub --draft --name ${{ steps.vars.outputs.tag }}
+        id: get-changelog
+        run: |
+          CHANGELOG_BODY=$(npx changelogithub --draft --name ${{ steps.vars.outputs.tag }})
+          echo "RELEASE_BODY<<EOF" >> $GITHUB_OUTPUT
+          echo "$CHANGELOG_BODY" >> $GITHUB_OUTPUT
+          echo "EOF" >> $GITHUB_OUTPUT
         env:
-          GITHUB_TOKEN: ${{ secrets.RELEASE_TOKEN }}
+          GITHUB_TOKEN: ${{ secrets.GITHUB_TOKEN }}

   build-app:
     needs: create-release
@@ -52,11 +75,23 @@ jobs:
           target: "x86_64-unknown-linux-gnu"
         - platform: "ubuntu-22.04-arm"
           target: "aarch64-unknown-linux-gnu"
+
+    env:
+      APP_VERSION: ${{ needs.create-release.outputs.APP_VERSION }}
+
     runs-on: ${{ matrix.platform }}
     steps:
       - name: Checkout repository
        uses: actions/checkout@v4

+      - name: Checkout dependency repository
+        uses: actions/checkout@v4
+        with:
+          repository: 'infinilabs/pizza'
+          ssh-key: ${{ secrets.SSH_PRIVATE_KEY }}
+          submodules: recursive
+          ref: main
+          path: pizza
+
       - name: Setup node
         uses: actions/setup-node@v4
         with:
@@ -65,17 +100,31 @@ jobs:
         with:
           version: latest

-      - name: Install rust target
-        run: rustup target add ${{ matrix.target }}
-
       - name: Install dependencies (ubuntu only)
         if: startsWith(matrix.platform, 'ubuntu-22.04')
         run: |
           sudo apt-get update
           sudo apt-get install -y libwebkit2gtk-4.1-dev libappindicator3-dev librsvg2-dev patchelf xdg-utils

-      - name: Install Rust stable
-        run: rustup toolchain install stable
+      - name: Add Rust build target
+        working-directory: src-tauri
+        shell: bash
+        run: |
+          rustup target add ${{ matrix.target }} || true
+
+      - name: Add pizza engine as a dependency
+        working-directory: src-tauri
+        shell: bash
+        run: |
+          BUILD_ARGS="--target ${{ matrix.target }}"
+          if [[ "${{matrix.target }}" != "i686-pc-windows-msvc" ]]; then
+            echo "Adding pizza engine as a dependency for ${{matrix.platform }}-${{matrix.target }}"
+            ( cargo add --path ../pizza/lib/engine --features query_string_parser,persistence )
+            BUILD_ARGS+=" --features use_pizza_engine"
+          else
+            echo "Skipping pizza engine dependency for ${{matrix.platform }}-${{matrix.target }}"
+          fi
+          echo "BUILD_ARGS=${BUILD_ARGS}" >> $GITHUB_ENV

       - name: Rust cache
         uses: swatinem/rust-cache@v2
@@ -91,28 +140,8 @@ jobs:
       - name: Install app dependencies and build web
         run: pnpm install --frozen-lockfile

-      - name: Set up SSH agent for private repository clone
-        if: matrix.target != 'i686-pc-windows-msvc'
-        uses: webfactory/ssh-agent@v0.9.0
-        with:
-          ssh-private-key: ${{ secrets.SSH_PRIVATE_KEY }}
-
-      - name: Add Git server to known hosts
-        if: matrix.platform != 'windows-latest'
-        run: |
-          mkdir -p ~/.ssh
-          ssh-keyscan github.com >> ~/.ssh/known_hosts
-          chmod 600 ~/.ssh/known_hosts
-
-      - name: Pizza engine features setup
-        if: matrix.target != 'i686-pc-windows-msvc'
-        run: |
-          make add-dep-pizza-engine
-          rustup target add ${{ matrix.target}} --toolchain nightly-2025-02-28
-
-      - name: Build the app with ${{ matrix.platform }}
+      - name: Build the coco at ${{ matrix.platform}} for ${{ matrix.target }} @ ${{ env.APP_VERSION }}
         uses: tauri-apps/tauri-action@v0
-        if: matrix.target != 'i686-pc-windows-msvc'
         env:
           CI: false
           PLATFORM: ${{ matrix.platform }}
@@ -127,31 +156,8 @@ jobs:
           APPLE_TEAM_ID: ${{ secrets.APPLE_TEAM_ID }}
         with:
           tagName: ${{ github.ref_name }}
-          releaseName: Coco ${{ needs.create-release.outputs.APP_VERSION }}
-          releaseBody: ""
+          releaseName: Coco ${{ env.APP_VERSION }}
+          releaseBody: "${{ needs.create-release.outputs.RELEASE_BODY }}"
           releaseDraft: true
           prerelease: false
-          args: --target ${{ matrix.target }} --features use_pizza_engine
+          args: ${{ env.BUILD_ARGS }}
-
-      - name: Build the app with ${{ matrix.platform }} (windows i686 only)
-        uses: tauri-apps/tauri-action@v0
-        if: matrix.target == 'i686-pc-windows-msvc'
-        env:
-          CI: false
-          PLATFORM: ${{ matrix.platform }}
-          GITHUB_TOKEN: ${{ secrets.RELEASE_TOKEN }}
-          TAURI_SIGNING_PRIVATE_KEY: ${{ secrets.TAURI_SIGNING_PRIVATE_KEY }}
-          TAURI_SIGNING_PRIVATE_KEY_PASSWORD: ""
-          APPLE_CERTIFICATE: ${{ secrets.APPLE_CERTIFICATE }}
-          APPLE_CERTIFICATE_PASSWORD: ""
-          APPLE_SIGNING_IDENTITY: ${{ secrets.APPLE_SIGNING_IDENTITY }}
-          APPLE_ID: ${{ secrets.APPLE_ID }}
-          APPLE_PASSWORD: ${{ secrets.APPLE_PASSWORD }}
-          APPLE_TEAM_ID: ${{ secrets.APPLE_TEAM_ID }}
-        with:
-          tagName: ${{ github.ref_name }}
-          releaseName: Coco ${{ needs.create-release.outputs.APP_VERSION }}
-          releaseBody: ""
-          releaseDraft: true
-          prerelease: false
-          args: --target ${{ matrix.target }}
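The new `Get build version` step reads the version once from `package.json`, cross-checks it against `src-tauri/Cargo.toml`, and exposes it (together with the changelog captured through the `RELEASE_BODY<<EOF` heredoc) as job outputs that the build job and the release metadata reuse. A minimal sketch of running the same consistency check locally is shown below; it assumes `jq` is installed and that it is run from the repository root, and the hard `exit 1` on mismatch is this sketch's own addition (the workflow step above only emits an `::error::` annotation).

```bash
#!/usr/bin/env bash
# Sketch: reproduce the workflow's package.json / Cargo.toml version check locally.
set -euo pipefail

PACKAGE_VERSION=$(jq -r '.version' package.json)
CARGO_VERSION=$(grep -m 1 '^version =' src-tauri/Cargo.toml | sed -E 's/.*"([^"]+)".*/\1/')

if [ "$PACKAGE_VERSION" != "$CARGO_VERSION" ]; then
  # The CI step only prints an ::error:: annotation; failing hard here is an
  # assumption of this sketch so a mismatch is caught before tagging a release.
  echo "Version mismatch: package.json=$PACKAGE_VERSION vs Cargo.toml=$CARGO_VERSION" >&2
  exit 1
fi
echo "Version match: $PACKAGE_VERSION"
```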
.github/workflows/rust_code_check.yml (new file, 61 lines)

name: Rust Code Check

on:
  pull_request:
    # Only run it when Rust code changes
    paths:
      - 'src-tauri/**'

jobs:
  check:
    strategy:
      matrix:
        platform: [ubuntu-latest, windows-latest, macos-latest]

    runs-on: ${{ matrix.platform }}

    steps:
      - uses: actions/checkout@v4

      - name: Checkout dependency (pizza-engine) repository
        uses: actions/checkout@v4
        with:
          repository: 'infinilabs/pizza'
          ssh-key: ${{ secrets.SSH_PRIVATE_KEY }}
          submodules: recursive
          ref: main
          path: pizza

      - name: Install dependencies (ubuntu only)
        if: startsWith(matrix.platform, 'ubuntu-latest')
        run: |
          sudo apt-get update
          sudo apt-get install -y libwebkit2gtk-4.1-dev libappindicator3-dev librsvg2-dev patchelf xdg-utils

      - name: Add pizza engine as a dependency
        working-directory: src-tauri
        shell: bash
        run: cargo add --path ../pizza/lib/engine --features query_string_parser,persistence

      - name: Format check
        working-directory: src-tauri
        shell: bash
        run: |
          rustup component add rustfmt
          cargo fmt --all --check

      - name: Check compilation (Without Pizza engine enabled)
        working-directory: ./src-tauri
        run: cargo check

      - name: Check compilation (With Pizza engine enabled)
        working-directory: ./src-tauri
        run: cargo check --features use_pizza_engine

      - name: Run tests (Without Pizza engine)
        working-directory: ./src-tauri
        run: cargo test

      - name: Run tests (With Pizza engine)
        working-directory: ./src-tauri
        run: cargo test --features use_pizza_engine
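The checks above can be run locally before opening a pull request. A rough sketch, assuming the private `infinilabs/pizza` repository has already been cloned to `../pizza` relative to `src-tauri` (as the checkout step arranges) and that `rustup` is available:

```bash
# Sketch: local equivalent of the rust_code_check.yml steps.
cd src-tauri

# Wire in the pizza engine the same way the workflow does
# (assumes ../pizza/lib/engine exists).
cargo add --path ../pizza/lib/engine --features query_string_parser,persistence

rustup component add rustfmt
cargo fmt --all --check

cargo check                               # feature `use_pizza_engine` disabled
cargo check --features use_pizza_engine   # feature enabled
cargo test
cargo test --features use_pizza_engine
```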
.vscode/settings.json (7 changed lines)

@@ -8,6 +8,8 @@
     "clsx",
     "codegen",
     "dataurl",
+    "deeplink",
+    "deepthink",
     "dtolnay",
     "dyld",
     "elif",
@@ -30,6 +32,8 @@
     "localstorage",
     "lucide",
     "maximizable",
+    "mdast",
+    "meval",
     "Minimizable",
     "msvc",
     "nord",
@@ -39,9 +43,11 @@
     "overscan",
     "partialize",
     "patchelf",
+    "Quicklink",
     "Raycast",
     "rehype",
     "reqwest",
+    "rerank",
     "rgba",
     "rustup",
     "screenshotable",
@@ -56,6 +62,7 @@
     "traptitech",
     "unlisten",
     "unlistener",
+    "unlisteners",
     "unminimize",
     "uuidv",
     "VITE",
@@ -91,6 +91,8 @@ pnpm tauri build
 
 - [Coco App Documentation](https://docs.infinilabs.com/coco-app/main/)
 - [Coco Server Documentation](https://docs.infinilabs.com/coco-server/main/)
+- [DeepWiki Coco App](https://deepwiki.com/infinilabs/coco-app)
+- [DeepWiki Coco Server](https://deepwiki.com/infinilabs/coco-server)
 - [Tauri Documentation](https://tauri.app/)
 
 ## Contributors
@@ -1,21 +1,35 @@
 ---
 weight: 10
-title: "Mac OS"
+title: "macOS"
 asciinema: true
 ---
 
-# Mac OS
+# macOS
 
 ## Download Coco AI
 
-Goto [https://coco.rs/](https://coco.rs/)
+Go to [coco.rs](https://coco.rs/) and download the package for your architecture:
 
-{{% load-img "/img/download-mac-app.png" "" %}}
+{{% load-img "/img/macos/mac-download-app.png" "" %}}
+
+It should be placed in your "Downloads" folder:
+
+{{% load-img "/img/macos/mac-zip-file.png" "" %}}
 
 ## Unzip DMG file
 
-{{% load-img "/img/unzip-dmg-file.png" "" %}}
+Unzip the file:
+
+{{% load-img "/img/macos/mac-unzip-zip-file.png" "" %}}
+
+You will get a `dmg` file:
+
+{{% load-img "/img/macos/mac-dmg.png" "" %}}
 
 ## Drag to Application Folder
 
-{{% load-img "/img/drag-to-application-folder.png" "" %}}
+Double-click the `dmg` file; a window will pop up. Then drag the "Coco-AI" app to
+your "Applications" folder:
+
+{{% load-img "/img/macos/drag-to-app-folder.png" "" %}}
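For terminal users, the same download-unzip-drag flow can be scripted. This is only an illustrative sketch: the archive, image, and volume names below are hypothetical placeholders, since the actual file names depend on the release that was downloaded.

```bash
# Sketch: command-line equivalent of the macOS install steps above.
# All file and volume names are placeholders; adjust to the real download.
cd ~/Downloads
unzip Coco-AI.zip                    # produces the .dmg described above
hdiutil attach Coco-AI.dmg           # mounts the image, e.g. at /Volumes/Coco-AI
cp -R "/Volumes/Coco-AI/Coco-AI.app" /Applications/
hdiutil detach "/Volumes/Coco-AI"    # unmount once the copy has finished
```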
@@ -14,7 +14,9 @@ asciinema: true
 [if_x11]: https://unix.stackexchange.com/q/202891/498440
 
 
-## Goto [https://coco.rs/](https://coco.rs/)
+## Go to the download page
+
+Download page: [link](https://coco.rs/#install)
 
 ## Download the package
 
@@ -8,12 +8,107 @@ title: "Release Notes"
 Information about release notes of Coco Server is provided here.
 
 ## Latest (In development)
 
 ### ❌ Breaking changes
 
 ### 🚀 Features
 
 ### 🐛 Bug fix
 
+- fix: correct enter key behavior #828
+
 ### ✈️ Improvements
 
-## 0.5.1 (2025-05-31)
+- chore: web component add notification component #825
+
+## 0.7.0 (2025-07-25)
+
+### ❌ Breaking changes
+
+### 🚀 Features
+
+- feat: file search using spotlight #705
+- feat: voice input support in both search and chat modes #732
+- feat: text to speech now powered by LLM #750
+- feat: file search for Windows #762
+
+### 🐛 Bug fix
+
+- fix(file search): apply filters before from/size parameters #741
+- fix(file search): searching by name&content does not search file name #743
+- fix: prevent window from hiding when moved on Windows #748
+- fix: unregister ext hotkey when it gets deleted #770
+- fix: indexing apps does not respect search scope config #773
+- fix: restore missing category titles on subpages #772
+- fix: correct incorrect assistant display when quick ai access #779
+- fix: resolved minor issues with voice playback #780
+- fix: fixed incorrect taskbar icon display on linux #783
+- fix: fix data inconsistency issue on secondary pages #784
+- fix: incorrect status when installing extension #789
+- fix: increase read_timeout for HTTP streaming stability #798
+- fix: enter key problem #794
+- fix: fix selection issue after renaming #800
+- fix: fix shortcut issue in windows context menu #804
+- fix: panic caused by "state() called before manage()" #806
+- fix: fix multiline input issue #808
+- fix: fix ctrl+k not working #815
+- fix: fix update window config sync #818
+- fix: fix enter key on subpages #819
+- fix: panic on Ubuntu (GNOME) when opening apps #821
+
+### ✈️ Improvements
+
+- refactor: prioritize stat(2) when checking if a file is dir #737
+- refactor: change File Search ext type to extension #738
+- refactor: create chat & send chat api #739
+- chore: icon support for more file types #740
+- chore: replace meval-rs with our fork to clear dep warning #745
+- refactor: adjusted assistant, datasource, mcp_server interface parameters #746
+- refactor: adjust extension code hierarchy #747
+- chore: bump dep applications-rs #751
+- chore: rename QuickLink/quick_link to Quicklink/quicklink #752
+- chore: assistant params & styles #753
+- chore: make optional fields optional #758
+- chore: search-chat components add formatUrl & think data & icons url #765
+- chore: Coco app http request headers #744
+- refactor: do status code check before deserializing response #767
+- style: splash adapts to the width of mobile phones #768
+- chore: search-chat add language and formatUrl parameters #775
+- chore: not request the interface if not logged in #795
+- refactor: clean up unsupported characters from query string in Win Search #802
+- chore: display backtrace in panic log #805
+
+## 0.6.0 (2025-06-29)
+
+### ❌ Breaking changes
+
+### 🚀 Features
+
+- feat: support `Tab` and `Enter` for delete dialog buttons #700
+- feat: add check for updates #701
+- feat: impl extension store #699
+- feat: support back navigation via delete key #717
+
+### 🐛 Bug fix
+
+- fix: quick ai state synchronous #693
+- fix: toggle extension should register/unregister hotkey #691
+- fix: take coco server back on refresh #696
+- fix: some input fields couldn’t accept spaces #709
+- fix: context menu search not working #713
+- fix: open extension store display #724
+
+### ✈️ Improvements
+
+- refactor: use author/ext_id as extension unique identifier #643
+- refactor: refactoring search api #679
+- chore: continue to chat page display #690
+- chore: improve server list selection with enter key #692
+- chore: add message for latest version check #703
+- chore: log command execution results #718
+- chore: adjust styles and add button reindex #719
+
+## 0.5.0 (2025-06-13)
 
 ### ❌ Breaking changes
 
@@ -34,6 +129,13 @@ Information about release notes of Coco Server is provided here.
 - feat: dynamic log level via env var COCO_LOG #535
 - feat: add quick AI access to search mode #556
 - feat: rerank search results #561
+- feat: ai overview support is enabled with shortcut #597
+- feat: add key monitoring during reset #615
+- feat: calculator extension add description #623
+- feat: support right-click actions after text selection #624
+- feat: add ai overview minimum number of search results configuration #625
+- feat: add internationalized translations of AI-related extensions #632
+- feat: context menu support for secondary pages #680
 
 ### 🐛 Bug fix
 
@@ -55,6 +157,23 @@ Information about release notes of Coco Server is provided here.
 - fix: independent chat window has no data #554
 - fix: resolved navigation error on continue chat action #558
 - fix: make extension search source respect parameter datasource #576
+- fix: fixed issue with incorrect login status #600
+- fix: new chat assistant id not found #603
+- fix: resolve regex error on older macOS versions #605
+- fix: fix chat log update and sorting issues #612
+- fix: resolved an issue where number keys were not working on the web #616
+- fix: do not panic when the datasource specified does not exist #618
+- fix: fixed modifier keys not working with continue chat #619
+- fix: invalid DSL error if input contains multiple lines #620
+- fix: fix ai overview hidden height before message #622
+- fix: tab key hides window in chat mode #641
+- fix: arrow keys still navigated search when menu opened with Cmd+K #642
+- fix: input lost when reopening dialog after search #644
+- fix: web page unmount event #645
+- fix: fix the problem of local path not opening #650
+- fix: number keys not following settings #661
+- fix: fix problem with up and down key indexing #676
+- fix: arrow inserting escape sequences #683
 
 ### ✈️ Improvements
 
@@ -89,6 +208,12 @@ Information about release notes of Coco Server is provided here.
 - chore: mark unavailable server to offline on refresh info #569
 - chore: only show available servers in chat #570
 - refactor: search result related components #571
+- chore: initialize current assistant from history #606
+- chore: add onContextMenu event #629
+- chore: more logs for the setup process #634
+- chore: copy supports http protocol #639
+- refactor: use author/ext_id as extension unique identifier #643
+- chore: add special character filtering #668
 
 ## 0.4.0 (2025-04-27)
 
Binary files:
  docs/static/img/download-mac-app.png            removed  (was 155 KiB)
  docs/static/img/drag-to-application-folder.png  removed  (was 69 KiB)
  docs/static/img/macos/drag-to-app-folder.png    added    (239 KiB)
  docs/static/img/macos/mac-dmg.png               added    (586 KiB)
  docs/static/img/macos/mac-download-app.png      added    (299 KiB)
  docs/static/img/macos/mac-unzip-zip-file.png    added    (650 KiB)
  docs/static/img/macos/mac-zip-file.png          added    (441 KiB)
  docs/static/img/unzip-dmg-file.png              removed  (was 121 KiB)
@@ -1,7 +1,7 @@
 {
   "name": "coco",
   "private": true,
-  "version": "0.5.2",
+  "version": "0.7.1",
   "type": "module",
   "scripts": {
     "dev": "vite",
@@ -18,7 +18,6 @@
     "release-beta": "release-it --preRelease=beta --preReleaseBase=1"
   },
   "dependencies": {
-    "@ant-design/icons": "^6.0.0",
     "@headlessui/react": "^2.2.2",
     "@tauri-apps/api": "^2.5.0",
     "@tauri-apps/plugin-autostart": "~2.2.0",
@@ -27,6 +26,7 @@
     "@tauri-apps/plugin-global-shortcut": "~2.0.0",
     "@tauri-apps/plugin-http": "~2.0.2",
     "@tauri-apps/plugin-log": "~2.4.0",
+    "@tauri-apps/plugin-opener": "^2.2.7",
     "@tauri-apps/plugin-os": "^2.2.1",
     "@tauri-apps/plugin-process": "^2.2.1",
     "@tauri-apps/plugin-shell": "^2.2.1",
@@ -44,6 +44,7 @@
     "i18next-browser-languagedetector": "^8.1.0",
     "lodash-es": "^4.17.21",
     "lucide-react": "^0.461.0",
+    "mdast-util-gfm-autolink-literal": "2.0.0",
     "mermaid": "^11.6.0",
     "nanoid": "^5.1.5",
     "react": "^18.3.1",
@@ -58,6 +59,7 @@
     "remark-breaks": "^4.0.0",
     "remark-gfm": "^4.0.1",
     "remark-math": "^6.0.0",
+    "tailwind-merge": "^3.3.1",
     "tauri-plugin-fs-pro-api": "^2.4.0",
     "tauri-plugin-macos-permissions-api": "^2.3.0",
     "tauri-plugin-screenshots-api": "^2.2.0",
pnpm-lock.yaml (generated, 142 changed lines)

@@ -8,9 +8,6 @@ importers:
 
   .:
     dependencies:
-      '@ant-design/icons':
-        specifier: ^6.0.0
-        version: 6.0.0(react-dom@18.3.1(react@18.3.1))(react@18.3.1)
       '@headlessui/react':
         specifier: ^2.2.2
         version: 2.2.2(react-dom@18.3.1(react@18.3.1))(react@18.3.1)
@@ -35,6 +32,9 @@ importers:
       '@tauri-apps/plugin-log':
        specifier: ~2.4.0
        version: 2.4.0
+      '@tauri-apps/plugin-opener':
+        specifier: ^2.2.7
+        version: 2.2.7
       '@tauri-apps/plugin-os':
        specifier: ^2.2.1
        version: 2.2.1
@@ -86,6 +86,9 @@ importers:
       lucide-react:
        specifier: ^0.461.0
        version: 0.461.0(react@18.3.1)
+      mdast-util-gfm-autolink-literal:
+        specifier: 2.0.0
+        version: 2.0.0
       mermaid:
        specifier: ^11.6.0
        version: 11.6.0
@@ -128,6 +131,9 @@ importers:
       remark-math:
        specifier: ^6.0.0
        version: 6.0.0
+      tailwind-merge:
+        specifier: ^3.3.1
+        version: 3.3.1
       tauri-plugin-fs-pro-api:
        specifier: ^2.4.0
        version: 2.4.0
@@ -185,7 +191,7 @@ importers:
        version: 1.8.8
       '@vitejs/plugin-react':
        specifier: ^4.4.1
-        version: 4.4.1(vite@5.4.19(@types/node@22.15.17)(sass@1.87.0))
+        version: 4.4.1(vite@5.4.19(@types/node@22.15.17)(sass@1.87.0)(terser@5.40.0))
       autoprefixer:
        specifier: ^10.4.21
        version: 10.4.21(postcss@8.5.3)
@@ -218,7 +224,7 @@ importers:
        version: 5.8.3
       vite:
        specifier: ^5.4.19
-        version: 5.4.19(@types/node@22.15.17)(sass@1.87.0)
+        version: 5.4.19(@types/node@22.15.17)(sass@1.87.0)(terser@5.40.0)
 
 packages:
 
@@ -230,23 +236,6 @@ packages:
     resolution: {integrity: sha512-30iZtAPgz+LTIYoeivqYo853f02jBYSd5uGnGpkFV0M3xOt9aN73erkgYAmZU43x4VfqcnLxW9Kpg3R5LC4YYw==}
     engines: {node: '>=6.0.0'}
 
-  '@ant-design/colors@8.0.0':
-    resolution: {integrity: sha512-6YzkKCw30EI/E9kHOIXsQDHmMvTllT8STzjMb4K2qzit33RW2pqCJP0sk+hidBntXxE+Vz4n1+RvCTfBw6OErw==}
-
-  '@ant-design/fast-color@3.0.0':
-    resolution: {integrity: sha512-eqvpP7xEDm2S7dUzl5srEQCBTXZMmY3ekf97zI+M2DHOYyKdJGH0qua0JACHTqbkRnD/KHFQP9J1uMJ/XWVzzA==}
-    engines: {node: '>=8.x'}
-
-  '@ant-design/icons-svg@4.4.2':
-    resolution: {integrity: sha512-vHbT+zJEVzllwP+CM+ul7reTEfBR0vgxFe7+lREAsAA7YGsYpboiq2sQNeQeRvh09GfQgs/GyFEvZpJ9cLXpXA==}
-
-  '@ant-design/icons@6.0.0':
-    resolution: {integrity: sha512-o0aCCAlHc1o4CQcapAwWzHeaW2x9F49g7P3IDtvtNXgHowtRWYb7kiubt8sQPFvfVIVU/jLw2hzeSlNt0FU+Uw==}
-    engines: {node: '>=8'}
-    peerDependencies:
-      react: '>=16.0.0'
-      react-dom: '>=16.0.0'
-
   '@antfu/install-pkg@1.1.0':
     resolution: {integrity: sha512-MGQsmw10ZyI+EJo45CdSER4zEb+p31LpDAFp2Z3gkSd1yqVZGi0Ebx++YTEMonJy4oChEMLsxZ64j8FH6sSqtQ==}
 
@@ -816,6 +805,9 @@ packages:
     resolution: {integrity: sha512-R8gLRTZeyp03ymzP/6Lil/28tGeGEzhx1q2k703KGWRAI1VdvPIXdG70VJc2pAMw3NA6JKL5hhFu1sJX0Mnn/A==}
     engines: {node: '>=6.0.0'}
 
+  '@jridgewell/source-map@0.3.6':
+    resolution: {integrity: sha512-1ZJTZebgqllO79ue2bm3rIGud/bOe0pP5BjSRCRxxYkEZS8STV7zN84UBbiYu7jy+eCKSnVIUgoWWE/tt+shMQ==}
+
   '@jridgewell/sourcemap-codec@1.5.0':
     resolution: {integrity: sha512-gv3ZRaISU3fjPAgNsriBRqGWQL6quFx04YMPW/zD8XMLsU32mhCCbfbO6KZFLjvYpCZ8zyDEgqsgf+PwPaM7GQ==}
 
@@ -993,12 +985,6 @@ packages:
     resolution: {integrity: sha512-c83qWb22rNRuB0UaVCI0uRPNRr8Z0FWnEIvT47jiHAmOIUHbBOg5XvV7pM5x+rKn9HRpjxquDbXYSXr3fAKFcw==}
     engines: {node: '>=12'}
 
-  '@rc-component/util@1.2.1':
-    resolution: {integrity: sha512-AUVu6jO+lWjQnUOOECwu8iR0EdElQgWW5NBv5vP/Uf9dWbAX3udhMutRlkVXjuac2E40ghkFy+ve00mc/3Fymg==}
-    peerDependencies:
-      react: '>=18.0.0'
-      react-dom: '>=18.0.0'
-
   '@react-aria/focus@3.20.2':
     resolution: {integrity: sha512-Q3rouk/rzoF/3TuH6FzoAIKrl+kzZi9LHmr8S5EqLAOyP9TXIKG34x2j42dZsAhrw7TbF9gA8tBKwnCNH4ZV+Q==}
     peerDependencies:
@@ -1259,6 +1245,9 @@ packages:
   '@tauri-apps/plugin-log@2.4.0':
     resolution: {integrity: sha512-j7yrDtLNmayCBOO2esl3aZv9jSXy2an8MDLry3Ys9ZXerwUg35n1Y2uD8HoCR+8Ng/EUgx215+qOUfJasjYrHw==}
 
+  '@tauri-apps/plugin-opener@2.2.7':
+    resolution: {integrity: sha512-uduEyvOdjpPOEeDRrhwlCspG/f9EQalHumWBtLBnp3fRp++fKGLqDOyUhSIn7PzX45b/rKep//ZQSAQoIxobLA==}
+
   '@tauri-apps/plugin-os@2.2.1':
     resolution: {integrity: sha512-cNYpNri2CCc6BaNeB6G/mOtLvg8dFyFQyCUdf2y0K8PIAKGEWdEcu8DECkydU2B+oj4OJihDPD2de5K6cbVl9A==}
 
@@ -1586,6 +1575,9 @@ packages:
     engines: {node: ^6 || ^7 || ^8 || ^9 || ^10 || ^11 || ^12 || >=13.7}
     hasBin: true
 
+  buffer-from@1.1.2:
+    resolution: {integrity: sha512-E+XQCRwSbaaiChtv6k6Dwgc+bx+Bs6vuKJHHl5kox/BaKbhiXzqQOwK4cO22yElGp2OCmjwVhT3HmxgyPGnJfQ==}
+
   bundle-name@4.1.0:
     resolution: {integrity: sha512-tjwM5exMg6BGRI+kNmTntNsvdZS1X8BFYS6tnJ2hdH0kVxM6/eVZ2xy+FqStSWvYmtfFMDLIxurorHwDKfDz5Q==}
     engines: {node: '>=18'}
@@ -1661,9 +1653,6 @@ packages:
     resolution: {integrity: sha512-cYY9mypksY8NRqgDB1XD1RiJL338v/551niynFTGkZOO2LHuB2OmOYxDIe/ttN9AHwrqdum1360G3ald0W9kCg==}
     engines: {node: '>=8'}
 
-  classnames@2.5.1:
-    resolution: {integrity: sha512-saHYOzhIQs6wy2sVxTM6bUDsQO4F50V9RQ22qBpEdCW+I+/Wmke2HOl6lS6dTpdxVhb88/I6+Hs+438c3lfUow==}
-
   cli-boxes@3.0.0:
     resolution: {integrity: sha512-/lzGpEWL/8PfI0BmBOPRwp0c/wFNX1RdUML3jK/RcSBA9T8mZDdQpqYBKtCFTOfQbwPqWEOpjqW+Fnayc0969g==}
     engines: {node: '>=10'}
@@ -1698,6 +1687,9 @@ packages:
   comma-separated-tokens@2.0.3:
     resolution: {integrity: sha512-Fu4hJdvzeylCfQPp9SGWidpzrMs7tTrlu6Vb8XGaRGck8QSNZJJp538Wrb60Lax4fPwR64ViY468OIUTbRlGZg==}
 
+  commander@2.20.3:
+    resolution: {integrity: sha512-GpVkmM8vF2vQUkj2LvZmD35JxeJOLCwJ9cUkugyk2nuhbv3+mJvpLYYt+0+USMxE+oj+ey/lJEnhZw75x/OMcQ==}
+
   commander@4.1.1:
     resolution: {integrity: sha512-NOKm8xhkzAjzFx8B2v5OAHT+u5pRQc2UCa2Vq9jYL/31o2wi9mxBA7LIFs3sV5VSC49z6pEhfbMULvShKj26WA==}
     engines: {node: '>= 6'}
@@ -2643,8 +2635,8 @@ packages:
   mdast-util-from-markdown@2.0.2:
     resolution: {integrity: sha512-uZhTV/8NBuw0WHkPTrCqDOl0zVe1BIng5ZtHoDk49ME1qqcjYmmLmOf0gELgcRMxN4w2iuIeVso5/6QymSrgmA==}
 
-  mdast-util-gfm-autolink-literal@2.0.1:
-    resolution: {integrity: sha512-5HVP2MKaP6L+G6YaxPNjuL0BPrq9orG3TsrZ9YXbA3vDw/ACI4MEsnoDpn6ZNm7GnZgtAcONJyPhOP8tNJQavQ==}
+  mdast-util-gfm-autolink-literal@2.0.0:
+    resolution: {integrity: sha512-FyzMsduZZHSc3i0Px3PQcBT4WJY/X/RCtEJKuybiC6sjPqLv7h1yqAkmILZtuxMSsUyaLUWNp71+vQH2zqp5cg==}
 
   mdast-util-gfm-footnote@2.1.0:
     resolution: {integrity: sha512-sqpDWlsHn7Ac9GNZQMeUzPQSMzR6Wv0WKRNvQRg0KqHh02fpTz69Qc1QSseNX29bhz1ROIyNyxExfawVKTm1GQ==}
@@ -3140,9 +3132,6 @@ packages:
     typescript:
       optional: true
 
-  react-is@18.3.1:
-    resolution: {integrity: sha512-/LLMVyas0ljjAtoYiPqYiL8VWXzUUdThrmU5+n20DZv+a+ClRoevUzw5JxU+Ieh5/c87ytoTBV9G1FiKfNJdmg==}
-
   react-markdown@9.1.0:
     resolution: {integrity: sha512-xaijuJB0kzGiUdG7nc2MOMDUDBWPyGAjZtUrow9XxUeua8IqeP+VlIfAZ3bphpcLTnSZXz6z9jcVC/TCwbfgdw==}
     peerDependencies:
@@ -3349,6 +3338,9 @@ packages:
     resolution: {integrity: sha512-UXWMKhLOwVKb728IUtQPXxfYU+usdybtUrK/8uGE8CQMvrhOpwvzDBwj0QhSL7MQc7vIsISBG8VQ8+IDQxpfQA==}
     engines: {node: '>=0.10.0'}
 
+  source-map-support@0.5.21:
+    resolution: {integrity: sha512-uBHU3L3czsIyYXKX88fdrGovxdSCoTGDRZ6SYXtSRxLZUzHg5P/66Ht6uoUlHu9EZod+inXhKo3qQgwXUT/y1w==}
+
   source-map@0.6.1:
     resolution: {integrity: sha512-UjgapumWlbMhkBgzT7Ykc5YXUT46F0iKu8SGXq0bcwP5dz/h0Plj6enJqjz1Zbq2l5WaqYnrVbwWOWMyF3F47g==}
     engines: {node: '>=0.10.0'}
@@ -3426,6 +3418,9 @@ packages:
   tabbable@6.2.0:
     resolution: {integrity: sha512-Cat63mxsVJlzYvN51JmVXIgNoUokrIaT2zLclCXjRd8boZ0004U4KCs/sToJ75C6sdlByWxpYnb5Boif1VSFew==}
 
+  tailwind-merge@3.3.1:
+    resolution: {integrity: sha512-gBXpgUm/3rp1lMZZrM/w7D8GKqshif0zAymAhbCyIt8KMe+0v9DQ7cdYLR4FHH/cKpdTXb+A/tKKU3eolfsI+g==}
+
   tailwindcss@3.4.17:
     resolution: {integrity: sha512-w33E2aCvSDP0tW9RZuNXadXlkHXqFzSkQew/aIa2i/Sj8fThxwovwlXHSPXTbAHwEIhBFXAedUhP2tueAKP8Og==}
     engines: {node: '>=14.0.0'}
@@ -3443,6 +3438,11 @@ packages:
   tauri-plugin-windows-version-api@2.0.0:
     resolution: {integrity: sha512-tty5n4ASYbXpnsD5ws2iTcTTpDCrSbzRTVp5Bo3UTpYGqlN1gBn2Zk8s3oO4w7VIM5WtJhDM9Jr/UgoTk7tFJQ==}
 
+  terser@5.40.0:
+    resolution: {integrity: sha512-cfeKl/jjwSR5ar7d0FGmave9hFGJT8obyo0z+CrQOylLDbk7X81nPU6vq9VORa5jU30SkDnT2FXjLbR8HLP+xA==}
+    engines: {node: '>=10'}
+    hasBin: true
+
   thenify-all@1.6.0:
     resolution: {integrity: sha512-RNxQH/qI8/t3thXJDwcstUO4zeqo64+Uy/+sNVRBx4Xn2OX+OZ9oP+iJnNFqplFra2ZUVeKCSa2oVWi3T4uVmA==}
     engines: {node: '>=0.8'}
@@ -3777,23 +3777,6 @@ snapshots:
     '@jridgewell/gen-mapping': 0.3.8
     '@jridgewell/trace-mapping': 0.3.25
 
-  '@ant-design/colors@8.0.0':
-    dependencies:
-      '@ant-design/fast-color': 3.0.0
-
-  '@ant-design/fast-color@3.0.0': {}
-
-  '@ant-design/icons-svg@4.4.2': {}
-
-  '@ant-design/icons@6.0.0(react-dom@18.3.1(react@18.3.1))(react@18.3.1)':
-    dependencies:
-      '@ant-design/colors': 8.0.0
-      '@ant-design/icons-svg': 4.4.2
-      '@rc-component/util': 1.2.1(react-dom@18.3.1(react@18.3.1))(react@18.3.1)
-      classnames: 2.5.1
-      react: 18.3.1
-      react-dom: 18.3.1(react@18.3.1)
-
   '@antfu/install-pkg@1.1.0':
     dependencies:
       package-manager-detector: 1.3.0
@@ -4263,6 +4246,12 @@ snapshots:
 
   '@jridgewell/set-array@1.2.1': {}
 
+  '@jridgewell/source-map@0.3.6':
+    dependencies:
+      '@jridgewell/gen-mapping': 0.3.8
+      '@jridgewell/trace-mapping': 0.3.25
+    optional: true
+
   '@jridgewell/sourcemap-codec@1.5.0': {}
 
   '@jridgewell/trace-mapping@0.3.25':
@@ -4430,12 +4419,6 @@ snapshots:
       '@pnpm/network.ca-file': 1.0.2
       config-chain: 1.1.13
 
-  '@rc-component/util@1.2.1(react-dom@18.3.1(react@18.3.1))(react@18.3.1)':
-    dependencies:
-      react: 18.3.1
-      react-dom: 18.3.1(react@18.3.1)
-      react-is: 18.3.1
-
   '@react-aria/focus@3.20.2(react-dom@18.3.1(react@18.3.1))(react@18.3.1)':
     dependencies:
       '@react-aria/interactions': 3.25.0(react-dom@18.3.1(react@18.3.1))(react@18.3.1)
@@ -4640,6 +4623,10 @@ snapshots:
     dependencies:
       '@tauri-apps/api': 2.5.0
 
+  '@tauri-apps/plugin-opener@2.2.7':
+    dependencies:
+      '@tauri-apps/api': 2.5.0
+
   '@tauri-apps/plugin-os@2.2.1':
     dependencies:
       '@tauri-apps/api': 2.5.0
@@ -4881,14 +4868,14 @@ snapshots:
 
   '@ungap/structured-clone@1.3.0': {}
 
-  '@vitejs/plugin-react@4.4.1(vite@5.4.19(@types/node@22.15.17)(sass@1.87.0))':
+  '@vitejs/plugin-react@4.4.1(vite@5.4.19(@types/node@22.15.17)(sass@1.87.0)(terser@5.40.0))':
     dependencies:
       '@babel/core': 7.27.1
       '@babel/plugin-transform-react-jsx-self': 7.27.1(@babel/core@7.27.1)
       '@babel/plugin-transform-react-jsx-source': 7.27.1(@babel/core@7.27.1)
       '@types/babel__core': 7.20.5
       react-refresh: 0.17.0
-      vite: 5.4.19(@types/node@22.15.17)(sass@1.87.0)
+      vite: 5.4.19(@types/node@22.15.17)(sass@1.87.0)(terser@5.40.0)
     transitivePeerDependencies:
       - supports-color
 
@@ -5017,6 +5004,9 @@ snapshots:
       node-releases: 2.0.19
       update-browserslist-db: 1.1.3(browserslist@4.24.5)
 
+  buffer-from@1.1.2:
+    optional: true
+
   bundle-name@4.1.0:
     dependencies:
       run-applescript: 7.0.0
@@ -5087,8 +5077,6 @@ snapshots:
 
   ci-info@4.2.0: {}
 
-  classnames@2.5.1: {}
-
   cli-boxes@3.0.0: {}
 
   cli-cursor@5.0.0:
@@ -5113,6 +5101,9 @@ snapshots:
 
   comma-separated-tokens@2.0.3: {}
 
+  commander@2.20.3:
+    optional: true
+
   commander@4.1.1: {}
 
   commander@7.2.0: {}
@@ -6117,7 +6108,7 @@ snapshots:
     transitivePeerDependencies:
       - supports-color
 
-  mdast-util-gfm-autolink-literal@2.0.1:
+  mdast-util-gfm-autolink-literal@2.0.0:
     dependencies:
       '@types/mdast': 4.0.4
       ccount: 2.0.1
@@ -6165,7 +6156,7 @@ snapshots:
   mdast-util-gfm@3.1.0:
     dependencies:
       mdast-util-from-markdown: 2.0.2
-      mdast-util-gfm-autolink-literal: 2.0.1
+      mdast-util-gfm-autolink-literal: 2.0.0
       mdast-util-gfm-footnote: 2.1.0
       mdast-util-gfm-strikethrough: 2.0.0
       mdast-util-gfm-table: 2.0.0
@@ -6833,8 +6824,6 @@ snapshots:
       react-dom: 18.3.1(react@18.3.1)
       typescript: 5.8.3
 
-  react-is@18.3.1: {}
-
   react-markdown@9.1.0(@types/react@18.3.21)(react@18.3.1):
     dependencies:
       '@types/hast': 3.0.4
@@ -7124,6 +7113,12 @@ snapshots:
 
   source-map-js@1.2.1: {}
 
+  source-map-support@0.5.21:
+    dependencies:
+      buffer-from: 1.1.2
+      source-map: 0.6.1
+    optional: true
+
   source-map@0.6.1:
     optional: true
 
@@ -7200,6 +7195,8 @@ snapshots:
 
   tabbable@6.2.0: {}
 
+  tailwind-merge@3.3.1: {}
+
   tailwindcss@3.4.17:
     dependencies:
       '@alloc/quick-lru': 5.2.0
@@ -7243,6 +7240,14 @@ snapshots:
     dependencies:
       '@tauri-apps/api': 2.5.0
 
+  terser@5.40.0:
+    dependencies:
+      '@jridgewell/source-map': 0.3.6
+      acorn: 8.14.1
+      commander: 2.20.3
+      source-map-support: 0.5.21
+    optional: true
+
   thenify-all@1.6.0:
     dependencies:
       thenify: 3.3.1
@@ -7429,7 +7434,7 @@ snapshots:
       '@types/unist': 3.0.3
       vfile-message: 4.0.2
 
-  vite@5.4.19(@types/node@22.15.17)(sass@1.87.0):
+  vite@5.4.19(@types/node@22.15.17)(sass@1.87.0)(terser@5.40.0):
     dependencies:
       esbuild: 0.21.5
       postcss: 8.5.3
@@ -7438,6 +7443,7 @@ snapshots:
       '@types/node': 22.15.17
       fsevents: 2.3.3
       sass: 1.87.0
+      terser: 5.40.0
 
   void-elements@3.1.0: {}
 
scripts/devWeb.ts (new file, 1 line)

(() => {})();
src-tauri/Cargo.lock (generated, 555 changed lines)

src-tauri/Cargo.toml
@@ -1,9 +1,9 @@
 [package]
 name = "coco"
-version = "0.5.2"
+version = "0.7.1"
 description = "Search, connect, collaborate – all in one place."
 authors = ["INFINI Labs"]
-edition = "2021"
+edition = "2024"
 
 # See more keys and their definitions at https://doc.rust-lang.org/cargo/reference/manifest.html
 [lib]
@@ -44,12 +44,12 @@ use_pizza_engine = []
 [dependencies]
 pizza-common = { git = "https://github.com/infinilabs/pizza-common", branch = "main" }
 
-tauri = { version = "2", features = ["protocol-asset", "macos-private-api", "tray-icon", "image-ico", "image-png", "unstable"] }
+tauri = { version = "2", features = ["protocol-asset", "macos-private-api", "tray-icon", "image-ico", "image-png"] }
 tauri-plugin-shell = "2"
 serde = { version = "1", features = ["derive"] }
 # Need `arbitrary_precision` feature to support storing u128
 # see: https://docs.rs/serde_json/latest/serde_json/struct.Number.html#method.from_u128
-serde_json = { version = "1", features = ["arbitrary_precision"] }
+serde_json = { version = "1", features = ["arbitrary_precision", "preserve_order"] }
 tauri-plugin-http = "2"
 tauri-plugin-websocket = "2"
 tauri-plugin-deep-link = "2.0.0"
@@ -62,7 +62,7 @@ tauri-plugin-drag = "2"
 tauri-plugin-macos-permissions = "2"
 tauri-plugin-fs-pro = "2"
 tauri-plugin-screenshots = "2"
-applications = { git = "https://github.com/infinilabs/applications-rs", rev = "7bb507e6b12f73c96f3a52f0578d0246a689f381" }
+applications = { git = "https://github.com/infinilabs/applications-rs", rev = "31b0c030a0f3bc82275fe12debe526153978671d" }
 tokio-native-tls = "0.3" # For wss connections
 tokio = { version = "1", features = ["full"] }
 tokio-tungstenite = { version = "0.20", features = ["native-tls"] }
@@ -83,12 +83,11 @@ walkdir = "2"
 log = "0.4"
 strsim = "0.10"
 futures-util = "0.3.31"
-url = "2.5.2"
 http = "1.1.0"
 tungstenite = "0.24.0"
 tokio-util = "0.7.14"
 tauri-plugin-windows-version = "2"
-meval = "0.2"
+meval = { git = "https://github.com/infinilabs/meval-rs" }
 chinese-number = "0.7"
 num2words = "1"
 tauri-plugin-log = "2"
@@ -98,9 +97,19 @@ derive_more = { version = "2.0.1", features = ["display"] }
 anyhow = "1.0.98"
 function_name = "0.3.0"
 regex = "1.11.1"
+borrowme = "0.0.15"
+tauri-plugin-opener = "2"
+async-recursion = "1.1.1"
+zip = "4.0.0"
+url = "2.5.2"
+camino = "1.1.10"
+tokio-stream = { version = "0.1.17", features = ["io-util"] }
+cfg-if = "1.0.1"
+sysinfo = "0.35.2"
 
 [target."cfg(target_os = \"macos\")".dependencies]
 tauri-nspanel = { git = "https://github.com/ahkohd/tauri-nspanel", branch = "v2" }
+cocoa = "0.24"
 
 [target."cfg(any(target_os = \"macos\", windows, target_os = \"linux\"))".dependencies]
 tauri-plugin-single-instance = { version = "2.0.0", features = ["deep-link"] }
@@ -119,6 +128,9 @@ strip = true # Ensures debug symbols are removed.
 tauri-plugin-autostart = "^2.2"
 tauri-plugin-global-shortcut = "2"
 tauri-plugin-updater = { git = "https://github.com/infinilabs/plugins-workspace", branch = "v2" }
+# This should be compatible with the semver used by `tauri-plugin-updater`
+semver = { version = "1", features = ["serde"] }
 
 [target."cfg(target_os = \"windows\")".dependencies]
 enigo="0.3"
+windows = { version = "0.61.3", features = ["Win32_Foundation", "Win32_System_Com", "Win32_System_Ole", "Win32_System_Search", "Win32_UI_Shell_PropertiesSystem", "Win32_Data"] }
@@ -1,8 +0,0 @@
-{
-  "id": "AIOverview",
-  "title": "AI Overview",
-  "description": "...",
-  "icon": "font_a-AIOverview",
-  "type": "ai_extension",
-  "enabled": true
-}

@@ -1,9 +0,0 @@
-{
-  "id": "Applications",
-  "platforms": ["macos", "linux", "windows"],
-  "title": "Applications",
-  "description": "...",
-  "icon": "font_Application",
-  "type": "group",
-  "enabled": true
-}

@@ -1,9 +0,0 @@
-{
-  "id": "Calculator",
-  "title": "Calculator",
-  "platforms": ["macos", "linux", "windows"],
-  "description": "...",
-  "icon": "font_Calculator",
-  "type": "calculator",
-  "enabled": true
-}

@@ -1,8 +0,0 @@
-{
-  "id": "QuickAIAccess",
-  "title": "Quick AI Access",
-  "description": "...",
-  "icon": "font_a-QuickAIAccess",
-  "type": "ai_extension",
-  "enabled": true
-}
@@ -1,3 +1,14 @@
 fn main() {
-    tauri_build::build()
+    tauri_build::build();
+
+    // If env var `GITHUB_ACTIONS` exists, we are running in CI, set up the `ci`
+    // attribute
+    if std::env::var("GITHUB_ACTIONS").is_ok() {
+        println!("cargo:rustc-cfg=ci");
+    }
+
+    // Notify `rustc` of this `cfg` attribute to suppress unknown attribute warnings.
+    //
+    // unexpected condition name: `ci`
+    println!("cargo::rustc-check-cfg=cfg(ci)");
 }
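A note on the build script above: emitting `cargo:rustc-cfg=ci` makes `cfg(ci)` evaluate to true for that build, and the `cargo::rustc-check-cfg` line registers the name so rustc does not warn about an unexpected condition. A minimal sketch of how crate code could consume such a cfg (the function here is illustrative, not taken from this repository):

```rust
// Hypothetical consumer of the `ci` cfg emitted by build.rs.
#[cfg(ci)]
fn ci_only_setup() {
    // Only compiled when the build script detected GITHUB_ACTIONS.
    println!("running under CI");
}

#[cfg(not(ci))]
fn ci_only_setup() {
    // Local builds take this branch.
    println!("running locally");
}

fn main() {
    ci_only_setup();
}
```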
@@ -2,7 +2,7 @@
   "$schema": "../gen/schemas/desktop-schema.json",
   "identifier": "default",
   "description": "Capability for the main window",
-  "windows": ["main", "chat", "settings"],
+  "windows": ["main", "chat", "settings", "check"],
   "permissions": [
     "core:default",
     "core:event:allow-emit",
@@ -71,6 +71,7 @@
     "process:default",
     "updater:default",
     "windows-version:default",
-    "log:default"
+    "log:default",
+    "opener:default"
   ]
 }
@@ -1,2 +1,2 @@
 [toolchain]
-channel = "nightly-2025-02-28"
+channel = "nightly-2025-06-26"
@@ -1,10 +1,10 @@
 use crate::common::assistant::ChatRequestMessage;
-use crate::common::http::GetResponse;
+use crate::common::http::{GetResponse, convert_query_params_to_strings};
 use crate::common::register::SearchSourceRegistry;
 use crate::server::http_client::HttpClient;
 use crate::{common, server::servers::COCO_SERVERS};
-use futures::stream::FuturesUnordered;
 use futures::StreamExt;
+use futures::stream::FuturesUnordered;
 use futures_util::TryStreamExt;
 use http::Method;
 use serde_json::Value;
@@ -20,17 +20,15 @@ pub async fn chat_history<R: Runtime>(
     size: u32,
     query: Option<String>,
 ) -> Result<String, String> {
-    let mut query_params: HashMap<String, Value> = HashMap::new();
-    if from > 0 {
-        query_params.insert("from".to_string(), from.into());
-    }
-    if size > 0 {
-        query_params.insert("size".to_string(), size.into());
-    }
+    let mut query_params = Vec::new();
+    // Add from/size as number values
+    query_params.push(format!("from={}", from));
+    query_params.push(format!("size={}", size));
 
     if let Some(query) = query {
         if !query.is_empty() {
-            query_params.insert("query".to_string(), query.into());
+            query_params.push(format!("query={}", query.to_string()));
         }
     }
 
@@ -52,13 +50,11 @@ pub async fn session_chat_history<R: Runtime>(
     from: u32,
     size: u32,
 ) -> Result<String, String> {
-    let mut query_params: HashMap<String, Value> = HashMap::new();
-    if from > 0 {
-        query_params.insert("from".to_string(), from.into());
-    }
-    if size > 0 {
-        query_params.insert("size".to_string(), size.into());
-    }
+    let mut query_params = Vec::new();
+    // Add from/size as number values
+    query_params.push(format!("from={}", from));
+    query_params.push(format!("size={}", size));
 
     let path = format!("/chat/{}/_history", session_id);
 
@@ -75,10 +71,9 @@ pub async fn open_session_chat<R: Runtime>(
     server_id: String,
     session_id: String,
 ) -> Result<String, String> {
-    let query_params = HashMap::new();
     let path = format!("/chat/{}/_open", session_id);
 
-    let response = HttpClient::post(&server_id, path.as_str(), Some(query_params), None)
+    let response = HttpClient::post(&server_id, path.as_str(), None, None)
         .await
         .map_err(|e| format!("Error open session: {}", e))?;
 
@@ -91,10 +86,9 @@ pub async fn close_session_chat<R: Runtime>(
     server_id: String,
     session_id: String,
 ) -> Result<String, String> {
-    let query_params = HashMap::new();
     let path = format!("/chat/{}/_close", session_id);
 
-    let response = HttpClient::post(&server_id, path.as_str(), Some(query_params), None)
+    let response = HttpClient::post(&server_id, path.as_str(), None, None)
        .await
        .map_err(|e| format!("Error close session: {}", e))?;
 
@@ -105,11 +99,12 @@ pub async fn cancel_session_chat<R: Runtime>(
     _app_handle: AppHandle<R>,
     server_id: String,
     session_id: String,
+    query_params: Option<HashMap<String, Value>>,
 ) -> Result<String, String> {
-    let query_params = HashMap::new();
     let path = format!("/chat/{}/_cancel", session_id);
+    let query_params = convert_query_params_to_strings(query_params);
 
-    let response = HttpClient::post(&server_id, path.as_str(), Some(query_params), None)
+    let response = HttpClient::post(&server_id, path.as_str(), query_params, None)
        .await
        .map_err(|e| format!("Error cancel session: {}", e))?;
 
@@ -140,8 +135,13 @@ pub async fn new_chat<R: Runtime>(
     let mut headers = HashMap::new();
     headers.insert("WEBSOCKET-SESSION-ID".to_string(), websocket_id.into());
 
-    let response =
-        HttpClient::advanced_post(&server_id, "/chat/_new", Some(headers), query_params, body)
+    let response = HttpClient::advanced_post(
+        &server_id,
+        "/chat/_new",
+        Some(headers),
+        convert_query_params_to_strings(query_params),
+        body,
+    )
     .await
     .map_err(|e| format!("Error sending message: {}", e))?;
 
@@ -159,6 +159,69 @@ pub async fn new_chat<R: Runtime>(
     Ok(chat_response)
 }
 
+#[tauri::command]
+pub async fn chat_create<R: Runtime>(
+    app_handle: AppHandle<R>,
+    server_id: String,
+    message: String,
+    query_params: Option<HashMap<String, Value>>,
+    client_id: String,
+) -> Result<(), String> {
+    let body = if !message.is_empty() {
+        let message = ChatRequestMessage {
+            message: Some(message),
+        };
+        Some(
+            serde_json::to_string(&message)
+                .map_err(|e| format!("Failed to serialize message: {}", e))?
+                .into(),
+        )
+    } else {
+        None
+    };
+
+    let response = HttpClient::advanced_post(
+        &server_id,
+        "/chat/_create",
+        None,
+        convert_query_params_to_strings(query_params),
+        body,
+    )
+    .await
+    .map_err(|e| format!("Error sending message: {}", e))?;
+
+    if response.status() == 429 {
+        log::warn!("Rate limit exceeded for chat create");
+        return Err("Rate limited".to_string());
+    }
+
+    if !response.status().is_success() {
+        return Err(format!("Request failed with status: {}", response.status()));
+    }
+
+    let stream = response.bytes_stream();
+    let reader = tokio_util::io::StreamReader::new(
+        stream.map_err(|e| std::io::Error::new(std::io::ErrorKind::Other, e)),
+    );
+    let mut lines = tokio::io::BufReader::new(reader).lines();
+
+    log::info!("client_id_create: {}", &client_id);
+
+    while let Ok(Some(line)) = lines.next_line().await {
+        log::info!("Received chat stream line: {}", &line);
+
+        if let Err(err) = app_handle.emit(&client_id, line) {
+            log::error!("Emit failed: {:?}", err);
+
+            print!("Error sending message: {:?}", err);
+
+            let _ = app_handle.emit("chat-create-error", format!("Emit failed: {:?}", err));
+        }
+    }
+
+    Ok(())
+}
+
 #[tauri::command]
 pub async fn send_message<R: Runtime>(
     _app_handle: AppHandle<R>,
@@ -181,16 +244,83 @@ pub async fn send_message<R: Runtime>(
         &server_id,
         path.as_str(),
         Some(headers),
-        query_params,
+        convert_query_params_to_strings(query_params),
         Some(body),
     )
     .await
     .map_err(|e| format!("Error cancel session: {}", e))?;
 
     common::http::get_response_body_text(response).await
 }
 
+#[tauri::command]
+pub async fn chat_chat<R: Runtime>(
+    app_handle: AppHandle<R>,
+    server_id: String,
+    session_id: String,
+    message: String,
+    query_params: Option<HashMap<String, Value>>, //search,deep_thinking
+    client_id: String,
+) -> Result<(), String> {
+    let body = if !message.is_empty() {
+        let message = ChatRequestMessage {
+            message: Some(message),
+        };
+        Some(
+            serde_json::to_string(&message)
+                .map_err(|e| format!("Failed to serialize message: {}", e))?
+                .into(),
+        )
+    } else {
+        None
+    };
+
+    let path = format!("/chat/{}/_chat", session_id);
+
+    let response = HttpClient::advanced_post(
+        &server_id,
+        path.as_str(),
+        None,
+        convert_query_params_to_strings(query_params),
+        body,
+    )
+    .await
+    .map_err(|e| format!("Error sending message: {}", e))?;
+
+    if response.status() == 429 {
+        log::warn!("Rate limit exceeded for chat create");
+        return Err("Rate limited".to_string());
+    }
+
+    if !response.status().is_success() {
+        return Err(format!("Request failed with status: {}", response.status()));
+    }
+
+    let stream = response.bytes_stream();
+    let reader = tokio_util::io::StreamReader::new(
+        stream.map_err(|e| std::io::Error::new(std::io::ErrorKind::Other, e)),
+    );
+    let mut lines = tokio::io::BufReader::new(reader).lines();
+    let mut first_log = true;
+
+    log::info!("client_id: {}", &client_id);
+
+    while let Ok(Some(line)) = lines.next_line().await {
+        log::info!("Received chat stream line: {}", &line);
+        if first_log {
+            log::info!("first stream line: {}", &line);
+            first_log = false;
+        }
+
+        if let Err(err) = app_handle.emit(&client_id, line) {
+            log::error!("Emit failed: {:?}", err);
+            let _ = app_handle.emit("chat-create-error", format!("Emit failed: {:?}", err));
+        }
+    }
+
+    Ok(())
+}
+
 #[tauri::command]
 pub async fn delete_session_chat(server_id: String, session_id: String) -> Result<bool, String> {
     let response =
@@ -238,25 +368,9 @@ pub async fn update_session_chat(
 pub async fn assistant_search<R: Runtime>(
     _app_handle: AppHandle<R>,
     server_id: String,
-    from: u32,
-    size: u32,
-    query: Option<HashMap<String, Value>>,
+    query_params: Option<Vec<String>>,
 ) -> Result<Value, String> {
-    let mut body = serde_json::json!({
-        "from": from,
-        "size": size,
-    });
-
-    if let Some(q) = query {
-        body["query"] = serde_json::to_value(q).map_err(|e| e.to_string())?;
-    }
-
-    let response = HttpClient::post(
-        &server_id,
-        "/assistant/_search",
-        None,
-        Some(reqwest::Body::from(body.to_string())),
-    )
+    let response = HttpClient::post(&server_id, "/assistant/_search", query_params, None)
         .await
         .map_err(|e| format!("Error searching assistants: {}", e))?;
 
@@ -380,7 +494,8 @@ pub fn remove_icon_fields(json: &str) -> String {
         } else {
             "".to_string()
         }
-    }).to_string()
+    })
+    .to_string()
 }
 
 #[tauri::command]
@@ -3,38 +3,43 @@ use std::{fs::create_dir, io::Read};
 use tauri::{Manager, Runtime};
 use tauri_plugin_autostart::ManagerExt;
 
-// Start or stop according to configuration
-pub fn enable_autostart(app: &mut tauri::App) {
-    use tauri_plugin_autostart::MacosLauncher;
-    use tauri_plugin_autostart::ManagerExt;
-
-    app.handle()
-        .plugin(tauri_plugin_autostart::init(
-            MacosLauncher::AppleScript,
-            None,
-        ))
-        .unwrap();
-
+/// If the state reported from the OS and the state stored by us differ, our state is
+/// prioritized and seen as the correct one. Update the OS state to make them consistent.
+pub fn ensure_autostart_state_consistent(app: &mut tauri::App) -> Result<(), String> {
     let autostart_manager = app.autolaunch();
 
-    // close autostart
-    // autostart_manager.disable().unwrap();
-    // return;
-
-    match (
-        autostart_manager.is_enabled(),
-        current_autostart(app.app_handle()),
-    ) {
-        (Ok(false), Ok(true)) => match autostart_manager.enable() {
-            Ok(_) => println!("Autostart enabled successfully."),
-            Err(err) => eprintln!("Failed to enable autostart: {}", err),
-        },
-        (Ok(true), Ok(false)) => match autostart_manager.disable() {
-            Ok(_) => println!("Autostart disable successfully."),
-            Err(err) => eprintln!("Failed to disable autostart: {}", err),
-        },
-        _ => (),
+    let os_state = autostart_manager.is_enabled().map_err(|e| e.to_string())?;
+    let coco_stored_state = current_autostart(app.app_handle()).map_err(|e| e.to_string())?;
+
+    if os_state != coco_stored_state {
+        log::warn!(
+            "autostart inconsistent states, OS state [{}], Coco state [{}], config file could be deleted or corrupted",
+            os_state,
+            coco_stored_state
+        );
+        log::info!("trying to correct the inconsistent states");
+
+        let result = if coco_stored_state {
+            autostart_manager.enable()
+        } else {
+            autostart_manager.disable()
+        };
+
+        match result {
+            Ok(_) => {
+                log::info!("inconsistent autostart states fixed");
+            }
+            Err(e) => {
+                log::error!(
+                    "failed to fix inconsistent autostart state due to error [{}]",
+                    e
+                );
+                return Err(e.to_string());
+            }
+        }
     }
+
+    Ok(())
 }
 
 fn current_autostart<R: Runtime>(app: &tauri::AppHandle<R>) -> Result<bool, String> {
@@ -1,7 +1,7 @@
 use serde::{Deserialize, Serialize};
 use std::collections::HashMap;
-use crate::hide_coco;
+use tauri::AppHandle;
+use tauri::Runtime;
 
 #[derive(Debug, Clone, Serialize, Deserialize)]
 pub struct RichLabel {
@@ -53,7 +53,9 @@ impl OnOpened {
                 const WHITESPACE: &str = " ";
                 let mut ret = action.exec.clone();
                 ret.push_str(WHITESPACE);
-                ret.push_str(action.args.join(WHITESPACE).as_str());
+                if let Some(ref args) = action.args {
+                    ret.push_str(args.join(WHITESPACE).as_str());
+                }
 
                 ret
             }
@@ -62,29 +64,43 @@ impl OnOpened {
 }
 
 #[tauri::command]
-pub(crate) async fn open(on_opened: OnOpened) -> Result<(), String> {
+pub(crate) async fn open<R: Runtime>(
+    tauri_app_handle: AppHandle<R>,
+    on_opened: OnOpened,
+) -> Result<(), String> {
     log::debug!("open({})", on_opened.url());
 
     use crate::util::open as homemade_tauri_shell_open;
-    use crate::GLOBAL_TAURI_APP_HANDLE;
     use std::process::Command;
 
-    let global_tauri_app_handle = GLOBAL_TAURI_APP_HANDLE
-        .get()
-        .expect("global tauri app handle not set");
-
     match on_opened {
         OnOpened::Application { app_path } => {
-            homemade_tauri_shell_open(global_tauri_app_handle.clone(), app_path).await?
+            homemade_tauri_shell_open(tauri_app_handle.clone(), app_path).await?
         }
         OnOpened::Document { url } => {
-            homemade_tauri_shell_open(global_tauri_app_handle.clone(), url).await?
+            homemade_tauri_shell_open(tauri_app_handle.clone(), url).await?
         }
         OnOpened::Command { action } => {
            let mut cmd = Command::new(action.exec);
-            cmd.args(action.args);
+            if let Some(args) = action.args {
+                cmd.args(args);
+            }
            let output = cmd.output().map_err(|e| e.to_string())?;
+            // Sometimes, we wanna see the result in logs even though it doesn't fail.
+            log::debug!(
+                "executing open(Command) result, exit code: [{}], stdout: [{}], stderr: [{}]",
+                output.status,
+                String::from_utf8_lossy(&output.stdout),
+                String::from_utf8_lossy(&output.stderr)
+            );
            if !output.status.success() {
+                log::warn!(
+                    "executing open(Command) failed, exit code: [{}], stdout: [{}], stderr: [{}]",
+                    output.status,
+                    String::from_utf8_lossy(&output.stdout),
+                    String::from_utf8_lossy(&output.stderr)
+                );
+
                return Err(format!(
                    "Command failed, stderr [{}]",
                    String::from_utf8_lossy(&output.stderr)
@@ -93,7 +109,6 @@ pub(crate) async fn open(on_opened: OnOpened) -> Result<(), String> {
         }
     }
 
-    hide_coco(global_tauri_app_handle.clone()).await;
     Ok(())
 }
 
@@ -1,8 +1,22 @@
-use serde::{Deserialize, Serialize};
+use reqwest::StatusCode;
+use serde::{Deserialize, Serialize, Serializer};
 use thiserror::Error;
 
+fn serialize_optional_status_code<S>(
+    status_code: &Option<StatusCode>,
+    serializer: S,
+) -> Result<S::Ok, S::Error>
+where
+    S: Serializer,
+{
+    match status_code {
+        Some(code) => serializer.serialize_str(&format!("{:?}", code)),
+        None => serializer.serialize_none(),
+    }
+}
+
+#[allow(unused)]
 #[derive(Debug, Deserialize)]
-#[allow(dead_code)]
 pub struct ErrorCause {
     #[serde(default)]
     pub r#type: Option<String>,
@@ -11,7 +25,7 @@ pub struct ErrorCause {
 }
 
 #[derive(Debug, Deserialize)]
-#[allow(dead_code)]
+#[allow(unused)]
 pub struct ErrorDetail {
     #[serde(default)]
     pub root_cause: Option<Vec<ErrorCause>>,
@@ -24,18 +38,22 @@ pub struct ErrorDetail {
 }
 
 #[derive(Debug, Deserialize)]
-#[allow(dead_code)]
 pub struct ErrorResponse {
     #[serde(default)]
     pub error: Option<ErrorDetail>,
     #[serde(default)]
+    #[allow(unused)]
     pub status: Option<u16>,
 }
 
 #[derive(Debug, Error, Serialize)]
 pub enum SearchError {
-    #[error("HttpError: {0}")]
-    HttpError(String),
+    #[error("HttpError: status code [{status_code:?}], msg [{msg}]")]
+    HttpError {
+        #[serde(serialize_with = "serialize_optional_status_code")]
+        status_code: Option<StatusCode>,
+        msg: String,
+    },
 
     #[error("ParseError: {0}")]
     ParseError(String),
@@ -43,12 +61,7 @@ pub enum SearchError {
     #[error("Timeout occurred")]
     Timeout,
 
-    #[error("UnknownError: {0}")]
-    #[allow(dead_code)]
-    Unknown(String),
-
     #[error("InternalError: {0}")]
-    #[allow(dead_code)]
     InternalError(String),
 }
 
@@ -59,7 +72,10 @@ impl From<reqwest::Error> for SearchError {
         } else if err.is_decode() {
             SearchError::ParseError(err.to_string())
         } else {
-            SearchError::HttpError(err.to_string())
+            SearchError::HttpError {
+                status_code: err.status(),
+                msg: err.to_string(),
+            }
         }
     }
 }
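For context on the reshaped `SearchError::HttpError` above: it is now a struct variant carrying the optional `reqwest::StatusCode` plus a message, and `serialize_optional_status_code` renders the code as a string when present. A hedged sketch of constructing it, assuming the enum from this diff is in scope (the value chosen is made up for illustration):

```rust
use reqwest::StatusCode;

// Illustrative only: building the struct-style variant introduced in this diff.
fn example_http_error() -> SearchError {
    SearchError::HttpError {
        status_code: Some(StatusCode::TOO_MANY_REQUESTS),
        msg: "rate limited by the Coco server".to_string(),
    }
}
```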
@@ -2,6 +2,8 @@ use crate::common;
 use reqwest::Response;
 use serde::{Deserialize, Serialize};
 use serde_json::Value;
+use std::collections::HashMap;
+use tauri_plugin_store::JsonValue;
 
 #[derive(Debug, Serialize, Deserialize)]
 pub struct GetResponse {
@@ -36,7 +38,6 @@ pub async fn get_response_body_text(response: Response) -> Result<String, String
         return Err(fallback_error);
     }
 
-
     match serde_json::from_str::<common::error::ErrorResponse>(&body) {
         Ok(parsed_error) => {
             dbg!(&parsed_error);
@@ -54,3 +55,21 @@ pub async fn get_response_body_text(response: Response) -> Result<String, String
         Ok(body)
     }
 }
+
+pub fn convert_query_params_to_strings(
+    query_params: Option<HashMap<String, JsonValue>>,
+) -> Option<Vec<String>> {
+    query_params.map(|map| {
+        map.into_iter()
+            .filter_map(|(k, v)| match v {
+                JsonValue::String(s) => Some(format!("{}={}", k, s)),
+                JsonValue::Number(n) => Some(format!("{}={}", k, n)),
+                JsonValue::Bool(b) => Some(format!("{}={}", k, b)),
+                _ => {
+                    eprintln!("Skipping unsupported query value for key '{}': {:?}", k, v);
+                    None
+                }
+            })
+            .collect()
+    })
+}
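A usage sketch for the new `convert_query_params_to_strings` helper added above: it flattens a `HashMap<String, JsonValue>` into `key=value` strings and drops unsupported value types. The keys and values below are made up for illustration:

```rust
use std::collections::HashMap;
use tauri_plugin_store::JsonValue;

fn example_usage() {
    let mut params: HashMap<String, JsonValue> = HashMap::new();
    params.insert("from".to_string(), JsonValue::from(0));
    params.insert("query".to_string(), JsonValue::String("coco".to_string()));

    // Yields Some(vec!["from=0", "query=coco"]) in some order; None stays None.
    let as_strings = convert_query_params_to_strings(Some(params));
    assert_eq!(as_strings.map(|v| v.len()), Some(2));
}
```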
@@ -1,16 +1,17 @@
-pub mod health;
-pub mod profile;
-pub mod server;
-pub mod auth;
-pub mod datasource;
-pub mod connector;
-pub mod search;
-pub mod document;
-pub mod traits;
-pub mod register;
 pub mod assistant;
-pub mod http;
+pub mod auth;
+pub mod connector;
+pub mod datasource;
+pub mod document;
 pub mod error;
+pub mod health;
+pub mod http;
+pub mod profile;
+pub mod register;
+pub mod search;
+pub mod server;
+pub mod traits;
 
 pub static MAIN_WINDOW_LABEL: &str = "main";
 pub static SETTINGS_WINDOW_LABEL: &str = "settings";
+pub static CHECK_WINDOW_LABEL: &str = "check";
@@ -7,8 +7,8 @@ use std::error::Error;
 
 #[derive(Debug, Serialize, Deserialize)]
 pub struct SearchResponse<T> {
-    pub took: u64,
-    pub timed_out: bool,
+    pub took: Option<u64>,
+    pub timed_out: Option<bool>,
     pub _shards: Option<Shards>,
     pub hits: Hits<T>,
 }
@@ -83,20 +83,6 @@ where
         .collect())
 }
 
-#[allow(dead_code)]
-pub async fn parse_search_results_with_score<T>(
-    response: Response,
-) -> Result<Vec<(T, Option<f64>)>, Box<dyn Error>>
-where
-    T: for<'de> Deserialize<'de> + std::fmt::Debug,
-{
-    Ok(parse_search_hits(response)
-        .await?
-        .into_iter()
-        .map(|hit| (hit._source, hit._score))
-        .collect())
-}
-
 #[derive(Debug, Clone, Serialize)]
 pub struct SearchQuery {
     pub from: u64,
@@ -50,9 +50,17 @@ pub struct Server {
     pub updated: String,
     #[serde(default = "default_enabled_type")]
     pub enabled: bool,
+    /// Public Coco servers can be used without signing in.
     #[serde(default = "default_bool_type")]
     pub public: bool,
 
+    /// A coco server is available if:
+    ///
+    /// 1. It is still online, we check this via the `GET /base_url/provider/_info`
+    ///    interface.
+    /// 2. A user is logged in to this Coco server, i.e., a token is stored in the
+    ///    `SERVER_TOKEN_LIST_CACHE`.
+    /// For public Coco servers, requirement 2 is not needed.
     #[serde(default = "default_available_type")]
     pub available: bool,
 
@@ -84,7 +92,10 @@ pub struct ServerAccessToken {
     #[serde(default = "default_empty_string")] // Custom default function for empty string
     pub id: String,
     pub access_token: String,
-    pub expired_at: u32, //unix timestamp in seconds
+    /// Unix timestamp in seconds
+    ///
+    /// Currently, this is UNUSED.
+    pub expired_at: u32,
 }
 
 impl ServerAccessToken {
@@ -2,10 +2,15 @@ use crate::common::error::SearchError;
 use crate::common::search::SearchQuery;
 use crate::common::search::{QueryResponse, QuerySource};
 use async_trait::async_trait;
+use tauri::AppHandle;
 
 #[async_trait]
 pub trait SearchSource: Send + Sync {
     fn get_type(&self) -> QuerySource;
 
-    async fn search(&self, query: SearchQuery) -> Result<QueryResponse, SearchError>;
+    async fn search(
+        &self,
+        tauri_app_handle: AppHandle,
+        query: SearchQuery,
+    ) -> Result<QueryResponse, SearchError>;
 }
@@ -1 +1,13 @@
 pub(super) const EXTENSION_ID: &str = "AIOverview";
+
+/// JSON file for this extension.
+pub(crate) const PLUGIN_JSON_FILE: &str = r#"
+{
+  "id": "AIOverview",
+  "name": "AI Overview",
+  "description": "...",
+  "icon": "font_a-AIOverview",
+  "type": "ai_extension",
+  "enabled": true
+}
+"#;
@@ -14,6 +14,8 @@ pub use without_feature::*;
 
 #[derive(Debug, Serialize, Clone)]
 #[serde(rename_all = "camelCase")]
+#[allow(dead_code)]
 pub struct AppEntry {
     path: String,
     name: String,
@@ -33,3 +35,16 @@ pub struct AppMetadata {
     modified: u128,
     last_opened: u128,
 }
+
+/// JSON file for this extension.
+pub(crate) const PLUGIN_JSON_FILE: &str = r#"
+{
+  "id": "Applications",
+  "platforms": ["macos", "linux", "windows"],
+  "name": "Applications",
+  "description": "Application search",
+  "icon": "font_Application",
+  "type": "group",
+  "enabled": true
+}
+"#;
@@ -1,8 +1,9 @@
|
|||||||
|
use super::super::Extension;
|
||||||
|
use super::super::pizza_engine_runtime::RUNTIME_TX;
|
||||||
use super::super::pizza_engine_runtime::SearchSourceState;
|
use super::super::pizza_engine_runtime::SearchSourceState;
|
||||||
use super::super::pizza_engine_runtime::Task;
|
use super::super::pizza_engine_runtime::Task;
|
||||||
use super::super::pizza_engine_runtime::RUNTIME_TX;
|
|
||||||
use super::super::Extension;
|
|
||||||
use super::AppMetadata;
|
use super::AppMetadata;
|
||||||
|
use crate::GLOBAL_TAURI_APP_HANDLE;
|
||||||
use crate::common::document::{DataSourceReference, Document, OnOpened};
|
use crate::common::document::{DataSourceReference, Document, OnOpened};
|
||||||
use crate::common::error::SearchError;
|
use crate::common::error::SearchError;
|
||||||
use crate::common::search::{QueryResponse, QuerySource, SearchQuery};
|
use crate::common::search::{QueryResponse, QuerySource, SearchQuery};
|
||||||
@@ -10,7 +11,6 @@ use crate::common::traits::SearchSource;
|
|||||||
use crate::extension::ExtensionType;
|
use crate::extension::ExtensionType;
|
||||||
use crate::extension::LOCAL_QUERY_SOURCE_TYPE;
|
use crate::extension::LOCAL_QUERY_SOURCE_TYPE;
|
||||||
use crate::util::open;
|
use crate::util::open;
|
||||||
use crate::GLOBAL_TAURI_APP_HANDLE;
|
|
||||||
use applications::{App, AppTrait};
|
use applications::{App, AppTrait};
|
||||||
use async_trait::async_trait;
|
use async_trait::async_trait;
|
||||||
use log::{error, warn};
|
use log::{error, warn};
|
||||||
@@ -23,11 +23,12 @@ use pizza_engine::error::PizzaEngineError;
|
|||||||
use pizza_engine::search::{OriginalQuery, QueryContext, SearchResult, Searcher};
|
use pizza_engine::search::{OriginalQuery, QueryContext, SearchResult, Searcher};
|
||||||
use pizza_engine::store::{DiskStore, DiskStoreSnapshot};
|
use pizza_engine::store::{DiskStore, DiskStoreSnapshot};
|
||||||
use pizza_engine::writer::Writer;
|
use pizza_engine::writer::Writer;
|
||||||
use pizza_engine::{doc, Engine, EngineBuilder};
|
use pizza_engine::{Engine, EngineBuilder, doc};
|
||||||
use serde_json::Value as Json;
|
use serde_json::Value as Json;
|
||||||
|
use std::path::Path;
|
||||||
use std::path::PathBuf;
|
use std::path::PathBuf;
|
||||||
use tauri::{async_runtime, AppHandle, Manager, Runtime};
|
use tauri::{AppHandle, Manager, Runtime, async_runtime};
|
||||||
use tauri_plugin_fs_pro::{icon, metadata, name, IconOptions};
|
use tauri_plugin_fs_pro::{IconOptions, icon, metadata, name};
|
||||||
use tauri_plugin_global_shortcut::GlobalShortcutExt;
|
use tauri_plugin_global_shortcut::GlobalShortcutExt;
|
||||||
use tauri_plugin_global_shortcut::Shortcut;
|
use tauri_plugin_global_shortcut::Shortcut;
|
||||||
use tauri_plugin_global_shortcut::ShortcutEvent;
|
use tauri_plugin_global_shortcut::ShortcutEvent;
|
||||||
@@ -47,6 +48,7 @@ const TAURI_STORE_APP_ALIAS: &str = "app_alias";
|
|||||||
const TAURI_STORE_KEY_SEARCH_PATH: &str = "search_path";
|
const TAURI_STORE_KEY_SEARCH_PATH: &str = "search_path";
|
||||||
const TAURI_STORE_KEY_DISABLED_APP_LIST: &str = "disabled_app_list";
|
const TAURI_STORE_KEY_DISABLED_APP_LIST: &str = "disabled_app_list";
|
||||||
|
|
||||||
|
const INDEX_DIR: &str = "local_application_index";
|
||||||
|
|
||||||
/// We use this as:
|
/// We use this as:
|
||||||
///
|
///
|
||||||
@@ -210,31 +212,15 @@ impl SearchSourceState for ApplicationSearchSourceState {
|
|||||||
}
|
}
|
||||||
}
|
}
|
||||||
|
|
||||||
/// Upon application start, index all the applications found in the `get_default_search_paths()`.
|
/// Index applications if they have not been indexed (by checking if `app_index_dir` exists).
|
||||||
struct IndexAllApplicationsTask<R: Runtime> {
|
async fn index_applications_if_not_indexed<R: Runtime>(
|
||||||
tauri_app_handle: AppHandle<R>,
|
tauri_app_handle: &AppHandle<R>,
|
||||||
callback: Option<tokio::sync::oneshot::Sender<Result<(), String>>>,
|
app_index_dir: &Path,
|
||||||
}
|
) -> anyhow::Result<ApplicationSearchSourceState> {
|
||||||
|
|
||||||
#[async_trait::async_trait(?Send)]
|
|
||||||
impl<R: Runtime> Task for IndexAllApplicationsTask<R> {
|
|
||||||
fn search_source_id(&self) -> &'static str {
|
|
||||||
APPLICATION_SEARCH_SOURCE_ID
|
|
||||||
}
|
|
||||||
|
|
||||||
async fn exec(&mut self, state: &mut Option<Box<dyn SearchSourceState>>) {
|
|
||||||
let callback = self.callback.take().unwrap();
|
|
||||||
let mut app_index_dir = self
|
|
||||||
.tauri_app_handle
|
|
||||||
.path()
|
|
||||||
.app_data_dir()
|
|
||||||
.expect("failed to find the local dir");
|
|
||||||
app_index_dir.push("local_application_index");
|
|
||||||
|
|
||||||
let index_exists = app_index_dir.exists();
|
let index_exists = app_index_dir.exists();
|
||||||
|
|
||||||
let mut pizza_engine_builder = EngineBuilder::new();
|
let mut pizza_engine_builder = EngineBuilder::new();
|
||||||
let disk_store = task_exec_try!(DiskStore::new(&app_index_dir), callback);
|
let disk_store = DiskStore::new(&app_index_dir)?;
|
||||||
pizza_engine_builder.set_data_store(disk_store);
|
pizza_engine_builder.set_data_store(disk_store);
|
||||||
|
|
||||||
let mut schema = Schema::new();
|
let mut schema = Schema::new();
|
||||||
@@ -259,20 +245,39 @@ impl<R: Runtime> Task for IndexAllApplicationsTask<R> {
|
|||||||
let mut writer = pizza_engine.acquire_writer();
|
let mut writer = pizza_engine.acquire_writer();
|
||||||
|
|
||||||
if !index_exists {
|
if !index_exists {
|
||||||
let default_search_path = get_default_search_paths();
|
let search_path = {
|
||||||
let apps = task_exec_try!(list_app_in(default_search_path), callback);
|
let disabled_app_list_and_search_path_store =
|
||||||
|
tauri_app_handle.store(TAURI_STORE_DISABLED_APP_LIST_AND_SEARCH_PATH)?;
|
||||||
|
let search_path_json = disabled_app_list_and_search_path_store
|
||||||
|
.get(TAURI_STORE_KEY_SEARCH_PATH)
|
||||||
|
.unwrap_or_else(|| {
|
||||||
|
panic!("search path should be persisted in the store, but it is not, plz ensure that the store gets initialized before calling this function")
|
||||||
|
});
|
||||||
|
|
||||||
|
let search_path: Vec<String> = match search_path_json {
|
||||||
|
Json::Array(array) => array
|
||||||
|
.into_iter()
|
||||||
|
.map(|json| match json {
|
||||||
|
Json::String(str) => str,
|
||||||
|
_ => unreachable!("search path is stored in a string"),
|
||||||
|
})
|
||||||
|
.collect(),
|
||||||
|
_ => unreachable!("search path is stored in an array"),
|
||||||
|
};
|
||||||
|
|
||||||
|
search_path
|
||||||
|
};
|
||||||
|
let apps = list_app_in(search_path).map_err(|str| anyhow::anyhow!(str))?;
|
||||||
|
|
||||||
for app in apps.iter() {
|
for app in apps.iter() {
|
||||||
let app_path = get_app_path(app);
|
let app_path = get_app_path(app);
|
||||||
let app_name = get_app_name(app).await;
|
let app_name = get_app_name(app).await;
|
||||||
let app_icon_path = task_exec_try!(
|
let app_icon_path = get_app_icon_path(&tauri_app_handle, app)
|
||||||
get_app_icon_path(&self.tauri_app_handle, app).await,
|
.await
|
||||||
callback
|
.map_err(|str| anyhow::anyhow!(str))?;
|
||||||
);
|
let app_alias = get_app_alias(&tauri_app_handle, &app_path).unwrap_or(String::new());
|
||||||
let app_alias =
|
|
||||||
get_app_alias(&self.tauri_app_handle, &app_path).unwrap_or(String::new());
|
|
||||||
|
|
||||||
if app_name.is_empty() || app_name.eq(&self.tauri_app_handle.package_info().name) {
|
if app_name.is_empty() || app_name.eq(&tauri_app_handle.package_info().name) {
|
||||||
continue;
|
continue;
|
||||||
}
|
}
|
||||||
|
|
||||||
@@ -289,27 +294,86 @@ impl<R: Runtime> Task for IndexAllApplicationsTask<R> {
|
|||||||
// We don't error out because one failure won't break the whole thing
|
// We don't error out because one failure won't break the whole thing
|
||||||
if let Err(e) = writer.create_document(document).await {
|
if let Err(e) = writer.create_document(document).await {
|
||||||
warn!(
|
warn!(
|
||||||
"failed to index application [app name: '{}', app path: '{}'] due to error [{}]", app_name, app_path, e
|
"failed to index application [app name: '{}', app path: '{}'] due to error [{}]",
|
||||||
|
app_name, app_path, e
|
||||||
)
|
)
|
||||||
}
|
}
|
||||||
}
|
}
|
||||||
|
|
||||||
task_exec_try!(writer.commit(), callback);
|
writer.commit()?;
|
||||||
}
|
}
|
||||||
|
|
||||||
let snapshot = pizza_engine.create_snapshot();
|
let snapshot = pizza_engine.create_snapshot();
|
||||||
let searcher = pizza_engine.acquire_searcher();
|
let searcher = pizza_engine.acquire_searcher();
|
||||||
|
|
||||||
let state_to_store = Box::new(ApplicationSearchSourceState {
|
Ok(ApplicationSearchSourceState {
|
||||||
searcher,
|
searcher,
|
||||||
snapshot,
|
snapshot,
|
||||||
engine: pizza_engine,
|
engine: pizza_engine,
|
||||||
writer,
|
writer,
|
||||||
}) as Box<dyn SearchSourceState>;
|
})
|
||||||
|
}
|
||||||
|
|
||||||
*state = Some(state_to_store);
|
/// Upon application start, index all the applications found in the `get_default_search_paths()`.
|
||||||
|
struct IndexAllApplicationsTask<R: Runtime> {
|
||||||
|
tauri_app_handle: AppHandle<R>,
|
||||||
|
callback: Option<tokio::sync::oneshot::Sender<Result<(), String>>>,
|
||||||
|
}
|
||||||
|
|
||||||
callback.send(Ok(())).unwrap();
|
#[async_trait::async_trait(?Send)]
|
||||||
|
impl<R: Runtime> Task for IndexAllApplicationsTask<R> {
|
||||||
|
fn search_source_id(&self) -> &'static str {
|
||||||
|
APPLICATION_SEARCH_SOURCE_ID
|
||||||
|
}
|
||||||
|
|
||||||
|
async fn exec(&mut self, state: &mut Option<Box<dyn SearchSourceState>>) {
|
||||||
|
let callback = self.callback.take().unwrap();
|
||||||
|
let mut app_index_dir = self
|
||||||
|
.tauri_app_handle
|
||||||
|
.path()
|
||||||
|
.app_data_dir()
|
||||||
|
.expect("failed to find the local dir");
|
||||||
|
app_index_dir.push(INDEX_DIR);
|
||||||
|
let app_search_source_state = task_exec_try!(
|
||||||
|
index_applications_if_not_indexed(&self.tauri_app_handle, &app_index_dir).await,
|
||||||
|
callback
|
||||||
|
);
|
||||||
|
*state = Some(Box::new(app_search_source_state));
|
||||||
|
callback.send(Ok(())).expect("rx dropped");
|
||||||
|
}
|
||||||
|
}
|
||||||
|
|
||||||
|
struct ReindexAllApplicationsTask<R: Runtime> {
|
||||||
|
tauri_app_handle: AppHandle<R>,
|
||||||
|
callback: Option<tokio::sync::oneshot::Sender<Result<(), String>>>,
|
||||||
|
}
|
||||||
|
|
||||||
|
#[async_trait::async_trait(?Send)]
|
||||||
|
impl<R: Runtime> Task for ReindexAllApplicationsTask<R> {
|
||||||
|
fn search_source_id(&self) -> &'static str {
|
||||||
|
APPLICATION_SEARCH_SOURCE_ID
|
||||||
|
}
|
||||||
|
|
||||||
|
async fn exec(&mut self, state: &mut Option<Box<dyn SearchSourceState>>) {
|
||||||
|
let callback = self.callback.take().unwrap();
|
||||||
|
|
||||||
|
// Clear the state
|
||||||
|
*state = None;
|
||||||
|
let mut app_index_dir = self
|
||||||
|
.tauri_app_handle
|
||||||
|
.path()
|
||||||
|
.app_data_dir()
|
||||||
|
.expect("failed to find the local dir");
|
||||||
|
app_index_dir.push(INDEX_DIR);
|
||||||
|
task_exec_try!(tokio::fs::remove_dir_all(&app_index_dir).await, callback);
|
||||||
|
|
||||||
|
// Then re-index the apps
|
||||||
|
let app_search_source_state = task_exec_try!(
|
||||||
|
index_applications_if_not_indexed(&self.tauri_app_handle, &app_index_dir).await,
|
||||||
|
callback
|
||||||
|
);
|
||||||
|
*state = Some(Box::new(app_search_source_state));
|
||||||
|
callback.send(Ok(())).expect("rx dropped");
|
||||||
}
|
}
|
||||||
}
|
}
|
||||||
|
|
||||||
@@ -327,15 +391,45 @@ impl<R: Runtime> Task for SearchApplicationsTask<R> {
|
|||||||
|
|
||||||
async fn exec(&mut self, state: &mut Option<Box<dyn SearchSourceState>>) {
|
async fn exec(&mut self, state: &mut Option<Box<dyn SearchSourceState>>) {
|
||||||
let callback = self.callback.take().unwrap();
|
let callback = self.callback.take().unwrap();
|
||||||
|
|
||||||
|
let Some(state) = state.as_mut() else {
|
||||||
|
let empty_hits = SearchResult {
|
||||||
|
tracing_id: String::new(),
|
||||||
|
explains: None,
|
||||||
|
total_hits: 0,
|
||||||
|
hits: None,
|
||||||
|
};
|
||||||
|
|
||||||
|
let rx_dropped_error = callback.send(Ok(empty_hits)).is_err();
|
||||||
|
if rx_dropped_error {
|
||||||
|
warn!(
|
||||||
|
"failed to send local app search result back because the corresponding channel receiver end has been unexpected dropped, which could happen due to a low query timeout"
|
||||||
|
)
|
||||||
|
}
|
||||||
|
|
||||||
|
return;
|
||||||
|
};
|
||||||
|
|
||||||
let disabled_app_list = get_disabled_app_list(&self.tauri_app_handle);
|
let disabled_app_list = get_disabled_app_list(&self.tauri_app_handle);
|
||||||
|
|
||||||
// TODO: search via alias, implement this when Pizza engine supports update
|
// TODO: search via alias, implement this when Pizza engine supports update
|
||||||
|
//
|
||||||
|
// NOTE: we use the Debug impl rather than Display for `self.query_string` as String's Debug
|
||||||
|
// impl won't interrupt escape characters. So for input like:
|
||||||
|
//
|
||||||
|
// ```text
|
||||||
|
// Google
|
||||||
|
// Chrome
|
||||||
|
// ```
|
||||||
|
//
|
||||||
|
// It will be passed to Pizza like "Google\nChrome". Using Display impl would result
|
||||||
|
// in an invalid query DSL and serde will complain.
|
||||||
let dsl = format!(
|
let dsl = format!(
|
||||||
"{{ \"query\": {{ \"bool\": {{ \"should\": [ {{ \"match\": {{ \"{FIELD_APP_NAME}\": \"{}\" }} }}, {{ \"prefix\": {{ \"{FIELD_APP_NAME}\": \"{}\" }} }} ] }} }} }}", self.query_string, self.query_string);
|
"{{ \"query\": {{ \"bool\": {{ \"should\": [ {{ \"match\": {{ \"{FIELD_APP_NAME}\": {:?} }} }}, {{ \"prefix\": {{ \"{FIELD_APP_NAME}\": {:?} }} }} ] }} }} }}",
|
||||||
|
self.query_string, self.query_string
|
||||||
|
);
|
||||||
|
|
||||||
let state = state
|
let state = state
|
||||||
.as_mut()
|
|
||||||
.expect("should be set before")
|
|
||||||
.as_mut_any()
|
.as_mut_any()
|
||||||
.downcast_mut::<ApplicationSearchSourceState>()
|
.downcast_mut::<ApplicationSearchSourceState>()
|
||||||
.unwrap();
|
.unwrap();
|
||||||
@@ -364,7 +458,9 @@ impl<R: Runtime> Task for SearchApplicationsTask<R> {
|
|||||||
|
|
||||||
let rx_dropped_error = callback.send(Ok(search_result)).is_err();
|
let rx_dropped_error = callback.send(Ok(search_result)).is_err();
|
||||||
if rx_dropped_error {
|
if rx_dropped_error {
|
||||||
warn!("failed to send local app search result back because the corresponding channel receiver end has been unexpected dropped, which could happen due to a low query timeout")
|
warn!(
|
||||||
|
"failed to send local app search result back because the corresponding channel receiver end has been unexpected dropped, which could happen due to a low query timeout"
|
||||||
|
)
|
||||||
}
|
}
|
||||||
}
|
}
|
||||||
}
|
}
|
||||||
@@ -418,7 +514,35 @@ impl Task for IndexNewApplicationsTask {
|
|||||||
pub struct ApplicationSearchSource;
|
pub struct ApplicationSearchSource;
|
||||||
|
|
||||||
impl ApplicationSearchSource {
|
impl ApplicationSearchSource {
|
||||||
pub async fn init<R: Runtime>(app_handle: AppHandle<R>) -> Result<(), String> {
|
pub async fn prepare_index_and_store<R: Runtime>(
|
||||||
|
app_handle: AppHandle<R>,
|
||||||
|
) -> Result<(), String> {
|
||||||
|
app_handle
|
||||||
|
.store(TAURI_STORE_APP_HOTKEY)
|
||||||
|
.map_err(|e| e.to_string())?;
|
||||||
|
let disabled_app_list_and_search_path_store = app_handle
|
||||||
|
.store(TAURI_STORE_DISABLED_APP_LIST_AND_SEARCH_PATH)
|
||||||
|
.map_err(|e| e.to_string())?;
|
||||||
|
if disabled_app_list_and_search_path_store
|
||||||
|
.get(TAURI_STORE_KEY_DISABLED_APP_LIST)
|
||||||
|
.is_none()
|
||||||
|
{
|
||||||
|
disabled_app_list_and_search_path_store
|
||||||
|
.set(TAURI_STORE_KEY_DISABLED_APP_LIST, Json::Array(Vec::new()));
|
||||||
|
}
|
||||||
|
|
||||||
|
// IndexAllApplicationsTask will read the apps installed in search paths and
|
||||||
|
// index them, so it depends on this configuration entry. Init this entry
|
||||||
|
// before indexing apps.
|
||||||
|
if disabled_app_list_and_search_path_store
|
||||||
|
.get(TAURI_STORE_KEY_SEARCH_PATH)
|
||||||
|
.is_none()
|
||||||
|
{
|
||||||
|
let default_search_path = get_default_search_paths();
|
||||||
|
disabled_app_list_and_search_path_store
|
||||||
|
.set(TAURI_STORE_KEY_SEARCH_PATH, default_search_path);
|
||||||
|
}
|
||||||
|
|
||||||
let (tx, rx) = tokio::sync::oneshot::channel();
|
let (tx, rx) = tokio::sync::oneshot::channel();
|
||||||
let index_applications_task = IndexAllApplicationsTask {
|
let index_applications_task = IndexAllApplicationsTask {
|
||||||
tauri_app_handle: app_handle.clone(),
|
tauri_app_handle: app_handle.clone(),
|
||||||
@@ -439,31 +563,6 @@ impl ApplicationSearchSource {
|
|||||||
)
|
)
|
||||||
}
|
}
|
||||||
|
|
||||||
app_handle
|
|
||||||
.store(TAURI_STORE_APP_HOTKEY)
|
|
||||||
.map_err(|e| e.to_string())?;
|
|
||||||
let disabled_app_list_and_search_path_store = app_handle
|
|
||||||
.store(TAURI_STORE_DISABLED_APP_LIST_AND_SEARCH_PATH)
|
|
||||||
.map_err(|e| e.to_string())?;
|
|
||||||
if disabled_app_list_and_search_path_store
|
|
||||||
.get(TAURI_STORE_KEY_DISABLED_APP_LIST)
|
|
||||||
.is_none()
|
|
||||||
{
|
|
||||||
disabled_app_list_and_search_path_store
|
|
||||||
.set(TAURI_STORE_KEY_DISABLED_APP_LIST, Json::Array(Vec::new()));
|
|
||||||
}
|
|
||||||
|
|
||||||
if disabled_app_list_and_search_path_store
|
|
||||||
.get(TAURI_STORE_KEY_SEARCH_PATH)
|
|
||||||
.is_none()
|
|
||||||
{
|
|
||||||
let default_search_path = get_default_search_paths();
|
|
||||||
disabled_app_list_and_search_path_store
|
|
||||||
.set(TAURI_STORE_KEY_SEARCH_PATH, default_search_path);
|
|
||||||
}
|
|
||||||
|
|
||||||
register_app_hotkey_upon_start(app_handle.clone())?;
|
|
||||||
|
|
||||||
Ok(())
|
Ok(())
|
||||||
}
|
}
|
||||||
}
|
}
|
||||||
@@ -481,7 +580,11 @@ impl SearchSource for ApplicationSearchSource {
|
|||||||
}
|
}
|
||||||
}
|
}
|
||||||
|
|
||||||
async fn search(&self, query: SearchQuery) -> Result<QueryResponse, SearchError> {
|
async fn search(
|
||||||
|
&self,
|
||||||
|
_tauri_app_handle: AppHandle,
|
||||||
|
query: SearchQuery,
|
||||||
|
) -> Result<QueryResponse, SearchError> {
|
||||||
let query_string = query
|
let query_string = query
|
||||||
.query_strings
|
.query_strings
|
||||||
.get("query")
|
.get("query")
|
||||||
@@ -628,14 +731,43 @@ fn app_hotkey_handler<R: Runtime>(
|
|||||||
}
|
}
|
||||||
}
|
}
|
||||||
|
|
||||||
fn register_app_hotkey_upon_start<R: Runtime>(
|
/// For all the applications, if it is enabled & has hotkey set, then set it up.
|
||||||
tauri_app_handle: AppHandle<R>,
|
pub(crate) fn set_apps_hotkey<R: Runtime>(tauri_app_handle: &AppHandle<R>) -> Result<(), String> {
|
||||||
) -> Result<(), String> {
|
|
||||||
let app_hotkey_store = tauri_app_handle
|
let app_hotkey_store = tauri_app_handle
|
||||||
.store(TAURI_STORE_APP_HOTKEY)
|
.store(TAURI_STORE_APP_HOTKEY)
|
||||||
.unwrap_or_else(|_| panic!("store [{}] not found/loaded", TAURI_STORE_APP_HOTKEY));
|
.unwrap_or_else(|_| panic!("store [{}] not found/loaded", TAURI_STORE_APP_HOTKEY));
|
||||||
|
|
||||||
|
let disabled_app_list = get_disabled_app_list(&tauri_app_handle);
|
||||||
|
|
||||||
for (app_path, hotkey) in app_hotkey_store.entries() {
|
for (app_path, hotkey) in app_hotkey_store.entries() {
|
||||||
|
if disabled_app_list.contains(&app_path) {
|
||||||
|
continue;
|
||||||
|
}
|
||||||
|
|
||||||
|
let hotkey = match hotkey {
|
||||||
|
Json::String(str) => str,
|
||||||
|
_ => unreachable!("hotkey should be stored in a string"),
|
||||||
|
};
|
||||||
|
|
||||||
|
set_app_hotkey(&tauri_app_handle, &app_path, &hotkey)?;
|
||||||
|
}
|
||||||
|
|
||||||
|
Ok(())
|
||||||
|
}
|
||||||
|
|
||||||
|
/// For all the applications, if it is enabled & has hotkey set, then unset it.
|
||||||
|
pub(crate) fn unset_apps_hotkey<R: Runtime>(tauri_app_handle: &AppHandle<R>) -> Result<(), String> {
|
||||||
|
let app_hotkey_store = tauri_app_handle
|
||||||
|
.store(TAURI_STORE_APP_HOTKEY)
|
||||||
|
.unwrap_or_else(|_| panic!("store [{}] not found/loaded", TAURI_STORE_APP_HOTKEY));
|
||||||
|
|
||||||
|
let disabled_app_list = get_disabled_app_list(&tauri_app_handle);
|
||||||
|
|
||||||
|
for (app_path, hotkey) in app_hotkey_store.entries() {
|
||||||
|
if disabled_app_list.contains(&app_path) {
|
||||||
|
continue;
|
||||||
|
}
|
||||||
|
|
||||||
let hotkey = match hotkey {
|
let hotkey = match hotkey {
|
||||||
Json::String(str) => str,
|
Json::String(str) => str,
|
||||||
_ => unreachable!("hotkey should be stored in a string"),
|
_ => unreachable!("hotkey should be stored in a string"),
|
||||||
@@ -643,13 +775,25 @@ fn register_app_hotkey_upon_start<R: Runtime>(
 
         tauri_app_handle
             .global_shortcut()
-            .on_shortcut(hotkey.as_str(), app_hotkey_handler(app_path))
+            .unregister(hotkey.as_str())
             .map_err(|e| e.to_string())?;
     }
 
     Ok(())
 }
 
+/// Set the hotkey but won't persist this settings change.
+pub(crate) fn set_app_hotkey<R: Runtime>(
+    tauri_app_handle: &AppHandle<R>,
+    app_path: &str,
+    hotkey: &str,
+) -> Result<(), String> {
+    tauri_app_handle
+        .global_shortcut()
+        .on_shortcut(hotkey, app_hotkey_handler(app_path.into()))
+        .map_err(|e| e.to_string())
+}
+
 pub fn register_app_hotkey<R: Runtime>(
     tauri_app_handle: &AppHandle<R>,
     app_path: &str,
@@ -661,13 +805,9 @@ pub fn register_app_hotkey<R: Runtime>(
     let app_hotkey_store = tauri_app_handle
         .store(TAURI_STORE_APP_HOTKEY)
         .unwrap_or_else(|_| panic!("store [{}] not found/loaded", TAURI_STORE_APP_HOTKEY));
 
     app_hotkey_store.set(app_path, hotkey);
 
-    tauri_app_handle
-        .global_shortcut()
-        .on_shortcut(hotkey, app_hotkey_handler(app_path.into()))
-        .map_err(|e| e.to_string())?;
-
+    set_app_hotkey(tauri_app_handle, app_path, hotkey)?;
     Ok(())
 }
@@ -702,7 +842,9 @@ pub fn unregister_app_hotkey<R: Runtime>(
         .global_shortcut()
         .is_registered(hotkey.as_str())
     {
-        panic!("inconsistent state, tauri store a hotkey is stored in the tauri store but it is not registered");
+        panic!(
+            "inconsistent state, tauri store a hotkey is stored in the tauri store but it is not registered"
+        );
     }
 
     tauri_app_handle
@@ -779,6 +921,21 @@ pub fn disable_app_search<R: Runtime>(
 
             store.set(TAURI_STORE_KEY_DISABLED_APP_LIST, disabled_app_list);
 
+            let app_hotkey_store = tauri_app_handle
+                .store(TAURI_STORE_APP_HOTKEY)
+                .unwrap_or_else(|_| panic!("store [{}] not found/loaded", TAURI_STORE_APP_HOTKEY));
+            let opt_hokey = app_hotkey_store.get(app_path).map(|json| match json {
+                Json::String(s) => s,
+                _ => panic!("hotkey should be stored in a string"),
+            });
+
+            if let Some(hotkey) = opt_hokey {
+                tauri_app_handle
+                    .global_shortcut()
+                    .unregister(hotkey.as_str())
+                    .map_err(|e| e.to_string())?;
+            }
+
             Ok(())
         }
 
@@ -805,6 +962,18 @@ pub fn enable_app_search<R: Runtime>(
             disabled_app_list.remove(index);
             store.set(TAURI_STORE_KEY_DISABLED_APP_LIST, disabled_app_list);
 
+            let app_hotkey_store = tauri_app_handle
+                .store(TAURI_STORE_APP_HOTKEY)
+                .unwrap_or_else(|_| panic!("store [{}] not found/loaded", TAURI_STORE_APP_HOTKEY));
+            let opt_hokey = app_hotkey_store.get(app_path).map(|json| match json {
+                Json::String(s) => s,
+                _ => panic!("hotkey should be stored in a string"),
+            });
+
+            if let Some(hotkey) = opt_hokey {
+                set_app_hotkey(tauri_app_handle, app_path, &hotkey)?;
+            }
+
             Ok(())
         }
         None => Err(format!(
@@ -964,21 +1133,25 @@ pub async fn get_app_list<R: Runtime>(
 
         let app_entry = Extension {
             id: path,
-            title: name,
+            name,
             platforms: None,
+            developer: None,
            // Leave it empty as it won't be used
             description: String::new(),
             icon: icon_path,
             r#type: ExtensionType::Application,
             action: None,
-            quick_link: None,
+            quicklink: None,
             commands: None,
             scripts: None,
-            quick_links: None,
+            quicklinks: None,
             alias: Some(alias),
             hotkey,
             enabled,
             settings: None,
+            screenshots: None,
+            url: None,
+            version: None,
         };
 
         app_entries.push(app_entry);
@@ -1027,3 +1200,30 @@ pub async fn get_app_metadata(app_name: String, app_path: String) -> Result<AppM
         last_opened,
     })
 }
+
+#[tauri::command]
+pub async fn reindex_applications<R: Runtime>(
+    tauri_app_handle: AppHandle<R>,
+) -> Result<(), String> {
+    let (tx, rx) = tokio::sync::oneshot::channel();
+    let reindex_applications_task = ReindexAllApplicationsTask {
+        tauri_app_handle: tauri_app_handle.clone(),
+        callback: Some(tx),
+    };
+
+    RUNTIME_TX
+        .get()
+        .unwrap()
+        .send(Box::new(reindex_applications_task))
+        .unwrap();
+
+    let reindexing_applications_result = rx.await.unwrap();
+    if let Err(ref e) = reindexing_applications_result {
+        error!(
+            "re-indexing local applications failed, app search won't work, error [{}]",
+            e
+        )
+    }
+
+    reindexing_applications_result
+}
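The new `reindex_applications` command does not do the work itself: it enqueues a task on a background runtime via `RUNTIME_TX` and waits for the outcome on a oneshot channel. Stripped of the application-specific types (all names below are illustrative, not from the codebase), the request/response pattern is:

use tokio::sync::{mpsc, oneshot};

// A request carrying a oneshot sender the worker uses to report its result.
struct ReindexRequest {
    callback: oneshot::Sender<Result<(), String>>,
}

#[tokio::main]
async fn main() {
    let (tx, mut rx) = mpsc::unbounded_channel::<ReindexRequest>();

    // Worker loop: performs the work, then answers through the callback.
    tokio::spawn(async move {
        while let Some(request) = rx.recv().await {
            let _ = request.callback.send(Ok(()));
        }
    });

    // Command side: enqueue the request, then await the worker's answer.
    let (done_tx, done_rx) = oneshot::channel();
    tx.send(ReindexRequest { callback: done_tx })
        .expect("worker is gone");
    let result = done_rx.await.expect("worker dropped the callback");
    assert!(result.is_ok());
}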
@@ -12,7 +12,9 @@ pub(crate) const QUERYSOURCE_ID_DATASOURCE_ID_DATASOURCE_NAME: &str = "Applicati
 pub struct ApplicationSearchSource;
 
 impl ApplicationSearchSource {
-    pub async fn init<R: Runtime>(_app_handle: AppHandle<R>) -> Result<(), String> {
+    pub async fn prepare_index_and_store<R: Runtime>(
+        _app_handle: AppHandle<R>,
+    ) -> Result<(), String> {
         Ok(())
     }
 }
@@ -30,7 +32,11 @@ impl SearchSource for ApplicationSearchSource {
         }
     }
 
-    async fn search(&self, _query: SearchQuery) -> Result<QueryResponse, SearchError> {
+    async fn search(
+        &self,
+        _tauri_app_handle: AppHandle,
+        _query: SearchQuery,
+    ) -> Result<QueryResponse, SearchError> {
         Ok(QueryResponse {
             source: self.get_type(),
             hits: Vec::new(),
@@ -117,3 +123,23 @@ pub async fn get_app_metadata<R: Runtime>(
 ) -> Result<AppMetadata, String> {
     unreachable!("app list should be empty, there is no way this can be invoked")
 }
+
+pub(crate) fn set_apps_hotkey<R: Runtime>(_tauri_app_handle: &AppHandle<R>) -> Result<(), String> {
+    // no-op
+    Ok(())
+}
+
+pub(crate) fn unset_apps_hotkey<R: Runtime>(
+    _tauri_app_handle: &AppHandle<R>,
+) -> Result<(), String> {
+    // no-op
+    Ok(())
+}
+
+#[tauri::command]
+pub async fn reindex_applications<R: Runtime>(
+    _tauri_app_handle: AppHandle<R>,
+) -> Result<(), String> {
+    // no-op
+    Ok(())
+}
@@ -10,9 +10,23 @@ use chinese_number::{ChineseCase, ChineseCountMethod, ChineseVariant, NumberToCh
 use num2words::Num2Words;
 use serde_json::Value;
 use std::collections::HashMap;
+use tauri::AppHandle;
 
 pub(crate) const DATA_SOURCE_ID: &str = "Calculator";
 
+/// JSON file for this extension.
+pub(crate) const PLUGIN_JSON_FILE: &str = r#"
+{
+  "id": "Calculator",
+  "name": "Calculator",
+  "platforms": ["macos", "linux", "windows"],
+  "description": "...",
+  "icon": "font_Calculator",
+  "type": "calculator",
+  "enabled": true
+}
+"#;
+
 pub struct CalculatorSource {
     base_score: f64,
 }
@@ -23,7 +37,7 @@ impl CalculatorSource {
     }
 }
 
-fn parse_query(query: String) -> Value {
+fn parse_query(query: &str) -> Value {
     let mut query_json = serde_json::Map::new();
 
     let operators = ["+", "-", "*", "/", "%"];
@@ -48,7 +62,7 @@ fn parse_query(query: String) -> Value {
         query_json.insert("type".to_string(), Value::String("expression".to_string()));
     }
 
-    query_json.insert("value".to_string(), Value::String(query));
+    query_json.insert("value".to_string(), Value::String(query.to_string()));
 
     Value::Object(query_json)
 }
@@ -107,7 +121,11 @@ impl SearchSource for CalculatorSource {
         }
     }
 
-    async fn search(&self, query: SearchQuery) -> Result<QueryResponse, SearchError> {
+    async fn search(
+        &self,
+        _tauri_app_handle: AppHandle,
+        query: SearchQuery,
+    ) -> Result<QueryResponse, SearchError> {
         let Some(query_string) = query.query_strings.get("query") else {
             return Ok(QueryResponse {
                 source: self.get_type(),
@@ -128,11 +146,17 @@ impl SearchSource for CalculatorSource {
             });
         }
 
-        match meval::eval_str(query_string) {
+        let query_string_clone = query_string.to_string();
+        let query_source = self.get_type();
+        let base_score = self.base_score;
+        let closure = move || -> QueryResponse {
+            let res_num = meval::eval_str(&query_string_clone);
+
+            match res_num {
                 Ok(num) => {
                     let mut payload: HashMap<String, Value> = HashMap::new();
 
-                    let payload_query = parse_query(query_string.into());
+                    let payload_query = parse_query(&query_string_clone);
                     let payload_result = parse_result(num);
 
                     payload.insert("query".to_string(), payload_query);
@@ -151,19 +175,25 @@ impl SearchSource for CalculatorSource {
                         ..Default::default()
                     };
 
-                    return Ok(QueryResponse {
-                        source: self.get_type(),
-                        hits: vec![(doc, self.base_score)],
+                    QueryResponse {
+                        source: query_source,
+                        hits: vec![(doc, base_score)],
                         total_hits: 1,
-                    });
                 }
-                Err(_) => {
-                    return Ok(QueryResponse {
-                        source: self.get_type(),
-                        hits: Vec::new(),
-                        total_hits: 0,
-                    });
                 }
-            };
+                Err(_) => QueryResponse {
+                    source: query_source,
+                    hits: Vec::new(),
+                    total_hits: 0,
+                },
+            }
+        };
+
+        let spawn_result = tokio::task::spawn_blocking(closure).await;
+
+        match spawn_result {
+            Ok(response) => Ok(response),
+            Err(e) => std::panic::resume_unwind(e.into_panic()),
+        }
     }
 }
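The reworked calculator search runs the `meval` evaluation inside `tokio::task::spawn_blocking`, so a slow or panicking evaluation never blocks the async search path, and a panic inside the closure is re-raised with `resume_unwind`. The pattern in isolation (with a trivial closure standing in for the calculator code):

#[tokio::main]
async fn main() {
    // Run CPU-bound (or potentially panicking) work off the async executor.
    let closure = move || -> i64 { 2 + 2 };

    let spawn_result = tokio::task::spawn_blocking(closure).await;

    let value = match spawn_result {
        Ok(value) => value,
        // Re-raise a panic from the blocking task instead of swallowing it.
        Err(e) => std::panic::resume_unwind(e.into_panic()),
    };

    assert_eq!(value, 4);
}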
212
src-tauri/src/extension/built_in/file_search/config.rs
Normal file
@@ -0,0 +1,212 @@
|
|||||||
|
//! File Search configuration entries definition and getter/setter functions.
|
||||||
|
|
||||||
|
use serde::Deserialize;
|
||||||
|
use serde::Serialize;
|
||||||
|
use serde_json::Value;
|
||||||
|
use std::sync::LazyLock;
|
||||||
|
use tauri::AppHandle;
|
||||||
|
use tauri::Runtime;
|
||||||
|
use tauri_plugin_store::StoreExt;
|
||||||
|
|
||||||
|
// Tauri store keys for file system configuration
|
||||||
|
const TAURI_STORE_FILE_SYSTEM_CONFIG: &str = "file_system_config";
|
||||||
|
const TAURI_STORE_KEY_SEARCH_BY: &str = "search_by";
|
||||||
|
const TAURI_STORE_KEY_SEARCH_PATHS: &str = "search_paths";
|
||||||
|
const TAURI_STORE_KEY_EXCLUDE_PATHS: &str = "exclude_paths";
|
||||||
|
const TAURI_STORE_KEY_FILE_TYPES: &str = "file_types";
|
||||||
|
|
||||||
|
static HOME_DIR: LazyLock<String> = LazyLock::new(|| {
|
||||||
|
let os_string = dirs::home_dir()
|
||||||
|
.expect("$HOME should be set")
|
||||||
|
.into_os_string();
|
||||||
|
os_string
|
||||||
|
.into_string()
|
||||||
|
.expect("User home directory should be encoded with UTF-8")
|
||||||
|
});
|
||||||
|
|
||||||
|
#[derive(Debug, Clone, Serialize, Deserialize, Copy)]
|
||||||
|
pub enum SearchBy {
|
||||||
|
Name,
|
||||||
|
NameAndContents,
|
||||||
|
}
|
||||||
|
|
||||||
|
#[derive(Debug, Clone, Serialize, Deserialize)]
|
||||||
|
pub struct FileSearchConfig {
|
||||||
|
pub search_paths: Vec<String>,
|
||||||
|
pub exclude_paths: Vec<String>,
|
||||||
|
pub file_types: Vec<String>,
|
||||||
|
pub search_by: SearchBy,
|
||||||
|
}
|
||||||
|
|
||||||
|
impl Default for FileSearchConfig {
|
||||||
|
fn default() -> Self {
|
||||||
|
Self {
|
||||||
|
search_paths: vec![
|
||||||
|
format!("{}/Documents", HOME_DIR.as_str()),
|
||||||
|
format!("{}/Desktop", HOME_DIR.as_str()),
|
||||||
|
format!("{}/Downloads", HOME_DIR.as_str()),
|
||||||
|
],
|
||||||
|
exclude_paths: Vec::new(),
|
||||||
|
file_types: Vec::new(),
|
||||||
|
search_by: SearchBy::Name,
|
||||||
|
}
|
||||||
|
}
|
||||||
|
}
|
||||||
|
|
||||||
|
impl FileSearchConfig {
|
||||||
|
pub(crate) fn get<R: Runtime>(tauri_app_handle: &AppHandle<R>) -> Self {
|
||||||
|
let store = tauri_app_handle
|
||||||
|
.store(TAURI_STORE_FILE_SYSTEM_CONFIG)
|
||||||
|
.unwrap_or_else(|e| {
|
||||||
|
panic!(
|
||||||
|
"store [{}] not found/loaded, error [{}]",
|
||||||
|
TAURI_STORE_FILE_SYSTEM_CONFIG, e
|
||||||
|
)
|
||||||
|
});
|
||||||
|
|
||||||
|
// Default value, will be used when specific config entries are not set
|
||||||
|
let default_config = FileSearchConfig::default();
|
||||||
|
|
||||||
|
let search_paths = {
|
||||||
|
if let Some(search_paths) = store.get(TAURI_STORE_KEY_SEARCH_PATHS) {
|
||||||
|
match search_paths {
|
||||||
|
Value::Array(arr) => {
|
||||||
|
let mut vec = Vec::with_capacity(arr.len());
|
||||||
|
for v in arr {
|
||||||
|
match v {
|
||||||
|
Value::String(s) => vec.push(s),
|
||||||
|
other => panic!(
|
||||||
|
"Expected all elements of 'search_paths' to be strings, but found: {:?}",
|
||||||
|
other
|
||||||
|
),
|
||||||
|
}
|
||||||
|
}
|
||||||
|
vec
|
||||||
|
}
|
||||||
|
other => panic!(
|
||||||
|
"Expected 'search_paths' to be an array of strings in the file system config store, but got: {:?}",
|
||||||
|
other
|
||||||
|
),
|
||||||
|
}
|
||||||
|
} else {
|
||||||
|
store.set(
|
||||||
|
TAURI_STORE_KEY_SEARCH_PATHS,
|
||||||
|
default_config.search_paths.as_slice(),
|
||||||
|
);
|
||||||
|
default_config.search_paths
|
||||||
|
}
|
||||||
|
};
|
||||||
|
|
||||||
|
let exclude_paths = {
|
||||||
|
if let Some(exclude_paths) = store.get(TAURI_STORE_KEY_EXCLUDE_PATHS) {
|
||||||
|
match exclude_paths {
|
||||||
|
Value::Array(arr) => {
|
||||||
|
let mut vec = Vec::with_capacity(arr.len());
|
||||||
|
for v in arr {
|
||||||
|
match v {
|
||||||
|
Value::String(s) => vec.push(s),
|
||||||
|
other => panic!(
|
||||||
|
"Expected all elements of 'exclude_paths' to be strings, but found: {:?}",
|
||||||
|
other
|
||||||
|
),
|
||||||
|
}
|
||||||
|
}
|
||||||
|
vec
|
||||||
|
}
|
||||||
|
other => panic!(
|
||||||
|
"Expected 'exclude_paths' to be an array of strings in the file system config store, but got: {:?}",
|
||||||
|
other
|
||||||
|
),
|
||||||
|
}
|
||||||
|
} else {
|
||||||
|
store.set(
|
||||||
|
TAURI_STORE_KEY_EXCLUDE_PATHS,
|
||||||
|
default_config.exclude_paths.as_slice(),
|
||||||
|
);
|
||||||
|
default_config.exclude_paths
|
||||||
|
}
|
||||||
|
};
|
||||||
|
|
||||||
|
let file_types = {
|
||||||
|
if let Some(file_types) = store.get(TAURI_STORE_KEY_FILE_TYPES) {
|
||||||
|
match file_types {
|
||||||
|
Value::Array(arr) => {
|
||||||
|
let mut vec = Vec::with_capacity(arr.len());
|
||||||
|
for v in arr {
|
||||||
|
match v {
|
||||||
|
Value::String(s) => vec.push(s),
|
||||||
|
other => panic!(
|
||||||
|
"Expected all elements of 'file_types' to be strings, but found: {:?}",
|
||||||
|
other
|
||||||
|
),
|
||||||
|
}
|
||||||
|
}
|
||||||
|
vec
|
||||||
|
}
|
||||||
|
other => panic!(
|
||||||
|
"Expected 'file_types' to be an array of strings in the file system config store, but got: {:?}",
|
||||||
|
other
|
||||||
|
),
|
||||||
|
}
|
||||||
|
} else {
|
||||||
|
store.set(
|
||||||
|
TAURI_STORE_KEY_FILE_TYPES,
|
||||||
|
default_config.file_types.as_slice(),
|
||||||
|
);
|
||||||
|
default_config.file_types
|
||||||
|
}
|
||||||
|
};
|
||||||
|
|
||||||
|
let search_by = {
|
||||||
|
if let Some(search_by) = store.get(TAURI_STORE_KEY_SEARCH_BY) {
|
||||||
|
serde_json::from_value(search_by.clone()).unwrap_or_else(|e| {
|
||||||
|
panic!(
|
||||||
|
"Failed to deserialize 'search_by' from file system config store. Invalid JSON: {:?}, error: {}",
|
||||||
|
search_by, e
|
||||||
|
)
|
||||||
|
})
|
||||||
|
} else {
|
||||||
|
store.set(
|
||||||
|
TAURI_STORE_KEY_SEARCH_BY,
|
||||||
|
serde_json::to_value(default_config.search_by).unwrap(),
|
||||||
|
);
|
||||||
|
default_config.search_by
|
||||||
|
}
|
||||||
|
};
|
||||||
|
|
||||||
|
Self {
|
||||||
|
search_by,
|
||||||
|
search_paths,
|
||||||
|
exclude_paths,
|
||||||
|
file_types,
|
||||||
|
}
|
||||||
|
}
|
||||||
|
}
|
||||||
|
|
||||||
|
// Tauri commands for managing file system configuration
|
||||||
|
#[tauri::command]
|
||||||
|
pub async fn get_file_system_config<R: Runtime>(
|
||||||
|
tauri_app_handle: AppHandle<R>,
|
||||||
|
) -> FileSearchConfig {
|
||||||
|
FileSearchConfig::get(&tauri_app_handle)
|
||||||
|
}
|
||||||
|
|
||||||
|
#[tauri::command]
|
||||||
|
pub async fn set_file_system_config<R: Runtime>(
|
||||||
|
tauri_app_handle: AppHandle<R>,
|
||||||
|
config: FileSearchConfig,
|
||||||
|
) -> Result<(), String> {
|
||||||
|
let store = tauri_app_handle
|
||||||
|
.store(TAURI_STORE_FILE_SYSTEM_CONFIG)
|
||||||
|
.map_err(|e| e.to_string())?;
|
||||||
|
|
||||||
|
store.set(TAURI_STORE_KEY_SEARCH_PATHS, config.search_paths);
|
||||||
|
store.set(TAURI_STORE_KEY_EXCLUDE_PATHS, config.exclude_paths);
|
||||||
|
store.set(TAURI_STORE_KEY_FILE_TYPES, config.file_types);
|
||||||
|
store.set(
|
||||||
|
TAURI_STORE_KEY_SEARCH_BY,
|
||||||
|
serde_json::to_value(config.search_by).unwrap(),
|
||||||
|
);
|
||||||
|
|
||||||
|
Ok(())
|
||||||
|
}
|
||||||
@@ -0,0 +1,186 @@
|
|||||||
|
use super::super::EXTENSION_ID;
|
||||||
|
use super::super::config::FileSearchConfig;
|
||||||
|
use super::super::config::SearchBy;
|
||||||
|
use crate::common::document::{DataSourceReference, Document};
|
||||||
|
use crate::extension::LOCAL_QUERY_SOURCE_TYPE;
|
||||||
|
use crate::extension::OnOpened;
|
||||||
|
use crate::util::file::get_file_icon;
|
||||||
|
use futures::stream::Stream;
|
||||||
|
use futures::stream::StreamExt;
|
||||||
|
use std::os::fd::OwnedFd;
|
||||||
|
use std::path::Path;
|
||||||
|
use tokio::io::AsyncBufReadExt;
|
||||||
|
use tokio::io::BufReader;
|
||||||
|
use tokio::process::Child;
|
||||||
|
use tokio::process::Command;
|
||||||
|
use tokio_stream::wrappers::LinesStream;
|
||||||
|
|
||||||
|
/// `mdfind` won't return scores, we use this score for all the documents.
|
||||||
|
const SCORE: f64 = 1.0;
|
||||||
|
|
||||||
|
pub(crate) async fn hits(
|
||||||
|
query_string: &str,
|
||||||
|
from: usize,
|
||||||
|
size: usize,
|
||||||
|
config: &FileSearchConfig,
|
||||||
|
) -> Result<Vec<(Document, f64)>, String> {
|
||||||
|
let (mut iter, mut mdfind_child_process) =
|
||||||
|
execute_mdfind_query(&query_string, from, size, &config)?;
|
||||||
|
|
||||||
|
// Convert results to documents
|
||||||
|
let mut hits: Vec<(Document, f64)> = Vec::new();
|
||||||
|
while let Some(res_file_path) = iter.next().await {
|
||||||
|
let file_path = res_file_path.map_err(|io_err| io_err.to_string())?;
|
||||||
|
|
||||||
|
let icon = get_file_icon(file_path.clone()).await;
|
||||||
|
let file_path_of_type_path = camino::Utf8Path::new(&file_path);
|
||||||
|
let r#where = file_path_of_type_path
|
||||||
|
.parent()
|
||||||
|
.unwrap_or_else(|| {
|
||||||
|
panic!(
|
||||||
|
"expect path [{}] to have a parent, but it does not",
|
||||||
|
file_path
|
||||||
|
);
|
||||||
|
})
|
||||||
|
.to_string();
|
||||||
|
|
||||||
|
let file_name = file_path_of_type_path.file_name().unwrap_or_else(|| {
|
||||||
|
panic!(
|
||||||
|
"expect path [{}] to have a file name, but it does not",
|
||||||
|
file_path
|
||||||
|
);
|
||||||
|
});
|
||||||
|
let on_opened = OnOpened::Document {
|
||||||
|
url: file_path.clone(),
|
||||||
|
};
|
||||||
|
|
||||||
|
let doc = Document {
|
||||||
|
id: file_path.clone(),
|
||||||
|
title: Some(file_name.to_string()),
|
||||||
|
source: Some(DataSourceReference {
|
||||||
|
r#type: Some(LOCAL_QUERY_SOURCE_TYPE.into()),
|
||||||
|
name: Some(EXTENSION_ID.into()),
|
||||||
|
id: Some(EXTENSION_ID.into()),
|
||||||
|
icon: Some(String::from("font_Filesearch")),
|
||||||
|
}),
|
||||||
|
category: Some(r#where),
|
||||||
|
on_opened: Some(on_opened),
|
||||||
|
url: Some(file_path),
|
||||||
|
icon: Some(icon.to_string()),
|
||||||
|
..Default::default()
|
||||||
|
};
|
||||||
|
|
||||||
|
hits.push((doc, SCORE));
|
||||||
|
}
|
||||||
|
// Kill the mdfind process once we get the needed results to prevent zombie
|
||||||
|
// processes.
|
||||||
|
mdfind_child_process
|
||||||
|
.kill()
|
||||||
|
.await
|
||||||
|
.map_err(|e| format!("{:?}", e))?;
|
||||||
|
|
||||||
|
Ok(hits)
|
||||||
|
}
|
||||||
|
|
||||||
|
/// Return an array containing the `mdfind` command and its arguments.
|
||||||
|
fn build_mdfind_query(query_string: &str, config: &FileSearchConfig) -> Vec<String> {
|
||||||
|
let mut args = vec!["mdfind".to_string()];
|
||||||
|
|
||||||
|
match config.search_by {
|
||||||
|
SearchBy::Name => {
|
||||||
|
args.push(format!("kMDItemFSName == '*{}*'", query_string));
|
||||||
|
}
|
||||||
|
SearchBy::NameAndContents => {
|
||||||
|
args.push(format!(
|
||||||
|
"kMDItemFSName == '*{}*' || kMDItemTextContent == '{}'",
|
||||||
|
query_string, query_string
|
||||||
|
));
|
||||||
|
}
|
||||||
|
}
|
||||||
|
|
||||||
|
// Add search paths using -onlyin
|
||||||
|
for path in &config.search_paths {
|
||||||
|
if Path::new(path).exists() {
|
||||||
|
args.extend_from_slice(&["-onlyin".to_string(), path.to_string()]);
|
||||||
|
}
|
||||||
|
}
|
||||||
|
|
||||||
|
args
|
||||||
|
}
|
||||||
|
|
||||||
|
/// Spawn the `mdfind` child process and return an async iterator over its output,
|
||||||
|
/// allowing us to collect the results asynchronously.
|
||||||
|
///
|
||||||
|
/// # Return value:
|
||||||
|
///
|
||||||
|
/// * impl Stream: an async iterator that will yield the matched files
|
||||||
|
/// * Child: The handle to the mdfind process, we need to kill it once we
|
||||||
|
/// collect all the results to avoid zombie processes.
|
||||||
|
fn execute_mdfind_query(
|
||||||
|
query_string: &str,
|
||||||
|
from: usize,
|
||||||
|
size: usize,
|
||||||
|
config: &FileSearchConfig,
|
||||||
|
) -> Result<(impl Stream<Item = std::io::Result<String>>, Child), String> {
|
||||||
|
let args = build_mdfind_query(query_string, &config);
|
||||||
|
let (rx, tx) = std::io::pipe().unwrap();
|
||||||
|
let rx_owned = OwnedFd::from(rx);
|
||||||
|
let async_rx = tokio::net::unix::pipe::Receiver::from_owned_fd(rx_owned).unwrap();
|
||||||
|
let buffered_rx = BufReader::new(async_rx);
|
||||||
|
let lines = LinesStream::new(buffered_rx.lines());
|
||||||
|
|
||||||
|
let child = Command::new(&args[0])
|
||||||
|
.args(&args[1..])
|
||||||
|
.stdout(tx)
|
||||||
|
.stderr(std::process::Stdio::null())
|
||||||
|
.spawn()
|
||||||
|
.map_err(|e| format!("Failed to spawn mdfind: {}", e))?;
|
||||||
|
let config_clone = config.clone();
|
||||||
|
let iter = lines
|
||||||
|
.filter(move |res_path| {
|
||||||
|
std::future::ready({
|
||||||
|
match res_path {
|
||||||
|
Ok(path) => !should_be_filtered_out(&config_clone, path),
|
||||||
|
Err(_) => {
|
||||||
|
// Don't filter out Err() values
|
||||||
|
true
|
||||||
|
}
|
||||||
|
}
|
||||||
|
})
|
||||||
|
})
|
||||||
|
.skip(from)
|
||||||
|
.take(size);
|
||||||
|
|
||||||
|
Ok((iter, child))
|
||||||
|
}
|
||||||
|
|
||||||
|
/// If `file_path` should be removed from the search results given the filter
|
||||||
|
/// conditions specified in `config`.
|
||||||
|
fn should_be_filtered_out(config: &FileSearchConfig, file_path: &str) -> bool {
|
||||||
|
let is_excluded = config
|
||||||
|
.exclude_paths
|
||||||
|
.iter()
|
||||||
|
.any(|exclude_path| file_path.starts_with(exclude_path));
|
||||||
|
|
||||||
|
if is_excluded {
|
||||||
|
return true;
|
||||||
|
}
|
||||||
|
|
||||||
|
let matches_file_type = if config.file_types.is_empty() {
|
||||||
|
true
|
||||||
|
} else {
|
||||||
|
let path_obj = camino::Utf8Path::new(&file_path);
|
||||||
|
if let Some(extension) = path_obj.extension() {
|
||||||
|
config
|
||||||
|
.file_types
|
||||||
|
.iter()
|
||||||
|
.any(|file_type| file_type == extension)
|
||||||
|
} else {
|
||||||
|
// `config.file_types` is not empty, then the search results
|
||||||
|
// should have extensions.
|
||||||
|
false
|
||||||
|
}
|
||||||
|
};
|
||||||
|
|
||||||
|
!matches_file_type
|
||||||
|
}
|
||||||
@@ -0,0 +1,10 @@
#[cfg(target_os = "macos")]
mod macos;
#[cfg(target_os = "windows")]
mod windows;

// `hits()` function is platform-specific, export the corresponding impl.
#[cfg(target_os = "macos")]
pub(crate) use macos::hits;
#[cfg(target_os = "windows")]
pub(crate) use windows::hits;
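Both platform backends expose the same asynchronous entry point, so the rest of the extension only ever calls `implementation::hits` and never touches `mdfind` or Windows Search directly. The shared signature (copied from the two implementations in this compare, with stub types standing in for the crate's `Document` and `FileSearchConfig`) is:

// Stub types standing in for the crate's real `Document` and `FileSearchConfig`.
pub struct Document;
pub struct FileSearchConfig;

// The contract both `macos.rs` and `windows.rs` fulfil; the module above
// re-exports whichever implementation matches the target OS.
pub(crate) async fn hits(
    _query_string: &str,
    _from: usize,
    _size: usize,
    _config: &FileSearchConfig,
) -> Result<Vec<(Document, f64)>, String> {
    Ok(Vec::new())
}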
@@ -0,0 +1,751 @@
|
|||||||
|
//! # Credits
|
||||||
|
//!
|
||||||
|
//! https://github.com/IRONAGE-Park/rag-sample/blob/3f0ad8c8012026cd3a7e453d08f041609426cb91/src/native/windows.rs
|
||||||
|
//! is the starting point of this implementation.
|
||||||
|
|
||||||
|
use super::super::EXTENSION_ID;
|
||||||
|
use super::super::config::FileSearchConfig;
|
||||||
|
use super::super::config::SearchBy;
|
||||||
|
use crate::common::document::{DataSourceReference, Document};
|
||||||
|
use crate::extension::LOCAL_QUERY_SOURCE_TYPE;
|
||||||
|
use crate::extension::OnOpened;
|
||||||
|
use crate::util::file::get_file_icon;
|
||||||
|
use windows::{
|
||||||
|
Win32::System::{
|
||||||
|
Com::{CLSCTX_INPROC_SERVER, CoCreateInstance},
|
||||||
|
Ole::{OleInitialize, OleUninitialize},
|
||||||
|
Search::{
|
||||||
|
DB_NULL_HCHAPTER, DBACCESSOR_ROWDATA, DBBINDING, DBMEMOWNER_CLIENTOWNED,
|
||||||
|
DBPARAMIO_NOTPARAM, DBPART_VALUE, DBTYPE_WSTR, HACCESSOR, IAccessor, ICommand,
|
||||||
|
ICommandText, IDBCreateCommand, IDBCreateSession, IDBInitialize, IDataInitialize,
|
||||||
|
IRowset, MSDAINITIALIZE,
|
||||||
|
},
|
||||||
|
},
|
||||||
|
core::{GUID, IUnknown, Interface, PWSTR, w},
|
||||||
|
};
|
||||||
|
|
||||||
|
/// Owned version of `PWSTR` that holds the heap memory.
|
||||||
|
///
|
||||||
|
/// Use `as_pwstr()` to convert it to a raw pointer.
|
||||||
|
struct PwStrOwned(Vec<u16>);
|
||||||
|
|
||||||
|
impl PwStrOwned {
|
||||||
|
/// # SAFETY
|
||||||
|
///
|
||||||
|
/// The returned `PWSTR` is basically a raw pointer, it is only valid within the
|
||||||
|
/// lifetime of `PwStrOwned`.
|
||||||
|
unsafe fn as_pwstr(&mut self) -> PWSTR {
|
||||||
|
let raw_ptr = self.0.as_mut_ptr();
|
||||||
|
PWSTR::from_raw(raw_ptr)
|
||||||
|
}
|
||||||
|
}
|
||||||
|
|
||||||
|
/// Construct `PwStrOwned` from any `str`.
|
||||||
|
impl<S: AsRef<str> + ?Sized> From<&S> for PwStrOwned {
|
||||||
|
fn from(value: &S) -> Self {
|
||||||
|
let mut utf16_bytes = value.as_ref().encode_utf16().collect::<Vec<u16>>();
|
||||||
|
utf16_bytes.push(0); // the tailing NULL
|
||||||
|
|
||||||
|
PwStrOwned(utf16_bytes)
|
||||||
|
}
|
||||||
|
}
|
||||||
|
|
||||||
|
/// Helper function to replace unsupported characters with whitespace.
|
||||||
|
///
|
||||||
|
/// Windows search will error out if it encounters these characters.
|
||||||
|
///
|
||||||
|
/// The complete list of unsupported characters is unknown and we don't know how
|
||||||
|
/// to escape them, so let's replace them.
|
||||||
|
fn query_string_cleanup(old: &str) -> String {
|
||||||
|
const UNSUPPORTED_CHAR: [char; 2] = ['\'', '\n'];
|
||||||
|
|
||||||
|
// Using len in bytes is ok
|
||||||
|
let mut chars = Vec::with_capacity(old.len());
|
||||||
|
for char in old.chars() {
|
||||||
|
if UNSUPPORTED_CHAR.contains(&char) {
|
||||||
|
chars.push(' ');
|
||||||
|
} else {
|
||||||
|
chars.push(char);
|
||||||
|
}
|
||||||
|
}
|
||||||
|
|
||||||
|
chars.into_iter().collect()
|
||||||
|
}
|
||||||
|
|
||||||
|
/// Helper function to construct the Windows Search SQL.
|
||||||
|
///
|
||||||
|
/// Paging is not natively supported by windows Search SQL, it only supports `size`
|
||||||
|
/// via the `TOP` keyword ("SELECT TOP {n} {columns}"). The SQL returned by this
|
||||||
|
/// function will have `{n}` set to `from + size`, then we will manually implement
|
||||||
|
/// paging.
|
||||||
|
fn query_sql(query_string: &str, from: usize, size: usize, config: &FileSearchConfig) -> String {
|
||||||
|
let top_n = from
|
||||||
|
.checked_add(size)
|
||||||
|
.expect("[from + size] cannot fit into an [usize]");
|
||||||
|
|
||||||
|
// System.ItemUrl is a column that contains the file path
|
||||||
|
// example: "file:C:/Users/desktop.ini"
|
||||||
|
//
|
||||||
|
// System.Search.Rank is the relevance score
|
||||||
|
let mut sql = format!(
|
||||||
|
"SELECT TOP {} System.ItemUrl, System.Search.Rank FROM SystemIndex WHERE",
|
||||||
|
top_n
|
||||||
|
);
|
||||||
|
|
||||||
|
let query_string = query_string_cleanup(query_string);
|
||||||
|
|
||||||
|
let search_by_predicate = match config.search_by {
|
||||||
|
SearchBy::Name => {
|
||||||
|
// `contains(System.FileName, '{query_string}')` would be faster
|
||||||
|
// because it uses inverted index, but that's not what we want
|
||||||
|
// due to the limitation of tokenization. For example, suppose "Coco AI.rs"
|
||||||
|
// will be tokenized to `["Coco", "AI", "rs"]`, then if users search
|
||||||
|
// via `Co`, this file won't be returned because term `Co` does not
|
||||||
|
// exist in the index.
|
||||||
|
//
|
||||||
|
// So we use wildcard instead even though it is slower.
|
||||||
|
format!("(System.FileName LIKE '%{query_string}%')")
|
||||||
|
}
|
||||||
|
SearchBy::NameAndContents => {
|
||||||
|
// Windows File Search does not support searching by file content.
|
||||||
|
//
|
||||||
|
// `CONTAINS('query_string')` would search all columns for `query_string`,
|
||||||
|
// this is the closest solution we have.
|
||||||
|
format!("((System.FileName LIKE '%{query_string}%') OR CONTAINS('{query_string}'))")
|
||||||
|
}
|
||||||
|
};
|
||||||
|
|
||||||
|
let search_paths_predicate: Option<String> = {
|
||||||
|
if config.search_paths.is_empty() {
|
||||||
|
None
|
||||||
|
} else {
|
||||||
|
let mut output = String::from("(");
|
||||||
|
|
||||||
|
for (idx, search_path) in config.search_paths.iter().enumerate() {
|
||||||
|
if idx != 0 {
|
||||||
|
output.push_str(" OR ");
|
||||||
|
}
|
||||||
|
|
||||||
|
output.push_str("SCOPE = 'file:");
|
||||||
|
output.push_str(&search_path);
|
||||||
|
output.push('\'');
|
||||||
|
}
|
||||||
|
|
||||||
|
output.push(')');
|
||||||
|
|
||||||
|
Some(output)
|
||||||
|
}
|
||||||
|
};
|
||||||
|
|
||||||
|
let exclude_paths_predicate: Option<String> = {
|
||||||
|
if config.exclude_paths.is_empty() {
|
||||||
|
None
|
||||||
|
} else {
|
||||||
|
let mut output = String::from("(");
|
||||||
|
|
||||||
|
for (idx, exclude_path) in config.exclude_paths.iter().enumerate() {
|
||||||
|
if idx != 0 {
|
||||||
|
output.push_str(" AND ");
|
||||||
|
}
|
||||||
|
|
||||||
|
output.push_str("(NOT SCOPE = 'file:");
|
||||||
|
output.push_str(&exclude_path);
|
||||||
|
output.push('\'');
|
||||||
|
output.push(')');
|
||||||
|
}
|
||||||
|
|
||||||
|
output.push(')');
|
||||||
|
|
||||||
|
Some(output)
|
||||||
|
}
|
||||||
|
};
|
||||||
|
|
||||||
|
let file_types_predicate: Option<String> = {
|
||||||
|
if config.file_types.is_empty() {
|
||||||
|
None
|
||||||
|
} else {
|
||||||
|
let mut output = String::from("(");
|
||||||
|
|
||||||
|
for (idx, file_type) in config.file_types.iter().enumerate() {
|
||||||
|
if idx != 0 {
|
||||||
|
output.push_str(" OR ");
|
||||||
|
}
|
||||||
|
|
||||||
|
// NOTE that this column contains a starting dot
|
||||||
|
output.push_str("System.FileExtension = '.");
|
||||||
|
output.push_str(&file_type);
|
||||||
|
output.push('\'');
|
||||||
|
}
|
||||||
|
|
||||||
|
output.push(')');
|
||||||
|
|
||||||
|
Some(output)
|
||||||
|
}
|
||||||
|
};
|
||||||
|
|
||||||
|
sql.push(' ');
|
||||||
|
sql.push_str(search_by_predicate.as_str());
|
||||||
|
if let Some(search_paths_predicate) = search_paths_predicate {
|
||||||
|
sql.push_str(" AND ");
|
||||||
|
sql.push_str(search_paths_predicate.as_str());
|
||||||
|
}
|
||||||
|
if let Some(exclude_paths_predicate) = exclude_paths_predicate {
|
||||||
|
sql.push_str(" AND ");
|
||||||
|
sql.push_str(exclude_paths_predicate.as_str());
|
||||||
|
}
|
||||||
|
if let Some(file_types_predicate) = file_types_predicate {
|
||||||
|
sql.push_str(" AND ");
|
||||||
|
sql.push_str(file_types_predicate.as_str());
|
||||||
|
}
|
||||||
|
|
||||||
|
sql
|
||||||
|
}
|
||||||
|
|
||||||
|
/// Default GUID for Search.CollatorDSO.1
|
||||||
|
const DBGUID_DEFAULT: GUID = GUID {
|
||||||
|
data1: 0xc8b521fb,
|
||||||
|
data2: 0x5cf3,
|
||||||
|
data3: 0x11ce,
|
||||||
|
data4: [0xad, 0xe5, 0x00, 0xaa, 0x00, 0x44, 0x77, 0x3d],
|
||||||
|
};
|
||||||
|
|
||||||
|
unsafe fn create_accessor_handle(accessor: &IAccessor, index: usize) -> Result<HACCESSOR, String> {
|
||||||
|
let bindings = DBBINDING {
|
||||||
|
iOrdinal: index,
|
||||||
|
obValue: 0,
|
||||||
|
obStatus: 0,
|
||||||
|
obLength: 0,
|
||||||
|
dwPart: DBPART_VALUE.0 as u32,
|
||||||
|
dwMemOwner: DBMEMOWNER_CLIENTOWNED.0 as u32,
|
||||||
|
eParamIO: DBPARAMIO_NOTPARAM.0 as u32,
|
||||||
|
cbMaxLen: 512,
|
||||||
|
dwFlags: 0,
|
||||||
|
wType: DBTYPE_WSTR.0 as u16,
|
||||||
|
bPrecision: 0,
|
||||||
|
bScale: 0,
|
||||||
|
..Default::default()
|
||||||
|
};
|
||||||
|
let mut status = 0;
|
||||||
|
let mut accessor_handle = HACCESSOR::default();
|
||||||
|
unsafe {
|
||||||
|
accessor
|
||||||
|
.CreateAccessor(
|
||||||
|
DBACCESSOR_ROWDATA.0 as u32,
|
||||||
|
1,
|
||||||
|
&bindings,
|
||||||
|
0,
|
||||||
|
&mut accessor_handle,
|
||||||
|
Some(&mut status),
|
||||||
|
)
|
||||||
|
.map_err(|e| e.to_string())?;
|
||||||
|
}
|
||||||
|
|
||||||
|
Ok(accessor_handle)
|
||||||
|
}
|
||||||
|
|
||||||
|
fn create_db_initialize() -> Result<IDBInitialize, String> {
|
||||||
|
unsafe {
|
||||||
|
let data_init: IDataInitialize =
|
||||||
|
CoCreateInstance(&MSDAINITIALIZE, None, CLSCTX_INPROC_SERVER)
|
||||||
|
.map_err(|e| e.to_string())?;
|
||||||
|
|
||||||
|
let mut unknown: Option<IUnknown> = None;
|
||||||
|
data_init
|
||||||
|
.GetDataSource(
|
||||||
|
None,
|
||||||
|
CLSCTX_INPROC_SERVER.0,
|
||||||
|
w!("provider=Search.CollatorDSO.1;EXTENDED PROPERTIES=\"Application=Windows\""),
|
||||||
|
&IDBInitialize::IID,
|
||||||
|
&mut unknown as *mut _ as *mut _,
|
||||||
|
)
|
||||||
|
.map_err(|e| e.to_string())?;
|
||||||
|
|
||||||
|
Ok(unknown.unwrap().cast().map_err(|e| e.to_string())?)
|
||||||
|
}
|
||||||
|
}
|
||||||
|
|
||||||
|
fn create_command(db_init: IDBInitialize) -> Result<ICommandText, String> {
|
||||||
|
unsafe {
|
||||||
|
let db_create_session: IDBCreateSession = db_init.cast().map_err(|e| e.to_string())?;
|
||||||
|
let session: IUnknown = db_create_session
|
||||||
|
.CreateSession(None, &IUnknown::IID)
|
||||||
|
.map_err(|e| e.to_string())?;
|
||||||
|
let db_create_command: IDBCreateCommand = session.cast().map_err(|e| e.to_string())?;
|
||||||
|
Ok(db_create_command
|
||||||
|
.CreateCommand(None, &ICommand::IID)
|
||||||
|
.map_err(|e| e.to_string())?
|
||||||
|
.cast()
|
||||||
|
.map_err(|e| e.to_string())?)
|
||||||
|
}
|
||||||
|
}
|
||||||
|
|
||||||
|
fn execute_windows_search_sql(sql_query: &str) -> Result<Vec<(String, String)>, String> {
|
||||||
|
unsafe {
|
||||||
|
let mut pwstr_owned_sql = PwStrOwned::from(sql_query);
|
||||||
|
// SAFETY: pwstr_owned_sql will live for the whole lifetime of this function.
|
||||||
|
let sql_query = pwstr_owned_sql.as_pwstr();
|
||||||
|
|
||||||
|
let db_init = create_db_initialize()?;
|
||||||
|
db_init.Initialize().map_err(|e| e.to_string())?;
|
||||||
|
let command = create_command(db_init)?;
|
||||||
|
|
||||||
|
// Set the command text
|
||||||
|
command
|
||||||
|
.SetCommandText(&DBGUID_DEFAULT, sql_query)
|
||||||
|
.map_err(|e| e.to_string())?;
|
||||||
|
|
||||||
|
// Execute the command
|
||||||
|
let mut rowset: Option<IRowset> = None;
|
||||||
|
command
|
||||||
|
.Execute(
|
||||||
|
None,
|
||||||
|
&IRowset::IID,
|
||||||
|
None,
|
||||||
|
None,
|
||||||
|
Some(&mut rowset as *mut _ as *mut _),
|
||||||
|
)
|
||||||
|
.map_err(|e| e.to_string())?;
|
||||||
|
let rowset = rowset.ok_or_else(|| {
|
||||||
|
format!(
|
||||||
|
"No rowset returned for query: {}",
|
||||||
|
// SAFETY: the raw pointer is not dangling
|
||||||
|
sql_query
|
||||||
|
.to_string()
|
||||||
|
.expect("the conversion should work as `sql_query` was created from a String",)
|
||||||
|
)
|
||||||
|
})?;
|
||||||
|
|
||||||
|
let accessor: IAccessor = rowset
|
||||||
|
.cast()
|
||||||
|
.map_err(|e| format!("Failed to cast to IAccessor: {}", e.to_string()))?;
|
||||||
|
|
||||||
|
let mut output = Vec::new();
|
||||||
|
let mut count = 0;
|
||||||
|
loop {
|
||||||
|
let mut rows_fetched = 0;
|
||||||
|
let mut row_handles = [std::ptr::null_mut(); 1];
|
||||||
|
let result = rowset.GetNextRows(
|
||||||
|
DB_NULL_HCHAPTER as usize,
|
||||||
|
0,
|
||||||
|
&mut rows_fetched,
|
||||||
|
&mut row_handles,
|
||||||
|
);
|
||||||
|
if result.is_err() {
|
||||||
|
break;
|
||||||
|
}
|
||||||
|
if rows_fetched == 0 {
|
||||||
|
break;
|
||||||
|
}
|
||||||
|
|
||||||
|
let mut data = Vec::new();
|
||||||
|
|
||||||
|
for i in 0..2 {
|
||||||
|
let mut item_name = [0u16; 512];
|
||||||
|
|
||||||
|
let accessor_handle = create_accessor_handle(&accessor, i + 1)?;
|
||||||
|
rowset
|
||||||
|
.GetData(
|
||||||
|
*row_handles[0],
|
||||||
|
accessor_handle,
|
||||||
|
item_name.as_mut_ptr() as *mut _,
|
||||||
|
)
|
||||||
|
.map_err(|e| {
|
||||||
|
format!(
|
||||||
|
"Failed to get data at count {}, index {}: {}",
|
||||||
|
count,
|
||||||
|
i,
|
||||||
|
e.to_string()
|
||||||
|
)
|
||||||
|
})?;
|
||||||
|
let name = String::from_utf16_lossy(&item_name);
|
||||||
|
// Remove null characters
|
||||||
|
data.push(name.trim_end_matches('\u{0000}').to_string());
|
||||||
|
|
||||||
|
accessor
|
||||||
|
.ReleaseAccessor(accessor_handle, None)
|
||||||
|
.map_err(|e| {
|
||||||
|
format!(
|
||||||
|
"Failed to release accessor at count {}, index {}: {}",
|
||||||
|
count,
|
||||||
|
i,
|
||||||
|
e.to_string()
|
||||||
|
)
|
||||||
|
})?;
|
||||||
|
}
|
||||||
|
|
||||||
|
output.push((data[0].clone(), data[1].clone()));
|
||||||
|
|
||||||
|
count += 1;
|
||||||
|
rowset
|
||||||
|
.ReleaseRows(
|
||||||
|
1,
|
||||||
|
row_handles[0],
|
||||||
|
std::ptr::null_mut(),
|
||||||
|
std::ptr::null_mut(),
|
||||||
|
std::ptr::null_mut(),
|
||||||
|
)
|
||||||
|
.map_err(|e| {
|
||||||
|
format!(
|
||||||
|
"Failed to release rows at count {}: {}",
|
||||||
|
count,
|
||||||
|
e.to_string()
|
||||||
|
)
|
||||||
|
})?;
|
||||||
|
}
|
||||||
|
|
||||||
|
Ok(output)
|
||||||
|
}
|
||||||
|
}
|
||||||
|
|
||||||
|
pub(crate) async fn hits(
|
||||||
|
query_string: &str,
|
||||||
|
from: usize,
|
||||||
|
size: usize,
|
||||||
|
config: &FileSearchConfig,
|
||||||
|
) -> Result<Vec<(Document, f64)>, String> {
|
||||||
|
let sql = query_sql(query_string, from, size, config);
|
||||||
|
unsafe { OleInitialize(None).map_err(|e| e.to_string())? };
|
||||||
|
let result = execute_windows_search_sql(&sql)?;
|
||||||
|
unsafe { OleUninitialize() };
|
||||||
|
// .take(size) is not needed as `result` will contain `from+size` files at most
|
||||||
|
let result_with_paging = result.into_iter().skip(from);
|
||||||
|
// result_with_paging won't contain more than `size` entries
|
||||||
|
let mut hits = Vec::with_capacity(size);
|
||||||
|
|
||||||
|
const ITEM_URL_PREFIX: &str = "file:";
|
||||||
|
const ITEM_URL_PREFIX_LEN: usize = ITEM_URL_PREFIX.len();
|
||||||
|
for (item_url, score_str) in result_with_paging {
|
||||||
|
// path returned from Windows Search contains a prefix, we need to trim it.
|
||||||
|
//
|
||||||
|
// "file:C:/Users/desktop.ini" => "C:/Users/desktop.ini"
|
||||||
|
let file_path = &item_url[ITEM_URL_PREFIX_LEN..];
|
||||||
|
|
||||||
|
let icon = get_file_icon(file_path.to_string()).await;
|
||||||
|
let file_path_of_type_path = camino::Utf8Path::new(&file_path);
|
||||||
|
let r#where = file_path_of_type_path
|
||||||
|
.parent()
|
||||||
|
.unwrap_or_else(|| {
|
||||||
|
panic!(
|
||||||
|
"expect path [{}] to have a parent, but it does not",
|
||||||
|
file_path
|
||||||
|
);
|
||||||
|
})
|
||||||
|
.to_string();
|
||||||
|
|
||||||
|
let file_name = file_path_of_type_path.file_name().unwrap_or_else(|| {
|
||||||
|
panic!(
|
||||||
|
"expect path [{}] to have a file name, but it does not",
|
||||||
|
file_path
|
||||||
|
);
|
||||||
|
});
|
||||||
|
let on_opened = OnOpened::Document {
|
||||||
|
url: file_path.to_string(),
|
||||||
|
};
|
||||||
|
|
||||||
|
let doc = Document {
|
||||||
|
id: file_path.to_string(),
|
||||||
|
title: Some(file_name.to_string()),
|
||||||
|
source: Some(DataSourceReference {
|
||||||
|
r#type: Some(LOCAL_QUERY_SOURCE_TYPE.into()),
|
||||||
|
name: Some(EXTENSION_ID.into()),
|
||||||
|
id: Some(EXTENSION_ID.into()),
|
||||||
|
icon: Some(String::from("font_Filesearch")),
|
||||||
|
}),
|
||||||
|
category: Some(r#where),
|
||||||
|
on_opened: Some(on_opened),
|
||||||
|
url: Some(file_path.into()),
|
||||||
|
icon: Some(icon.to_string()),
|
||||||
|
..Default::default()
|
||||||
|
};
|
||||||
|
|
||||||
|
let score: f64 = score_str.parse().expect(
|
||||||
|
"System.Search.Rank should be in range [0, 1000], which should be valid for [f64]",
|
||||||
|
);
|
||||||
|
|
||||||
|
hits.push((doc, score));
|
||||||
|
}
|
||||||
|
|
||||||
|
Ok(hits)
|
||||||
|
}
|
||||||
|
|
||||||
|
// Skip these tests in our CI, they fail with the following error
|
||||||
|
// "SQL is invalid: "0x80041820""
|
||||||
|
//
|
||||||
|
// I have no idea about the underlying root cause
|
||||||
|
#[cfg(all(test, not(ci)))]
|
||||||
|
mod test_windows_search {
|
||||||
|
use super::*;
|
||||||
|
|
||||||
|
/// Helper function for ensuring `sql` is valid SQL by actually executing it.
|
||||||
|
fn ensure_it_is_valid_sql(sql: &str) {
|
||||||
|
unsafe { OleInitialize(None).unwrap() };
|
||||||
|
execute_windows_search_sql(&sql).expect("SQL is invalid");
|
||||||
|
unsafe { OleUninitialize() };
|
||||||
|
}
|
||||||
|
|
||||||
|
#[test]
|
||||||
|
fn test_query_sql_empty_config_search_by_name() {
|
||||||
|
let config = FileSearchConfig {
|
||||||
|
search_paths: Vec::new(),
|
||||||
|
exclude_paths: Vec::new(),
|
||||||
|
file_types: Vec::new(),
|
||||||
|
search_by: SearchBy::Name,
|
||||||
|
};
|
||||||
|
let sql = query_sql("coco", 0, 10, &config);
|
||||||
|
|
||||||
|
assert_eq!(
|
||||||
|
sql,
|
||||||
|
"SELECT TOP 10 System.ItemUrl, System.Search.Rank FROM SystemIndex WHERE (System.FileName LIKE '%coco%')"
|
||||||
|
);
|
||||||
|
ensure_it_is_valid_sql(&sql);
|
||||||
|
}
|
||||||
|
|
||||||
|
#[test]
|
||||||
|
fn test_query_sql_empty_config_search_by_name_and_content() {
|
||||||
|
let config = FileSearchConfig {
|
||||||
|
search_paths: Vec::new(),
|
||||||
|
exclude_paths: Vec::new(),
|
||||||
|
file_types: Vec::new(),
|
||||||
|
search_by: SearchBy::NameAndContents,
|
||||||
|
};
|
||||||
|
let sql = query_sql("coco", 0, 10, &config);
|
||||||
|
|
||||||
|
assert_eq!(
|
||||||
|
sql,
|
||||||
|
"SELECT TOP 10 System.ItemUrl, System.Search.Rank FROM SystemIndex WHERE ((System.FileName LIKE '%coco%') OR CONTAINS('coco'))"
|
||||||
|
);
|
||||||
|
ensure_it_is_valid_sql(&sql);
|
||||||
|
}
|
||||||
|
|
||||||
|
#[test]
|
||||||
|
fn test_query_sql_with_search_paths() {
|
||||||
|
let config = FileSearchConfig {
|
||||||
|
search_paths: vec!["C:/Users/".into()],
|
||||||
|
exclude_paths: Vec::new(),
|
||||||
|
file_types: Vec::new(),
|
||||||
|
search_by: SearchBy::Name,
|
||||||
|
};
|
||||||
|
let sql = query_sql("coco", 0, 10, &config);
|
||||||
|
|
||||||
|
assert_eq!(
|
||||||
|
sql,
|
||||||
|
"SELECT TOP 10 System.ItemUrl, System.Search.Rank FROM SystemIndex WHERE (System.FileName LIKE '%coco%') AND (SCOPE = 'file:C:/Users/')"
|
||||||
|
);
|
||||||
|
ensure_it_is_valid_sql(&sql);
|
||||||
|
}
|
||||||
|
|
||||||
|
#[test]
|
||||||
|
fn test_query_sql_with_multiple_search_paths() {
|
||||||
|
let config = FileSearchConfig {
|
||||||
|
search_paths: vec![
|
||||||
|
"C:/Users/".into(),
|
||||||
|
"D:/Projects/".into(),
|
||||||
|
"E:/Documents/".into(),
|
||||||
|
],
|
||||||
|
exclude_paths: Vec::new(),
|
||||||
|
file_types: Vec::new(),
|
||||||
|
search_by: SearchBy::Name,
|
||||||
|
};
|
||||||
|
let sql = query_sql("test", 0, 5, &config);
|
||||||
|
|
||||||
|
assert_eq!(
|
||||||
|
sql,
|
||||||
|
"SELECT TOP 5 System.ItemUrl, System.Search.Rank FROM SystemIndex WHERE (System.FileName LIKE '%test%') AND (SCOPE = 'file:C:/Users/' OR SCOPE = 'file:D:/Projects/' OR SCOPE = 'file:E:/Documents/')"
|
||||||
|
);
|
||||||
|
ensure_it_is_valid_sql(&sql);
|
||||||
|
}
|
||||||
|
|
||||||
|
#[test]
|
||||||
|
fn test_query_sql_with_exclude_paths() {
|
||||||
|
let config = FileSearchConfig {
|
||||||
|
search_paths: Vec::new(),
|
||||||
|
exclude_paths: vec!["C:/Windows/".into()],
|
||||||
|
file_types: Vec::new(),
|
||||||
|
search_by: SearchBy::Name,
|
||||||
|
};
|
||||||
|
let sql = query_sql("file", 0, 20, &config);
|
||||||
|
|
||||||
|
assert_eq!(
|
||||||
|
sql,
|
||||||
|
"SELECT TOP 20 System.ItemUrl, System.Search.Rank FROM SystemIndex WHERE (System.FileName LIKE '%file%') AND ((NOT SCOPE = 'file:C:/Windows/'))"
|
||||||
|
);
|
||||||
|
ensure_it_is_valid_sql(&sql);
|
||||||
|
}
|
||||||
|
|
||||||
|
#[test]
|
||||||
|
fn test_query_sql_with_multiple_exclude_paths() {
|
||||||
|
let config = FileSearchConfig {
|
||||||
|
search_paths: Vec::new(),
|
||||||
|
exclude_paths: vec!["C:/Windows/".into(), "C:/System/".into(), "C:/Temp/".into()],
|
||||||
|
file_types: Vec::new(),
|
||||||
|
search_by: SearchBy::Name,
|
||||||
|
};
|
||||||
|
let sql = query_sql("data", 5, 15, &config);
|
||||||
|
|
||||||
|
assert_eq!(
|
||||||
|
sql,
|
||||||
|
"SELECT TOP 20 System.ItemUrl, System.Search.Rank FROM SystemIndex WHERE (System.FileName LIKE '%data%') AND ((NOT SCOPE = 'file:C:/Windows/') AND (NOT SCOPE = 'file:C:/System/') AND (NOT SCOPE = 'file:C:/Temp/'))"
|
||||||
|
);
|
||||||
|
ensure_it_is_valid_sql(&sql);
|
||||||
|
}
|
||||||
|
|
||||||
|
#[test]
|
||||||
|
fn test_query_sql_with_file_types() {
|
||||||
|
let config = FileSearchConfig {
|
||||||
|
search_paths: Vec::new(),
|
||||||
|
exclude_paths: Vec::new(),
|
||||||
|
file_types: vec!["txt".into()],
|
||||||
|
search_by: SearchBy::Name,
|
||||||
|
};
|
||||||
|
let sql = query_sql("readme", 0, 10, &config);
|
||||||
|
|
||||||
|
assert_eq!(
|
||||||
|
sql,
|
||||||
|
"SELECT TOP 10 System.ItemUrl, System.Search.Rank FROM SystemIndex WHERE (System.FileName LIKE '%readme%') AND (System.FileExtension = '.txt')"
|
||||||
|
);
|
||||||
|
ensure_it_is_valid_sql(&sql);
|
||||||
|
}
|
||||||
|
|
||||||
|
#[test]
|
||||||
|
fn test_query_sql_with_multiple_file_types() {
|
||||||
|
let config = FileSearchConfig {
|
||||||
|
search_paths: Vec::new(),
|
||||||
|
exclude_paths: Vec::new(),
|
||||||
|
file_types: vec!["rs".into(), "toml".into(), "md".into(), "json".into()],
|
||||||
|
search_by: SearchBy::Name,
|
||||||
|
};
|
||||||
|
let sql = query_sql("config", 0, 50, &config);
|
||||||
|
|
||||||
|
assert_eq!(
|
||||||
|
sql,
|
||||||
|
"SELECT TOP 50 System.ItemUrl, System.Search.Rank FROM SystemIndex WHERE (System.FileName LIKE '%config%') AND (System.FileExtension = '.rs' OR System.FileExtension = '.toml' OR System.FileExtension = '.md' OR System.FileExtension = '.json')"
|
||||||
|
);
|
||||||
|
ensure_it_is_valid_sql(&sql);
|
||||||
|
}
|
||||||
|
|
||||||
|
#[test]
|
||||||
|
fn test_query_sql_all_fields_combined() {
|
||||||
|
let config = FileSearchConfig {
|
||||||
|
search_paths: vec!["C:/Projects/".into(), "D:/Code/".into()],
|
||||||
|
exclude_paths: vec!["C:/Projects/temp/".into()],
|
||||||
|
file_types: vec!["rs".into(), "ts".into()],
|
||||||
|
search_by: SearchBy::Name,
|
||||||
|
};
|
||||||
|
let sql = query_sql("main", 10, 25, &config);
|
||||||
|
|
||||||
|
assert_eq!(
|
||||||
|
sql,
|
||||||
|
"SELECT TOP 35 System.ItemUrl, System.Search.Rank FROM SystemIndex WHERE (System.FileName LIKE '%main%') AND (SCOPE = 'file:C:/Projects/' OR SCOPE = 'file:D:/Code/') AND ((NOT SCOPE = 'file:C:/Projects/temp/')) AND (System.FileExtension = '.rs' OR System.FileExtension = '.ts')"
|
||||||
|
);
|
||||||
|
ensure_it_is_valid_sql(&sql);
|
||||||
|
}
|
||||||
|
|
||||||
|
#[test]
|
||||||
|
fn test_query_sql_with_special_characters() {
|
||||||
|
let config = FileSearchConfig {
|
||||||
|
search_paths: vec!["C:/Users/John Doe/".into()],
|
||||||
|
exclude_paths: Vec::new(),
|
||||||
|
file_types: vec!["c++".into()],
|
||||||
|
search_by: SearchBy::Name,
|
||||||
|
};
|
||||||
|
let sql = query_sql("hello-world", 0, 10, &config);
|
||||||
|
|
||||||
|
assert_eq!(
|
||||||
|
sql,
|
||||||
|
"SELECT TOP 10 System.ItemUrl, System.Search.Rank FROM SystemIndex WHERE (System.FileName LIKE '%hello-world%') AND (SCOPE = 'file:C:/Users/John Doe/') AND (System.FileExtension = '.c++')"
|
||||||
|
);
|
||||||
|
ensure_it_is_valid_sql(&sql);
|
||||||
|
}
|
||||||
|
|
||||||
|
#[test]
|
||||||
|
fn test_query_sql_edge_case_large_offset() {
|
||||||
|
let config = FileSearchConfig {
|
||||||
|
search_paths: Vec::new(),
|
||||||
|
exclude_paths: Vec::new(),
|
||||||
|
file_types: Vec::new(),
|
||||||
|
search_by: SearchBy::Name,
|
||||||
|
};
|
||||||
|
let sql = query_sql("test", 100, 50, &config);
|
||||||
|
|
||||||
|
assert_eq!(
|
||||||
|
sql,
|
||||||
|
"SELECT TOP 150 System.ItemUrl, System.Search.Rank FROM SystemIndex WHERE (System.FileName LIKE '%test%')"
|
||||||
|
);
|
||||||
|
ensure_it_is_valid_sql(&sql);
|
||||||
|
}
|
||||||
|
}
|
||||||
|
|
||||||
|
#[cfg(test)]
|
||||||
|
mod test {
|
||||||
|
use super::*;
|
||||||
|
|
||||||
|
#[test]
|
||||||
|
fn test_query_string_cleanup_no_unsupported_chars() {
|
||||||
|
let input = "hello world";
|
||||||
|
let result = query_string_cleanup(input);
|
||||||
|
assert_eq!(result, input);
|
||||||
|
}
|
||||||
|
|
||||||
|
#[test]
|
||||||
|
fn test_query_string_cleanup_single_quote() {
|
||||||
|
let input = "don't worry";
|
||||||
|
let result = query_string_cleanup(input);
|
||||||
|
assert_eq!(result, "don t worry");
|
||||||
|
}
|
||||||
|
|
||||||
|
#[test]
|
||||||
|
fn test_query_string_cleanup_newline() {
|
||||||
|
let input = "line1\nline2";
|
||||||
|
let result = query_string_cleanup(input);
|
||||||
|
assert_eq!(result, "line1 line2");
|
||||||
|
}
|
||||||
|
|
||||||
|
#[test]
|
||||||
|
fn test_query_string_cleanup_both_unsupported_chars() {
|
||||||
|
let input = "don't\nworry";
|
||||||
|
let result = query_string_cleanup(input);
|
||||||
|
assert_eq!(result, "don t worry");
|
||||||
|
}
|
||||||
|
|
||||||
|
#[test]
|
||||||
|
fn test_query_string_cleanup_multiple_single_quotes() {
|
||||||
|
let input = "it's a 'test' string";
|
||||||
|
let result = query_string_cleanup(input);
|
||||||
|
assert_eq!(result, "it s a test string");
|
||||||
|
}
|
||||||
|
|
||||||
|
#[test]
|
||||||
|
fn test_query_string_cleanup_multiple_newlines() {
|
||||||
|
let input = "line1\n\nline2\nline3";
|
||||||
|
let result = query_string_cleanup(input);
|
||||||
|
assert_eq!(result, "line1 line2 line3");
|
||||||
|
}
|
||||||
|
|
||||||
|
#[test]
|
||||||
|
fn test_query_string_cleanup_empty_string() {
|
||||||
|
let input = "";
|
||||||
|
let result = query_string_cleanup(input);
|
||||||
|
assert_eq!(result, input);
|
||||||
|
}
|
||||||
|
|
||||||
|
#[test]
|
||||||
|
fn test_query_string_cleanup_only_unsupported_chars() {
|
||||||
|
let input = "'\n'";
|
||||||
|
let result = query_string_cleanup(input);
|
||||||
|
assert_eq!(result, " ");
|
||||||
|
}
|
||||||
|
|
||||||
|
#[test]
|
||||||
|
fn test_query_string_cleanup_unicode_characters() {
|
||||||
|
let input = "héllo wörld's\nfile";
|
||||||
|
let result = query_string_cleanup(input);
|
||||||
|
assert_eq!(result, "héllo wörld s file");
|
||||||
|
}
|
||||||
|
|
||||||
|
#[test]
|
||||||
|
fn test_query_string_cleanup_special_chars_preserved() {
|
||||||
|
let input = "test@file#name$with%symbols";
|
||||||
|
let result = query_string_cleanup(input);
|
||||||
|
assert_eq!(result, input);
|
||||||
|
}
|
||||||
|
}
|
||||||
97
src-tauri/src/extension/built_in/file_search/mod.rs
Normal file
@@ -0,0 +1,97 @@
pub(crate) mod config;
pub(crate) mod implementation;

use super::super::LOCAL_QUERY_SOURCE_TYPE;
use crate::common::{
    error::SearchError,
    search::{QueryResponse, QuerySource, SearchQuery},
    traits::SearchSource,
};
use async_trait::async_trait;
use config::FileSearchConfig;
use hostname;
use tauri::AppHandle;

pub(crate) const EXTENSION_ID: &str = "File Search";

/// JSON file for this extension.
pub(crate) const PLUGIN_JSON_FILE: &str = r#"
{
  "id": "File Search",
  "name": "File Search",
  "platforms": ["macos", "windows"],
  "description": "Search files on your system",
  "icon": "font_Filesearch",
  "type": "extension"
}
"#;

pub struct FileSearchExtensionSearchSource;

#[async_trait]
impl SearchSource for FileSearchExtensionSearchSource {
    fn get_type(&self) -> QuerySource {
        QuerySource {
            r#type: LOCAL_QUERY_SOURCE_TYPE.into(),
            name: hostname::get()
                .unwrap_or(EXTENSION_ID.into())
                .to_string_lossy()
                .into(),
            id: EXTENSION_ID.into(),
        }
    }

    async fn search(
        &self,
        tauri_app_handle: AppHandle,
        query: SearchQuery,
    ) -> Result<QueryResponse, SearchError> {
        let Some(query_string) = query.query_strings.get("query") else {
            return Ok(QueryResponse {
                source: self.get_type(),
                hits: Vec::new(),
                total_hits: 0,
            });
        };
        let from = usize::try_from(query.from).expect("from too big");
        let size = usize::try_from(query.size).expect("size too big");

        let query_string = query_string.trim();
        if query_string.is_empty() {
            return Ok(QueryResponse {
                source: self.get_type(),
                hits: Vec::new(),
                total_hits: 0,
            });
        }

        // Get configuration from tauri store
        let config = FileSearchConfig::get(&tauri_app_handle);

        // If search paths are empty, then the hit should be empty.
        //
        // Without this, empty search paths will result in a mdfind that has no `-onlyin`
        // option, which will in turn query the whole disk volume.
        if config.search_paths.is_empty() {
            return Ok(QueryResponse {
                source: self.get_type(),
                hits: Vec::new(),
                total_hits: 0,
            });
        }

        // Execute search in a blocking task
        let query_source = self.get_type();

        let hits = implementation::hits(&query_string, from, size, &config)
            .await
            .map_err(SearchError::InternalError)?;

        let total_hits = hits.len();
        Ok(QueryResponse {
            source: query_source,
            hits,
            total_hits,
        })
    }
}
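The `config` and `implementation` modules referenced above are not part of this excerpt, so the actual search command is not visible here. The comment about `-onlyin` suggests the macOS path shells out to Spotlight's `mdfind`; the sketch below (the helper name `mdfind_paths` is made up) only illustrates why the empty `search_paths` guard matters, since an `mdfind` invocation without any `-onlyin` argument searches the entire volume.

// Hypothetical helper, assuming a Tokio runtime is available; not code from this commit.
use tokio::process::Command;

async fn mdfind_paths(query: &str, search_paths: &[String]) -> Result<Vec<String>, String> {
    let mut cmd = Command::new("mdfind");
    for path in search_paths {
        // Restrict the Spotlight query to the configured directories.
        cmd.arg("-onlyin").arg(path);
    }
    cmd.arg(query);

    let output = cmd.output().await.map_err(|e| e.to_string())?;
    if !output.status.success() {
        return Err(format!("mdfind exited with {}", output.status));
    }
    Ok(String::from_utf8_lossy(&output.stdout)
        .lines()
        .map(str::to_owned)
        .collect())
}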
@@ -1 +0,0 @@
@@ -3,42 +3,206 @@
 pub mod ai_overview;
 pub mod application;
 pub mod calculator;
-pub mod file_system;
+#[cfg(any(target_os = "macos", target_os = "windows"))]
+pub mod file_search;
 pub mod pizza_engine_runtime;
 pub mod quick_ai_access;

 use super::Extension;
-use crate::extension::{alter_extension_json_file, load_extension_from_json_file};
-use crate::{SearchSourceRegistry, GLOBAL_TAURI_APP_HANDLE};
-use std::path::PathBuf;
-use std::sync::LazyLock;
-use tauri::path::BaseDirectory;
-use tauri::Manager;
+use crate::SearchSourceRegistry;
+use crate::extension::built_in::application::{set_apps_hotkey, unset_apps_hotkey};
+use crate::extension::{
+    ExtensionBundleIdBorrowed, PLUGIN_JSON_FILE_NAME, alter_extension_json_file,
+};
+use anyhow::Context;
+use std::path::{Path, PathBuf};
+use tauri::{AppHandle, Manager, Runtime};

-pub(crate) static BUILT_IN_EXTENSION_DIRECTORY: LazyLock<PathBuf> = LazyLock::new(|| {
-    let mut resource_dir = GLOBAL_TAURI_APP_HANDLE
-        .get()
-        .expect("global tauri app handle not set")
-        .path()
-        .resolve("assets", BaseDirectory::Resource)
-        .expect(
+pub(crate) fn get_built_in_extension_directory<R: Runtime>(
+    tauri_app_handle: &AppHandle<R>,
+) -> PathBuf {
+    let mut resource_dir = tauri_app_handle.path().app_data_dir().expect(
         "User home directory not found, which should be impossible on desktop environments",
     );
-    resource_dir.push("extension");
+    resource_dir.push("built_in_extensions");

     resource_dir
-});
+}
+
+/// Helper function to load the built-in extension specified by `extension_id`, used
+/// in `list_built_in_extensions()`.
+///
+/// For built-in extensions, users are only allowed to edit these fields:
+///
+/// 1. alias (if this extension supports alias)
+/// 2. hotkey (if this extension supports hotkey)
+/// 3. enabled
+///
+/// If
+///
+/// 1. The above fields have invalid value
+/// 2. Other fields are modified
+///
+/// we ignore and reset them to the default value.
+async fn load_built_in_extension(
+    built_in_extensions_dir: &Path,
+    extension_id: &str,
+    default_plugin_json_file: &str,
+) -> Result<Extension, String> {
+    let mut extension_dir = built_in_extensions_dir.join(extension_id);
+    let mut default_plugin_json = serde_json::from_str::<Extension>(&default_plugin_json_file).unwrap_or_else(|e| {
+        panic!("the default extension {} file of built-in extension [{}] cannot be parsed as a valid [struct Extension], error [{}]", PLUGIN_JSON_FILE_NAME, extension_id, e);
+    });
+
+    if !extension_dir.try_exists().map_err(|e| e.to_string())? {
+        tokio::fs::create_dir_all(extension_dir.as_path())
+            .await
+            .map_err(|e| e.to_string())?;
+    }
+
+    let plugin_json_file_path = {
+        extension_dir.push(PLUGIN_JSON_FILE_NAME);
+        extension_dir
+    };
+
+    // If the JSON file does not exist, create a file with the default template and return.
+    if !plugin_json_file_path
+        .try_exists()
+        .map_err(|e| e.to_string())?
+    {
+        tokio::fs::write(plugin_json_file_path, default_plugin_json_file)
+            .await
+            .map_err(|e| e.to_string())?;
+
+        return Ok(default_plugin_json);
+    }
+
+    let plugin_json_file_content = tokio::fs::read_to_string(plugin_json_file_path.as_path())
+        .await
+        .map_err(|e| e.to_string())?;
+    let res_plugin_json = serde_json::from_str::<Extension>(&plugin_json_file_content);
+    let Ok(plugin_json) = res_plugin_json else {
+        log::warn!(
+            "user invalidated built-in extension [{}] file, overwriting it with the default template",
+            extension_id
+        );
+
+        // If the JSON file cannot be parsed as `struct Extension`, overwrite it with the default template and return.
+        tokio::fs::write(plugin_json_file_path, default_plugin_json_file)
+            .await
+            .map_err(|e| e.to_string())?;
+
+        return Ok(default_plugin_json);
+    };
+
+    // Users are only allowed to edit the below fields
+    // 1. alias (if this extension supports alias)
+    // 2. hotkey (if this extension supports hotkey)
+    // 3. enabled
+    // so we ignore all other fields.
+    let alias = if default_plugin_json.supports_alias_hotkey() {
+        plugin_json.alias.clone()
+    } else {
+        None
+    };
+    let hotkey = if default_plugin_json.supports_alias_hotkey() {
+        plugin_json.hotkey.clone()
+    } else {
+        None
+    };
+
+    let enabled = plugin_json.enabled;
+
+    default_plugin_json.alias = alias;
+    default_plugin_json.hotkey = hotkey;
+    default_plugin_json.enabled = enabled;
+
+    let final_plugin_json_file_content = serde_json::to_string_pretty(&default_plugin_json)
+        .expect("failed to serialize `struct Extension`");
+    tokio::fs::write(plugin_json_file_path, final_plugin_json_file_content)
+        .await
+        .map_err(|e| e.to_string())?;
+
+    Ok(default_plugin_json)
+}
+
+/// Return the built-in extension list.
+///
+/// Will create extension files when they are not found.
+///
+/// Users may put extension files in the built-in extension directory, but
+/// we do not care and will ignore them.
+///
+/// We only read alias/hotkey/enabled from the JSON file, we have ensured that if
+/// alias/hotkey is not supported, then it will be `None`. Besides that, no further
+/// validation is needed because nothing could go wrong.
+pub(crate) async fn list_built_in_extensions<R: Runtime>(
+    tauri_app_handle: &AppHandle<R>,
+) -> Result<Vec<Extension>, String> {
+    let dir = get_built_in_extension_directory(tauri_app_handle);
+
+    let mut built_in_extensions = Vec::new();
+    built_in_extensions.push(
+        load_built_in_extension(
+            &dir,
+            application::QUERYSOURCE_ID_DATASOURCE_ID_DATASOURCE_NAME,
+            application::PLUGIN_JSON_FILE,
+        )
+        .await?,
+    );
+    built_in_extensions.push(
+        load_built_in_extension(
+            &dir,
+            calculator::DATA_SOURCE_ID,
+            calculator::PLUGIN_JSON_FILE,
+        )
+        .await?,
+    );
+    built_in_extensions.push(
+        load_built_in_extension(
+            &dir,
+            ai_overview::EXTENSION_ID,
+            ai_overview::PLUGIN_JSON_FILE,
+        )
+        .await?,
+    );
+    built_in_extensions.push(
+        load_built_in_extension(
+            &dir,
+            quick_ai_access::EXTENSION_ID,
+            quick_ai_access::PLUGIN_JSON_FILE,
+        )
+        .await?,
+    );
+
+    cfg_if::cfg_if! {
+        if #[cfg(any(target_os = "macos", target_os = "windows"))] {
+            built_in_extensions.push(
+                load_built_in_extension(
+                    &dir,
+                    file_search::EXTENSION_ID,
+                    file_search::PLUGIN_JSON_FILE,
+                )
+                .await?,
+            );
+        }
+    }
+
+    Ok(built_in_extensions)
+}
+
-pub(super) async fn init_built_in_extension(
+pub(super) async fn init_built_in_extension<R: Runtime>(
+    tauri_app_handle: &AppHandle<R>,
     extension: &Extension,
     search_source_registry: &SearchSourceRegistry,
-) {
-    log::trace!("initializing built-in extensions");
+) -> Result<(), String> {
+    log::trace!("initializing built-in extensions [{}]", extension.id);

     if extension.id == application::QUERYSOURCE_ID_DATASOURCE_ID_DATASOURCE_NAME {
         search_source_registry
             .register_source(application::ApplicationSearchSource)
             .await;
+        set_apps_hotkey(&tauri_app_handle)?;
         log::debug!("built-in extension [{}] initialized", extension.id);
     }

@@ -49,39 +213,30 @@ pub(super) async fn init_built_in_extension(
             .await;
         log::debug!("built-in extension [{}] initialized", extension.id);
     }

+    cfg_if::cfg_if! {
+        if #[cfg(any(target_os = "macos", target_os = "windows"))] {
+            if extension.id == file_search::EXTENSION_ID {
+                let file_system_search = file_search::FileSearchExtensionSearchSource;
+                search_source_registry
+                    .register_source(file_system_search)
+                    .await;
+                log::debug!("built-in extension [{}] initialized", extension.id);
+            }
+        }
+    }
+
+    Ok(())
 }

-pub(crate) fn is_extension_built_in(extension_id: &str) -> bool {
-    if extension_id == application::QUERYSOURCE_ID_DATASOURCE_ID_DATASOURCE_NAME {
-        return true;
-    }
-
-    if extension_id.starts_with(&format!(
-        "{}.",
-        application::QUERYSOURCE_ID_DATASOURCE_ID_DATASOURCE_NAME
-    )) {
-        return true;
-    }
-
-    if extension_id == calculator::DATA_SOURCE_ID {
-        return true;
-    }
-
-    if extension_id == quick_ai_access::EXTENSION_ID {
-        return true;
-    }
-
-    if extension_id == ai_overview::EXTENSION_ID {
-        return true;
-    }
-
-    false
-}
-
-pub(crate) async fn enable_built_in_extension(extension_id: &str) -> Result<(), String> {
-    let tauri_app_handle = GLOBAL_TAURI_APP_HANDLE
-        .get()
-        .expect("global tauri app handle not set");
+pub(crate) fn is_extension_built_in(bundle_id: &ExtensionBundleIdBorrowed<'_>) -> bool {
+    bundle_id.developer.is_none()
+}
+
+pub(crate) async fn enable_built_in_extension<R: Runtime>(
+    tauri_app_handle: &AppHandle<R>,
+    bundle_id: &ExtensionBundleIdBorrowed<'_>,
+) -> Result<(), String> {
     let search_source_registry_tauri_state = tauri_app_handle.state::<SearchSourceRegistry>();

     let update_extension = |extension: &mut Extension| -> Result<(), String> {
@@ -89,13 +244,17 @@ pub(crate) async fn enable_built_in_extension(extension_id: &str) -> Result<(),
         Ok(())
     };

-    if extension_id == application::QUERYSOURCE_ID_DATASOURCE_ID_DATASOURCE_NAME {
+    if bundle_id.extension_id == application::QUERYSOURCE_ID_DATASOURCE_ID_DATASOURCE_NAME
+        && bundle_id.sub_extension_id.is_none()
+    {
         search_source_registry_tauri_state
             .register_source(application::ApplicationSearchSource)
             .await;
+        set_apps_hotkey(tauri_app_handle)?;

         alter_extension_json_file(
-            &BUILT_IN_EXTENSION_DIRECTORY.as_path(),
-            extension_id,
+            &get_built_in_extension_directory(tauri_app_handle),
+            bundle_id,
             update_extension,
         )?;

@@ -103,54 +262,69 @@ pub(crate) async fn enable_built_in_extension(extension_id: &str) -> Result<(),
     }

     // Check if this is an application
-    let application_prefix = format!(
-        "{}.",
-        application::QUERYSOURCE_ID_DATASOURCE_ID_DATASOURCE_NAME
-    );
-    if extension_id.starts_with(&application_prefix) {
-        let app_path = &extension_id[application_prefix.len()..];
+    if bundle_id.extension_id == application::QUERYSOURCE_ID_DATASOURCE_ID_DATASOURCE_NAME
+        && bundle_id.sub_extension_id.is_some()
+    {
+        let app_path = bundle_id.sub_extension_id.expect("just checked it is Some");
         application::enable_app_search(tauri_app_handle, app_path)?;
         return Ok(());
     }

-    if extension_id == calculator::DATA_SOURCE_ID {
+    if bundle_id.extension_id == calculator::DATA_SOURCE_ID {
         let calculator_search = calculator::CalculatorSource::new(2000f64);
         search_source_registry_tauri_state
             .register_source(calculator_search)
             .await;
         alter_extension_json_file(
-            &BUILT_IN_EXTENSION_DIRECTORY.as_path(),
-            extension_id,
+            &get_built_in_extension_directory(tauri_app_handle),
+            bundle_id,
             update_extension,
         )?;
         return Ok(());
     }

-    if extension_id == quick_ai_access::EXTENSION_ID {
+    if bundle_id.extension_id == quick_ai_access::EXTENSION_ID {
         alter_extension_json_file(
-            &BUILT_IN_EXTENSION_DIRECTORY.as_path(),
-            extension_id,
+            &get_built_in_extension_directory(tauri_app_handle),
+            bundle_id,
             update_extension,
         )?;
         return Ok(());
     }

-    if extension_id == ai_overview::EXTENSION_ID {
+    if bundle_id.extension_id == ai_overview::EXTENSION_ID {
         alter_extension_json_file(
-            &BUILT_IN_EXTENSION_DIRECTORY.as_path(),
-            extension_id,
+            &get_built_in_extension_directory(tauri_app_handle),
+            bundle_id,
             update_extension,
         )?;
         return Ok(());
     }

+    cfg_if::cfg_if! {
+        if #[cfg(any(target_os = "macos", target_os = "windows"))] {
+            if bundle_id.extension_id == file_search::EXTENSION_ID {
+                let file_system_search = file_search::FileSearchExtensionSearchSource;
+                search_source_registry_tauri_state
+                    .register_source(file_system_search)
+                    .await;
+                alter_extension_json_file(
+                    &get_built_in_extension_directory(tauri_app_handle),
+                    bundle_id,
+                    update_extension,
+                )?;
+                return Ok(());
+            }
+        }
+    }
+
     Ok(())
 }

-pub(crate) async fn disable_built_in_extension(extension_id: &str) -> Result<(), String> {
-    let tauri_app_handle = GLOBAL_TAURI_APP_HANDLE
-        .get()
-        .expect("global tauri app handle not set");
+pub(crate) async fn disable_built_in_extension<R: Runtime>(
+    tauri_app_handle: &AppHandle<R>,
+    bundle_id: &ExtensionBundleIdBorrowed<'_>,
+) -> Result<(), String> {
     let search_source_registry_tauri_state = tauri_app_handle.state::<SearchSourceRegistry>();

     let update_extension = |extension: &mut Extension| -> Result<(), String> {
@@ -158,153 +332,212 @@ pub(crate) async fn disable_built_in_extension(extension_id: &str) -> Result<(),
         Ok(())
     };

-    if extension_id == application::QUERYSOURCE_ID_DATASOURCE_ID_DATASOURCE_NAME {
+    if bundle_id.extension_id == application::QUERYSOURCE_ID_DATASOURCE_ID_DATASOURCE_NAME
+        && bundle_id.sub_extension_id.is_none()
+    {
         search_source_registry_tauri_state
-            .remove_source(extension_id)
+            .remove_source(bundle_id.extension_id)
             .await;
+        unset_apps_hotkey(tauri_app_handle)?;

         alter_extension_json_file(
-            &BUILT_IN_EXTENSION_DIRECTORY.as_path(),
-            extension_id,
+            &get_built_in_extension_directory(tauri_app_handle),
+            bundle_id,
             update_extension,
         )?;
         return Ok(());
     }

     // Check if this is an application
-    let application_prefix = format!(
-        "{}.",
-        application::QUERYSOURCE_ID_DATASOURCE_ID_DATASOURCE_NAME
-    );
-    if extension_id.starts_with(&application_prefix) {
-        let app_path = &extension_id[application_prefix.len()..];
+    if bundle_id.extension_id == application::QUERYSOURCE_ID_DATASOURCE_ID_DATASOURCE_NAME
+        && bundle_id.sub_extension_id.is_some()
+    {
+        let app_path = bundle_id.sub_extension_id.expect("just checked it is Some");
         application::disable_app_search(tauri_app_handle, app_path)?;
         return Ok(());
     }

-    if extension_id == calculator::DATA_SOURCE_ID {
+    if bundle_id.extension_id == calculator::DATA_SOURCE_ID {
         search_source_registry_tauri_state
-            .remove_source(extension_id)
+            .remove_source(bundle_id.extension_id)
             .await;
         alter_extension_json_file(
-            &BUILT_IN_EXTENSION_DIRECTORY.as_path(),
-            extension_id,
+            &get_built_in_extension_directory(tauri_app_handle),
+            bundle_id,
             update_extension,
         )?;
         return Ok(());
     }

-    if extension_id == quick_ai_access::EXTENSION_ID {
+    if bundle_id.extension_id == quick_ai_access::EXTENSION_ID {
         alter_extension_json_file(
-            &BUILT_IN_EXTENSION_DIRECTORY.as_path(),
-            extension_id,
+            &get_built_in_extension_directory(tauri_app_handle),
+            bundle_id,
             update_extension,
         )?;

         return Ok(());
     }

-    if extension_id == ai_overview::EXTENSION_ID {
+    if bundle_id.extension_id == ai_overview::EXTENSION_ID {
         alter_extension_json_file(
-            &BUILT_IN_EXTENSION_DIRECTORY.as_path(),
-            extension_id,
+            &get_built_in_extension_directory(tauri_app_handle),
+            bundle_id,
             update_extension,
         )?;

         return Ok(());
     }

+    cfg_if::cfg_if! {
+        if #[cfg(any(target_os = "macos", target_os = "windows"))] {
+            if bundle_id.extension_id == file_search::EXTENSION_ID {
+                search_source_registry_tauri_state
+                    .remove_source(bundle_id.extension_id)
+                    .await;
+                alter_extension_json_file(
+                    &get_built_in_extension_directory(tauri_app_handle),
+                    bundle_id,
+                    update_extension,
+                )?;
+                return Ok(());
+            }
+        }
+    }
+
     Ok(())
 }

-pub(crate) fn set_built_in_extension_alias(extension_id: &str, alias: &str) {
-    let tauri_app_handle = GLOBAL_TAURI_APP_HANDLE
-        .get()
-        .expect("global tauri app handle not set");
-    let application_prefix = format!(
-        "{}.",
-        application::QUERYSOURCE_ID_DATASOURCE_ID_DATASOURCE_NAME
-    );
-    if extension_id.starts_with(&application_prefix) {
-        let app_path = &extension_id[application_prefix.len()..];
+pub(crate) fn set_built_in_extension_alias<R: Runtime>(
+    tauri_app_handle: &AppHandle<R>,
+    bundle_id: &ExtensionBundleIdBorrowed<'_>,
+    alias: &str,
+) {
+    if bundle_id.extension_id == application::QUERYSOURCE_ID_DATASOURCE_ID_DATASOURCE_NAME {
+        if let Some(app_path) = bundle_id.sub_extension_id {
             application::set_app_alias(tauri_app_handle, app_path, alias);
         }
     }
 }

-pub(crate) fn register_built_in_extension_hotkey(
-    extension_id: &str,
+pub(crate) fn register_built_in_extension_hotkey<R: Runtime>(
+    tauri_app_handle: &AppHandle<R>,
+    bundle_id: &ExtensionBundleIdBorrowed<'_>,
     hotkey: &str,
 ) -> Result<(), String> {
-    let tauri_app_handle = GLOBAL_TAURI_APP_HANDLE
-        .get()
-        .expect("global tauri app handle not set");
-    let application_prefix = format!(
-        "{}.",
-        application::QUERYSOURCE_ID_DATASOURCE_ID_DATASOURCE_NAME
-    );
-    if extension_id.starts_with(&application_prefix) {
-        let app_path = &extension_id[application_prefix.len()..];
+    if bundle_id.extension_id == application::QUERYSOURCE_ID_DATASOURCE_ID_DATASOURCE_NAME {
+        if let Some(app_path) = bundle_id.sub_extension_id {
             application::register_app_hotkey(&tauri_app_handle, app_path, hotkey)?;
         }
     }
     Ok(())
 }

-pub(crate) fn unregister_built_in_extension_hotkey(extension_id: &str) -> Result<(), String> {
-    let tauri_app_handle = GLOBAL_TAURI_APP_HANDLE
-        .get()
-        .expect("global tauri app handle not set");
-    let application_prefix = format!(
-        "{}.",
-        application::QUERYSOURCE_ID_DATASOURCE_ID_DATASOURCE_NAME
-    );
-    if extension_id.starts_with(&application_prefix) {
-        let app_path = &extension_id[application_prefix.len()..];
+pub(crate) fn unregister_built_in_extension_hotkey<R: Runtime>(
+    tauri_app_handle: &AppHandle<R>,
+    bundle_id: &ExtensionBundleIdBorrowed<'_>,
+) -> Result<(), String> {
+    if bundle_id.extension_id == application::QUERYSOURCE_ID_DATASOURCE_ID_DATASOURCE_NAME {
+        if let Some(app_path) = bundle_id.sub_extension_id {
             application::unregister_app_hotkey(&tauri_app_handle, app_path)?;
         }
     }
     Ok(())
 }

-pub(crate) async fn is_built_in_extension_enabled(extension_id: &str) -> Result<bool, String> {
-    let tauri_app_handle = GLOBAL_TAURI_APP_HANDLE
-        .get()
-        .expect("global tauri app handle not set");
+fn split_extension_id(extension_id: &str) -> (&str, Option<&str>) {
+    match extension_id.find('.') {
+        Some(idx) => (&extension_id[..idx], Some(&extension_id[idx + 1..])),
+        None => (extension_id, None),
+    }
+}
+
+fn load_extension_from_json_file(
+    extension_directory: &Path,
+    extension_id: &str,
+) -> Result<Extension, String> {
+    let (parent_extension_id, _opt_sub_extension_id) = split_extension_id(extension_id);
+    let json_file_path = {
+        let mut extension_directory_path = extension_directory.join(parent_extension_id);
+        extension_directory_path.push(PLUGIN_JSON_FILE_NAME);
+
+        extension_directory_path
+    };
+
+    let mut extension = serde_json::from_reader::<_, Extension>(
+        std::fs::File::open(&json_file_path)
+            .with_context(|| {
+                format!(
+                    "the [{}] file for extension [{}] is missing or broken",
+                    PLUGIN_JSON_FILE_NAME, parent_extension_id
+                )
+            })
+            .map_err(|e| e.to_string())?,
+    )
+    .map_err(|e| e.to_string())?;
+
+    super::canonicalize_relative_icon_path(extension_directory, &mut extension)?;
+
+    Ok(extension)
+}
+
+pub(crate) async fn is_built_in_extension_enabled<R: Runtime>(
+    tauri_app_handle: &AppHandle<R>,
+    bundle_id: &ExtensionBundleIdBorrowed<'_>,
+) -> Result<bool, String> {
     let search_source_registry_tauri_state = tauri_app_handle.state::<SearchSourceRegistry>();

-    if extension_id == application::QUERYSOURCE_ID_DATASOURCE_ID_DATASOURCE_NAME {
+    if bundle_id.extension_id == application::QUERYSOURCE_ID_DATASOURCE_ID_DATASOURCE_NAME
+        && bundle_id.sub_extension_id.is_none()
+    {
         return Ok(search_source_registry_tauri_state
-            .get_source(extension_id)
+            .get_source(bundle_id.extension_id)
             .await
             .is_some());
     }

     // Check if this is an application
-    let application_prefix = format!(
-        "{}.",
-        application::QUERYSOURCE_ID_DATASOURCE_ID_DATASOURCE_NAME
-    );
-    if extension_id.starts_with(&application_prefix) {
-        let app_path = &extension_id[application_prefix.len()..];
+    if bundle_id.extension_id == application::QUERYSOURCE_ID_DATASOURCE_ID_DATASOURCE_NAME {
+        if let Some(app_path) = bundle_id.sub_extension_id {
             return Ok(application::is_app_search_enabled(app_path));
         }
     }

-    if extension_id == calculator::DATA_SOURCE_ID {
+    if bundle_id.extension_id == calculator::DATA_SOURCE_ID {
         return Ok(search_source_registry_tauri_state
-            .get_source(extension_id)
+            .get_source(bundle_id.extension_id)
             .await
             .is_some());
     }

-    if extension_id == quick_ai_access::EXTENSION_ID {
-        let extension =
-            load_extension_from_json_file(&BUILT_IN_EXTENSION_DIRECTORY.as_path(), extension_id)?;
+    if bundle_id.extension_id == quick_ai_access::EXTENSION_ID {
+        let extension = load_extension_from_json_file(
+            &get_built_in_extension_directory(tauri_app_handle),
+            bundle_id.extension_id,
+        )?;
         return Ok(extension.enabled);
     }

-    if extension_id == ai_overview::EXTENSION_ID {
-        let extension =
-            load_extension_from_json_file(&BUILT_IN_EXTENSION_DIRECTORY.as_path(), extension_id)?;
+    if bundle_id.extension_id == ai_overview::EXTENSION_ID {
+        let extension = load_extension_from_json_file(
+            &get_built_in_extension_directory(tauri_app_handle),
+            bundle_id.extension_id,
+        )?;
         return Ok(extension.enabled);
     }

-    unreachable!("extension [{}] is not a built-in extension", extension_id)
+    cfg_if::cfg_if! {
+        if #[cfg(any(target_os = "macos", target_os = "windows"))] {
+            if bundle_id.extension_id == file_search::EXTENSION_ID
+                && bundle_id.sub_extension_id.is_none()
+            {
+                return Ok(search_source_registry_tauri_state
+                    .get_source(bundle_id.extension_id)
+                    .await
+                    .is_some());
+            }
+        }
+    }
+
+    unreachable!("extension [{:?}] is not a built-in extension", bundle_id)
 }
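The hunks above replace raw `extension_id` strings with `ExtensionBundleIdBorrowed`, which, judging from its uses here, carries a `developer` (None for built-ins), an `extension_id`, and an optional `sub_extension_id`. The old prefix-stripping logic survives only in the new `split_extension_id` helper; a quick illustration of its first-dot behaviour (the IDs below are made up):

// Everything after the first '.' stays in the sub-extension part, dots included.
assert_eq!(split_extension_id("calculator"), ("calculator", None));
assert_eq!(
    split_extension_id("apps./Applications/Safari.app"),
    ("apps", Some("/Applications/Safari.app"))
);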
@@ -8,8 +8,8 @@
 //! which forces us to create a dedicated thread/runtime to execute them.

 use std::any::Any;
-use std::collections::hash_map::Entry;
 use std::collections::HashMap;
+use std::collections::hash_map::Entry;
 use std::sync::OnceLock;

 pub(crate) trait SearchSourceState {
@@ -27,18 +27,32 @@ pub(crate) trait Task: Send + Sync {
 pub(crate) static RUNTIME_TX: OnceLock<tokio::sync::mpsc::UnboundedSender<Box<dyn Task>>> =
     OnceLock::new();

-pub(crate) fn start_pizza_engine_runtime() {
-    std::thread::spawn(|| {
+/// This function blocks until the runtime thread is ready for accepting tasks.
+pub(crate) async fn start_pizza_engine_runtime() {
+    const THREAD_NAME: &str = "Pizza engine runtime thread";
+
+    log::trace!("starting Pizza engine runtime");
+    let (engine_start_signal_tx, engine_start_signal_rx) = tokio::sync::oneshot::channel();
+
+    std::thread::Builder::new()
+        .name(THREAD_NAME.into())
+        .spawn(move || {
             let rt = tokio::runtime::Runtime::new().unwrap();

             let main = async {
-                let mut states: HashMap<String, Option<Box<dyn SearchSourceState>>> = HashMap::new();
+                let mut states: HashMap<String, Option<Box<dyn SearchSourceState>>> =
+                    HashMap::new();

                 let (tx, mut rx) = tokio::sync::mpsc::unbounded_channel();
                 RUNTIME_TX.set(tx).unwrap();

+                engine_start_signal_tx
+                    .send(())
+                    .expect("engine_start_signal_rx dropped");
+
                 while let Some(mut task) = rx.recv().await {
-                    let opt_search_source_state = match states.entry(task.search_source_id().into()) {
+                    let opt_search_source_state = match states.entry(task.search_source_id().into())
+                    {
                         Entry::Occupied(o) => o.into_mut(),
                         Entry::Vacant(v) => v.insert(None),
                     };
@@ -47,5 +61,16 @@ pub(crate) fn start_pizza_engine_runtime() {
             };

             rt.block_on(main);
+        })
+        .unwrap_or_else(|e| {
+            panic!(
+                "failed to start thread [{}] due to error [{}]",
+                THREAD_NAME, e
+            );
         });
+
+    engine_start_signal_rx
+        .await
+        .expect("engine_start_signal_tx dropped, the runtime thread could be dead");
+    log::trace!("Pizza engine runtime started");
 }
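The reworked `start_pizza_engine_runtime` is now async and only returns once the dedicated runtime thread has installed `RUNTIME_TX`, using a oneshot channel as a readiness signal. The same pattern in isolation, stripped of the Pizza-engine specifics (illustrative sketch only, not code from this commit):

async fn start_worker() {
    let (ready_tx, ready_rx) = tokio::sync::oneshot::channel();

    std::thread::spawn(move || {
        let rt = tokio::runtime::Runtime::new().unwrap();
        rt.block_on(async {
            // ... install channels / global state here ...
            ready_tx.send(()).expect("caller dropped the ready receiver");
            // ... run the actual task loop here ...
        });
    });

    // Resolves only after the worker thread signalled that it accepts tasks.
    ready_rx
        .await
        .expect("worker thread died before signalling readiness");
}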
@@ -1 +1,12 @@
 pub(super) const EXTENSION_ID: &str = "QuickAIAccess";
+
+pub(crate) const PLUGIN_JSON_FILE: &str = r#"
+{
+  "id": "QuickAIAccess",
+  "name": "Quick AI Access",
+  "description": "...",
+  "icon": "font_a-QuickAIAccess",
+  "type": "ai_extension",
+  "enabled": true
+}
+"#;
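Like the other built-in templates in this change, `PLUGIN_JSON_FILE` is meant to be parsed into `struct Extension` by `load_built_in_extension`. A hedged sanity-check sketch, assuming `Extension` derives `Deserialize` and exposes `id` and `enabled` (which matches how it is used elsewhere in this diff); the test module below is not part of the commit:

#[cfg(test)]
mod template_sanity {
    use super::*;

    #[test]
    fn default_template_parses() {
        let ext: Extension = serde_json::from_str(PLUGIN_JSON_FILE)
            .expect("built-in template must parse as `struct Extension`");
        assert_eq!(ext.id, "QuickAIAccess");
        assert!(ext.enabled);
    }
}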
|
|||||||
@@ -1,755 +0,0 @@
|
|||||||
use super::alter_extension_json_file;
|
|
||||||
use super::Extension;
|
|
||||||
use super::LOCAL_QUERY_SOURCE_TYPE;
|
|
||||||
use crate::common::document::open;
|
|
||||||
use crate::common::document::DataSourceReference;
|
|
||||||
use crate::common::document::Document;
|
|
||||||
use crate::common::error::SearchError;
|
|
||||||
use crate::common::search::QueryResponse;
|
|
||||||
use crate::common::search::QuerySource;
|
|
||||||
use crate::common::search::SearchQuery;
|
|
||||||
use crate::common::traits::SearchSource;
|
|
||||||
use crate::extension::split_extension_id;
|
|
||||||
use crate::GLOBAL_TAURI_APP_HANDLE;
|
|
||||||
use async_trait::async_trait;
|
|
||||||
use function_name::named;
|
|
||||||
use std::path::PathBuf;
|
|
||||||
use std::sync::Arc;
|
|
||||||
use std::sync::LazyLock;
|
|
||||||
use std::sync::OnceLock;
|
|
||||||
use tauri::async_runtime;
|
|
||||||
use tauri::Manager;
|
|
||||||
use tauri_plugin_global_shortcut::GlobalShortcutExt;
|
|
||||||
use tauri_plugin_global_shortcut::ShortcutState;
|
|
||||||
use tokio::sync::RwLock;
|
|
||||||
|
|
||||||
pub(crate) static THIRD_PARTY_EXTENSION_DIRECTORY: LazyLock<PathBuf> = LazyLock::new(|| {
|
|
||||||
let mut app_data_dir = GLOBAL_TAURI_APP_HANDLE
|
|
||||||
.get()
|
|
||||||
.expect("global tauri app handle not set")
|
|
||||||
.path()
|
|
||||||
.app_data_dir()
|
|
||||||
.expect(
|
|
||||||
"User home directory not found, which should be impossible on desktop environments",
|
|
||||||
);
|
|
||||||
app_data_dir.push("extension");
|
|
||||||
|
|
||||||
app_data_dir
|
|
||||||
});
|
|
||||||
|
|
||||||
/// All the third-party extensions will be registered as one search source.
|
|
||||||
///
|
|
||||||
/// Since some `#[tauri::command]`s need to access it, we store it in a global
|
|
||||||
/// static variable as well.
|
|
||||||
#[derive(Debug, Clone)]
|
|
||||||
pub(super) struct ThirdPartyExtensionsSearchSource {
|
|
||||||
inner: Arc<ThirdPartyExtensionsSearchSourceInner>,
|
|
||||||
}
|
|
||||||
|
|
||||||
impl ThirdPartyExtensionsSearchSource {
|
|
||||||
pub(super) fn new(extensions: Vec<Extension>) -> Self {
|
|
||||||
Self {
|
|
||||||
inner: Arc::new(ThirdPartyExtensionsSearchSourceInner {
|
|
||||||
extensions: RwLock::new(extensions),
|
|
||||||
}),
|
|
||||||
}
|
|
||||||
}
|
|
||||||
|
|
||||||
#[named]
|
|
||||||
pub(super) async fn enable_extension(&self, extension_id: &str) -> Result<(), String> {
|
|
||||||
let (parent_extension_id, _opt_sub_extension_id) = split_extension_id(extension_id);
|
|
||||||
|
|
||||||
let mut extensions_write_lock = self.inner.extensions.write().await;
|
|
||||||
let opt_index = extensions_write_lock
|
|
||||||
.iter()
|
|
||||||
.position(|ext| ext.id == parent_extension_id);
|
|
||||||
|
|
||||||
let Some(index) = opt_index else {
|
|
||||||
return Err(format!(
|
|
||||||
"{} invoked with an extension that does not exist [{}]",
|
|
||||||
function_name!(),
|
|
||||||
extension_id
|
|
||||||
));
|
|
||||||
};
|
|
||||||
|
|
||||||
let extension = extensions_write_lock
|
|
||||||
.get_mut(index)
|
|
||||||
.expect("just checked this extension exists");
|
|
||||||
|
|
||||||
let update_extension = |ext: &mut Extension| -> Result<(), String> {
|
|
||||||
if ext.enabled {
|
|
||||||
return Err(format!(
|
|
||||||
"{} invoked with an extension that is already enabled [{}]",
|
|
||||||
function_name!(),
|
|
||||||
extension_id
|
|
||||||
));
|
|
||||||
}
|
|
||||||
ext.enabled = true;
|
|
||||||
|
|
||||||
Ok(())
|
|
||||||
};
|
|
||||||
|
|
||||||
extension.modify(extension_id, update_extension)?;
|
|
||||||
alter_extension_json_file(
|
|
||||||
&THIRD_PARTY_EXTENSION_DIRECTORY,
|
|
||||||
extension_id,
|
|
||||||
update_extension,
|
|
||||||
)?;
|
|
||||||
|
|
||||||
Ok(())
|
|
||||||
}
|
|
||||||
|
|
||||||
#[named]
|
|
||||||
pub(super) async fn disable_extension(&self, extension_id: &str) -> Result<(), String> {
|
|
||||||
let (parent_extension_id, _opt_sub_extension_id) = split_extension_id(extension_id);
|
|
||||||
|
|
||||||
let mut extensions_write_lock = self.inner.extensions.write().await;
|
|
||||||
let opt_index = extensions_write_lock
|
|
||||||
.iter()
|
|
||||||
.position(|ext| ext.id == parent_extension_id);
|
|
||||||
|
|
||||||
let Some(index) = opt_index else {
|
|
||||||
return Err(format!(
|
|
||||||
"{} invoked with an extension that does not exist [{}]",
|
|
||||||
function_name!(),
|
|
||||||
extension_id
|
|
||||||
));
|
|
||||||
};
|
|
||||||
|
|
||||||
let extension = extensions_write_lock
|
|
||||||
.get_mut(index)
|
|
||||||
.expect("just checked this extension exists");
|
|
||||||
|
|
||||||
let update_extension = |ext: &mut Extension| -> Result<(), String> {
|
|
||||||
if !ext.enabled {
|
|
||||||
return Err(format!(
|
|
||||||
"{} invoked with an extension that is already enabled [{}]",
|
|
||||||
function_name!(),
|
|
||||||
extension_id
|
|
||||||
));
|
|
||||||
}
|
|
||||||
ext.enabled = false;
|
|
||||||
|
|
||||||
Ok(())
|
|
||||||
};
|
|
||||||
|
|
||||||
extension.modify(extension_id, update_extension)?;
|
|
||||||
alter_extension_json_file(
|
|
||||||
&THIRD_PARTY_EXTENSION_DIRECTORY,
|
|
||||||
extension_id,
|
|
||||||
update_extension,
|
|
||||||
)?;
|
|
||||||
|
|
||||||
Ok(())
|
|
||||||
}
|
|
||||||
|
|
||||||
#[named]
|
|
||||||
pub(super) async fn set_extension_alias(
|
|
||||||
&self,
|
|
||||||
extension_id: &str,
|
|
||||||
alias: &str,
|
|
||||||
) -> Result<(), String> {
|
|
||||||
let (parent_extension_id, _opt_sub_extension_id) = split_extension_id(extension_id);
|
|
||||||
|
|
||||||
let mut extensions_write_lock = self.inner.extensions.write().await;
|
|
||||||
let opt_index = extensions_write_lock
|
|
||||||
.iter()
|
|
||||||
.position(|ext| ext.id == parent_extension_id);
|
|
||||||
|
|
||||||
let Some(index) = opt_index else {
|
|
||||||
log::warn!(
|
|
||||||
"{} invoked with an extension that does not exist [{}]",
|
|
||||||
function_name!(),
|
|
||||||
extension_id
|
|
||||||
);
|
|
||||||
return Ok(());
|
|
||||||
};
|
|
||||||
|
|
||||||
let extension = extensions_write_lock
|
|
||||||
.get_mut(index)
|
|
||||||
.expect("just checked this extension exists");
|
|
||||||
|
|
||||||
let update_extension = |ext: &mut Extension| -> Result<(), String> {
|
|
||||||
ext.alias = Some(alias.to_string());
|
|
||||||
Ok(())
|
|
||||||
};
|
|
||||||
|
|
||||||
extension.modify(extension_id, update_extension)?;
|
|
||||||
alter_extension_json_file(
|
|
||||||
&THIRD_PARTY_EXTENSION_DIRECTORY,
|
|
||||||
extension_id,
|
|
||||||
update_extension,
|
|
||||||
)?;
|
|
||||||
|
|
||||||
Ok(())
|
|
||||||
}
|
|
||||||
|
|
||||||
pub(super) async fn restore_extensions_hotkey(&self) -> Result<(), String> {
|
|
||||||
fn set_up_hotkey<R: tauri::Runtime>(
|
|
||||||
tauri_app_handle: &tauri::AppHandle<R>,
|
|
||||||
extension: &Extension,
|
|
||||||
) -> Result<(), String> {
|
|
||||||
if let Some(ref hotkey) = extension.hotkey {
|
|
||||||
let on_opened = extension.on_opened().unwrap_or_else(|| panic!( "extension has hotkey, but on_open() returns None, extension ID [{}], extension type [{:?}]", extension.id, extension.r#type));
|
|
||||||
|
|
||||||
let extension_id_clone = extension.id.clone();
|
|
||||||
|
|
||||||
tauri_app_handle
|
|
||||||
.global_shortcut()
|
|
||||||
.on_shortcut(hotkey.as_str(), move |_tauri_app_handle, _hotkey, event| {
|
|
||||||
let on_opened_clone = on_opened.clone();
|
|
||||||
let extension_id_clone = extension_id_clone.clone();
|
|
||||||
if event.state() == ShortcutState::Pressed {
|
|
||||||
async_runtime::spawn(async move {
|
|
||||||
let result = open(on_opened_clone).await;
|
|
||||||
if let Err(msg) = result {
|
|
||||||
log::warn!(
|
|
||||||
"failed to open extension [{}], error [{}]",
|
|
||||||
extension_id_clone,
|
|
||||||
msg
|
|
||||||
);
|
|
||||||
}
|
|
||||||
});
|
|
||||||
}
|
|
||||||
})
|
|
||||||
.map_err(|e| e.to_string())?;
|
|
||||||
}
|
|
||||||
|
|
||||||
Ok(())
|
|
||||||
}
|
|
||||||
|
|
||||||
let extensions_read_lock = self.inner.extensions.read().await;
|
|
||||||
let tauri_app_handle = GLOBAL_TAURI_APP_HANDLE
|
|
||||||
.get()
|
|
||||||
.expect("global tauri app handle not set");
|
|
||||||
|
|
||||||
for extension in extensions_read_lock.iter() {
|
|
||||||
if extension.r#type.contains_sub_items() {
|
|
||||||
if let Some(commands) = &extension.commands {
|
|
||||||
for command in commands.iter().filter(|cmd| cmd.enabled) {
|
|
||||||
set_up_hotkey(tauri_app_handle, command)?;
|
|
||||||
}
|
|
||||||
}
|
|
||||||
|
|
||||||
if let Some(scripts) = &extension.scripts {
|
|
||||||
for script in scripts.iter().filter(|script| script.enabled) {
|
|
||||||
set_up_hotkey(tauri_app_handle, script)?;
|
|
||||||
}
|
|
||||||
}
|
|
||||||
|
|
||||||
if let Some(quick_links) = &extension.quick_links {
|
|
||||||
for quick_link in quick_links.iter().filter(|link| link.enabled) {
|
|
||||||
set_up_hotkey(tauri_app_handle, quick_link)?;
|
|
||||||
}
|
|
||||||
}
|
|
||||||
} else {
|
|
||||||
set_up_hotkey(tauri_app_handle, extension)?;
|
|
||||||
}
|
|
||||||
}
|
|
||||||
|
|
||||||
Ok(())
|
|
||||||
}
|
|
||||||
|
|
||||||
#[named]
|
|
||||||
pub(super) async fn register_extension_hotkey(
|
|
||||||
&self,
|
|
||||||
extension_id: &str,
|
|
||||||
hotkey: &str,
|
|
||||||
) -> Result<(), String> {
|
|
||||||
self.unregister_extension_hotkey(extension_id).await?;
|
|
||||||
|
|
||||||
let (parent_extension_id, _opt_sub_extension_id) = split_extension_id(extension_id);
|
|
||||||
let mut extensions_write_lock = self.inner.extensions.write().await;
|
|
||||||
let opt_index = extensions_write_lock
|
|
||||||
.iter()
|
|
||||||
.position(|ext| ext.id == parent_extension_id);
|
|
||||||
|
|
||||||
let Some(index) = opt_index else {
|
|
||||||
return Err(format!(
|
|
||||||
"{} invoked with an extension that does not exist [{}]",
|
|
||||||
function_name!(),
|
|
||||||
extension_id
|
|
||||||
));
|
|
||||||
};
|
|
||||||
|
|
||||||
let mut extension = extensions_write_lock
|
|
||||||
.get_mut(index)
|
|
||||||
.expect("just checked this extension exists");
|
|
||||||
|
|
||||||
let update_extension = |ext: &mut Extension| -> Result<(), String> {
|
|
||||||
ext.hotkey = Some(hotkey.into());
|
|
||||||
Ok(())
|
|
||||||
};
|
|
||||||
|
|
||||||
// Update extension (memory and file)
|
|
||||||
extension.modify(extension_id, update_extension)?;
|
|
||||||
alter_extension_json_file(
|
|
||||||
&THIRD_PARTY_EXTENSION_DIRECTORY,
|
|
||||||
extension_id,
|
|
||||||
update_extension,
|
|
||||||
)?;
|
|
||||||
|
|
||||||
// To make borrow checker happy
|
|
||||||
let extension_dbg_string = format!("{:?}", extension);
|
|
||||||
extension = match extension.get_extension_mut(extension_id) {
|
|
||||||
Some(ext) => ext,
|
|
||||||
None => {
|
|
||||||
panic!(
|
|
||||||
"extension [{}] should be found in {}",
|
|
||||||
extension_id, extension_dbg_string
|
|
||||||
)
|
|
||||||
}
|
|
||||||
};
|
|
||||||
|
|
||||||
// Set hotkey
|
|
||||||
let tauri_app_handle = GLOBAL_TAURI_APP_HANDLE
|
|
||||||
.get()
|
|
||||||
.expect("global tauri app handle not set");
|
|
||||||
let on_opened = extension.on_opened().unwrap_or_else(|| panic!(
|
|
||||||
"setting hotkey for an extension that cannot be opened, extension ID [{}], extension type [{:?}]", extension_id, extension.r#type,
|
|
||||||
));
|
|
||||||
|
|
||||||
let extension_id_clone = extension_id.to_string();
|
|
||||||
tauri_app_handle
|
|
||||||
.global_shortcut()
|
|
||||||
.on_shortcut(hotkey, move |_tauri_app_handle, _hotkey, event| {
|
|
||||||
let on_opened_clone = on_opened.clone();
|
|
||||||
let extension_id_clone = extension_id_clone.clone();
|
|
||||||
if event.state() == ShortcutState::Pressed {
|
|
||||||
async_runtime::spawn(async move {
|
|
||||||
let result = open(on_opened_clone).await;
|
|
||||||
if let Err(msg) = result {
|
|
||||||
log::warn!(
|
|
||||||
"failed to open extension [{}], error [{}]",
|
|
||||||
extension_id_clone,
|
|
||||||
msg
|
|
||||||
);
|
|
||||||
}
|
|
||||||
});
|
|
||||||
}
|
|
||||||
})
|
|
||||||
.map_err(|e| e.to_string())?;
|
|
||||||
|
|
||||||
Ok(())
|
|
||||||
}
|
|
||||||
|
|
||||||
/// NOTE: this function won't error out if the extension specified by `extension_id`
|
|
||||||
/// has no hotkey set because we need it to behave like this.
|
|
||||||
#[named]
|
|
||||||
pub(super) async fn unregister_extension_hotkey(
|
|
||||||
&self,
|
|
||||||
extension_id: &str,
|
|
||||||
) -> Result<(), String> {
|
|
||||||
let (parent_extension_id, _opt_sub_extension_id) = split_extension_id(extension_id);
|
|
||||||
|
|
||||||
let mut extensions_write_lock = self.inner.extensions.write().await;
|
|
||||||
let opt_index = extensions_write_lock
|
|
||||||
.iter()
|
|
||||||
.position(|ext| ext.id == parent_extension_id);
|
|
||||||
|
|
||||||
let Some(index) = opt_index else {
|
|
||||||
return Err(format!(
|
|
||||||
"{} invoked with an extension that does not exist [{}]",
|
|
||||||
function_name!(),
|
|
||||||
extension_id
|
|
||||||
));
|
|
||||||
};
|
|
||||||
|
|
||||||
let parent_extension = extensions_write_lock
|
|
||||||
.get_mut(index)
|
|
||||||
.expect("just checked this extension exists");
|
|
||||||
let Some(extension) = parent_extension.get_extension_mut(extension_id) else {
|
|
||||||
return Err(format!(
|
|
||||||
"{} invoked with an extension that does not exist [{}]",
|
|
||||||
function_name!(),
|
|
||||||
extension_id
|
|
||||||
));
|
|
||||||
};
|
|
||||||
|
|
||||||
let Some(hotkey) = extension.hotkey.clone() else {
|
|
||||||
log::warn!(
|
|
||||||
"extension [{}] has no hotkey set, but we are trying to unregister it",
|
|
||||||
extension_id
|
|
||||||
);
|
|
||||||
return Ok(());
|
|
||||||
};
|
|
||||||
|
|
||||||
let update_extension = |extension: &mut Extension| -> Result<(), String> {
|
|
||||||
extension.hotkey = None;
|
|
||||||
Ok(())
|
|
||||||
};
|
|
||||||
|
|
||||||
parent_extension.modify(extension_id, update_extension)?;
|
|
||||||
alter_extension_json_file(
|
|
||||||
&THIRD_PARTY_EXTENSION_DIRECTORY,
|
|
||||||
extension_id,
|
|
||||||
update_extension,
|
|
||||||
)?;
|
|
||||||
|
|
||||||
// Set hotkey
|
|
||||||
let tauri_app_handle = GLOBAL_TAURI_APP_HANDLE
|
|
||||||
.get()
|
|
||||||
.expect("global tauri app handle not set");
|
|
||||||
tauri_app_handle
|
|
||||||
.global_shortcut()
|
|
||||||
.unregister(hotkey.as_str())
|
|
||||||
.map_err(|e| e.to_string())?;
|
|
||||||
|
|
||||||
Ok(())
|
|
||||||
}
|
|
||||||
|
|
||||||
#[named]
|
|
||||||
pub(super) async fn is_extension_enabled(&self, extension_id: &str) -> Result<bool, String> {
|
|
||||||
let (parent_extension_id, opt_sub_extension_id) = split_extension_id(extension_id);
|
|
||||||
|
|
||||||
let extensions_read_lock = self.inner.extensions.read().await;
|
|
||||||
let opt_index = extensions_read_lock
|
|
||||||
.iter()
|
|
||||||
.position(|ext| ext.id == parent_extension_id);
|
|
||||||
|
|
||||||
let Some(index) = opt_index else {
|
|
||||||
return Err(format!(
|
|
||||||
"{} invoked with an extension that does not exist [{}]",
|
|
||||||
function_name!(),
|
|
||||||
extension_id
|
|
||||||
));
|
|
||||||
};
|
|
||||||
|
|
||||||
let extension = extensions_read_lock
|
|
||||||
.get(index)
|
|
||||||
.expect("just checked this extension exists");
|
|
||||||
|
|
||||||
if let Some(sub_extension_id) = opt_sub_extension_id {
|
|
||||||
// For a sub-extension, it is enabled iff:
|
|
||||||
//
|
|
||||||
// 1. Its parent extension is enabled, and
|
|
||||||
// 2. It is enabled
|
|
||||||
if !extension.enabled {
|
|
||||||
return Ok(false);
|
|
||||||
}
|
|
||||||
|
|
||||||
if let Some(ref commands) = extension.commands {
|
|
||||||
if let Some(sub_ext) = commands.iter().find(|cmd| cmd.id == sub_extension_id) {
|
|
||||||
return Ok(sub_ext.enabled);
|
|
||||||
}
|
|
||||||
}
|
|
||||||
if let Some(ref scripts) = extension.scripts {
|
|
||||||
if let Some(sub_ext) = scripts.iter().find(|script| script.id == sub_extension_id) {
|
|
||||||
return Ok(sub_ext.enabled);
|
|
||||||
}
|
|
||||||
}
|
|
||||||
if let Some(ref commands) = extension.commands {
|
|
||||||
if let Some(sub_ext) = commands
|
|
||||||
.iter()
|
|
||||||
.find(|quick_link| quick_link.id == sub_extension_id)
|
|
||||||
{
|
|
||||||
return Ok(sub_ext.enabled);
|
|
||||||
}
|
|
||||||
}
|
|
||||||
|
|
||||||
Err(format!(
|
|
||||||
"{} invoked with a sub-extension that does not exist [{}/{}]",
|
|
||||||
function_name!(),
|
|
||||||
parent_extension_id,
|
|
||||||
sub_extension_id
|
|
||||||
))
|
|
||||||
} else {
|
|
||||||
Ok(extension.enabled)
|
|
||||||
}
|
|
||||||
}
|
|
||||||
}
|
|
||||||
|
|
||||||
pub(super) static THIRD_PARTY_EXTENSIONS_SEARCH_SOURCE: OnceLock<ThirdPartyExtensionsSearchSource> =
|
|
||||||
OnceLock::new();
|
|
||||||
|
|
||||||
#[derive(Debug)]
|
|
||||||
struct ThirdPartyExtensionsSearchSourceInner {
|
|
||||||
extensions: RwLock<Vec<Extension>>,
|
|
||||||
}
|
|
||||||
|
|
||||||
#[async_trait]
|
|
||||||
impl SearchSource for ThirdPartyExtensionsSearchSource {
|
|
||||||
fn get_type(&self) -> QuerySource {
|
|
||||||
QuerySource {
|
|
||||||
r#type: LOCAL_QUERY_SOURCE_TYPE.into(),
|
|
||||||
name: hostname::get()
|
|
||||||
.unwrap_or("My Computer".into())
|
|
||||||
.to_string_lossy()
|
|
||||||
.into(),
|
|
||||||
id: "extensions".into(),
|
|
||||||
}
|
|
||||||
}
|
|
||||||
|
|
||||||
async fn search(&self, query: SearchQuery) -> Result<QueryResponse, SearchError> {
|
|
||||||
let Some(query_string) = query.query_strings.get("query") else {
|
|
||||||
return Ok(QueryResponse {
|
|
||||||
source: self.get_type(),
|
|
||||||
hits: Vec::new(),
|
|
||||||
total_hits: 0,
|
|
||||||
});
|
|
||||||
};
|
|
||||||
|
|
||||||
let opt_data_source = query
|
|
||||||
.query_strings
|
|
||||||
.get("datasource")
|
|
||||||
.map(|owned_str| owned_str.as_str());
|
|
||||||
|
|
||||||
let mut hits = Vec::new();
|
|
||||||
let extensions_read_lock = self.inner.extensions.read().await;
|
|
||||||
let query_lower = query_string.to_lowercase();
|
|
||||||
|
|
||||||
for extension in extensions_read_lock.iter().filter(|ext| ext.enabled) {
|
|
||||||
if extension.r#type.contains_sub_items() {
|
|
||||||
if let Some(ref commands) = extension.commands {
|
|
||||||
for command in commands.iter().filter(|cmd| cmd.enabled) {
|
|
||||||
if let Some(hit) = extension_to_hit(command, &query_lower, opt_data_source)
|
|
||||||
{
|
|
||||||
hits.push(hit);
|
|
||||||
}
|
|
||||||
}
|
|
||||||
}
|
|
||||||
|
|
||||||
if let Some(ref scripts) = extension.scripts {
|
|
||||||
for script in scripts.iter().filter(|script| script.enabled) {
|
|
||||||
if let Some(hit) = extension_to_hit(script, &query_lower, opt_data_source) {
|
|
||||||
hits.push(hit);
|
|
||||||
}
|
|
||||||
}
|
|
||||||
}
|
|
||||||
|
|
||||||
if let Some(ref quick_links) = extension.quick_links {
|
|
||||||
for quick_link in quick_links.iter().filter(|link| link.enabled) {
|
|
||||||
if let Some(hit) =
|
|
||||||
extension_to_hit(quick_link, &query_lower, opt_data_source)
|
|
||||||
{
|
|
||||||
hits.push(hit);
|
|
||||||
}
|
|
||||||
}
|
|
||||||
}
|
|
||||||
} else {
|
|
||||||
if let Some(hit) = extension_to_hit(extension, &query_lower, opt_data_source) {
|
|
||||||
hits.push(hit);
|
|
||||||
}
|
|
||||||
}
|
|
||||||
}
|
|
||||||
|
|
||||||
let total_hits = hits.len();
|
|
||||||
|
|
||||||
Ok(QueryResponse {
|
|
||||||
source: self.get_type(),
|
|
||||||
hits,
|
|
||||||
total_hits,
|
|
||||||
})
|
|
||||||
}
|
|
||||||
}
|
|
||||||
|
|
||||||
fn extension_to_hit(
|
|
||||||
extension: &Extension,
|
|
||||||
query_lower: &str,
|
|
||||||
opt_data_source: Option<&str>,
|
|
||||||
) -> Option<(Document, f64)> {
    if !extension.searchable() {
        return None;
    }

    let extension_type_string = extension.r#type.to_string();

    if let Some(data_source) = opt_data_source {
        let document_data_source_id = extension_type_string.as_str();

        if document_data_source_id != data_source {
            return None;
        }
    }

    let mut total_score = 0.0;

    // Score based on title match
    // Title is considered more important, so it gets a higher weight.
    if let Some(title_score) =
        calculate_text_similarity(&query_lower, &extension.title.to_lowercase())
    {
        total_score += title_score * 1.0; // Weight for title
    }

    // Score based on alias match if available
    // Alias is considered less important than title, so it gets a lower weight.
    if let Some(alias) = &extension.alias {
        if let Some(alias_score) = calculate_text_similarity(&query_lower, &alias.to_lowercase()) {
            total_score += alias_score * 0.7; // Weight for alias
        }
    }

    // Only include if there's some relevance (score is meaningfully positive)
    if total_score > 0.01 {
        let on_opened = extension.on_opened().unwrap_or_else(|| {
            panic!(
                "extension (id [{}], type [{:?}]) is searchable, and should have a valid on_opened",
                extension.id, extension.r#type
            )
        });
        let url = on_opened.url();

        let document = Document {
            id: extension.id.clone(),
            title: Some(extension.title.clone()),
            icon: Some(extension.icon.clone()),
            on_opened: Some(on_opened),
            url: Some(url),
            category: Some(extension_type_string.clone()),
            source: Some(DataSourceReference {
                id: Some(extension_type_string.clone()),
                name: Some(extension_type_string.clone()),
                icon: None,
                r#type: Some(extension_type_string),
            }),

            ..Default::default()
        };

        Some((document, total_score))
    } else {
        None
    }
}

// Calculates a similarity score between a query and a text, aiming for a [0, 1] range.
// Assumes query and text are already lowercased.
fn calculate_text_similarity(query: &str, text: &str) -> Option<f64> {
    if query.is_empty() || text.is_empty() {
        return None;
    }

    if text == query {
        return Some(1.0); // Perfect match
    }

    let query_len = query.len() as f64;
    let text_len = text.len() as f64;
    let ratio = query_len / text_len;
    let mut score: f64 = 0.0;

    // Case 1: Text starts with the query (prefix match)
    // Score: base 0.5, bonus up to 0.4 for how much of `text` is covered by `query`. Max 0.9.
    if text.starts_with(query) {
        score = score.max(0.5 + 0.4 * ratio);
    }

    // Case 2: Text contains the query (substring match, not necessarily prefix)
    // Score: base 0.3, bonus up to 0.3. Max 0.6.
    // `score.max` ensures that if it's both a prefix and contains, the higher score (prefix) is taken.
    if text.contains(query) {
        score = score.max(0.3 + 0.3 * ratio);
    }

    // Case 3: Fallback for "all query characters exist in text" (order-independent)
    if score < 0.2 {
        if query.chars().all(|c_q| text.contains(c_q)) {
            score = score.max(0.15); // Fixed low score for this weaker match type
        }
    }

    if score > 0.0 {
        // Cap non-perfect matches slightly below 1.0 to make perfect (1.0) distinct.
        Some(score.min(0.95))
    } else {
        None
    }
}

#[cfg(test)]
mod tests {
    use super::*;

    // Helper function for approximate floating point comparison
    fn approx_eq(a: f64, b: f64) -> bool {
        (a - b).abs() < 1e-10
    }

    #[test]
    fn test_empty_strings() {
        assert_eq!(calculate_text_similarity("", "text"), None);
        assert_eq!(calculate_text_similarity("query", ""), None);
        assert_eq!(calculate_text_similarity("", ""), None);
    }

    #[test]
    fn test_perfect_match() {
        assert_eq!(calculate_text_similarity("text", "text"), Some(1.0));
        assert_eq!(calculate_text_similarity("a", "a"), Some(1.0));
    }

    #[test]
    fn test_prefix_match() {
        // For "te" and "text":
        // score = 0.5 + 0.4 * (2/4) = 0.5 + 0.2 = 0.7
        let score = calculate_text_similarity("te", "text").unwrap();
        assert!(approx_eq(score, 0.7));

        // For "tex" and "text":
        // score = 0.5 + 0.4 * (3/4) = 0.5 + 0.3 = 0.8
        let score = calculate_text_similarity("tex", "text").unwrap();
        assert!(approx_eq(score, 0.8));
    }

    #[test]
    fn test_substring_match() {
        // For "ex" and "text":
        // score = 0.3 + 0.3 * (2/4) = 0.3 + 0.15 = 0.45
        let score = calculate_text_similarity("ex", "text").unwrap();
        assert!(approx_eq(score, 0.45));

        // Prefix should score higher than substring
        assert!(
            calculate_text_similarity("te", "text").unwrap()
                > calculate_text_similarity("ex", "text").unwrap()
        );
    }

    #[test]
    fn test_character_presence() {
        // "tac" occurs in "contact" as a contiguous substring ("con-tac-t"),
        // so the substring rule applies: 0.3 + 0.3 * (3/7).
        let score = calculate_text_similarity("tac", "contact").unwrap();
        assert!(approx_eq(0.3 + 0.3 * (3.0 / 7.0), score));

        assert!(calculate_text_similarity("ac", "contact").is_some());

        // Should not apply if some characters are missing
        assert_eq!(calculate_text_similarity("xyz", "contact"), None);
    }

    #[test]
    fn test_combined_scenarios() {
        // Test that character presence fallback doesn't override higher scores
        // "tex" is a prefix of "text" with score 0.8
        let score = calculate_text_similarity("tex", "text").unwrap();
        assert!(approx_eq(score, 0.8));

        // Test a case where the characters exist but it's already a substring
        // "act" is a substring of "contact" with score > 0.2, so fallback won't apply
        let expected_score = 0.3 + 0.3 * (3.0 / 7.0);
        let actual_score = calculate_text_similarity("act", "contact").unwrap();
        assert!(approx_eq(actual_score, expected_score));
    }

    #[test]
    fn test_no_similarity() {
        assert_eq!(calculate_text_similarity("xyz", "test"), None);
    }

    #[test]
    fn test_score_capping() {
        // Use a long query that's a prefix of a slightly longer text
        let long_text = "abcdefghijklmnopqrstuvwxyz";
        let long_prefix = "abcdefghijklmnopqrstuvwxy"; // All but last letter

        // Expected score would be 0.5 + 0.4 * (25/26) = 0.5 + 0.385 = 0.885
        let expected_score = 0.5 + 0.4 * (25.0 / 26.0);
        let actual_score = calculate_text_similarity(long_prefix, long_text).unwrap();
        assert!(approx_eq(actual_score, expected_score));

        // Verify that non-perfect matches are capped at 0.95
        assert!(calculate_text_similarity("almost", "almost perfect").unwrap() <= 0.95);
    }
}
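A quick worked example of how the two weighted scores combine for a single hit. This is a sketch: the extension title "terminal" and alias "term" are invented, only the prefix formula and the 1.0 / 0.7 weights come from the code above.

// Illustrative only: query "ter" against a hypothetical extension.
fn main() {
    // Title "terminal": prefix match, ratio 3/8 -> 0.5 + 0.4 * 0.375 = 0.65
    let title_score = 0.5 + 0.4 * (3.0 / 8.0);
    // Alias "term": prefix match, ratio 3/4 -> 0.5 + 0.4 * 0.75 = 0.8
    let alias_score = 0.5 + 0.4 * (3.0 / 4.0);
    // Weighted sum used when building the Document: title * 1.0 + alias * 0.7
    let total_score = title_score * 1.0 + alias_score * 0.7;
    assert!((total_score - 1.21).abs() < 1e-9);
    println!("total score: {total_score}");
}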
1193 src-tauri/src/extension/third_party/mod.rs (vendored, new file)
341 src-tauri/src/extension/third_party/store.rs (vendored, new file)
@@ -0,0 +1,341 @@
//! Extension store related stuff.

use super::LOCAL_QUERY_SOURCE_TYPE;
use crate::common::document::DataSourceReference;
use crate::common::document::Document;
use crate::common::error::SearchError;
use crate::common::search::QueryResponse;
use crate::common::search::QuerySource;
use crate::common::search::SearchQuery;
use crate::common::traits::SearchSource;
use crate::extension::Extension;
use crate::extension::PLUGIN_JSON_FILE_NAME;
use crate::extension::THIRD_PARTY_EXTENSIONS_SEARCH_SOURCE;
use crate::extension::canonicalize_relative_icon_path;
use crate::extension::third_party::get_third_party_extension_directory;
use crate::server::http_client::HttpClient;
use async_trait::async_trait;
use reqwest::StatusCode;
use serde_json::Map as JsonObject;
use serde_json::Value as Json;
use std::io::Read;
use tauri::AppHandle;

const DATA_SOURCE_ID: &str = "Extension Store";

pub(crate) struct ExtensionStore;

#[async_trait]
impl SearchSource for ExtensionStore {
    fn get_type(&self) -> QuerySource {
        QuerySource {
            r#type: LOCAL_QUERY_SOURCE_TYPE.into(),
            name: hostname::get()
                .unwrap_or(DATA_SOURCE_ID.into())
                .to_string_lossy()
                .into(),
            id: DATA_SOURCE_ID.into(),
        }
    }

    async fn search(
        &self,
        _tauri_app_handle: AppHandle,
        query: SearchQuery,
    ) -> Result<QueryResponse, SearchError> {
        const SCORE: f64 = 2000.0;

        let Some(query_string) = query.query_strings.get("query") else {
            return Ok(QueryResponse {
                source: self.get_type(),
                hits: Vec::new(),
                total_hits: 0,
            });
        };

        let lowercase_query_string = query_string.to_lowercase();
        let expected_str = "extension store";

        if expected_str.contains(&lowercase_query_string) {
            let doc = Document {
                id: DATA_SOURCE_ID.to_string(),
                category: Some(DATA_SOURCE_ID.to_string()),
                title: Some(DATA_SOURCE_ID.to_string()),
                icon: Some("font_Store".to_string()),
                source: Some(DataSourceReference {
                    r#type: Some(LOCAL_QUERY_SOURCE_TYPE.into()),
                    name: Some(DATA_SOURCE_ID.into()),
                    id: Some(DATA_SOURCE_ID.into()),
                    icon: Some("font_Store".to_string()),
                }),
                ..Default::default()
            };

            Ok(QueryResponse {
                source: self.get_type(),
                hits: vec![(doc, SCORE)],
                total_hits: 1,
            })
        } else {
            Ok(QueryResponse {
                source: self.get_type(),
                hits: Vec::new(),
                total_hits: 0,
            })
        }
    }
}

#[tauri::command]
pub(crate) async fn search_extension(
    query_params: Option<Vec<String>>,
) -> Result<Vec<Json>, String> {
    let response = HttpClient::get(
        "default_coco_server",
        "store/extension/_search",
        query_params,
    )
    .await
    .map_err(|e| format!("Failed to send request: {:?}", e))?;

    // The response is an ES-style search response.
    let mut response: JsonObject<String, Json> = response
        .json()
        .await
        .map_err(|e| format!("Failed to parse response: {:?}", e))?;

    let hits_json = response
        .remove("hits")
        .expect("the JSON response should contain field [hits]");
    let mut hits = match hits_json {
        Json::Object(obj) => obj,
        _ => panic!(
            "field [hits] should be a JSON object, but it is not, value: [{}]",
            hits_json
        ),
    };

    let Some(hits_hits_json) = hits.remove("hits") else {
        return Ok(Vec::new());
    };

    let hits_hits = match hits_hits_json {
        Json::Array(arr) => arr,
        _ => panic!(
            "field [hits.hits] should be an array, but it is not, value: [{}]",
            hits_hits_json
        ),
    };

    let mut extensions = Vec::with_capacity(hits_hits.len());
    for hit in hits_hits {
        let mut hit_obj = match hit {
            Json::Object(obj) => obj,
            _ => panic!(
                "each hit in [hits.hits] should be a JSON object, but it is not, value: [{}]",
                hit
            ),
        };
        let source = hit_obj
            .remove("_source")
            .expect("each hit should contain field [_source]");

        let mut source_obj = match source {
            Json::Object(obj) => obj,
            _ => panic!(
                "field [_source] should be a JSON object, but it is not, value: [{}]",
                source
            ),
        };

        let developer_id = source_obj
            .get("developer")
            .and_then(|dev| dev.get("id"))
            .and_then(|id| id.as_str())
            .expect("developer.id should exist")
            .to_string();

        let extension_id = source_obj
            .get("id")
            .and_then(|id| id.as_str())
            .expect("extension id should exist")
            .to_string();

        let installed = is_extension_installed(developer_id, extension_id).await;
        source_obj.insert("installed".to_string(), Json::Bool(installed));

        extensions.push(Json::Object(source_obj));
    }

    Ok(extensions)
}

async fn is_extension_installed(developer: String, extension_id: String) -> bool {
    THIRD_PARTY_EXTENSIONS_SEARCH_SOURCE
        .get()
        .unwrap()
        .extension_exists(&developer, &extension_id)
        .await
}

#[tauri::command]
pub(crate) async fn install_extension_from_store(
    tauri_app_handle: AppHandle,
    id: String,
) -> Result<(), String> {
    let path = format!("store/extension/{}/_download", id);
    let response = HttpClient::get("default_coco_server", &path, None)
        .await
        .map_err(|e| format!("Failed to download extension: {}", e))?;

    if response.status() == StatusCode::NOT_FOUND {
        return Err(format!("extension [{}] not found", id));
    }

    let bytes = response
        .bytes()
        .await
        .map_err(|e| format!("Failed to read response bytes: {}", e))?;

    let cursor = std::io::Cursor::new(bytes);
    let mut archive =
        zip::ZipArchive::new(cursor).map_err(|e| format!("Failed to read zip archive: {}", e))?;

    // The plugin.json sent from the server does not conform to our `struct Extension` definition:
    //
    // 1. Its `developer` field is a JSON object, but we need a string
    // 2. sub-extensions won't have their `id` fields set
    //
    // we need to correct it
    let mut plugin_json = archive
        .by_name(PLUGIN_JSON_FILE_NAME)
        .map_err(|e| e.to_string())?;
    let mut plugin_json_content = String::new();
    std::io::Read::read_to_string(&mut plugin_json, &mut plugin_json_content)
        .map_err(|e| e.to_string())?;
    let mut extension: Json = serde_json::from_str(&plugin_json_content)
        .map_err(|e| format!("Failed to parse plugin.json: {}", e))?;

    let mut_ref_to_developer_object: &mut Json = extension
        .as_object_mut()
        .expect("plugin.json should be an object")
        .get_mut("developer")
        .expect("plugin.json should contain field [developer]");
    let developer_id = mut_ref_to_developer_object
        .get("id")
        .expect("plugin.json should contain [developer.id]")
        .as_str()
        .expect("plugin.json field [developer.id] should be a string");
    *mut_ref_to_developer_object = Json::String(developer_id.into());

    // Set IDs for sub-extensions (commands, quicklinks, scripts)
    let mut counter = 0;
    // Helper function to set IDs for array fields
    fn set_ids_for_field(extension: &mut Json, field_name: &str, counter: &mut i32) {
        if let Some(field) = extension.as_object_mut().unwrap().get_mut(field_name) {
            if let Some(array) = field.as_array_mut() {
                for item in array {
                    if let Some(item_obj) = item.as_object_mut() {
                        if !item_obj.contains_key("id") {
                            item_obj.insert("id".to_string(), Json::String(counter.to_string()));
                            *counter += 1;
                        }
                    }
                }
            }
        }
    }
    set_ids_for_field(&mut extension, "commands", &mut counter);
    set_ids_for_field(&mut extension, "quicklinks", &mut counter);
    set_ids_for_field(&mut extension, "scripts", &mut counter);

    // Now the extension JSON is valid
    let mut extension: Extension = serde_json::from_value(extension).unwrap_or_else(|e| {
        panic!(
            "cannot parse plugin.json as struct Extension, error [{:?}]",
            e
        );
    });

    drop(plugin_json);

    // Write extension files to the extension directory
    let developer = extension.developer.clone().unwrap_or_default();
    let extension_id = extension.id.clone();
    let extension_directory = {
        let mut path = get_third_party_extension_directory(&tauri_app_handle);
        path.push(developer);
        path.push(extension_id.as_str());
        path
    };
    tokio::fs::create_dir_all(extension_directory.as_path())
        .await
        .map_err(|e| e.to_string())?;

    // Extract all files except plugin.json
    for i in 0..archive.len() {
        let mut zip_file = archive.by_index(i).map_err(|e| e.to_string())?;
        // `.name()` is safe to use in our cases, the cases listed in the below
        // page won't happen to us.
        //
        // https://docs.rs/zip/4.2.0/zip/read/struct.ZipFile.html#method.name
        //
        // Example names:
        //
        // * `assets/icon.png`
        // * `assets/screenshot.png`
        // * `plugin.json`
        //
        // Yes, the `assets` directory is not a part of it.
        let zip_file_name = zip_file.name();

        // Skip the plugin.json file as we'll create it from the extension variable
        if zip_file_name == PLUGIN_JSON_FILE_NAME {
            continue;
        }

        let dest_file_path = extension_directory.join(zip_file_name);

        // For cases like `assets/xxx.png`
        if let Some(parent_dir) = dest_file_path.parent()
            && !parent_dir.exists()
        {
            tokio::fs::create_dir_all(parent_dir)
                .await
                .map_err(|e| e.to_string())?;
        }

        let mut dest_file = tokio::fs::File::create(&dest_file_path)
            .await
            .map_err(|e| e.to_string())?;
        let mut src_bytes = Vec::with_capacity(
            zip_file
                .size()
                .try_into()
                .expect("we won't have an extension file that is bigger than 4GiB"),
        );
        zip_file
            .read_to_end(&mut src_bytes)
            .map_err(|e| e.to_string())?;
        tokio::io::copy(&mut src_bytes.as_slice(), &mut dest_file)
            .await
            .map_err(|e| e.to_string())?;
    }
    // Create plugin.json from the extension variable
    let plugin_json_path = extension_directory.join(PLUGIN_JSON_FILE_NAME);
    let extension_json = serde_json::to_string_pretty(&extension).map_err(|e| e.to_string())?;
    tokio::fs::write(&plugin_json_path, extension_json)
        .await
        .map_err(|e| e.to_string())?;

    // Turn the icon path into an absolute path if it is a valid relative path, because the frontend code needs this.
    canonicalize_relative_icon_path(&extension_directory, &mut extension)?;

    THIRD_PARTY_EXTENSIONS_SEARCH_SOURCE
        .get()
        .unwrap()
        .add_extension(extension)
        .await;

    Ok(())
}
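For reference, a minimal sketch of the Elasticsearch-style body that search_extension() unwraps. The field names ("hits", "hits.hits", "_source", "developer.id") mirror the code above; the concrete extension document is invented for illustration.

fn main() {
    let body = r#"{
        "hits": {
            "total": { "value": 1 },
            "hits": [
                {
                    "_id": "ext-1",
                    "_source": { "id": "ext-1", "developer": { "id": "acme" }, "title": "Demo" }
                }
            ]
        }
    }"#;

    let mut response: serde_json::Map<String, serde_json::Value> =
        serde_json::from_str(body).expect("valid JSON");
    let hits = response.remove("hits").expect("field [hits]");
    let hits_hits = hits
        .get("hits")
        .and_then(|v| v.as_array())
        .expect("[hits.hits] should be an array");
    for hit in hits_hits {
        // This is the object the command forwards to the frontend,
        // after adding the "installed" flag.
        let source = &hit["_source"];
        println!("developer.id = {}", source["developer"]["id"]);
    }
}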
@@ -11,19 +11,15 @@ mod util;
|
|||||||
|
|
||||||
use crate::common::register::SearchSourceRegistry;
|
use crate::common::register::SearchSourceRegistry;
|
||||||
// use crate::common::traits::SearchSource;
|
// use crate::common::traits::SearchSource;
|
||||||
use crate::common::{MAIN_WINDOW_LABEL, SETTINGS_WINDOW_LABEL};
|
use crate::common::{CHECK_WINDOW_LABEL, MAIN_WINDOW_LABEL, SETTINGS_WINDOW_LABEL};
|
||||||
use crate::server::servers::{load_or_insert_default_server, load_servers_token};
|
use crate::server::servers::{load_or_insert_default_server, load_servers_token};
|
||||||
use autostart::{change_autostart, enable_autostart};
|
use autostart::{change_autostart, ensure_autostart_state_consistent};
|
||||||
use lazy_static::lazy_static;
|
use lazy_static::lazy_static;
|
||||||
use std::sync::Mutex;
|
use std::sync::Mutex;
|
||||||
use std::sync::OnceLock;
|
use std::sync::OnceLock;
|
||||||
use tauri::async_runtime::block_on;
|
use tauri::async_runtime::block_on;
|
||||||
use tauri::plugin::TauriPlugin;
|
use tauri::plugin::TauriPlugin;
|
||||||
#[cfg(target_os = "macos")]
|
use tauri::{AppHandle, Emitter, Manager, PhysicalPosition, Runtime, WebviewWindow, WindowEvent};
|
||||||
use tauri::ActivationPolicy;
|
|
||||||
use tauri::{
|
|
||||||
AppHandle, Emitter, Manager, PhysicalPosition, Runtime, WebviewWindow, Window, WindowEvent,
|
|
||||||
};
|
|
||||||
use tauri_plugin_autostart::MacosLauncher;
|
use tauri_plugin_autostart::MacosLauncher;
|
||||||
|
|
||||||
/// Tauri store name
|
/// Tauri store name
|
||||||
@@ -32,9 +28,14 @@ pub(crate) const COCO_TAURI_STORE: &str = "coco_tauri_store";
|
|||||||
lazy_static! {
|
lazy_static! {
|
||||||
static ref PREVIOUS_MONITOR_NAME: Mutex<Option<String>> = Mutex::new(None);
|
static ref PREVIOUS_MONITOR_NAME: Mutex<Option<String>> = Mutex::new(None);
|
||||||
}
|
}
|
||||||
|
|
||||||
/// To allow us to access tauri's `AppHandle` when its context is inaccessible,
|
/// To allow us to access tauri's `AppHandle` when its context is inaccessible,
|
||||||
/// store it globally. It will be set in `init()`.
|
/// store it globally. It will be set in `init()`.
|
||||||
|
///
|
||||||
|
/// # WARNING
|
||||||
|
///
|
||||||
|
/// You may find this work, but the usage is discouraged and should be generally
|
||||||
|
/// avoided. If you do need it, always be careful that it may not be set() when
|
||||||
|
/// you access it.
|
||||||
pub(crate) static GLOBAL_TAURI_APP_HANDLE: OnceLock<AppHandle> = OnceLock::new();
|
pub(crate) static GLOBAL_TAURI_APP_HANDLE: OnceLock<AppHandle> = OnceLock::new();
|
||||||
|
|
||||||
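A minimal, self-contained sketch of the OnceLock pattern the warning above describes (the String stands in for the real AppHandle): set() succeeds exactly once, and readers must handle the not-yet-set case.

use std::sync::OnceLock;

static GLOBAL_HANDLE: OnceLock<String> = OnceLock::new();

fn main() {
    GLOBAL_HANDLE.set("app-handle".to_string()).expect("set only once");
    // A second set() is rejected instead of overwriting the value.
    assert!(GLOBAL_HANDLE.set("other".to_string()).is_err());
    // get() returns Option<&T>; None means init() has not run yet.
    match GLOBAL_HANDLE.get() {
        Some(handle) => println!("have a handle: {handle}"),
        None => println!("handle not set yet"),
    }
}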
#[tauri::command]
|
#[tauri::command]
|
||||||
@@ -64,6 +65,8 @@ pub fn run() {
|
|||||||
let ctx = tauri::generate_context!();
|
let ctx = tauri::generate_context!();
|
||||||
|
|
||||||
let mut app_builder = tauri::Builder::default();
|
let mut app_builder = tauri::Builder::default();
|
||||||
|
// Set up logger first
|
||||||
|
app_builder = app_builder.plugin(set_up_tauri_logger());
|
||||||
|
|
||||||
#[cfg(desktop)]
|
#[cfg(desktop)]
|
||||||
{
|
{
|
||||||
@@ -77,7 +80,7 @@ pub fn run() {
|
|||||||
.plugin(tauri_plugin_http::init())
|
.plugin(tauri_plugin_http::init())
|
||||||
.plugin(tauri_plugin_shell::init())
|
.plugin(tauri_plugin_shell::init())
|
||||||
.plugin(tauri_plugin_autostart::init(
|
.plugin(tauri_plugin_autostart::init(
|
||||||
MacosLauncher::AppleScript,
|
MacosLauncher::LaunchAgent,
|
||||||
None,
|
None,
|
||||||
))
|
))
|
||||||
.plugin(tauri_plugin_deep_link::init())
|
.plugin(tauri_plugin_deep_link::init())
|
||||||
@@ -87,9 +90,13 @@ pub fn run() {
|
|||||||
.plugin(tauri_plugin_macos_permissions::init())
|
.plugin(tauri_plugin_macos_permissions::init())
|
||||||
.plugin(tauri_plugin_screenshots::init())
|
.plugin(tauri_plugin_screenshots::init())
|
||||||
.plugin(tauri_plugin_process::init())
|
.plugin(tauri_plugin_process::init())
|
||||||
.plugin(tauri_plugin_updater::Builder::new().build())
|
.plugin(
|
||||||
|
tauri_plugin_updater::Builder::new()
|
||||||
|
.default_version_comparator(crate::util::updater::custom_version_comparator)
|
||||||
|
.build(),
|
||||||
|
)
|
||||||
.plugin(tauri_plugin_windows_version::init())
|
.plugin(tauri_plugin_windows_version::init())
|
||||||
.plugin(set_up_tauri_logger());
|
.plugin(tauri_plugin_opener::init());
|
||||||
|
|
||||||
// Conditional compilation for macOS
|
// Conditional compilation for macOS
|
||||||
#[cfg(target_os = "macos")]
|
#[cfg(target_os = "macos")]
|
||||||
@@ -107,7 +114,8 @@ pub fn run() {
|
|||||||
show_coco,
|
show_coco,
|
||||||
hide_coco,
|
hide_coco,
|
||||||
show_settings,
|
show_settings,
|
||||||
server::servers::get_server_token,
|
show_check,
|
||||||
|
hide_check,
|
||||||
server::servers::add_coco_server,
|
server::servers::add_coco_server,
|
||||||
server::servers::remove_coco_server,
|
server::servers::remove_coco_server,
|
||||||
server::servers::list_coco_servers,
|
server::servers::list_coco_servers,
|
||||||
@@ -123,7 +131,9 @@ pub fn run() {
|
|||||||
search::query_coco_fusion,
|
search::query_coco_fusion,
|
||||||
assistant::chat_history,
|
assistant::chat_history,
|
||||||
assistant::new_chat,
|
assistant::new_chat,
|
||||||
|
assistant::chat_create,
|
||||||
assistant::send_message,
|
assistant::send_message,
|
||||||
|
assistant::chat_chat,
|
||||||
assistant::session_chat_history,
|
assistant::session_chat_history,
|
||||||
assistant::open_session_chat,
|
assistant::open_session_chat,
|
||||||
assistant::close_session_chat,
|
assistant::close_session_chat,
|
||||||
@@ -143,12 +153,12 @@ pub fn run() {
|
|||||||
server::attachment::delete_attachment,
|
server::attachment::delete_attachment,
|
||||||
server::transcription::transcription,
|
server::transcription::transcription,
|
||||||
server::system_settings::get_system_settings,
|
server::system_settings::get_system_settings,
|
||||||
simulate_mouse_click,
|
|
||||||
extension::built_in::application::get_app_list,
|
extension::built_in::application::get_app_list,
|
||||||
extension::built_in::application::get_app_search_path,
|
extension::built_in::application::get_app_search_path,
|
||||||
extension::built_in::application::get_app_metadata,
|
extension::built_in::application::get_app_metadata,
|
||||||
extension::built_in::application::add_app_search_path,
|
extension::built_in::application::add_app_search_path,
|
||||||
extension::built_in::application::remove_app_search_path,
|
extension::built_in::application::remove_app_search_path,
|
||||||
|
extension::built_in::application::reindex_applications,
|
||||||
extension::list_extensions,
|
extension::list_extensions,
|
||||||
extension::enable_extension,
|
extension::enable_extension,
|
||||||
extension::disable_extension,
|
extension::disable_extension,
|
||||||
@@ -156,32 +166,68 @@ pub fn run() {
|
|||||||
extension::register_extension_hotkey,
|
extension::register_extension_hotkey,
|
||||||
extension::unregister_extension_hotkey,
|
extension::unregister_extension_hotkey,
|
||||||
extension::is_extension_enabled,
|
extension::is_extension_enabled,
|
||||||
|
extension::third_party::store::search_extension,
|
||||||
|
extension::third_party::store::install_extension_from_store,
|
||||||
|
extension::third_party::uninstall_extension,
|
||||||
settings::set_allow_self_signature,
|
settings::set_allow_self_signature,
|
||||||
settings::get_allow_self_signature,
|
settings::get_allow_self_signature,
|
||||||
assistant::ask_ai,
|
assistant::ask_ai,
|
||||||
crate::common::document::open,
|
crate::common::document::open,
|
||||||
|
#[cfg(any(target_os = "macos", target_os = "windows"))]
|
||||||
|
extension::built_in::file_search::config::get_file_system_config,
|
||||||
|
#[cfg(any(target_os = "macos", target_os = "windows"))]
|
||||||
|
extension::built_in::file_search::config::set_file_system_config,
|
||||||
|
server::synthesize::synthesize,
|
||||||
|
util::file::get_file_icon,
|
||||||
|
util::app_lang::update_app_lang,
|
||||||
|
#[cfg(target_os = "macos")]
|
||||||
|
setup::toggle_move_to_active_space_attribute,
|
||||||
])
|
])
|
||||||
.setup(|app| {
|
.setup(|app| {
|
||||||
let app_handle = app.handle().clone();
|
let app_handle = app.handle().clone();
|
||||||
GLOBAL_TAURI_APP_HANDLE
|
GLOBAL_TAURI_APP_HANDLE
|
||||||
.set(app_handle.clone())
|
.set(app_handle.clone())
|
||||||
.expect("variable already initialized");
|
.expect("global tauri AppHandle already initialized");
|
||||||
|
log::trace!("global Tauri AppHandle set");
|
||||||
|
|
||||||
|
#[cfg(target_os = "macos")]
|
||||||
|
{
|
||||||
|
log::trace!("hiding Dock icon on macOS");
|
||||||
|
app.set_activation_policy(tauri::ActivationPolicy::Accessory);
|
||||||
|
log::trace!("Dock icon should be hidden now");
|
||||||
|
}
|
||||||
|
|
||||||
let registry = SearchSourceRegistry::default();
|
let registry = SearchSourceRegistry::default();
|
||||||
|
|
||||||
app.manage(registry); // Store registry in Tauri's app state
|
app.manage(registry); // Store registry in Tauri's app state
|
||||||
app.manage(server::websocket::WebSocketManager::default());
|
app.manage(server::websocket::WebSocketManager::default());
|
||||||
|
|
||||||
block_on(async {
|
// This has to be called before initializing extensions as doing that
|
||||||
init(app.handle()).await;
|
// requires access to the shortcut store, which will be set by this
|
||||||
});
|
// function.
|
||||||
|
|
||||||
shortcut::enable_shortcut(app);
|
shortcut::enable_shortcut(app);
|
||||||
|
|
||||||
enable_autostart(app);
|
block_on(async {
|
||||||
|
init(app.handle()).await;
|
||||||
|
|
||||||
#[cfg(target_os = "macos")]
|
// We want all the extensions here, so no filter condition specified.
|
||||||
app.set_activation_policy(ActivationPolicy::Accessory);
|
match extension::list_extensions(app_handle.clone(), None, None, false).await {
|
||||||
|
Ok((_found_invalid_extensions, extensions)) => {
|
||||||
|
// Initializing extension relies on SearchSourceRegistry, so this should
|
||||||
|
// be executed after `app.manage(registry)`
|
||||||
|
if let Err(e) =
|
||||||
|
extension::init_extensions(app_handle.clone(), extensions).await
|
||||||
|
{
|
||||||
|
log::error!("initializing extensions failed with error [{}]", e);
|
||||||
|
}
|
||||||
|
}
|
||||||
|
Err(e) => {
|
||||||
|
log::error!("listing extensions failed with error [{}]", e);
|
||||||
|
}
|
||||||
|
}
|
||||||
|
});
|
||||||
|
|
||||||
|
ensure_autostart_state_consistent(app)?;
|
||||||
|
|
||||||
// app.listen("theme-changed", move |event| {
|
// app.listen("theme-changed", move |event| {
|
||||||
// if let Ok(payload) = serde_json::from_str::<ThemeChangedPayload>(event.payload()) {
|
// if let Ok(payload) = serde_json::from_str::<ThemeChangedPayload>(event.payload()) {
|
||||||
@@ -206,7 +252,13 @@ pub fn run() {
|
|||||||
|
|
||||||
let main_window = app.get_webview_window(MAIN_WINDOW_LABEL).unwrap();
|
let main_window = app.get_webview_window(MAIN_WINDOW_LABEL).unwrap();
|
||||||
let settings_window = app.get_webview_window(SETTINGS_WINDOW_LABEL).unwrap();
|
let settings_window = app.get_webview_window(SETTINGS_WINDOW_LABEL).unwrap();
|
||||||
setup::default(app, main_window.clone(), settings_window.clone());
|
let check_window = app.get_webview_window(CHECK_WINDOW_LABEL).unwrap();
|
||||||
|
setup::default(
|
||||||
|
app,
|
||||||
|
main_window.clone(),
|
||||||
|
settings_window.clone(),
|
||||||
|
check_window.clone(),
|
||||||
|
);
|
||||||
|
|
||||||
Ok(())
|
Ok(())
|
||||||
})
|
})
|
||||||
@@ -251,7 +303,7 @@ pub async fn init<R: Runtime>(app_handle: &AppHandle<R>) {
|
|||||||
log::error!("Failed to load server tokens: {}", err);
|
log::error!("Failed to load server tokens: {}", err);
|
||||||
}
|
}
|
||||||
|
|
||||||
let coco_servers = server::servers::get_all_servers();
|
let coco_servers = server::servers::get_all_servers().await;
|
||||||
|
|
||||||
// Get the registry from Tauri's state
|
// Get the registry from Tauri's state
|
||||||
// let registry: State<SearchSourceRegistry> = app_handle.state::<SearchSourceRegistry>();
|
// let registry: State<SearchSourceRegistry> = app_handle.state::<SearchSourceRegistry>();
|
||||||
@@ -261,12 +313,12 @@ pub async fn init<R: Runtime>(app_handle: &AppHandle<R>) {
|
|||||||
.await;
|
.await;
|
||||||
}
|
}
|
||||||
|
|
||||||
extension::built_in::pizza_engine_runtime::start_pizza_engine_runtime();
|
extension::built_in::pizza_engine_runtime::start_pizza_engine_runtime().await;
|
||||||
}
|
}
|
||||||
|
|
||||||
#[tauri::command]
|
#[tauri::command]
|
||||||
async fn show_coco<R: Runtime>(app_handle: AppHandle<R>) {
|
async fn show_coco<R: Runtime>(app_handle: AppHandle<R>) {
|
||||||
if let Some(window) = app_handle.get_window(MAIN_WINDOW_LABEL) {
|
if let Some(window) = app_handle.get_webview_window(MAIN_WINDOW_LABEL) {
|
||||||
move_window_to_active_monitor(&window);
|
move_window_to_active_monitor(&window);
|
||||||
|
|
||||||
let _ = window.show();
|
let _ = window.show();
|
||||||
@@ -279,7 +331,7 @@ async fn show_coco<R: Runtime>(app_handle: AppHandle<R>) {
|
|||||||
|
|
||||||
#[tauri::command]
|
#[tauri::command]
|
||||||
async fn hide_coco<R: Runtime>(app: AppHandle<R>) {
|
async fn hide_coco<R: Runtime>(app: AppHandle<R>) {
|
||||||
if let Some(window) = app.get_window(MAIN_WINDOW_LABEL) {
|
if let Some(window) = app.get_webview_window(MAIN_WINDOW_LABEL) {
|
||||||
if let Err(err) = window.hide() {
|
if let Err(err) = window.hide() {
|
||||||
log::error!("Failed to hide the window: {}", err);
|
log::error!("Failed to hide the window: {}", err);
|
||||||
} else {
|
} else {
|
||||||
@@ -290,7 +342,7 @@ async fn hide_coco<R: Runtime>(app: AppHandle<R>) {
|
|||||||
}
|
}
|
||||||
}
|
}
|
||||||
|
|
||||||
fn move_window_to_active_monitor<R: Runtime>(window: &Window<R>) {
|
fn move_window_to_active_monitor<R: Runtime>(window: &WebviewWindow<R>) {
|
||||||
//dbg!("Moving window to active monitor");
|
//dbg!("Moving window to active monitor");
|
||||||
// Try to get the available monitors, handle failure gracefully
|
// Try to get the available monitors, handle failure gracefully
|
||||||
let available_monitors = match window.available_monitors() {
|
let available_monitors = match window.available_monitors() {
|
||||||
@@ -383,45 +435,8 @@ fn move_window_to_active_monitor<R: Runtime>(window: &Window<R>) {
|
|||||||
}
|
}
|
||||||
}
|
}
|
||||||
|
|
||||||
#[allow(dead_code)]
|
|
||||||
fn open_settings(app: &tauri::AppHandle) {
|
|
||||||
use tauri::webview::WebviewBuilder;
|
|
||||||
log::debug!("settings menu item was clicked");
|
|
||||||
let window = app.get_webview_window("settings");
|
|
||||||
if let Some(window) = window {
|
|
||||||
let _ = window.show();
|
|
||||||
let _ = window.unminimize();
|
|
||||||
let _ = window.set_focus();
|
|
||||||
} else {
|
|
||||||
let window = tauri::window::WindowBuilder::new(app, "settings")
|
|
||||||
.title("Settings Window")
|
|
||||||
.fullscreen(false)
|
|
||||||
.resizable(false)
|
|
||||||
.minimizable(false)
|
|
||||||
.maximizable(false)
|
|
||||||
.inner_size(800.0, 600.0)
|
|
||||||
.build()
|
|
||||||
.unwrap();
|
|
||||||
|
|
||||||
let webview_builder =
|
|
||||||
WebviewBuilder::new("settings", tauri::WebviewUrl::App("/ui/settings".into()));
|
|
||||||
let _webview = window
|
|
||||||
.add_child(
|
|
||||||
webview_builder,
|
|
||||||
tauri::LogicalPosition::new(0, 0),
|
|
||||||
window.inner_size().unwrap(),
|
|
||||||
)
|
|
||||||
.unwrap();
|
|
||||||
}
|
|
||||||
}
|
|
||||||
|
|
||||||
#[tauri::command]
|
#[tauri::command]
|
||||||
async fn get_app_search_source<R: Runtime>(app_handle: AppHandle<R>) -> Result<(), String> {
|
async fn get_app_search_source(app_handle: AppHandle) -> Result<(), String> {
|
||||||
let (_found_invalid_extensions, extensions) = extension::list_extensions()
|
|
||||||
.await
|
|
||||||
.map_err(|e| e.to_string())?;
|
|
||||||
extension::init_extensions(extensions).await?;
|
|
||||||
|
|
||||||
let _ = server::connector::refresh_all_connectors(&app_handle).await;
|
let _ = server::connector::refresh_all_connectors(&app_handle).await;
|
||||||
let _ = server::datasource::refresh_all_datasources(&app_handle).await;
|
let _ = server::datasource::refresh_all_datasources(&app_handle).await;
|
||||||
|
|
||||||
@@ -430,53 +445,36 @@ async fn get_app_search_source<R: Runtime>(app_handle: AppHandle<R>) -> Result<(
|
|||||||
|
|
||||||
#[tauri::command]
|
#[tauri::command]
|
||||||
async fn show_settings(app_handle: AppHandle) {
|
async fn show_settings(app_handle: AppHandle) {
|
||||||
open_settings(&app_handle);
|
log::debug!("settings menu item was clicked");
|
||||||
|
let window = app_handle
|
||||||
|
.get_webview_window(SETTINGS_WINDOW_LABEL)
|
||||||
|
.expect("we have a settings window");
|
||||||
|
|
||||||
|
window.show().unwrap();
|
||||||
|
window.unminimize().unwrap();
|
||||||
|
window.set_focus().unwrap();
|
||||||
}
|
}
|
||||||
|
|
||||||
#[tauri::command]
|
#[tauri::command]
|
||||||
async fn simulate_mouse_click<R: Runtime>(window: WebviewWindow<R>, is_chat_mode: bool) {
|
async fn show_check(app_handle: AppHandle) {
|
||||||
#[cfg(target_os = "windows")]
|
log::debug!("check menu item was clicked");
|
||||||
{
|
let window = app_handle
|
||||||
use enigo::{Button, Coordinate, Direction, Enigo, Mouse, Settings};
|
.get_webview_window(CHECK_WINDOW_LABEL)
|
||||||
use std::{thread, time::Duration};
|
.expect("we have a check window");
|
||||||
|
|
||||||
if let Ok(mut enigo) = Enigo::new(&Settings::default()) {
|
window.show().unwrap();
|
||||||
// Save the current mouse position
|
window.unminimize().unwrap();
|
||||||
if let Ok((original_x, original_y)) = enigo.location() {
|
window.set_focus().unwrap();
|
||||||
// Retrieve the window's outer position (top-left corner)
|
|
||||||
if let Ok(position) = window.outer_position() {
|
|
||||||
// Retrieve the window's inner size (client area)
|
|
||||||
if let Ok(size) = window.inner_size() {
|
|
||||||
// Calculate the center position of the title bar
|
|
||||||
let x = position.x + (size.width as i32 / 2);
|
|
||||||
let y = if is_chat_mode {
|
|
||||||
position.y + size.height as i32 - 50
|
|
||||||
} else {
|
|
||||||
position.y + 30
|
|
||||||
};
|
|
||||||
|
|
||||||
// Move the mouse cursor to the calculated position
|
|
||||||
if enigo.move_mouse(x, y, Coordinate::Abs).is_ok() {
|
|
||||||
// // Simulate a left mouse click
|
|
||||||
let _ = enigo.button(Button::Left, Direction::Click);
|
|
||||||
// let _ = enigo.button(Button::Left, Direction::Release);
|
|
||||||
|
|
||||||
thread::sleep(Duration::from_millis(100));
|
|
||||||
|
|
||||||
// Move the mouse cursor back to the original position
|
|
||||||
let _ = enigo.move_mouse(original_x, original_y, Coordinate::Abs);
|
|
||||||
}
|
|
||||||
}
|
|
||||||
}
|
|
||||||
}
|
|
||||||
}
|
|
||||||
}
|
}
|
||||||
|
|
||||||
#[cfg(not(target_os = "windows"))]
|
#[tauri::command]
|
||||||
{
|
async fn hide_check(app_handle: AppHandle) {
|
||||||
let _ = window;
|
log::debug!("check window was closed");
|
||||||
let _ = is_chat_mode;
|
let window = &app_handle
|
||||||
}
|
.get_webview_window(CHECK_WINDOW_LABEL)
|
||||||
|
.expect("we have a check window");
|
||||||
|
|
||||||
|
window.hide().unwrap();
|
||||||
}
|
}
|
||||||
|
|
||||||
/// Log format:
|
/// Log format:
|
||||||
@@ -585,6 +583,17 @@ fn set_up_tauri_logger() -> TauriPlugin<tauri::Wry> {
|
|||||||
builder
|
builder
|
||||||
}
|
}
|
||||||
|
|
||||||
|
// When running the built binary, set `COCO_LOG` to `coco_lib=trace` to capture all logs
|
||||||
|
// that come from Coco in the log file, which helps with debugging.
|
||||||
|
if !tauri::is_dev() {
|
||||||
// We cannot guarantee that nothing else reads or writes `envp` concurrently
// (we control the Rust code, but not libc or the other shared objects we link
// against), so this has to be an unsafe call.
|
unsafe {
|
||||||
|
std::env::set_var("COCO_LOG", "coco_lib=trace");
|
||||||
|
}
|
||||||
|
}
|
||||||
|
|
||||||
let mut builder = tauri_plugin_log::Builder::new();
|
let mut builder = tauri_plugin_log::Builder::new();
|
||||||
builder = builder.format(|out, message, record| {
|
builder = builder.format(|out, message, record| {
|
||||||
let now = chrono::Local::now().format("%m-%d %H:%M:%S");
|
let now = chrono::Local::now().format("%m-%d %H:%M:%S");
|
||||||
|
|||||||
@@ -1,5 +1,112 @@
// Prevents additional console window on Windows in release, DO NOT REMOVE!!
#![cfg_attr(not(debug_assertions), windows_subsystem = "windows")]

use std::fs::OpenOptions;
use std::io::Write;
use std::path::PathBuf;

/// Helper function to return the log directory.
///
/// This should return the same value as `tauri_app_handle.path().app_log_dir().unwrap()`.
fn app_log_dir() -> PathBuf {
    // This function `app_log_dir()` is for the panic hook, which should be set
    // before Tauri performs any initialization. At that point, we do not have
    // access to the identifier provided by Tauri, so we need to define our own
    // one here.
    //
    // NOTE: If you update identifier in the following files, update this one
    // as well!
    //
    // src-tauri/tauri.linux.conf.json
    // src-tauri/Entitlements.plist
    // src-tauri/tauri.conf.json
    // src-tauri/Info.plist
    const IDENTIFIER: &str = "rs.coco.app";

    #[cfg(target_os = "macos")]
    let path = dirs::home_dir()
        .expect("cannot find the home directory, Coco should never run in such an environment")
        .join("Library/Logs")
        .join(IDENTIFIER);

    #[cfg(not(target_os = "macos"))]
    let path = dirs::data_local_dir()
        .expect("app local dir is None, we should not encounter this")
        .join(IDENTIFIER)
        .join("logs");

    path
}

/// Set up panic hook to log panic information to a file
fn setup_panic_hook() {
    std::panic::set_hook(Box::new(|panic_info| {
        let timestamp = chrono::Local::now();
        // "%Y-%m-%d %H:%M:%S"
        //
        // I would like to use the above format, but Windows does not allow that
        // and complains with OS error 123.
        let datetime_str = timestamp.format("%Y-%m-%d-%H-%M-%S").to_string();

        let log_dir = app_log_dir();

        // Ensure the log directory exists
        if let Err(e) = std::fs::create_dir_all(&log_dir) {
            eprintln!("Panic hook error: failed to create log directory: {}", e);
            return;
        }

        let panic_file = log_dir.join(format!("{}_rust_panic.log", datetime_str));

        // Prepare panic information
        let panic_message = if let Some(s) = panic_info.payload().downcast_ref::<&str>() {
            s.to_string()
        } else if let Some(s) = panic_info.payload().downcast_ref::<String>() {
            s.clone()
        } else {
            "Unknown panic message".to_string()
        };

        let location = if let Some(location) = panic_info.location() {
            format!(
                "{}:{}:{}",
                location.file(),
                location.line(),
                location.column()
            )
        } else {
            "Unknown location".to_string()
        };

        // Use `force_capture()` instead of `capture()` as we want backtrace
        // regardless of whether the corresponding env vars are set or not.
        let backtrace = std::backtrace::Backtrace::force_capture();

        let panic_log = format!(
            "Time: [{}]\nLocation: [{}]\nMessage: [{}]\nBacktrace: \n{}",
            datetime_str, location, panic_message, backtrace
        );

        // Write to panic file
        match OpenOptions::new()
            .create(true)
            .append(true)
            .open(&panic_file)
        {
            Ok(mut file) => {
                if let Err(e) = writeln!(file, "{}", panic_log) {
                    eprintln!("Panic hook error: Failed to write panic to file: {}", e);
                }
            }
            Err(e) => {
                eprintln!("Panic hook error: Failed to open panic log file: {}", e);
            }
        }
    }));
}

fn main() {
    // Panic hook setup should be the first thing to do, everything could panic!
    setup_panic_hook();
    coco_lib::run();
}
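One rough way to exercise the panic hook by hand; a sketch only — the test module name and the catch_unwind dance are assumptions, not part of the commit.

#[cfg(test)]
mod panic_hook_smoke_test {
    #[test]
    fn writes_a_panic_log() {
        super::setup_panic_hook();
        // Trip the hook without aborting the test harness.
        let _ = std::panic::catch_unwind(|| panic!("boom"));
        // A "*_rust_panic.log" file should now exist in app_log_dir().
        let mut entries = std::fs::read_dir(super::app_log_dir()).expect("log dir exists");
        assert!(entries.any(|entry| {
            entry
                .map(|e| e.file_name().to_string_lossy().ends_with("_rust_panic.log"))
                .unwrap_or(false)
        }));
    }
}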
|
|||||||
@@ -4,17 +4,20 @@ use crate::common::search::{
|
|||||||
FailedRequest, MultiSourceQueryResponse, QueryHits, QueryResponse, QuerySource, SearchQuery,
|
FailedRequest, MultiSourceQueryResponse, QueryHits, QueryResponse, QuerySource, SearchQuery,
|
||||||
};
|
};
|
||||||
use crate::common::traits::SearchSource;
|
use crate::common::traits::SearchSource;
|
||||||
|
use crate::server::servers::logout_coco_server;
|
||||||
|
use crate::server::servers::mark_server_as_offline;
|
||||||
use function_name::named;
|
use function_name::named;
|
||||||
use futures::stream::FuturesUnordered;
|
|
||||||
use futures::StreamExt;
|
use futures::StreamExt;
|
||||||
|
use futures::stream::FuturesUnordered;
|
||||||
|
use reqwest::StatusCode;
|
||||||
use std::cmp::Reverse;
|
use std::cmp::Reverse;
|
||||||
use std::collections::HashMap;
|
use std::collections::HashMap;
|
||||||
use std::collections::HashSet;
|
use std::collections::HashSet;
|
||||||
use std::future::Future;
|
use std::future::Future;
|
||||||
use std::sync::Arc;
|
use std::sync::Arc;
|
||||||
use tauri::{AppHandle, Manager, Runtime};
|
use tauri::{AppHandle, Manager};
|
||||||
use tokio::time::error::Elapsed;
|
use tokio::time::error::Elapsed;
|
||||||
use tokio::time::{timeout, Duration};
|
use tokio::time::{Duration, timeout};
|
||||||
|
|
||||||
/// Helper function to return the Future used for querying querysources.
|
/// Helper function to return the Future used for querying querysources.
|
||||||
///
|
///
|
||||||
@@ -31,6 +34,7 @@ fn same_type_futures(
|
|||||||
query_source_trait_object: Arc<dyn SearchSource>,
|
query_source_trait_object: Arc<dyn SearchSource>,
|
||||||
timeout_duration: Duration,
|
timeout_duration: Duration,
|
||||||
search_query: SearchQuery,
|
search_query: SearchQuery,
|
||||||
|
tauri_app_handle: AppHandle,
|
||||||
) -> impl Future<
|
) -> impl Future<
|
||||||
Output = (
|
Output = (
|
||||||
QuerySource,
|
QuerySource,
|
||||||
@@ -42,7 +46,9 @@ fn same_type_futures(
|
|||||||
// Store `query_source` as part of future for debugging purposes.
|
// Store `query_source` as part of future for debugging purposes.
|
||||||
query_source,
|
query_source,
|
||||||
timeout(timeout_duration, async {
|
timeout(timeout_duration, async {
|
||||||
query_source_trait_object.search(search_query).await
|
query_source_trait_object
|
||||||
|
.search(tauri_app_handle.clone(), search_query)
|
||||||
|
.await
|
||||||
})
|
})
|
||||||
.await,
|
.await,
|
||||||
)
|
)
|
||||||
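A self-contained sketch of the timeout wrapper used here: each query source's future is raced against a deadline, and the Err(Elapsed) branch is what later gets reported as a timed-out source. The names below are illustrative, not the real search types.

use tokio::time::{Duration, timeout};

#[tokio::main]
async fn main() {
    // Stand-in for a slow query source.
    let slow_source = async {
        tokio::time::sleep(Duration::from_millis(200)).await;
        "hits"
    };

    match timeout(Duration::from_millis(50), slow_source).await {
        Ok(hits) => println!("source answered in time: {hits}"),
        Err(elapsed) => println!("source timed out: {elapsed}"),
    }
}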
@@ -51,8 +57,8 @@ fn same_type_futures(
|
|||||||
|
|
||||||
#[named]
|
#[named]
|
||||||
#[tauri::command]
|
#[tauri::command]
|
||||||
pub async fn query_coco_fusion<R: Runtime>(
|
pub async fn query_coco_fusion(
|
||||||
app_handle: AppHandle<R>,
|
app_handle: AppHandle,
|
||||||
from: u64,
|
from: u64,
|
||||||
size: u64,
|
size: u64,
|
||||||
query_strings: HashMap<String, String>,
|
query_strings: HashMap<String, String>,
|
||||||
@@ -77,8 +83,10 @@ pub async fn query_coco_fusion<R: Runtime>(
|
|||||||
let timeout_duration = Duration::from_millis(query_timeout);
|
let timeout_duration = Duration::from_millis(query_timeout);
|
||||||
|
|
||||||
log::debug!(
|
log::debug!(
|
||||||
"{}(): {:?}, timeout: {:?}",
|
"{}() invoked with parameters: from: [{}], size: [{}], query_strings: [{:?}], timeout: [{:?}]",
|
||||||
function_name!(),
|
function_name!(),
|
||||||
|
from,
|
||||||
|
size,
|
||||||
query_strings,
|
query_strings,
|
||||||
timeout_duration
|
timeout_duration
|
||||||
);
|
);
|
||||||
@@ -92,11 +100,30 @@ pub async fn query_coco_fusion<R: Runtime>(
|
|||||||
query_source_id
|
query_source_id
|
||||||
);
|
);
|
||||||
|
|
||||||
let query_source_trait_object_index = sources_list
|
let opt_query_source_trait_object_index = sources_list
|
||||||
.iter()
|
.iter()
|
||||||
.position(|query_source| &query_source.get_type().id == query_source_id).unwrap_or_else(|| {
|
.position(|query_source| &query_source.get_type().id == query_source_id);
|
||||||
panic!("frontend code invoked {}() with parameter [querysource={}], but we do not have this query source, the states are inconsistent! Available query sources {:?}", function_name!(), query_source_id, sources_list.iter().map(|qs| qs.get_type().id).collect::<Vec<_>>());
|
|
||||||
|
let Some(query_source_trait_object_index) = opt_query_source_trait_object_index else {
|
||||||
|
// It is possible (an edge case) that the frontend invokes `query_coco_fusion()` with a
|
||||||
|
// datasource that does not exist in the source list:
|
||||||
|
//
|
||||||
|
// 1. Search applications
|
||||||
|
// 2. Navigate to the application sub page
|
||||||
|
// 3. Disable the application extension in settings
|
||||||
|
// 4. hide the search window
|
||||||
|
// 5. Re-open the search window and search for something
|
||||||
|
//
|
||||||
|
// The application search source is not in the source list because the extension
|
||||||
|
// has been disabled, but the last search is indeed invoked with parameter
|
||||||
|
// `datasource=application`.
|
||||||
|
return Ok(MultiSourceQueryResponse {
|
||||||
|
failed: Vec::new(),
|
||||||
|
hits: Vec::new(),
|
||||||
|
total_hits: 0,
|
||||||
});
|
});
|
||||||
|
};
|
||||||
|
|
||||||
let query_source_trait_object = sources_list.remove(query_source_trait_object_index);
|
let query_source_trait_object = sources_list.remove(query_source_trait_object_index);
|
||||||
let query_source = query_source_trait_object.get_type();
|
let query_source = query_source_trait_object.get_type();
|
||||||
|
|
||||||
@@ -105,16 +132,25 @@ pub async fn query_coco_fusion<R: Runtime>(
|
|||||||
query_source_trait_object,
|
query_source_trait_object,
|
||||||
timeout_duration,
|
timeout_duration,
|
||||||
search_query,
|
search_query,
|
||||||
|
app_handle.clone(),
|
||||||
));
|
));
|
||||||
} else {
|
} else {
|
||||||
|
log::debug!(
|
||||||
|
"will query querysources {:?}",
|
||||||
|
sources_list
|
||||||
|
.iter()
|
||||||
|
.map(|search_source| search_source.get_type().id.clone())
|
||||||
|
.collect::<Vec<String>>()
|
||||||
|
);
|
||||||
|
|
||||||
for query_source_trait_object in sources_list {
|
for query_source_trait_object in sources_list {
|
||||||
let query_source = query_source_trait_object.get_type().clone();
|
let query_source = query_source_trait_object.get_type().clone();
|
||||||
log::debug!("will query querysource [{}]", query_source.id);
|
|
||||||
futures.push(same_type_futures(
|
futures.push(same_type_futures(
|
||||||
query_source,
|
query_source,
|
||||||
query_source_trait_object,
|
query_source_trait_object,
|
||||||
timeout_duration,
|
timeout_duration,
|
||||||
search_query.clone(),
|
search_query.clone(),
|
||||||
|
app_handle.clone(),
|
||||||
));
|
));
|
||||||
}
|
}
|
||||||
}
|
}
|
||||||
@@ -172,9 +208,38 @@ pub async fn query_coco_fusion<R: Runtime>(
|
|||||||
query_source.id,
|
query_source.id,
|
||||||
search_error
|
search_error
|
||||||
);
|
);
|
||||||
|
|
||||||
|
let mut status_code_num: u16 = 0;
|
||||||
|
|
||||||
|
if let SearchError::HttpError {
|
||||||
|
status_code: opt_status_code,
|
||||||
|
msg: _,
|
||||||
|
} = search_error
|
||||||
|
{
|
||||||
|
if let Some(status_code) = opt_status_code {
|
||||||
|
status_code_num = status_code.as_u16();
|
||||||
|
if status_code != StatusCode::OK {
|
||||||
|
if status_code == StatusCode::UNAUTHORIZED {
|
||||||
|
// This Coco server is unavailable. In addition to marking it as
|
||||||
|
// unavailable, we need to log out because the status code is 401.
|
||||||
|
logout_coco_server(app_handle.clone(), query_source.id.clone()).await.unwrap_or_else(|e| {
|
||||||
|
panic!(
|
||||||
|
"the search request to Coco server [id {}, name {}] failed with status code {}, the login token is invalid, we are trying to log out, but failed with error [{}]",
|
||||||
|
query_source.id, query_source.name, StatusCode::UNAUTHORIZED, e
|
||||||
|
);
|
||||||
|
})
|
||||||
|
} else {
|
||||||
|
// This Coco server is unavailable
|
||||||
|
mark_server_as_offline(app_handle.clone(), &query_source.id)
|
||||||
|
.await;
|
||||||
|
}
|
||||||
|
}
|
||||||
|
}
|
||||||
|
}
|
||||||
|
|
||||||
failed_requests.push(FailedRequest {
|
failed_requests.push(FailedRequest {
|
||||||
source: query_source,
|
source: query_source,
|
||||||
status: 0,
|
status: status_code_num,
|
||||||
error: Some(search_error.to_string()),
|
error: Some(search_error.to_string()),
|
||||||
reason: None,
|
reason: None,
|
||||||
});
|
});
|
||||||
|
|||||||
@@ -15,42 +15,6 @@ pub struct UploadAttachmentResponse {
|
|||||||
pub attachments: Vec<String>,
|
pub attachments: Vec<String>,
|
||||||
}
|
}
|
||||||
|
|
||||||
#[derive(Debug, Serialize, Deserialize)]
|
|
||||||
pub struct AttachmentSource {
|
|
||||||
pub id: String,
|
|
||||||
pub created: String,
|
|
||||||
pub updated: String,
|
|
||||||
pub session: String,
|
|
||||||
pub name: String,
|
|
||||||
pub icon: String,
|
|
||||||
pub url: String,
|
|
||||||
pub size: u64,
|
|
||||||
}
|
|
||||||
|
|
||||||
#[derive(Debug, Serialize, Deserialize)]
|
|
||||||
pub struct AttachmentHit {
|
|
||||||
pub _index: String,
|
|
||||||
pub _type: Option<String>,
|
|
||||||
pub _id: String,
|
|
||||||
pub _score: Option<f64>,
|
|
||||||
pub _source: AttachmentSource,
|
|
||||||
}
|
|
||||||
|
|
||||||
#[derive(Debug, Serialize, Deserialize)]
|
|
||||||
pub struct AttachmentHits {
|
|
||||||
pub total: Value,
|
|
||||||
pub max_score: Option<f64>,
|
|
||||||
pub hits: Option<Vec<AttachmentHit>>,
|
|
||||||
}
|
|
||||||
|
|
||||||
#[derive(Debug, Serialize, Deserialize)]
|
|
||||||
pub struct GetAttachmentResponse {
|
|
||||||
pub took: u32,
|
|
||||||
pub timed_out: bool,
|
|
||||||
pub _shards: Option<Value>,
|
|
||||||
pub hits: AttachmentHits,
|
|
||||||
}
|
|
||||||
|
|
||||||
#[derive(Debug, Serialize, Deserialize)]
|
#[derive(Debug, Serialize, Deserialize)]
|
||||||
pub struct DeleteAttachmentResponse {
|
pub struct DeleteAttachmentResponse {
|
||||||
pub _id: String,
|
pub _id: String,
|
||||||
@@ -60,7 +24,6 @@ pub struct DeleteAttachmentResponse {
|
|||||||
#[command]
|
#[command]
|
||||||
pub async fn upload_attachment(
|
pub async fn upload_attachment(
|
||||||
server_id: String,
|
server_id: String,
|
||||||
session_id: String,
|
|
||||||
file_paths: Vec<PathBuf>,
|
file_paths: Vec<PathBuf>,
|
||||||
) -> Result<UploadAttachmentResponse, String> {
|
) -> Result<UploadAttachmentResponse, String> {
|
||||||
let mut form = Form::new();
|
let mut form = Form::new();
|
||||||
@@ -82,10 +45,12 @@ pub async fn upload_attachment(
|
|||||||
form = form.part("files", part);
|
form = form.part("files", part);
|
||||||
}
|
}
|
||||||
|
|
||||||
let server = get_server_by_id(&server_id).ok_or("Server not found")?;
|
let server = get_server_by_id(&server_id)
|
||||||
let url = HttpClient::join_url(&server.endpoint, &format!("chat/{}/_upload", session_id));
|
.await
|
||||||
|
.ok_or("Server not found")?;
|
||||||
|
let url = HttpClient::join_url(&server.endpoint, &format!("attachment/_upload"));
|
||||||
|
|
||||||
let token = get_server_token(&server_id).await?;
|
let token = get_server_token(&server_id).await;
|
||||||
let mut headers = HashMap::new();
|
let mut headers = HashMap::new();
|
||||||
if let Some(token) = token {
|
if let Some(token) = token {
|
||||||
headers.insert("X-API-TOKEN".to_string(), token.access_token);
|
headers.insert("X-API-TOKEN".to_string(), token.access_token);
|
||||||
@@ -107,12 +72,9 @@ pub async fn upload_attachment(
|
|||||||
}
|
}
|
||||||
|
|
||||||
#[command]
|
#[command]
|
||||||
pub async fn get_attachment(
|
pub async fn get_attachment(server_id: String, session_id: String) -> Result<Value, String> {
|
||||||
server_id: String,
|
let mut query_params = Vec::new();
|
||||||
session_id: String,
|
query_params.push(format!("session={}", session_id));
|
||||||
) -> Result<GetAttachmentResponse, String> {
|
|
||||||
let mut query_params = HashMap::new();
|
|
||||||
query_params.insert("session".to_string(), serde_json::Value::String(session_id));
|
|
||||||
|
|
||||||
let response = HttpClient::get(&server_id, "/attachment/_search", Some(query_params))
|
let response = HttpClient::get(&server_id, "/attachment/_search", Some(query_params))
|
||||||
.await
|
.await
|
||||||
@@ -120,7 +82,7 @@ pub async fn get_attachment(
|
|||||||
|
|
||||||
let body = get_response_body_text(response).await?;
|
let body = get_response_body_text(response).await?;
|
||||||
|
|
||||||
serde_json::from_str::<GetAttachmentResponse>(&body)
|
serde_json::from_str::<Value>(&body)
|
||||||
.map_err(|e| format!("Failed to parse attachment response: {}", e))
|
.map_err(|e| format!("Failed to parse attachment response: {}", e))
|
||||||
}
|
}
|
||||||
|
|
||||||
|
|||||||
@@ -20,15 +20,15 @@ pub async fn handle_sso_callback<R: Runtime>(
     code: String,
 ) -> Result<(), String> {
     // Retrieve the server details using the server ID
-    let server = get_server_by_id(&server_id);
+    let server = get_server_by_id(&server_id).await;

     let expire_in = 3600; // TODO, need to update to actual expire_in value
     if let Some(mut server) = server {
         // Save the access token for the server
         let access_token = ServerAccessToken::new(server_id.clone(), code.clone(), expire_in);
         // dbg!(&server_id, &request_id, &code, &token);
-        save_access_token(server_id.clone(), access_token);
-        persist_servers_token(&app_handle)?;
+        save_access_token(server_id.clone(), access_token).await;
+        persist_servers_token(&app_handle).await?;

         // Register the server to the search source
         try_register_server_to_search_source(app_handle.clone(), &server).await;
@@ -41,7 +41,7 @@ pub async fn handle_sso_callback<R: Runtime>(
             Ok(p) => {
                 server.profile = Some(p);
                 server.available = true;
-                save_server(&server);
+                save_server(&server).await;
                 persist_servers(&app_handle).await?;
                 Ok(())
             }
@@ -1,7 +1,8 @@
 use crate::common::connector::Connector;
 use crate::common::search::parse_search_results;
-use crate::server::http_client::HttpClient;
+use crate::server::http_client::{HttpClient, status_code_check};
 use crate::server::servers::get_all_servers;
+use http::StatusCode;
 use lazy_static::lazy_static;
 use std::collections::HashMap;
 use std::sync::{Arc, RwLock};
@@ -29,7 +30,7 @@ pub fn get_connector_by_id(server_id: &str, connector_id: &str) -> Option<Connec
 }

 pub async fn refresh_all_connectors<R: Runtime>(app_handle: &AppHandle<R>) -> Result<(), String> {
-    let servers = get_all_servers();
+    let servers = get_all_servers().await;

     // Collect all the tasks for fetching and refreshing connectors
     let mut server_map = HashMap::new();
@@ -107,6 +108,7 @@ pub async fn fetch_connectors_by_server(id: &str) -> Result<Vec<Connector>, Stri
         // dbg!("Error fetching connector for id {}: {}", &id, &e);
         format!("Error fetching connector: {}", e)
     })?;
+    status_code_check(&resp, &[StatusCode::OK, StatusCode::CREATED])?;

     // Parse the search results directly from the response body
     let datasource: Vec<Connector> = parse_search_results(resp)
@@ -1,21 +1,14 @@
 use crate::common::datasource::DataSource;
 use crate::common::search::parse_search_results;
 use crate::server::connector::get_connector_by_id;
-use crate::server::http_client::HttpClient;
+use crate::server::http_client::{HttpClient, status_code_check};
 use crate::server::servers::get_all_servers;
+use http::StatusCode;
 use lazy_static::lazy_static;
-use serde_json::Value;
 use std::collections::HashMap;
 use std::sync::{Arc, RwLock};
 use tauri::{AppHandle, Runtime};

-#[derive(serde::Deserialize, Debug)]
-pub struct GetDatasourcesByServerOptions {
-    pub from: Option<u32>,
-    pub size: Option<u32>,
-    pub query: Option<serde_json::Value>,
-}

 lazy_static! {
     static ref DATASOURCE_CACHE: Arc<RwLock<HashMap<String, HashMap<String, DataSource>>>> =
         Arc::new(RwLock::new(HashMap::new()));
@@ -41,7 +34,7 @@ pub fn get_datasources_from_cache(server_id: &str) -> Option<HashMap<String, Dat
 pub async fn refresh_all_datasources<R: Runtime>(_app_handle: &AppHandle<R>) -> Result<(), String> {
     // dbg!("Attempting to refresh all datasources");

-    let servers = get_all_servers();
+    let servers = get_all_servers().await;

     let mut server_map = HashMap::new();

@@ -97,29 +90,13 @@ pub async fn refresh_all_datasources<R: Runtime>(_app_handle: &AppHandle<R>) ->
 #[tauri::command]
 pub async fn datasource_search(
     id: &str,
-    options: Option<GetDatasourcesByServerOptions>,
+    query_params: Option<Vec<String>>, //["query=abc", "filter=er", "filter=efg", "from=0", "size=5"],
 ) -> Result<Vec<DataSource>, String> {
-    let from = options.as_ref().and_then(|opt| opt.from).unwrap_or(0);
-    let size = options.as_ref().and_then(|opt| opt.size).unwrap_or(10000);
-
-    let mut body = serde_json::json!({
-        "from": from,
-        "size": size,
-    });
-
-    if let Some(q) = options.and_then(|get_data_source_options| get_data_source_options.query ) {
-        body["query"] = q;
-    }
-
     // Perform the async HTTP request outside the cache lock
-    let resp = HttpClient::post(
-        id,
-        "/datasource/_search",
-        None,
-        Some(reqwest::Body::from(body.to_string())),
-    )
+    let resp = HttpClient::post(id, "/datasource/_search", query_params, None)
         .await
         .map_err(|e| format!("Error fetching datasource: {}", e))?;
+    status_code_check(&resp, &[StatusCode::OK, StatusCode::CREATED])?;

     // Parse the search results from the response
     let datasources: Vec<DataSource> = parse_search_results(resp).await.map_err(|e| {
@@ -136,28 +113,13 @@ pub async fn datasource_search(
 #[tauri::command]
 pub async fn mcp_server_search(
     id: &str,
-    from: u32,
-    size: u32,
-    query: Option<HashMap<String, Value>>,
+    query_params: Option<Vec<String>>,
 ) -> Result<Vec<DataSource>, String> {
-    let mut body = serde_json::json!({
-        "from": from,
-        "size": size,
-    });
-
-    if let Some(q) = query {
-        body["query"] = serde_json::to_value(q).map_err(|e| e.to_string())?;
-    }
-
     // Perform the async HTTP request outside the cache lock
-    let resp = HttpClient::post(
-        id,
-        "/mcp_server/_search",
-        None,
-        Some(reqwest::Body::from(body.to_string())),
-    )
+    let resp = HttpClient::post(id, "/mcp_server/_search", query_params, None)
        .await
        .map_err(|e| format!("Error fetching datasource: {}", e))?;
+    status_code_check(&resp, &[StatusCode::OK, StatusCode::CREATED])?;

     // Parse the search results from the response
     let mcp_server: Vec<DataSource> = parse_search_results(resp).await.map_err(|e| {
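Callers of `datasource_search` now pass their search criteria as preformatted `key=value` strings, matching the inline parameter comment. A hedged sketch of a backend-side call; the server id and values are placeholders taken from that comment:

// Hedged sketch: assembling the new-style parameters for datasource_search.
// The server id and filter values below are placeholders.
async fn example_datasource_query() -> Result<(), String> {
    let params = vec![
        "query=abc".to_string(),
        "filter=er".to_string(),
        "filter=efg".to_string(),
        "from=0".to_string(),
        "size=5".to_string(),
    ];
    let datasources = datasource_search("my-server-id", Some(params)).await?;
    println!("got {} datasources", datasources.len());
    Ok(())
}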
@@ -1,17 +1,19 @@
 use crate::server::servers::{get_server_by_id, get_server_token};
-use http::{HeaderName, HeaderValue};
+use crate::util::app_lang::get_app_lang;
+use crate::util::platform::Platform;
+use http::{HeaderName, HeaderValue, StatusCode};
 use once_cell::sync::Lazy;
 use reqwest::{Client, Method, RequestBuilder};
 use std::collections::HashMap;
+use std::sync::LazyLock;
 use std::time::Duration;
-use tauri_plugin_store::JsonValue;
 use tokio::sync::Mutex;

 pub(crate) fn new_reqwest_http_client(accept_invalid_certs: bool) -> Client {
     Client::builder()
-        .read_timeout(Duration::from_secs(3)) // Set a timeout of 3 second
-        .connect_timeout(Duration::from_secs(3)) // Set a timeout of 3 second
-        .timeout(Duration::from_secs(10)) // Set a timeout of 10 seconds
+        .read_timeout(Duration::from_secs(60)) // Set a timeout of 60 second
+        .connect_timeout(Duration::from_secs(30)) // Set a timeout of 30 second
+        .timeout(Duration::from_secs(5 * 60)) // Set a timeout of 5 minute
         .danger_accept_invalid_certs(accept_invalid_certs) // allow self-signed certificates
         .build()
         .expect("Failed to build client")
@@ -27,6 +29,26 @@ pub static HTTP_CLIENT: Lazy<Mutex<Client>> = Lazy::new(|| {
     Mutex::new(new_reqwest_http_client(allow_self_signature))
 });

+/// These header values won't change during a process's lifetime.
+static STATIC_HEADERS: LazyLock<HashMap<String, String>> = LazyLock::new(|| {
+    HashMap::from([
+        (
+            "X-OS-NAME".into(),
+            Platform::current()
+                .to_os_name_http_header_str()
+                .into_owned(),
+        ),
+        (
+            "X-OS-VER".into(),
+            sysinfo::System::os_version()
+                .expect("sysinfo::System::os_version() should be Some on major systems"),
+        ),
+        ("X-OS-ARCH".into(), sysinfo::System::cpu_arch()),
+        ("X-APP-NAME".into(), "coco-app".into()),
+        ("X-APP-VER".into(), env!("CARGO_PKG_VERSION").into()),
+    ])
+});
+
 pub struct HttpClient;

 impl HttpClient {
@@ -40,7 +62,7 @@ impl HttpClient {
     pub async fn send_raw_request(
         method: Method,
         url: &str,
-        query_params: Option<HashMap<String, JsonValue>>,
+        query_params: Option<Vec<String>>,
         headers: Option<HashMap<String, String>>,
         body: Option<reqwest::Body>,
     ) -> Result<reqwest::Response, String> {
@@ -74,7 +96,7 @@ impl HttpClient {
         method: Method,
         url: &str,
         headers: Option<HashMap<String, String>>,
-        query_params: Option<HashMap<String, JsonValue>>, // Add query parameters
+        query_params: Option<Vec<String>>, // Add query parameters
         body: Option<reqwest::Body>,
     ) -> RequestBuilder {
         let client = HTTP_CLIENT.lock().await; // Acquire the lock on HTTP_CLIENT
@@ -82,8 +104,32 @@ impl HttpClient {
         // Build the request
         let mut request_builder = client.request(method.clone(), url);

-        if let Some(h) = headers {
-            let mut req_headers = reqwest::header::HeaderMap::new();
+        // Populate the headers defined by us
+        let mut req_headers = reqwest::header::HeaderMap::new();
+        for (key, value) in STATIC_HEADERS.iter() {
+            let key = HeaderName::from_bytes(key.as_bytes())
+                .expect("headers defined by us should be valid");
+            let value = HeaderValue::from_str(value.trim()).unwrap_or_else(|e| {
+                panic!(
+                    "header value [{}] is invalid, error [{}], this should be unreachable",
+                    value, e
+                );
+            });
+            req_headers.insert(key, value);
+        }
+        let app_lang = get_app_lang().await.to_string();
+        req_headers.insert(
+            "X-APP-LANG",
+            HeaderValue::from_str(&app_lang).unwrap_or_else(|e| {
+                panic!(
+                    "header value [{}] is invalid, error [{}], this should be unreachable",
+                    app_lang, e
+                );
+            }),
+        );
+
+        // Headers from the function parameter
+        if let Some(h) = headers {
             for (key, value) in h.into_iter() {
                 match (
                     HeaderName::from_bytes(key.as_bytes()),
@@ -106,24 +152,9 @@ impl HttpClient {
             request_builder = request_builder.headers(req_headers);
         }

-        if let Some(query) = query_params {
-            // Convert only supported value types into strings
-            let query: HashMap<String, String> = query
-                .into_iter()
-                .filter_map(|(k, v)| {
-                    match v {
-                        JsonValue::String(s) => Some((k, s)),
-                        JsonValue::Number(n) => Some((k, n.to_string())),
-                        JsonValue::Bool(b) => Some((k, b.to_string())),
-                        _ => {
-                            dbg!(
-                                "Unsupported query parameter type. Only strings, numbers, and booleans are supported.",k,v,
-                            );
-                            None
-                        } // skip arrays, objects, nulls
-                    }
-                })
-                .collect();
+        if let Some(params) = query_params {
+            let query: Vec<(&str, &str)> =
+                params.iter().filter_map(|s| s.split_once('=')).collect();
             request_builder = request_builder.query(&query);
         }

@@ -140,18 +171,18 @@ impl HttpClient {
         method: Method,
         path: &str,
         custom_headers: Option<HashMap<String, String>>,
-        query_params: Option<HashMap<String, JsonValue>>,
+        query_params: Option<Vec<String>>,
         body: Option<reqwest::Body>,
     ) -> Result<reqwest::Response, String> {
         // Fetch the server using the server_id
-        let server = get_server_by_id(server_id);
+        let server = get_server_by_id(server_id).await;
         if let Some(s) = server {
             // Construct the URL
             let url = HttpClient::join_url(&s.endpoint, path);

             // Retrieve the token for the server (token is optional)
             let token = get_server_token(server_id)
-                .await?
+                .await
                 .map(|t| t.access_token.clone());

             let mut headers = if let Some(custom_headers) = custom_headers {
@@ -174,7 +205,7 @@ impl HttpClient {

             Self::send_raw_request(method, &url, query_params, Some(headers), body).await
         } else {
-            Err("Server not found".to_string())
+            Err(format!("Server [{}] not found", server_id))
         }
     }

@@ -182,7 +213,7 @@ impl HttpClient {
     pub async fn get(
         server_id: &str,
         path: &str,
-        query_params: Option<HashMap<String, JsonValue>>, // Add query parameters
+        query_params: Option<Vec<String>>,
     ) -> Result<reqwest::Response, String> {
         HttpClient::send_request(server_id, Method::GET, path, None, query_params, None).await
     }
@@ -191,7 +222,7 @@ impl HttpClient {
     pub async fn post(
         server_id: &str,
         path: &str,
-        query_params: Option<HashMap<String, JsonValue>>, // Add query parameters
+        query_params: Option<Vec<String>>,
         body: Option<reqwest::Body>,
     ) -> Result<reqwest::Response, String> {
         HttpClient::send_request(server_id, Method::POST, path, None, query_params, body).await
@@ -201,7 +232,7 @@ impl HttpClient {
         server_id: &str,
         path: &str,
         custom_headers: Option<HashMap<String, String>>,
-        query_params: Option<HashMap<String, JsonValue>>, // Add query parameters
+        query_params: Option<Vec<String>>,
         body: Option<reqwest::Body>,
     ) -> Result<reqwest::Response, String> {
         HttpClient::send_request(
@@ -221,7 +252,7 @@ impl HttpClient {
         server_id: &str,
         path: &str,
         custom_headers: Option<HashMap<String, String>>,
-        query_params: Option<HashMap<String, JsonValue>>, // Add query parameters
+        query_params: Option<Vec<String>>,
         body: Option<reqwest::Body>,
     ) -> Result<reqwest::Response, String> {
         HttpClient::send_request(
@@ -241,7 +272,7 @@ impl HttpClient {
         server_id: &str,
         path: &str,
         custom_headers: Option<HashMap<String, String>>,
-        query_params: Option<HashMap<String, JsonValue>>, // Add query parameters
+        query_params: Option<Vec<String>>,
     ) -> Result<reqwest::Response, String> {
         HttpClient::send_request(
             server_id,
@@ -254,3 +285,30 @@ impl HttpClient {
             .await
     }
 }
+
+/// Helper function to check status code.
+///
+/// If the status code is not in the `allowed_status_codes` list, return an error.
+pub(crate) fn status_code_check(
+    response: &reqwest::Response,
+    allowed_status_codes: &[StatusCode],
+) -> Result<(), String> {
+    let status_code = response.status();
+
+    if !allowed_status_codes.contains(&status_code) {
+        let msg = format!(
+            "Response of request [{}] status code failed: status code [{}], which is not in the 'allow' list {:?}",
+            response.url(),
+            status_code,
+            allowed_status_codes
+                .iter()
+                .map(|status| status.to_string())
+                .collect::<Vec<String>>()
+        );
+        log::warn!("{}", msg);
+
+        Err(msg)
+    } else {
+        Ok(())
+    }
+}
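Two details in this file are easy to miss: query strings are now split on the first `=` only, so values may themselves contain `=`, and callers are expected to run responses through the new `status_code_check` helper. A standalone sketch of the parsing step, using plain Rust with no project types:

// Standalone illustration of the key=value parsing in build_request:
// split_once('=') splits on the first '=' only, so values may contain '='.
fn main() {
    let params = vec!["q=a=b".to_string(), "from=0".to_string()];
    let query: Vec<(&str, &str)> = params.iter().filter_map(|s| s.split_once('=')).collect();
    assert_eq!(query, vec![("q", "a=b"), ("from", "0")]);
}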
@@ -8,6 +8,7 @@ pub mod http_client;
 pub mod profile;
 pub mod search;
 pub mod servers;
+pub mod synthesize;
 pub mod system_settings;
 pub mod transcription;
 pub mod websocket;
@@ -6,11 +6,10 @@ use crate::common::server::Server;
 use crate::common::traits::SearchSource;
 use crate::server::http_client::HttpClient;
 use async_trait::async_trait;
-// use futures::stream::StreamExt;
 use ordered_float::OrderedFloat;
+use reqwest::StatusCode;
 use std::collections::HashMap;
-use tauri_plugin_store::JsonValue;
-// use std::hash::Hash;
+use tauri::AppHandle;

 #[allow(dead_code)]
 pub(crate) struct DocumentsSizedCollector {
@@ -91,21 +90,40 @@ impl SearchSource for CocoSearchSource {
         }
     }

-    async fn search(&self, query: SearchQuery) -> Result<QueryResponse, SearchError> {
+    async fn search(
+        &self,
+        _tauri_app_handle: AppHandle,
+        query: SearchQuery,
+    ) -> Result<QueryResponse, SearchError> {
         let url = "/query/_search";
         let mut total_hits = 0;
         let mut hits: Vec<(Document, f64)> = Vec::new();

-        let mut query_args: HashMap<String, JsonValue> = HashMap::new();
-        query_args.insert("from".into(), JsonValue::Number(query.from.into()));
-        query_args.insert("size".into(), JsonValue::Number(query.size.into()));
+        let mut query_params = Vec::new();
+        // Add from/size as number values
+        query_params.push(format!("from={}", query.from));
+        query_params.push(format!("size={}", query.size));
+
+        // Add query strings
         for (key, value) in query.query_strings {
-            query_args.insert(key, JsonValue::String(value));
+            query_params.push(format!("{}={}", key, value));
         }

-        let response = HttpClient::get(&self.server.id, &url, Some(query_args))
+        let response = HttpClient::get(&self.server.id, &url, Some(query_params))
             .await
-            .map_err(|e| SearchError::HttpError(format!("{}", e)))?;
+            .map_err(|e| SearchError::HttpError {
+                status_code: None,
+                msg: format!("{}", e),
+            })?;
+        let status_code = response.status();
+
+        if ![StatusCode::OK, StatusCode::CREATED].contains(&status_code) {
+            return Err(SearchError::HttpError {
+                status_code: Some(status_code),
+                msg: format!("Request failed with status code [{}]", status_code),
+            });
+        }

         // Use the helper function to parse the response body
         let response_body = get_response_body_text(response)
@@ -120,7 +138,6 @@ impl SearchSource for CocoSearchSource {
         let parsed: SearchResponse<Document> = serde_json::from_str(&response_body)
             .map_err(|e| SearchError::ParseError(format!("{}", e)))?;

-
         // Process the parsed response
         total_hits = parsed.hits.total.value as usize;

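The error mapping above implies that `SearchError::HttpError` changed from a tuple variant to a struct variant carrying an optional status code. Its definition is not part of this diff; based only on the construction sites, it presumably looks roughly like the following, which is an assumption rather than the actual declaration:

// Assumed shape, inferred from the construction sites above; the real
// definition lives elsewhere in the codebase and may differ.
pub enum SearchError {
    HttpError {
        status_code: Option<reqwest::StatusCode>,
        msg: String,
    },
    ParseError(String),
    // ...other variants
}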
@@ -1,3 +1,4 @@
|
|||||||
|
use crate::COCO_TAURI_STORE;
|
||||||
use crate::common::http::get_response_body_text;
|
use crate::common::http::get_response_body_text;
|
||||||
use crate::common::register::SearchSourceRegistry;
|
use crate::common::register::SearchSourceRegistry;
|
||||||
use crate::common::server::{AuthProvider, Provider, Server, ServerAccessToken, Sso, Version};
|
use crate::common::server::{AuthProvider, Provider, Server, ServerAccessToken, Sso, Version};
|
||||||
@@ -5,68 +6,72 @@ use crate::server::connector::fetch_connectors_by_server;
|
|||||||
use crate::server::datasource::datasource_search;
|
use crate::server::datasource::datasource_search;
|
||||||
use crate::server::http_client::HttpClient;
|
use crate::server::http_client::HttpClient;
|
||||||
use crate::server::search::CocoSearchSource;
|
use crate::server::search::CocoSearchSource;
|
||||||
use crate::COCO_TAURI_STORE;
|
use function_name;
|
||||||
use lazy_static::lazy_static;
|
use http::StatusCode;
|
||||||
use reqwest::Method;
|
use reqwest::Method;
|
||||||
use serde_json::from_value;
|
|
||||||
use serde_json::Value as JsonValue;
|
use serde_json::Value as JsonValue;
|
||||||
|
use serde_json::from_value;
|
||||||
use std::collections::HashMap;
|
use std::collections::HashMap;
|
||||||
use std::sync::Arc;
|
use std::sync::LazyLock;
|
||||||
use std::sync::RwLock;
|
|
||||||
use tauri::Runtime;
|
use tauri::Runtime;
|
||||||
use tauri::{AppHandle, Manager};
|
use tauri::{AppHandle, Manager};
|
||||||
use tauri_plugin_store::StoreExt;
|
use tauri_plugin_store::StoreExt;
|
||||||
// Assuming you're using serde_json
|
use tokio::sync::RwLock;
|
||||||
|
|
||||||
lazy_static! {
|
/// Coco sever list
|
||||||
static ref SERVER_CACHE: Arc<RwLock<HashMap<String, Server>>> =
|
static SERVER_LIST_CACHE: LazyLock<RwLock<HashMap<String, Server>>> =
|
||||||
Arc::new(RwLock::new(HashMap::new()));
|
LazyLock::new(|| RwLock::new(HashMap::new()));
|
||||||
static ref SERVER_TOKEN: Arc<RwLock<HashMap<String, ServerAccessToken>>> =
|
|
||||||
Arc::new(RwLock::new(HashMap::new()));
|
|
||||||
}
|
|
||||||
|
|
||||||
#[allow(dead_code)]
|
/// If a server has a token stored here that has not expired, it is considered logged in.
|
||||||
fn check_server_exists(id: &str) -> bool {
|
///
|
||||||
let cache = SERVER_CACHE.read().unwrap(); // Acquire read lock
|
/// Since the `expire_at` field of `struct ServerAccessToken` is currently unused,
|
||||||
cache.contains_key(id)
|
/// all servers stored here are treated as logged in.
|
||||||
}
|
static SERVER_TOKEN_LIST_CACHE: LazyLock<RwLock<HashMap<String, ServerAccessToken>>> =
|
||||||
|
LazyLock::new(|| RwLock::new(HashMap::new()));
|
||||||
|
|
||||||
pub fn get_server_by_id(id: &str) -> Option<Server> {
|
/// `SERVER_LIST_CACHE` will be stored in KV store COCO_TAURI_STORE, under this key.
|
||||||
let cache = SERVER_CACHE.read().unwrap(); // Acquire read lock
|
pub const COCO_SERVERS: &str = "coco_servers";
|
||||||
|
|
||||||
|
/// `SERVER_TOKEN_LIST_CACHE` will be stored in KV store COCO_TAURI_STORE, under this key.
|
||||||
|
const COCO_SERVER_TOKENS: &str = "coco_server_tokens";
|
||||||
|
|
||||||
|
pub async fn get_server_by_id(id: &str) -> Option<Server> {
|
||||||
|
let cache = SERVER_LIST_CACHE.read().await;
|
||||||
cache.get(id).cloned()
|
cache.get(id).cloned()
|
||||||
}
|
}
|
||||||
|
|
||||||
#[tauri::command]
|
pub async fn get_server_token(id: &str) -> Option<ServerAccessToken> {
|
||||||
pub async fn get_server_token(id: &str) -> Result<Option<ServerAccessToken>, String> {
|
let cache = SERVER_TOKEN_LIST_CACHE.read().await;
|
||||||
let cache = SERVER_TOKEN.read().map_err(|err| err.to_string())?;
|
|
||||||
|
|
||||||
Ok(cache.get(id).cloned())
|
cache.get(id).cloned()
|
||||||
}
|
}
|
||||||
|
|
||||||
pub fn save_access_token(server_id: String, token: ServerAccessToken) -> bool {
|
pub async fn save_access_token(server_id: String, token: ServerAccessToken) -> bool {
|
||||||
let mut cache = SERVER_TOKEN.write().unwrap();
|
let mut cache = SERVER_TOKEN_LIST_CACHE.write().await;
|
||||||
cache.insert(server_id, token).is_none()
|
cache.insert(server_id, token).is_none()
|
||||||
}
|
}
|
||||||
|
|
||||||
fn check_endpoint_exists(endpoint: &str) -> bool {
|
async fn check_endpoint_exists(endpoint: &str) -> bool {
|
||||||
let cache = SERVER_CACHE.read().unwrap();
|
let cache = SERVER_LIST_CACHE.read().await;
|
||||||
cache.values().any(|server| server.endpoint == endpoint)
|
cache.values().any(|server| server.endpoint == endpoint)
|
||||||
}
|
}
|
||||||
|
|
||||||
pub fn save_server(server: &Server) -> bool {
|
/// Return true if `server` does not exists in the server list, i.e., it is a newly-added
|
||||||
let mut cache = SERVER_CACHE.write().unwrap();
|
/// server.
|
||||||
cache.insert(server.id.clone(), server.clone()).is_none() // If the server id did not exist, `insert` will return `None`
|
pub async fn save_server(server: &Server) -> bool {
|
||||||
|
let mut cache = SERVER_LIST_CACHE.write().await;
|
||||||
|
cache.insert(server.id.clone(), server.clone()).is_none()
|
||||||
}
|
}
|
||||||
|
|
||||||
fn remove_server_by_id(id: String) -> bool {
|
/// Return the removed `Server` if it exists in the server list.
|
||||||
|
async fn remove_server_by_id(id: &str) -> Option<Server> {
|
||||||
log::debug!("remove server by id: {}", &id);
|
log::debug!("remove server by id: {}", &id);
|
||||||
let mut cache = SERVER_CACHE.write().unwrap();
|
let mut cache = SERVER_LIST_CACHE.write().await;
|
||||||
let deleted = cache.remove(id.as_str());
|
cache.remove(id)
|
||||||
deleted.is_some()
|
|
||||||
}
|
}
|
||||||
|
|
||||||
pub async fn persist_servers<R: Runtime>(app_handle: &AppHandle<R>) -> Result<(), String> {
|
pub async fn persist_servers<R: Runtime>(app_handle: &AppHandle<R>) -> Result<(), String> {
|
||||||
let cache = SERVER_CACHE.read().unwrap(); // Acquire a read lock, not a write lock, since you're not modifying the cache
|
let cache = SERVER_LIST_CACHE.read().await;
|
||||||
|
|
||||||
// Convert HashMap to Vec for serialization (iterating over values of HashMap)
|
// Convert HashMap to Vec for serialization (iterating over values of HashMap)
|
||||||
let servers: Vec<Server> = cache.values().cloned().collect();
|
let servers: Vec<Server> = cache.values().cloned().collect();
|
||||||
@@ -86,14 +91,16 @@ pub async fn persist_servers<R: Runtime>(app_handle: &AppHandle<R>) -> Result<()
|
|||||||
Ok(())
|
Ok(())
|
||||||
}
|
}
|
||||||
|
|
||||||
pub fn remove_server_token(id: &str) -> bool {
|
/// Return true if the server token of the server specified by `id` exists in
|
||||||
|
/// the token list and gets deleted.
|
||||||
|
pub async fn remove_server_token(id: &str) -> bool {
|
||||||
log::debug!("remove server token by id: {}", &id);
|
log::debug!("remove server token by id: {}", &id);
|
||||||
let mut cache = SERVER_TOKEN.write().unwrap();
|
let mut cache = SERVER_TOKEN_LIST_CACHE.write().await;
|
||||||
cache.remove(id).is_some()
|
cache.remove(id).is_some()
|
||||||
}
|
}
|
||||||
|
|
||||||
pub fn persist_servers_token<R: Runtime>(app_handle: &AppHandle<R>) -> Result<(), String> {
|
pub async fn persist_servers_token<R: Runtime>(app_handle: &AppHandle<R>) -> Result<(), String> {
|
||||||
let cache = SERVER_TOKEN.read().unwrap(); // Acquire a read lock, not a write lock, since you're not modifying the cache
|
let cache = SERVER_TOKEN_LIST_CACHE.read().await;
|
||||||
|
|
||||||
// Convert HashMap to Vec for serialization (iterating over values of HashMap)
|
// Convert HashMap to Vec for serialization (iterating over values of HashMap)
|
||||||
let servers: Vec<ServerAccessToken> = cache.values().cloned().collect();
|
let servers: Vec<ServerAccessToken> = cache.values().cloned().collect();
|
||||||
@@ -173,26 +180,42 @@ pub async fn load_servers_token<R: Runtime>(
|
|||||||
servers.ok_or_else(|| "Failed to read servers from store: No servers found".to_string())?;
|
servers.ok_or_else(|| "Failed to read servers from store: No servers found".to_string())?;
|
||||||
|
|
||||||
// Convert each item in the JsonValue array to a Server
|
// Convert each item in the JsonValue array to a Server
|
||||||
if let JsonValue::Array(servers_array) = servers {
|
match servers {
|
||||||
// Deserialize each JsonValue into Server, filtering out any errors
|
JsonValue::Array(servers_array) => {
|
||||||
let deserialized_tokens: Vec<ServerAccessToken> = servers_array
|
let mut deserialized_tokens: Vec<ServerAccessToken> =
|
||||||
.into_iter()
|
Vec::with_capacity(servers_array.len());
|
||||||
.filter_map(|server_json| from_value(server_json).ok()) // Only keep valid Server instances
|
for server_json in servers_array {
|
||||||
.collect();
|
match from_value(server_json.clone()) {
|
||||||
|
Ok(token) => {
|
||||||
|
deserialized_tokens.push(token);
|
||||||
|
}
|
||||||
|
Err(e) => {
|
||||||
|
panic!(
|
||||||
|
"failed to deserialize JSON [{}] to [struct ServerAccessToken], error [{}], store [{}] key [{}] is possibly corrupted!",
|
||||||
|
server_json, e, COCO_TAURI_STORE, COCO_SERVER_TOKENS
|
||||||
|
);
|
||||||
|
}
|
||||||
|
}
|
||||||
|
}
|
||||||
|
|
||||||
if deserialized_tokens.is_empty() {
|
if deserialized_tokens.is_empty() {
|
||||||
return Err("Failed to deserialize any servers from the store.".to_string());
|
return Err("Failed to deserialize any servers from the store.".to_string());
|
||||||
}
|
}
|
||||||
|
|
||||||
for server in deserialized_tokens.iter() {
|
for server in deserialized_tokens.iter() {
|
||||||
save_access_token(server.id.clone(), server.clone());
|
save_access_token(server.id.clone(), server.clone()).await;
|
||||||
}
|
}
|
||||||
|
|
||||||
log::debug!("loaded {:?} servers's token", &deserialized_tokens.len());
|
log::debug!("loaded {:?} servers's token", &deserialized_tokens.len());
|
||||||
|
|
||||||
Ok(deserialized_tokens)
|
Ok(deserialized_tokens)
|
||||||
} else {
|
}
|
||||||
Err("Failed to read servers from store: Invalid format".to_string())
|
_ => {
|
||||||
|
unreachable!(
|
||||||
|
"coco server tokens should be stored in an array under store [{}] key [{}], but it is not",
|
||||||
|
COCO_TAURI_STORE, COCO_SERVER_TOKENS
|
||||||
|
);
|
||||||
|
}
|
||||||
}
|
}
|
||||||
}
|
}
|
||||||
|
|
||||||
@@ -214,26 +237,41 @@ pub async fn load_servers<R: Runtime>(app_handle: &AppHandle<R>) -> Result<Vec<S
|
|||||||
servers.ok_or_else(|| "Failed to read servers from store: No servers found".to_string())?;
|
servers.ok_or_else(|| "Failed to read servers from store: No servers found".to_string())?;
|
||||||
|
|
||||||
// Convert each item in the JsonValue array to a Server
|
// Convert each item in the JsonValue array to a Server
|
||||||
if let JsonValue::Array(servers_array) = servers {
|
match servers {
|
||||||
// Deserialize each JsonValue into Server, filtering out any errors
|
JsonValue::Array(servers_array) => {
|
||||||
let deserialized_servers: Vec<Server> = servers_array
|
let mut deserialized_servers = Vec::with_capacity(servers_array.len());
|
||||||
.into_iter()
|
for server_json in servers_array {
|
||||||
.filter_map(|server_json| from_value(server_json).ok()) // Only keep valid Server instances
|
match from_value(server_json.clone()) {
|
||||||
.collect();
|
Ok(server) => {
|
||||||
|
deserialized_servers.push(server);
|
||||||
|
}
|
||||||
|
Err(e) => {
|
||||||
|
panic!(
|
||||||
|
"failed to deserialize JSON [{}] to [struct Server], error [{}], store [{}] key [{}] is possibly corrupted!",
|
||||||
|
server_json, e, COCO_TAURI_STORE, COCO_SERVERS
|
||||||
|
);
|
||||||
|
}
|
||||||
|
}
|
||||||
|
}
|
||||||
|
|
||||||
if deserialized_servers.is_empty() {
|
if deserialized_servers.is_empty() {
|
||||||
return Err("Failed to deserialize any servers from the store.".to_string());
|
return Err("Failed to deserialize any servers from the store.".to_string());
|
||||||
}
|
}
|
||||||
|
|
||||||
for server in deserialized_servers.iter() {
|
for server in deserialized_servers.iter() {
|
||||||
save_server(&server);
|
save_server(&server).await;
|
||||||
}
|
}
|
||||||
|
|
||||||
log::debug!("load servers: {:?}", &deserialized_servers);
|
log::debug!("load servers: {:?}", &deserialized_servers);
|
||||||
|
|
||||||
Ok(deserialized_servers)
|
Ok(deserialized_servers)
|
||||||
} else {
|
}
|
||||||
Err("Failed to read servers from store: Invalid format".to_string())
|
_ => {
|
||||||
|
unreachable!(
|
||||||
|
"coco servers should be stored in an array under store [{}] key [{}], but it is not",
|
||||||
|
COCO_TAURI_STORE, COCO_SERVERS
|
||||||
|
);
|
||||||
|
}
|
||||||
}
|
}
|
||||||
}
|
}
|
||||||
|
|
||||||
@@ -250,7 +288,7 @@ pub async fn load_or_insert_default_server<R: Runtime>(
|
|||||||
}
|
}
|
||||||
|
|
||||||
let default = get_default_server();
|
let default = get_default_server();
|
||||||
save_server(&default);
|
save_server(&default).await;
|
||||||
|
|
||||||
log::debug!("loaded default servers");
|
log::debug!("loaded default servers");
|
||||||
|
|
||||||
@@ -259,33 +297,23 @@ pub async fn load_or_insert_default_server<R: Runtime>(
|
|||||||
|
|
||||||
#[tauri::command]
|
#[tauri::command]
|
||||||
pub async fn list_coco_servers<R: Runtime>(
|
pub async fn list_coco_servers<R: Runtime>(
|
||||||
_app_handle: AppHandle<R>,
|
app_handle: AppHandle<R>,
|
||||||
) -> Result<Vec<Server>, String> {
|
) -> Result<Vec<Server>, String> {
|
||||||
//hard fresh all server's info, in order to get the actual health
|
//hard fresh all server's info, in order to get the actual health
|
||||||
refresh_all_coco_server_info(_app_handle.clone()).await;
|
refresh_all_coco_server_info(app_handle.clone()).await;
|
||||||
|
|
||||||
|
let servers: Vec<Server> = get_all_servers().await;
|
||||||
|
|
||||||
let servers: Vec<Server> = get_all_servers();
|
|
||||||
Ok(servers)
|
Ok(servers)
|
||||||
}
|
}
|
||||||
|
|
||||||
#[allow(dead_code)]
|
pub async fn get_all_servers() -> Vec<Server> {
|
||||||
pub fn get_servers_as_hashmap() -> HashMap<String, Server> {
|
let cache = SERVER_LIST_CACHE.read().await;
|
||||||
let cache = SERVER_CACHE.read().unwrap();
|
|
||||||
cache.clone()
|
|
||||||
}
|
|
||||||
|
|
||||||
pub fn get_all_servers() -> Vec<Server> {
|
|
||||||
let cache = SERVER_CACHE.read().unwrap();
|
|
||||||
cache.values().cloned().collect()
|
cache.values().cloned().collect()
|
||||||
}
|
}
|
||||||
|
|
||||||
/// We store added Coco servers in the Tauri store using this key.
|
|
||||||
pub const COCO_SERVERS: &str = "coco_servers";
|
|
||||||
|
|
||||||
const COCO_SERVER_TOKENS: &str = "coco_server_tokens";
|
|
||||||
|
|
||||||
pub async fn refresh_all_coco_server_info<R: Runtime>(app_handle: AppHandle<R>) {
|
pub async fn refresh_all_coco_server_info<R: Runtime>(app_handle: AppHandle<R>) {
|
||||||
let servers = get_all_servers();
|
let servers = get_all_servers().await;
|
||||||
for server in servers {
|
for server in servers {
|
||||||
let _ = refresh_coco_server_info(app_handle.clone(), server.id.clone()).await;
|
let _ = refresh_coco_server_info(app_handle.clone(), server.id.clone()).await;
|
||||||
}
|
}
|
||||||
@@ -298,7 +326,7 @@ pub async fn refresh_coco_server_info<R: Runtime>(
|
|||||||
) -> Result<Server, String> {
|
) -> Result<Server, String> {
|
||||||
// Retrieve the server from the cache
|
// Retrieve the server from the cache
|
||||||
let cached_server = {
|
let cached_server = {
|
||||||
let cache = SERVER_CACHE.read().unwrap();
|
let cache = SERVER_LIST_CACHE.read().await;
|
||||||
cache.get(&id).cloned()
|
cache.get(&id).cloned()
|
||||||
};
|
};
|
||||||
|
|
||||||
@@ -313,21 +341,16 @@ pub async fn refresh_coco_server_info<R: Runtime>(
|
|||||||
let profile = server.profile;
|
let profile = server.profile;
|
||||||
|
|
||||||
// Send request to fetch updated server info
|
// Send request to fetch updated server info
|
||||||
let response = HttpClient::get(&id, "/provider/_info", None)
|
let response = match HttpClient::get(&id, "/provider/_info", None).await {
|
||||||
.await
|
Ok(response) => response,
|
||||||
.map_err(|e| {
|
Err(e) => {
|
||||||
format!("Failed to contact the server: {}", e)
|
mark_server_as_offline(app_handle, &id).await;
|
||||||
});
|
return Err(e);
|
||||||
|
|
||||||
if response.is_err() {
|
|
||||||
let _ = mark_server_as_offline(app_handle, &id).await;
|
|
||||||
return Err(response.err().unwrap());
|
|
||||||
}
|
}
|
||||||
|
};
|
||||||
let response = response?;
|
|
||||||
|
|
||||||
if !response.status().is_success() {
|
if !response.status().is_success() {
|
||||||
let _ = mark_server_as_offline(app_handle, &id).await;
|
mark_server_as_offline(app_handle, &id).await;
|
||||||
return Err(format!("Request failed with status: {}", response.status()));
|
return Err(format!("Request failed with status: {}", response.status()));
|
||||||
}
|
}
|
||||||
|
|
||||||
@@ -342,12 +365,22 @@ pub async fn refresh_coco_server_info<R: Runtime>(
|
|||||||
updated_server.id = id.clone();
|
updated_server.id = id.clone();
|
||||||
updated_server.builtin = is_builtin;
|
updated_server.builtin = is_builtin;
|
||||||
updated_server.enabled = is_enabled;
|
updated_server.enabled = is_enabled;
|
||||||
updated_server.available = true;
|
updated_server.available = {
|
||||||
|
if server.public {
|
||||||
|
// Public Coco servers are available as long as they are online.
|
||||||
|
true
|
||||||
|
} else {
|
||||||
|
// For non-public Coco servers, we still need to check if it is
|
||||||
|
// logged in, i.e., has a token stored in `SERVER_TOKEN_LIST_CACHE`.
|
||||||
|
get_server_token(&id).await.is_some()
|
||||||
|
}
|
||||||
|
};
|
||||||
updated_server.profile = profile;
|
updated_server.profile = profile;
|
||||||
trim_endpoint_last_forward_slash(&mut updated_server);
|
trim_endpoint_last_forward_slash(&mut updated_server);
|
||||||
|
|
||||||
// Save and persist
|
// Save and persist
|
||||||
save_server(&updated_server);
|
save_server(&updated_server).await;
|
||||||
|
try_register_server_to_search_source(app_handle.clone(), &updated_server).await;
|
||||||
persist_servers(&app_handle)
|
persist_servers(&app_handle)
|
||||||
.await
|
.await
|
||||||
.map_err(|e| format!("Failed to persist servers: {}", e))?;
|
.map_err(|e| format!("Failed to persist servers: {}", e))?;
|
||||||
@@ -370,10 +403,10 @@ pub async fn add_coco_server<R: Runtime>(
|
|||||||
|
|
||||||
let endpoint = endpoint.trim_end_matches('/');
|
let endpoint = endpoint.trim_end_matches('/');
|
||||||
|
|
||||||
if check_endpoint_exists(endpoint) {
|
if check_endpoint_exists(endpoint).await {
|
||||||
log::debug!(
|
log::debug!(
|
||||||
"This Coco server has already been registered: {:?}",
|
"trying to register a Coco server [{}] that has already been registered",
|
||||||
&endpoint
|
endpoint
|
||||||
);
|
);
|
||||||
return Err("This Coco server has already been registered.".into());
|
return Err("This Coco server has already been registered.".into());
|
||||||
}
|
}
|
||||||
@@ -385,6 +418,15 @@ pub async fn add_coco_server<R: Runtime>(
|
|||||||
|
|
||||||
log::debug!("Get provider info response: {:?}", &response);
|
log::debug!("Get provider info response: {:?}", &response);
|
||||||
|
|
||||||
|
if response.status() != StatusCode::OK {
|
||||||
|
log::debug!(
|
||||||
|
"trying to register a Coco server [{}] that is possibly down",
|
||||||
|
endpoint
|
||||||
|
);
|
||||||
|
|
||||||
|
return Err("This Coco server is possibly down".into());
|
||||||
|
}
|
||||||
|
|
||||||
let body = get_response_body_text(response).await?;
|
let body = get_response_body_text(response).await?;
|
||||||
|
|
||||||
let mut server: Server = serde_json::from_str(&body)
|
let mut server: Server = serde_json::from_str(&body)
|
||||||
@@ -392,15 +434,32 @@ pub async fn add_coco_server<R: Runtime>(
|
|||||||
|
|
||||||
trim_endpoint_last_forward_slash(&mut server);
|
trim_endpoint_last_forward_slash(&mut server);
|
||||||
|
|
||||||
|
// The JSON returned from `provider/_info` won't have this field, serde will set
|
||||||
|
// it to an empty string during deserialization, we need to set a valid value here.
|
||||||
if server.id.is_empty() {
|
if server.id.is_empty() {
|
||||||
server.id = pizza_common::utils::uuid::Uuid::new().to_string();
|
server.id = pizza_common::utils::uuid::Uuid::new().to_string();
|
||||||
}
|
}
|
||||||
|
|
||||||
|
// Use the default name, if it is not set.
|
||||||
if server.name.is_empty() {
|
if server.name.is_empty() {
|
||||||
server.name = "Coco Server".to_string();
|
server.name = "Coco Server".to_string();
|
||||||
}
|
}
|
||||||
|
|
||||||
save_server(&server);
|
// Update the `available` field
|
||||||
|
if server.public {
|
||||||
|
// Serde already sets this to true, but just to make the code clear, do it again.
|
||||||
|
server.available = true;
|
||||||
|
} else {
|
||||||
|
let opt_token = get_server_token(&server.id).await;
|
||||||
|
assert!(
|
||||||
|
opt_token.is_none(),
|
||||||
|
"this Coco server is newly-added, we should have no token stored for it!"
|
||||||
|
);
|
||||||
|
// This is a non-public Coco server, and it is not logged in, so it is unavailable.
|
||||||
|
server.available = false;
|
||||||
|
}
|
||||||
|
|
||||||
|
save_server(&server).await;
|
||||||
try_register_server_to_search_source(app_handle.clone(), &server).await;
|
try_register_server_to_search_source(app_handle.clone(), &server).await;
|
||||||
|
|
||||||
persist_servers(&app_handle)
|
persist_servers(&app_handle)
|
||||||
@@ -412,6 +471,7 @@ pub async fn add_coco_server<R: Runtime>(
|
|||||||
}
|
}
|
||||||
|
|
||||||
#[tauri::command]
|
#[tauri::command]
|
||||||
|
#[function_name::named]
|
||||||
pub async fn remove_coco_server<R: Runtime>(
|
pub async fn remove_coco_server<R: Runtime>(
|
||||||
app_handle: AppHandle<R>,
|
app_handle: AppHandle<R>,
|
||||||
id: String,
|
id: String,
|
||||||
@@ -419,24 +479,47 @@ pub async fn remove_coco_server<R: Runtime>(
|
|||||||
let registry = app_handle.state::<SearchSourceRegistry>();
|
let registry = app_handle.state::<SearchSourceRegistry>();
|
||||||
registry.remove_source(id.as_str()).await;
|
registry.remove_source(id.as_str()).await;
|
||||||
|
|
||||||
remove_server_token(id.as_str());
|
let opt_server = remove_server_by_id(id.as_str()).await;
|
||||||
remove_server_by_id(id);
|
let Some(server) = opt_server else {
|
||||||
|
panic!(
|
||||||
|
"[{}()] invoked with a server [{}] that does not exist! Mismatched states between frontend and backend!",
|
||||||
|
function_name!(),
|
||||||
|
id
|
||||||
|
);
|
||||||
|
};
|
||||||
persist_servers(&app_handle)
|
persist_servers(&app_handle)
|
||||||
.await
|
.await
|
||||||
.expect("failed to save servers");
|
.expect("failed to save servers");
|
||||||
persist_servers_token(&app_handle).expect("failed to save server tokens");
|
|
||||||
|
// Only non-public Coco servers require tokens
|
||||||
|
if !server.public {
|
||||||
|
// If is logged in, clear the token as well.
|
||||||
|
let deleted = remove_server_token(id.as_str()).await;
|
||||||
|
if deleted {
|
||||||
|
persist_servers_token(&app_handle)
|
||||||
|
.await
|
||||||
|
.expect("failed to save server tokens");
|
||||||
|
}
|
||||||
|
}
|
||||||
|
|
||||||
Ok(())
|
Ok(())
|
||||||
}
|
}
|
||||||
|
|
||||||
#[tauri::command]
|
#[tauri::command]
|
||||||
|
#[function_name::named]
|
||||||
pub async fn enable_server<R: Runtime>(app_handle: AppHandle<R>, id: String) -> Result<(), ()> {
|
pub async fn enable_server<R: Runtime>(app_handle: AppHandle<R>, id: String) -> Result<(), ()> {
|
||||||
println!("enable_server: {}", id);
|
let opt_server = get_server_by_id(id.as_str()).await;
|
||||||
|
|
||||||
|
let Some(mut server) = opt_server else {
|
||||||
|
panic!(
|
||||||
|
"[{}()] invoked with a server [{}] that does not exist! Mismatched states between frontend and backend!",
|
||||||
|
function_name!(),
|
||||||
|
id
|
||||||
|
);
|
||||||
|
};
|
||||||
|
|
||||||
let server = get_server_by_id(id.as_str());
|
|
||||||
if let Some(mut server) = server {
|
|
||||||
server.enabled = true;
|
server.enabled = true;
|
||||||
save_server(&server);
|
save_server(&server).await;
|
||||||
|
|
||||||
// Register the server to the search source
|
// Register the server to the search source
|
||||||
try_register_server_to_search_source(app_handle.clone(), &server).await;
|
try_register_server_to_search_source(app_handle.clone(), &server).await;
|
||||||
@@ -444,26 +527,56 @@ pub async fn enable_server<R: Runtime>(app_handle: AppHandle<R>, id: String) ->
|
|||||||
persist_servers(&app_handle)
|
persist_servers(&app_handle)
|
||||||
.await
|
.await
|
||||||
.expect("failed to save servers");
|
.expect("failed to save servers");
|
||||||
}
|
|
||||||
Ok(())
|
Ok(())
|
||||||
}
|
}
|
||||||
|
|
||||||
|
#[tauri::command]
|
||||||
|
#[function_name::named]
|
||||||
|
pub async fn disable_server<R: Runtime>(app_handle: AppHandle<R>, id: String) -> Result<(), ()> {
|
||||||
|
let opt_server = get_server_by_id(id.as_str()).await;
|
||||||
|
|
||||||
|
let Some(mut server) = opt_server else {
|
||||||
|
panic!(
|
||||||
|
"[{}()] invoked with a server [{}] that does not exist! Mismatched states between frontend and backend!",
|
||||||
|
function_name!(),
|
||||||
|
id
|
||||||
|
);
|
||||||
|
};
|
||||||
|
|
||||||
|
server.enabled = false;
|
||||||
|
|
||||||
|
let registry = app_handle.state::<SearchSourceRegistry>();
|
||||||
|
registry.remove_source(id.as_str()).await;
|
||||||
|
|
||||||
|
save_server(&server).await;
|
||||||
|
persist_servers(&app_handle)
|
||||||
|
.await
|
||||||
|
.expect("failed to save servers");
|
||||||
|
|
||||||
|
Ok(())
|
||||||
|
}
|
||||||
|
|
||||||
|
/// For non-public Coco servers, we add it to the search source as long as it is
|
||||||
|
/// enabled.
|
||||||
|
///
|
||||||
|
/// For public Coco server, an extra token is required.
|
||||||
pub async fn try_register_server_to_search_source(
|
pub async fn try_register_server_to_search_source(
|
||||||
app_handle: AppHandle<impl Runtime>,
|
app_handle: AppHandle<impl Runtime>,
|
||||||
server: &Server,
|
server: &Server,
|
||||||
) {
|
) {
|
||||||
if server.enabled {
|
if server.enabled {
|
||||||
log::trace!(
|
log::trace!(
|
||||||
"Server {} is public: {} and available: {}",
|
"Server [name: {}, id: {}] is public: {} and available: {}",
|
||||||
&server.name,
|
&server.name,
|
||||||
|
&server.id,
|
||||||
&server.public,
|
&server.public,
|
||||||
&server.available
|
&server.available
|
||||||
);
|
);
|
||||||
|
|
||||||
if !server.public {
|
if !server.public {
|
||||||
let token = get_server_token(&server.id).await;
|
let opt_token = get_server_token(&server.id).await;
|
||||||
|
|
||||||
if !token.is_ok() || token.is_ok() && token.unwrap().is_none() {
|
if opt_token.is_none() {
|
||||||
log::debug!("Server {} is not public and no token was found", &server.id);
|
log::debug!("Server {} is not public and no token was found", &server.id);
|
||||||
return;
|
return;
|
||||||
}
|
}
|
||||||
@@ -475,96 +588,110 @@ pub async fn try_register_server_to_search_source(
|
|||||||
}
|
}
|
||||||
}
|
}
|
||||||
|
|
||||||
#[tauri::command]
|
#[function_name::named]
|
||||||
pub async fn mark_server_as_offline<R: Runtime>(
|
#[allow(unused)]
|
||||||
app_handle: AppHandle<R>, id: &str) -> Result<(), ()> {
|
async fn mark_server_as_online<R: Runtime>(app_handle: AppHandle<R>, id: &str) {
|
||||||
// println!("server_is_offline: {}", id);
|
let server = get_server_by_id(id).await;
|
||||||
let server = get_server_by_id(id);
|
if let Some(mut server) = server {
|
||||||
|
server.available = true;
|
||||||
|
server.health = None;
|
||||||
|
save_server(&server).await;
|
||||||
|
|
||||||
|
try_register_server_to_search_source(app_handle.clone(), &server).await;
|
||||||
|
} else {
|
||||||
|
log::warn!(
|
||||||
|
"[{}()] invoked with a server [{}] that does not exist!",
|
||||||
|
function_name!(),
|
||||||
|
id
|
||||||
|
);
|
||||||
|
}
|
||||||
|
}
|
||||||
|
|
||||||
|
#[function_name::named]
|
||||||
|
pub(crate) async fn mark_server_as_offline<R: Runtime>(app_handle: AppHandle<R>, id: &str) {
|
||||||
|
let server = get_server_by_id(id).await;
|
||||||
if let Some(mut server) = server {
|
if let Some(mut server) = server {
|
||||||
server.available = false;
|
server.available = false;
|
||||||
server.health = None;
|
server.health = None;
|
||||||
save_server(&server);
|
save_server(&server).await;
|
||||||
|
|
||||||
let registry = app_handle.state::<SearchSourceRegistry>();
|
let registry = app_handle.state::<SearchSourceRegistry>();
|
||||||
registry.remove_source(id).await;
|
registry.remove_source(id).await;
|
||||||
|
} else {
|
||||||
|
log::warn!(
|
||||||
|
"[{}()] invoked with a server [{}] that does not exist!",
|
||||||
|
function_name!(),
|
||||||
|
id
|
||||||
|
);
|
||||||
}
|
}
|
||||||
Ok(())
|
|
||||||
}
|
|
||||||
|
|
||||||
#[tauri::command]
|
|
||||||
pub async fn disable_server<R: Runtime>(app_handle: AppHandle<R>, id: String) -> Result<(), ()> {
|
|
||||||
let server = get_server_by_id(id.as_str());
|
|
||||||
if let Some(mut server) = server {
|
|
||||||
server.enabled = false;
|
|
||||||
|
|
||||||
let registry = app_handle.state::<SearchSourceRegistry>();
|
|
||||||
registry.remove_source(id.as_str()).await;
|
|
||||||
|
|
||||||
save_server(&server);
|
|
||||||
persist_servers(&app_handle)
|
|
||||||
.await
|
|
||||||
.expect("failed to save servers");
|
|
||||||
}
|
|
||||||
Ok(())
|
|
||||||
}
|
}
|
||||||
|
|
||||||
#[tauri::command]
+#[function_name::named]
pub async fn logout_coco_server<R: Runtime>(
    app_handle: AppHandle<R>,
    id: String,
) -> Result<(), String> {
    log::debug!("Attempting to log out server by id: {}", &id);

-    // Check if server token exists
-    if let Some(_token) = get_server_token(id.as_str()).await? {
-        log::debug!("Found server token for id: {}", &id);

-        // Remove the server token from cache
-        remove_server_token(id.as_str());

-        // Persist the updated tokens
-        if let Err(e) = persist_servers_token(&app_handle) {
-            log::debug!("Failed to save tokens for id: {}. Error: {:?}", &id, &e);
-            return Err(format!("Failed to save tokens: {}", &e));
-        }
-    } else {
-        // Log the case where server token is not found
-        log::debug!("No server token found for id: {}", &id);
-    }

    // Check if the server exists
-    if let Some(mut server) = get_server_by_id(id.as_str()) {
-        log::debug!("Found server for id: {}", &id);
+    let Some(mut server) = get_server_by_id(id.as_str()).await else {
+        panic!(
+            "[{}()] invoked with a server [{}] that does not exist! Mismatched states between frontend and backend!",
+            function_name!(),
+            id
+        );
+    };

    // Clear server profile
    server.profile = None;
-    let _ = mark_server_as_offline(app_handle.clone(), id.as_str()).await;
+    // Logging out from a non-public Coco server makes it unavailable
+    if !server.public {
+        server.available = false;
+    }
    // Save the updated server data
-    save_server(&server);
+    save_server(&server).await;

    // Persist the updated server data
    if let Err(e) = persist_servers(&app_handle).await {
        log::debug!("Failed to save server for id: {}. Error: {:?}", &id, &e);
        return Err(format!("Failed to save server: {}", &e));
    }

+    let has_token = get_server_token(id.as_str()).await.is_some();
+    if server.public {
+        if has_token {
+            panic!("Public Coco server won't have token")
+        }
    } else {
-        // Log the case where server is not found
-        log::debug!("No server found for id: {}", &id);
-        return Err(format!("No server found for id: {}", id));
+        assert!(
+            has_token,
+            "This is a non-public Coco server, and it is logged in, we should have a token"
+        );
+        // Remove the server token from cache
+        remove_server_token(id.as_str()).await;

+        // Persist the updated tokens
+        if let Err(e) = persist_servers_token(&app_handle).await {
+            log::debug!("Failed to save tokens for id: {}. Error: {:?}", &id, &e);
+            return Err(format!("Failed to save tokens: {}", &e));
+        }
+    }

+    // Remove it from the search source if it becomes unavailable
+    if !server.available {
+        let registry = app_handle.state::<SearchSourceRegistry>();
+        registry.remove_source(id.as_str()).await;
    }

    log::debug!("Successfully logged out server with id: {}", &id);
    Ok(())
}

-/// Removes the trailing slash from the server's endpoint if present.
+/// Helper function to remove the trailing slash from the server's endpoint if present.
fn trim_endpoint_last_forward_slash(server: &mut Server) {
-    if server.endpoint.ends_with('/') {
-        server.endpoint.pop(); // Remove the last character
-        while server.endpoint.ends_with('/') {
-            server.endpoint.pop();
-        }
-    }
+    let endpoint = &mut server.endpoint;
+    while endpoint.ends_with('/') {
+        endpoint.pop();
+    }
}

@@ -573,6 +700,10 @@ fn provider_info_url(endpoint: &str) -> String {
    format!("{endpoint}/provider/_info")
}

+#[cfg(test)]
+mod tests {
+    use super::*;

    #[test]
    fn test_trim_endpoint_last_forward_slash() {
        let mut server = Server {
@@ -612,3 +743,4 @@ fn test_trim_endpoint_last_forward_slash() {

        assert_eq!(server.endpoint, "https://example.com");
    }
+}
src-tauri/src/server/synthesize.rs (new file, 57 lines)
@@ -0,0 +1,57 @@
use crate::server::http_client::HttpClient;
use futures_util::StreamExt;
use http::Method;
use serde_json::json;
use tauri::{AppHandle, Emitter, Runtime, command};

#[command]
pub async fn synthesize<R: Runtime>(
    app_handle: AppHandle<R>,
    client_id: String,
    server_id: String,
    voice: String,
    content: String,
) -> Result<(), String> {
    let body = json!({
        "voice": voice,
        "content": content,
    })
    .to_string();

    let response = HttpClient::send_request(
        server_id.as_str(),
        Method::POST,
        "/services/audio/synthesize",
        None,
        None,
        Some(reqwest::Body::from(body.to_string())),
    )
    .await?;

    log::info!("Synthesize response status: {}", response.status());

    if response.status() == 429 {
        return Ok(());
    }

    if !response.status().is_success() {
        return Err(format!("Request Failed: {}", response.status()));
    }

    let mut stream = response.bytes_stream();
    while let Some(chunk) = stream.next().await {
        match chunk {
            Ok(bytes) => {
                if let Err(err) = app_handle.emit(&client_id, bytes.to_vec()) {
                    log::error!("Emit error: {:?}", err);
                }
            }
            Err(e) => {
                log::error!("Stream error: {:?}", e);
                break;
            }
        }
    }

    Ok(())
}
@@ -1,43 +1,96 @@
use crate::common::http::get_response_body_text;
use crate::server::http_client::HttpClient;
use serde::{Deserialize, Serialize};
-use serde_json::Value as JsonValue;
-use std::collections::HashMap;
+use serde_json::{Value, from_str};
use tauri::command;

#[derive(Debug, Serialize, Deserialize)]
pub struct TranscriptionResponse {
-    pub text: String,
+    task_id: String,
+    results: Vec<Value>,
}

#[command]
pub async fn transcription(
    server_id: String,
-    audio_type: String,
    audio_content: String,
) -> Result<TranscriptionResponse, String> {
-    let mut query_params = HashMap::new();
-    query_params.insert("type".to_string(), JsonValue::String(audio_type));
-    query_params.insert("content".to_string(), JsonValue::String(audio_content));
+    // Send request to initiate transcription task
+    let init_response = HttpClient::post(

-    // Send the HTTP POST request
-    let response = HttpClient::post(
        &server_id,
        "/services/audio/transcription",
-        Some(query_params),
+        None,
+        Some(audio_content.into()),
+    )
+    .await
+    .map_err(|e| format!("Failed to initiate transcription: {}", e))?;

+    // Extract response body as text
+    let init_response_text = get_response_body_text(init_response)
+        .await
+        .map_err(|e| format!("Failed to read initial response body: {}", e))?;

+    // Parse response JSON to extract task ID
+    let init_response_json: Value = from_str(&init_response_text).map_err(|e| {
+        format!(
+            "Failed to parse initial response JSON: {}. Raw response: {}",
+            e, init_response_text
+        )
+    })?;

+    let transcription_task_id = init_response_json["task_id"]
+        .as_str()
+        .ok_or_else(|| {
+            format!(
+                "Missing or invalid task_id in initial response: {}",
+                init_response_text
+            )
+        })?
+        .to_string();

+    // Set up polling with timeout
+    let polling_start = std::time::Instant::now();
+    const POLLING_TIMEOUT: std::time::Duration = std::time::Duration::from_secs(30);
+    const POLLING_INTERVAL: std::time::Duration = std::time::Duration::from_millis(200);

+    let mut transcription_response: TranscriptionResponse;

+    loop {
+        // Poll for transcription results
+        let poll_response = HttpClient::get(
+            &server_id,
+            &format!("/services/audio/task/{}", transcription_task_id),
            None,
        )
        .await
-        .map_err(|e| format!("Error sending transcription request: {}", e))?;
+        .map_err(|e| format!("Failed to poll transcription task: {}", e))?;

-    // Use get_response_body_text to extract the response body as text
-    let response_body = get_response_body_text(response)
+        // Extract poll response body
+        let poll_response_text = get_response_body_text(poll_response)
            .await
-        .map_err(|e| format!("Failed to read response body: {}", e))?;
+            .map_err(|e| format!("Failed to read poll response body: {}", e))?;

-    // Deserialize the response body into TranscriptionResponse
-    let transcription_response: TranscriptionResponse = serde_json::from_str(&response_body)
-        .map_err(|e| format!("Failed to parse transcription response: {}", e))?;
+        // Parse poll response JSON
+        transcription_response = from_str(&poll_response_text).map_err(|e| {
+            format!(
+                "Failed to parse poll response JSON: {}. Raw response: {}",
+                e, poll_response_text
+            )
+        })?;

+        // Check if transcription results are available
+        if !transcription_response.results.is_empty() {
+            break;
+        }

+        // Check for timeout
+        if polling_start.elapsed() >= POLLING_TIMEOUT {
+            return Err("Transcription task timed out after 30 seconds".to_string());
+        }

+        // Wait before next poll
+        tokio::time::sleep(POLLING_INTERVAL).await;
+    }

    Ok(transcription_response)
}
@@ -4,12 +4,12 @@ use std::collections::HashMap;
use std::sync::Arc;
use tauri::{AppHandle, Emitter, Runtime};
use tokio::net::TcpStream;
-use tokio::sync::{mpsc, Mutex};
-use tokio_tungstenite::tungstenite::handshake::client::generate_key;
-use tokio_tungstenite::tungstenite::Message;
+use tokio::sync::{Mutex, mpsc};
use tokio_tungstenite::MaybeTlsStream;
use tokio_tungstenite::WebSocketStream;
-use tokio_tungstenite::{connect_async_tls_with_config, Connector};
+use tokio_tungstenite::tungstenite::Message;
+use tokio_tungstenite::tungstenite::handshake::client::generate_key;
+use tokio_tungstenite::{Connector, connect_async_tls_with_config};
#[derive(Default)]
pub struct WebSocketManager {
    connections: Arc<Mutex<HashMap<String, Arc<WebSocketInstance>>>>,
@@ -53,9 +53,11 @@ pub async fn connect_to_server<R: Runtime>(
    // Disconnect old connection first
    disconnect(client_id.clone(), state.clone()).await.ok();

-    let server = get_server_by_id(&id).ok_or(format!("Server with ID {} not found", id))?;
+    let server = get_server_by_id(&id)
+        .await
+        .ok_or(format!("Server with ID {} not found", id))?;
    let endpoint = convert_to_websocket(&server.endpoint)?;
-    let token = get_server_token(&id).await?.map(|t| t.access_token.clone());
+    let token = get_server_token(&id).await.map(|t| t.access_token.clone());

    let mut request =
        tokio_tungstenite::tungstenite::client::IntoClientRequest::into_client_request(&endpoint)
@@ -1,3 +1,9 @@
use tauri::{App, WebviewWindow};

-pub fn platform(_app: &mut App, _main_window: WebviewWindow, _settings_window: WebviewWindow) {}
+pub fn platform(
+    _app: &mut App,
+    _main_window: WebviewWindow,
+    _settings_window: WebviewWindow,
+    _check_window: WebviewWindow,
+) {
+}
@@ -1,6 +1,9 @@
-//credits to: https://github.com/ayangweb/ayangweb-EcoPaste/blob/169323dbe6365ffe4abb64d867439ed2ea84c6d1/src-tauri/src/core/setup/mac.rs
-use tauri::{ActivationPolicy, App, Emitter, EventTarget, WebviewWindow};
-use tauri_nspanel::{cocoa::appkit::NSWindowCollectionBehavior, panel_delegate, WebviewWindowExt};
+//! credits to: https://github.com/ayangweb/ayangweb-EcoPaste/blob/169323dbe6365ffe4abb64d867439ed2ea84c6d1/src-tauri/src/core/setup/mac.rs
+use cocoa::appkit::NSWindow;
+use tauri::Manager;
+use tauri::{App, AppHandle, Emitter, EventTarget, WebviewWindow};
+use tauri_nspanel::{WebviewWindowExt, cocoa::appkit::NSWindowCollectionBehavior, panel_delegate};

use crate::common::MAIN_WINDOW_LABEL;

@@ -12,9 +15,12 @@ const WINDOW_BLUR_EVENT: &str = "tauri://blur";
const WINDOW_MOVED_EVENT: &str = "tauri://move";
const WINDOW_RESIZED_EVENT: &str = "tauri://resize";

-pub fn platform(app: &mut App, main_window: WebviewWindow, _settings_window: WebviewWindow) {
-    app.set_activation_policy(ActivationPolicy::Accessory);
+pub fn platform(
+    _app: &mut App,
+    main_window: WebviewWindow,
+    _settings_window: WebviewWindow,
+    _check_window: WebviewWindow,
+) {
    // Convert ns_window to ns_panel
    let panel = main_window.to_panel().unwrap();

@@ -26,7 +32,7 @@ pub fn platform(app: &mut App, main_window: WebviewWindow, _settings_window: Web

    // Share the window across all desktop spaces and full screen
    panel.set_collection_behaviour(
-        NSWindowCollectionBehavior::NSWindowCollectionBehaviorCanJoinAllSpaces
+        NSWindowCollectionBehavior::NSWindowCollectionBehaviorMoveToActiveSpace
            | NSWindowCollectionBehavior::NSWindowCollectionBehaviorStationary
            | NSWindowCollectionBehavior::NSWindowCollectionBehaviorFullScreenAuxiliary,
    );
@@ -75,3 +81,50 @@ pub fn platform(app: &mut App, main_window: WebviewWindow, _settings_window: Web
    // Set the delegate object for the window to handle window events
    panel.set_delegate(delegate);
}

+/// Change NS window attribute between `NSWindowCollectionBehaviorCanJoinAllSpaces`
+/// and `NSWindowCollectionBehaviorMoveToActiveSpace` accordingly.
+///
+/// NOTE: this tauri command is not async because we should run it in the main
+/// thread, or `ns_window.setCollectionBehavior_(collection_behavior)` would lead
+/// to UB.
+#[tauri::command]
+pub(crate) fn toggle_move_to_active_space_attribute(tauri_app_hanlde: AppHandle) {
+    use cocoa::appkit::NSWindowCollectionBehavior;
+    use cocoa::base::id;
+
+    let main_window = tauri_app_hanlde
+        .get_webview_window(MAIN_WINDOW_LABEL)
+        .unwrap();
+    let ns_window = main_window.ns_window().unwrap() as id;
+    let mut collection_behavior = unsafe { ns_window.collectionBehavior() };
+    let join_all_spaces = collection_behavior
+        .contains(NSWindowCollectionBehavior::NSWindowCollectionBehaviorCanJoinAllSpaces);
+    let move_to_active_space = collection_behavior
+        .contains(NSWindowCollectionBehavior::NSWindowCollectionBehaviorMoveToActiveSpace);
+
+    match (join_all_spaces, move_to_active_space) {
+        (true, false) => {
+            collection_behavior
+                .remove(NSWindowCollectionBehavior::NSWindowCollectionBehaviorCanJoinAllSpaces);
+            collection_behavior
+                .insert(NSWindowCollectionBehavior::NSWindowCollectionBehaviorMoveToActiveSpace);
+        }
+        (false, true) => {
+            collection_behavior
+                .remove(NSWindowCollectionBehavior::NSWindowCollectionBehaviorMoveToActiveSpace);
+            collection_behavior
+                .insert(NSWindowCollectionBehavior::NSWindowCollectionBehaviorCanJoinAllSpaces);
+        }
+        _ => {
+            panic!(
+                "invalid NS window attribute, NSWindowCollectionBehaviorCanJoinAllSpaces is set [{}], NSWindowCollectionBehaviorMoveToActiveSpace is set [{}]",
+                join_all_spaces, move_to_active_space
+            );
+        }
+    }
+
+    unsafe {
+        ns_window.setCollectionBehavior_(collection_behavior);
+    }
+}
@@ -18,10 +18,20 @@ pub use windows::*;
#[cfg(target_os = "linux")]
pub use linux::*;

-pub fn default(app: &mut App, main_window: WebviewWindow, settings_window: WebviewWindow) {
+pub fn default(
+    app: &mut App,
+    main_window: WebviewWindow,
+    settings_window: WebviewWindow,
+    check_window: WebviewWindow,
+) {
    // Development mode automatically opens the console: https://tauri.app/develop/debug
-    #[cfg(all(dev, debug_assertions))]
+    #[cfg(debug_assertions)]
    main_window.open_devtools();

-    platform(app, main_window.clone(), settings_window.clone());
+    platform(
+        app,
+        main_window.clone(),
+        settings_window.clone(),
+        check_window.clone(),
+    );
}
@@ -1,3 +1,9 @@
use tauri::{App, WebviewWindow};

-pub fn platform(_app: &mut App, _main_window: WebviewWindow, _settings_window: WebviewWindow) {}
+pub fn platform(
+    _app: &mut App,
+    _main_window: WebviewWindow,
+    _settings_window: WebviewWindow,
+    _check_window: WebviewWindow,
+) {
+}
@@ -1,5 +1,5 @@
-use crate::{hide_coco, show_coco, COCO_TAURI_STORE};
-use tauri::{async_runtime, App, AppHandle, Manager, Runtime};
+use crate::{COCO_TAURI_STORE, hide_coco, show_coco};
+use tauri::{App, AppHandle, Manager, Runtime, async_runtime};
use tauri_plugin_global_shortcut::{GlobalShortcutExt, Shortcut, ShortcutState};
use tauri_plugin_store::{JsonValue, StoreExt};

@@ -17,6 +17,7 @@ const DEFAULT_SHORTCUT: &str = "ctrl+shift+space";

/// Set up the shortcut upon app start.
pub fn enable_shortcut(app: &App) {
+    log::trace!("setting up Coco hotkey");
    let store = app
        .store(COCO_TAURI_STORE)
        .expect("creating a store should not fail");
@@ -43,6 +44,7 @@ pub fn enable_shortcut(app: &App) {
            .expect("default shortcut should never be invalid");
        _register_shortcut_upon_start(app, default_shortcut);
    }
+    log::trace!("Coco hotkey has been set");
}

/// Get the stored shortcut as a string, same as [`_get_shortcut()`], except that
@@ -97,7 +99,7 @@ fn _register_shortcut<R: Runtime>(app: &AppHandle<R>, shortcut: Shortcut) {
        .on_shortcut(shortcut, move |app, scut, event| {
            if scut == &shortcut {
                dbg!("shortcut pressed");
-                let main_window = app.get_window(MAIN_WINDOW_LABEL).unwrap();
+                let main_window = app.get_webview_window(MAIN_WINDOW_LABEL).unwrap();
                if let ShortcutState::Pressed = event.state() {
                    let app_handle = app.clone();
                    if main_window.is_visible().unwrap() {
@@ -126,7 +128,7 @@ fn _register_shortcut_upon_start(app: &App, shortcut: Shortcut) {
    tauri_plugin_global_shortcut::Builder::new()
        .with_handler(move |app, scut, event| {
            if scut == &shortcut {
-                let window = app.get_window(MAIN_WINDOW_LABEL).unwrap();
+                let window = app.get_webview_window(MAIN_WINDOW_LABEL).unwrap();
                if let ShortcutState::Pressed = event.state() {
                    let app_handle = app.clone();

src-tauri/src/util/app_lang.rs (new file, 62 lines)
@@ -0,0 +1,62 @@
//! Configuration entry App language is persisted in the frontend code, but we
//! need to access it on the backend.
//!
//! So we duplicate it here **in the MEMORY** and expose a setter method to the
//! frontend so that the value can be updated and stay update-to-date.

use function_name::named;
use tokio::sync::RwLock;

#[derive(Debug, Clone, Copy, PartialEq)]
#[allow(non_camel_case_types)]
pub(crate) enum Lang {
    en_US,
    zh_CN,
}

impl std::fmt::Display for Lang {
    fn fmt(&self, f: &mut std::fmt::Formatter<'_>) -> std::fmt::Result {
        match self {
            Lang::en_US => write!(f, "en_US"),
            Lang::zh_CN => write!(f, "zh_CN"),
        }
    }
}

impl std::str::FromStr for Lang {
    type Err = String;

    fn from_str(s: &str) -> Result<Self, Self::Err> {
        match s {
            "en" => Ok(Lang::en_US),
            "zh" => Ok(Lang::zh_CN),
            _ => Err(format!("Invalid language: {}", s)),
        }
    }
}

/// Cache the language config in memory.
static APP_LANG: RwLock<Option<Lang>> = RwLock::const_new(None);

/// Frontend code uses this interface to update the in-memory cached `APP_LANG` config.
#[named]
#[tauri::command]
pub(crate) async fn update_app_lang(lang: String) {
    let app_lang = lang.parse::<Lang>().unwrap_or_else(|e| {
        panic!(
            "frontend code passes an invalid argument [{}] to interface [{}], parsing error [{}]",
            lang,
            function_name!(),
            e
        )
    });

    let mut write_guard = APP_LANG.write().await;
    *write_guard = Some(app_lang);
}

/// Helper getter method to handle the `None` case.
pub(crate) async fn get_app_lang() -> Lang {
    let opt_lang = *APP_LANG.read().await;
    opt_lang.expect("frontend code did not invoke [update_app_lang()] to set the APP_LANG")
}
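A minimal sketch (not part of this commit range) of how backend code might consume the cached language. Only `get_app_lang()` and `Lang` come from the new module above; the `localized_greeting` helper and its strings are hypothetical, and `get_app_lang()` panics if the frontend has not called `update_app_lang` first.

    use crate::util::app_lang::{Lang, get_app_lang};

    // Hypothetical helper: pick a label for whatever language the frontend set.
    async fn localized_greeting() -> &'static str {
        match get_app_lang().await {
            Lang::en_US => "Welcome to Coco",
            Lang::zh_CN => "欢迎使用 Coco",
        }
    }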
src-tauri/src/util/file.rs (new file, 174 lines)
@@ -0,0 +1,174 @@
#[derive(Debug, Clone, PartialEq, Copy)]
pub(crate) enum FileType {
    Folder,
    JPEGImage,
    PNGImage,
    PDFDocument,
    PlainTextDocument,
    MicrosoftWordDocument,
    MicrosoftExcelSpreadsheet,
    AudioFile,
    VideoFile,
    CHeaderFile,
    TOMLDocument,
    RustScript,
    CSourceCode,
    MarkdownDocument,
    TerminalSettings,
    ZipArchive,
    Dmg,
    Html,
    Json,
    Xml,
    Yaml,
    Css,
    Vue,
    React,
    Sql,
    Csv,
    Javascript,
    Lnk,
    Typescript,
    Python,
    Java,
    Golang,
    Ruby,
    Php,
    Sass,
    Sketch,
    AdobeAi,
    AdobePsd,
    AdobePr,
    AdobeAu,
    AdobeAe,
    AdobeLr,
    AdobeXd,
    AdobeFl,
    AdobeId,
    Svg,
    Epub,
    Unknown,
}

async fn get_file_type(path: &str) -> FileType {
    let path = camino::Utf8Path::new(path);

    // stat() is more precise than file extension, use it if possible.
    if path.is_dir() {
        return FileType::Folder;
    }

    let Some(ext) = path.extension() else {
        return FileType::Unknown;
    };

    let ext = ext.to_lowercase();
    match ext.as_str() {
        "pdf" => FileType::PDFDocument,
        "txt" | "text" => FileType::PlainTextDocument,
        "doc" | "docx" => FileType::MicrosoftWordDocument,
        "xls" | "xlsx" => FileType::MicrosoftExcelSpreadsheet,
        "jpg" | "jpeg" => FileType::JPEGImage,
        "png" => FileType::PNGImage,
        "mp3" | "wav" | "flac" | "aac" | "ogg" | "m4a" => FileType::AudioFile,
        "mp4" | "avi" | "mov" | "mkv" | "wmv" | "flv" | "webm" => FileType::VideoFile,
        "h" | "hpp" => FileType::CHeaderFile,
        "c" | "cpp" | "cc" | "cxx" => FileType::CSourceCode,
        "toml" => FileType::TOMLDocument,
        "rs" => FileType::RustScript,
        "md" | "markdown" => FileType::MarkdownDocument,
        "terminal" => FileType::TerminalSettings,
        "zip" | "rar" | "7z" | "tar" | "gz" | "bz2" => FileType::ZipArchive,
        "dmg" => FileType::Dmg,
        "html" | "htm" => FileType::Html,
        "json" => FileType::Json,
        "xml" => FileType::Xml,
        "yaml" | "yml" => FileType::Yaml,
        "css" => FileType::Css,
        "vue" => FileType::Vue,
        "jsx" | "tsx" => FileType::React,
        "sql" => FileType::Sql,
        "csv" => FileType::Csv,
        "js" | "mjs" => FileType::Javascript,
        "ts" => FileType::Typescript,
        "py" | "pyw" => FileType::Python,
        "java" => FileType::Java,
        "go" => FileType::Golang,
        "rb" => FileType::Ruby,
        "php" => FileType::Php,
        "sass" | "scss" => FileType::Sass,
        "sketch" => FileType::Sketch,
        "ai" => FileType::AdobeAi,
        "psd" => FileType::AdobePsd,
        "prproj" => FileType::AdobePr,
        "aup" | "aup3" => FileType::AdobeAu,
        "aep" => FileType::AdobeAe,
        "lrcat" => FileType::AdobeLr,
        "xd" => FileType::AdobeXd,
        "fla" => FileType::AdobeFl,
        "indd" => FileType::AdobeId,
        "svg" => FileType::Svg,
        "epub" => FileType::Epub,
        "lnk" => FileType::Lnk,
        _ => FileType::Unknown,
    }
}

fn type_to_icon(ty: FileType) -> &'static str {
    match ty {
        FileType::Folder => "font_file_folder",
        FileType::JPEGImage => "font_file_image",
        FileType::PNGImage => "font_file_image",
        FileType::PDFDocument => "font_file_document_pdf",
        FileType::PlainTextDocument => "font_file_txt",
        FileType::MicrosoftWordDocument => "font_file_document_word",
        FileType::MicrosoftExcelSpreadsheet => "font_file_spreadsheet_excel",
        FileType::AudioFile => "font_file_audio",
        FileType::VideoFile => "font_file_video",
        FileType::CHeaderFile => "font_file_csource",
        FileType::TOMLDocument => "font_file_toml",
        FileType::RustScript => "font_file_rustscript1",
        FileType::CSourceCode => "font_file_csource",
        FileType::MarkdownDocument => "font_file_markdown",
        FileType::TerminalSettings => "font_file_terminal1",
        FileType::ZipArchive => "font_file_zip",
        FileType::Dmg => "font_file_dmg",
        FileType::Html => "font_file_html",
        FileType::Json => "font_file_json",
        FileType::Xml => "font_file_xml",
        FileType::Yaml => "font_file_yaml",
        FileType::Css => "font_file_css",
        FileType::Vue => "font_file_vue",
        FileType::React => "font_file_react",
        FileType::Sql => "font_file_sql",
        FileType::Csv => "font_file_csv",
        FileType::Javascript => "font_file_javascript",
        FileType::Lnk => "font_file_lnk",
        FileType::Typescript => "font_file_typescript",
        FileType::Python => "font_file_python",
        FileType::Java => "font_file_java",
        FileType::Golang => "font_file_golang",
        FileType::Ruby => "font_file_ruby",
        FileType::Php => "font_file_php",
        FileType::Sass => "font_file_sass",
        FileType::Sketch => "font_file_sketch",
        FileType::AdobeAi => "font_file_adobe_ai",
        FileType::AdobePsd => "font_file_adobe_psd",
        FileType::AdobePr => "font_file_adobe_pr",
        FileType::AdobeAu => "font_file_adobe_au",
        FileType::AdobeAe => "font_file_adobe_ae",
        FileType::AdobeLr => "font_file_adobe_lr",
        FileType::AdobeXd => "font_file_adobe_xd",
        FileType::AdobeFl => "font_file_adobe_fl",
        FileType::AdobeId => "font_file_adobe_id",
        FileType::Svg => "font_file_svg",
        FileType::Epub => "font_file_epub",
        FileType::Unknown => "font_file_unknown",
    }
}

#[tauri::command]
pub(crate) async fn get_file_icon(path: String) -> &'static str {
    let ty = get_file_type(path.as_str()).await;
    type_to_icon(ty)
}
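A small sketch (not part of this commit range) of how the helpers in the new file compose, written as an in-module test; it assumes tokio's test macro is available to the crate.

    #[cfg(test)]
    mod icon_tests {
        use super::*;

        #[tokio::test]
        async fn extension_maps_to_icon() {
            // Extension-based detection: a .rs path resolves to the Rust icon.
            assert_eq!(get_file_type("src/main.rs").await, FileType::RustScript);
            assert_eq!(type_to_icon(FileType::RustScript), "font_file_rustscript1");

            // Unknown extensions fall back to the generic icon.
            assert_eq!(get_file_icon("notes.someext".to_string()).await, "font_file_unknown");
        }
    }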
@@ -1,10 +1,20 @@
+pub(crate) mod app_lang;
+pub(crate) mod file;
+pub(crate) mod platform;
+pub(crate) mod updater;

use std::{path::Path, process::Command};
use tauri::{AppHandle, Runtime};
use tauri_plugin_shell::ShellExt;

+/// We use this env variable to determine the DE on Linux.
+const XDG_CURRENT_DESKTOP: &str = "XDG_CURRENT_DESKTOP";

+#[derive(Debug, PartialEq)]
enum LinuxDesktopEnvironment {
    Gnome,
    Kde,
+    Unsupported { xdg_current_desktop: String },
}

impl LinuxDesktopEnvironment {
@@ -30,6 +40,14 @@ impl LinuxDesktopEnvironment {
                .arg(path)
                .output()
                .map_err(|e| e.to_string())?,
+            Self::Unsupported {
+                xdg_current_desktop,
+            } => {
+                return Err(format!(
+                    "Cannot open apps as this Linux desktop environment [{}] is not supported",
+                    xdg_current_desktop
+                ));
+            }
        };

        if !cmd_output.status.success() {
@@ -44,20 +62,23 @@ impl LinuxDesktopEnvironment {
    }
}

+/// None means that it is likely that we do not have a desktop environment.
fn get_linux_desktop_environment() -> Option<LinuxDesktopEnvironment> {
-    let de_os_str = std::env::var_os("XDG_CURRENT_DESKTOP")?;
-    let de_str = de_os_str
-        .into_string()
-        .expect("$XDG_CURRENT_DESKTOP should be UTF-8 encoded");
+    let de_os_str = std::env::var_os(XDG_CURRENT_DESKTOP)?;
+    let de_str = de_os_str.into_string().unwrap_or_else(|_os_string| {
+        panic!("${} should be UTF-8 encoded", XDG_CURRENT_DESKTOP);
+    });

    let de = match de_str.as_str() {
        "GNOME" => LinuxDesktopEnvironment::Gnome,
+        // Ubuntu uses "ubuntu:GNOME" instead of just "GNOME", they really love
+        // their distro name.
+        "ubuntu:GNOME" => LinuxDesktopEnvironment::Gnome,
        "KDE" => LinuxDesktopEnvironment::Kde,

-        unsupported_de => unimplemented!(
-            "This desktop environment [{}] has not been supported yet",
-            unsupported_de
-        ),
+        _ => LinuxDesktopEnvironment::Unsupported {
+            xdg_current_desktop: de_str,
+        },
    };

    Some(de)
@@ -72,7 +93,7 @@ pub async fn open<R: Runtime>(app_handle: AppHandle<R>, path: String) -> Result<
    let borrowed_path = Path::new(&path);
    if let Some(file_extension) = borrowed_path.extension() {
        if file_extension == "desktop" {
-            let desktop_environment = get_linux_desktop_environment().expect("The Linux OS is running without a desktop, Coco could never run in such a environment");
+            let desktop_environment = get_linux_desktop_environment().expect("The Linux OS is running without a desktop, Coco could never run in such an environment");
            return desktop_environment.launch_app_via_desktop_file(path);
        }
    }
@@ -83,3 +104,55 @@ pub async fn open<R: Runtime>(app_handle: AppHandle<R>, path: String) -> Result<
        .open(path, None)
        .map_err(|e| e.to_string())
}

+#[cfg(test)]
+mod tests {
+    use super::*;
+
+    // This test modifies env var XDG_CURRENT_DESKTOP, which is kinda unsafe
+    // but considering this is just test, it is ok to do so.
+    #[test]
+    fn test_get_linux_desktop_environment() {
+        // SAFETY: Rust code won't modify/read XDG_CURRENT_DESKTOP concurrently, we
+        // have no guarantee from the underlying C code.
+        unsafe {
+            // Save the original value if it exists
+            let original_value = std::env::var_os(XDG_CURRENT_DESKTOP);
+
+            // Test when XDG_CURRENT_DESKTOP is not set
+            std::env::remove_var(XDG_CURRENT_DESKTOP);
+            assert!(get_linux_desktop_environment().is_none());
+
+            // Test GNOME
+            std::env::set_var(XDG_CURRENT_DESKTOP, "GNOME");
+            let result = get_linux_desktop_environment();
+            assert_eq!(result.unwrap(), LinuxDesktopEnvironment::Gnome);
+
+            // Test ubuntu:GNOME
+            std::env::set_var(XDG_CURRENT_DESKTOP, "ubuntu:GNOME");
+            let result = get_linux_desktop_environment();
+            assert_eq!(result.unwrap(), LinuxDesktopEnvironment::Gnome);
+
+            // Test KDE
+            std::env::set_var(XDG_CURRENT_DESKTOP, "KDE");
+            let result = get_linux_desktop_environment();
+            assert_eq!(result.unwrap(), LinuxDesktopEnvironment::Kde);
+
+            // Test unsupported desktop environment
+            std::env::set_var(XDG_CURRENT_DESKTOP, "XFCE");
+            let result = get_linux_desktop_environment();
+            assert_eq!(
+                result.unwrap(),
+                LinuxDesktopEnvironment::Unsupported {
+                    xdg_current_desktop: "XFCE".into()
+                }
+            );
+
+            // Restore the original value
+            match original_value {
+                Some(value) => std::env::set_var(XDG_CURRENT_DESKTOP, value),
+                None => std::env::remove_var(XDG_CURRENT_DESKTOP),
+            }
+        }
+    }
+}
src-tauri/src/util/platform.rs (new file, 34 lines)
@@ -0,0 +1,34 @@
use derive_more::Display;
use serde::{Deserialize, Serialize};
use std::borrow::Cow;

#[derive(Debug, Deserialize, Serialize, Copy, Clone, Hash, PartialEq, Eq, Display)]
#[serde(rename_all(serialize = "lowercase", deserialize = "lowercase"))]
pub(crate) enum Platform {
    #[display("macOS")]
    Macos,
    #[display("Linux")]
    Linux,
    #[display("windows")]
    Windows,
}

impl Platform {
    /// Helper function to determine the current platform.
    pub(crate) fn current() -> Platform {
        let os_str = std::env::consts::OS;
        serde_plain::from_str(os_str).unwrap_or_else(|_e| {
            panic!("std::env::consts::OS is [{}], which is not a valid value for [enum Platform], valid values: ['macos', 'linux', 'windows']", os_str)
        })
    }

    /// Return the `X-OS-NAME` HTTP request header.
    pub(crate) fn to_os_name_http_header_str(&self) -> Cow<'static, str> {
        match self {
            Self::Macos => Cow::Borrowed("macos"),
            Self::Windows => Cow::Borrowed("windows"),
            // For Linux, we need the actual distro `ID`, not just a "linux".
            Self::Linux => Cow::Owned(sysinfo::System::distribution_id()),
        }
    }
}
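A sketch (not part of this commit range) of how `to_os_name_http_header_str` could be attached to an outgoing request. The `X-OS-NAME` header name comes from the doc comment above; the reqwest wiring and the helper itself are illustrative assumptions.

    use crate::util::platform::Platform;

    // Illustrative helper: add the OS name header to a reqwest request builder.
    fn with_os_name_header(builder: reqwest::RequestBuilder) -> reqwest::RequestBuilder {
        let os_name = Platform::current().to_os_name_http_header_str();
        builder.header("X-OS-NAME", os_name.as_ref())
    }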
src-tauri/src/util/updater.rs (new file, 67 lines)
@@ -0,0 +1,67 @@
use semver::Version;
use tauri_plugin_updater::RemoteRelease;

/// Helper function to extract the build number out of `version`.
///
/// If the version string is in the `x.y.z` format and does not include a build
/// number, we assume a build number of 0.
fn extract_version_number(version: &Version) -> u32 {
    let pre = &version.pre;

    if pre.is_empty() {
        // A special value for the versions that do not have array
        0
    } else {
        let pre_str = pre.as_str();
        let build_number_str = {
            match pre_str.strip_prefix("SNAPSHOT-") {
                Some(str) => str,
                None => pre_str,
            }
        };
        let build_number: u32 = build_number_str.parse().unwrap_or_else(|e| {
            panic!(
                "invalid build number, cannot parse [{}] to a valid build number, error [{}], version [{}]",
                build_number_str, e, version
            )
        });

        build_number
    }
}

/// # Local version format
///
/// Packages built in our CI use the following format:
///
/// * `x.y.z-SNAPSHOT-<build number>`
/// * `x.y.z-<build number>`
///
/// If you build Coco from src, the version will be in format `x.y.z`
///
/// # Remote version format
///
/// `x.y.z-<build number>`
///
/// # How we compare versions
///
/// We compare versions based solely on the build number.
/// If the version string is in the `x.y.z` format and does not include a build number,
/// we assume a build number of 0. As a result, such versions are considered older
/// than any version with an explicit build number.
pub(crate) fn custom_version_comparator(local: Version, remote_release: RemoteRelease) -> bool {
    let remote = remote_release.version;

    let local_build_number = extract_version_number(&local);
    let remote_build_number = extract_version_number(&remote);

    let should_update = remote_build_number > local_build_number;
    log::debug!(
        "custom version comparator invoked, local version [{}], remote version [{}], should update [{}]",
        local,
        remote,
        should_update
    );

    should_update
}
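The comparator above is presumably handed to the updater plugin; the sketch below (not part of this commit range) shows one way to wire it using the tauri-plugin-updater v2 builder API, with the exact integration point being an assumption.

    use tauri_plugin_updater::UpdaterExt;

    use crate::util::updater::custom_version_comparator;

    // Sketch: decide "should update" purely by build number, per the rules documented above.
    async fn check_for_update<R: tauri::Runtime>(
        app: tauri::AppHandle<R>,
    ) -> Result<(), tauri_plugin_updater::Error> {
        let updater = app
            .updater_builder()
            .version_comparator(custom_version_comparator)
            .build()?;

        if let Some(update) = updater.check().await? {
            log::info!("update available: {}", update.version);
        }

        Ok(())
    }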
@@ -41,9 +41,9 @@
      "title": "Coco AI Settings",
      "url": "/ui/settings",
      "width": 1000,
+      "minWidth": 1000,
      "height": 700,
      "minHeight": 700,
-      "minWidth": 1000,
      "center": true,
      "transparent": true,
      "maximizable": false,
@@ -55,6 +55,26 @@
        "effects": ["sidebar"],
        "state": "active"
      }
+    },
+    {
+      "label": "check",
+      "title": "Coco AI Update",
+      "url": "/ui/check",
+      "width": 340,
+      "minWidth": 340,
+      "height": 260,
+      "minHeight": 260,
+      "center": false,
+      "transparent": true,
+      "maximizable": false,
+      "skipTaskbar": false,
+      "dragDropEnabled": false,
+      "hiddenTitle": true,
+      "visible": false,
+      "windowEffects": {
+        "effects": ["sidebar"],
+        "state": "active"
+      }
    }
  ],
  "security": {
@@ -93,20 +113,6 @@
      "icons/Square310x310Logo.png",
      "icons/StoreLogo.png"
    ],
-    "macOS": {
-      "minimumSystemVersion": "10.12",
-      "hardenedRuntime": true,
-      "dmg": {
-        "appPosition": {
-          "x": 180,
-          "y": 180
-        },
-        "applicationFolderPosition": {
-          "x": 480,
-          "y": 180
-        }
-      }
-    },
    "resources": ["assets/**/*", "icons"]
  },
  "plugins": {
src-tauri/tauri.linux.conf.json (new file, 15 lines)
@@ -0,0 +1,15 @@
{
  "identifier": "rs.coco.app",
  "bundle": {
    "linux": {
      "deb": {
        "depends": ["gstreamer1.0-plugins-good"],
        "desktopTemplate": "./Coco.desktop"
      },
      "rpm": {
        "depends": ["gstreamer1-plugins-good"],
        "desktopTemplate": "./Coco.desktop"
      }
    }
  }
}
@@ -96,7 +96,7 @@ export const Get = <T>(

export const Post = <T>(
  url: string,
-  data: IAnyObj,
+  data: IAnyObj | undefined,
  params: IAnyObj = {},
  headers: IAnyObj = {}
): Promise<[any, FcResponse<T> | undefined]> => {
src/api/streamFetch.ts (new file, 63 lines)
@@ -0,0 +1,63 @@
export async function streamPost({
  url,
  body,
  queryParams,
  headers,
  onMessage,
  onError,
}: {
  url: string;
  body: any;
  queryParams?: Record<string, any>;
  headers?: Record<string, string>;
  onMessage: (chunk: string) => void;
  onError?: (err: any) => void;
}) {
  const appStore = JSON.parse(localStorage.getItem("app-store") || "{}");

  let baseURL = appStore.state?.endpoint_http;
  if (!baseURL || baseURL === "undefined") {
    baseURL = "";
  }

  const headersStr = localStorage.getItem("headers") || "{}";
  const headersStorage = JSON.parse(headersStr);

  const query = new URLSearchParams(queryParams || {}).toString();
  const fullUrl = `${baseURL}${url}?${query}`;

  try {
    const res = await fetch(fullUrl, {
      method: "POST",
      headers: {
        "Content-Type": "application/json",
        ...(headersStorage),
        ...(headers || {}),
      },
      body: JSON.stringify(body),
    });

    if (!res.ok || !res.body) throw new Error("Stream failed");

    const reader = res.body.getReader();
    const decoder = new TextDecoder("utf-8");
    let buffer = "";

    while (true) {
      const { done, value } = await reader.read();
      if (done) break;

      buffer += decoder.decode(value, { stream: true });

      const lines = buffer.split("\n");
      for (let i = 0; i < lines.length - 1; i++) {
        const line = lines[i].trim();
        if (line) onMessage(line);
      }
      buffer = lines[lines.length - 1];
    }
  } catch (err) {
    console.error("streamPost error:", err);
    onError?.(err);
  }
}
@@ -1,133 +0,0 @@ (entire file removed)
import { fetch } from "@tauri-apps/plugin-http";

import { clientEnv } from "@/utils/env";
import { useLogStore } from "@/stores/logStore";
import { get_server_token } from "@/commands";
interface FetchRequestConfig {
  url: string;
  method?: "GET" | "POST" | "PUT" | "DELETE";
  headers?: Record<string, string>;
  body?: any;
  timeout?: number;
  parseAs?: "json" | "text" | "binary";
  baseURL?: string;
}

interface FetchResponse<T = any> {
  data: T;
  status: number;
  statusText: string;
  headers: Headers;
}

const timeoutPromise = (ms: number) => {
  return new Promise<never>((_, reject) =>
    setTimeout(() => reject(new Error(`Request timed out after ${ms} ms`)), ms)
  );
};

export const tauriFetch = async <T = any>({
  url,
  method = "GET",
  headers = {},
  body,
  timeout = 30,
  parseAs = "json",
  baseURL = clientEnv.COCO_SERVER_URL
}: FetchRequestConfig): Promise<FetchResponse<T>> => {
  const addLog = useLogStore.getState().addLog;

  try {
    const appStore = JSON.parse(localStorage.getItem("app-store") || "{}");
    const connectStore = JSON.parse(localStorage.getItem("connect-store") || "{}");
    console.log("baseURL", appStore.state?.endpoint_http)

    baseURL = appStore.state?.endpoint_http || baseURL;

    const authStore = JSON.parse(localStorage.getItem("auth-store") || "{}")
    const auth = authStore?.state?.auth
    console.log("auth", auth)

    if (baseURL.endsWith("/")) {
      baseURL = baseURL.slice(0, -1);
    }

    if (!url.startsWith("http://") && !url.startsWith("https://")) {
      // If not, prepend the defaultPrefix
      url = baseURL + url;
    }

    if (method !== "GET") {
      headers["Content-Type"] = "application/json";
    }

    const server_id = connectStore.state?.currentService?.id || "default_coco_server"
    const res: any = await get_server_token(server_id);

    headers["X-API-TOKEN"] = headers["X-API-TOKEN"] || res?.access_token || undefined;

    // debug API
    const requestInfo = {
      url,
      method,
      headers,
      body,
      timeout,
      parseAs,
    };

    const fetchPromise = fetch(url, {
      method,
      headers,
      body,
    });

    const response = await Promise.race([
      fetchPromise,
      timeoutPromise(timeout * 1000),
    ]);

    const statusText = response.ok ? "OK" : "Error";

    let data: any;
    if (parseAs === "json") {
      data = await response.json();
    } else if (parseAs === "text") {
      data = await response.text();
    } else {
      data = await response.arrayBuffer();
    }

    // debug API
    const log = {
      request: requestInfo,
      response: {
        data,
        status: response.status,
        statusText,
        headers: response.headers,
      },
    };
    addLog(log);

    return log.response;
  } catch (error) {
    console.error("Request failed:", error);

    // debug API
    const log = {
      request: {
        url,
        method,
        headers,
        body,
        timeout,
        parseAs,
      },
      error,
    };
    addLog(log);

    throw error;
  }
};
src/assets/assets/fonts/icons/iconfont-app.js (new file, 1 line)
src/assets/assets/fonts/icons/iconfont.js (new file, 1 line)
Two image assets changed (Before: 1.8 KiB, After: 1.8 KiB).
@@ -1,7 +1,7 @@
|
|||||||
import { invoke } from "@tauri-apps/api/core";
|
import { invoke } from "@tauri-apps/api/core";
|
||||||
|
import { emit } from "@tauri-apps/api/event";
|
||||||
|
|
||||||
import {
|
import {
|
||||||
ServerTokenResponse,
|
|
||||||
Server,
|
Server,
|
||||||
Connector,
|
Connector,
|
||||||
DataSource,
|
DataSource,
|
||||||
@@ -17,6 +17,24 @@ import {
|
|||||||
} from "@/types/commands";
|
} from "@/types/commands";
|
||||||
import { useAppStore } from "@/stores/appStore";
|
import { useAppStore } from "@/stores/appStore";
|
||||||
import { useAuthStore } from "@/stores/authStore";
|
import { useAuthStore } from "@/stores/authStore";
|
||||||
|
import { useConnectStore } from "@/stores/connectStore";
|
||||||
|
|
||||||
|
export function handleLogout(serverId?: string) {
|
||||||
|
const setIsCurrentLogin = useAuthStore.getState().setIsCurrentLogin;
|
||||||
|
const { currentService, setCurrentService, serverList, setServerList } =
|
||||||
|
useConnectStore.getState();
|
||||||
|
const id = serverId || currentService?.id;
|
||||||
|
if (!id) return;
|
||||||
|
setIsCurrentLogin(false);
|
||||||
|
emit("login_or_logout", false);
|
||||||
|
if (currentService?.id === id) {
|
||||||
|
setCurrentService({ ...currentService, profile: null });
|
||||||
|
}
|
||||||
|
const updatedServerList = serverList.map((server) =>
|
||||||
|
server.id === id ? { ...server, profile: null } : server
|
||||||
|
);
|
||||||
|
setServerList(updatedServerList);
|
||||||
|
}
|
||||||
|
|
||||||
// Endpoints that don't require authentication
|
// Endpoints that don't require authentication
|
||||||
const WHITELIST_SERVERS = [
|
const WHITELIST_SERVERS = [
|
||||||
@@ -29,6 +47,7 @@ const WHITELIST_SERVERS = [
|
|||||||
"refresh_coco_server_info",
|
"refresh_coco_server_info",
|
||||||
"handle_sso_callback",
|
"handle_sso_callback",
|
||||||
"query_coco_fusion",
|
"query_coco_fusion",
|
||||||
|
"open_session_chat", // TODO: quick ai access is a configured service, even if the current service is not logged in, it should not affect the configured service.
|
||||||
];
|
];
|
||||||
|
|
||||||
async function invokeWithErrorHandler<T>(
|
async function invokeWithErrorHandler<T>(
|
||||||
@@ -36,7 +55,14 @@ async function invokeWithErrorHandler<T>(
|
|||||||
args?: Record<string, any>
|
args?: Record<string, any>
|
||||||
): Promise<T> {
|
): Promise<T> {
|
||||||
const isCurrentLogin = useAuthStore.getState().isCurrentLogin;
|
const isCurrentLogin = useAuthStore.getState().isCurrentLogin;
|
||||||
if (!WHITELIST_SERVERS.includes(command) && !isCurrentLogin) {
|
const currentService = useConnectStore.getState().currentService;
|
||||||
|
|
||||||
|
// Not logged in
|
||||||
|
console.log(command, isCurrentLogin, currentService?.profile);
|
||||||
|
if (
|
||||||
|
!WHITELIST_SERVERS.includes(command) &&
|
||||||
|
(!isCurrentLogin || !currentService?.profile)
|
||||||
|
) {
|
||||||
console.error("This command requires authentication");
|
console.error("This command requires authentication");
|
||||||
throw new Error("This command requires authentication");
|
throw new Error("This command requires authentication");
|
||||||
}
|
}
|
||||||
@@ -50,7 +76,7 @@ async function invokeWithErrorHandler<T>(
|
|||||||
const failedResult = result as any;
|
const failedResult = result as any;
|
||||||
if (failedResult.failed?.length > 0 && failedResult?.hits?.length == 0) {
|
if (failedResult.failed?.length > 0 && failedResult?.hits?.length == 0) {
|
||||||
failedResult.failed.forEach((error: any) => {
|
failedResult.failed.forEach((error: any) => {
|
||||||
addError(error.error, 'error');
|
addError(error.error, "error");
|
||||||
// console.error(error.error);
|
// console.error(error.error);
|
||||||
});
|
});
|
||||||
}
|
}
|
||||||
@@ -66,15 +92,16 @@ async function invokeWithErrorHandler<T>(
|
|||||||
return result;
|
return result;
|
||||||
} catch (error: any) {
|
} catch (error: any) {
|
||||||
const errorMessage = error || "Command execution failed";
|
const errorMessage = error || "Command execution failed";
|
||||||
|
// 401 Unauthorized
|
||||||
|
if (errorMessage.includes("Unauthorized")) {
|
||||||
|
handleLogout();
|
||||||
|
} else {
|
||||||
addError(command + ":" + errorMessage, "error");
|
addError(command + ":" + errorMessage, "error");
|
||||||
|
}
|
||||||
throw error;
|
throw error;
|
||||||
}
|
}
|
||||||
}
|
}
|
||||||
|
|
||||||
export function get_server_token(id: string): Promise<ServerTokenResponse> {
|
|
||||||
return invokeWithErrorHandler(`get_server_token`, { id });
|
|
||||||
}
|
|
||||||
|
|
||||||
export function list_coco_servers(): Promise<Server[]> {
|
export function list_coco_servers(): Promise<Server[]> {
|
||||||
return invokeWithErrorHandler(`list_coco_servers`);
|
return invokeWithErrorHandler(`list_coco_servers`);
|
||||||
}
|
}
|
||||||
@@ -123,12 +150,26 @@ export function get_connectors_by_server(id: string): Promise<Connector[]> {
|
|||||||
return invokeWithErrorHandler(`get_connectors_by_server`, { id });
|
return invokeWithErrorHandler(`get_connectors_by_server`, { id });
|
||||||
}
|
}
|
||||||
|
|
||||||
export function datasource_search(id: string): Promise<DataSource[]> {
|
export function datasource_search({
|
||||||
return invokeWithErrorHandler(`datasource_search`, { id });
|
id,
|
||||||
|
queryParams,
|
||||||
|
}: {
|
||||||
|
id: string;
|
||||||
|
//["query=abc", "filter=er", "filter=efg", "from=0", "size=5"]
|
||||||
|
queryParams?: string[];
|
||||||
|
}): Promise<DataSource[]> {
|
||||||
|
return invokeWithErrorHandler(`datasource_search`, { id, queryParams });
|
||||||
}
|
}
|
||||||
|
|
||||||
export function mcp_server_search(id: string): Promise<DataSource[]> {
|
export function mcp_server_search({
|
||||||
return invokeWithErrorHandler(`mcp_server_search`, { id });
|
id,
|
||||||
|
queryParams,
|
||||||
|
}: {
|
||||||
|
id: string;
|
||||||
|
//["query=abc", "filter=er", "filter=efg", "from=0", "size=5"]
|
||||||
|
queryParams?: string[];
|
||||||
|
}): Promise<DataSource[]> {
|
||||||
|
return invokeWithErrorHandler(`mcp_server_search`, { id, queryParams });
|
||||||
}
|
}
|
||||||
|
|
||||||
export function connect_to_server(id: string, clientId: string): Promise<void> {
|
export function connect_to_server(id: string, clientId: string): Promise<void> {
|
||||||
@@ -206,13 +247,16 @@ export function open_session_chat({
 export function cancel_session_chat({
 serverId,
 sessionId,
+queryParams,
 }: {
 serverId: string;
 sessionId: string;
+queryParams?: Record<string, any>;
 }): Promise<string> {
 return invokeWithErrorHandler(`cancel_session_chat`, {
 serverId,
 sessionId,
+queryParams,
 });
 }

@@ -223,7 +267,7 @@ export function new_chat({
 queryParams,
 }: {
 serverId: string;
-websocketId?: string;
+websocketId: string;
 message: string;
 queryParams?: Record<string, any>;
 }): Promise<GetResponse> {
@@ -235,6 +279,25 @@ export function new_chat({
 });
 }

+export function chat_create({
+serverId,
+message,
+queryParams,
+clientId,
+}: {
+serverId: string;
+message: string;
+queryParams?: Record<string, any>;
+clientId: string;
+}): Promise<GetResponse> {
+return invokeWithErrorHandler(`chat_create`, {
+serverId,
+message,
+queryParams,
+clientId,
+});
+}
+
 export function send_message({
 serverId,
 websocketId,
@@ -243,7 +306,7 @@ export function send_message({
 queryParams,
 }: {
 serverId: string;
-websocketId?: string;
+websocketId: string;
 sessionId: string;
 message: string;
 queryParams?: Record<string, any>;
@@ -257,6 +320,28 @@ export function send_message({
 });
 }

+export function chat_chat({
+serverId,
+sessionId,
+message,
+queryParams,
+clientId,
+}: {
+serverId: string;
+sessionId: string;
+message: string;
+queryParams?: Record<string, any>;
+clientId: string;
+}): Promise<string> {
+return invokeWithErrorHandler(`chat_chat`, {
+serverId,
+sessionId,
+message,
+queryParams,
+clientId,
+});
+}
+
 export const delete_session_chat = (serverId: string, sessionId: string) => {
 return invokeWithErrorHandler<boolean>(`delete_session_chat`, {
 serverId,
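The new chat_create and chat_chat commands drive the chat lifecycle by clientId rather than by the old websocket session id: chat_create starts a session, chat_chat sends into an existing one. A hedged sketch; the "standalone" client id and the _id field on the created session are assumptions based on how the rest of this diff uses those names:

```ts
// Start a session, then continue it. chat_create returns a GetResponse; the exact
// location of the new session id inside it is assumed here.
const created = await chat_create({
  serverId,
  message: "Hello",
  clientId: "standalone",
});

const sessionId = (created as any)?._id ?? "";

await chat_chat({
  serverId,
  sessionId,
  message: "Tell me more",
  clientId: "standalone",
});
```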
@@ -268,15 +353,14 @@ export const update_session_chat = (payload: {
 serverId: string;
 sessionId: string;
 title?: string;
-context?: {
-attachments?: string[];
-};
+context?: Record<string, any>;
 }): Promise<boolean> => {
 return invokeWithErrorHandler<boolean>("update_session_chat", payload);
 };

 export const assistant_search = (payload: {
 serverId: string;
+queryParams?: string[];
 }): Promise<boolean> => {
 return invokeWithErrorHandler<boolean>("assistant_search", payload);
 };
@@ -333,3 +417,7 @@ export const query_coco_fusion = (payload: {
 ...payload,
 });
 };
+
+export const get_app_search_source = () => {
+return invokeWithErrorHandler<void>("get_app_search_source");
+};
@@ -27,3 +27,15 @@ export function show_coco(): Promise<void> {
 export function show_settings(): Promise<void> {
 return invoke('show_settings');
 }
+
+export function show_check(): Promise<void> {
+return invoke('show_check');
+}
+
+export function hide_check(): Promise<void> {
+return invoke('hide_check');
+}
+
+export function toggle_move_to_active_space_attribute(): Promise<void> {
+return invoke('toggle_move_to_active_space_attribute');
+}
@@ -1,9 +1,8 @@
 import { useRef } from "react";

-import { Post } from "@/api/axiosRequest";
 import platformAdapter from "@/utils/platformAdapter";
 import { useConnectStore } from "@/stores/connectStore";
-import { useAppStore } from "@/stores/appStore";
+import { parseSearchQuery, unrequitable } from "@/utils";

 interface AssistantFetcherProps {
 debounceKeyword?: string;
@@ -14,14 +13,8 @@ export const AssistantFetcher = ({
 debounceKeyword = "",
 assistantIDs = [],
 }: AssistantFetcherProps) => {
-const isTauri = useAppStore((state) => state.isTauri);
-const currentService = useConnectStore((state) => state.currentService);
-
-const currentAssistant = useConnectStore((state) => state.currentAssistant);
-const setCurrentAssistant = useConnectStore((state) => {
-return state.setCurrentAssistant;
-});
+const { currentService, currentAssistant, setCurrentAssistant } =
+useConnectStore();

 const lastServerId = useRef<string | null>(null);

@@ -29,63 +22,38 @@ export const AssistantFetcher = ({
 current: number;
 pageSize: number;
 serverId?: string;
+query?: string;
 }) => {
 try {
-const { pageSize, current, serverId = currentService?.id } = params;
+if (unrequitable()) {
+return {
+total: 0,
+list: [],
+};
+}

-const from = (current - 1) * pageSize;
-const size = pageSize;
+const {
+pageSize,
+current,
+serverId = currentService?.id,
+query,
+} = params;

-let response: any;
+const queryParams = parseSearchQuery({
+from: (current - 1) * pageSize,
+size: pageSize,
+query: query ?? debounceKeyword,
+fuzziness: 5,
+filters: {
+enabled: true,
+id: assistantIDs,
+},
+});

-const body: Record<string, any> = {
+const response = await platformAdapter.fetchAssistant(
 serverId,
-from,
-size,
+queryParams
+);
-};
-
-body.query = {
-bool: {
-must: [{ term: { enabled: true } }],
-},
-};
-
-if (debounceKeyword) {
-body.query.bool.must.push({
-query_string: {
-fields: ["combined_fulltext"],
-query: debounceKeyword,
-fuzziness: "AUTO",
-fuzzy_prefix_length: 2,
-fuzzy_max_expansions: 10,
-fuzzy_transpositions: true,
-allow_leading_wildcard: false,
-},
-});
-}
-if (assistantIDs.length > 0) {
-body.query.bool.must.push({
-terms: {
-id: assistantIDs.map((id) => id),
-},
-});
-}
-
-if (isTauri) {
-if (!currentService?.id) {
-throw new Error("currentService is undefined");
-}
-
-response = await platformAdapter.commands("assistant_search", body);
-} else {
-const [error, res] = await Post(`/assistant/_search`, body);
-
-if (error) {
-throw new Error(error);
-}
-
-response = res;
-}

 let assistantList = response?.hits?.hits ?? [];
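The fetcher now builds its search as a flat string array via parseSearchQuery and hands it to platformAdapter.fetchAssistant instead of assembling an Elasticsearch-style body. A hedged sketch of roughly what that call reduces to, assuming parseSearchQuery emits the same "key=value" strings documented for the search commands above; the exact encoding of the nested filters object is an assumption:

```ts
// Hand-written equivalent of the generated queryParams; the filter encoding is assumed.
const queryParams = [
  "from=0",
  "size=5",
  "query=report",
  "fuzziness=5",
  "filters=enabled:true",
];
const response = await platformAdapter.fetchAssistant(serverId, queryParams);
const assistantList = response?.hits?.hits ?? [];
```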
@@ -49,11 +49,19 @@ export function AssistantList({ assistantIDs = [] }: AssistantListProps) {
 assistantIDs,
 });

-const { pagination, runAsync } = usePagination(fetchAssistant, {
+const getAssistants = (params: { current: number; pageSize: number }) => {
+return fetchAssistant(params);
+};
+
+const { pagination, runAsync } = usePagination(getAssistants, {
 defaultPageSize: 5,
-refreshDeps: [currentService?.id, debounceKeyword],
+refreshDeps: [currentService?.id, debounceKeyword, currentService?.enabled],
 onSuccess(data) {
 setAssistants(data.list);
+
+if (data.list.length === 0) {
+setCurrentAssistant(void 0);
+}
 },
 });

@@ -174,7 +182,7 @@ export function AssistantList({ assistantIDs = [] }: AssistantListProps) {
 </PopoverButton>

 <PopoverPanel
-className="absolute z-50 top-full mt-1 left-0 w-60 rounded-xl bg-white dark:bg-[#202126] p-3 text-sm/6 text-[#333] dark:text-[#D8D8D8] shadow-lg border dark:border-white/10 focus:outline-none max-h-[calc(100vh-80px)] overflow-y-auto"
+className="absolute z-50 top-full mt-1 left-0 w-60 rounded-xl bg-white dark:bg-[#202126] p-3 text-sm/6 text-[#333] dark:text-[#D8D8D8] shadow-lg border dark:border-white/10 focus:outline-none max-h-[calc(100vh-150px)] overflow-y-auto"
 onMouseMove={handleMouseMove}
 >
 <div className="flex items-center justify-between text-sm font-bold">
@@ -215,7 +223,7 @@ export function AssistantList({ assistantIDs = [] }: AssistantListProps) {
 placeholder={t("assistant.popover.search")}
 className="w-full h-8 px-2 bg-transparent border rounded-md dark:border-white/10"
 onChange={(event) => {
-setKeyword(event.target.value.trim());
+setKeyword(event.target.value);
 }}
 />
 </VisibleKey>
@@ -12,7 +12,6 @@ import { useChatStore } from "@/stores/chatStore";
 import { useConnectStore } from "@/stores/connectStore";
 import { useWindows } from "@/hooks/useWindows";
 import useMessageChunkData from "@/hooks/useMessageChunkData";
-import useWebSocket from "@/hooks/useWebSocket";
 import { useChatActions } from "@/hooks/useChatActions";
 import { useMessageHandler } from "@/hooks/useMessageHandler";
 import { ChatSidebar } from "./ChatSidebar";
@@ -23,7 +22,6 @@ import type { Chat, StartPage } from "@/types/chat";
 import PrevSuggestion from "@/components/ChatMessage/PrevSuggestion";
 import { useAppStore } from "@/stores/appStore";
 import { useSearchStore } from "@/stores/searchStore";
-// import ReadAloud from "./ReadAloud";
 import { useAuthStore } from "@/stores/authStore";
 import Splash from "./Splash";

@@ -41,12 +39,13 @@ interface ChatAIProps {
 showChatHistory?: boolean;
 assistantIDs?: string[];
 startPage?: StartPage;
+formatUrl?: (data: any) => string;
+instanceId?: string;
 }

 export interface ChatAIRef {
 init: (value: string) => void;
 cancelChat: () => void;
-reconnect: () => void;
 clearChat: () => void;
 }

@@ -67,31 +66,36 @@ const ChatAI = memo(
 showChatHistory,
 assistantIDs,
 startPage,
+formatUrl,
+instanceId,
 },
 ref
 ) => {
 useImperativeHandle(ref, () => ({
 init: init,
 cancelChat: () => cancelChat(activeChat),
-reconnect: reconnect,
 clearChat: clearChat,
 }));

-const { curChatEnd, setCurChatEnd, connected, setConnected } =
-useChatStore();
+const curChatEnd = useChatStore((state) => state.curChatEnd);
+const setCurChatEnd = useChatStore((state) => state.setCurChatEnd);
+
+const isTauri = useAppStore((state) => state.isTauri);

 const isCurrentLogin = useAuthStore((state) => state.isCurrentLogin);
-const visibleStartPage = useConnectStore((state) => {
-return state.visibleStartPage;
+const setIsCurrentLogin = useAuthStore((state) => {
+return state.setIsCurrentLogin;
 });

+const { currentService, visibleStartPage } = useConnectStore();
+
 const addError = useAppStore.getState().addError;

 const [activeChat, setActiveChat] = useState<Chat>();
 const [timedoutShow, setTimedoutShow] = useState(false);

 const curIdRef = useRef("");
+const curSessionIdRef = useRef("");

 const [isSidebarOpenChat, setIsSidebarOpenChat] = useState(isSidebarOpen);
 const [chats, setChats] = useState<Chat[]>([]);
@@ -108,25 +112,28 @@ const ChatAI = memo(
 }, [activeChatProp]);

 useEffect(() => {
-if (askAiServerId || !askAiSessionId || chats.length === 0) return;
+if (!isTauri) return;

-const matched = chats.find((item) => item._id === askAiSessionId);
+if (!currentService?.enabled) {
+setActiveChat(void 0);
+setIsCurrentLogin(false);
+}

-if (matched) {
-onSelectChat(matched);
+if (showChatHistory) {
+getChatHistory();
+}
+}, [currentService?.enabled, showChatHistory]);
+
+useEffect(() => {
+if (askAiServerId || !askAiSessionId) return;
+
+onSelectChat({ _id: askAiSessionId });
+
 setAskAiSessionId(void 0);
-}
-}, [chats, askAiSessionId, askAiServerId]);
+}, [askAiSessionId, askAiServerId]);

 const [Question, setQuestion] = useState<string>("");

-const [websocketSessionId, setWebsocketSessionId] = useState("");
-
-const onWebsocketSessionId = useCallback((sessionId: string) => {
-setWebsocketSessionId(sessionId);
-}, []);
-
 const {
 data: {
 query_intent,
@@ -153,15 +160,6 @@ const ChatAI = memo(

 const dealMsgRef = useRef<((msg: string) => void) | null>(null);

-const clientId = isChatPage ? "standalone" : "popup";
-const { reconnect, updateDealMsg } = useWebSocket({
-clientId,
-connected,
-setConnected,
-dealMsgRef,
-onWebsocketSessionId,
-});
-
 const {
 chatClose,
 cancelChat,
@@ -181,17 +179,21 @@ const ChatAI = memo(
 clearAllChunkData,
 setQuestion,
 curIdRef,
+curSessionIdRef,
 setChats,
+dealMsgRef,
+setLoadingStep,
+isChatPage,
 isSearchActive,
 isDeepThinkActive,
 isMCPActive,
 changeInput,
-websocketSessionId,
-showChatHistory
+showChatHistory,
 );

 const { dealMsg } = useMessageHandler(
 curIdRef,
+curSessionIdRef,
 setCurChatEnd,
 setTimedoutShow,
 (chat) => cancelChat(chat || activeChat),
@@ -199,6 +201,13 @@ const ChatAI = memo(
 handlers
 );

+const updateDealMsg = useCallback(
+(newDealMsg: (msg: string) => void) => {
+dealMsgRef.current = newDealMsg;
+},
+[dealMsgRef]
+);
+
 useEffect(() => {
 if (dealMsg) {
 dealMsgRef.current = dealMsg;
@@ -228,9 +237,9 @@ const ChatAI = memo(
 return;
 }
 if (!activeChat?._id) {
-await createNewChat(value, activeChat, websocketSessionId);
+await createNewChat(value);
 } else {
-await handleSendMessage(value, activeChat, websocketSessionId);
+await handleSendMessage(value, activeChat);
 }
 } catch (error) {
 console.error("Failed to initialize chat:", error);
@@ -242,7 +251,6 @@ const ChatAI = memo(
 activeChat?._id,
 createNewChat,
 handleSendMessage,
-websocketSessionId,
 ]
 );

@@ -254,7 +262,8 @@ const ChatAI = memo(
 const onSelectChat = useCallback(
 async (chat: Chat) => {
 setTimedoutShow(false);
-clearAllChunkData();
+await clearAllChunkData();
 await cancelChat(activeChat);
 await chatClose(activeChat);
 const response = await openSessionChat(chat);
@@ -318,6 +327,7 @@ const ChatAI = memo(
 (chatId: string, title: string) => {
 setChats((prev) => {
 const chatIndex = prev.findIndex((chat) => chat._id === chatId);

 if (chatIndex === -1) return prev;

 const modifiedChat = {
@@ -326,8 +336,8 @@ const ChatAI = memo(
 };

 const result = [...prev];
-result.splice(chatIndex, 1);
-return [modifiedChat, ...result];
+result.splice(chatIndex, 1, modifiedChat);
+return result;
 });

 if (activeChat?._id === chatId) {
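The rename handler now swaps the renamed chat in place instead of removing it and prepending the copy, so the visible order of the history list no longer changes on rename. A tiny illustration in plain array terms (values are made up):

```ts
// splice(index, 1, replacement) removes one element at index and inserts the replacement there.
const chats = ["a", "b", "c"];
const result = [...chats];
result.splice(1, 1, "b-renamed");
console.log(result); // ["a", "b-renamed", "c"] (previously the renamed chat was moved to the front)
```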
@@ -358,6 +368,7 @@ const ChatAI = memo(
 )}
 <div
 data-tauri-drag-region
+data-chat-instance={instanceId}
 className={`flex flex-col rounded-md h-full overflow-hidden relative`}
 >
 <ChatHeader
@@ -366,16 +377,15 @@ const ChatAI = memo(
 setIsSidebarOpen={toggleSidebar}
 isSidebarOpen={isSidebarOpenChat}
 activeChat={activeChat}
-reconnect={reconnect}
 isChatPage={isChatPage}
 showChatHistory={showChatHistory}
 assistantIDs={assistantIDs}
 />

-{isCurrentLogin ? (<>
+{isCurrentLogin ? (
+<>
 <ChatContent
 activeChat={activeChat}
-curChatEnd={curChatEnd}
 query_intent={query_intent}
 tools={tools}
 fetch_source={fetch_source}
@@ -390,11 +400,11 @@ const ChatAI = memo(
 handleSendMessage(value, activeChat)
 }
 getFileUrl={getFileUrl}
+formatUrl={formatUrl}
+curIdRef={curIdRef}
 />
 <Splash assistantIDs={assistantIDs} startPage={startPage} />
 </>

 ) : (
 <ConnectPrompt />
 )}
@@ -402,8 +412,6 @@ const ChatAI = memo(
 {!activeChat?._id && !visibleStartPage && (
 <PrevSuggestion sendMessage={init} />
 )}

-{/* <ReadAloud /> */}
 </div>
 </>
 );
@@ -3,17 +3,16 @@ import { useTranslation } from "react-i18next";

 import { ChatMessage } from "@/components/ChatMessage";
 import { Greetings } from "./Greetings";
-import FileList from "@/components/Assistant/FileList";
+// import FileList from "@/components/Assistant/FileList";
 import { useChatScroll } from "@/hooks/useChatScroll";
 import { useChatStore } from "@/stores/chatStore";
 import type { Chat, IChunkData } from "@/types/chat";
 import { useConnectStore } from "@/stores/connectStore";
-import SessionFile from "./SessionFile";
+// import SessionFile from "./SessionFile";
 import ScrollToBottom from "@/components/Common/ScrollToBottom";

 interface ChatContentProps {
 activeChat?: Chat;
-curChatEnd: boolean;
 query_intent?: IChunkData;
 tools?: IChunkData;
 fetch_source?: IChunkData;
@@ -26,11 +25,12 @@ interface ChatContentProps {
 Question: string;
 handleSendMessage: (content: string, newChat?: Chat) => void;
 getFileUrl: (path: string) => string;
+formatUrl?: (data: any) => string;
+curIdRef: React.MutableRefObject<string>;
 }

 export const ChatContent = ({
 activeChat,
-curChatEnd,
 query_intent,
 tools,
 fetch_source,
@@ -42,21 +42,24 @@ export const ChatContent = ({
 timedoutShow,
 Question,
 handleSendMessage,
-getFileUrl,
+formatUrl,
 }: ChatContentProps) => {
-const sessionId = useConnectStore((state) => state.currentSessionId);
+// const sessionId = useConnectStore((state) => state.currentSessionId);
 const setCurrentSessionId = useConnectStore((state) => {
 return state.setCurrentSessionId;
 });

 const { t } = useTranslation();

-const uploadFiles = useChatStore((state) => state.uploadFiles);
+// const uploadFiles = useChatStore((state) => state.uploadFiles);
 const messagesEndRef = useRef<HTMLDivElement>(null);

 const { scrollToBottom } = useChatScroll(messagesEndRef);
 const scrollRef = useRef<HTMLDivElement>(null);
 const [isAtBottom, setIsAtBottom] = useState(true);
+const visibleStartPage = useConnectStore((state) => state.visibleStartPage);
+
+const curChatEnd = useChatStore((state) => state.curChatEnd);

 useEffect(() => {
 setIsAtBottom(true);
@@ -66,7 +69,7 @@ export const ChatContent = ({
 useEffect(() => {
 scrollToBottom();
 }, [
-activeChat?.id,
+activeChat?._id,
 query_intent?.message_chunk,
 fetch_source?.message_chunk,
 pick_source?.message_chunk,
@@ -100,7 +103,8 @@ export const ChatContent = ({
 className="flex-1 w-full overflow-x-hidden overflow-y-auto border-t border-[rgba(0,0,0,0.1)] dark:border-[rgba(255,255,255,0.15)] custom-scrollbar relative"
 onScroll={handleScroll}
 >
-{(!activeChat || activeChat?.messages?.length === 0) && <Greetings />}
+{(!activeChat || activeChat?.messages?.length === 0) &&
+!visibleStartPage && <Greetings />}

 {activeChat?.messages?.map((message, index) => (
 <ChatMessage
@@ -119,7 +123,7 @@ export const ChatContent = ({
 deep_read ||
 think ||
 response) &&
-activeChat?._id ? (
+activeChat?._source?.id ? (
 <ChatMessage
 key={"current"}
 message={{
@@ -142,6 +146,7 @@ export const ChatContent = ({
 think={think}
 response={response}
 loadingStep={loadingStep}
+formatUrl={formatUrl}
 />
 ) : null}

@@ -163,13 +168,13 @@ export const ChatContent = ({
 <div ref={messagesEndRef} />
 </div>

-{sessionId && uploadFiles.length > 0 && (
+{/* {uploadFiles.length > 0 && (
 <div key={sessionId} className="max-h-[120px] overflow-auto p-2">
-<FileList sessionId={sessionId} getFileUrl={getFileUrl} />
+<FileList />
 </div>
-)}
+)} */}

-{sessionId && <SessionFile sessionId={sessionId} />}
+{/* {sessionId && <SessionFile sessionId={sessionId} />} */}

 <ScrollToBottom scrollRef={scrollRef} isAtBottom={isAtBottom} />
 </div>
@@ -7,14 +7,12 @@ import PinIcon from "@/icons/Pin";
 import WindowsFullIcon from "@/icons/WindowsFull";
 import { useAppStore } from "@/stores/appStore";
 import type { Chat } from "@/types/chat";
-import platformAdapter from "@/utils/platformAdapter";
 import VisibleKey from "../Common/VisibleKey";
 import { useShortcutsStore } from "@/stores/shortcutsStore";
 import { HISTORY_PANEL_ID } from "@/constants";
 import { AssistantList } from "./AssistantList";
 import { ServerList } from "./ServerList";
-import { Server } from "@/types/server"
+import { useTogglePin } from "@/hooks/useTogglePin";

 interface ChatHeaderProps {
 clearChat: () => void;
@@ -22,7 +20,6 @@ interface ChatHeaderProps {
 setIsSidebarOpen: () => void;
 isSidebarOpen: boolean;
 activeChat: Chat | undefined;
-reconnect: (server?: Server) => void;
 isChatPage?: boolean;
 showChatHistory?: boolean;
 assistantIDs?: string[];
@@ -34,37 +31,15 @@ export function ChatHeader({
 isSidebarOpen,
 setIsSidebarOpen,
 activeChat,
-reconnect,
 isChatPage = false,
 showChatHistory = true,
 assistantIDs,
 }: ChatHeaderProps) {
-const isPinned = useAppStore((state) => state.isPinned);
-const setIsPinned = useAppStore((state) => state.setIsPinned);
+const { isTauri } = useAppStore();
+const { isPinned, togglePin } = useTogglePin();

-const isTauri = useAppStore((state) => state.isTauri);
-const historicalRecords = useShortcutsStore((state) => {
-return state.historicalRecords;
-});
-const newSession = useShortcutsStore((state) => {
-return state.newSession;
-});
-const fixedWindow = useShortcutsStore((state) => {
-return state.fixedWindow;
-});
-
-const external = useShortcutsStore((state) => state.external);
-
-const togglePin = async () => {
-try {
-const newPinned = !isPinned;
-await platformAdapter.setAlwaysOnTop(newPinned);
-setIsPinned(newPinned);
-} catch (err) {
-console.error("Failed to toggle window pin state:", err);
-setIsPinned(isPinned);
-}
-};
+const { historicalRecords, newSession, fixedWindow, external } =
+useShortcutsStore();

 return (
 <header
@@ -98,7 +73,11 @@ export function ChatHeader({
 onClick={clearChat}
 className="p-2 py-1 rounded-lg hover:bg-gray-100 dark:hover:bg-gray-800"
 >
-<VisibleKey shortcutClassName="top-2.5" shortcut={newSession} onKeyPress={clearChat}>
+<VisibleKey
+shortcutClassName="top-2.5"
+shortcut={newSession}
+onKeyPress={clearChat}
+>
 <MessageSquarePlus className="h-4 w-4 relative top-0.5" />
 </VisibleKey>
 </button>
@@ -124,10 +103,7 @@ export function ChatHeader({
 </VisibleKey>
 </button>

-<ServerList
-reconnect={reconnect}
-clearChat={clearChat}
-/>
+<ServerList clearChat={clearChat} />

 {isChatPage ? null : (
 <button className="inline-flex" onClick={onOpenChatAI}>
|
|||||||
>
|
>
|
||||||
{isSidebarOpen && (
|
{isSidebarOpen && (
|
||||||
<HistoryList
|
<HistoryList
|
||||||
id={HISTORY_PANEL_ID}
|
historyPanelId={HISTORY_PANEL_ID}
|
||||||
list={chats}
|
chats={chats}
|
||||||
active={activeChat}
|
active={activeChat}
|
||||||
onSearch={onSearch}
|
onSearch={onSearch}
|
||||||
onRefresh={fetchChatHistory}
|
onRefresh={fetchChatHistory}
|
||||||
|
|||||||
@@ -4,22 +4,16 @@ import { X } from "lucide-react";
|
|||||||
import { useAsyncEffect } from "ahooks";
|
import { useAsyncEffect } from "ahooks";
|
||||||
import { useTranslation } from "react-i18next";
|
import { useTranslation } from "react-i18next";
|
||||||
|
|
||||||
import { useChatStore } from "@/stores/chatStore";
|
import { useChatStore, UploadFile } from "@/stores/chatStore";
|
||||||
import { useConnectStore } from "@/stores/connectStore";
|
import { useConnectStore } from "@/stores/connectStore";
|
||||||
import FileIcon from "../Common/Icons/FileIcon";
|
|
||||||
import platformAdapter from "@/utils/platformAdapter";
|
import platformAdapter from "@/utils/platformAdapter";
|
||||||
|
import Tooltip2 from "../Common/Tooltip2";
|
||||||
|
import FileIcon from "../Common/Icons/FileIcon";
|
||||||
|
|
||||||
interface FileListProps {
|
const FileList = () => {
|
||||||
sessionId: string;
|
|
||||||
getFileUrl: (path: string) => string;
|
|
||||||
}
|
|
||||||
|
|
||||||
const FileList = (props: FileListProps) => {
|
|
||||||
const { sessionId } = props;
|
|
||||||
const { t } = useTranslation();
|
const { t } = useTranslation();
|
||||||
const uploadFiles = useChatStore((state) => state.uploadFiles);
|
const { uploadFiles, setUploadFiles } = useChatStore();
|
||||||
const setUploadFiles = useChatStore((state) => state.setUploadFiles);
|
const { currentService } = useConnectStore();
|
||||||
const currentService = useConnectStore((state) => state.currentService);
|
|
||||||
|
|
||||||
const serverId = useMemo(() => {
|
const serverId = useMemo(() => {
|
||||||
return currentService.id;
|
return currentService.id;
|
||||||
@@ -39,29 +33,41 @@ const FileList = (props: FileListProps) => {
|
|||||||
|
|
||||||
if (uploaded) continue;
|
if (uploaded) continue;
|
||||||
|
|
||||||
|
try {
|
||||||
const attachmentIds: any = await platformAdapter.commands(
|
const attachmentIds: any = await platformAdapter.commands(
|
||||||
"upload_attachment",
|
"upload_attachment",
|
||||||
{
|
{
|
||||||
serverId,
|
serverId,
|
||||||
sessionId,
|
|
||||||
filePaths: [path],
|
filePaths: [path],
|
||||||
}
|
}
|
||||||
);
|
);
|
||||||
|
|
||||||
if (!attachmentIds) continue;
|
if (!attachmentIds) {
|
||||||
|
throw new Error("Failed to get attachment id");
|
||||||
|
} else {
|
||||||
Object.assign(item, {
|
Object.assign(item, {
|
||||||
uploaded: true,
|
uploaded: true,
|
||||||
attachmentId: attachmentIds[0],
|
attachmentId: attachmentIds[0],
|
||||||
});
|
});
|
||||||
|
}
|
||||||
|
|
||||||
setUploadFiles(uploadFiles);
|
setUploadFiles(uploadFiles);
|
||||||
|
} catch (error) {
|
||||||
|
Object.assign(item, {
|
||||||
|
uploadFailed: true,
|
||||||
|
failedMessage: String(error),
|
||||||
|
});
|
||||||
|
}
|
||||||
}
|
}
|
||||||
}, [uploadFiles]);
|
}, [uploadFiles]);
|
||||||
|
|
||||||
const deleteFile = async (id: string, attachmentId: string) => {
|
const deleteFile = async (file: UploadFile) => {
|
||||||
|
const { id, uploadFailed, attachmentId } = file;
|
||||||
|
|
||||||
setUploadFiles(uploadFiles.filter((file) => file.id !== id));
|
setUploadFiles(uploadFiles.filter((file) => file.id !== id));
|
||||||
|
|
||||||
|
if (uploadFailed) return;
|
||||||
|
|
||||||
platformAdapter.commands("delete_attachment", {
|
platformAdapter.commands("delete_attachment", {
|
||||||
serverId,
|
serverId,
|
||||||
id: attachmentId,
|
id: attachmentId,
|
||||||
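Uploads that throw are now kept in the list and flagged instead of being dropped, and deleteFile skips the backend delete call for them. For reference, a hedged sketch of the UploadFile fields this component relies on, inferred from the destructuring in this diff rather than from the store's actual declaration:

```ts
// Assumed shape; the real type lives in "@/stores/chatStore".
interface UploadFile {
  id: string;
  name: string;
  path: string;
  extname?: string;
  size: number;
  uploaded?: boolean;
  attachmentId?: string;
  uploadFailed?: boolean; // set in the catch block around upload_attachment
  failedMessage?: string; // String(error), surfaced through Tooltip2
}
```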
@@ -71,30 +77,46 @@ const FileList = (props: FileListProps) => {
 return (
 <div className="flex flex-wrap gap-y-2 -mx-1 text-sm">
 {uploadFiles.map((file) => {
-const { id, name, extname, size, uploaded, attachmentId } = file;
+const {
+id,
+name,
+path,
+extname,
+size,
+uploaded,
+attachmentId,
+uploadFailed,
+failedMessage,
+} = file;
+
 return (
 <div key={id} className="w-1/3 px-1">
 <div className="relative group flex items-center gap-1 p-1 rounded-[4px] bg-[#dedede] dark:bg-[#202126]">
-{attachmentId && (
+{(uploadFailed || attachmentId) && (
 <div
 className="absolute flex justify-center items-center size-[14px] bg-red-600 top-0 right-0 rounded-full cursor-pointer translate-x-[5px] -translate-y-[5px] transition opacity-0 group-hover:opacity-100 "
 onClick={() => {
-deleteFile(id, attachmentId);
+deleteFile(file);
 }}
 >
 <X className="size-[10px] text-white" />
 </div>
 )}

-<FileIcon extname={extname} />
+<FileIcon path={path} />

 <div className="flex flex-col justify-between overflow-hidden">
 <div className="truncate text-[#333333] dark:text-[#D8D8D8]">
 {name}
 </div>

-<div className="text-xs text-[#999999]">
+<div className="text-xs">
+{uploadFailed && failedMessage ? (
+<Tooltip2 content={failedMessage}>
+<span className="text-red-500">Upload Failed</span>
+</Tooltip2>
+) : (
+<div className="text-[#999]">
 {uploaded ? (
 <div className="flex gap-2">
 {extname && <span>{extname}</span>}
@@ -106,6 +128,8 @@ const FileList = (props: FileListProps) => {
 <span>{t("assistant.fileList.uploading")}</span>
 )}
 </div>
+)}
+</div>
 </div>
 </div>
 </div>
@@ -19,11 +19,10 @@ import { useAuthStore } from "@/stores/authStore";
 import { useSearchStore } from "@/stores/searchStore";

 interface ServerListProps {
-reconnect: (server?: IServer) => void;
 clearChat: () => void;
 }

-export function ServerList({ reconnect, clearChat }: ServerListProps) {
+export function ServerList({ clearChat }: ServerListProps) {
 const { t } = useTranslation();

 const isCurrentLogin = useAuthStore((state) => state.isCurrentLogin);
@@ -38,23 +37,33 @@ export function ServerList({ reconnect, clearChat }: ServerListProps) {

 const [serverList, setServerList] = useState<IServer[]>([]);
 const [isRefreshing, setIsRefreshing] = useState(false);
+const [highlightId, setHighlightId] = useState<string>("");
+
 const askAiServerId = useSearchStore((state) => {
 return state.askAiServerId;
 });
 const setAskAiServerId = useSearchStore((state) => {
 return state.setAskAiServerId;
 });

+const popoverRef = useRef<HTMLDivElement>(null);
 const serverListButtonRef = useRef<HTMLButtonElement>(null);

 const fetchServers = useCallback(
 async (resetSelection: boolean) => {
-platformAdapter
-.commands("list_coco_servers")
-.then((res: any) => {
-const enabledServers = (res as IServer[]).filter(
+platformAdapter.commands("list_coco_servers").then((res: any) => {
+console.log("list_coco_servers", res);
+if (!Array.isArray(res)) {
+// If res is not an array, it might be an error message or something else.
+// Log it and don't proceed.
+// console.log("list_coco_servers did not return an array:", res);
+setServerList([]); // Clear the list or handle as appropriate
+return;
+}
+const enabledServers = (res as IServer[])?.filter(
 (server) => server.enabled && server.available
 );
-//console.log("list_coco_servers", enabledServers);
 setServerList(enabledServers);

 if (resetSelection && enabledServers.length > 0) {
@@ -68,14 +77,17 @@ export function ServerList({ reconnect, clearChat }: ServerListProps) {
 switchServer(enabledServers[enabledServers.length - 1]);
 }
 }
-})
-.catch((err: any) => {
-console.error(err);
 });
 },
 [currentService?.id]
 );

+useEffect(() => {
+if (!isTauri) return;
+
+fetchServers(true);
+}, [currentService?.enabled]);
+
 useEffect(() => {
 if (!askAiServerId || serverList.length === 0) return;

@@ -133,38 +145,56 @@ export function ServerList({ reconnect, clearChat }: ServerListProps) {
 }
 //
 setIsCurrentLogin(true);
-// The Rust backend will automatically disconnect,
-// so we don't need to handle disconnection on the frontend
-// src-tauri/src/server/websocket.rs
-reconnect && reconnect(server);
 } catch (error) {
 console.error("switchServer:", error);
 }
 };

-useKeyPress(["uparrow", "downarrow"], (_, key) => {
+useKeyPress(
+["uparrow", "downarrow", "enter"],
+(event, key) => {
 const isClose = isNil(serverListButtonRef.current?.dataset["open"]);
 const length = serverList.length;

 if (isClose || length <= 1) return;

+event.stopPropagation();
+event.preventDefault();
+
 const currentIndex = serverList.findIndex((server) => {
-return server.id === currentService?.id;
+return (
+server.id === (highlightId === "" ? currentService?.id : highlightId)
+);
 });

 let nextIndex = currentIndex;

 if (key === "uparrow") {
 nextIndex = currentIndex > 0 ? currentIndex - 1 : length - 1;
+setHighlightId(serverList[nextIndex].id);
 } else if (key === "downarrow") {
 nextIndex = currentIndex < serverList.length - 1 ? currentIndex + 1 : 0;
+setHighlightId(serverList[nextIndex].id);
+} else if (key === "enter" && currentIndex >= 0) {
+if (document.activeElement instanceof HTMLTextAreaElement) return;
+const selectedServer = serverList[currentIndex];
+if (selectedServer) {
+switchServer(selectedServer);
+serverListButtonRef.current?.click();
 }
+}
+},
+{
+target: popoverRef,
+}
+);

-switchServer(serverList[nextIndex]);
-});
+const handleMouseMove = useCallback(() => {
+setHighlightId("");
+}, []);

 return (
-<Popover className="relative">
+<Popover ref={popoverRef} className="relative">
 <PopoverButton ref={serverListButtonRef} className="flex items-center">
 <VisibleKey
 shortcut={serviceList}
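Keyboard handling is now scoped to the popover through the target option of ahooks' useKeyPress and reacts to Enter as well as the arrow keys. A minimal, hedged sketch of the same pattern in isolation (component and ref names are illustrative, not from this codebase):

```tsx
import { useRef } from "react";
import { useKeyPress } from "ahooks";

function ScopedKeyNav() {
  const panelRef = useRef<HTMLDivElement>(null);

  // Only fires while the event originates inside panelRef, mirroring the popover scoping above.
  useKeyPress(
    ["uparrow", "downarrow", "enter"],
    (event, key) => {
      event.preventDefault();
      console.log("pressed", key);
    },
    { target: panelRef }
  );

  return (
    <div ref={panelRef} tabIndex={0}>
      Use the arrow keys or Enter here
    </div>
  );
}
```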
@@ -176,11 +206,14 @@ export function ServerList({ reconnect, clearChat }: ServerListProps) {
 </VisibleKey>
 </PopoverButton>

-<PopoverPanel className="absolute right-0 z-10 mt-2 min-w-[240px] bg-white dark:bg-[#202126] rounded-lg shadow-lg border border-gray-200 dark:border-gray-700">
+<PopoverPanel
+onMouseMove={handleMouseMove}
+className="absolute right-0 z-10 mt-2 min-w-[240px] bg-white dark:bg-[#202126] rounded-lg shadow-lg border border-gray-200 dark:border-gray-700"
+>
 <div className="p-3">
 <div className="flex items-center justify-between mb-3 whitespace-nowrap">
 <h3 className="text-sm font-medium text-gray-900 dark:text-gray-100">
-Servers
+{t("assistant.chat.servers")}
 </h3>
 <div className="flex items-center gap-2">
 <button
@@ -212,8 +245,10 @@ export function ServerList({ reconnect, clearChat }: ServerListProps) {
 <div
 key={server.id}
 onClick={() => switchServer(server)}
-className={`w-full flex items-center justify-between gap-1 p-2 rounded-lg transition-colors whitespace-nowrap ${
-currentService?.id === server.id
+className={`w-full flex items-center justify-between gap-1 p-2 rounded-lg transition-colors whitespace-nowrap
+${
+currentService?.id === server.id ||
+highlightId === server.id
 ? "bg-gray-100 dark:bg-gray-800"
 : "hover:bg-gray-50 dark:hover:bg-gray-800/50"
 }`}
@@ -233,7 +268,8 @@ export function ServerList({ reconnect, clearChat }: ServerListProps) {
 {server.name}
 </div>
 <div className="text-xs text-gray-500 dark:text-gray-400 truncate max-w-[200px]">
-AI Assistant: {server.stats?.assistant_count || 1}
+{t("assistant.chat.aiAssistant")}:{" "}
+{server.stats?.assistant_count || 1}
 </div>
 </div>
 </div>
@@ -6,10 +6,10 @@ import {useTranslation} from "react-i18next";

 import { useConnectStore } from "@/stores/connectStore";
 import Checkbox from "@/components/Common/Checkbox";
-import FileIcon from "@/components/Common/Icons/FileIcon";
 import { AttachmentHit } from "@/types/commands";
 import { useAppStore } from "@/stores/appStore";
 import platformAdapter from "@/utils/platformAdapter";
+import FileIcon from "../Common/Icons/FileIcon";

 interface SessionFileProps {
 sessionId: string;
@@ -37,6 +37,8 @@ const SessionFile = (props: SessionFileProps) => {

 const getUploadedFiles = async () => {
 if (isTauri) {
+console.log("sessionId", sessionId);
+
 const response: any = await platformAdapter.commands("get_attachment", {
 serverId,
 sessionId,
@@ -91,8 +93,7 @@ const SessionFile = (props: SessionFileProps) => {
 >
 <Files className="size-5 text-white" />

-<div
-className="absolute -top-2 -right-2 flex items-center justify-center min-w-4 h-4 px-1 text-white text-xs rounded-full bg-[#3DB954]">
+<div className="absolute -top-2 -right-2 flex items-center justify-center min-w-4 h-4 px-1 text-white text-xs rounded-full bg-[#3DB954]">
 {uploadedFiles?.length}
 </div>
 </div>
@@ -135,16 +136,16 @@ const SessionFile = (props: SessionFileProps) => {
 key={id}
 className="flex items-center justify-between min-h-12 px-2 rounded-[4px] bg-[#ededed] dark:bg-[#202126]"
 >
-<div className="flex items-center gap-2">
-<FileIcon extname={icon}/>
+<div className="flex items-center gap-1">
+<FileIcon path={name} />

 <div>
 <div className="text-sm leading-4 text-[#333] dark:text-[#D8D8D8]">
 {name}
 </div>
 <div className="text-xs text-[#999]">
-<span>{icon}</span>
-<span className="pl-2">
+{icon && <span className="pr-2">{icon}</span>}
+<span>
 {filesize(size, { standard: "jedec", spacer: "" })}
 </span>
 </div>
 </div>