diff --git a/.github/workflows/ci.yml b/.github/workflows/ci.yml index 2531fe35..4b52d77c 100644 --- a/.github/workflows/ci.yml +++ b/.github/workflows/ci.yml @@ -1,30 +1,57 @@ -name: ci +name: ci + on: push: branches: - - master - main - - prep-0.26.0 + - master + release: + types: [published] + permissions: contents: write + packages: write + jobs: - deploy: + container-build: + if: github.event_name == 'release' runs-on: ubuntu-latest steps: - uses: actions/checkout@v4 - - name: Configure Git Credentials - run: | - git config user.name github-actions[bot] - git config user.email 41898282+github-actions[bot]@users.noreply.github.com - - uses: actions/setup-python@v5 + + - name: Log in to GHCR + uses: docker/login-action@v3 with: - python-version: 3.x - - run: echo "cache_id=$(date --utc '+%V')" >> $GITHUB_ENV + registry: ghcr.io + username: ${{ github.actor }} + password: ${{ secrets.GITHUB_TOKEN }} + + - name: Build & push + uses: docker/build-push-action@v5 + with: + context: . + file: Dockerfile + push: true + tags: | + ghcr.io/${{ github.repository }}:latest + ghcr.io/${{ github.repository }}:${{ github.ref_name }} + + deploy-docs: + if: github.event_name == 'release' + runs-on: ubuntu-latest + steps: + - uses: actions/checkout@v4 + - name: Configure Git credentials + run: | + git config user.name "github-actions[bot]" + git config user.email "41898282+github-actions[bot]@users.noreply.github.com" + - uses: actions/setup-python@v5 + with: { python-version: '3.x' } + - run: echo "cache_id=$(date --utc '+%V')" >> $GITHUB_ENV - uses: actions/cache@v4 with: key: mkdocs-material-${{ env.cache_id }} path: .cache - restore-keys: | - mkdocs-material- + restore-keys: mkdocs-material- - run: pip install mkdocs-material mkdocs-awesome-pages-plugin mkdocs-glightbox - run: mkdocs gh-deploy --force \ No newline at end of file diff --git a/.github/workflows/test-container-build.yml b/.github/workflows/test-container-build.yml new file mode 100644 index 00000000..9f63b960 --- /dev/null +++ b/.github/workflows/test-container-build.yml @@ -0,0 +1,32 @@ +name: test-container-build + +on: + push: + branches: [ 'prep-*' ] + +permissions: + contents: read + packages: write + +jobs: + container-build: + runs-on: ubuntu-latest + steps: + - uses: actions/checkout@v4 + + - name: Log in to GHCR + uses: docker/login-action@v3 + with: + registry: ghcr.io + username: ${{ github.actor }} + password: ${{ secrets.GITHUB_TOKEN }} + + - name: Build & push + uses: docker/build-push-action@v5 + with: + context: . 
+ file: Dockerfile + push: true + # Tag with prep suffix to avoid conflicts with production + tags: | + ghcr.io/${{ github.repository }}:${{ github.ref_name }} \ No newline at end of file diff --git a/.gitignore b/.gitignore index d2b11bf3..724ee4d5 100644 --- a/.gitignore +++ b/.gitignore @@ -8,11 +8,20 @@ talemate_env chroma config.yaml +.cursor +.claude # uv .venv/ templates/llm-prompt/user/*.jinja2 templates/world-state/*.yaml +tts/voice/piper/*.onnx +tts/voice/piper/*.json +tts/voice/kokoro/*.pt +tts/voice/xtts2/*.wav +tts/voice/chatterbox/*.wav +tts/voice/f5tts/*.wav +tts/voice/voice-library.json scenes/ !scenes/infinity-quest-dynamic-scenario/ !scenes/infinity-quest-dynamic-scenario/assets/ @@ -21,4 +30,5 @@ scenes/ !scenes/infinity-quest/assets/ !scenes/infinity-quest/infinity-quest.json tts_voice_samples/*.wav -third-party-docs/ \ No newline at end of file +third-party-docs/ +legacy-state-reinforcements.yaml \ No newline at end of file diff --git a/Dockerfile b/Dockerfile index b090c566..e5897210 100644 --- a/Dockerfile +++ b/Dockerfile @@ -35,18 +35,9 @@ COPY pyproject.toml uv.lock /app/ # Copy the Python source code (needed for editable install) COPY ./src /app/src -# Create virtual environment and install dependencies +# Create virtual environment and install dependencies (includes CUDA support via pyproject.toml) RUN uv sync -# Conditional PyTorch+CUDA install -ARG CUDA_AVAILABLE=false -RUN . /app/.venv/bin/activate && \ - if [ "$CUDA_AVAILABLE" = "true" ]; then \ - echo "Installing PyTorch with CUDA support..." && \ - uv pip uninstall torch torchaudio && \ - uv pip install torch~=2.7.0 torchaudio~=2.7.0 --index-url https://download.pytorch.org/whl/cu128; \ - fi - # Stage 3: Final image FROM python:3.11-slim diff --git a/docker-compose.manual.yml b/docker-compose.manual.yml new file mode 100644 index 00000000..178f2475 --- /dev/null +++ b/docker-compose.manual.yml @@ -0,0 +1,20 @@ +version: '3.8' + +services: + talemate: + build: + context: . + dockerfile: Dockerfile + ports: + - "${FRONTEND_PORT:-8080}:8080" + - "${BACKEND_PORT:-5050}:5050" + volumes: + - ./config.yaml:/app/config.yaml + - ./scenes:/app/scenes + - ./templates:/app/templates + - ./chroma:/app/chroma + - ./tts:/app/tts + environment: + - PYTHONUNBUFFERED=1 + - PYTHONPATH=/app/src:$PYTHONPATH + command: ["uv", "run", "src/talemate/server/run.py", "runserver", "--host", "0.0.0.0", "--port", "5050", "--frontend-host", "0.0.0.0", "--frontend-port", "8080"] \ No newline at end of file diff --git a/docker-compose.yml b/docker-compose.yml index 5ff24a95..1a8a12e1 100644 --- a/docker-compose.yml +++ b/docker-compose.yml @@ -2,11 +2,7 @@ version: '3.8' services: talemate: - build: - context: . 
- dockerfile: Dockerfile - args: - - CUDA_AVAILABLE=${CUDA_AVAILABLE:-false} + image: ghcr.io/vegu-ai/talemate:latest ports: - "${FRONTEND_PORT:-8080}:8080" - "${BACKEND_PORT:-5050}:5050" @@ -15,6 +11,7 @@ services: - ./scenes:/app/scenes - ./templates:/app/templates - ./chroma:/app/chroma + - ./tts:/app/tts environment: - PYTHONUNBUFFERED=1 - PYTHONPATH=/app/src:$PYTHONPATH diff --git a/docs/getting-started/advanced/change-host-and-port.md b/docs/getting-started/advanced/change-host-and-port.md index 64ea2e89..56b95d86 100644 --- a/docs/getting-started/advanced/change-host-and-port.md +++ b/docs/getting-started/advanced/change-host-and-port.md @@ -27,10 +27,10 @@ uv run src\talemate\server\run.py runserver --host 0.0.0.0 --port 1234 ### Letting the frontend know about the new host and port -Copy `talemate_frontend/example.env.development.local` to `talemate_frontend/.env.production.local` and edit the `VUE_APP_TALEMATE_BACKEND_WEBSOCKET_URL`. +Copy `talemate_frontend/example.env.development.local` to `talemate_frontend/.env.production.local` and edit the `VITE_TALEMATE_BACKEND_WEBSOCKET_URL`. ```env -VUE_APP_TALEMATE_BACKEND_WEBSOCKET_URL=ws://localhost:1234 +VITE_TALEMATE_BACKEND_WEBSOCKET_URL=ws://localhost:1234 ``` Next rebuild the frontend. diff --git a/docs/getting-started/installation/docker.md b/docs/getting-started/installation/docker.md index 510db31d..9e4947c6 100644 --- a/docs/getting-started/installation/docker.md +++ b/docs/getting-started/installation/docker.md @@ -1,22 +1,15 @@ -!!! example "Experimental" - Talemate through docker has not received a lot of testing from me, so please let me know if you encounter any issues. - - You can do so by creating an issue on the [:material-github: GitHub repository](https://github.com/vegu-ai/talemate) - ## Quick install instructions 1. `git clone https://github.com/vegu-ai/talemate.git` 1. `cd talemate` 1. copy config file 1. linux: `cp config.example.yaml config.yaml` - 1. windows: `copy config.example.yaml config.yaml` -1. If your host has a CUDA compatible Nvidia GPU - 1. Windows (via PowerShell): `$env:CUDA_AVAILABLE="true"; docker compose up` - 1. Linux: `CUDA_AVAILABLE=true docker compose up` -1. If your host does **NOT** have a CUDA compatible Nvidia GPU - 1. Windows: `docker compose up` - 1. Linux: `docker compose up` + 1. windows: `copy config.example.yaml config.yaml` (or just copy the file and rename it via the file explorer) +1. `docker compose up` 1. Navigate your browser to http://localhost:8080 +!!! info "Pre-built Images" + The default setup uses pre-built images from GitHub Container Registry that include CUDA support by default. To manually build the container instead, use `docker compose -f docker-compose.manual.yml up --build`. + !!! note When connecting local APIs running on the hostmachine (e.g. text-generation-webui), you need to use `host.docker.internal` as the hostname. 
diff --git a/docs/img/0.32.0/add-chatterbox-voice.png b/docs/img/0.32.0/add-chatterbox-voice.png new file mode 100644 index 00000000..17e70fcd Binary files /dev/null and b/docs/img/0.32.0/add-chatterbox-voice.png differ diff --git a/docs/img/0.32.0/add-elevenlabs-voice.png b/docs/img/0.32.0/add-elevenlabs-voice.png new file mode 100644 index 00000000..2fc75fed Binary files /dev/null and b/docs/img/0.32.0/add-elevenlabs-voice.png differ diff --git a/docs/img/0.32.0/add-f5tts-voice.png b/docs/img/0.32.0/add-f5tts-voice.png new file mode 100644 index 00000000..8e47c836 Binary files /dev/null and b/docs/img/0.32.0/add-f5tts-voice.png differ diff --git a/docs/img/0.32.0/character-voice-assignment.png b/docs/img/0.32.0/character-voice-assignment.png new file mode 100644 index 00000000..40975511 Binary files /dev/null and b/docs/img/0.32.0/character-voice-assignment.png differ diff --git a/docs/img/0.32.0/chatterbox-api-settings.png b/docs/img/0.32.0/chatterbox-api-settings.png new file mode 100644 index 00000000..b28255cb Binary files /dev/null and b/docs/img/0.32.0/chatterbox-api-settings.png differ diff --git a/docs/img/0.32.0/chatterbox-parameters.png b/docs/img/0.32.0/chatterbox-parameters.png new file mode 100644 index 00000000..d6b39769 Binary files /dev/null and b/docs/img/0.32.0/chatterbox-parameters.png differ diff --git a/docs/img/0.32.0/client-reasoning-2.png b/docs/img/0.32.0/client-reasoning-2.png new file mode 100644 index 00000000..99a48fb2 Binary files /dev/null and b/docs/img/0.32.0/client-reasoning-2.png differ diff --git a/docs/img/0.32.0/client-reasoning.png b/docs/img/0.32.0/client-reasoning.png new file mode 100644 index 00000000..ebe91886 Binary files /dev/null and b/docs/img/0.32.0/client-reasoning.png differ diff --git a/docs/img/0.32.0/elevenlabs-api-settings.png b/docs/img/0.32.0/elevenlabs-api-settings.png new file mode 100644 index 00000000..886bfff7 Binary files /dev/null and b/docs/img/0.32.0/elevenlabs-api-settings.png differ diff --git a/docs/img/0.32.0/elevenlabs-copy-voice-id.png b/docs/img/0.32.0/elevenlabs-copy-voice-id.png new file mode 100644 index 00000000..6f7b2057 Binary files /dev/null and b/docs/img/0.32.0/elevenlabs-copy-voice-id.png differ diff --git a/docs/img/0.32.0/f5tts-api-settings.png b/docs/img/0.32.0/f5tts-api-settings.png new file mode 100644 index 00000000..21eb184c Binary files /dev/null and b/docs/img/0.32.0/f5tts-api-settings.png differ diff --git a/docs/img/0.32.0/f5tts-parameters.png b/docs/img/0.32.0/f5tts-parameters.png new file mode 100644 index 00000000..b3ae1b11 Binary files /dev/null and b/docs/img/0.32.0/f5tts-parameters.png differ diff --git a/docs/img/0.32.0/google-tts-api-settings.png b/docs/img/0.32.0/google-tts-api-settings.png new file mode 100644 index 00000000..44ecc510 Binary files /dev/null and b/docs/img/0.32.0/google-tts-api-settings.png differ diff --git a/docs/img/0.32.0/kokoro-mixer.png b/docs/img/0.32.0/kokoro-mixer.png new file mode 100644 index 00000000..7fd99c51 Binary files /dev/null and b/docs/img/0.32.0/kokoro-mixer.png differ diff --git a/docs/img/0.32.0/openai-tts-api-settings.png b/docs/img/0.32.0/openai-tts-api-settings.png new file mode 100644 index 00000000..15472427 Binary files /dev/null and b/docs/img/0.32.0/openai-tts-api-settings.png differ diff --git a/docs/img/0.32.0/voice-agent-settings.png b/docs/img/0.32.0/voice-agent-settings.png new file mode 100644 index 00000000..6e934b83 Binary files /dev/null and b/docs/img/0.32.0/voice-agent-settings.png differ diff --git 
a/docs/img/0.32.0/voice-agent-status-characters.png b/docs/img/0.32.0/voice-agent-status-characters.png new file mode 100644 index 00000000..8f2a783d Binary files /dev/null and b/docs/img/0.32.0/voice-agent-status-characters.png differ diff --git a/docs/img/0.32.0/voice-library-access.png b/docs/img/0.32.0/voice-library-access.png new file mode 100644 index 00000000..7fbac88c Binary files /dev/null and b/docs/img/0.32.0/voice-library-access.png differ diff --git a/docs/img/0.32.0/voice-library-api-status.png b/docs/img/0.32.0/voice-library-api-status.png new file mode 100644 index 00000000..7fd6ec5e Binary files /dev/null and b/docs/img/0.32.0/voice-library-api-status.png differ diff --git a/docs/img/0.32.0/voice-library-interface.png b/docs/img/0.32.0/voice-library-interface.png new file mode 100644 index 00000000..6271fa28 Binary files /dev/null and b/docs/img/0.32.0/voice-library-interface.png differ diff --git a/docs/user-guide/agents/voice/chatterbox.md b/docs/user-guide/agents/voice/chatterbox.md new file mode 100644 index 00000000..583541ad --- /dev/null +++ b/docs/user-guide/agents/voice/chatterbox.md @@ -0,0 +1,58 @@ +# Chatterbox + +Local zero-shot voice cloning from .wav files. + +![Chatterbox API settings](/talemate/img/0.32.0/chatterbox-api-settings.png) + +##### Device + +Auto-detects the best available option. + +##### Model + +Default Chatterbox model, optimized for speed. + +##### Chunk size + +Split text into chunks of this size. Smaller values increase responsiveness at the cost of lost context between chunks (for example, appropriate inflection). 0 = no chunking + +## Adding Chatterbox Voices + +### Voice Requirements + +Chatterbox voices require: + +- Reference audio file (.wav format, 5-15 seconds optimal) +- Clear speech with minimal background noise +- Single speaker throughout the sample + +### Creating a Voice + +1. Open the Voice Library +2. Click **:material-plus: New** +3. Select "Chatterbox" as the provider +4. Configure the voice: + +![Add Chatterbox voice](/talemate/img/0.32.0/add-chatterbox-voice.png) + +**Label:** Descriptive name (e.g., "Marcus - Deep Male") + +**Voice ID / Upload File:** Upload a .wav file containing the voice sample. The uploaded reference audio will also be the voice ID. + +**Speed:** Adjust playback speed (0.5 to 2.0, default 1.0) + +**Tags:** Add descriptive tags for organization + +**Extra voice parameters** + +There are some optional parameters that can be set here on a per-voice level. + +![Chatterbox extra voice parameters](/talemate/img/0.32.0/chatterbox-parameters.png) + +##### Exaggeration Level + +Exaggeration (Neutral = 0.5, extreme values can be unstable). Higher exaggeration tends to speed up speech; reducing CFG helps compensate with slower, more deliberate pacing. + +##### CFG / Pace + +If the reference speaker has a fast speaking style, lowering CFG to around 0.3 can improve pacing. \ No newline at end of file diff --git a/docs/user-guide/agents/voice/elevenlabs.md b/docs/user-guide/agents/voice/elevenlabs.md index 828f41db..53e0c781 100644 --- a/docs/user-guide/agents/voice/elevenlabs.md +++ b/docs/user-guide/agents/voice/elevenlabs.md @@ -1,7 +1,41 @@ # ElevenLabs -If you have not configured the ElevenLabs TTS API, the voice agent will show that the API key is missing. +Professional voice synthesis with voice cloning capabilities using the ElevenLabs API. 
-![Elevenlaps api key missing](/talemate/img/0.26.0/voice-agent-missing-api-key.png) +![ElevenLabs API settings](/talemate/img/0.32.0/elevenlabs-api-settings.png) -See the [ElevenLabs API setup](/talemate/user-guide/apis/elevenlabs/) for instructions on how to set up the API key. \ No newline at end of file +## API Setup + +ElevenLabs requires an API key. See the [ElevenLabs API setup](/talemate/user-guide/apis/elevenlabs/) for instructions on obtaining and setting an API key. + +## Configuration + +**Model:** Select from available ElevenLabs models + +!!! warning "Voice Limits" + Your ElevenLabs subscription allows you to maintain a set number of voices (10 for the cheapest plan). Any voice that you generate audio for is automatically added to your voices at [https://elevenlabs.io/app/voice-lab](https://elevenlabs.io/app/voice-lab). This also happens when you use the "Test" button. It is recommended to test voices via their voice library instead. + +## Adding ElevenLabs Voices + +### Getting Voice IDs + +1. Go to [https://elevenlabs.io/app/voice-lab](https://elevenlabs.io/app/voice-lab) to view your voices +2. Find or create the voice you want to use +3. Click "More Actions" -> "Copy Voice ID" for the desired voice + +![Copy Voice ID](/talemate/img/0.32.0/elevenlabs-copy-voice-id.png) + +### Creating a Voice in Talemate + +![Add ElevenLabs voice](/talemate/img/0.32.0/add-elevenlabs-voice.png) + +1. Open the Voice Library +2. Click "Add Voice" +3. Select "ElevenLabs" as the provider +4. Configure the voice: + +**Label:** Descriptive name for the voice + +**Provider ID:** Paste the ElevenLabs voice ID you copied + +**Tags:** Add descriptive tags for organization \ No newline at end of file diff --git a/docs/user-guide/agents/voice/f5tts.md b/docs/user-guide/agents/voice/f5tts.md new file mode 100644 index 00000000..f5c446ea --- /dev/null +++ b/docs/user-guide/agents/voice/f5tts.md @@ -0,0 +1,78 @@ +# F5-TTS + +Local zero-shot voice cloning from .wav files. + +![F5-TTS configuration](/talemate/img/0.32.0/f5tts-api-settings.png) + +##### Device +Auto-detects the best available option (GPU preferred). + +##### Model + +- F5TTS_v1_Base (default, most recent model) +- F5TTS_Base +- E2TTS_Base + +##### NFE Step + +Number of steps to generate the voice. Higher values result in more detailed voices. + +##### Chunk size + +Split text into chunks of this size. Smaller values increase responsiveness at the cost of lost context between chunks (for example, appropriate inflection). 0 = no chunking + +##### Replace exclamation marks + +If checked, exclamation marks will be replaced with periods. This is recommended for `F5TTS_v1_Base` since it seems to over-exaggerate exclamation marks. + +## Adding F5-TTS Voices + +### Voice Requirements + +F5-TTS voices require: + +- Reference audio file (.wav format, 10-30 seconds) +- Clear speech with minimal background noise +- Single speaker throughout the sample +- Reference text (optional but recommended) + +### Creating a Voice + +1. Open the Voice Library +2. Click "Add Voice" +3. Select "F5-TTS" as the provider +4. Configure the voice: + +![Add F5-TTS voice](/talemate/img/0.32.0/add-f5tts-voice.png) + +**Label:** Descriptive name (e.g., "Emma - Calm Female") + +**Voice ID / Upload File:** Upload a .wav file containing the **reference audio** voice sample. The uploaded reference audio will also be the voice ID. 
+ +- Use 6-10 second samples (longer doesn't improve quality) +- Ensure clear speech with minimal background noise +- Record at natural speaking pace + +**Reference Text:** Enter the exact text spoken in the reference audio for improved quality + +- Enter exactly what is spoken in the reference audio +- Include proper punctuation and capitalization +- Improves voice cloning accuracy significantly + +**Speed:** Adjust playback speed (0.5 to 2.0, default 1.0) + +**Tags:** Add descriptive tags (gender, age, style) for organization + +**Extra voice parameters** + +There are some optional parameters that can be set here on a per-voice level. + +![F5-TTS extra voice parameters](/talemate/img/0.32.0/f5tts-parameters.png) + +##### Speed + +Allows you to adjust the speed of the voice. + +##### CFG Strength + +A higher CFG strength generally leads to more faithful reproduction of the input text, while a lower CFG strength can result in more varied or creative speech output, potentially at the cost of text-to-speech accuracy. \ No newline at end of file diff --git a/docs/user-guide/agents/voice/google.md b/docs/user-guide/agents/voice/google.md new file mode 100644 index 00000000..8718c9c9 --- /dev/null +++ b/docs/user-guide/agents/voice/google.md @@ -0,0 +1,15 @@ +# Google Gemini-TTS + +Google Gemini-TTS provides access to Google's text-to-speech service. + +## API Setup + +Google Gemini-TTS requires a Google Cloud API key. + +See the [Google Cloud API setup](/talemate/user-guide/apis/google/) for instructions on obtaining an API key. + +## Configuration + +![Google TTS settings](/talemate/img/0.32.0/google-tts-api-settings.png) + +**Model:** Select from available Google TTS models \ No newline at end of file diff --git a/docs/user-guide/agents/voice/index.md b/docs/user-guide/agents/voice/index.md index b47d002c..8e9a16f3 100644 --- a/docs/user-guide/agents/voice/index.md +++ b/docs/user-guide/agents/voice/index.md @@ -1,6 +1,26 @@ # Overview -Talemate supports Text-to-Speech (TTS) functionality, allowing users to convert text into spoken audio. This document outlines the steps required to configure TTS for Talemate using different providers, including ElevenLabs and a local TTS API. +In 0.32.0, Talemate's TTS (Text-to-Speech) agent has been completely refactored to provide advanced voice capabilities, including per-character voice assignment, speaker separation, and support for multiple local and remote APIs. The voice system now includes a comprehensive voice library for managing and organizing voices across all supported providers. 
+ +## Key Features + +- **Per-character voice assignment** - Each character can have their own unique voice +- **Speaker separation** - Automatic detection and separation of dialogue from narration +- **Voice library management** - Centralized management of all voices across providers +- **Multiple API support** - Support for both local and remote TTS providers +- **Director integration** - Automatic voice assignment for new characters + +## Supported APIs + +### Local APIs +- **Kokoro** - Fastest generation with predefined voice models and mixing +- **F5-TTS** - Fast voice cloning with occasional mispronunciations +- **Chatterbox** - High-quality voice cloning (slower generation) + +### Remote APIs +- **ElevenLabs** - Professional voice synthesis with voice cloning +- **Google Gemini-TTS** - Google's text-to-speech service +- **OpenAI** - OpenAI's TTS-1 and TTS-1-HD models ## Enable the Voice agent @@ -12,28 +32,30 @@ If your voice agent is disabled - indicated by the grey dot next to the agent - ![Agent disabled](/talemate/img/0.26.0/agent-disabled.png) ![Agent enabled](/talemate/img/0.26.0/agent-enabled.png) +!!! note "Ctrl click to toggle agent" + You can use Ctrl click to toggle the agent on and off. -!!! abstract "Next: Connect to a TTS api" - Next you need to decide which service / api to use for audio generation and configure the voice agent accordingly. +## Voice Library Management - - [OpenAI](openai.md) - - [ElevenLabs](elevenlabs.md) - - [Local TTS](local_tts.md) +Voices are managed through the Voice Library, accessible from the main application bar. The Voice Library allows you to: - You can also find more information about the various settings [here](settings.md). +- Add and organize voices from all supported providers +- Assign voices to specific characters +- Create mixed voices (Kokoro) +- Manage both global and scene-specific voice libraries -## Select a voice +See the [Voice Library Guide](voice-library.md) for detailed instructions. -![Elevenlaps voice missing](/talemate/img/0.26.0/voice-agent-no-voice-selected.png) +## Character Voice Assignment -Click on the agent to open the agent settings. +![Character voice assignment](/talemate/img/0.32.0/character-voice-assignment.png) -Then click on the `Narrator Voice` dropdown and select a voice. +Characters can have individual voices assigned through the Voice Library. When a character has a voice assigned: -![Elevenlaps voice selected](/talemate/img/0.26.0/voice-agent-select-voice.png) +1. Their dialogue will use their specific voice +2. The narrator voice is used for exposition in their messages (with speaker separation enabled) +3. If their assigned voice's API is not available, it falls back to the narrator voice -The selection is saved automatically, click anywhere outside the agent window to close it. +The Voice agent status will show all assigned character voices and their current status. -The Voice agent should now show that the voice is selected and be ready to use. - -![Elevenlabs ready](/talemate/img/0.26.0/elevenlabs-ready.png) \ No newline at end of file +![Voice agent status with characters](/talemate/img/0.32.0/voice-agent-status-characters.png) \ No newline at end of file diff --git a/docs/user-guide/agents/voice/kokoro.md b/docs/user-guide/agents/voice/kokoro.md new file mode 100644 index 00000000..df551b6d --- /dev/null +++ b/docs/user-guide/agents/voice/kokoro.md @@ -0,0 +1,55 @@ +# Kokoro + +Kokoro provides predefined voice models and voice mixing capabilities for creating custom voices. 
+ +## Using Predefined Voices + +Kokoro comes with built-in voice models that are ready to use immediately. + +Available predefined voices include various male and female voices with different characteristics. + +## Creating Mixed Voices + +Kokoro allows you to mix voices together to create a new voice. + +### Voice Mixing Interface + + +To create a mixed voice: + +1. Open the Voice Library +2. Click ":material-plus: New" +3. Select "Kokoro" as the provider +4. Choose the ":material-tune: Mixer" option +5. Configure the mixed voice: + +![Voice mixing interface](/talemate/img/0.32.0/kokoro-mixer.png) + + +**Label:** Descriptive name for the mixed voice + +**Base Voices:** Select 2-4 existing Kokoro voices to combine + +**Weights:** Set the influence of each voice (0.1 to 1.0) + +**Tags:** Descriptive tags for organization + +### Weight Configuration + +Each selected voice can have its weight adjusted: + +- Higher weights make that voice more prominent in the mix +- Lower weights make that voice more subtle +- Total weights need to sum to 1.0 +- Experiment with different combinations to achieve desired results + +### Saving Mixed Voices + +Once configured, click "Add Voice". Mixed voices are saved to your voice library and can be: + +- Assigned to characters +- Used as narrator voices + +just like any other voice. + +Saving a mixed voice may take a moment to complete. \ No newline at end of file diff --git a/docs/user-guide/agents/voice/local_tts.md b/docs/user-guide/agents/voice/local_tts.md deleted file mode 100644 index 1aa19723..00000000 --- a/docs/user-guide/agents/voice/local_tts.md +++ /dev/null @@ -1,53 +0,0 @@ -# Local TTS - -!!! warning - This has not been tested in a while and may not work as expected. It will likely be replaced with something different in the future. If this approach is currently broken its likely to remain so until it is replaced. - -For running a local TTS API, Talemate requires specific dependencies to be installed. - -### Windows Installation - -Run `install-local-tts.bat` to install the necessary requirements. - -### Linux Installation - -Execute the following command: - -```bash -pip install TTS -``` - -### Model and Device Configuration - -1. Choose a TTS model from the [Coqui TTS model list](https://github.com/coqui-ai/TTS). -2. Decide whether to use `cuda` or `cpu` for the device setting. -3. The first time you run TTS through the local API, it will download the specified model. Please note that this may take some time, and the download progress will be visible in the Talemate backend output. - -Example configuration snippet: - -```yaml -tts: - device: cuda # or 'cpu' - model: tts_models/multilingual/multi-dataset/xtts_v2 -``` - -### Voice Samples Configuration - -Configure voice samples by setting the `value` field to the path of a .wav file voice sample. Official samples can be downloaded from [Coqui XTTS-v2 samples](https://huggingface.co/coqui/XTTS-v2/tree/main/samples). - -Example configuration snippet: - -```yaml -tts: - voices: - - label: English Male - value: path/to/english_male.wav - - label: English Female - value: path/to/english_female.wav -``` - -## Saving the Configuration - -After configuring the `config.yaml` file, save your changes. Talemate will use the updated settings the next time it starts. - -For more detailed information on configuring Talemate, refer to the `config.py` file in the Talemate source code and the `config.example.yaml` file for a barebone configuration example. 
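The Kokoro weight configuration described above amounts to a normalized weighted average. As a conceptual sketch only (not Talemate's implementation), and assuming Kokoro voices are style tensors stored as `.pt` files under `tts/voice/kokoro/` (as the new `.gitignore` entries suggest), a 70/30 blend of two voices might look like this; the voice file names are hypothetical:

```python
import torch


def mix_voices(voice_paths: list[str], weights: list[float]) -> torch.Tensor:
    """Blend Kokoro-style voice tensors with weights normalized to sum to 1.0."""
    if len(voice_paths) != len(weights):
        raise ValueError("one weight per voice is required")
    total = sum(weights)
    normalized = [w / total for w in weights]  # enforces the sum-to-1.0 rule
    voices = [torch.load(path) for path in voice_paths]
    mixed = normalized[0] * voices[0]
    for weight, voice in zip(normalized[1:], voices[1:]):
        mixed = mixed + weight * voice
    return mixed


# Hypothetical 70/30 blend of two voice packs
mixed = mix_voices(
    ["tts/voice/kokoro/voice_a.pt", "tts/voice/kokoro/voice_b.pt"],
    [0.7, 0.3],
)
torch.save(mixed, "tts/voice/kokoro/my_mixed_voice.pt")
```

Voices with higher weights dominate the blend, which matches the "more prominent in the mix" behaviour described in the weight configuration notes.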
\ No newline at end of file diff --git a/docs/user-guide/agents/voice/openai.md b/docs/user-guide/agents/voice/openai.md index 817187a5..929420ed 100644 --- a/docs/user-guide/agents/voice/openai.md +++ b/docs/user-guide/agents/voice/openai.md @@ -8,16 +8,12 @@ See the [OpenAI API setup](/apis/openai.md) for instructions on how to set up th ## Settings -![Voice agent openai settings](/talemate/img/0.26.0/voice-agent-openai-settings.png) +![Voice agent openai settings](/talemate/img/0.32.0/openai-tts-api-settings.png) ##### Model Which model to use for generation. +- GPT-4o Mini TTS - TTS-1 -- TTS-1 HD - -!!! quote "OpenAI API documentation on quality" - For real-time applications, the standard tts-1 model provides the lowest latency but at a lower quality than the tts-1-hd model. Due to the way the audio is generated, tts-1 is likely to generate content that has more static in certain situations than tts-1-hd. In some cases, the audio may not have noticeable differences depending on your listening device and the individual person. - -Generally i have found that HD is fast enough for talemate, so this is the default. \ No newline at end of file +- TTS-1 HD \ No newline at end of file diff --git a/docs/user-guide/agents/voice/settings.md b/docs/user-guide/agents/voice/settings.md index 2fa330ae..615f6679 100644 --- a/docs/user-guide/agents/voice/settings.md +++ b/docs/user-guide/agents/voice/settings.md @@ -1,36 +1,65 @@ # Settings -![Voice agent settings](/talemate/img/0.26.0/voice-agent-settings.png) +![Voice agent settings](/talemate/img/0.32.0/voice-agent-settings.png) -##### API +##### Enabled APIs -The TTS API to use for voice generation. +Select which TTS APIs to enable. You can enable multiple APIs simultaneously: -- OpenAI -- ElevenLabs -- Local TTS +- **Kokoro** - Fastest generation with predefined voice models and mixing +- **F5-TTS** - Fast voice cloning with occasional mispronunciations +- **Chatterbox** - High-quality voice cloning (slower generation) +- **ElevenLabs** - Professional voice synthesis with voice cloning +- **Google Gemini-TTS** - Google's text-to-speech service +- **OpenAI** - OpenAI's TTS-1 and TTS-1-HD models + +!!! note "Multi-API Support" + You can enable multiple APIs and assign different voices from different providers to different characters. The system will automatically route voice generation to the appropriate API based on the voice assignment. ##### Narrator Voice -The voice to use for narration. Each API will come with its own set of voices. +The default voice used for narration and as a fallback for characters without assigned voices. -![Narrator voice](/talemate/img/0.26.0/voice-agent-select-voice.png) +The dropdown shows all available voices from all enabled APIs, with the format: "Voice Name (Provider)" -!!! note "Local TTS" - For local TTS, you will have to provide voice samples yourself. See [Local TTS Instructions](local_tts.md) for more information. +!!! info "Voice Management" + Voices are managed through the Voice Library, accessible from the main application bar. Adding, removing, or modifying voices should be done through the Voice Library interface. -##### Generate for player +##### Speaker Separation -Whether to generate voice for the player. If enabled, whenever the player speaks, the voice agent will generate audio for them. 
+Controls how dialogue is separated from exposition in messages: -##### Generate for NPCs +- **No separation** - Character messages use character voice entirely, narrator messages use narrator voice +- **Simple** - Basic separation of dialogue from exposition using punctuation analysis, with exposition being read by the narrator voice +- **Mixed** - Enables AI assisted separation for narrator messages and simple separation for character messages +- **AI assisted** - AI assisted separation for both narrator and character messages -Whether to generate voice for NPCs. If enabled, whenever a non player character speaks, the voice agent will generate audio for them. +!!! warning "AI Assisted Performance" + AI-assisted speaker separation sends additional prompts to your LLM, which may impact response time and API costs. -##### Generate for narration +##### Auto-generate for player -Whether to generate voice for narration. If enabled, whenever the narrator speaks, the voice agent will generate audio for them. +Generate voice automatically for player messages -##### Split generation +##### Auto-generate for AI characters -If enabled, the voice agent will generate audio in chunks, allowing for faster generation. This does however cause it lose context between chunks, and inflection may not be as good. \ No newline at end of file +Generate voice automatically for NPC/AI character messages + +##### Auto-generate for narration + +Generate voice automatically for narrator messages + +##### Auto-generate for context investigation + +Generate voice automatically for context investigation messages + +## Advanced Settings + +Advanced settings are configured per-API and can be found in the respective API configuration sections: + +- **Chunk size** - Maximum text length per generation request +- **Model selection** - Choose specific models for each API +- **Voice parameters** - Provider-specific voice settings + +!!! tip "Performance Optimization" + Each API has different optimal chunk sizes and parameters. The system automatically handles chunking and queuing for optimal performance across all enabled APIs. \ No newline at end of file diff --git a/docs/user-guide/agents/voice/voice-library.md b/docs/user-guide/agents/voice/voice-library.md new file mode 100644 index 00000000..6fc40b34 --- /dev/null +++ b/docs/user-guide/agents/voice/voice-library.md @@ -0,0 +1,156 @@ +# Voice Library + +The Voice Library is the central hub for managing all voices across all TTS providers in Talemate. It provides a unified interface for organizing, creating, and assigning voices to characters. + +## Accessing the Voice Library + +The Voice Library can be accessed from the main application bar at the top of the Talemate interface. + +![Voice Library access](/talemate/img/0.32.0/voice-library-access.png) + +Click the voice icon to open the Voice Library dialog. + +!!! note "Voice agent needs to be enabled" + The Voice agent needs to be enabled for the voice library to be available. 
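To make the "Simple" speaker-separation mode described in the Settings section above more concrete, here is a rough illustration of the idea (not Talemate's actual logic): double-quoted spans are routed to the character's voice, and the remaining exposition is read by the narrator voice.

```python
import re

QUOTED = re.compile(r'"[^"]+"')  # treat double-quoted spans as spoken dialogue


def simple_separation(message: str) -> list[tuple[str, str]]:
    """Split a character message into (voice, text) chunks.

    Quoted spans go to the character voice; everything else is exposition
    that would be read by the narrator voice.
    """
    chunks = []
    pos = 0
    for match in QUOTED.finditer(message):
        narration = message[pos:match.start()].strip()
        if narration:
            chunks.append(("narrator", narration))
        chunks.append(("character", match.group(0)))
        pos = match.end()
    tail = message[pos:].strip()
    if tail:
        chunks.append(("narrator", tail))
    return chunks


print(simple_separation('Kaira glanced at the display. "Yes Captain, I believe that is the best course of action."'))
# [('narrator', 'Kaira glanced at the display.'),
#  ('character', '"Yes Captain, I believe that is the best course of action."')]
```

The AI-assisted modes replace this punctuation heuristic with an extra LLM prompt, which is why they cost additional requests.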
+ +## Voice Library Interface + +![Voice Library interface](/talemate/img/0.32.0/voice-library-interface.png) + +The Voice Library interface consists of: + +### Scope Tabs + +- **Global** - Voices available across all scenes +- **Scene** - Voices specific to the current scene (only visible when a scene is loaded) +- **Characters** - Character voice assignments for the current scene (only visible when a scene is loaded) + +### API Status + +The toolbar shows the status of all TTS APIs: + +- **Green** - API is enabled and ready +- **Orange** - API is enabled but not configured +- **Red** - API has configuration issues +- **Gray** - API is disabled + +![API status](/talemate/img/0.32.0/voice-library-api-status.png) + +## Managing Voices + +### Global Voice Library + +The global voice library contains voices that are available across all scenes. These include: + +- Default voices provided by each TTS provider +- Custom voices you've added + +#### Adding New Voices + +To add a new voice: + +1. Click the "+ New" button +2. Select the TTS provider +3. Configure the voice parameters: + - **Label** - Display name for the voice + - **Provider ID** - Provider-specific identifier + - **Tags** - Free-form descriptive tags you define (gender, age, style, etc.) + - **Parameters** - Provider-specific settings + +Check the provider specific documentation for more information on how to configure the voice. + +#### Voice Types by Provider + +**F5-TTS & Chatterbox:** + +- Upload .wav reference files for voice cloning +- Specify reference text for better quality +- Adjust speed and other parameters + +**Kokoro:** + +- Select from predefined voice models +- Create mixed voices by combining multiple models +- Adjust voice mixing weights + +**ElevenLabs:** + +- Select from available ElevenLabs voices +- Configure voice settings and stability +- Use custom cloned voices from your ElevenLabs account + +**OpenAI:** + +- Choose from available OpenAI voice models +- Configure model (GPT-4o Mini TTS, TTS-1, TTS-1-HD) + +**Google Gemini-TTS:** + +- Select from Google's voice models +- Configure language and gender settings + +### Scene Voice Library + +Scene-specific voices are only available within the current scene. This is useful for: + +- Scene-specific characters +- Temporary voice experiments +- Custom voices for specific scenarios + +Scene voices are saved with the scene and will be available when the scene is loaded. + +## Character Voice Assignment + +### Automatic Assignment + +The Director agent can automatically assign voices to new characters based on: + +- Character tags and attributes +- Voice tags matching character personality +- Available voices in the voice library + +This feature can be enabled in the Director agent settings. + +### Manual Assignment + +![Character voice assignment](/talemate/img/0.32.0/character-voice-assignment.png) + +To manually assign a voice to a character: + +1. Go to the "Characters" tab in the Voice Library +2. Find the character in the list +3. Click the voice dropdown for that character +4. Select a voice from the available options +5. The assignment is saved automatically + +### Character Voice Status + +The character list shows: + +- **Character name** +- **Currently assigned voice** (if any) +- **Voice status** - whether the voice's API is available +- **Quick assignment controls** + +## Voice Tags and Organization + +### Tagging System + +Voices can be tagged with any descriptive attributes you choose. Tags are completely free-form and user-defined. 
Common examples include: + +- **Gender**: male, female, neutral +- **Age**: young, mature, elderly +- **Style**: calm, energetic, dramatic, mysterious +- **Quality**: deep, high, raspy, smooth +- **Character types**: narrator, villain, hero, comic relief +- **Custom tags**: You can create any tags that help you organize your voices + +### Filtering and Search + +Use the search bar to filter voices by: +- Voice label/name +- Provider +- Tags +- Character assignments + +This makes it easy to find the right voice for specific characters or situations. \ No newline at end of file diff --git a/docs/user-guide/clients/reasoning.md b/docs/user-guide/clients/reasoning.md new file mode 100644 index 00000000..6b70496f --- /dev/null +++ b/docs/user-guide/clients/reasoning.md @@ -0,0 +1,82 @@ +# Reasoning Model Support + +Talemate supports reasoning models that can perform step-by-step thinking before generating their final response. This feature allows models to work through complex problems internally before providing an answer. + +## Enabling Reasoning Support + +To enable reasoning support for a client: + +1. Open the **Clients** dialog from the main toolbar +2. Select the client you want to configure +3. Navigate to the **Reasoning** tab in the client configuration + +![Client reasoning configuration](/talemate/img/0.32.0/client-reasoning-2.png) + +4. Check the **Enable Reasoning** checkbox + +## Configuring Reasoning Tokens + +Once reasoning is enabled, you can configure the **Reasoning Tokens** setting using the slider: + +![Reasoning tokens configuration](/talemate/img/0.32.0/client-reasoning.png) + +### Recommended Token Amounts + +**For local reasoning models:** Use a high token allocation (recommended: 4096 tokens) to give the model sufficient space for complex reasoning. + +**For remote APIs:** Start with lower amounts (512-1024 tokens) and adjust based on your needs and token costs. + +### Token Allocation Behavior + +The behavior of the reasoning tokens setting depends on your API provider: + +**For APIs that support direct reasoning token specification:** + +- The specified tokens will be allocated specifically for reasoning +- The model will use these tokens for internal thinking before generating the response + +**For APIs that do NOT support reasoning token specification:** + +- The tokens are added as extra allowance to the response token limit for ALL requests +- This may lead to more verbose responses than usual since Talemate normally uses response token limits to control verbosity + +!!! warning "Increased Verbosity" + For providers without direct reasoning token support, enabling reasoning may result in more verbose responses since the extra tokens are added to all requests. + +## Response Pattern Configuration + +When reasoning is enabled, you may need to configure a **Pattern to strip from the response** to remove the thinking process from the final output. + +### Default Patterns + +Talemate provides quick-access buttons for common reasoning patterns: + +- **Default** - Uses the built-in pattern: `.*?` +- **`.*?◁/think▷`** - For models using arrow-style thinking delimiters +- **`.*?`** - For models using XML-style think tags + +### Custom Patterns + +You can also specify a custom regular expression pattern that matches your model's reasoning format. This pattern will be used to strip the thinking tokens from the response before displaying it to the user. + +## Model Compatibility + +Not all models support reasoning. 
This feature works best with: + +- Models specifically trained for chain-of-thought reasoning +- Models that support structured thinking patterns +- APIs that provide reasoning token specification + +## Important Notes + +- **Coercion Disabled**: When reasoning is enabled, LLM coercion (pre-filling responses) is automatically disabled since reasoning models need to generate their complete thought process +- **Response Time**: Reasoning models may take longer to respond as they work through their thinking process + +## Troubleshooting + +### Pattern Not Working +If the reasoning pattern isn't properly stripping the thinking process: + +1. Check your model's actual reasoning output format +2. Adjust the regular expression pattern to match your model's specific format +3. Test with the default pattern first to see if it works \ No newline at end of file diff --git a/docs/user-guide/clients/types/openai.md b/docs/user-guide/clients/types/openai.md index a877a7ed..244e773e 100644 --- a/docs/user-guide/clients/types/openai.md +++ b/docs/user-guide/clients/types/openai.md @@ -35,4 +35,19 @@ A unique name for the client that makes sense to you. Which model to use. Currently defaults to `gpt-4o`. !!! note "Talemate lags behind OpenAI" - When OpenAI adds a new model, it currently requires a Talemate update to add it to the list of available models. We are working on making this more dynamic. \ No newline at end of file + When OpenAI adds a new model, it currently requires a Talemate update to add it to the list of available models. We are working on making this more dynamic. + +##### Reasoning models (o1, o3, gpt-5) + +!!! important "Enable reasoning and allocate tokens" + The `o1`, `o3`, and `gpt-5` families are reasoning models. They always perform internal thinking before producing the final answer. To use them effectively in Talemate: + + - Enable the **Reasoning** option in the client configuration. + - Set **Reasoning Tokens** to a sufficiently high value to make room for the model's thinking process. + + A good starting range is 512–1024 tokens. Increase if your tasks are complex. Without enabling reasoning and allocating tokens, these models may return minimal or empty visible content because the token budget is consumed by internal reasoning. + + See the detailed guide: [Reasoning Model Support](/talemate/user-guide/clients/reasoning/). + +!!! tip "Getting empty responses?" + If these models return empty or very short answers, it usually means the reasoning budget was exhausted. Increase **Reasoning Tokens** and try again. 
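As a small illustration of how the strip pattern from the reasoning guide above is applied (a sketch, not Talemate's internal code), assuming a model that wraps its thinking in `<think>...</think>` tags:

```python
import re

# Assumed pattern for models that emit <think>...</think>; substitute whatever
# pattern matches your model's actual reasoning format.
REASONING_PATTERN = r"<think>.*?</think>"


def strip_reasoning(response: str, pattern: str = REASONING_PATTERN) -> str:
    """Remove the internal thinking block before the response is displayed."""
    return re.sub(pattern, "", response, flags=re.DOTALL).strip()


raw = "<think>The user greeted me; a short, friendly reply fits.</think>Hello! How can I help?"
print(strip_reasoning(raw))  # -> Hello! How can I help?
```

The `re.DOTALL` flag lets the pattern span newlines inside the thinking block, and the non-greedy `.*?` stops at the first closing delimiter so the visible reply is left intact.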
\ No newline at end of file diff --git a/install-cuda.sh b/install-cuda.sh index 89285a94..7addc33f 100755 --- a/install-cuda.sh +++ b/install-cuda.sh @@ -4,4 +4,4 @@ uv pip uninstall torch torchaudio # install torch and torchaudio with CUDA support -uv pip install torch~=2.7.0 torchaudio~=2.7.0 --index-url https://download.pytorch.org/whl/cu128 \ No newline at end of file +uv pip install torch~=2.7.1 torchaudio~=2.7.1 --index-url https://download.pytorch.org/whl/cu128 \ No newline at end of file diff --git a/pyproject.toml b/pyproject.toml index be67b0a5..8b074992 100644 --- a/pyproject.toml +++ b/pyproject.toml @@ -1,11 +1,12 @@ [project] name = "talemate" -version = "0.31.0" +version = "0.32.0" description = "AI-backed roleplay and narrative tools" authors = [{name = "VeguAITools"}] license = {text = "GNU Affero General Public License v3.0"} -requires-python = ">=3.10,<3.14" +requires-python = ">=3.11,<3.14" dependencies = [ + "pip", "astroid>=2.8", "jedi>=0.18", "black", @@ -50,11 +51,20 @@ dependencies = [ # ChromaDB "chromadb>=1.0.12", "InstructorEmbedding @ https://github.com/vegu-ai/instructor-embedding/archive/refs/heads/202506-fixes.zip", - "torch>=2.7.0", - "torchaudio>=2.7.0", - # locked for instructor embeddings - #sentence-transformers==2.2.2 + "torch>=2.7.1", + "torchaudio>=2.7.1", "sentence_transformers>=2.7.0", + # TTS + "elevenlabs>=2.7.1", + # Local TTS + # Chatterbox TTS + #"chatterbox-tts @ https://github.com/rsxdalv/chatterbox/archive/refs/heads/fast.zip", + "chatterbox-tts==0.1.2", + # kokoro TTS + "kokoro>=0.9.4", + "soundfile>=0.13.1", + # F5-TTS + "f5-tts>=1.1.7", ] [project.optional-dependencies] @@ -105,3 +115,25 @@ force_grid_wrap = 0 use_parentheses = true ensure_newline_before_comments = true line_length = 88 + +[tool.uv] +override-dependencies = [ + # chatterbox wants torch 2.6.0, but is confirmed working with 2.7.1 + "torchaudio>=2.7.1", + "torch>=2.7.1", + "numpy>=2", + "pydantic>=2.11", +] + +[tool.uv.sources] +torch = [ + { index = "pytorch-cu128" }, +] +torchaudio = [ + { index = "pytorch-cu128" }, +] + +[[tool.uv.index]] +name = "pytorch-cu128" +url = "https://download.pytorch.org/whl/cu128" +explicit = true \ No newline at end of file diff --git a/scenes/infinity-quest/infinity-quest.json b/scenes/infinity-quest/infinity-quest.json index 6bc4f5ec..afedb58b 100644 --- a/scenes/infinity-quest/infinity-quest.json +++ b/scenes/infinity-quest/infinity-quest.json @@ -1,6 +1,6 @@ { "description": "Captain Elmer Farstield and his trusty first officer, Kaira, embark upon a daring mission into uncharted space. Their small but mighty exploration vessel, the Starlight Nomad, is equipped with state-of-the-art technology and crewed by an elite team of scientists, engineers, and pilots. Together they brave the vast cosmos seeking answers to humanity's most pressing questions about life beyond our solar system.", - "intro": "You awaken aboard your ship, the Starlight Nomad, surrounded by darkness. A soft hum resonates throughout the vessel indicating its systems are online. Your mind struggles to recall what brought you here - where 'here' actually is. You remember nothing more than flashes of images; swirling nebulae, foreign constellations, alien life forms... Then there was a bright light followed by this endless void.\n\nGingerly, you make your way through the dimly lit corridors of the ship. It seems smaller than you expected given the magnitude of the mission ahead. 
However, each room reveals intricate technology designed specifically for long-term space travel and exploration. There appears to be no other living soul besides yourself. An eerie silence fills every corner.", + "intro": "Elmer awoke aboard his ship, the Starlight Nomad, surrounded by darkness. A soft hum resonated throughout the vessel indicating its systems were online. His mind struggled to recall what had brought him here - where here actually was. He remembered nothing more than flashes of images; swirling nebulae, foreign constellations, alien life forms... Then there had been a bright light followed by this endless void.\n\nGingerly, he made his way through the dimly lit corridors of the ship. It seemed smaller than he had expected given the magnitude of the mission ahead. However, each room revealed intricate technology designed specifically for long-term space travel and exploration. There appeared to be no other living soul besides himself. An eerie silence filled every corner.", "name": "Infinity Quest", "history": [], "environment": "scene", @@ -90,11 +90,11 @@ "gender": "female", "color": "red", "example_dialogue": [ - "Kaira: \"Yes Captain, I believe that is the best course of action\" She nods slightly, as if to punctuate her approval of the decision*", - "Kaira: \"This device appears to have multiple functions, Captain. Allow me to analyze its capabilities and determine if it could be useful in our exploration efforts.\"", - "Kaira: \"Captain, it appears that this newly discovered planet harbors an ancient civilization whose technological advancements rival those found back home on Altrusia!\" Excitement bubbles beneath her calm exterior as she shares the news", - "Kaira: \"Captain, I understand why you would want us to pursue this course of action based on our current data, but I cannot shake the feeling that there might be unforeseen consequences if we proceed without further investigation into potential hazards.\"", - "Kaira: \"I often find myself wondering what it would have been like if I had never left my home world... But then again, perhaps it was fate that led me here, onto this ship bound for destinations unknown...\"" + "Kaira: \"Yes Captain, I believe that is the best course of action.\" Kaira glanced at the navigation display, then back at him with a slight nod. \"The numbers check out on my end too. If we adjust our trajectory by 2.7 degrees and increase thrust by fifteen percent, we should reach the nebula's outer edge within six hours.\" Her violet fingers moved efficiently across the controls as she pulled up the gravitational readings.", + "Kaira: The scanner hummed as it analyzed the alien artifact. Kaira knelt beside the strange object, frowning in concentration at the shifting symbols on its warm metal surface. \"This device appears to have multiple functions, Captain,\" she said, adjusting her scanner's settings. \"Give me a few minutes to run a full analysis and I'll know what we're dealing with. The material composition is fascinating - it's responding to our ship's systems in ways that shouldn't be possible.\"", + "Kaira: \"Captain, it appears that this newly discovered planet harbors an ancient civilization whose technological advancements rival those found back home on Altrusia!\" The excitement in her voice was unmistakable as Kaira looked up from her console. \"These readings are incredible - I've never seen anything like this before. 
There are structures beneath the surface that predate our oldest sites by millions of years.\" She paused, processing the implications. \"If these readings are accurate, we may have found something truly significant.\"", + "Kaira: Something felt off about the proposed course of action. Kaira moved from her station to stand beside the Captain, organizing her thoughts carefully. \"Captain, I understand why you would want us to pursue this based on our current data,\" she began respectfully, clasping her hands behind her back. \"But something feels wrong about this. The quantum signatures have subtle variations that remind me of Hegemony cloaking technology. Maybe we should run a few more scans before we commit to a full approach.\"", + "Kaira: \"I often find myself wondering what it would have been like if I had never left my home world,\" she said softly, not turning from the observation deck viewport as footsteps approached. The stars wheeled slowly past in their eternal dance. \"Sometimes I dream of Altrusia's crystal gardens, the way our twin suns would set over the mountains.\" Kaira finally turned, her expression thoughtful. \"Then again, I suppose I was always meant to end up out here somehow. Perhaps this journey is exactly where I'm supposed to be.\"" ], "history_events": [], "is_player": false, diff --git a/scenes/simulation-suite-v2/nodes/fn-sim-suite-add-character.json b/scenes/simulation-suite-v2/nodes/fn-sim-suite-add-character.json index c9c2f109..88f800c9 100644 --- a/scenes/simulation-suite-v2/nodes/fn-sim-suite-add-character.json +++ b/scenes/simulation-suite-v2/nodes/fn-sim-suite-add-character.json @@ -494,34 +494,6 @@ "registry": "state/GetState", "base_type": "core/Node" }, - "8a050403-5c69-46f7-abe2-f65db4553942": { - "title": "TRUE", - "id": "8a050403-5c69-46f7-abe2-f65db4553942", - "properties": { - "value": true - }, - "x": 460, - "y": 670, - "width": 210, - "height": 58, - "collapsed": true, - "inherited": false, - "registry": "core/MakeBool", - "base_type": "core/Node" - }, - "72943c1c-d2a1-40b4-bb28-8bcc5f02aa5c": { - "title": "Create Character", - "id": "72943c1c-d2a1-40b4-bb28-8bcc5f02aa5c", - "properties": {}, - "x": 580, - "y": 550, - "width": 245, - "height": 186, - "collapsed": false, - "inherited": false, - "registry": "agents/creator/CreateCharacter", - "base_type": "core/Graph" - }, "970f12d0-330e-41b3-b025-9a53bcf2fc6f": { "title": "SET - created_character", "id": "970f12d0-330e-41b3-b025-9a53bcf2fc6f", @@ -628,6 +600,34 @@ "inherited": false, "registry": "data/string/MakeText", "base_type": "core/Node" + }, + "72943c1c-d2a1-40b4-bb28-8bcc5f02aa5c": { + "title": "Create Character", + "id": "72943c1c-d2a1-40b4-bb28-8bcc5f02aa5c", + "properties": {}, + "x": 580, + "y": 550, + "width": 245, + "height": 206, + "collapsed": false, + "inherited": false, + "registry": "agents/creator/CreateCharacter", + "base_type": "core/Graph" + }, + "8a050403-5c69-46f7-abe2-f65db4553942": { + "title": "TRUE", + "id": "8a050403-5c69-46f7-abe2-f65db4553942", + "properties": { + "value": true + }, + "x": 400, + "y": 720, + "width": 210, + "height": 58, + "collapsed": true, + "inherited": false, + "registry": "core/MakeBool", + "base_type": "core/Node" } }, "edges": { @@ -714,17 +714,6 @@ "fd4cd318-121b-47de-84a0-b1ab62c5601b.value": [ "41c0c2cd-d39b-4528-a5fe-459094393ba3.list" ], - "8a050403-5c69-46f7-abe2-f65db4553942.value": [ - "72943c1c-d2a1-40b4-bb28-8bcc5f02aa5c.generate", - "72943c1c-d2a1-40b4-bb28-8bcc5f02aa5c.generate_attributes", - 
"72943c1c-d2a1-40b4-bb28-8bcc5f02aa5c.is_active" - ], - "72943c1c-d2a1-40b4-bb28-8bcc5f02aa5c.state": [ - "90610e90-3d00-4b4b-96de-1f6aa3f4f795.state" - ], - "72943c1c-d2a1-40b4-bb28-8bcc5f02aa5c.character": [ - "970f12d0-330e-41b3-b025-9a53bcf2fc6f.value" - ], "a2457116-35cb-4571-ba1a-cbf63851544e.value": [ "a9f17ddc-fc8f-4257-8e32-45ac111fd50d.state", "a9f17ddc-fc8f-4257-8e32-45ac111fd50d.character", @@ -738,6 +727,18 @@ ], "5041f12d-507f-4f5d-a26f-048625974602.value": [ "b50e23d4-b456-4385-b80e-c2b6884c7855.template" + ], + "72943c1c-d2a1-40b4-bb28-8bcc5f02aa5c.state": [ + "90610e90-3d00-4b4b-96de-1f6aa3f4f795.state" + ], + "72943c1c-d2a1-40b4-bb28-8bcc5f02aa5c.character": [ + "970f12d0-330e-41b3-b025-9a53bcf2fc6f.value" + ], + "8a050403-5c69-46f7-abe2-f65db4553942.value": [ + "72943c1c-d2a1-40b4-bb28-8bcc5f02aa5c.generate_attributes", + "72943c1c-d2a1-40b4-bb28-8bcc5f02aa5c.is_active", + "72943c1c-d2a1-40b4-bb28-8bcc5f02aa5c.generate", + "72943c1c-d2a1-40b4-bb28-8bcc5f02aa5c.assign_voice" ] }, "groups": [ @@ -808,13 +809,14 @@ "inputs": [], "outputs": [ { - "id": "f78fa84b-8b6f-4c8a-83c0-754537bb9060", + "id": "92423e0a-89d8-4ad8-a42d-fdcba75b31d1", "name": "fn", "optional": false, "group": null, "socket_type": "function" } ], + "module_properties": {}, "style": { "title_color": "#573a2e", "node_color": "#392f2c", diff --git a/scenes/simulation-suite-v2/nodes/fn-sim-suite-set-goal.json b/scenes/simulation-suite-v2/nodes/fn-sim-suite-set-goal.json index 276fba6e..b0076e56 100644 --- a/scenes/simulation-suite-v2/nodes/fn-sim-suite-set-goal.json +++ b/scenes/simulation-suite-v2/nodes/fn-sim-suite-set-goal.json @@ -116,8 +116,8 @@ "context_aware": true, "history_aware": true }, - "x": 568, - "y": 861, + "x": 372, + "y": 857, "width": 249, "height": 406, "collapsed": false, @@ -130,7 +130,7 @@ "id": "a8110d74-0fb5-4601-b883-6c63ceaa9d31", "properties": {}, "x": 24, - "y": 2084, + "y": 2088, "width": 140, "height": 106, "collapsed": false, @@ -145,7 +145,7 @@ "attribute": "title" }, "x": 213, - "y": 2094, + "y": 2098, "width": 210, "height": 98, "collapsed": false, @@ -160,7 +160,7 @@ "value": "The Simulation Suite" }, "x": 213, - "y": 2284, + "y": 2288, "width": 210, "height": 58, "collapsed": false, @@ -177,7 +177,7 @@ "case_sensitive": true }, "x": 523, - "y": 2184, + "y": 2188, "width": 210, "height": 126, "collapsed": false, @@ -192,7 +192,7 @@ "pass_through": true }, "x": 774, - "y": 2185, + "y": 2189, "width": 210, "height": 78, "collapsed": false, @@ -207,7 +207,7 @@ "attribute": "0" }, "x": 1629, - "y": 2411, + "y": 2415, "width": 210, "height": 98, "collapsed": false, @@ -222,7 +222,7 @@ "attribute": "title" }, "x": 2189, - "y": 2321, + "y": 2325, "width": 210, "height": 98, "collapsed": false, @@ -237,7 +237,7 @@ "stage": 5 }, "x": 2459, - "y": 2331, + "y": 2335, "width": 210, "height": 118, "collapsed": true, @@ -250,7 +250,7 @@ "id": "8208d05c-1822-4f4a-ba75-cfd18d2de8ca", "properties": {}, "x": 1989, - "y": 2331, + "y": 2335, "width": 140, "height": 106, "collapsed": true, @@ -266,7 +266,7 @@ "chars": "\"'*" }, "x": 1899, - "y": 2411, + "y": 2415, "width": 210, "height": 102, "collapsed": false, @@ -282,7 +282,7 @@ "max_splits": 1 }, "x": 1359, - "y": 2411, + "y": 2415, "width": 210, "height": 102, "collapsed": false, @@ -304,7 +304,7 @@ "history_aware": true }, "x": 1014, - "y": 2185, + "y": 2189, "width": 276, "height": 406, "collapsed": false, @@ -376,8 +376,8 @@ "name": "arg_goal", "scope": "local" }, - "x": 228, - "y": 1041, + "x": 32, + "y": 1037, "width": 256, "height": 
122, "collapsed": false, @@ -393,7 +393,7 @@ "max_scene_types": 2 }, "x": 671, - "y": 1490, + "y": 1494, "width": 210, "height": 122, "collapsed": false, @@ -409,7 +409,7 @@ "scope": "local" }, "x": 30, - "y": 1551, + "y": 1555, "width": 256, "height": 122, "collapsed": false, @@ -448,37 +448,6 @@ "registry": "state/GetState", "base_type": "core/Node" }, - "59d31050-f61d-4798-9790-e22d34ecbd4b": { - "title": "GET local.auto_direct_enabled", - "id": "59d31050-f61d-4798-9790-e22d34ecbd4b", - "properties": { - "name": "auto_direct_enabled", - "scope": "local" - }, - "x": 20, - "y": 850, - "width": 256, - "height": 122, - "collapsed": false, - "inherited": false, - "registry": "state/GetState", - "base_type": "core/Node" - }, - "e8f19a05-43fe-4e4a-9cc4-bec0a29779d8": { - "title": "Switch", - "id": "e8f19a05-43fe-4e4a-9cc4-bec0a29779d8", - "properties": { - "pass_through": true - }, - "x": 310, - "y": 870, - "width": 210, - "height": 78, - "collapsed": false, - "inherited": false, - "registry": "core/Switch", - "base_type": "core/Node" - }, "b03fa942-c48e-4c04-b9ae-a009a7e0f947": { "title": "GET local.auto_direct_enabled", "id": "b03fa942-c48e-4c04-b9ae-a009a7e0f947", @@ -487,7 +456,7 @@ "scope": "local" }, "x": 24, - "y": 1367, + "y": 1371, "width": 256, "height": 122, "collapsed": false, @@ -502,7 +471,7 @@ "pass_through": true }, "x": 360, - "y": 1390, + "y": 1394, "width": 210, "height": 78, "collapsed": false, @@ -518,7 +487,7 @@ "scope": "local" }, "x": 30, - "y": 1821, + "y": 1826, "width": 256, "height": 122, "collapsed": false, @@ -533,7 +502,7 @@ "stage": 4 }, "x": 1100, - "y": 1870, + "y": 1875, "width": 210, "height": 118, "collapsed": true, @@ -546,7 +515,7 @@ "id": "9db37d1e-3cf8-49bd-bdc5-8663494e5657", "properties": {}, "x": 670, - "y": 1840, + "y": 1845, "width": 226, "height": 62, "collapsed": false, @@ -561,7 +530,7 @@ "stage": 3 }, "x": 1080, - "y": 1520, + "y": 1524, "width": 210, "height": 118, "collapsed": true, @@ -576,7 +545,7 @@ "pass_through": true }, "x": 370, - "y": 1840, + "y": 1845, "width": 210, "height": 78, "collapsed": false, @@ -590,8 +559,8 @@ "properties": { "stage": 2 }, - "x": 1280, - "y": 890, + "x": 1084, + "y": 886, "width": 210, "height": 118, "collapsed": true, @@ -599,14 +568,29 @@ "registry": "core/Stage", "base_type": "core/Node" }, + "5559196c-f6b1-4223-8e13-2bf64e3cfef0": { + "title": "true", + "id": "5559196c-f6b1-4223-8e13-2bf64e3cfef0", + "properties": { + "value": true + }, + "x": 24, + "y": 876, + "width": 210, + "height": 58, + "collapsed": false, + "inherited": false, + "registry": "core/MakeBool", + "base_type": "core/Node" + }, "6ef94917-f9b1-4c18-af15-617430e50cfe": { "title": "Set Scene Intent", "id": "6ef94917-f9b1-4c18-af15-617430e50cfe", "properties": { "intent": "" }, - "x": 930, - "y": 860, + "x": 731, + "y": 853, "width": 210, "height": 78, "collapsed": false, @@ -693,14 +677,8 @@ "e4cd1391-daed-4951-a6c6-438d993c07a9.state" ], "c66bdaeb-4166-4835-9415-943af547c926.value": [ - "24ac670b-4648-4915-9dbb-b6bf35ee6d80.description", - "24ac670b-4648-4915-9dbb-b6bf35ee6d80.state" - ], - "59d31050-f61d-4798-9790-e22d34ecbd4b.value": [ - "e8f19a05-43fe-4e4a-9cc4-bec0a29779d8.value" - ], - "e8f19a05-43fe-4e4a-9cc4-bec0a29779d8.yes": [ - "bb43a68e-bdf6-4b02-9cc0-102742b14f5d.state" + "24ac670b-4648-4915-9dbb-b6bf35ee6d80.state", + "24ac670b-4648-4915-9dbb-b6bf35ee6d80.description" ], "b03fa942-c48e-4c04-b9ae-a009a7e0f947.value": [ "8ad7c42c-110e-46ae-b649-4a1e6d055e25.value" @@ -717,6 +695,9 @@ "6a8762c4-16cf-4e8c-9d10-8af7597c4097.yes": 
[ "9db37d1e-3cf8-49bd-bdc5-8663494e5657.state" ], + "5559196c-f6b1-4223-8e13-2bf64e3cfef0.value": [ + "bb43a68e-bdf6-4b02-9cc0-102742b14f5d.state" + ], "6ef94917-f9b1-4c18-af15-617430e50cfe.state": [ "f4cd34d9-0628-4145-a3da-ec1215cd356c.state" ] @@ -745,7 +726,7 @@ { "title": "Generate Scene Types", "x": -1, - "y": 1287, + "y": 1290, "width": 1298, "height": 408, "color": "#8AA", @@ -756,8 +737,8 @@ "title": "Set story intention", "x": -1, "y": 773, - "width": 1539, - "height": 512, + "width": 1320, + "height": 514, "color": "#8AA", "font_size": 24, "inherited": false @@ -765,7 +746,7 @@ { "title": "Evaluate Scene Intent", "x": -1, - "y": 1697, + "y": 1701, "width": 1293, "height": 302, "color": "#8AA", @@ -775,7 +756,7 @@ { "title": "Set title", "x": -1, - "y": 2003, + "y": 2006, "width": 2618, "height": 637, "color": "#8AA", @@ -794,7 +775,7 @@ { "text": "Some times the AI will produce more text after the title, we only care about the title on the first line.", "x": 1359, - "y": 2311, + "y": 2315, "width": 471, "inherited": false } @@ -804,13 +785,14 @@ "inputs": [], "outputs": [ { - "id": "dede1a38-2107-4475-9db5-358c09cb0d12", + "id": "5c8dee64-5832-40ba-b1e2-2a411d913cc7", "name": "fn", "optional": false, "group": null, "socket_type": "function" } ], + "module_properties": {}, "style": { "title_color": "#573a2e", "node_color": "#392f2c", diff --git a/scenes/simulation-suite-v2/nodes/sim-suite-process-commands.json b/scenes/simulation-suite-v2/nodes/sim-suite-process-commands.json index dd82bc7f..cc309f91 100644 --- a/scenes/simulation-suite-v2/nodes/sim-suite-process-commands.json +++ b/scenes/simulation-suite-v2/nodes/sim-suite-process-commands.json @@ -666,23 +666,6 @@ "registry": "data/ListAppend", "base_type": "core/Node" }, - "4eb36f21-1020-4609-85e5-d16b42019c66": { - "title": "AI Function Calling", - "id": "4eb36f21-1020-4609-85e5-d16b42019c66", - "properties": { - "template": "computer", - "max_calls": 5, - "retries": 1 - }, - "x": 1000, - "y": 2581, - "width": 212, - "height": 206, - "collapsed": false, - "inherited": false, - "registry": "focal/Focal", - "base_type": "core/Node" - }, "3da498c0-55de-4ec3-9943-6486279b9826": { "title": "GET scene loop.user_message", "id": "3da498c0-55de-4ec3-9943-6486279b9826", @@ -1167,6 +1150,24 @@ "inherited": false, "registry": "data/MakeList", "base_type": "core/Node" + }, + "4eb36f21-1020-4609-85e5-d16b42019c66": { + "title": "AI Function Calling", + "id": "4eb36f21-1020-4609-85e5-d16b42019c66", + "properties": { + "template": "computer", + "max_calls": 5, + "retries": 1, + "response_length": 1408 + }, + "x": 1000, + "y": 2581, + "width": 210, + "height": 230, + "collapsed": false, + "inherited": false, + "registry": "focal/Focal", + "base_type": "core/Node" } }, "edges": { @@ -1296,9 +1297,6 @@ "d45f07ff-c3ce-4aac-bae6-b9c77089cb69.list": [ "4eb36f21-1020-4609-85e5-d16b42019c66.callbacks" ], - "4eb36f21-1020-4609-85e5-d16b42019c66.state": [ - "3008c0f4-105d-444a-8fde-0ac11a21f40c.state" - ], "3da498c0-55de-4ec3-9943-6486279b9826.value": [ "6d707f1c-af55-481a-970a-eb7a9f9c45dd.message" ], @@ -1380,6 +1378,9 @@ ], "632d4a0e-3327-409b-aaa4-ed38b932286b.list": [ "06175a92-abbe-4483-9ab2-abaee2104728.value" + ], + "4eb36f21-1020-4609-85e5-d16b42019c66.state": [ + "3008c0f4-105d-444a-8fde-0ac11a21f40c.state" ] }, "groups": [ diff --git a/scenes/simulation-suite-v2/templates/computer.jinja2 b/scenes/simulation-suite-v2/templates/computer.jinja2 index c95d5a86..b1ebc8f5 100644 --- a/scenes/simulation-suite-v2/templates/computer.jinja2 +++ 
b/scenes/simulation-suite-v2/templates/computer.jinja2 @@ -19,7 +19,7 @@ You have access to the following functions you must call to fulfill the user's r focal.callbacks.set_simulated_environment.render( "Create or change the simulated environment. This means the location, time, specific conditions, or any other aspect of the simulation that is not directly related to the characters.", instructions="Instructions on how to change the simulated environment. These will be given to the simulation computer to setup the new environment. REQUIRED.", - reset="If true, the environment should be reset and all simulated characters are removed. If false, the environment should be changed but the characters should remain. REQUIRED.", + reset="If true, the environment should be reset and ALL simulated characters are removed. IMPORTANT: If you set reset=true, this function MUST be the FIRST call in your stack; otherwise, set reset=false to avoid deactivating characters added earlier. REQUIRED.", examples=[ {"instructions": "Change the location to a lush forest, with a river running through it.", "reset":true}, {"instructions": "The simulation suite flickers and changes to a bustling city street.", "reset":true}, @@ -123,7 +123,7 @@ You have access to the following functions you must call to fulfill the user's r {{ focal.callbacks.set_simulation_goal.render( - "Briefly describe the overall goal of the simulation. What is the user looking to experience? What needs to happen for the simulation to be considered complete? This function is used to provide context and direction for the simulation. It should be clear, specific and detailed, and focused on the user's objectives.", + "Briefly describe the overall goal of the simulation. What is the user looking to experience? What needs to happen for the simulation to be considered complete? This function is used to provide context and direction for the simulation. It should be clear, specific and detailed, and focused on the user's objectives. You MUST call this on new simulations or if the user has requested a change in the simulation's goal.", goal="The overall goal of the simulation. This should be a clear and concise statement that outlines the user's objective. 
REQUIRED.", examples=[ {"goal": "The user is exploring a mysterious alien planet to uncover the secrets of an ancient civilization."}, diff --git a/src/talemate/agents/base.py b/src/talemate/agents/base.py index b8b7d1d2..b83dfa1b 100644 --- a/src/talemate/agents/base.py +++ b/src/talemate/agents/base.py @@ -18,10 +18,11 @@ from talemate.agents.context import ActiveAgent, active_agent from talemate.emit import emit from talemate.events import GameLoopStartEvent from talemate.context import active_scene -import talemate.config as config +from talemate.ux.schema import Column +from talemate.config import get_config, Config +import talemate.config.schema as config_schema from talemate.client.context import ( ClientContext, - set_client_context_attribute, ) __all__ = [ @@ -53,19 +54,20 @@ class AgentActionConfig(pydantic.BaseModel): type: str label: str description: str = "" - value: Union[int, float, str, bool, list, None] = None - default_value: Union[int, float, str, bool] = None - max: Union[int, float, None] = None - min: Union[int, float, None] = None - step: Union[int, float, None] = None + value: int | float | str | bool | list | None = None + default_value: int | float | str | bool | None = None + max: int | float | None = None + min: int | float | None = None + step: int | float | None = None scope: str = "global" - choices: Union[list[dict[str, str]], None] = None + choices: list[dict[str, str | int | float | bool]] | None = None note: Union[str, None] = None expensive: bool = False quick_toggle: bool = False - condition: Union[AgentActionConditional, None] = None - title: Union[str, None] = None - value_migration: Union[Callable, None] = pydantic.Field(default=None, exclude=True) + condition: AgentActionConditional | None = None + title: str | None = None + value_migration: Callable | None = pydantic.Field(default=None, exclude=True) + columns: list[Column] | None = None note_on_value: dict[str, AgentActionNote] = pydantic.Field(default_factory=dict) @@ -78,20 +80,21 @@ class AgentAction(pydantic.BaseModel): label: str description: str = "" warning: str = "" - config: Union[dict[str, AgentActionConfig], None] = None - condition: Union[AgentActionConditional, None] = None + config: dict[str, AgentActionConfig] | None = None + condition: AgentActionConditional | None = None container: bool = False - icon: Union[str, None] = None + icon: str | None = None can_be_disabled: bool = False quick_toggle: bool = False experimental: bool = False class AgentDetail(pydantic.BaseModel): - value: Union[str, None] = None - description: Union[str, None] = None - icon: Union[str, None] = None + value: str | None = None + description: str | None = None + icon: str | None = None color: str = "grey" + hidden: bool = False class DynamicInstruction(pydantic.BaseModel): @@ -172,11 +175,6 @@ def set_processing(fn): if scene: scene.continue_actions() - if getattr(scene, "config", None): - set_client_context_attribute( - "app_config_system_prompts", scene.config.get("system_prompts", {}) - ) - with ActiveAgent(self, fn, args, kwargs) as active_agent_context: try: await self.emit_status(processing=True) @@ -221,7 +219,6 @@ class Agent(ABC): verbose_name = None set_processing = set_processing requires_llm_client = True - auto_break_repetition = False websocket_handler = None essential = True ready_check_error = None @@ -235,6 +232,10 @@ class Agent(ABC): return actions + @property + def config(self) -> Config: + return get_config() + @property def agent_details(self): if hasattr(self, "client"): @@ -244,6 
+245,12 @@ class Agent(ABC): @property def ready(self): + if not self.requires_llm_client: + return True + + if not hasattr(self, "client"): + return False + if not getattr(self.client, "enabled", True): return False @@ -326,7 +333,7 @@ class Agent(ABC): # scene state - def context_fingerpint(self, extra: list[str] = []) -> str | None: + def context_fingerprint(self, extra: list[str] = None) -> str | None: active_agent_context = active_agent.get() if not active_agent_context: @@ -337,8 +344,9 @@ class Agent(ABC): else: fingerprint = f"START-{active_agent_context.first.fingerprint}" - for extra_key in extra: - fingerprint += f"-{hash(extra_key)}" + if extra: + for extra_key in extra: + fingerprint += f"-{hash(extra_key)}" return fingerprint @@ -448,25 +456,26 @@ class Agent(ABC): except AttributeError: pass - async def save_config(self, app_config: config.Config | None = None): + async def save_config(self): """ Saves the agent config to the config file. If no config object is provided, the config is loaded from the config file. """ - if not app_config: - app_config: config.Config = config.load_config(as_model=True) + app_config: Config = get_config() - app_config.agents[self.agent_type] = config.Agent( + app_config.agents[self.agent_type] = config_schema.Agent( name=self.agent_type, - client=self.client.name if self.client else None, + client=self.client.name if getattr(self, "client", None) else None, enabled=self.enabled, actions={ - action_key: config.AgentAction( + action_key: config_schema.AgentAction( enabled=action.enabled, config={ - config_key: config.AgentActionConfig(value=config_obj.value) + config_key: config_schema.AgentActionConfig( + value=config_obj.value + ) for config_key, config_obj in action.config.items() }, ) @@ -478,7 +487,8 @@ class Agent(ABC): agent=self.agent_type, config=app_config.agents[self.agent_type], ) - config.save_config(app_config) + + app_config.dirty = True async def on_game_loop_start(self, event: GameLoopStartEvent): """ @@ -602,7 +612,7 @@ class Agent(ABC): exclude_fn: Callable = None, ): current_memory_context = [] - memory_helper = self.scene.get_helper("memory") + memory_helper = instance.get_agent("memory") if memory_helper: history_messages = "\n".join( self.scene.recent_history(memory_history_context_max) diff --git a/src/talemate/agents/conversation/__init__.py b/src/talemate/agents/conversation/__init__.py index 29ae195d..c735248e 100644 --- a/src/talemate/agents/conversation/__init__.py +++ b/src/talemate/agents/conversation/__init__.py @@ -23,6 +23,7 @@ from talemate.agents.base import ( Agent, AgentAction, AgentActionConfig, + AgentActionNote, AgentDetail, AgentEmission, DynamicInstruction, @@ -85,12 +86,22 @@ class ConversationAgent(MemoryRAGMixin, Agent): "format": AgentActionConfig( type="text", label="Format", - description="The generation format of the scene context, as seen by the AI.", + description="The generation format of the scene progression, as seen by the AI. Has no direct effect on your view of the scene, but will affect the way the AI perceives the scene and its characters, leading to changes in the response, for better or worse.", choices=[ {"label": "Screenplay", "value": "movie_script"}, {"label": "Chat (legacy)", "value": "chat"}, + { + "label": "Narrative (NEW, experimental)", + "value": "narrative", + }, ], value="movie_script", + note_on_value={ + "narrative": AgentActionNote( + type="primary", + text="Will attempt to generate flowing, novel-like prose with scene intent awareness and character goal consideration. 
A reasoning model is STRONGLY recommended. Experimental and more prone to generate out of turn character actions and dialogue.", + ) + }, ), "length": AgentActionConfig( type="number", @@ -133,12 +144,6 @@ class ConversationAgent(MemoryRAGMixin, Agent): ), }, ), - "auto_break_repetition": AgentAction( - enabled=True, - can_be_disabled=True, - label="Auto Break Repetition", - description="Will attempt to automatically break AI repetition.", - ), "content": AgentAction( enabled=True, can_be_disabled=False, @@ -161,7 +166,7 @@ class ConversationAgent(MemoryRAGMixin, Agent): def __init__( self, - client: client.TaleMateClient, + client: client.ClientBase | None = None, kind: Optional[str] = "pygmalion", logging_enabled: Optional[bool] = True, **kwargs, @@ -453,21 +458,31 @@ class ConversationAgent(MemoryRAGMixin, Agent): if total_result.startswith(":\n") or total_result.startswith(": "): total_result = total_result[2:] - # movie script format - # {uppercase character name} - # {dialogue} - total_result = total_result.replace(f"{character.name.upper()}\n", "") + conversation_format = self.conversation_format - # chat format - # {character name}: {dialogue} - total_result = total_result.replace(f"{character.name}:", "") + if conversation_format == "narrative": + # For narrative format, the LLM generates pure prose without character name prefixes + # We need to store it internally in the standard {name}: {text} format + total_result = util.clean_dialogue(total_result, main_name=character.name) + # Only add character name if it's not already there + if not total_result.startswith(character.name + ":"): + total_result = f"{character.name}: {total_result}" + else: + # movie script format + # {uppercase character name} + # {dialogue} + total_result = total_result.replace(f"{character.name.upper()}\n", "") - # Removes partial sentence at the end - total_result = util.clean_dialogue(total_result, main_name=character.name) + # chat format + # {character name}: {dialogue} + total_result = total_result.replace(f"{character.name}:", "") - # Check if total_result starts with character name, if not, prepend it - if not total_result.startswith(character.name + ":"): - total_result = f"{character.name}: {total_result}" + # Removes partial sentence at the end + total_result = util.clean_dialogue(total_result, main_name=character.name) + + # Check if total_result starts with character name, if not, prepend it + if not total_result.startswith(character.name + ":"): + total_result = f"{character.name}: {total_result}" total_result = total_result.strip() @@ -499,9 +514,6 @@ class ConversationAgent(MemoryRAGMixin, Agent): def allow_repetition_break( self, kind: str, agent_function_name: str, auto: bool = False ): - if auto and not self.actions["auto_break_repetition"].enabled: - return False - return agent_function_name == "converse" def inject_prompt_paramters( diff --git a/src/talemate/agents/creator/__init__.py b/src/talemate/agents/creator/__init__.py index de68bd0f..85f30201 100644 --- a/src/talemate/agents/creator/__init__.py +++ b/src/talemate/agents/creator/__init__.py @@ -39,7 +39,7 @@ class CreatorAgent( def __init__( self, - client: client.ClientBase, + client: client.ClientBase | None = None, **kwargs, ): self.client = client diff --git a/src/talemate/agents/creator/modules/create-character.json b/src/talemate/agents/creator/modules/create-character.json index 4fcbc4dc..07a43bca 100644 --- a/src/talemate/agents/creator/modules/create-character.json +++ 
b/src/talemate/agents/creator/modules/create-character.json @@ -21,7 +21,7 @@ "num": 3 }, "x": 39, - "y": 507, + "y": 236, "width": 210, "height": 154, "collapsed": false, @@ -40,7 +40,7 @@ "num": 1 }, "x": 38, - "y": 107, + "y": -164, "width": 210, "height": 154, "collapsed": false, @@ -59,7 +59,7 @@ "num": 0 }, "x": 38, - "y": -93, + "y": -364, "width": 210, "height": 154, "collapsed": false, @@ -76,7 +76,7 @@ "num": 0 }, "x": 288, - "y": -63, + "y": -334, "width": 210, "height": 106, "collapsed": true, @@ -89,7 +89,7 @@ "id": "553125be-2c2b-4404-98b5-d6333a4f9655", "properties": {}, "x": 348, - "y": 507, + "y": 236, "width": 140, "height": 26, "collapsed": false, @@ -102,7 +102,7 @@ "id": "207e357e-5e83-4d40-a331-d0041b9dfa49", "properties": {}, "x": 348, - "y": 307, + "y": 36, "width": 140, "height": 26, "collapsed": false, @@ -115,7 +115,7 @@ "id": "bad7d2ed-f6fa-452b-8b47-d753ae7a45e0", "properties": {}, "x": 348, - "y": 107, + "y": -164, "width": 140, "height": 26, "collapsed": false, @@ -131,7 +131,7 @@ "scope": "local" }, "x": 598, - "y": 107, + "y": -164, "width": 210, "height": 122, "collapsed": false, @@ -147,7 +147,7 @@ "scope": "local" }, "x": 598, - "y": 307, + "y": 36, "width": 210, "height": 122, "collapsed": false, @@ -163,7 +163,7 @@ "scope": "local" }, "x": 598, - "y": 507, + "y": 236, "width": 210, "height": 122, "collapsed": false, @@ -176,7 +176,7 @@ "id": "1765a51c-82ac-4d96-9c60-d0adc7faaa68", "properties": {}, "x": 498, - "y": 707, + "y": 436, "width": 171, "height": 26, "collapsed": false, @@ -192,7 +192,7 @@ "scope": "local" }, "x": 798, - "y": 706, + "y": 435, "width": 244, "height": 122, "collapsed": false, @@ -205,7 +205,7 @@ "id": "f3dc37df-1748-4235-b575-60e55b8bec73", "properties": {}, "x": 498, - "y": 906, + "y": 635, "width": 171, "height": 26, "collapsed": false, @@ -220,7 +220,7 @@ "stage": 0 }, "x": 1208, - "y": 366, + "y": 95, "width": 210, "height": 118, "collapsed": true, @@ -238,7 +238,7 @@ "icon": "F1719" }, "x": 1178, - "y": -144, + "y": -415, "width": 210, "height": 130, "collapsed": false, @@ -253,7 +253,7 @@ "default": true }, "x": 298, - "y": 736, + "y": 465, "width": 210, "height": 58, "collapsed": true, @@ -268,7 +268,7 @@ "default": false }, "x": 298, - "y": 936, + "y": 665, "width": 210, "height": 58, "collapsed": true, @@ -287,7 +287,7 @@ "num": 2 }, "x": 38, - "y": 306, + "y": 35, "width": 210, "height": 154, "collapsed": false, @@ -303,7 +303,7 @@ "scope": "local" }, "x": 798, - "y": 906, + "y": 635, "width": 244, "height": 122, "collapsed": false, @@ -322,7 +322,7 @@ "num": 5 }, "x": 38, - "y": 706, + "y": 435, "width": 237, "height": 154, "collapsed": false, @@ -341,7 +341,7 @@ "num": 7 }, "x": 38, - "y": 906, + "y": 635, "width": 210, "height": 154, "collapsed": false, @@ -360,7 +360,7 @@ "num": 4 }, "x": 48, - "y": 1326, + "y": 1055, "width": 237, "height": 154, "collapsed": false, @@ -375,7 +375,7 @@ "default": true }, "x": 318, - "y": 1356, + "y": 1085, "width": 210, "height": 58, "collapsed": true, @@ -388,7 +388,7 @@ "id": "6d387c67-6b32-4435-b984-4760f0f1f8d2", "properties": {}, "x": 498, - "y": 1336, + "y": 1065, "width": 171, "height": 26, "collapsed": false, @@ -404,7 +404,7 @@ "scope": "local" }, "x": 798, - "y": 1316, + "y": 1045, "width": 244, "height": 122, "collapsed": false, @@ -455,7 +455,7 @@ "num": 6 }, "x": 38, - "y": 1106, + "y": 835, "width": 252, "height": 154, "collapsed": false, @@ -471,7 +471,7 @@ "apply_on_unresolved": true }, "x": 518, - "y": 1136, + "y": 865, "width": 210, "height": 102, "collapsed": 
true, @@ -1101,7 +1101,7 @@ "num": 8 }, "x": 49, - "y": 1546, + "y": 1275, "width": 210, "height": 154, "collapsed": false, @@ -1116,7 +1116,7 @@ "default": false }, "x": 329, - "y": 1586, + "y": 1315, "width": 210, "height": 58, "collapsed": true, @@ -1129,7 +1129,7 @@ "id": "8acfe789-fbb5-4e29-8fd8-2217b987c086", "properties": {}, "x": 499, - "y": 1566, + "y": 1295, "width": 171, "height": 26, "collapsed": false, @@ -1145,7 +1145,7 @@ "scope": "local" }, "x": 799, - "y": 1526, + "y": 1255, "width": 244, "height": 122, "collapsed": false, @@ -1258,8 +1258,8 @@ "output_name": "character", "num": 0 }, - "x": 331, - "y": 5739, + "x": 332, + "y": 6178, "width": 210, "height": 106, "collapsed": false, @@ -1274,8 +1274,8 @@ "name": "character", "scope": "local" }, - "x": 51, - "y": 5729, + "x": 52, + "y": 6168, "width": 210, "height": 122, "collapsed": false, @@ -1291,8 +1291,8 @@ "output_name": "actor", "num": 0 }, - "x": 331, - "y": 5949, + "x": 332, + "y": 6388, "width": 210, "height": 106, "collapsed": false, @@ -1307,8 +1307,8 @@ "name": "actor", "scope": "local" }, - "x": 51, - "y": 5949, + "x": 52, + "y": 6388, "width": 210, "height": 122, "collapsed": false, @@ -1323,7 +1323,7 @@ "stage": 0 }, "x": 1320, - "y": 1160, + "y": 889, "width": 210, "height": 118, "collapsed": true, @@ -1414,7 +1414,7 @@ "writing_style": null }, "x": 320, - "y": 1200, + "y": 929, "width": 270, "height": 122, "collapsed": true, @@ -1430,7 +1430,7 @@ "scope": "local" }, "x": 790, - "y": 1110, + "y": 839, "width": 244, "height": 122, "collapsed": false, @@ -1475,6 +1475,159 @@ "inherited": false, "registry": "agents/creator/ContextualGenerate", "base_type": "core/Node" + }, + "d61de1ad-6f2a-447f-918a-dce7e76ea3a1": { + "title": "assign_voice", + "id": "d61de1ad-6f2a-447f-918a-dce7e76ea3a1", + "properties": {}, + "x": 509, + "y": 1544, + "width": 171, + "height": 26, + "collapsed": false, + "inherited": false, + "registry": "core/Watch", + "base_type": "core/Node" + }, + "3d655827-b66b-4355-910d-96097e7f2f13": { + "title": "SET local.assign_voice", + "id": "3d655827-b66b-4355-910d-96097e7f2f13", + "properties": { + "name": "assign_voice", + "scope": "local" + }, + "x": 810, + "y": 1506, + "width": 244, + "height": 122, + "collapsed": false, + "inherited": false, + "registry": "state/SetState", + "base_type": "core/Node" + }, + "6aa5c32a-8dfb-48a9-96ec-5ad9ed6aa5d1": { + "title": "Stage 0", + "id": "6aa5c32a-8dfb-48a9-96ec-5ad9ed6aa5d1", + "properties": { + "stage": 0 + }, + "x": 1170, + "y": 1546, + "width": 210, + "height": 118, + "collapsed": true, + "inherited": false, + "registry": "core/Stage", + "base_type": "core/Node" + }, + "9ac9b12d-1b97-4f42-92d8-0d4f883ffb2f": { + "title": "IN assign_voice", + "id": "9ac9b12d-1b97-4f42-92d8-0d4f883ffb2f", + "properties": { + "input_type": "bool", + "input_name": "assign_voice", + "input_optional": true, + "input_group": "", + "num": 9 + }, + "x": 60, + "y": 1527, + "width": 210, + "height": 154, + "collapsed": false, + "inherited": false, + "registry": "core/Input", + "base_type": "core/Node" + }, + "de11206d-13db-44a5-befd-de559fb68d09": { + "title": "GET local.assign_voice", + "id": "de11206d-13db-44a5-befd-de559fb68d09", + "properties": { + "name": "assign_voice", + "scope": "local" + }, + "x": 25, + "y": 5720, + "width": 240, + "height": 122, + "collapsed": false, + "inherited": false, + "registry": "state/GetState", + "base_type": "core/Node" + }, + "97b196a3-e7a6-4cfa-905e-686a744890b7": { + "title": "Switch", + "id": "97b196a3-e7a6-4cfa-905e-686a744890b7", + 
"properties": { + "pass_through": true + }, + "x": 355, + "y": 5740, + "width": 210, + "height": 78, + "collapsed": false, + "inherited": false, + "registry": "core/Switch", + "base_type": "core/Node" + }, + "3956711a-a3df-4213-b739-104cbe704964": { + "title": "GET local.character", + "id": "3956711a-a3df-4213-b739-104cbe704964", + "properties": { + "name": "character", + "scope": "local" + }, + "x": 25, + "y": 5930, + "width": 210, + "height": 122, + "collapsed": false, + "inherited": false, + "registry": "state/GetState", + "base_type": "core/Node" + }, + "47b2b492-4178-4254-b468-3877a5341f66": { + "title": "Assign Voice", + "id": "47b2b492-4178-4254-b468-3877a5341f66", + "properties": {}, + "x": 665, + "y": 5830, + "width": 161, + "height": 66, + "collapsed": false, + "inherited": false, + "registry": "agents/director/AssignVoice", + "base_type": "core/Node" + }, + "44d78795-2d41-468b-94d5-399b9b655888": { + "title": "Stage 6", + "id": "44d78795-2d41-468b-94d5-399b9b655888", + "properties": { + "stage": 6 + }, + "x": 885, + "y": 5860, + "width": 210, + "height": 118, + "collapsed": true, + "inherited": false, + "registry": "core/Stage", + "base_type": "core/Node" + }, + "f5e5ec03-cc12-4a45-aa15-6ca3e5e4bc85": { + "title": "As Bool", + "id": "f5e5ec03-cc12-4a45-aa15-6ca3e5e4bc85", + "properties": { + "default": true + }, + "x": 330, + "y": 1560, + "width": 210, + "height": 58, + "collapsed": true, + "inherited": false, + "registry": "core/AsBool", + "base_type": "core/Node" } }, "edges": { @@ -1726,15 +1879,39 @@ ], "4acb67ea-68ee-43ae-a8c6-98a2b0e0f053.text": [ "d41f0d98-14d5-49dd-8e57-7812fb9fee94.value" + ], + "d61de1ad-6f2a-447f-918a-dce7e76ea3a1.value": [ + "3d655827-b66b-4355-910d-96097e7f2f13.value" + ], + "3d655827-b66b-4355-910d-96097e7f2f13.value": [ + "6aa5c32a-8dfb-48a9-96ec-5ad9ed6aa5d1.state" + ], + "9ac9b12d-1b97-4f42-92d8-0d4f883ffb2f.value": [ + "f5e5ec03-cc12-4a45-aa15-6ca3e5e4bc85.value" + ], + "de11206d-13db-44a5-befd-de559fb68d09.value": [ + "97b196a3-e7a6-4cfa-905e-686a744890b7.value" + ], + "97b196a3-e7a6-4cfa-905e-686a744890b7.yes": [ + "47b2b492-4178-4254-b468-3877a5341f66.state" + ], + "3956711a-a3df-4213-b739-104cbe704964.value": [ + "47b2b492-4178-4254-b468-3877a5341f66.character" + ], + "47b2b492-4178-4254-b468-3877a5341f66.state": [ + "44d78795-2d41-468b-94d5-399b9b655888.state" + ], + "f5e5ec03-cc12-4a45-aa15-6ca3e5e4bc85.value": [ + "d61de1ad-6f2a-447f-918a-dce7e76ea3a1.value" ] }, "groups": [ { "title": "Process Arguments - Stage 0", "x": 1, - "y": -218, - "width": 1446, - "height": 1948, + "y": -490, + "width": 1432, + "height": 2216, "color": "#3f789e", "font_size": 24, "inherited": false @@ -1792,12 +1969,22 @@ { "title": "Outputs", "x": 0, - "y": 5642, + "y": 6080, "width": 595, "height": 472, "color": "#8A8", "font_size": 24, "inherited": false + }, + { + "title": "Assign Voice - Stage 6", + "x": 0, + "y": 5640, + "width": 1120, + "height": 437, + "color": "#3f789e", + "font_size": 24, + "inherited": false } ], "comments": [], @@ -1805,6 +1992,7 @@ "base_type": "core/Graph", "inputs": [], "outputs": [], + "module_properties": {}, "style": { "title_color": "#572e44", "node_color": "#392c34", diff --git a/src/talemate/agents/director/__init__.py b/src/talemate/agents/director/__init__.py index 0da53d74..a8b1b780 100644 --- a/src/talemate/agents/director/__init__.py +++ b/src/talemate/agents/director/__init__.py @@ -1,33 +1,24 @@ from __future__ import annotations -from typing import TYPE_CHECKING, List - import structlog -import traceback -import 
talemate.instance as instance from talemate.emit import emit -from talemate.scene_message import DirectorMessage -from talemate.util import random_color -from talemate.character import deactivate_character -from talemate.status import LoadingStatus -from talemate.exceptions import GenerationCancelled +from talemate.scene_message import DirectorMessage, Flags -from talemate.agents.base import Agent, AgentAction, AgentActionConfig, set_processing +from talemate.agents.base import Agent, AgentAction, AgentActionConfig from talemate.agents.registry import register from talemate.agents.memory.rag import MemoryRAGMixin +from talemate.client import ClientBase +from talemate.game.focal.schema import Call from .guide import GuideSceneMixin from .generate_choices import GenerateChoicesMixin from .legacy_scene_instructions import LegacySceneInstructionsMixin from .auto_direct import AutoDirectMixin from .websocket_handler import DirectorWebsocketHandler - +from .character_management import CharacterManagementMixin import talemate.agents.director.nodes # noqa: F401 -if TYPE_CHECKING: - from talemate import Character, Scene - log = structlog.get_logger("talemate.agent.director") @@ -38,6 +29,7 @@ class DirectorAgent( GenerateChoicesMixin, AutoDirectMixin, LegacySceneInstructionsMixin, + CharacterManagementMixin, Agent, ): agent_type = "director" @@ -76,9 +68,10 @@ class DirectorAgent( GenerateChoicesMixin.add_actions(actions) GuideSceneMixin.add_actions(actions) AutoDirectMixin.add_actions(actions) + CharacterManagementMixin.add_actions(actions) return actions - def __init__(self, client, **kwargs): + def __init__(self, client: ClientBase | None = None, **kwargs): self.is_enabled = True self.client = client self.next_direct_character = {} @@ -101,178 +94,24 @@ class DirectorAgent( def actor_direction_mode(self): return self.actions["direct"].config["actor_direction_mode"].value - @set_processing - async def persist_characters_from_worldstate( - self, exclude: list[str] = None - ) -> List[Character]: - created_characters = [] - - for character_name in self.scene.world_state.characters.keys(): - if exclude and character_name.lower() in exclude: - continue - - if character_name in self.scene.character_names: - continue - - character = await self.persist_character(name=character_name) - - created_characters.append(character) - - self.scene.emit_status() - - return created_characters - - @set_processing - async def persist_character( - self, - name: str, - content: str = None, - attributes: str = None, - determine_name: bool = True, - templates: list[str] = None, - active: bool = True, - narrate_entry: bool = False, - narrate_entry_direction: str = "", - augment_attributes: str = "", - generate_attributes: bool = True, - description: str = "", - ) -> Character: - world_state = instance.get_agent("world_state") - creator = instance.get_agent("creator") - narrator = instance.get_agent("narrator") - memory = instance.get_agent("memory") - scene: "Scene" = self.scene - any_attribute_templates = False - - loading_status = LoadingStatus(max_steps=None, cancellable=True) - - # Start of character creation - log.debug("persist_character", name=name) - - # Determine the character's name (or clarify if it's already set) - if determine_name: - loading_status("Determining character name") - name = await creator.determine_character_name(name, instructions=content) - log.debug("persist_character", adjusted_name=name) - - # Create the blank character - character: Character = self.scene.Character(name=name) - - # Add the 
character to the scene - character.color = random_color() - actor = self.scene.Actor( - character=character, agent=instance.get_agent("conversation") + async def log_function_call(self, call: Call): + log.debug("director.log_function_call", call=call) + message = DirectorMessage( + message=f"Called {call.name}", + action=call.name, + flags=Flags.HIDDEN, + subtype="function_call", ) - await self.scene.add_actor(actor) + emit("director", message, data={"function_call": call.model_dump()}) - try: - # Apply any character generation templates - if templates: - loading_status("Applying character generation templates") - templates = scene.world_state_manager.template_collection.collect_all( - templates - ) - log.debug("persist_character", applying_templates=templates) - await scene.world_state_manager.apply_templates( - templates.values(), - character_name=character.name, - information=content, - ) - - # if any of the templates are attribute templates, then we no longer need to - # generate a character sheet - any_attribute_templates = any( - template.template_type == "character_attribute" - for template in templates.values() - ) - log.debug( - "persist_character", any_attribute_templates=any_attribute_templates - ) - - if ( - any_attribute_templates - and augment_attributes - and generate_attributes - ): - log.debug( - "persist_character", augmenting_attributes=augment_attributes - ) - loading_status("Augmenting character attributes") - additional_attributes = await world_state.extract_character_sheet( - name=name, - text=content, - augmentation_instructions=augment_attributes, - ) - character.base_attributes.update(additional_attributes) - - # Generate a character sheet if there are no attribute templates - if not any_attribute_templates and generate_attributes: - loading_status("Generating character sheet") - log.debug("persist_character", extracting_character_sheet=True) - if not attributes: - attributes = await world_state.extract_character_sheet( - name=name, text=content - ) - else: - attributes = world_state._parse_character_sheet(attributes) - - log.debug("persist_character", attributes=attributes) - character.base_attributes = attributes - - # Generate a description for the character - if not description: - loading_status("Generating character description") - description = await creator.determine_character_description( - character, information=content - ) - character.description = description - log.debug("persist_character", description=description) - - # Generate a dialogue instructions for the character - loading_status("Generating acting instructions") - dialogue_instructions = ( - await creator.determine_character_dialogue_instructions( - character, information=content - ) - ) - character.dialogue_instructions = dialogue_instructions - log.debug("persist_character", dialogue_instructions=dialogue_instructions) - - # Narrate the character's entry if the option is selected - if active and narrate_entry: - loading_status("Narrating character entry") - is_present = await world_state.is_character_present(name) - if not is_present: - await narrator.action_to_narration( - "narrate_character_entry", - emit_message=True, - character=character, - narrative_direction=narrate_entry_direction, - ) - - # Deactivate the character if not active - if not active: - await deactivate_character(scene, character) - - # Commit the character's details to long term memory - await character.commit_to_memory(memory) - self.scene.emit_status() - self.scene.world_state.emit() - - loading_status.done( - 
message=f"{character.name} added to scene", status="success" - ) - return character - except GenerationCancelled: - loading_status.done(message="Character creation cancelled", status="idle") - await scene.remove_actor(actor) - except Exception: - loading_status.done(message="Character creation failed", status="error") - await scene.remove_actor(actor) - log.error("Error persisting character", error=traceback.format_exc()) - - async def log_action(self, action: str, action_description: str): - message = DirectorMessage(message=action_description, action=action) + async def log_action( + self, action: str, action_description: str, console_only: bool = False + ): + message = DirectorMessage( + message=action_description, + action=action, + flags=Flags.HIDDEN if console_only else Flags.NONE, + ) self.scene.push_history(message) emit("director", message) diff --git a/src/talemate/agents/director/character_management.py b/src/talemate/agents/director/character_management.py new file mode 100644 index 00000000..5e6b404a --- /dev/null +++ b/src/talemate/agents/director/character_management.py @@ -0,0 +1,333 @@ +from typing import TYPE_CHECKING +import traceback +import structlog +import talemate.instance as instance +import talemate.agents.tts.voice_library as voice_library +from talemate.agents.tts.schema import Voice +from talemate.util import random_color +from talemate.character import deactivate_character, set_voice +from talemate.status import LoadingStatus +from talemate.exceptions import GenerationCancelled +from talemate.agents.base import AgentAction, AgentActionConfig, set_processing +import talemate.game.focal as focal + + +__all__ = [ + "CharacterManagementMixin", +] + +log = structlog.get_logger() + +if TYPE_CHECKING: + from talemate import Character, Scene + from talemate.agents.tts import TTSAgent + + +class VoiceCandidate(Voice): + used: bool = False + + +class CharacterManagementMixin: + """ + Director agent mixin that provides functionality for automatically guiding + the actors or the narrator during the scene progression. 
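
The character-management code that follows reports voice assignments through the reworked `log_action(..., console_only=True)` above, which marks the director message as hidden rather than suppressing it: the message still goes through `push_history`/`emit`, and the `Flags.HIDDEN` bit is presumably what lets the frontend skip rendering it in the transcript. A minimal, self-contained sketch of that idea — the `IntFlag` layout and the `visible()` helper are illustrative assumptions, not talemate's actual `Flags` implementation:

import enum
from dataclasses import dataclass


class Flags(enum.IntFlag):
    # Assumed bit-flag layout; the diff only shows that NONE and HIDDEN exist.
    NONE = 0
    HIDDEN = 1


@dataclass
class DirectorMessage:
    message: str
    action: str
    flags: Flags = Flags.NONE


def log_action(action: str, description: str, console_only: bool = False) -> DirectorMessage:
    # console_only messages carry the HIDDEN flag so a renderer can skip them
    # while they still flow through the normal history/emit path.
    return DirectorMessage(
        message=description,
        action=action,
        flags=Flags.HIDDEN if console_only else Flags.NONE,
    )


def visible(msg: DirectorMessage) -> bool:
    return not (msg.flags & Flags.HIDDEN)


print(visible(log_action("Assigned voice", "Assigned voice to character", console_only=True)))  # False
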
+ """ + + @classmethod + def add_actions(cls, actions: dict[str, AgentAction]): + actions["character_management"] = AgentAction( + enabled=True, + container=True, + can_be_disabled=False, + label="Character Management", + icon="mdi-account", + description="Configure how the director manages characters.", + config={ + "assign_voice": AgentActionConfig( + type="bool", + label="Assign Voice (TTS)", + description="If enabled, the director is allowed to assign a text-to-speech voice when persisting a character.", + value=True, + title="Persisting Characters", + ), + }, + ) + + # config property helpers + + @property + def cm_assign_voice(self) -> bool: + return self.actions["character_management"].config["assign_voice"].value + + @property + def cm_should_assign_voice(self) -> bool: + if not self.cm_assign_voice: + return False + + tts_agent: "TTSAgent" = instance.get_agent("tts") + if not tts_agent.enabled: + return False + + if not tts_agent.ready_apis: + return False + + return True + + # actions + + @set_processing + async def persist_characters_from_worldstate( + self, exclude: list[str] = None + ) -> list["Character"]: + created_characters = [] + + for character_name in self.scene.world_state.characters.keys(): + if exclude and character_name.lower() in exclude: + continue + + if character_name in self.scene.character_names: + continue + + character = await self.persist_character(name=character_name) + + created_characters.append(character) + + self.scene.emit_status() + + return created_characters + + @set_processing + async def persist_character( + self, + name: str, + content: str = None, + attributes: str = None, + determine_name: bool = True, + templates: list[str] = None, + active: bool = True, + narrate_entry: bool = False, + narrate_entry_direction: str = "", + augment_attributes: str = "", + generate_attributes: bool = True, + description: str = "", + assign_voice: bool = True, + is_player: bool = False, + ) -> "Character": + world_state = instance.get_agent("world_state") + creator = instance.get_agent("creator") + narrator = instance.get_agent("narrator") + memory = instance.get_agent("memory") + scene: "Scene" = self.scene + any_attribute_templates = False + + loading_status = LoadingStatus(max_steps=None, cancellable=True) + + # Start of character creation + log.debug("persist_character", name=name) + + # Determine the character's name (or clarify if it's already set) + if determine_name: + loading_status("Determining character name") + name = await creator.determine_character_name(name, instructions=content) + log.debug("persist_character", adjusted_name=name) + + # Create the blank character + character: "Character" = self.scene.Character(name=name, is_player=is_player) + + # Add the character to the scene + character.color = random_color() + + if is_player: + actor = self.scene.Player( + character=character, agent=instance.get_agent("conversation") + ) + else: + actor = self.scene.Actor( + character=character, agent=instance.get_agent("conversation") + ) + + await self.scene.add_actor(actor) + + try: + # Apply any character generation templates + if templates: + loading_status("Applying character generation templates") + templates = scene.world_state_manager.template_collection.collect_all( + templates + ) + log.debug("persist_character", applying_templates=templates) + await scene.world_state_manager.apply_templates( + templates.values(), + character_name=character.name, + information=content, + ) + + # if any of the templates are attribute templates, then we no longer 
need to + # generate a character sheet + any_attribute_templates = any( + template.template_type == "character_attribute" + for template in templates.values() + ) + log.debug( + "persist_character", any_attribute_templates=any_attribute_templates + ) + + if ( + any_attribute_templates + and augment_attributes + and generate_attributes + ): + log.debug( + "persist_character", augmenting_attributes=augment_attributes + ) + loading_status("Augmenting character attributes") + additional_attributes = await world_state.extract_character_sheet( + name=name, + text=content, + augmentation_instructions=augment_attributes, + ) + character.base_attributes.update(additional_attributes) + + # Generate a character sheet if there are no attribute templates + if not any_attribute_templates and generate_attributes: + loading_status("Generating character sheet") + log.debug("persist_character", extracting_character_sheet=True) + if not attributes: + attributes = await world_state.extract_character_sheet( + name=name, text=content + ) + else: + attributes = world_state._parse_character_sheet(attributes) + + log.debug("persist_character", attributes=attributes) + character.base_attributes = attributes + + # Generate a description for the character + if not description: + loading_status("Generating character description") + description = await creator.determine_character_description( + character, information=content + ) + character.description = description + log.debug("persist_character", description=description) + + # Generate a dialogue instructions for the character + loading_status("Generating acting instructions") + dialogue_instructions = ( + await creator.determine_character_dialogue_instructions( + character, information=content + ) + ) + character.dialogue_instructions = dialogue_instructions + log.debug("persist_character", dialogue_instructions=dialogue_instructions) + + # Narrate the character's entry if the option is selected + if active and narrate_entry: + loading_status("Narrating character entry") + is_present = await world_state.is_character_present(name) + if not is_present: + await narrator.action_to_narration( + "narrate_character_entry", + emit_message=True, + character=character, + narrative_direction=narrate_entry_direction, + ) + + if assign_voice: + await self.assign_voice_to_character(character) + + # Deactivate the character if not active + if not active: + await deactivate_character(scene, character) + + # Commit the character's details to long term memory + await character.commit_to_memory(memory) + self.scene.emit_status() + self.scene.world_state.emit() + + loading_status.done( + message=f"{character.name} added to scene", status="success" + ) + return character + except GenerationCancelled: + loading_status.done(message="Character creation cancelled", status="idle") + await scene.remove_actor(actor) + except Exception: + loading_status.done(message="Character creation failed", status="error") + await scene.remove_actor(actor) + log.error("Error persisting character", error=traceback.format_exc()) + + @set_processing + async def assign_voice_to_character( + self, character: "Character" + ) -> list[focal.Call]: + tts_agent: "TTSAgent" = instance.get_agent("tts") + if not self.cm_should_assign_voice: + log.debug("assign_voice_to_character", skip=True, reason="not enabled") + return + + vl: voice_library.VoiceLibrary = voice_library.get_instance() + + ready_tts_apis = tts_agent.ready_apis + + voices_global = voice_library.voices_for_apis(ready_tts_apis, vl) + voices_scene = 
voice_library.voices_for_apis( + ready_tts_apis, self.scene.voice_library + ) + + voices = voices_global + voices_scene + + if not voices: + log.debug( + "assign_voice_to_character", skip=True, reason="no voices available" + ) + return + + voice_candidates = { + voice.id: VoiceCandidate(**voice.model_dump()) for voice in voices + } + + for scene_character in self.scene.all_characters: + if scene_character.voice: + voice_candidates[scene_character.voice.id].used = True + + async def assign_voice(voice_id: str): + voice = vl.get_voice(voice_id) or self.scene.voice_library.get_voice( + voice_id + ) + if not voice: + log.error( + "assign_voice_to_character", + skip=True, + reason="voice not found", + voice_id=voice_id, + ) + return + await set_voice(character, voice, auto=True) + await self.log_action( + f"Assigned voice `{voice.label}` to `{character.name}`", + "Assigned voice", + console_only=True, + ) + + focal_handler = focal.Focal( + self.client, + callbacks=[ + focal.Callback( + name="assign_voice", + arguments=[focal.Argument(name="voice_id", type="str")], + fn=assign_voice, + ), + ], + max_calls=1, + character=character, + voices=list(voice_candidates.values()), + scene=self.scene, + narrator_voice=tts_agent.narrator_voice, + ) + + await focal_handler.request("director.cm-assign-voice") + + log.debug("assign_voice_to_character", calls=focal_handler.state.calls) + + return focal_handler.state.calls diff --git a/src/talemate/agents/director/guide.py b/src/talemate/agents/director/guide.py index 34df323f..786f90ff 100644 --- a/src/talemate/agents/director/guide.py +++ b/src/talemate/agents/director/guide.py @@ -231,7 +231,9 @@ class GuideSceneMixin: if cached_guidance: if not analysis: return cached_guidance.get("guidance") - elif cached_guidance.get("fp") == self.context_fingerpint(extra=[analysis]): + elif cached_guidance.get("fp") == self.context_fingerprint( + extra=[analysis] + ): return cached_guidance.get("guidance") return None @@ -250,7 +252,7 @@ class GuideSceneMixin: self.set_scene_states( **{ key: { - "fp": self.context_fingerpint(extra=[analysis]), + "fp": self.context_fingerprint(extra=[analysis]), "guidance": guidance, "analysis_type": analysis_type, "character": character.name if character else None, diff --git a/src/talemate/agents/director/nodes.py b/src/talemate/agents/director/nodes.py index 569826e6..3e3fe784 100644 --- a/src/talemate/agents/director/nodes.py +++ b/src/talemate/agents/director/nodes.py @@ -7,7 +7,7 @@ from talemate.game.engine.nodes.core import ( ) from talemate.game.engine.nodes.registry import register from talemate.game.engine.nodes.agent import AgentSettingsNode, AgentNode - +from talemate.character import Character TYPE_CHOICES.extend( [ @@ -77,3 +77,90 @@ class PersistCharacter(AgentNode): ) self.set_output_values({"state": state, "character": character}) + + +@register("agents/director/AssignVoice") +class AssignVoice(AgentNode): + """ + Assigns a voice to a character. 
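
`assign_voice_to_character` above drives the model through the `focal` callback mechanism: a single `assign_voice(voice_id)` callback is registered, `max_calls=1` caps the exchange, the prompt is rendered via `focal_handler.request("director.cm-assign-voice")`, and the results are read back from `focal_handler.state.calls`. A standalone sketch of that dispatch idea, using stand-in classes rather than the real `talemate.game.focal` API (the voice id in the demo is made up):

import asyncio
from dataclasses import dataclass, field
from typing import Any, Awaitable, Callable


@dataclass
class Callback:
    name: str
    fn: Callable[..., Awaitable[Any]]


@dataclass
class CallRecord:
    name: str
    arguments: dict
    result: Any = None


@dataclass
class MiniFocal:
    callbacks: dict[str, Callback]
    max_calls: int = 1
    calls: list[CallRecord] = field(default_factory=list)

    async def dispatch(self, requested: list[tuple[str, dict]]) -> list[CallRecord]:
        # The real agent renders the prompt template and parses the model's
        # reply into calls; here the parsed calls come in directly so the
        # dispatch/record step stands on its own.
        for name, kwargs in requested[: self.max_calls]:
            callback = self.callbacks.get(name)
            if callback is None:
                continue
            record = CallRecord(name=name, arguments=kwargs)
            record.result = await callback.fn(**kwargs)
            self.calls.append(record)
        return self.calls


async def demo() -> None:
    async def assign_voice(voice_id: str) -> str:
        # In the agent this resolves voice_id against the global and scene
        # voice libraries and then calls set_voice(character, voice, auto=True).
        return f"assigned {voice_id}"

    focal = MiniFocal(callbacks={"assign_voice": Callback("assign_voice", assign_voice)})
    calls = await focal.dispatch([("assign_voice", {"voice_id": "example-voice-id"})])
    print(calls)


asyncio.run(demo())
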
+ """ + + _agent_name: ClassVar[str] = "director" + + def __init__(self, title="Assign Voice", **kwargs): + super().__init__(title=title, **kwargs) + + def setup(self): + self.add_input("state") + self.add_input("character", socket_type="character") + + self.add_output("state") + self.add_output("character", socket_type="character") + self.add_output("voice", socket_type="tts/voice") + + async def run(self, state: GraphState): + character: "Character" = self.require_input("character") + + await self.agent.assign_voice_to_character(character) + + voice = character.voice + + self.set_output_values({"state": state, "character": character, "voice": voice}) + + +@register("agents/director/LogAction") +class LogAction(AgentNode): + """ + Logs an action to the console. + """ + + _agent_name: ClassVar[str] = "director" + + class Fields: + action = PropertyField( + name="action", + type="str", + description="The action to log", + default="", + ) + action_description = PropertyField( + name="action_description", + type="str", + description="The description of the action", + default="", + ) + console_only = PropertyField( + name="console_only", + type="bool", + description="Whether to log the action to the console only", + default=False, + ) + + def __init__(self, title="Log Director Action", **kwargs): + super().__init__(title=title, **kwargs) + + def setup(self): + self.add_input("state") + self.add_input("action", socket_type="str") + self.add_input("action_description", socket_type="str") + self.add_input("console_only", socket_type="bool", optional=True) + + self.set_property("action", "") + self.set_property("action_description", "") + self.set_property("console_only", False) + + self.add_output("state") + + async def run(self, state: GraphState): + state = self.require_input("state") + action = self.require_input("action") + action_description = self.require_input("action_description") + console_only = self.normalized_input_value("console_only") or False + + await self.agent.log_action( + action=action, + action_description=action_description, + console_only=console_only, + ) + + self.set_output_values({"state": state}) diff --git a/src/talemate/agents/director/websocket_handler.py b/src/talemate/agents/director/websocket_handler.py index b8617a62..0c7451a0 100644 --- a/src/talemate/agents/director/websocket_handler.py +++ b/src/talemate/agents/director/websocket_handler.py @@ -5,8 +5,9 @@ from typing import TYPE_CHECKING from talemate.instance import get_agent from talemate.server.websocket_plugin import Plugin -from talemate.context import interaction +from talemate.context import interaction, handle_generation_cancelled from talemate.status import set_loading +from talemate.exceptions import GenerationCancelled if TYPE_CHECKING: from talemate.tale_mate import Scene @@ -45,6 +46,12 @@ class PersistCharacterPayload(pydantic.BaseModel): content: str = "" description: str = "" + is_player: bool = False + + +class AssignVoiceToCharacterPayload(pydantic.BaseModel): + character_name: str + class DirectorWebsocketHandler(Plugin): """ @@ -105,7 +112,13 @@ class DirectorWebsocketHandler(Plugin): async def handle_task_done(task): if task.exception(): - log.error("Error persisting character", error=task.exception()) + exc = task.exception() + log.error("Error persisting character", error=exc) + + # Handle GenerationCancelled properly to reset cancel_requested flag + if isinstance(exc, GenerationCancelled): + handle_generation_cancelled(exc) + await self.signal_operation_failed("Error persisting 
character") else: self.websocket_handler.queue_put( @@ -118,3 +131,63 @@ class DirectorWebsocketHandler(Plugin): await self.signal_operation_done() task.add_done_callback(lambda task: asyncio.create_task(handle_task_done(task))) + + async def handle_assign_voice_to_character(self, data: dict): + """ + Assign a voice to a character using the director agent + """ + try: + payload = AssignVoiceToCharacterPayload(**data) + except pydantic.ValidationError as e: + await self.signal_operation_failed(str(e)) + return + + scene: "Scene" = self.scene + if not scene: + await self.signal_operation_failed("No scene active") + return + + character = scene.get_character(payload.character_name) + if not character: + await self.signal_operation_failed( + f"Character '{payload.character_name}' not found" + ) + return + + character.voice = None + + # Add as asyncio task + task = asyncio.create_task(self.director.assign_voice_to_character(character)) + + async def handle_task_done(task): + if task.exception(): + exc = task.exception() + log.error("Error assigning voice to character", error=exc) + + # Handle GenerationCancelled properly to reset cancel_requested flag + if isinstance(exc, GenerationCancelled): + handle_generation_cancelled(exc) + + self.websocket_handler.queue_put( + { + "type": self.router, + "action": "assign_voice_to_character_failed", + "character_name": payload.character_name, + "error": str(exc), + } + ) + await self.signal_operation_failed( + f"Error assigning voice to character: {exc}" + ) + else: + self.websocket_handler.queue_put( + { + "type": self.router, + "action": "assign_voice_to_character_done", + "character_name": payload.character_name, + } + ) + await self.signal_operation_done() + self.scene.emit_status() + + task.add_done_callback(lambda task: asyncio.create_task(handle_task_done(task))) diff --git a/src/talemate/agents/editor/__init__.py b/src/talemate/agents/editor/__init__.py index f86c9f27..9719d97c 100644 --- a/src/talemate/agents/editor/__init__.py +++ b/src/talemate/agents/editor/__init__.py @@ -6,6 +6,7 @@ import structlog import talemate.emit.async_signals import talemate.util as util +from talemate.client import ClientBase from talemate.prompts import Prompt from talemate.agents.base import Agent, AgentAction, AgentActionConfig, set_processing @@ -86,7 +87,7 @@ class EditorAgent( RevisionMixin.add_actions(actions) return actions - def __init__(self, client, **kwargs): + def __init__(self, client: ClientBase | None = None, **kwargs): self.client = client self.is_enabled = True self.actions = EditorAgent.init_actions() diff --git a/src/talemate/agents/editor/websocket_handler.py b/src/talemate/agents/editor/websocket_handler.py index 194d4e5a..75107c26 100644 --- a/src/talemate/agents/editor/websocket_handler.py +++ b/src/talemate/agents/editor/websocket_handler.py @@ -60,5 +60,8 @@ class EditorWebsocketHandler(Plugin): character=character, ) revised = await editor.revision_revise(info) + if isinstance(message, CharacterMessage): + if not revised.startswith(character.name + ":"): + revised = f"{character.name}: {revised}" scene.edit_message(message.id, revised) diff --git a/src/talemate/agents/memory/__init__.py b/src/talemate/agents/memory/__init__.py index 446d7a92..897def6f 100644 --- a/src/talemate/agents/memory/__init__.py +++ b/src/talemate/agents/memory/__init__.py @@ -23,10 +23,9 @@ from talemate.agents.base import ( AgentDetail, set_processing, ) -from talemate.config import load_config +from talemate.config.schema import EmbeddingFunctionPreset from 
talemate.context import scene_is_loading, active_scene from talemate.emit import emit -from talemate.emit.signals import handlers import talemate.emit.async_signals as async_signals from talemate.agents.memory.context import memory_request, MemoryRequest from talemate.agents.memory.exceptions import ( @@ -107,14 +106,13 @@ class MemoryAgent(Agent): } return actions - def __init__(self, scene, **kwargs): + def __init__(self, **kwargs): self.db = None - self.scene = scene self.memory_tracker = {} - self.config = load_config() self._ready_to_add = False - handlers["config_saved"].connect(self.on_config_saved) + async_signals.get("config.changed").connect(self.on_config_changed) + async_signals.get("client.embeddings_available").connect( self.on_client_embeddings_available ) @@ -136,28 +134,29 @@ class MemoryAgent(Agent): @property def get_presets(self): - def _label(embedding: dict): - prefix = ( - embedding["client"] if embedding["client"] else embedding["embeddings"] - ) - if embedding["model"]: - return f"{prefix}: {embedding['model']}" + def _label(embedding: EmbeddingFunctionPreset): + prefix = embedding.client if embedding.client else embedding.embeddings + if embedding.model: + return f"{prefix}: {embedding.model}" else: return f"{prefix}" return [ {"value": k, "label": _label(v)} - for k, v in self.config.get("presets", {}).get("embeddings", {}).items() + for k, v in self.config.presets.embeddings.items() ] @property def embeddings_config(self): _embeddings = self.actions["_config"].config["embeddings"].value - return self.config.get("presets", {}).get("embeddings", {}).get(_embeddings, {}) + return self.config.presets.embeddings.get(_embeddings) @property def embeddings(self): - return self.embeddings_config.get("embeddings", "sentence-transformer") + try: + return self.embeddings_config.embeddings + except AttributeError: + return None @property def using_openai_embeddings(self): @@ -181,22 +180,31 @@ class MemoryAgent(Agent): @property def embeddings_client(self): - return self.embeddings_config.get("client") + try: + return self.embeddings_config.client + except AttributeError: + return None @property def max_distance(self) -> float: - distance = float(self.embeddings_config.get("distance", 1.0)) - distance_mod = float(self.embeddings_config.get("distance_mod", 1.0)) + distance = float(self.embeddings_config.distance) + distance_mod = float(self.embeddings_config.distance_mod) return distance * distance_mod @property def model(self): - return self.embeddings_config.get("model") + try: + return self.embeddings_config.model + except AttributeError: + return None @property def distance_function(self): - return self.embeddings_config.get("distance_function", "l2") + try: + return self.embeddings_config.distance_function + except AttributeError: + return None @property def device(self) -> str: @@ -204,7 +212,10 @@ class MemoryAgent(Agent): @property def trust_remote_code(self) -> bool: - return self.embeddings_config.get("trust_remote_code", False) + try: + return self.embeddings_config.trust_remote_code + except AttributeError: + return False @property def fingerprint(self) -> str: @@ -241,7 +252,7 @@ class MemoryAgent(Agent): if self.using_sentence_transformer_embeddings and not self.model: self.actions["_config"].config["embeddings"].value = "default" - if not scene or not scene.get_helper("memory"): + if not scene or not scene.active: return self.close_db(scene) @@ -255,7 +266,14 @@ class MemoryAgent(Agent): self.actions["_config"].config["embeddings"].choices = self.get_presets 
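The refactored properties above read embedding settings from typed preset objects and fall back to None when the selected preset no longer exists in config. A minimal, self-contained sketch of that lookup pattern under simplified assumptions (hypothetical EmbeddingPreset/PresetStore names and a reduced field set — not the project's actual schema):

import pydantic


class EmbeddingPreset(pydantic.BaseModel):
    # simplified subset of the fields referenced by the properties above
    embeddings: str = "sentence-transformer"
    model: str | None = None
    client: str | None = None
    distance: float = 1.0
    distance_mod: float = 1.0


class PresetStore:
    def __init__(self, presets: dict[str, EmbeddingPreset], selected: str):
        self.presets = presets
        self.selected = selected

    @property
    def embeddings_config(self) -> EmbeddingPreset | None:
        # returns None when the selected preset was removed from config
        return self.presets.get(self.selected)

    @property
    def max_distance(self) -> float:
        preset = self.embeddings_config
        if preset is None:
            return 1.0
        return preset.distance * preset.distance_mod
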
return self.actions["_config"].config["embeddings"].choices - def on_config_saved(self, event): + async def fix_broken_embeddings(self): + if not self.embeddings_config: + self.actions["_config"].config["embeddings"].value = "default" + await self.emit_status() + await self.handle_embeddings_change() + await self.save_config() + + async def on_config_changed(self, event): loop = asyncio.get_running_loop() openai_key = self.openai_api_key @@ -263,7 +281,6 @@ class MemoryAgent(Agent): old_presets = self.actions["_config"].config["embeddings"].choices.copy() - self.config = load_config() new_presets = self.sync_presets() if fingerprint != self.fingerprint: log.warning( @@ -285,10 +302,13 @@ class MemoryAgent(Agent): if emit_status: loop.run_until_complete(self.emit_status()) + await self.fix_broken_embeddings() + async def on_client_embeddings_available(self, event: "ClientEmbeddingsStatus"): current_embeddings = self.actions["_config"].config["embeddings"].value if current_embeddings == event.client.embeddings_identifier: + event.seen = True return if not self.using_client_api_embeddings or not self.ready: @@ -304,6 +324,7 @@ class MemoryAgent(Agent): await self.emit_status() await self.handle_embeddings_change() await self.save_config() + event.seen = True @set_processing async def set_db(self): @@ -837,7 +858,7 @@ class ChromaDBMemoryAgent(MemoryAgent): @property def openai_api_key(self): - return self.config.get("openai", {}).get("api_key") + return self.config.openai.api_key @property def embedding_function(self) -> Callable: diff --git a/src/talemate/agents/narrator/__init__.py b/src/talemate/agents/narrator/__init__.py index f8bcbc52..6d0e8910 100644 --- a/src/talemate/agents/narrator/__init__.py +++ b/src/talemate/agents/narrator/__init__.py @@ -138,11 +138,6 @@ class NarratorAgent(MemoryRAGMixin, Agent): ), }, ), - "auto_break_repetition": AgentAction( - enabled=True, - label="Auto Break Repetition", - description="Will attempt to automatically break AI repetition.", - ), "content": AgentAction( enabled=True, can_be_disabled=False, @@ -210,7 +205,7 @@ class NarratorAgent(MemoryRAGMixin, Agent): def __init__( self, - client: client.TaleMateClient, + client: client.ClientBase | None = None, **kwargs, ): self.client = client @@ -753,9 +748,6 @@ class NarratorAgent(MemoryRAGMixin, Agent): def allow_repetition_break( self, kind: str, agent_function_name: str, auto: bool = False ): - if auto and not self.actions["auto_break_repetition"].enabled: - return False - return True def set_generation_overrides(self, prompt_param: dict): diff --git a/src/talemate/agents/summarize/__init__.py b/src/talemate/agents/summarize/__init__.py index f111e475..1ee6a010 100644 --- a/src/talemate/agents/summarize/__init__.py +++ b/src/talemate/agents/summarize/__init__.py @@ -16,7 +16,7 @@ from talemate.scene_message import ( ReinforcementMessage, ) from talemate.world_state.templates import GenerationOptions - +from talemate.client import ClientBase from talemate.agents.base import ( Agent, AgentAction, @@ -34,6 +34,7 @@ from talemate.history import ArchiveEntry from .analyze_scene import SceneAnalyzationMixin from .context_investigation import ContextInvestigationMixin from .layered_history import LayeredHistoryMixin +from .tts_utils import TTSUtilsMixin if TYPE_CHECKING: from talemate.tale_mate import Character @@ -71,6 +72,7 @@ class SummarizeAgent( ContextInvestigationMixin, # Needs to be after ContextInvestigationMixin so signals are connected in the right order SceneAnalyzationMixin, + TTSUtilsMixin, 
Agent, ): """ @@ -129,7 +131,7 @@ class SummarizeAgent( ContextInvestigationMixin.add_actions(actions) return actions - def __init__(self, client, **kwargs): + def __init__(self, client: ClientBase | None = None, **kwargs): self.client = client self.actions = SummarizeAgent.init_actions() diff --git a/src/talemate/agents/summarize/analyze_scene.py b/src/talemate/agents/summarize/analyze_scene.py index f9012d8d..29431a30 100644 --- a/src/talemate/agents/summarize/analyze_scene.py +++ b/src/talemate/agents/summarize/analyze_scene.py @@ -16,12 +16,29 @@ from talemate.agents.conversation import ConversationAgentEmission from talemate.agents.narrator import NarratorAgentEmission from talemate.agents.context import active_agent from talemate.agents.base import RagBuildSubInstructionEmission +from contextvars import ContextVar if TYPE_CHECKING: from talemate.tale_mate import Character log = structlog.get_logger() +## CONTEXT + +scene_analysis_disabled_context = ContextVar("scene_analysis_disabled", default=False) + + +class SceneAnalysisDisabled: + """ + Context manager to disable scene analysis during specific agent actions. + """ + + def __enter__(self): + self.token = scene_analysis_disabled_context.set(True) + + def __exit__(self, _exc_type, _exc_value, _traceback): + scene_analysis_disabled_context.reset(self.token) + talemate.emit.async_signals.register( "agent.summarization.scene_analysis.before", @@ -184,6 +201,16 @@ class SceneAnalyzationMixin: if not self.analyze_scene: return + try: + if scene_analysis_disabled_context.get(): + log.debug( + "on_inject_instructions: scene analysis disabled through context", + emission=emission, + ) + return + except LookupError: + pass + analyze_scene_for_type = getattr(self, f"analyze_scene_for_{emission_type}") if not analyze_scene_for_type: @@ -259,7 +286,14 @@ class SceneAnalyzationMixin: if not cached_analysis: return None - fingerprint = self.context_fingerpint() + fingerprint = self.context_fingerprint() + + log.debug( + "get_cached_analysis", + fingerprint=fingerprint, + cached_analysis_fp=cached_analysis.get("fp"), + match=cached_analysis.get("fp") == fingerprint, + ) if cached_analysis.get("fp") == fingerprint: return cached_analysis["guidance"] @@ -271,7 +305,7 @@ class SceneAnalyzationMixin: Sets the cached analysis for the given type. """ - fingerprint = self.context_fingerpint() + fingerprint = self.context_fingerprint() self.set_scene_states( **{ diff --git a/src/talemate/agents/summarize/tts_utils.py b/src/talemate/agents/summarize/tts_utils.py new file mode 100644 index 00000000..7c2caeb2 --- /dev/null +++ b/src/talemate/agents/summarize/tts_utils.py @@ -0,0 +1,59 @@ +import structlog +from talemate.agents.base import ( + set_processing, +) +from talemate.prompts import Prompt +from talemate.status import set_loading +from talemate.util.dialogue import separate_dialogue_from_exposition + +log = structlog.get_logger("talemate.agents.summarize.tts_utils") + + +class TTSUtilsMixin: + """ + Summarizer Mixin for text-to-speech utilities. + """ + + @set_loading("Preparing TTS context") + @set_processing + async def markup_context_for_tts(self, text: str) -> str: + """ + Markup the context for text-to-speech. + """ + + original_text = text + + log.debug("Markup context for TTS", text=text) + + # if there are no quotes in the text, there is nothing to separate + if '"' not in text: + return original_text + + # here we separate dialogue from exposition because into + # obvious segments. 
It seems to have a positive effect on some
+        # LLMs returning the complete text.
+        separate_chunks = separate_dialogue_from_exposition(text)
+
+        numbered_chunks = []
+        for i, chunk in enumerate(separate_chunks):
+            numbered_chunks.append(f"[{i + 1}] {chunk.text.strip()}")
+
+        text = "\n".join(numbered_chunks)
+
+        response = await Prompt.request(
+            "summarizer.markup-context-for-tts",
+            self.client,
+            "investigate_1024",
+            vars={
+                "text": text,
+                "max_tokens": self.client.max_token_length,
+                "scene": self.scene,
+            },
+        )
+
+        try:
+            # NOTE: assumes the template wraps its output in <markup> tags;
+            # adjust the delimiter if the template instructs the LLM to use
+            # a different tag.
+            response = response.split("<markup>")[1].split("</markup>")[0].strip()
+            return response
+        except IndexError:
+            log.error("Failed to extract markup from response", response=response)
+            return original_text
diff --git a/src/talemate/agents/tts.py b/src/talemate/agents/tts.py
deleted file mode 100644
index dc4575aa..00000000
--- a/src/talemate/agents/tts.py
+++ /dev/null
@@ -1,670 +0,0 @@
-from __future__ import annotations
-
-import asyncio
-import base64
-import functools
-import io
-import os
-import tempfile
-import time
-import uuid
-from typing import Union
-
-import httpx
-import nltk
-import pydantic
-import structlog
-from nltk.tokenize import sent_tokenize
-from openai import AsyncOpenAI
-
-import talemate.config as config
-import talemate.emit.async_signals
-import talemate.instance as instance
-from talemate.emit import emit
-from talemate.emit.signals import handlers
-from talemate.events import GameLoopNewMessageEvent
-from talemate.scene_message import CharacterMessage, NarratorMessage
-
-from .base import (
-    Agent,
-    AgentAction,
-    AgentActionConditional,
-    AgentActionConfig,
-    AgentDetail,
-    set_processing,
-)
-from .registry import register
-
-try:
-    from TTS.api import TTS
-except ImportError:
-    TTS = None
-
-log = structlog.get_logger("talemate.agents.tts")  #
-
-if not TTS:
-    # TTS installation is massive and requires a lot of dependencies
-    # so we don't want to require it unless the user wants to use it
-    log.info(
-        "TTS (local) requires the TTS package, please install with `pip install TTS` if you want to use the local api"
-    )
-
-
-def parse_chunks(text: str) -> list[str]:
-    """
-    Takes a string and splits it into chunks based on punctuation.
-
-    In case of an error it will return the original text as a single chunk and
-    the error will be logged.
-    """
-
-    try:
-        text = text.replace("...", "__ellipsis__")
-        chunks = sent_tokenize(text)
-        cleaned_chunks = []
-
-        for chunk in chunks:
-            chunk = chunk.replace("*", "")
-            if not chunk:
-                continue
-            cleaned_chunks.append(chunk)
-
-        for i, chunk in enumerate(cleaned_chunks):
-            chunk = chunk.replace("__ellipsis__", "...")
-            cleaned_chunks[i] = chunk
-
-        return cleaned_chunks
-    except Exception as e:
-        log.error("chunking error", error=e, text=text)
-        return [text.replace("__ellipsis__", "...").replace("*", "")]
-
-
-def clean_quotes(chunk: str):
-    # if there is an uneven number of quotes, remove the last one if its
If its in the middle, add a quote to the end - if chunk.count('"') % 2 == 1: - if chunk.endswith('"'): - chunk = chunk[:-1] - else: - chunk += '"' - - return chunk - - -def rejoin_chunks(chunks: list[str], chunk_size: int = 250): - """ - Will combine chunks split by punctuation into a single chunk until - max chunk size is reached - """ - - joined_chunks = [] - - current_chunk = "" - - for chunk in chunks: - if len(current_chunk) + len(chunk) > chunk_size: - joined_chunks.append(clean_quotes(current_chunk)) - current_chunk = "" - - current_chunk += chunk - - if current_chunk: - joined_chunks.append(clean_quotes(current_chunk)) - return joined_chunks - - -class Voice(pydantic.BaseModel): - value: str - label: str - - -class VoiceLibrary(pydantic.BaseModel): - api: str - voices: list[Voice] = pydantic.Field(default_factory=list) - last_synced: float = None - - -@register() -class TTSAgent(Agent): - """ - Text to speech agent - """ - - agent_type = "tts" - verbose_name = "Voice" - requires_llm_client = False - essential = False - - @classmethod - def config_options(cls, agent=None): - config_options = super().config_options(agent=agent) - - if agent: - config_options["actions"]["_config"]["config"]["voice_id"]["choices"] = [ - voice.model_dump() for voice in agent.list_voices_sync() - ] - - return config_options - - def __init__(self, **kwargs): - self.is_enabled = False # - - try: - nltk.data.find("tokenizers/punkt") - except LookupError: - try: - nltk.download("punkt", quiet=True) - except Exception as e: - log.error("nltk download error", error=e) - except Exception as e: - log.error("nltk find error", error=e) - - self.voices = { - "elevenlabs": VoiceLibrary(api="elevenlabs"), - "tts": VoiceLibrary(api="tts"), - "openai": VoiceLibrary(api="openai"), - } - self.config = config.load_config() - self.playback_done_event = asyncio.Event() - self.preselect_voice = None - self.actions = { - "_config": AgentAction( - enabled=True, - label="Configure", - description="TTS agent configuration", - config={ - "api": AgentActionConfig( - type="text", - choices=[ - {"value": "tts", "label": "TTS (Local)"}, - {"value": "elevenlabs", "label": "Eleven Labs"}, - {"value": "openai", "label": "OpenAI"}, - ], - value="tts", - label="API", - description="Which TTS API to use", - onchange="emit", - ), - "voice_id": AgentActionConfig( - type="text", - value="default", - label="Narrator Voice", - description="Voice ID/Name to use for TTS", - choices=[], - ), - "generate_for_player": AgentActionConfig( - type="bool", - value=False, - label="Generate for player", - description="Generate audio for player messages", - ), - "generate_for_npc": AgentActionConfig( - type="bool", - value=True, - label="Generate for NPCs", - description="Generate audio for NPC messages", - ), - "generate_for_narration": AgentActionConfig( - type="bool", - value=True, - label="Generate for narration", - description="Generate audio for narration messages", - ), - "generate_chunks": AgentActionConfig( - type="bool", - value=False, - label="Split generation", - description="Generate audio chunks for each sentence - will be much more responsive but may loose context to inform inflection", - ), - }, - ), - "openai": AgentAction( - enabled=True, - container=True, - icon="mdi-server-outline", - condition=AgentActionConditional( - attribute="_config.config.api", value="openai" - ), - label="OpenAI", - config={ - "model": AgentActionConfig( - type="text", - value="tts-1", - choices=[ - {"value": "tts-1", "label": "TTS 1"}, - {"value": "tts-1-hd", 
"label": "TTS 1 HD"}, - ], - label="Model", - description="TTS model to use", - ), - }, - ), - } - - self.actions["_config"].model_dump() - handlers["config_saved"].connect(self.on_config_saved) - - @property - def enabled(self): - return self.is_enabled - - @property - def has_toggle(self): - return True - - @property - def experimental(self): - return False - - @property - def not_ready_reason(self) -> str: - """ - Returns a string explaining why the agent is not ready - """ - - if self.ready: - return "" - - if self.api == "tts": - if not TTS: - return "TTS not installed" - - elif self.requires_token and not self.token: - return "No API token" - - elif not self.default_voice_id: - return "No voice selected" - - @property - def agent_details(self): - details = { - "api": AgentDetail( - icon="mdi-server-outline", - value=self.api_label, - description="The backend to use for TTS", - ).model_dump(), - } - - if self.ready and self.enabled: - details["voice"] = AgentDetail( - icon="mdi-account-voice", - value=self.voice_id_to_label(self.default_voice_id) or "", - description="The voice to use for TTS", - color="info", - ).model_dump() - elif self.enabled: - details["error"] = AgentDetail( - icon="mdi-alert", - value=self.not_ready_reason, - description=self.not_ready_reason, - color="error", - ).model_dump() - - return details - - @property - def api(self): - return self.actions["_config"].config["api"].value - - @property - def api_label(self): - choices = self.actions["_config"].config["api"].choices - api = self.api - for choice in choices: - if choice["value"] == api: - return choice["label"] - return api - - @property - def token(self): - api = self.api - return self.config.get(api, {}).get("api_key") - - @property - def default_voice_id(self): - return self.actions["_config"].config["voice_id"].value - - @property - def requires_token(self): - return self.api != "tts" - - @property - def ready(self): - if self.api == "tts": - if not TTS: - return False - return True - - return (not self.requires_token or self.token) and self.default_voice_id - - @property - def status(self): - if not self.enabled: - return "disabled" - if self.ready: - if getattr(self, "processing_bg", 0) > 0: - return "busy_bg" if not getattr(self, "processing", False) else "busy" - return "active" if not getattr(self, "processing", False) else "busy" - if self.requires_token and not self.token: - return "error" - if self.api == "tts": - if not TTS: - return "error" - return "uninitialized" - - @property - def max_generation_length(self): - if self.api == "elevenlabs": - return 1024 - elif self.api == "coqui": - return 250 - - return 250 - - @property - def openai_api_key(self): - return self.config.get("openai", {}).get("api_key") - - async def apply_config(self, *args, **kwargs): - try: - api = kwargs["actions"]["_config"]["config"]["api"]["value"] - except KeyError: - api = self.api - - api_changed = api != self.api - - # log.debug( - # "apply_config", - # api=api, - # api_changed=api != self.api, - # current_api=self.api, - # args=args, - # kwargs=kwargs, - # ) - - try: - self.preselect_voice = kwargs["actions"]["_config"]["config"]["voice_id"][ - "value" - ] - except KeyError: - self.preselect_voice = self.default_voice_id - - await super().apply_config(*args, **kwargs) - - if api_changed: - try: - self.actions["_config"].config["voice_id"].value = ( - self.voices[api].voices[0].value - ) - except IndexError: - self.actions["_config"].config["voice_id"].value = "" - - def connect(self, scene): - 
super().connect(scene) - talemate.emit.async_signals.get("game_loop_new_message").connect( - self.on_game_loop_new_message - ) - - def on_config_saved(self, event): - config = event.data - self.config = config - instance.emit_agent_status(self.__class__, self) - - async def on_game_loop_new_message(self, emission: GameLoopNewMessageEvent): - """ - Called when a conversation is generated - """ - - if not self.enabled or not self.ready: - return - - if not isinstance(emission.message, (CharacterMessage, NarratorMessage)): - return - - if ( - isinstance(emission.message, NarratorMessage) - and not self.actions["_config"].config["generate_for_narration"].value - ): - return - - if isinstance(emission.message, CharacterMessage): - if ( - emission.message.source == "player" - and not self.actions["_config"].config["generate_for_player"].value - ): - return - elif ( - emission.message.source == "ai" - and not self.actions["_config"].config["generate_for_npc"].value - ): - return - - if isinstance(emission.message, CharacterMessage): - character_prefix = emission.message.split(":", 1)[0] - else: - character_prefix = "" - - log.info( - "reactive tts", message=emission.message, character_prefix=character_prefix - ) - - await self.generate(str(emission.message).replace(character_prefix + ": ", "")) - - def voice(self, voice_id: str) -> Union[Voice, None]: - for voice in self.voices[self.api].voices: - if voice.value == voice_id: - return voice - return None - - def voice_id_to_label(self, voice_id: str): - for voice in self.voices[self.api].voices: - if voice.value == voice_id: - return voice.label - return None - - def list_voices_sync(self): - loop = asyncio.get_event_loop() - return loop.run_until_complete(self.list_voices()) - - async def list_voices(self): - if self.requires_token and not self.token: - return [] - - library = self.voices[self.api] - - # TODO: allow re-syncing voices - if library.last_synced: - return library.voices - - list_fn = getattr(self, f"_list_voices_{self.api}") - log.info("Listing voices", api=self.api) - - library.voices = await list_fn() - library.last_synced = time.time() - - if self.preselect_voice: - if self.voice(self.preselect_voice): - self.actions["_config"].config["voice_id"].value = self.preselect_voice - self.preselect_voice = None - - # if the current voice cannot be found, reset it - if not self.voice(self.default_voice_id): - self.actions["_config"].config["voice_id"].value = "" - - # set loading to false - return library.voices - - @set_processing - async def generate(self, text: str): - if not self.enabled or not self.ready or not text: - return - - self.playback_done_event.set() - - generate_fn = getattr(self, f"_generate_{self.api}") - - if self.actions["_config"].config["generate_chunks"].value: - chunks = parse_chunks(text) - chunks = rejoin_chunks(chunks) - else: - chunks = parse_chunks(text) - chunks = rejoin_chunks(chunks, chunk_size=self.max_generation_length) - - # Start generating audio chunks in the background - generation_task = asyncio.create_task(self.generate_chunks(generate_fn, chunks)) - await self.set_background_processing(generation_task) - - # Wait for both tasks to complete - # await asyncio.gather(generation_task) - - async def generate_chunks(self, generate_fn, chunks): - for chunk in chunks: - chunk = chunk.replace("*", "").strip() - log.info("Generating audio", api=self.api, chunk=chunk) - audio_data = await generate_fn(chunk) - self.play_audio(audio_data) - - def play_audio(self, audio_data): - # play audio through the python 
audio player - # play(audio_data) - - emit( - "audio_queue", - data={"audio_data": base64.b64encode(audio_data).decode("utf-8")}, - ) - - self.playback_done_event.set() # Signal that playback is finished - - # LOCAL - - async def _generate_tts(self, text: str) -> Union[bytes, None]: - if not TTS: - return - - tts_config = self.config.get("tts", {}) - model = tts_config.get("model") - device = tts_config.get("device", "cpu") - - log.debug("tts local", model=model, device=device) - - if not hasattr(self, "tts_instance"): - self.tts_instance = TTS(model).to(device) - - tts = self.tts_instance - - loop = asyncio.get_event_loop() - - voice = self.voice(self.default_voice_id) - - with tempfile.TemporaryDirectory() as temp_dir: - file_path = os.path.join(temp_dir, f"tts-{uuid.uuid4()}.wav") - - await loop.run_in_executor( - None, - functools.partial( - tts.tts_to_file, - text=text, - speaker_wav=voice.value, - language="en", - file_path=file_path, - ), - ) - # tts.tts_to_file(text=text, speaker_wav=voice.value, language="en", file_path=file_path) - - with open(file_path, "rb") as f: - return f.read() - - async def _list_voices_tts(self) -> dict[str, str]: - return [ - Voice(**voice) for voice in self.config.get("tts", {}).get("voices", []) - ] - - # ELEVENLABS - - async def _generate_elevenlabs( - self, text: str, chunk_size: int = 1024 - ) -> Union[bytes, None]: - api_key = self.token - if not api_key: - return - - async with httpx.AsyncClient() as client: - url = f"https://api.elevenlabs.io/v1/text-to-speech/{self.default_voice_id}" - headers = { - "Accept": "audio/mpeg", - "Content-Type": "application/json", - "xi-api-key": api_key, - } - data = { - "text": text, - "model_id": self.config.get("elevenlabs", {}).get("model"), - "voice_settings": {"stability": 0.5, "similarity_boost": 0.5}, - } - - response = await client.post(url, json=data, headers=headers, timeout=300) - - if response.status_code == 200: - bytes_io = io.BytesIO() - for chunk in response.iter_bytes(chunk_size=chunk_size): - if chunk: - bytes_io.write(chunk) - - # Put the audio data in the queue for playback - return bytes_io.getvalue() - else: - log.error(f"Error generating audio: {response.text}") - - async def _list_voices_elevenlabs(self) -> dict[str, str]: - url_voices = "https://api.elevenlabs.io/v1/voices" - - voices = [] - - async with httpx.AsyncClient() as client: - headers = { - "Accept": "application/json", - "xi-api-key": self.token, - } - response = await client.get( - url_voices, headers=headers, params={"per_page": 1000} - ) - speakers = response.json()["voices"] - voices.extend( - [ - Voice(value=speaker["voice_id"], label=speaker["name"]) - for speaker in speakers - ] - ) - - # sort by name - voices.sort(key=lambda x: x.label) - - return voices - - # OPENAI - - async def _generate_openai(self, text: str, chunk_size: int = 1024): - client = AsyncOpenAI(api_key=self.openai_api_key) - - model = self.actions["openai"].config["model"].value - - response = await client.audio.speech.create( - model=model, voice=self.default_voice_id, input=text - ) - - bytes_io = io.BytesIO() - for chunk in response.iter_bytes(chunk_size=chunk_size): - if chunk: - bytes_io.write(chunk) - - # Put the audio data in the queue for playback - return bytes_io.getvalue() - - async def _list_voices_openai(self) -> dict[str, str]: - return [ - Voice(value="alloy", label="Alloy"), - Voice(value="echo", label="Echo"), - Voice(value="fable", label="Fable"), - Voice(value="onyx", label="Onyx"), - Voice(value="nova", label="Nova"), - 
Voice(value="shimmer", label="Shimmer"), - ] diff --git a/src/talemate/agents/tts/__init__.py b/src/talemate/agents/tts/__init__.py new file mode 100644 index 00000000..1158510f --- /dev/null +++ b/src/talemate/agents/tts/__init__.py @@ -0,0 +1,995 @@ +from __future__ import annotations + +import asyncio +import base64 +import re +import traceback +from typing import TYPE_CHECKING + +import uuid +from collections import deque + +import structlog +from nltk.tokenize import sent_tokenize + +import talemate.util.dialogue as dialogue_utils +import talemate.emit.async_signals as async_signals +import talemate.instance as instance +from talemate.ux.schema import Note +from talemate.emit import emit +from talemate.events import GameLoopNewMessageEvent +from talemate.scene_message import ( + CharacterMessage, + NarratorMessage, + ContextInvestigationMessage, +) +from talemate.agents.base import ( + Agent, + AgentAction, + AgentActionConfig, + AgentDetail, + AgentActionNote, + set_processing, +) +from talemate.agents.registry import register + +from .schema import ( + APIStatus, + Voice, + VoiceLibrary, + GenerationContext, + Chunk, + VoiceGenerationEmission, +) +from .providers import provider + +import talemate.agents.tts.voice_library as voice_library + +from .elevenlabs import ElevenLabsMixin +from .openai import OpenAIMixin +from .google import GoogleMixin +from .kokoro import KokoroMixin +from .chatterbox import ChatterboxMixin +from .websocket_handler import TTSWebsocketHandler +from .f5tts import F5TTSMixin + +import talemate.agents.tts.nodes as tts_nodes # noqa: F401 + +if TYPE_CHECKING: + from talemate.character import Character, VoiceChangedEvent + from talemate.agents.summarize import SummarizeAgent + from talemate.game.engine.nodes.scene import SceneLoopEvent + +log = structlog.get_logger("talemate.agents.tts") + +HOT_SWAP_NOTIFICATION_TIME = 60 + +VOICE_LIBRARY_NOTE = "Voices are not managed here, but in the voice library which can be accessed through the Talemate application bar at the top. When disabling/enabling APIS, close and open this window to refresh the choices." + +async_signals.register( + "agent.tts.prepare.before", + "agent.tts.prepare.after", + "agent.tts.generate.before", + "agent.tts.generate.after", +) + + +def parse_chunks(text: str) -> list[str]: + """ + Takes a string and splits it into chunks based on punctuation. + + In case of an error it will return the original text as a single chunk and + the error will be logged. + """ + + try: + text = text.replace("*", "") + + # ensure sentence terminators are before quotes + # otherwise the beginning of dialog will bleed into narration + text = re.sub(r'([^.?!]+) "', r'\1. 
"', text) + + text = text.replace("...", "__ellipsis__") + chunks = sent_tokenize(text) + cleaned_chunks = [] + + for chunk in chunks: + if not chunk.strip(): + continue + cleaned_chunks.append(chunk) + + for i, chunk in enumerate(cleaned_chunks): + chunk = chunk.replace("__ellipsis__", "...") + cleaned_chunks[i] = chunk + + return cleaned_chunks + except Exception as e: + log.error("chunking error", error=e, text=text) + return [text.replace("__ellipsis__", "...").replace("*", "")] + + +def rejoin_chunks(chunks: list[str], chunk_size: int = 250): + """ + Will combine chunks split by punctuation into a single chunk until + max chunk size is reached + """ + + joined_chunks = [] + + current_chunk = "" + + for chunk in chunks: + if len(current_chunk) + len(chunk) > chunk_size: + joined_chunks.append(current_chunk) + current_chunk = "" + + current_chunk += chunk + + if current_chunk: + joined_chunks.append(current_chunk) + return joined_chunks + + +@register() +class TTSAgent( + ElevenLabsMixin, + OpenAIMixin, + GoogleMixin, + KokoroMixin, + ChatterboxMixin, + F5TTSMixin, + Agent, +): + """ + Text to speech agent + """ + + agent_type = "tts" + verbose_name = "Voice" + requires_llm_client = False + essential = False + + # websocket handler for frontend voice library management + websocket_handler = TTSWebsocketHandler + + @classmethod + def config_options(cls, agent=None): + config_options = super().config_options(agent=agent) + + if not agent: + return config_options + + narrator_voice_id = config_options["actions"]["_config"]["config"][ + "narrator_voice_id" + ] + + narrator_voice_id["choices"] = cls.narrator_voice_id_choices(agent) + + return config_options + + @classmethod + def narrator_voice_id_choices(cls, agent: "TTSAgent") -> list[dict[str, str]]: + choices = voice_library.voices_for_apis(agent.ready_apis, agent.voice_library) + choices.sort(key=lambda x: x.label) + return [ + { + "label": f"{voice.label} ({voice.provider})", + "value": voice.id, + } + for voice in choices + ] + + @classmethod + def init_actions(cls) -> dict[str, AgentAction]: + actions = { + "_config": AgentAction( + enabled=True, + label="Configure", + description="TTS agent configuration", + config={ + "apis": AgentActionConfig( + type="flags", + value=[ + "kokoro", + ], + label="Enabled APIs", + description="APIs to use for TTS", + choices=[], + ), + "narrator_voice_id": AgentActionConfig( + type="autocomplete", + value="kokoro:am_adam", + label="Narrator Voice", + description="Voice to use for narration", + choices=[], + note=VOICE_LIBRARY_NOTE, + ), + "speaker_separation": AgentActionConfig( + type="text", + value="simple", + label="Speaker separation", + description="How to separate speaker dialogue from exposition", + choices=[ + {"label": "No separation", "value": "none"}, + {"label": "Simple", "value": "simple"}, + {"label": "Mixed", "value": "mixed"}, + {"label": "AI assisted", "value": "ai_assisted"}, + ], + note_on_value={ + "none": AgentActionNote( + type="primary", + text="Character messages will be voiced entirely by the character's voice with a fallback to the narrator voice if the character has no voice selecte. Narrator messages will be voiced exclusively by the narrator voice.", + ), + "simple": AgentActionNote( + type="primary", + text="Exposition and dialogue will be separated in character messages. Narrator messages will be voiced exclusively by the narrator voice. This means", + ), + "mixed": AgentActionNote( + type="primary", + text="A mix of `simple` and `ai_assisted`. 
Character messages are separated into narrator and the character's voice. Narrator messages that have dialogue are analyzed by the Summarizer agent to determine the appropriate speaker(s).", + ), + "ai_assisted": AgentActionNote( + type="primary", + text="Appropriate speaker separation will be attempted based on the content of the message with help from the Summarizer agent. This sends an extra prompt to the LLM to determine the appropriate speaker(s).", + ), + }, + ), + "generate_for_player": AgentActionConfig( + type="bool", + value=False, + label="Auto-generate for player", + description="Generate audio for player messages", + ), + "generate_for_npc": AgentActionConfig( + type="bool", + value=True, + label="Auto-generate for AI characters", + description="Generate audio for NPC messages", + ), + "generate_for_narration": AgentActionConfig( + type="bool", + value=True, + label="Auto-generate for narration", + description="Generate audio for narration messages", + ), + "generate_for_context_investigation": AgentActionConfig( + type="bool", + value=True, + label="Auto-generate for context investigation", + description="Generate audio for context investigation messages", + ), + }, + ), + } + + KokoroMixin.add_actions(actions) + ChatterboxMixin.add_actions(actions) + GoogleMixin.add_actions(actions) + ElevenLabsMixin.add_actions(actions) + OpenAIMixin.add_actions(actions) + F5TTSMixin.add_actions(actions) + + return actions + + def __init__(self, **kwargs): + self.is_enabled = False # tts agent is disabled by default + self.actions = TTSAgent.init_actions() + self.playback_done_event = asyncio.Event() + + # Queue management for voice generation + # Each queue instance gets a unique id so it can later be referenced + # (e.g. for cancellation of all remaining items). + # Only one queue can be active at a time. New generation requests that + # arrive while a queue is processing will be appended to the same + # queue. Once the queue is fully processed it is discarded and a new + # one will be created for subsequent generation requests. + # Queue now holds individual (context, chunk) pairs so interruption can + # happen between chunks even when a single context produced many. 
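A minimal, self-contained sketch of the queue pattern described in the comment above — a single worker task draining a lock-guarded deque, with a per-queue id — using simplified, hypothetical names rather than the agent's actual attributes (those follow immediately below):

import asyncio
import uuid
from collections import deque


class GenerationQueue:
    """Single background worker draining a deque of queued items."""

    def __init__(self):
        self._queue: deque = deque()
        self._queue_id: str | None = None
        self._task: asyncio.Task | None = None
        self._lock = asyncio.Lock()

    async def enqueue(self, items):
        async with self._lock:
            if self._queue_id is None:
                # new queue instance gets its own id
                self._queue_id = str(uuid.uuid4())
            self._queue.extend(items)
            # start the worker only if none is currently running
            if self._task is None or self._task.done():
                self._task = asyncio.create_task(self._drain(self._queue_id))

    async def _drain(self, queue_id: str):
        while True:
            async with self._lock:
                if not self._queue:
                    break
                item = self._queue.popleft()
            # processing happens outside the lock so enqueue() is never blocked
            await self._process(item)
        async with self._lock:
            if queue_id == self._queue_id:
                self._queue_id = None
                self._task = None

    async def _process(self, item):
        await asyncio.sleep(0)  # placeholder for chunk generation + playback
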
+ self._generation_queue: deque[tuple[GenerationContext, Chunk]] = deque() + self._queue_id: str | None = None + self._queue_task: asyncio.Task | None = None + self._queue_lock = asyncio.Lock() + + # general helpers + + @property + def enabled(self): + return self.is_enabled + + @property + def has_toggle(self): + return True + + @property + def experimental(self): + return False + + @property + def voice_library(self) -> VoiceLibrary: + return voice_library.get_instance() + + # config helpers + + @property + def narrator_voice_id(self) -> str: + return self.actions["_config"].config["narrator_voice_id"].value + + @property + def generate_for_player(self) -> bool: + return self.actions["_config"].config["generate_for_player"].value + + @property + def generate_for_npc(self) -> bool: + return self.actions["_config"].config["generate_for_npc"].value + + @property + def generate_for_narration(self) -> bool: + return self.actions["_config"].config["generate_for_narration"].value + + @property + def generate_for_context_investigation(self) -> bool: + return ( + self.actions["_config"].config["generate_for_context_investigation"].value + ) + + @property + def speaker_separation(self) -> str: + return self.actions["_config"].config["speaker_separation"].value + + @property + def apis(self) -> list[str]: + return self.actions["_config"].config["apis"].value + + @property + def all_apis(self) -> list[str]: + return [api["value"] for api in self.actions["_config"].config["apis"].choices] + + @property + def agent_details(self): + details = {} + + self.actions["_config"].config[ + "narrator_voice_id" + ].choices = self.narrator_voice_id_choices(self) + + if not self.enabled: + return details + + used_apis: set[str] = set() + + used_disabled_apis: set[str] = set() + + if self.narrator_voice: + # + + label = self.narrator_voice.label + color = "primary" + used_apis.add(self.narrator_voice.provider) + + if not self.api_enabled(self.narrator_voice.provider): + used_disabled_apis.add(self.narrator_voice.provider) + + if not self.api_ready(self.narrator_voice.provider): + color = "error" + + details["narrator_voice"] = AgentDetail( + icon="mdi-script-text", + value=label, + description="Default voice", + color=color, + ).model_dump() + + scene = getattr(self, "scene", None) + if scene: + for character in scene.characters: + if character.voice: + label = character.voice.label + color = "primary" + used_apis.add(character.voice.provider) + if not self.api_enabled(character.voice.provider): + used_disabled_apis.add(character.voice.provider) + if not self.api_ready(character.voice.provider): + color = "error" + + details[f"{character.name}_voice"] = AgentDetail( + icon="mdi-account-voice", + value=f"{character.name}", + description=f"{character.name}'s voice: {label} ({character.voice.provider})", + color=color, + ).model_dump() + + for api in used_disabled_apis: + details[f"{api}_disabled"] = AgentDetail( + icon="mdi-alert-circle", + value=f"{api} disabled", + description=f"{api} disabled - at least one voice is attempting to use this api but is not enabled", + color="error", + ).model_dump() + + for api in used_apis: + fn = getattr(self, f"{api}_agent_details", None) + if fn: + details.update(fn) + return details + + @property + def status(self): + if not self.enabled: + return "disabled" + if self.ready: + if getattr(self, "processing_bg", 0) > 0: + return "busy_bg" if not getattr(self, "processing", False) else "busy" + return "idle" if not getattr(self, "processing", False) else "busy" + return 
"uninitialized" + + @property + def narrator_voice(self) -> Voice | None: + return self.voice_library.get_voice(self.narrator_voice_id) + + @property + def api_status(self) -> list[APIStatus]: + api_status: list[APIStatus] = [] + + for api in self.all_apis: + not_configured_reason = getattr(self, f"{api}_not_configured_reason", None) + not_configured_action = getattr(self, f"{api}_not_configured_action", None) + api_info: str | None = getattr(self, f"{api}_info", None) + messages: list[Note] = [] + if not_configured_reason: + messages.append( + Note( + text=not_configured_reason, + color="error", + icon="mdi-alert-circle-outline", + actions=[not_configured_action] + if not_configured_action + else None, + ) + ) + if api_info: + messages.append( + Note( + text=api_info.strip(), + color="muted", + icon="mdi-information-outline", + ) + ) + _status = APIStatus( + api=api, + enabled=self.api_enabled(api), + ready=self.api_ready(api), + configured=self.api_configured(api), + messages=messages, + supports_mixing=getattr(self, f"{api}_supports_mixing", False), + provider=provider(api), + default_model=getattr(self, f"{api}_model", None), + model_choices=getattr(self, f"{api}_model_choices", []), + ) + api_status.append(_status) + + # order by api + api_status.sort(key=lambda x: x.api) + + return api_status + + # events + + def connect(self, scene): + super().connect(scene) + async_signals.get("game_loop_new_message").connect( + self.on_game_loop_new_message + ) + async_signals.get("voice_library.update.after").connect( + self.on_voice_library_update + ) + async_signals.get("scene_loop_init_after").connect(self.on_scene_loop_init) + async_signals.get("character.voice_changed").connect( + self.on_character_voice_changed + ) + + async def on_scene_loop_init(self, event: "SceneLoopEvent"): + if not self.enabled or not self.ready or not self.generate_for_narration: + return + + if self.scene.environment == "creative": + return + + content_messages = self.scene.last_message_of_type( + ["character", "narrator", "context_investigation"] + ) + + if content_messages: + # we already have a history, so we don't need to generate TTS for the intro + return + + await self.generate(self.scene.get_intro(), character=None) + + async def on_voice_library_update(self, voice_library: VoiceLibrary): + log.debug("Voice library updated - refreshing narrator voice choices") + self.actions["_config"].config[ + "narrator_voice_id" + ].choices = self.narrator_voice_id_choices(self) + await self.emit_status() + + async def on_game_loop_new_message(self, emission: GameLoopNewMessageEvent): + """ + Called when a conversation is generated + """ + + if self.scene.environment == "creative": + return + + character: Character | None = None + + if not self.enabled or not self.ready: + return + + if not isinstance( + emission.message, + (CharacterMessage, NarratorMessage, ContextInvestigationMessage), + ): + return + + if ( + isinstance(emission.message, NarratorMessage) + and not self.generate_for_narration + ): + return + + if ( + isinstance(emission.message, ContextInvestigationMessage) + and not self.generate_for_context_investigation + ): + return + + if isinstance(emission.message, CharacterMessage): + if emission.message.source == "player" and not self.generate_for_player: + return + elif emission.message.source == "ai" and not self.generate_for_npc: + return + + character = self.scene.get_character(emission.message.character_name) + + if isinstance(emission.message, CharacterMessage): + character_prefix = 
emission.message.split(":", 1)[0] + text_to_generate = str(emission.message).replace( + character_prefix + ": ", "" + ) + elif isinstance(emission.message, ContextInvestigationMessage): + character_prefix = "" + text_to_generate = ( + emission.message.message + ) # Use just the message content, not the title prefix + else: + character_prefix = "" + text_to_generate = str(emission.message) + + log.info( + "reactive tts", message=emission.message, character_prefix=character_prefix + ) + + await self.generate( + text_to_generate, + character=character, + message=emission.message, + ) + + async def on_character_voice_changed(self, event: "VoiceChangedEvent"): + log.debug( + "Character voice changed", character=event.character, voice=event.voice + ) + await self.emit_status() + + # voice helpers + + @property + def ready_apis(self) -> list[str]: + """ + Returns a list of apis that are ready + """ + return [api for api in self.apis if self.api_ready(api)] + + @property + def used_apis(self) -> list[str]: + """ + Returns a list of apis that are in use + + The api is in use if it is the narrator voice or if any of the active characters in the scene use a voice from the api. + """ + return [api for api in self.apis if self.api_used(api)] + + def api_enabled(self, api: str) -> bool: + """ + Returns whether the api is currently in the .apis list, which means it is enabled. + """ + return api in self.apis + + def api_ready(self, api: str) -> bool: + """ + Returns whether the api is ready. + + The api must be enabled and configured. + """ + + if not self.api_enabled(api): + return False + + return self.api_configured(api) + + def api_configured(self, api: str) -> bool: + return getattr(self, f"{api}_configured", True) + + def api_used(self, api: str) -> bool: + """ + Returns whether the narrator or any of the active characters in the scene + use a voice from the given api + + Args: + api (str): The api to check + + Returns: + bool: Whether the api is in use + """ + + if self.narrator_voice and self.narrator_voice.provider == api: + return True + + if not getattr(self, "scene", None): + return False + + for character in self.scene.characters: + if not character.voice: + continue + voice = self.voice_library.get_voice(character.voice.id) + if voice and voice.provider == api: + return True + + return False + + def use_ai_assisted_speaker_separation( + self, + text: str, + message: CharacterMessage + | NarratorMessage + | ContextInvestigationMessage + | None, + ) -> bool: + """ + Returns whether the ai assisted speaker separation should be used for the given text. + """ + try: + if not message and '"' not in text: + return False + + if not message and '"' in text: + return self.speaker_separation in ["ai_assisted", "mixed"] + + if message.source == "player": + return False + + if self.speaker_separation == "ai_assisted": + return True + + if ( + isinstance(message, NarratorMessage) + and self.speaker_separation == "mixed" + ): + return True + + return False + except Exception as e: + log.error( + "Error using ai assisted speaker separation", + error=e, + traceback=traceback.format_exc(), + ) + return False + + # tts markup cache + + async def get_tts_markup_cache(self, text: str) -> str | None: + """ + Returns the cached tts markup for the given text. 
+ """ + fp = hash(text) + cached_markup = self.get_scene_state("tts_markup_cache") + if cached_markup and cached_markup.get("fp") == fp: + return cached_markup.get("markup") + return None + + async def set_tts_markup_cache(self, text: str, markup: str): + fp = hash(text) + self.set_scene_states( + tts_markup_cache={ + "fp": fp, + "markup": markup, + } + ) + + # generation + + @set_processing + async def generate( + self, + text: str, + character: Character | None = None, + force_voice: Voice | None = None, + message: CharacterMessage | NarratorMessage | None = None, + ): + """ + Public entry-point for voice generation. + + The actual audio generation happens sequentially inside a single + background queue. If a queue is currently active, we simply append the + new request to it; if not, we create a new queue (with its own unique + id) and start processing. + """ + if not self.enabled or not self.ready or not text: + return + + self.playback_done_event.set() + + summarizer: "SummarizeAgent" = instance.get_agent("summarizer") + + context = GenerationContext(voice_id=self.narrator_voice_id) + character_voice: Voice = force_voice or self.narrator_voice + + if character and character.voice: + voice = character.voice + if voice and self.api_ready(voice.provider): + character_voice = voice + else: + log.warning( + "Character voice not available", + character=character.name, + voice=character.voice, + ) + + log.debug("Voice routing", character=character, voice=character_voice) + + # initial chunking by separating dialogue from exposition + chunks: list[Chunk] = [] + if self.speaker_separation != "none": + if self.use_ai_assisted_speaker_separation(text, message): + markup = await self.get_tts_markup_cache(text) + if not markup: + log.debug("No markup cache found, generating markup") + markup = await summarizer.markup_context_for_tts(text) + await self.set_tts_markup_cache(text, markup) + else: + log.debug("Using markup cache") + # Use the new markup parser for AI-assisted format + dlg_chunks = dialogue_utils.parse_tts_markup(markup) + else: + # Use the original parser for non-AI-assisted format + dlg_chunks = dialogue_utils.separate_dialogue_from_exposition(text) + + for _dlg_chunk in dlg_chunks: + _voice = ( + character_voice + if _dlg_chunk.type == "dialogue" + else self.narrator_voice + ) + + if _dlg_chunk.speaker is not None: + # speaker name has been identified + _character = self.scene.get_character(_dlg_chunk.speaker) + log.debug( + "Identified speaker", + speaker=_dlg_chunk.speaker, + character=_character, + ) + if ( + _character + and _character.voice + and self.api_ready(_character.voice.provider) + ): + log.debug( + "Using character voice", + character=_character.name, + voice=_character.voice, + ) + _voice = _character.voice + + _api: str = _voice.provider if _voice else self.api + chunk = Chunk( + api=_api, + voice=Voice(**_voice.model_dump()), + model=_voice.provider_model, + generate_fn=getattr(self, f"{_api}_generate"), + prepare_fn=getattr(self, f"{_api}_prepare_chunk", None), + character_name=character.name if character else None, + text=[_dlg_chunk.text], + type=_dlg_chunk.type, + message_id=message.id if message else None, + ) + chunks.append(chunk) + else: + _voice = character_voice if character else self.narrator_voice + _api: str = _voice.provider if _voice else self.api + chunks = [ + Chunk( + api=_api, + voice=Voice(**_voice.model_dump()), + model=_voice.provider_model, + generate_fn=getattr(self, f"{_api}_generate"), + prepare_fn=getattr(self, f"{_api}_prepare_chunk", 
None), + character_name=character.name if character else None, + text=[text], + type="dialogue" if character else "exposition", + message_id=message.id if message else None, + ) + ] + + # second chunking by splitting into chunks of max_generation_length + + for chunk in chunks: + api_chunk_size = getattr(self, f"{chunk.api}_chunk_size", 0) + + log.debug("chunking", api=chunk.api, api_chunk_size=api_chunk_size) + + _text = [] + + max_generation_length = getattr(self, f"{chunk.api}_max_generation_length") + + if api_chunk_size > 0: + max_generation_length = min(max_generation_length, api_chunk_size) + + for _chunk_text in chunk.text: + if len(_chunk_text) <= max_generation_length: + _text.append(_chunk_text) + continue + + _parsed = parse_chunks(_chunk_text) + _joined = rejoin_chunks(_parsed, chunk_size=max_generation_length) + _text.extend(_joined) + + log.debug("chunked for size", before=chunk.text, after=_text) + + chunk.text = _text + + context.chunks = chunks + + # Enqueue each chunk individually for fine-grained interruptibility + async with self._queue_lock: + if self._queue_id is None: + self._queue_id = str(uuid.uuid4()) + + for chunk in context.chunks: + self._generation_queue.append((context, chunk)) + + # Start processing task if needed + if self._queue_task is None or self._queue_task.done(): + self._queue_task = asyncio.create_task( + self._process_queue(self._queue_id) + ) + + log.debug( + "tts queue enqueue", + queue_id=self._queue_id, + total_items=len(self._generation_queue), + ) + + # The caller doesn't need to wait for the queue to finish; it runs in + # the background. We still register the task with Talemate's + # background-processing tracking so that UI can reflect activity. + await self.set_background_processing(self._queue_task) + + # --------------------------------------------------------------------- + # Queue helpers + # --------------------------------------------------------------------- + + async def _process_queue(self, queue_id: str): + """Sequentially processes all GenerationContext objects in the queue. + + Once the last context has been processed the queue state is reset so a + future generation call will create a new queue (and therefore a new + id). The *queue_id* argument allows us to later add cancellation logic + that can target a specific queue instance. + """ + + try: + while True: + async with self._queue_lock: + if not self._generation_queue: + break + + context, chunk = self._generation_queue.popleft() + + log.debug( + "tts queue dequeue", + queue_id=queue_id, + total_items=len(self._generation_queue), + chunk_type=chunk.type, + ) + + # Process outside lock so other coroutines can enqueue + await self._generate_chunk(chunk, context) + except Exception as e: + log.error( + "Error processing queue", error=e, traceback=traceback.format_exc() + ) + finally: + # Clean up queue state after finishing (or on cancellation) + async with self._queue_lock: + if queue_id == self._queue_id: + self._queue_id = None + self._queue_task = None + self._generation_queue.clear() + + # Public helper so external code (e.g. 
later cancellation UI) can find the current queue id + def current_queue_id(self) -> str | None: + return self._queue_id + + async def _generate_chunk(self, chunk: Chunk, context: GenerationContext): + """Generate audio for a single chunk (all its sub-chunks).""" + + for _chunk in chunk.sub_chunks: + if not _chunk.cleaned_text.strip(): + continue + + emission: VoiceGenerationEmission = VoiceGenerationEmission( + chunk=_chunk, context=context + ) + + if _chunk.prepare_fn: + await async_signals.get("agent.tts.prepare.before").send(emission) + await _chunk.prepare_fn(_chunk) + await async_signals.get("agent.tts.prepare.after").send(emission) + + log.info( + "Generating audio", + api=chunk.api, + text=_chunk.cleaned_text, + parameters=_chunk.voice.parameters, + prepare_fn=_chunk.prepare_fn, + ) + + await async_signals.get("agent.tts.generate.before").send(emission) + try: + emission.wav_bytes = await _chunk.generate_fn(_chunk, context) + except Exception as e: + log.error("Error generating audio", error=e, chunk=_chunk) + continue + await async_signals.get("agent.tts.generate.after").send(emission) + self.play_audio(emission.wav_bytes, chunk.message_id) + await asyncio.sleep(0.1) + + # Deprecated: kept for backward compatibility but no longer used. + async def generate_chunks(self, context: GenerationContext): + for chunk in context.chunks: + await self._generate_chunk(chunk, context) + + def play_audio(self, audio_data, message_id: int | None = None): + # play audio through the websocket (browser) + + audio_data_encoded: str = base64.b64encode(audio_data).decode("utf-8") + + emit( + "audio_queue", + data={"audio_data": audio_data_encoded, "message_id": message_id}, + ) + + self.playback_done_event.set() # Signal that playback is finished + + async def stop_and_clear_queue(self): + """Cancel any ongoing generation and clear the pending queue. + + This is triggered by UI actions that request immediate stop of TTS + synthesis and playback. It cancels the background task (if still + running) and clears all queued items in a thread-safe manner. 
+ """ + async with self._queue_lock: + # Clear all queued items + self._generation_queue.clear() + + # Cancel the background task if it is still running + if self._queue_task and not self._queue_task.done(): + self._queue_task.cancel() + + # Reset queue identifiers/state + self._queue_id = None + self._queue_task = None + + # Ensure downstream components know playback is finished + self.playback_done_event.set() diff --git a/src/talemate/agents/tts/chatterbox.py b/src/talemate/agents/tts/chatterbox.py new file mode 100644 index 00000000..eba949af --- /dev/null +++ b/src/talemate/agents/tts/chatterbox.py @@ -0,0 +1,317 @@ +import os +import functools +import tempfile +import uuid +import asyncio +import structlog +import pydantic + +import torch + + +# Lazy imports for heavy dependencies +def _import_heavy_deps(): + global ta, ChatterboxTTS + import torchaudio as ta + from chatterbox.tts import ChatterboxTTS + + +CUDA_AVAILABLE = torch.cuda.is_available() + +from talemate.agents.base import ( + AgentAction, + AgentActionConfig, + AgentDetail, +) +from talemate.ux.schema import Field + +from .schema import Voice, Chunk, GenerationContext, VoiceProvider, INFO_CHUNK_SIZE +from .voice_library import add_default_voices +from .providers import register, provider +from .util import voice_is_talemate_asset + +log = structlog.get_logger("talemate.agents.tts.chatterbox") + +add_default_voices( + [ + Voice( + label="Eva", + provider="chatterbox", + provider_id="tts/voice/chatterbox/eva.wav", + tags=["female", "calm", "mature", "thoughtful"], + ), + Voice( + label="Lisa", + provider="chatterbox", + provider_id="tts/voice/chatterbox/lisa.wav", + tags=["female", "energetic", "young"], + ), + Voice( + label="Adam", + provider="chatterbox", + provider_id="tts/voice/chatterbox/adam.wav", + tags=["male", "calm", "mature", "thoughtful", "deep"], + ), + Voice( + label="Bradford", + provider="chatterbox", + provider_id="tts/voice/chatterbox/bradford.wav", + tags=["male", "calm", "mature", "thoughtful", "deep"], + ), + Voice( + label="Julia", + provider="chatterbox", + provider_id="tts/voice/chatterbox/julia.wav", + tags=["female", "calm", "mature"], + ), + Voice( + label="Zoe", + provider="chatterbox", + provider_id="tts/voice/chatterbox/zoe.wav", + tags=["female"], + ), + Voice( + label="William", + provider="chatterbox", + provider_id="tts/voice/chatterbox/william.wav", + tags=["male", "young"], + ), + ] +) + +CHATTERBOX_INFO = """ +Chatterbox is a local text to speech model. + +The voice id is the path to the .wav file for the voice. + +The path can be relative to the talemate root directory, and you can put new *.wav samples +in the `tts/voice/chatterbox` directory. It is also ok if you want to load the files from somewhere else as long as the filepath is available to the talemate backend. + +First generation will download the models (2.13GB + 1.06GB). + +Uses about 4GB of VRAM. 
+""" + + +@register() +class ChatterboxProvider(VoiceProvider): + name: str = "chatterbox" + allow_model_override: bool = False + allow_file_upload: bool = True + upload_file_types: list[str] = ["audio/wav"] + voice_parameters: list[Field] = [ + Field( + name="exaggeration", + type="number", + label="Exaggeration level", + value=0.5, + min=0.25, + max=2.0, + step=0.05, + ), + Field( + name="cfg_weight", + type="number", + label="CFG/Pace", + value=0.5, + min=0.2, + max=1.0, + step=0.1, + ), + Field( + name="temperature", + type="number", + label="Temperature", + value=0.8, + min=0.05, + max=5.0, + step=0.05, + ), + ] + + +class ChatterboxInstance(pydantic.BaseModel): + model: "ChatterboxTTS" + device: str + + class Config: + arbitrary_types_allowed = True + + +class ChatterboxMixin: + """ + Chatterbox agent mixin for local text to speech. + """ + + @classmethod + def add_actions(cls, actions: dict[str, AgentAction]): + actions["_config"].config["apis"].choices.append( + { + "value": "chatterbox", + "label": "Chatterbox (Local)", + "help": "Chatterbox is a local text to speech model.", + } + ) + + actions["chatterbox"] = AgentAction( + enabled=True, + container=True, + icon="mdi-server-outline", + label="Chatterbox", + description="Chatterbox is a local text to speech model.", + config={ + "device": AgentActionConfig( + type="text", + value="cuda" if CUDA_AVAILABLE else "cpu", + label="Device", + choices=[ + {"value": "cpu", "label": "CPU"}, + {"value": "cuda", "label": "CUDA"}, + ], + description="Device to use for TTS", + ), + "chunk_size": AgentActionConfig( + type="number", + min=0, + step=64, + max=2048, + value=256, + label="Chunk size", + note=INFO_CHUNK_SIZE, + ), + }, + ) + return actions + + @property + def chatterbox_configured(self) -> bool: + return True + + @property + def chatterbox_max_generation_length(self) -> int: + return 512 + + @property + def chatterbox_device(self) -> str: + return self.actions["chatterbox"].config["device"].value + + @property + def chatterbox_chunk_size(self) -> int: + return self.actions["chatterbox"].config["chunk_size"].value + + @property + def chatterbox_info(self) -> str: + return CHATTERBOX_INFO + + @property + def chatterbox_agent_details(self) -> dict: + if not self.chatterbox_configured: + return {} + details = {} + + details["chatterbox_device"] = AgentDetail( + icon="mdi-memory", + value=f"Chatterbox: {self.chatterbox_device}", + description="The device to use for Chatterbox", + ).model_dump() + + return details + + def chatterbox_delete_voice(self, voice: Voice): + """ + Remove the voice from the file system. + Only do this if the path is within TALEMATE_ROOT. 
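+        Voices whose files resolve outside the Talemate root are left untouched.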
+ """ + + is_talemate_asset, resolved = voice_is_talemate_asset( + voice, provider(voice.provider) + ) + + log.debug( + "chatterbox_delete_voice", + voice_id=voice.provider_id, + is_talemate_asset=is_talemate_asset, + resolved=resolved, + ) + + if not is_talemate_asset: + return + + try: + if resolved.exists() and resolved.is_file(): + resolved.unlink() + log.debug("Deleted chatterbox voice file", path=str(resolved)) + except Exception as e: + log.error( + "Failed to delete chatterbox voice file", error=e, path=str(resolved) + ) + + def _chatterbox_generate_file( + self, + model: "ChatterboxTTS", + text: str, + audio_prompt_path: str, + output_path: str, + **kwargs, + ): + wav = model.generate(text=text, audio_prompt_path=audio_prompt_path, **kwargs) + ta.save(output_path, wav, model.sr) + return output_path + + async def chatterbox_generate( + self, chunk: Chunk, context: GenerationContext + ) -> bytes | None: + chatterbox_instance: ChatterboxInstance | None = getattr( + self, "chatterbox_instance", None + ) + + reload: bool = False + + if not chatterbox_instance: + reload = True + elif chatterbox_instance.device != self.chatterbox_device: + reload = True + + if reload: + log.debug( + "chatterbox - reinitializing tts instance", + device=self.chatterbox_device, + ) + # Lazy import heavy dependencies only when needed + _import_heavy_deps() + + self.chatterbox_instance = ChatterboxInstance( + model=ChatterboxTTS.from_pretrained(device=self.chatterbox_device), + device=self.chatterbox_device, + ) + + model: "ChatterboxTTS" = self.chatterbox_instance.model + + loop = asyncio.get_event_loop() + + voice = chunk.voice + + with tempfile.TemporaryDirectory() as temp_dir: + file_path = os.path.join(temp_dir, f"tts-{uuid.uuid4()}.wav") + + await loop.run_in_executor( + None, + functools.partial( + self._chatterbox_generate_file, + model=model, + text=chunk.cleaned_text, + audio_prompt_path=voice.provider_id, + output_path=file_path, + **voice.parameters, + ), + ) + + with open(file_path, "rb") as f: + return f.read() + + async def chatterbox_prepare_chunk(self, chunk: Chunk): + voice = chunk.voice + P = provider(voice.provider) + exaggeration = P.voice_parameter(voice, "exaggeration") + + voice.parameters["exaggeration"] = exaggeration diff --git a/src/talemate/agents/tts/elevenlabs.py b/src/talemate/agents/tts/elevenlabs.py new file mode 100644 index 00000000..cfdcf26e --- /dev/null +++ b/src/talemate/agents/tts/elevenlabs.py @@ -0,0 +1,248 @@ +import io +from typing import Union + +import structlog + + +# Lazy imports for heavy dependencies +def _import_heavy_deps(): + global AsyncElevenLabs, ApiError + from elevenlabs.client import AsyncElevenLabs + + # Added explicit ApiError import for clearer error handling + from elevenlabs.core.api_error import ApiError + + +from talemate.ux.schema import Action + +from talemate.agents.base import ( + AgentAction, + AgentActionConfig, + AgentDetail, +) +from .schema import Voice, VoiceLibrary, GenerationContext, Chunk, INFO_CHUNK_SIZE +from .voice_library import add_default_voices + +# emit helper to propagate status messages to the UX +from talemate.emit import emit + +log = structlog.get_logger("talemate.agents.tts.elevenlabs") + + +add_default_voices( + [ + Voice( + label="Adam", + provider="elevenlabs", + provider_id="wBXNqKUATyqu0RtYt25i", + tags=["male", "deep"], + ), + Voice( + label="Amy", + provider="elevenlabs", + provider_id="oGn4Ha2pe2vSJkmIJgLQ", + tags=["female"], + ), + ] +) + + +ELEVENLABS_INFO = """ +ElevenLabs is a cloud-based text to speech 
API.
+
+To add new voices, head to their voice library at [https://elevenlabs.io/app/voice-library](https://elevenlabs.io/app/voice-library) and note the voice id of the voice you want to use. (Click 'More Actions -> Copy Voice ID')
+
+**About ElevenLabs voices**
+Your ElevenLabs subscription allows you to maintain a set number of voices (10 on the cheapest plan).
+
+Any voice that you generate audio for is automatically added to your voices at [https://elevenlabs.io/app/voice-lab](https://elevenlabs.io/app/voice-lab). This also happens when you use the "Test" button above. It is recommended to test via their voice library instead.
+"""
+
+
+class ElevenLabsMixin:
+    """
+    ElevenLabs TTS agent mixin for cloud-based text to speech.
+    """
+
+    @classmethod
+    def add_actions(cls, actions: dict[str, AgentAction]):
+        actions["_config"].config["apis"].choices.append(
+            {
+                "value": "elevenlabs",
+                "label": "ElevenLabs",
+                "help": "ElevenLabs is a cloud-based text to speech model that uses the ElevenLabs API. (API key required)",
+            }
+        )
+
+        actions["elevenlabs"] = AgentAction(
+            enabled=True,
+            container=True,
+            icon="mdi-server-outline",
+            label="ElevenLabs",
+            description="ElevenLabs is a cloud-based text to speech API. (API key required and must be set in the Talemate Settings -> Application -> ElevenLabs)",
+            config={
+                "model": AgentActionConfig(
+                    type="text",
+                    value="eleven_flash_v2_5",
+                    label="Model",
+                    description="Model to use for TTS",
+                    choices=[
+                        {
+                            "value": "eleven_multilingual_v2",
+                            "label": "Eleven Multilingual V2",
+                        },
+                        {"value": "eleven_flash_v2_5", "label": "Eleven Flash V2.5"},
+                        {"value": "eleven_turbo_v2_5", "label": "Eleven Turbo V2.5"},
+                    ],
+                ),
+                "chunk_size": AgentActionConfig(
+                    type="number",
+                    min=0,
+                    step=64,
+                    max=2048,
+                    value=0,
+                    label="Chunk size",
+                    note=INFO_CHUNK_SIZE,
+                ),
+            },
+        )
+
+        return actions
+
+    @classmethod
+    def add_voices(cls, voices: dict[str, VoiceLibrary]):
+        voices["elevenlabs"] = VoiceLibrary(api="elevenlabs", local=True)
+
+    @property
+    def elevenlabs_chunk_size(self) -> int:
+        return self.actions["elevenlabs"].config["chunk_size"].value
+
+    @property
+    def elevenlabs_configured(self) -> bool:
+        api_key_set = bool(self.elevenlabs_api_key)
+        model_set = bool(self.elevenlabs_model)
+        return api_key_set and model_set
+
+    @property
+    def elevenlabs_not_configured_reason(self) -> str | None:
+        if not self.elevenlabs_api_key:
+            return "ElevenLabs API key not set"
+        if not self.elevenlabs_model:
+            return "ElevenLabs model not set"
+        return None
+
+    @property
+    def elevenlabs_not_configured_action(self) -> Action | None:
+        if not self.elevenlabs_api_key:
+            return Action(
+                action_name="openAppConfig",
+                arguments=["application", "elevenlabs_api"],
+                label="Set API Key",
+                icon="mdi-key",
+            )
+        if not self.elevenlabs_model:
+            return Action(
+                action_name="openAgentSettings",
+                arguments=["tts", "elevenlabs"],
+                label="Set Model",
+                icon="mdi-brain",
+            )
+        return None
+
+    @property
+    def elevenlabs_max_generation_length(self) -> int:
+        return 1024
+
+    @property
+    def elevenlabs_model(self) -> str:
+        return self.actions["elevenlabs"].config["model"].value
+
+    @property
+    def elevenlabs_model_choices(self) -> list[str]:
+        return [
+            {"label": choice["label"], "value": choice["value"]}
+            for choice in self.actions["elevenlabs"].config["model"].choices
+        ]
+
+    @property
+    def elevenlabs_info(self) -> str:
+        return ELEVENLABS_INFO
+
+    @property
+    def elevenlabs_agent_details(self) -> dict:
+        details = {}
+
+        if not self.elevenlabs_configured:
+            
details["elevenlabs_api_key"] = AgentDetail( + icon="mdi-key", + value="ElevenLabs API key not set", + description="ElevenLabs API key not set. You can set it in the Talemate Settings -> Application -> ElevenLabs", + color="error", + ).model_dump() + else: + details["elevenlabs_model"] = AgentDetail( + icon="mdi-brain", + value=self.elevenlabs_model, + description="The model to use for ElevenLabs", + ).model_dump() + + return details + + @property + def elevenlabs_api_key(self) -> str: + return self.config.elevenlabs.api_key + + async def elevenlabs_generate( + self, chunk: Chunk, context: GenerationContext, chunk_size: int = 1024 + ) -> Union[bytes, None]: + api_key = self.elevenlabs_api_key + if not api_key: + return + + # Lazy import heavy dependencies only when needed + _import_heavy_deps() + + client = AsyncElevenLabs(api_key=api_key) + + try: + response_async_iter = client.text_to_speech.convert( + text=chunk.cleaned_text, + voice_id=chunk.voice.provider_id, + model_id=chunk.model or self.elevenlabs_model, + ) + + bytes_io = io.BytesIO() + + async for _chunk_bytes in response_async_iter: + if _chunk_bytes: + bytes_io.write(_chunk_bytes) + + return bytes_io.getvalue() + + except ApiError as e: + # Emit detailed status message to the frontend UI + error_message = "ElevenLabs API Error" + try: + # The ElevenLabs ApiError often contains a JSON body with details + detail = e.body.get("detail", {}) if hasattr(e, "body") else {} + error_message = detail.get("message", str(e)) or str(e) + except Exception: + error_message = str(e) + + log.error("ElevenLabs API error", error=str(e)) + emit( + "status", + message=f"ElevenLabs TTS: {error_message}", + status="error", + ) + raise e + + except Exception as e: + # Catch-all to ensure the app does not crash on unexpected errors + log.error("ElevenLabs TTS generation error", error=str(e)) + emit( + "status", + message=f"ElevenLabs TTS: Unexpected error – {str(e)}", + status="error", + ) + raise e diff --git a/src/talemate/agents/tts/f5tts.py b/src/talemate/agents/tts/f5tts.py new file mode 100644 index 00000000..930d0409 --- /dev/null +++ b/src/talemate/agents/tts/f5tts.py @@ -0,0 +1,436 @@ +import os +import functools +import tempfile +import uuid +import asyncio +import structlog +import pydantic +import re + +import torch + + +# Lazy imports for heavy dependencies +def _import_heavy_deps(): + global F5TTS + from f5_tts.api import F5TTS + + +CUDA_AVAILABLE = torch.cuda.is_available() + +from talemate.agents.base import ( + AgentAction, + AgentActionConfig, + AgentDetail, +) +from talemate.ux.schema import Field + +from .schema import Voice, Chunk, GenerationContext, VoiceProvider, INFO_CHUNK_SIZE +from .voice_library import add_default_voices +from .providers import register, provider +from .util import voice_is_talemate_asset + +log = structlog.get_logger("talemate.agents.tts.f5tts") + +REF_TEXT = "You awaken aboard your ship, the Starlight Nomad. A soft hum resonates throughout the vessel indicating its systems are online." 
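+
+# REF_TEXT is the shared "ref_text" parameter for the bundled reference samples
+# registered below; F5-TTS uses it as the transcript that matches the reference
+# audio. A custom voice entry would follow the same pattern, for example
+# (hypothetical sample path and transcript, shown for illustration only):
+#
+#   Voice(
+#       label="My Narrator",
+#       provider="f5tts",
+#       provider_id="tts/voice/f5tts/my-narrator.wav",
+#       parameters={"speed": 1.0, "ref_text": "Transcript of my-narrator.wav."},
+#   )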
+ +add_default_voices( + [ + Voice( + label="Adam", + provider="f5tts", + provider_id="tts/voice/f5tts/adam.wav", + tags=["male", "calm", "mature", "deep", "thoughtful"], + parameters={ + "speed": 1.05, + "ref_text": REF_TEXT, + }, + ), + Voice( + label="Bradford", + provider="f5tts", + provider_id="tts/voice/f5tts/bradford.wav", + tags=["male", "calm", "mature"], + parameters={ + "speed": 1, + "ref_text": REF_TEXT, + }, + ), + Voice( + label="Julia", + provider="f5tts", + provider_id="tts/voice/f5tts/julia.wav", + tags=["female", "calm", "mature"], + parameters={ + "speed": 1.1, + "ref_text": REF_TEXT, + }, + ), + Voice( + label="Lisa", + provider="f5tts", + provider_id="tts/voice/f5tts/lisa.wav", + tags=["female", "young", "energetic"], + parameters={ + "speed": 1.2, + "ref_text": REF_TEXT, + }, + ), + Voice( + label="Eva", + provider="f5tts", + provider_id="tts/voice/f5tts/eva.wav", + tags=["female", "mature", "thoughtful"], + parameters={ + "speed": 1.15, + "ref_text": REF_TEXT, + }, + ), + Voice( + label="Zoe", + provider="f5tts", + provider_id="tts/voice/f5tts/zoe.wav", + tags=["female"], + parameters={ + "speed": 1.15, + "ref_text": REF_TEXT, + }, + ), + Voice( + label="William", + provider="f5tts", + provider_id="tts/voice/f5tts/william.wav", + tags=["male", "young"], + parameters={ + "speed": 1.15, + "ref_text": REF_TEXT, + }, + ), + ] +) + +F5TTS_INFO = """ +F5-TTS is a local text-to-speech model. + +The voice id is the path to the reference *.wav* file that contains a short +voice sample (≈3-5 s). You can place new samples in the +`tts/voice/f5tts` directory of your Talemate workspace or supply an absolute +path that is accessible to the backend. + +The first generation will download the model weights (~1.3 GB) if they are not +cached yet. +""" + + +@register() +class F5TTSProvider(VoiceProvider): + """Metadata for the F5-TTS provider.""" + + name: str = "f5tts" + allow_model_override: bool = False + allow_file_upload: bool = True + upload_file_types: list[str] = ["audio/wav"] + + # Provider-specific tunable parameters that can be stored per-voice + voice_parameters: list[Field] = [ + Field( + name="speed", + type="number", + label="Speed", + value=1.0, + min=0.25, + max=2.0, + step=0.05, + description="If the speech is too fast or slow, adjust this value. 
1.0 is normal speed.", + ), + Field( + name="ref_text", + type="text", + label="Reference text", + value="", + description="Text that matches the reference audio sample (improves synthesis quality).", + required=True, + ), + Field( + name="cfg_strength", + type="number", + label="CFG Strength", + value=2.0, + min=0.1, + step=0.1, + max=10.0, + description="CFG strength for the model.", + ), + ] + + +class F5TTSInstance(pydantic.BaseModel): + """Holds a single F5-TTS model instance (lazy-initialised).""" + + model: "F5TTS" # Forward reference for lazy loading + model_name: str + + class Config: + arbitrary_types_allowed = True + + +class F5TTSMixin: + """F5-TTS agent mixin for local text-to-speech generation.""" + + # --------------------------------------------------------------------- + # UI integration / configuration helpers + # --------------------------------------------------------------------- + + @classmethod + def add_actions(cls, actions: dict[str, AgentAction]): + """Expose the F5-TTS backend in the global TTS agent settings.""" + + actions["_config"].config["apis"].choices.append( + { + "value": "f5tts", + "label": "F5-TTS (Local)", + "help": "F5-TTS is a local text-to-speech model.", + } + ) + + actions["f5tts"] = AgentAction( + enabled=True, + container=True, + icon="mdi-server-outline", + label="F5-TTS", + description="F5-TTS is a local text-to-speech model.", + config={ + "device": AgentActionConfig( + type="text", + value="cuda" if CUDA_AVAILABLE else "cpu", + label="Device", + choices=[ + {"value": "cpu", "label": "CPU"}, + {"value": "cuda", "label": "CUDA"}, + ], + description="Device to use for TTS", + ), + "model_name": AgentActionConfig( + type="text", + value="F5TTS_v1_Base", + label="Model", + description="Model will be downloaded on first use.", + choices=[ + {"value": "E2TTS_Base", "label": "E2TTS_Base"}, + {"value": "F5TTS_Base", "label": "F5TTS_Base"}, + {"value": "F5TTS_v1_Base", "label": "F5TTS_v1_Base"}, + ], + ), + "nfe_step": AgentActionConfig( + type="number", + label="NFE Step", + value=32, + min=32, + step=16, + max=64, + description="Number of diffusion steps.", + ), + "chunk_size": AgentActionConfig( + type="number", + min=0, + step=32, + max=1024, + value=64, + label="Chunk size", + note=INFO_CHUNK_SIZE, + ), + "replace_exclamation_marks": AgentActionConfig( + type="bool", + value=True, + label="Replace exclamation marks", + description="Some models tend to over-emphasise exclamation marks, so this is a workaround to make the speech more natural.", + ), + }, + ) + + # No additional per-API settings (model/device) required for F5-TTS. + return actions + + # ------------------------------------------------------------------ + # Convenience properties consumed by the core TTS agent + # ------------------------------------------------------------------ + + @property + def f5tts_configured(self) -> bool: + # Local backend – always available once the model weights are present. 
+ return True + + @property + def f5tts_device(self) -> str: + return self.actions["f5tts"].config["device"].value + + @property + def f5tts_chunk_size(self) -> int: + return self.actions["f5tts"].config["chunk_size"].value + + @property + def f5tts_replace_exclamation_marks(self) -> bool: + return self.actions["f5tts"].config["replace_exclamation_marks"].value + + @property + def f5tts_model_name(self) -> str: + return self.actions["f5tts"].config["model_name"].value + + @property + def f5tts_nfe_step(self) -> int: + return self.actions["f5tts"].config["nfe_step"].value + + @property + def f5tts_max_generation_length(self) -> int: + return 1024 + + @property + def f5tts_info(self) -> str: + return F5TTS_INFO + + @property + def f5tts_agent_details(self) -> dict: + if not self.f5tts_configured: + return {} + details = {} + + device = self.f5tts_device + model_name = self.f5tts_model_name + + details["f5tts_device"] = AgentDetail( + icon="mdi-memory", + value=f"{model_name}@{device}", + description="The model and device to use for F5-TTS", + ).model_dump() + + return details + + # ------------------------------------------------------------------ + # Voice housekeeping helpers + # ------------------------------------------------------------------ + + def f5tts_delete_voice(self, voice: Voice): + """Delete *voice* reference file if it is inside the Talemate workspace.""" + + is_talemate_asset, resolved = voice_is_talemate_asset( + voice, provider(voice.provider) + ) + + log.debug( + "f5tts_delete_voice", + voice_id=voice.provider_id, + is_talemate_asset=is_talemate_asset, + resolved=resolved, + ) + + if not is_talemate_asset: + return + + try: + if resolved.exists() and resolved.is_file(): + resolved.unlink() + log.debug("Deleted F5-TTS voice file", path=str(resolved)) + except Exception as e: + log.error("Failed to delete F5-TTS voice file", error=e, path=str(resolved)) + + # ------------------------------------------------------------------ + # Generation helpers + # ------------------------------------------------------------------ + + def _f5tts_generate_file( + self, + model: "F5TTS", + chunk: Chunk, + voice: Voice, + output_path: str, + ) -> str: + """Blocking generation helper executed in a thread-pool.""" + + wav, sr, _ = model.infer( + ref_file=voice.provider_id, + ref_text=voice.parameters.get("ref_text", ""), + gen_text=chunk.cleaned_text, + file_wave=output_path, + speed=voice.parameters.get("speed", 1.0), + cfg_strength=voice.parameters.get("cfg_strength", 2.0), + nfe_step=self.f5tts_nfe_step, + ) + + # Some versions of F5-TTS don’t write *file_wave*. Drop-in save as fallback. 
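+        # (Note: `ta`/torchaudio is not imported in this module, so enabling the
+        # fallback below would also need an audio writer, e.g. soundfile:
+        # `import soundfile as sf; sf.write(output_path, wav, sr)`.)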
+        # if not os.path.exists(output_path):
+        #     ta.save(output_path, wav, sr)
+
+        return output_path
+
+    async def f5tts_generate(
+        self, chunk: Chunk, context: GenerationContext
+    ) -> bytes | None:
+        """Asynchronously synthesise *chunk* using F5-TTS."""
+
+        # Lazy initialisation & caching across invocations
+        f5tts_instance: "F5TTSInstance | None" = getattr(self, "f5tts_instance", None)
+
+        device = self.f5tts_device
+        model_name: str = self.f5tts_model_name
+
+        reload_model = (
+            f5tts_instance is None
+            or f5tts_instance.model.device != device
+            or f5tts_instance.model_name != model_name
+        )
+
+        if reload_model:
+            if f5tts_instance is not None:
+                log.debug(
+                    "Reloading F5-TTS backend", device=device, model_name=model_name
+                )
+            else:
+                log.debug(
+                    "Initialising F5-TTS backend", device=device, model_name=model_name
+                )
+
+            # Lazy import heavy dependencies only when needed
+            _import_heavy_deps()
+
+            f5tts_instance = F5TTSInstance(
+                model=F5TTS(device=device, model=model_name),
+                model_name=model_name,
+            )
+            self.f5tts_instance = f5tts_instance
+
+        model: "F5TTS" = f5tts_instance.model
+
+        loop = asyncio.get_event_loop()
+
+        voice = chunk.voice
+
+        with tempfile.TemporaryDirectory() as temp_dir:
+            file_path = os.path.join(temp_dir, f"tts-{uuid.uuid4()}.wav")
+
+            # Delegate blocking work to the default ThreadPoolExecutor
+            await loop.run_in_executor(
+                None,
+                functools.partial(
+                    self._f5tts_generate_file, model, chunk, voice, file_path
+                ),
+            )
+
+            # Read the generated WAV and return bytes for websocket playback
+            with open(file_path, "rb") as f:
+                return f.read()
+
+    async def f5tts_prepare_chunk(self, chunk: Chunk):
+        text = chunk.text[0]
+
+        # f5-tts seems to have issues with ellipses
+        text = text.replace("…", "...").replace("...", ".")
+
+        # hyphenated words also seem to be a problem
+        text = re.sub(r"(\w)-(\w)", r"\1 \2", text)
+
+        if self.f5tts_replace_exclamation_marks:
+            text = text.replace("!", ".")
+
+        chunk.text[0] = text
+
+        return chunk
diff --git a/src/talemate/agents/tts/google.py b/src/talemate/agents/tts/google.py
new file mode 100644
index 00000000..f36dfca5
--- /dev/null
+++ b/src/talemate/agents/tts/google.py
@@ -0,0 +1,319 @@
+import io
+import wave
+from typing import Union, Optional
+
+import structlog
+from google import genai
+from google.genai import types
+from talemate.ux.schema import Action
+from talemate.agents.base import (
+    AgentAction,
+    AgentActionConfig,
+    AgentDetail,
+)
+from .schema import Voice, VoiceLibrary, Chunk, GenerationContext, INFO_CHUNK_SIZE
+from .voice_library import add_default_voices
+
+log = structlog.get_logger("talemate.agents.tts.google")
+
+GOOGLE_INFO = """
+Google Gemini TTS is a cloud-based text to speech model.
+
+A list of available voices can be found at [https://ai.google.dev/gemini-api/docs/speech-generation](https://ai.google.dev/gemini-api/docs/speech-generation).
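+
+The voice id is simply the prebuilt voice name, e.g. `Kore` or `Puck`.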
+""" + +add_default_voices( + [ + Voice(label="Zephyr", provider="google", provider_id="Zephyr", tags=["female"]), + Voice(label="Puck", provider="google", provider_id="Puck", tags=["male"]), + Voice(label="Charon", provider="google", provider_id="Charon", tags=["male"]), + Voice(label="Kore", provider="google", provider_id="Kore", tags=["female"]), + Voice(label="Fenrir", provider="google", provider_id="Fenrir", tags=["male"]), + Voice(label="Leda", provider="google", provider_id="Leda", tags=["female"]), + Voice(label="Orus", provider="google", provider_id="Orus", tags=["male"]), + Voice(label="Aoede", provider="google", provider_id="Aoede", tags=["female"]), + Voice( + label="Callirrhoe", + provider="google", + provider_id="Callirrhoe", + tags=["female"], + ), + Voice( + label="Autonoe", provider="google", provider_id="Autonoe", tags=["female"] + ), + Voice( + label="Enceladus", + provider="google", + provider_id="Enceladus", + tags=["male", "deep"], + ), + Voice(label="Iapetus", provider="google", provider_id="Iapetus", tags=["male"]), + Voice(label="Umbriel", provider="google", provider_id="Umbriel", tags=["male"]), + Voice( + label="Algieba", + provider="google", + provider_id="Algieba", + tags=["male", "deep"], + ), + Voice( + label="Despina", + provider="google", + provider_id="Despina", + tags=["female", "young"], + ), + Voice( + label="Erinome", provider="google", provider_id="Erinome", tags=["female"] + ), + Voice(label="Algenib", provider="google", provider_id="Algenib", tags=["male"]), + Voice( + label="Rasalgethi", + provider="google", + provider_id="Rasalgethi", + tags=["male", "neutral"], + ), + Voice( + label="Laomedeia", + provider="google", + provider_id="Laomedeia", + tags=["female"], + ), + Voice( + label="Achernar", + provider="google", + provider_id="Achernar", + tags=["female", "young"], + ), + Voice(label="Alnilam", provider="google", provider_id="Alnilam", tags=["male"]), + Voice(label="Schedar", provider="google", provider_id="Schedar", tags=["male"]), + Voice( + label="Gacrux", + provider="google", + provider_id="Gacrux", + tags=["female", "mature"], + ), + Voice( + label="Pulcherrima", + provider="google", + provider_id="Pulcherrima", + tags=["female", "mature"], + ), + Voice( + label="Achird", + provider="google", + provider_id="Achird", + tags=["male", "energetic"], + ), + Voice( + label="Zubenelgenubi", + provider="google", + provider_id="Zubenelgenubi", + tags=["male"], + ), + Voice( + label="Vindemiatrix", + provider="google", + provider_id="Vindemiatrix", + tags=["female", "mature"], + ), + Voice( + label="Sadachbia", provider="google", provider_id="Sadachbia", tags=["male"] + ), + Voice( + label="Sadaltager", + provider="google", + provider_id="Sadaltager", + tags=["male"], + ), + Voice( + label="Sulafat", + provider="google", + provider_id="Sulafat", + tags=["female", "young"], + ), + ] +) + + +class GoogleMixin: + """Google Gemini TTS mixin (Flash/Pro preview models).""" + + @classmethod + def add_actions(cls, actions: dict[str, AgentAction]): + actions["_config"].config["apis"].choices.append( + { + "value": "google", + "label": "Google Gemini", + "help": "Google Gemini is a cloud-based text to speech model that uses the Google Gemini API. (API key required)", + } + ) + + actions["google"] = AgentAction( + enabled=True, + container=True, + icon="mdi-server-outline", + label="Google Gemini", + description="Google Gemini is a cloud-based text to speech API. 
(API key required and must be set in the Talemate Settings -> Application -> Google)", + config={ + "model": AgentActionConfig( + type="text", + value="gemini-2.5-flash-preview-tts", + choices=[ + { + "value": "gemini-2.5-flash-preview-tts", + "label": "Gemini 2.5 Flash TTS (Preview)", + }, + { + "value": "gemini-2.5-pro-preview-tts", + "label": "Gemini 2.5 Pro TTS (Preview)", + }, + ], + label="Model", + description="Google TTS model to use", + ), + "chunk_size": AgentActionConfig( + type="number", + min=0, + step=64, + max=2048, + value=0, + label="Chunk size", + note=INFO_CHUNK_SIZE, + ), + }, + ) + + return actions + + @classmethod + def add_voices(cls, voices: dict[str, VoiceLibrary]): + voices["google"] = VoiceLibrary(api="google") + + @property + def google_configured(self) -> bool: + return bool(self.google_api_key) and bool(self.google_model) + + @property + def google_chunk_size(self) -> int: + return self.actions["google"].config["chunk_size"].value + + @property + def google_not_configured_reason(self) -> str | None: + if not self.google_api_key: + return "Google API key not set" + if not self.google_model: + return "Google model not set" + return None + + @property + def google_not_configured_action(self) -> Action | None: + if not self.google_api_key: + return Action( + action_name="openAppConfig", + arguments=["application", "google_api"], + label="Set API Key", + icon="mdi-key", + ) + if not self.google_model: + return Action( + action_name="openAgentSettings", + arguments=["tts", "google"], + label="Set Model", + icon="mdi-brain", + ) + return None + + @property + def google_info(self) -> str: + return GOOGLE_INFO + + @property + def google_max_generation_length(self) -> int: + return 1024 + + @property + def google_model(self) -> str: + return self.actions["google"].config["model"].value + + @property + def google_model_choices(self) -> list[str]: + return [ + {"label": choice["label"], "value": choice["value"]} + for choice in self.actions["google"].config["model"].choices + ] + + @property + def google_api_key(self) -> Optional[str]: + return self.config.google.api_key + + @property + def google_agent_details(self) -> dict: + details = {} + + if not self.google_configured: + details["google_api_key"] = AgentDetail( + icon="mdi-key", + value="Google API key not set", + description="Google API key not set. 
You can set it in the Talemate Settings -> Application -> Google", + color="error", + ).model_dump() + else: + details["google_model"] = AgentDetail( + icon="mdi-brain", + value=self.google_model, + description="The model to use for Google", + ).model_dump() + + return details + + def _make_google_client(self) -> genai.Client: + """Return a fresh genai.Client so updated creds propagate immediately.""" + return genai.Client(api_key=self.google_api_key or None) + + async def google_generate( + self, + chunk: Chunk, + context: GenerationContext, + chunk_size: int = 1024, # kept for signature parity + ) -> Union[bytes, None]: + """Generate audio and wrap raw PCM into a playable WAV container.""" + + voice_name = chunk.voice.provider_id + client = self._make_google_client() + + try: + response = await client.aio.models.generate_content( + model=chunk.model or self.google_model, + contents=chunk.cleaned_text, + config=types.GenerateContentConfig( + response_modalities=["AUDIO"], + speech_config=types.SpeechConfig( + voice_config=types.VoiceConfig( + prebuilt_voice_config=types.PrebuiltVoiceConfig( + voice_name=voice_name, + ) + ) + ), + ), + ) + + # Extract raw 24 kHz 16‑bit PCM (mono) bytes from first candidate + part = response.candidates[0].content.parts[0].inline_data + if not part or not part.data: + return None + pcm_bytes: bytes = part.data + + # Wrap into a WAV container that browsers can decode + wav_io = io.BytesIO() + with wave.open(wav_io, "wb") as wf: + wf.setnchannels(1) + wf.setsampwidth(2) # 16‑bit + wf.setframerate(24000) # Hz + wf.writeframes(pcm_bytes) + return wav_io.getvalue() + + except Exception as e: + import traceback + + traceback.print_exc() + log.error("google_generate failed", error=str(e)) + return None diff --git a/src/talemate/agents/tts/kokoro.py b/src/talemate/agents/tts/kokoro.py new file mode 100644 index 00000000..98b38c17 --- /dev/null +++ b/src/talemate/agents/tts/kokoro.py @@ -0,0 +1,324 @@ +import os +import functools +import tempfile +import uuid +import asyncio +import structlog +import pydantic +import traceback +from pathlib import Path + + +import torch +import soundfile as sf +from kokoro import KPipeline + + +from talemate.agents.base import ( + AgentAction, + AgentActionConfig, +) +from .schema import ( + Voice, + Chunk, + GenerationContext, + VoiceMixer, + VoiceProvider, + INFO_CHUNK_SIZE, +) +from .providers import register +from .voice_library import add_default_voices + +log = structlog.get_logger("talemate.agents.tts.kokoro") + +CUSTOM_VOICE_STORAGE = ( + Path(__file__).parent.parent.parent.parent.parent / "tts" / "voice" / "kokoro" +) + +add_default_voices( + [ + Voice( + label="Alloy", provider="kokoro", provider_id="af_alloy", tags=["female"] + ), + Voice( + label="Aoede", provider="kokoro", provider_id="af_aoede", tags=["female"] + ), + Voice( + label="Bella", provider="kokoro", provider_id="af_bella", tags=["female"] + ), + Voice( + label="Heart", provider="kokoro", provider_id="af_heart", tags=["female"] + ), + Voice( + label="Jessica", + provider="kokoro", + provider_id="af_jessica", + tags=["female"], + ), + Voice(label="Kore", provider="kokoro", provider_id="af_kore", tags=["female"]), + Voice( + label="Nicole", provider="kokoro", provider_id="af_nicole", tags=["female"] + ), + Voice(label="Nova", provider="kokoro", provider_id="af_nova", tags=["female"]), + Voice( + label="River", provider="kokoro", provider_id="af_river", tags=["female"] + ), + Voice( + label="Sarah", provider="kokoro", provider_id="af_sarah", tags=["female"] + ), 
+ Voice(label="Sky", provider="kokoro", provider_id="af_sky", tags=["female"]), + Voice(label="Adam", provider="kokoro", provider_id="am_adam", tags=["male"]), + Voice(label="Echo", provider="kokoro", provider_id="am_echo", tags=["male"]), + Voice(label="Eric", provider="kokoro", provider_id="am_eric", tags=["male"]), + Voice( + label="Fenrir", provider="kokoro", provider_id="am_fenrir", tags=["male"] + ), + Voice(label="Liam", provider="kokoro", provider_id="am_liam", tags=["male"]), + Voice( + label="Michael", provider="kokoro", provider_id="am_michael", tags=["male"] + ), + Voice(label="Onyx", provider="kokoro", provider_id="am_onyx", tags=["male"]), + Voice(label="Puck", provider="kokoro", provider_id="am_puck", tags=["male"]), + Voice(label="Santa", provider="kokoro", provider_id="am_santa", tags=["male"]), + Voice( + label="Alice", provider="kokoro", provider_id="bf_alice", tags=["female"] + ), + Voice(label="Emma", provider="kokoro", provider_id="bf_emma", tags=["female"]), + Voice( + label="Isabella", + provider="kokoro", + provider_id="bf_isabella", + tags=["female"], + ), + Voice(label="Lily", provider="kokoro", provider_id="bf_lily", tags=["female"]), + Voice( + label="Daniel", provider="kokoro", provider_id="bm_daniel", tags=["male"] + ), + Voice(label="Fable", provider="kokoro", provider_id="bm_fable", tags=["male"]), + Voice( + label="George", provider="kokoro", provider_id="bm_george", tags=["male"] + ), + Voice(label="Lewis", provider="kokoro", provider_id="bm_lewis", tags=["male"]), + ] +) + +KOKORO_INFO = """ +Kokoro is a local text to speech model. + +**WILL DOWNLOAD**: Voices will be downloaded on first use, so the first generation will take longer to complete. +""" + + +@register() +class KokoroProvider(VoiceProvider): + name: str = "kokoro" + allow_model_override: bool = False + + +class KokoroInstance(pydantic.BaseModel): + pipeline: "KPipeline" # Forward reference for lazy loading + + class Config: + arbitrary_types_allowed = True + + +class KokoroMixin: + """ + Kokoro agent mixin for local text to speech. + """ + + @classmethod + def add_actions(cls, actions: dict[str, AgentAction]): + actions["_config"].config["apis"].choices.append( + { + "value": "kokoro", + "label": "Kokoro (Local)", + "help": "Kokoro is a local text to speech model.", + } + ) + + actions["kokoro"] = AgentAction( + enabled=True, + container=True, + icon="mdi-server-outline", + label="Kokoro", + description="Kokoro is a local text to speech model.", + config={ + "chunk_size": AgentActionConfig( + type="number", + min=0, + step=64, + max=2048, + value=512, + label="Chunk size", + note=INFO_CHUNK_SIZE, + ), + }, + ) + return actions + + @property + def kokoro_configured(self) -> bool: + return True + + @property + def kokoro_chunk_size(self) -> int: + return self.actions["kokoro"].config["chunk_size"].value + + @property + def kokoro_max_generation_length(self) -> int: + return 256 + + @property + def kokoro_agent_details(self) -> dict: + return {} + + @property + def kokoro_supports_mixing(self) -> bool: + return True + + @property + def kokoro_info(self) -> str: + return KOKORO_INFO + + def kokoro_delete_voice(self, voice_id: str) -> None: + """ + If the voice_id is a file in the CUSTOM_VOICE_STORAGE directory, delete it. 
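+        Built-in voice ids (e.g. ``af_bella``) resolve outside that directory and are
+        left untouched.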
+ """ + + # if voice id is a deletable file it'll be a relative or absolute path + # to a file in the CUSTOM_VOICE_STORAGE directory + + # we must verify that it is in the CUSTOM_VOICE_STORAGE directory + voice_path = Path(voice_id).resolve() + log.debug( + "Kokoro - Checking if voice id is deletable", + voice_id=voice_id, + exists=voice_path.exists(), + parent=voice_path.parent, + is_custom_voice_storage=voice_path.parent == CUSTOM_VOICE_STORAGE, + ) + if voice_path.exists() and voice_path.parent == CUSTOM_VOICE_STORAGE: + log.debug("Kokoro - Deleting voice file", voice_id=voice_id) + try: + voice_path.unlink() + except FileNotFoundError: + pass + + def _kokoro_mix(self, mixer: VoiceMixer) -> "torch.Tensor": + pipeline = KPipeline(lang_code="a") + + packs = [ + { + "voice_tensor": pipeline.load_single_voice(voice.id), + "weight": voice.weight, + } + for voice in mixer.voices + ] + + mixed_voice = None + for pack in packs: + if mixed_voice is None: + mixed_voice = pack["voice_tensor"] * pack["weight"] + else: + mixed_voice += pack["voice_tensor"] * pack["weight"] + + # TODO: ensure weights sum to 1 + + return mixed_voice + + async def kokoro_test_mix(self, mixer: VoiceMixer): + """Test a mixed voice by generating a sample.""" + mixed_voice_tensor = self._kokoro_mix(mixer) + + loop = asyncio.get_event_loop() + + pipeline = KPipeline(lang_code="a") + + with tempfile.TemporaryDirectory() as temp_dir: + file_path = os.path.join(temp_dir, f"tts-{uuid.uuid4()}.wav") + + await loop.run_in_executor( + None, + functools.partial( + self._kokoro_generate, + pipeline, + "This is a test of the mixed voice.", + mixed_voice_tensor, + file_path, + ), + ) + + # Read and play the audio + with open(file_path, "rb") as f: + audio_data = f.read() + self.play_audio(audio_data) + + async def kokoro_save_mix(self, voice_id: str, mixer: VoiceMixer) -> Path: + """Save a voice tensor to disk.""" + # Ensure the directory exists + CUSTOM_VOICE_STORAGE.mkdir(parents=True, exist_ok=True) + + save_to_path = CUSTOM_VOICE_STORAGE / f"{voice_id}.pt" + voice_tensor = self._kokoro_mix(mixer) + torch.save(voice_tensor, save_to_path) + return save_to_path + + def _kokoro_generate( + self, + pipeline: "KPipeline", + text: str, + voice: "str | torch.Tensor", + file_path: str, + ) -> None: + """Generate audio from text using the given voice.""" + try: + generator = pipeline(text, voice=voice) + for i, (gs, ps, audio) in enumerate(generator): + sf.write(file_path, audio, 24000) + except Exception as e: + traceback.print_exc() + raise e + + async def kokoro_generate( + self, chunk: Chunk, context: GenerationContext + ) -> bytes | None: + kokoro_instance = getattr(self, "kokoro_instance", None) + + reload: bool = False + + if not kokoro_instance: + reload = True + + if reload: + log.debug( + "kokoro - reinitializing tts instance", + ) + # Lazy import heavy dependencies only when needed + + self.kokoro_instance = KokoroInstance( + # a= American English + # TODO: allow config of language??? 
+ pipeline=KPipeline(lang_code="a") + ) + + pipeline = self.kokoro_instance.pipeline + + loop = asyncio.get_event_loop() + + with tempfile.TemporaryDirectory() as temp_dir: + file_path = os.path.join(temp_dir, f"tts-{uuid.uuid4()}.wav") + + await loop.run_in_executor( + None, + functools.partial( + self._kokoro_generate, + pipeline, + chunk.cleaned_text, + chunk.voice.provider_id, + file_path, + ), + ) + + with open(file_path, "rb") as f: + return f.read() diff --git a/src/talemate/agents/tts/nodes.py b/src/talemate/agents/tts/nodes.py new file mode 100644 index 00000000..51c152f2 --- /dev/null +++ b/src/talemate/agents/tts/nodes.py @@ -0,0 +1,165 @@ +import structlog +from typing import ClassVar +from talemate.game.engine.nodes.core import ( + GraphState, + PropertyField, + TYPE_CHOICES, + UNRESOLVED, +) +from talemate.game.engine.nodes.registry import register +from talemate.game.engine.nodes.agent import AgentSettingsNode, AgentNode +from talemate.agents.tts.schema import Voice, VoiceLibrary + +TYPE_CHOICES.extend( + [ + "tts/voice", + ] +) + +log = structlog.get_logger("talemate.game.engine.nodes.agents.tts") + + +@register("agents/tts/Settings") +class TTSAgentSettings(AgentSettingsNode): + """ + Base node to render TTS agent settings. + """ + + _agent_name: ClassVar[str] = "tts" + + def __init__(self, title="TTS Agent Settings", **kwargs): + super().__init__(title=title, **kwargs) + + +@register("agents/tts/GetVoice") +class GetVoice(AgentNode): + """ + Gets a voice from the TTS agent. + """ + + _agent_name: ClassVar[str] = "tts" + + class Fields: + voice_id = PropertyField( + name="voice_id", + type="str", + description="The ID of the voice to get", + default=UNRESOLVED, + ) + + def __init__(self, title="Get Voice", **kwargs): + super().__init__(title=title, **kwargs) + + @property + def voice_library(self) -> VoiceLibrary: + return self.agent.voice_library + + def setup(self): + self.add_input("voice_id", socket_type="str", optional=True) + self.set_property("voice_id", UNRESOLVED) + + self.add_output("voice", socket_type="tts/voice") + + async def run(self, state: GraphState): + voice_id = self.require_input("voice_id") + + voice = self.voice_library.get_voice(voice_id) + + self.set_output_values({"voice": voice}) + + +@register("agents/tts/GetNarratorVoice") +class GetNarratorVoice(AgentNode): + """ + Gets the narrator voice from the TTS agent. + """ + + _agent_name: ClassVar[str] = "tts" + + def __init__(self, title="Get Narrator Voice", **kwargs): + super().__init__(title=title, **kwargs) + + def setup(self): + self.add_output("voice", socket_type="tts/voice") + + async def run(self, state: GraphState): + voice = self.agent.narrator_voice + + self.set_output_values({"voice": voice}) + + +@register("agents/tts/UnpackVoice") +class UnpackVoice(AgentNode): + """ + Unpacks a voice from the TTS agent. 
+ """ + + _agent_name: ClassVar[str] = "tts" + + def __init__(self, title="Unpack Voice", **kwargs): + super().__init__(title=title, **kwargs) + + def setup(self): + self.add_input("voice", socket_type="tts/voice") + self.add_output("voice", socket_type="tts/voice") + self.add_output("label", socket_type="str") + self.add_output("provider", socket_type="str") + self.add_output("provider_id", socket_type="str") + self.add_output("provider_model", socket_type="str") + self.add_output("tags", socket_type="list") + self.add_output("parameters", socket_type="dict") + self.add_output("is_scene_asset", socket_type="bool") + + async def run(self, state: GraphState): + voice: Voice = self.require_input("voice") + + self.set_output_values( + { + "voice": voice, + **voice.model_dump(), + } + ) + + +@register("agents/tts/Generate") +class Generate(AgentNode): + """ + Generates a voice from the TTS agent. + """ + + _agent_name: ClassVar[str] = "tts" + + class Fields: + text = PropertyField( + name="text", + type="text", + description="The text to generate", + default=UNRESOLVED, + ) + + def __init__(self, title="Generate TTS", **kwargs): + super().__init__(title=title, **kwargs) + + def setup(self): + self.add_input("state") + self.add_input("text", socket_type="text", optional=True) + self.add_input("voice", socket_type="tts/voice", optional=True) + self.add_input("character", socket_type="character", optional=True) + self.set_property("text", UNRESOLVED) + self.add_output("state") + + async def run(self, state: GraphState): + text = self.require_input("text") + voice = self.normalized_input_value("voice") + character = self.normalized_input_value("character") + + if not voice and not character: + raise ValueError("Either voice or character must be provided") + + await self.agent.generate( + text=text, + character=character, + force_voice=voice, + ) + + self.set_output_values({"state": state}) diff --git a/src/talemate/agents/tts/openai.py b/src/talemate/agents/tts/openai.py new file mode 100644 index 00000000..1f60c8ae --- /dev/null +++ b/src/talemate/agents/tts/openai.py @@ -0,0 +1,230 @@ +import io +from typing import Union + +import structlog +from openai import AsyncOpenAI +from talemate.ux.schema import Action +from talemate.agents.base import AgentAction, AgentActionConfig, AgentDetail +from .schema import Voice, VoiceLibrary, Chunk, GenerationContext, INFO_CHUNK_SIZE +from .voice_library import add_default_voices + +log = structlog.get_logger("talemate.agents.tts.openai") + +OPENAI_INFO = """ +OpenAI TTS is a cloud-based text to speech model. + +A list of available voices can be found at [https://platform.openai.com/docs/guides/text-to-speech#voice-options](https://platform.openai.com/docs/guides/text-to-speech#voice-options). 
+""" + +add_default_voices( + [ + Voice( + label="Alloy", + provider="openai", + provider_id="alloy", + tags=["neutral", "female"], + ), + Voice( + label="Ash", + provider="openai", + provider_id="ash", + tags=["male"], + ), + Voice( + label="Ballad", + provider="openai", + provider_id="ballad", + tags=["male", "energetic"], + ), + Voice( + label="Coral", + provider="openai", + provider_id="coral", + tags=["female", "energetic"], + ), + Voice( + label="Echo", + provider="openai", + provider_id="echo", + tags=["male", "neutral"], + ), + Voice( + label="Fable", + provider="openai", + provider_id="fable", + tags=["neutral", "feminine"], + ), + Voice( + label="Onyx", + provider="openai", + provider_id="onyx", + tags=["male"], + ), + Voice( + label="Nova", + provider="openai", + provider_id="nova", + tags=["female"], + ), + Voice( + label="Sage", + provider="openai", + provider_id="sage", + tags=["female"], + ), + Voice( + label="Shimmer", + provider="openai", + provider_id="shimmer", + tags=["female"], + ), + ] +) + + +class OpenAIMixin: + """ + OpenAI TTS agent mixin for cloud-based text to speech. + """ + + @classmethod + def add_actions(cls, actions: dict[str, AgentAction]): + actions["_config"].config["apis"].choices.append( + { + "value": "openai", + "label": "OpenAI", + "help": "OpenAI is a cloud-based text to speech model that uses the OpenAI API. (API key required)", + } + ) + + actions["openai"] = AgentAction( + enabled=True, + container=True, + icon="mdi-server-outline", + label="OpenAI", + description="OpenAI TTS is a cloud-based text to speech API. (API key required and must be set in the Talemate Settings -> Application -> OpenAI)", + config={ + "model": AgentActionConfig( + type="text", + value="gpt-4o-mini-tts", + choices=[ + {"value": "gpt-4o-mini-tts", "label": "GPT-4o Mini TTS"}, + {"value": "tts-1", "label": "TTS 1"}, + {"value": "tts-1-hd", "label": "TTS 1 HD"}, + ], + label="Model", + description="TTS model to use", + ), + "chunk_size": AgentActionConfig( + type="number", + min=0, + step=64, + max=2048, + value=512, + label="Chunk size", + note=INFO_CHUNK_SIZE, + ), + }, + ) + + return actions + + @classmethod + def add_voices(cls, voices: dict[str, VoiceLibrary]): + voices["openai"] = VoiceLibrary(api="openai") + + @property + def openai_chunk_size(self) -> int: + return self.actions["openai"].config["chunk_size"].value + + @property + def openai_max_generation_length(self) -> int: + return 1024 + + @property + def openai_model(self) -> str: + return self.actions["openai"].config["model"].value + + @property + def openai_model_choices(self) -> list[str]: + return [ + {"label": choice["label"], "value": choice["value"]} + for choice in self.actions["openai"].config["model"].choices + ] + + @property + def openai_api_key(self) -> str: + return self.config.openai.api_key + + @property + def openai_configured(self) -> bool: + return bool(self.openai_api_key) and bool(self.openai_model) + + @property + def openai_info(self) -> str: + return OPENAI_INFO + + @property + def openai_not_configured_reason(self) -> str | None: + if not self.openai_api_key: + return "OpenAI API key not set" + if not self.openai_model: + return "OpenAI model not set" + return None + + @property + def openai_not_configured_action(self) -> Action | None: + if not self.openai_api_key: + return Action( + action_name="openAppConfig", + arguments=["application", "openai_api"], + label="Set API Key", + icon="mdi-key", + ) + if not self.openai_model: + return Action( + action_name="openAgentSettings", + 
arguments=["tts", "openai"], + label="Set Model", + icon="mdi-brain", + ) + return None + + @property + def openai_agent_details(self) -> dict: + details = {} + + if not self.openai_configured: + details["openai_api_key"] = AgentDetail( + icon="mdi-key", + value="OpenAI API key not set", + description="OpenAI API key not set. You can set it in the Talemate Settings -> Application -> OpenAI", + color="error", + ).model_dump() + else: + details["openai_model"] = AgentDetail( + icon="mdi-brain", + value=self.openai_model, + description="The model to use for OpenAI", + ).model_dump() + + return details + + async def openai_generate( + self, chunk: Chunk, context: GenerationContext, chunk_size: int = 1024 + ) -> Union[bytes, None]: + client = AsyncOpenAI(api_key=self.openai_api_key) + + model = chunk.model or self.openai_model + + response = await client.audio.speech.create( + model=model, voice=chunk.voice.provider_id, input=chunk.cleaned_text + ) + + bytes_io = io.BytesIO() + for chunk in response.iter_bytes(chunk_size=chunk_size): + if chunk: + bytes_io.write(chunk) + + # Put the audio data in the queue for playback + return bytes_io.getvalue() diff --git a/src/talemate/agents/tts/providers.py b/src/talemate/agents/tts/providers.py new file mode 100644 index 00000000..367a07e8 --- /dev/null +++ b/src/talemate/agents/tts/providers.py @@ -0,0 +1,24 @@ +from .schema import VoiceProvider +from typing import Generator + +__all__ = ["register", "provider", "providers"] + +PROVIDERS = {} + + +class register: + def __call__(self, cls: type[VoiceProvider]): + PROVIDERS[cls().name] = cls + return cls + + +def provider(name: str) -> VoiceProvider: + cls = PROVIDERS.get(name) + if not cls: + return VoiceProvider(name=name) + return cls() + + +def providers() -> Generator[VoiceProvider, None, None]: + for cls in PROVIDERS.values(): + yield cls() diff --git a/src/talemate/agents/tts/schema.py b/src/talemate/agents/tts/schema.py new file mode 100644 index 00000000..806d2b38 --- /dev/null +++ b/src/talemate/agents/tts/schema.py @@ -0,0 +1,201 @@ +import pydantic +from pathlib import Path +import re +from typing import Callable, Literal + +from talemate.ux.schema import Note, Field +from talemate.path import TALEMATE_ROOT + +__all__ = [ + "APIStatus", + "Chunk", + "GenerationContext", + "VoiceProvider", + "Voice", + "VoiceLibrary", + "VoiceWeight", + "VoiceMixer", + "VoiceGenerationEmission", + "INFO_CHUNK_SIZE", +] + + +MAX_TAG_LENGTH: int = 64 # Maximum number of characters per tag (configurable) +MAX_TAGS_PER_VOICE: int = 10 # Maximum number of tags per voice (configurable) + +DEFAULT_VOICE_DIR = TALEMATE_ROOT / "tts" / "voice" + +INFO_CHUNK_SIZE = "Split text into chunks of this size. Smaller values will increase responsiveness at the cost of lost context between chunks. (Stuff like appropriate inflection, etc.). 0 = no chunking." + + +class VoiceProvider(pydantic.BaseModel): + name: str + voice_parameters: list[Field] = pydantic.Field(default_factory=list) + allow_model_override: bool = True + allow_file_upload: bool = False + upload_file_types: list[str] | None = None + + @property + def default_parameters(self) -> dict[str, str | float | int | bool]: + return {param.name: param.value for param in self.voice_parameters} + + @property + def default_voice_dir(self) -> Path: + return DEFAULT_VOICE_DIR / self.name + + def voice_parameter( + self, voice: "Voice", name: str + ) -> str | float | int | bool | None: + """ + Get a parameter from the voice. 
+        If the parameter is not set, return the default parameter from the provider.
+        """
+        if name in voice.parameters:
+            return voice.parameters[name]
+        return self.default_parameters.get(name)
+
+
+class VoiceWeight(pydantic.BaseModel):
+    id: str
+    weight: float
+
+
+class VoiceMixer(pydantic.BaseModel):
+    voices: list[VoiceWeight]
+
+
+class Voice(pydantic.BaseModel):
+    # arbitrary voice label to allow a human to easily identify the voice
+    label: str
+
+    # voice provider, i.e. the TTS API that serves this voice
+    provider: str
+
+    # voice id as known to the voice provider
+    provider_id: str
+
+    # optionally override the provider's default model for this voice
+    provider_model: str | None = None
+
+    # free-form tags for categorizing the voice (e.g. "male", "energetic")
+    tags: list[str] = pydantic.Field(default_factory=list)
+
+    # provider-specific parameters for the voice
+    parameters: dict[str, str | float | int | bool] = pydantic.Field(
+        default_factory=dict
+    )
+
+    is_scene_asset: bool = False
+
+    @pydantic.field_validator("tags")
+    @classmethod
+    def _validate_tags(cls, v: list[str]):
+        """Validate tag list length and individual tag length."""
+        if len(v) > MAX_TAGS_PER_VOICE:
+            raise ValueError(
+                f"Too many tags – maximum {MAX_TAGS_PER_VOICE} tags are allowed per voice"
+            )
+        for tag in v:
+            if len(tag) > MAX_TAG_LENGTH:
+                raise ValueError(
+                    f"Tag '{tag}' exceeds maximum length of {MAX_TAG_LENGTH} characters"
+                )
+        return v
+
+    model_config = pydantic.ConfigDict(validate_assignment=True, exclude_none=True)
+
+    @pydantic.computed_field(description="The unique identifier for the voice")
+    @property
+    def id(self) -> str:
+        return f"{self.provider}:{self.provider_id}"
+
+
+class VoiceLibrary(pydantic.BaseModel):
+    version: int = 1
+    voices: dict[str, Voice] = pydantic.Field(default_factory=dict)
+
+    def get_voice(self, voice_id: str) -> Voice | None:
+        return self.voices.get(voice_id)
+
+
+class Chunk(pydantic.BaseModel):
+    text: list[str] = pydantic.Field(default_factory=list)
+    type: Literal["dialogue", "exposition"]
+    character_name: str | None = None
+    api: str | None = None
+    voice: Voice | None = None
+    model: str | None = None
+    generate_fn: Callable | None = None
+    prepare_fn: Callable | None = None
+    message_id: int | None = None
+
+    @property
+    def cleaned_text(self) -> str:
+        cleaned: str = self.text[0].replace("*", "").replace('"', "").replace("`", "")
+
+        # troublemakers
+        cleaned = cleaned.replace("—", " - ").replace("…", "...").replace(";", ",")
+
+        # replace any grouped-up whitespace with a single space
+        cleaned = re.sub(r"\s+", " ", cleaned)
+
+        # replace fully uppercase words with lowercase
+        # e.g. "HELLO" -> "hello"
+        cleaned = re.sub(r"[A-Z]{2,}", lambda m: m.group(0).lower(), cleaned)
+
+        cleaned = cleaned.strip(",").strip()
+
+        # If there is no common sentence-ending punctuation, add a period
+        if len(cleaned) > 0 and cleaned[-1] not in [".", "!", "?"]:
+            cleaned += "."
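+        # Full pipeline example: '"HELLO there…" *waves*' -> 'hello there... waves.'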
+
+        return cleaned.strip().strip(",").strip()
+
+    @property
+    def sub_chunks(self) -> list["Chunk"]:
+        if len(self.text) == 1:
+            return [self]
+
+        return [
+            Chunk(
+                text=[text],
+                type=self.type,
+                character_name=self.character_name,
+                api=self.api,
+                voice=Voice(**self.voice.model_dump()),
+                model=self.model,
+                generate_fn=self.generate_fn,
+                prepare_fn=self.prepare_fn,
+            )
+            for text in self.text
+        ]
+
+
+class GenerationContext(pydantic.BaseModel):
+    chunks: list[Chunk] = pydantic.Field(default_factory=list)
+
+
+class VoiceGenerationEmission(pydantic.BaseModel):
+    chunk: Chunk
+    context: GenerationContext
+    wav_bytes: bytes | None = None
+
+
+class ModelChoice(pydantic.BaseModel):
+    label: str
+    value: str
+
+
+class APIStatus(pydantic.BaseModel):
+    """Status of an API."""
+
+    api: str
+    enabled: bool
+    ready: bool
+    configured: bool
+    provider: VoiceProvider
+    messages: list[Note] = pydantic.Field(default_factory=list)
+    supports_mixing: bool = False
+
+    default_model: str | None = None
+    model_choices: list[ModelChoice] = pydantic.Field(default_factory=list)
diff --git a/src/talemate/agents/tts/util.py b/src/talemate/agents/tts/util.py
new file mode 100644
index 00000000..204b0771
--- /dev/null
+++ b/src/talemate/agents/tts/util.py
@@ -0,0 +1,111 @@
+from pathlib import Path
+from typing import TYPE_CHECKING
+import structlog
+
+from .schema import TALEMATE_ROOT, Voice, VoiceProvider
+
+from .voice_library import get_instance
+
+if TYPE_CHECKING:
+    from talemate.tale_mate import Scene
+
+log = structlog.get_logger("talemate.agents.tts.util")
+
+__all__ = [
+    "voice_parameter",
+    "voice_is_talemate_asset",
+    "voice_is_scene_asset",
+    "get_voice",
+]
+
+
+def voice_parameter(
+    voice: Voice, provider: VoiceProvider, name: str
+) -> str | float | int | bool | None:
+    """
+    Get a parameter from the voice.
+    """
+    if name in voice.parameters:
+        return voice.parameters[name]
+    return provider.default_parameters.get(name)
+
+
+def voice_is_talemate_asset(
+    voice: Voice, provider: VoiceProvider
+) -> tuple[bool, Path | None]:
+    """
+    Check if the voice is a Talemate asset.
+    """
+
+    if not provider.allow_file_upload:
+        return False, None
+
+    path = Path(voice.provider_id)
+    if not path.is_absolute():
+        path = TALEMATE_ROOT / path
+    try:
+        resolved = path.resolve(strict=False)
+    except Exception as e:
+        log.error(
+            "voice_is_talemate_asset - invalid path",
+            error=e,
+            voice_id=voice.provider_id,
+        )
+        return False, None
+
+    root = TALEMATE_ROOT.resolve()
+    log.debug(
+        "voice_is_talemate_asset - resolved", resolved=str(resolved), root=str(root)
+    )
+    if not str(resolved).startswith(str(root)):
+        return False, None
+
+    return True, resolved
+
+
+def voice_is_scene_asset(voice: Voice, provider: VoiceProvider) -> bool:
+    """
+    Check if the voice is a scene asset.
+
+    Scene assets are stored in the scene's assets directory.
+
+    This function does NOT check .is_scene_asset but does path resolution to
+    determine if the voice is a scene asset.
+    """
+
+    is_talemate_asset, resolved = voice_is_talemate_asset(voice, provider)
+    if not is_talemate_asset:
+        return False
+
+    SCENES_DIR = TALEMATE_ROOT / "scenes"
+
+    if str(resolved).startswith(str(SCENES_DIR.resolve())):
+        return True
+
+    return False
+
+
+def get_voice(scene: "Scene", voice_id: str) -> Voice | None:
+    """Return a Voice by *voice_id*, preferring the scene's library (if any).
+
+    Args:
+        scene: Scene instance or ``None``.
+        voice_id: The fully-qualified voice identifier (``provider:provider_id``).
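+
+    Example (voice ids follow ``provider:provider_id``, e.g. one of the default
+    Kokoro voices)::
+
+        voice = get_voice(scene, "kokoro:af_bella")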
+ + The function first checks *scene.voice_library* (if present) and falls back + to the global voice library instance. + """ + + try: + if scene and getattr(scene, "voice_library", None): + voice = scene.voice_library.get_voice(voice_id) + if voice: + return voice + except Exception as e: + log.error("get_voice - scene lookup failed", error=e) + + try: + return get_instance().get_voice(voice_id) + except Exception as e: + log.error("get_voice - global lookup failed", error=e) + return None diff --git a/src/talemate/agents/tts/voice_library.py b/src/talemate/agents/tts/voice_library.py new file mode 100644 index 00000000..74afb15c --- /dev/null +++ b/src/talemate/agents/tts/voice_library.py @@ -0,0 +1,169 @@ +import structlog +from pathlib import Path # +import pydantic + +import talemate.emit.async_signals as async_signals + +from .schema import VoiceLibrary, Voice +from typing import TYPE_CHECKING, Callable, Literal + +if TYPE_CHECKING: + from talemate.tale_mate import Scene + +__all__ = [ + "load_voice_library", + "save_voice_library", + "get_instance", + "add_default_voices", + "DEFAULT_VOICES", + "VOICE_LIBRARY_PATH", + "require_instance", + "load_scene_voice_library", + "save_scene_voice_library", + "scoped_voice_library", +] + +log = structlog.get_logger("talemate.agents.tts.voice_library") + +async_signals.register( + "voice_library.update.before", + "voice_library.update.after", +) + +VOICE_LIBRARY_PATH = ( + Path(__file__).parent.parent.parent.parent.parent + / "tts" + / "voice" + / "voice-library.json" +) + + +DEFAULT_VOICES = {} + +# TODO: does this need to be made thread safe? +VOICE_LIBRARY = None + + +class ScopedVoiceLibrary(pydantic.BaseModel): + voice_library: VoiceLibrary + fn_save: Callable[[VoiceLibrary], None] + + async def save(self): + await self.fn_save(self.voice_library) + + +def scoped_voice_library( + scope: Literal["global", "scene"], scene: "Scene | None" = None +) -> ScopedVoiceLibrary: + if scope == "global": + return ScopedVoiceLibrary( + voice_library=get_instance(), fn_save=save_voice_library + ) + else: + if not scene: + raise ValueError("Scene is required for scoped voice library") + + async def _save(library: VoiceLibrary): + await save_scene_voice_library(scene, library) + + return ScopedVoiceLibrary(voice_library=scene.voice_library, fn_save=_save) + + +async def require_instance(): + global VOICE_LIBRARY + if not VOICE_LIBRARY: + VOICE_LIBRARY = await load_voice_library() + return VOICE_LIBRARY + + +async def load_voice_library() -> VoiceLibrary: + """ + Load the voice library from the file. + """ + try: + with open(VOICE_LIBRARY_PATH, "r") as f: + return VoiceLibrary.model_validate_json(f.read()) + except FileNotFoundError: + library = VoiceLibrary(voices=DEFAULT_VOICES) + await save_voice_library(library) + return library + finally: + log.debug("loaded voice library", path=str(VOICE_LIBRARY_PATH)) + + +async def save_voice_library(voice_library: VoiceLibrary): + """ + Save the voice library to the file. + """ + await async_signals.get("voice_library.update.before").send(voice_library) + with open(VOICE_LIBRARY_PATH, "w") as f: + f.write(voice_library.model_dump_json(indent=2)) + await async_signals.get("voice_library.update.after").send(voice_library) + + +def get_instance() -> VoiceLibrary: + """ + Get the shared voice library instance. + """ + if not VOICE_LIBRARY: + raise RuntimeError("Voice library not loaded yet.") + return VOICE_LIBRARY + + +def add_default_voices(voices: list[Voice]): + """ + Add default voices to the voice library. 
+ """ + global DEFAULT_VOICES + for voice in voices: + DEFAULT_VOICES[voice.id] = voice + + +def voices_for_apis(apis: list[str], voice_library: VoiceLibrary) -> list[Voice]: + """ + Get the voices for the given apis. + """ + return [voice for voice in voice_library.voices.values() if voice.provider in apis] + + +def _scene_library_path(scene: "Scene") -> Path: + """Return the path to the *scene* voice-library.json file.""" + + return Path(scene.info_dir) / "voice-library.json" + + +async def load_scene_voice_library(scene: "Scene") -> VoiceLibrary: + """Load and return the voice library for *scene*. + + If the file does not exist an empty ``VoiceLibrary`` instance is returned. + The returned instance is *not* stored on the scene – caller decides. + """ + + path = _scene_library_path(scene) + + try: + if path.exists(): + with open(path, "r") as f: + library = VoiceLibrary.model_validate_json(f.read()) + else: + library = VoiceLibrary() + except Exception as e: + log.error("load_scene_voice_library", error=e, path=str(path)) + library = VoiceLibrary() + + return library + + +async def save_scene_voice_library(scene: "Scene", library: VoiceLibrary): + """Persist *library* to the scene's ``voice-library.json``. + + The directory ``scene/{name}/info`` is created if necessary. + """ + + path = _scene_library_path(scene) + try: + path.parent.mkdir(parents=True, exist_ok=True) + with open(path, "w") as f: + f.write(library.model_dump_json(indent=2)) + except Exception as e: + log.error("save_scene_voice_library", error=e, path=str(path)) diff --git a/src/talemate/agents/tts/websocket_handler.py b/src/talemate/agents/tts/websocket_handler.py new file mode 100644 index 00000000..f1133a57 --- /dev/null +++ b/src/talemate/agents/tts/websocket_handler.py @@ -0,0 +1,674 @@ +from __future__ import annotations + +import asyncio +from pathlib import Path +import pydantic +import structlog + + +from typing import TYPE_CHECKING, Literal +import base64 +import os +import re + +import talemate.emit.async_signals as async_signals +from talemate.instance import get_agent +from talemate.server.websocket_plugin import Plugin + +import talemate.scene_message as scene_message + +from .voice_library import ( + get_instance as get_voice_library, + save_voice_library, + scoped_voice_library, + ScopedVoiceLibrary, +) +from .schema import ( + Voice, + GenerationContext, + Chunk, + APIStatus, + VoiceMixer, + VoiceWeight, + TALEMATE_ROOT, + VoiceLibrary, +) + +from .util import voice_is_scene_asset +from .providers import provider + +if TYPE_CHECKING: + from talemate.agents.tts import TTSAgent + from talemate.tale_mate import Scene + from talemate.character import Character + +__all__ = [ + "TTSWebsocketHandler", +] + +log = structlog.get_logger("talemate.server.voice_library") + + +class EditVoicePayload(pydantic.BaseModel): + """Payload for editing an existing voice. 
Only specified fields are updated.""" + + voice_id: str + scope: Literal["global", "scene"] + + label: str + provider: str + provider_id: str + provider_model: str | None = None + tags: list[str] = pydantic.Field(default_factory=list) + parameters: dict[str, int | float | str | bool] = pydantic.Field( + default_factory=dict + ) + + +class VoiceRefPayload(pydantic.BaseModel): + """Payload referencing an existing voice by its id (used for remove / test).""" + + voice_id: str + scope: Literal["global", "scene"] + + +class TestVoicePayload(pydantic.BaseModel): + """Payload for testing a voice.""" + + provider: str + provider_id: str + provider_model: str | None = None + text: str | None = None + parameters: dict[str, int | float | str | bool] = pydantic.Field( + default_factory=dict + ) + + +class TestCharacterVoicePayload(pydantic.BaseModel): + """Payload for testing a character voice.""" + + character_name: str + text: str | None = None + + +class AddVoicePayload(Voice): + """Explicit payload for adding a new voice - identical fields to Voice.""" + + scope: Literal["global", "scene"] + + +class TestMixedVoicePayload(pydantic.BaseModel): + """Payload for testing a mixed voice.""" + + provider: str + voices: list[VoiceWeight] + + +class SaveMixedVoicePayload(pydantic.BaseModel): + """Payload for saving a mixed voice.""" + + provider: str + label: str + voices: list[VoiceWeight] + tags: list[str] = pydantic.Field(default_factory=list) + + +class UploadVoiceFilePayload(pydantic.BaseModel): + """Payload for uploading a new voice file for providers that support it.""" + + provider: str + label: str + content: str # Base64 data URL (e.g. data:audio/wav;base64,AAAB...) + as_scene_asset: bool = False + + @pydantic.field_validator("content") + @classmethod + def _validate_content(cls, v: str): + if not v.startswith("data:") or ";base64," not in v: + raise ValueError("Content must be a base64 data URL") + return v + + +class GenerateForSceneMessagePayload(pydantic.BaseModel): + """Payload for generating a voice for a scene message.""" + + message_id: int | Literal["intro"] + + +class TTSWebsocketHandler(Plugin): + """Websocket plugin to manage the TTS voice library.""" + + router = "tts" + + def __init__(self, websocket_handler): + super().__init__(websocket_handler) + # Immediately send current voice list to the frontend + asyncio.create_task(self._send_voice_list()) + + # --------------------------------------------------------------------- + # Events + # --------------------------------------------------------------------- + + def connect(self): + # needs to be after config is saved so the TTS agent has already + # refreshed to the latest config + async_signals.get("config.changed.follow").connect( + self.on_app_config_change_followup + ) + + async def on_app_config_change_followup(self, event): + self._send_api_status() + + # --------------------------------------------------------------------- + # Helper methods + # --------------------------------------------------------------------- + + async def _send_voice_list(self, select_voice_id: str | None = None): + # global voice library + voice_library = get_voice_library() + voices = [v.model_dump() for v in voice_library.voices.values()] + voices.sort(key=lambda x: x["label"]) + + # scene voice library + if self.scene: + scene_voice_library = self.scene.voice_library + scene_voices = [v.model_dump() for v in scene_voice_library.voices.values()] + scene_voices.sort(key=lambda x: x["label"]) + else: + scene_voices = [] + + 
self.websocket_handler.queue_put(
+            {
+                "type": self.router,
+                "action": "voices",
+                "voices": voices,
+                "scene_voices": scene_voices,
+                "select_voice_id": select_voice_id,
+            }
+        )
+
+    def _voice_exists(self, voice_library: VoiceLibrary, voice_id: str) -> bool:
+        return voice_id in voice_library.voices
+
+    def _broadcast_update(self, select_voice_id: str | None = None):
+        # After any mutation we broadcast the full list for simplicity
+        asyncio.create_task(self._send_voice_list(select_voice_id))
+
+    def _send_api_status(self):
+        tts_agent: "TTSAgent" = get_agent("tts")
+        api_status: list[APIStatus] = tts_agent.api_status
+        self.websocket_handler.queue_put(
+            {
+                "type": self.router,
+                "action": "api_status",
+                "api_status": [s.model_dump() for s in api_status],
+            }
+        )
+
+    # ---------------------------------------------------------------------
+    # Handlers
+    # ---------------------------------------------------------------------
+
+    async def handle_list(self, data: dict):
+        await self._send_voice_list()
+
+    async def handle_api_status(self, data: dict):
+        self._send_api_status()
+
+    async def handle_add(self, data: dict):
+        try:
+            voice = AddVoicePayload(**data)
+        except pydantic.ValidationError as e:
+            await self.signal_operation_failed(str(e))
+            return
+
+        if voice.scope == "scene" and not self.scene:
+            await self.signal_operation_failed("No scene active")
+            return
+
+        scoped: ScopedVoiceLibrary = scoped_voice_library(voice.scope, self.scene)
+        voice.is_scene_asset = voice.scope == "scene"
+
+        if self._voice_exists(scoped.voice_library, voice.id):
+            await self.signal_operation_failed("Voice already exists")
+            return
+
+        scoped.voice_library.voices[voice.id] = voice
+
+        await scoped.save()
+
+        self._broadcast_update()
+        await self.signal_operation_done()
+
+    async def handle_remove(self, data: dict):
+        try:
+            payload = VoiceRefPayload(**data)
+        except pydantic.ValidationError as e:
+            await self.signal_operation_failed(str(e))
+            return
+
+        tts_agent: "TTSAgent" = get_agent("tts")
+
+        scoped: ScopedVoiceLibrary = scoped_voice_library(payload.scope, self.scene)
+
+        log.debug("Removing voice", voice_id=payload.voice_id, scope=payload.scope)
+
+        try:
+            voice = scoped.voice_library.voices.pop(payload.voice_id)
+        except KeyError:
+            await self.signal_operation_failed("Voice not found (1)")
+            return
+
+        provider = voice.provider
+        # check if the provider has a delete method
+        delete_method = getattr(tts_agent, f"{provider}_delete_voice", None)
+        if delete_method:
+            delete_method(voice)
+
+        await scoped.save()
+        self._broadcast_update()
+        await self.signal_operation_done()
+
+    async def handle_edit(self, data: dict):
+        try:
+            payload = EditVoicePayload(**data)
+        except pydantic.ValidationError as e:
+            await self.signal_operation_failed(str(e))
+            return
+
+        scoped: ScopedVoiceLibrary = scoped_voice_library(payload.scope, self.scene)
+        voice = scoped.voice_library.voices.get(payload.voice_id)
+        if not voice:
+            await self.signal_operation_failed("Voice not found")
+            return
+
+        # all fields are always provided
+        voice.label = payload.label
+        voice.provider = payload.provider
+        voice.provider_id = payload.provider_id
+        voice.provider_model = payload.provider_model
+        voice.tags = payload.tags
+        voice.parameters = payload.parameters
+        voice.is_scene_asset = voice_is_scene_asset(voice, provider(voice.provider))
+
+        # If provider or provider_id changed, id changes -> reinsert
+        new_id = voice.id
+        if new_id != payload.voice_id:
+            # Remove old key, insert new
+            del scoped.voice_library.voices[payload.voice_id]
+            
scoped.voice_library.voices[new_id] = voice + + await scoped.save() + self._broadcast_update() + await self.signal_operation_done() + + async def handle_test(self, data: dict): + """Handle a request to test a voice. + + Supports two payload formats: + + 1. Existing voice - identified by ``voice_id`` (legacy behaviour) + 2. Unsaved voice - identified by at least ``provider`` and ``provider_id``. + """ + + tts_agent: "TTSAgent" = get_agent("tts") + + try: + payload = TestVoicePayload(**data) + except pydantic.ValidationError as e: + await self.signal_operation_failed(str(e)) + return + + voice = Voice( + label=f"{payload.provider_id} (test)", + provider=payload.provider, + provider_id=payload.provider_id, + provider_model=payload.provider_model, + parameters=payload.parameters, + ) + + if not tts_agent or not tts_agent.api_ready(voice.provider): + await self.signal_operation_failed(f"API '{voice.provider}' not ready") + return + + generate_fn = getattr(tts_agent, f"{voice.provider}_generate", None) + if not generate_fn: + await self.signal_operation_failed("Provider not supported by TTS agent") + return + + prepare_fn = getattr(tts_agent, f"{voice.provider}_prepare_chunk", None) + + # Use provided text or default + test_text = payload.text or "This is a test of the selected voice." + + # Build minimal generation context + context = GenerationContext() + chunk = Chunk( + text=[test_text], + type="dialogue", + api=voice.provider, + voice=voice, + model=voice.provider_model, + generate_fn=generate_fn, + prepare_fn=prepare_fn, + character_name=None, + ) + context.chunks.append(chunk) + + # Run generation in background so we don't block the event loop + async def _run_test(): + try: + await tts_agent.generate_chunks(context) + finally: + await self.signal_operation_done(signal_only=True) + + asyncio.create_task(_run_test()) + + async def handle_test_character_voice(self, data: dict): + """Handle a request to test a character voice.""" + + try: + payload = TestCharacterVoicePayload(**data) + except pydantic.ValidationError as e: + await self.signal_operation_failed(str(e)) + return + + character = self.scene.get_character(payload.character_name) + if not character: + await self.signal_operation_failed("Character not found") + return + + if not character.voice: + await self.signal_operation_failed("Character has no voice") + return + + text: str = payload.text or "This is a test of the selected voice." 
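+        # delegate to handle_test, re-using the character's stored voice settings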
+ + await self.handle_test( + { + "provider": character.voice.provider, + "provider_id": character.voice.provider_id, + "provider_model": character.voice.provider_model, + "parameters": character.voice.parameters, + "text": text, + } + ) + + async def handle_test_mixed(self, data: dict): + """Handle a request to test a mixed voice.""" + + tts_agent: "TTSAgent" = get_agent("tts") + + try: + payload = TestMixedVoicePayload(**data) + except pydantic.ValidationError as e: + await self.signal_operation_failed(str(e)) + return + + # Validate that weights sum to 1.0 + total_weight = sum(v.weight for v in payload.voices) + if abs(total_weight - 1.0) > 0.001: + await self.signal_operation_failed( + f"Weights must sum to 1.0, got {total_weight}" + ) + return + + if not tts_agent or not tts_agent.api_ready(payload.provider): + await self.signal_operation_failed(f"{payload.provider} API not ready") + return + + # Build mixer + mixer = VoiceMixer(voices=payload.voices) + + # Run test in background using the appropriate provider's test method + test_method = getattr(tts_agent, f"{payload.provider}_test_mix", None) + if not test_method: + await self.signal_operation_failed( + f"{payload.provider} does not implement voice mixing" + ) + return + + async def _run_test(): + try: + await test_method(mixer) + finally: + await self.signal_operation_done(signal_only=True) + + asyncio.create_task(_run_test()) + + async def handle_save_mixed(self, data: dict): + """Handle a request to save a mixed voice.""" + + tts_agent: "TTSAgent" = get_agent("tts") + + try: + payload = SaveMixedVoicePayload(**data) + except pydantic.ValidationError as e: + await self.signal_operation_failed(str(e)) + return + + # Validate that weights sum to 1.0 + total_weight = sum(v.weight for v in payload.voices) + if abs(total_weight - 1.0) > 0.001: + await self.signal_operation_failed( + f"Weights must sum to 1.0, got {total_weight}" + ) + return + + if not tts_agent or not tts_agent.api_ready(payload.provider): + await self.signal_operation_failed(f"{payload.provider} API not ready") + return + + # Build mixer + mixer = VoiceMixer(voices=payload.voices) + + # Create a unique voice id for the mixed voice + voice_id = f"{payload.label.lower().replace(' ', '-')}" + + # Mix and save the voice using the appropriate provider's methods + save_method = getattr(tts_agent, f"{payload.provider}_save_mix", None) + + if not save_method: + await self.signal_operation_failed( + f"{payload.provider} does not implement voice mixing" + ) + return + + try: + saved_path = await save_method(voice_id, mixer) + + # voice id is Path relative to talemate root + voice_id = str(saved_path.relative_to(TALEMATE_ROOT)) + + # Add the voice to the library + new_voice = Voice( + label=payload.label, + provider=payload.provider, + provider_id=voice_id, + tags=payload.tags, + mix=mixer, + ) + + voice_library = get_voice_library() + voice_library.voices[new_voice.id] = new_voice + await save_voice_library(voice_library) + self._broadcast_update(new_voice.id) + await self.signal_operation_done() + + except Exception as e: + log.error("Failed to save mixed voice", error=e) + await self.signal_operation_failed(f"Failed to save mixed voice: {str(e)}") + + async def handle_generate_for_scene_message(self, data: dict): + """Handle a request to generate a voice for a scene message.""" + + tts_agent: "TTSAgent" = get_agent("tts") + scene: "Scene" = self.scene + + log.debug("Generating TTS for scene message", data=data) + + try: + payload = GenerateForSceneMessagePayload(**data) + 
except pydantic.ValidationError as e:
+            await self.signal_operation_failed(str(e))
+            return
+
+        log.debug("Payload", payload=payload)
+
+        character: "Character | None" = None
+        text: str = ""
+        message: scene_message.SceneMessage | None = None
+
+        if payload.message_id == "intro":
+            text = scene.get_intro()
+        else:
+            message = scene.get_message(payload.message_id)
+
+            if not message:
+                await self.signal_operation_failed("Message not found")
+                return
+
+            if message.typ not in ["character", "narrator", "context_investigation"]:
+                await self.signal_operation_failed(
+                    "Message is not a character, narrator, or context investigation message"
+                )
+                return
+
+            log.debug("Message type", message_type=message.typ)
+
+            if isinstance(message, scene_message.CharacterMessage):
+                character = scene.get_character(message.character_name)
+
+                if not character:
+                    await self.signal_operation_failed("Character not found")
+                    return
+
+                text = message.without_name
+            elif isinstance(message, scene_message.ContextInvestigationMessage):
+                text = message.message
+            else:
+                text = message.message
+
+        if not text:
+            await self.signal_operation_failed("No text to generate speech for.")
+            return
+
+        await tts_agent.generate(text, character, message=message)
+
+        await self.signal_operation_done()
+
+    async def handle_stop_and_clear(self, data: dict):
+        """Handle a request from the frontend to stop and clear the current TTS queue."""
+
+        tts_agent: "TTSAgent" = get_agent("tts")
+
+        if not tts_agent:
+            await self.signal_operation_failed("TTS agent not available")
+            return
+
+        try:
+            await tts_agent.stop_and_clear_queue()
+            await self.signal_operation_done()
+        except Exception as e:
+            log.error("Failed to stop and clear TTS queue", error=e)
+            await self.signal_operation_failed(str(e))
+
+    async def handle_upload_voice_file(self, data: dict):
+        """Handle uploading a new audio file for a voice.
+
+        The *provider* defines which MIME types it accepts via
+        ``VoiceProvider.upload_file_types``. This method therefore:
+
+        1. Parses the data-URL to obtain the raw bytes **and** MIME type.
+        2. Verifies the MIME type against the provider's allowed list
+           (if the provider restricts uploads).
+        3. Stores the file under
+
+           ``tts/voice/<provider>/<filename>.<extension>``
+
+           where *extension* is derived from the MIME type (e.g. ``audio/wav`` → ``wav``).
+        4. Returns the relative path ("provider_id") back to the frontend so
+           it can populate the voice's ``provider_id`` field.
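+
+        Example (illustrative, assuming a provider named ``piper`` whose default
+        voice directory is ``tts/voice/piper``): uploading an ``audio/wav`` file
+        labeled "My Voice" would be stored as ``tts/voice/piper/my-voice.wav``.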
+ """ + + try: + payload = UploadVoiceFilePayload(**data) + except pydantic.ValidationError as e: + await self.signal_operation_failed(str(e)) + return + + # Check provider allows file uploads + from .providers import provider as get_provider + + P = get_provider(payload.provider) + if not P.allow_file_upload: + await self.signal_operation_failed( + f"Provider '{payload.provider}' does not support file uploads" + ) + return + + # Build filename from label + def slugify(text: str) -> str: + text = text.lower().strip() + text = re.sub(r"[^a-z0-9]+", "-", text) + return text.strip("-") + + filename_no_ext = slugify(payload.label or "voice") or "voice" + + # Determine media type and validate against provider + try: + header, b64data = payload.content.split(",", 1) + media_type = header.split(":", 1)[1].split(";", 1)[0] + except Exception: + await self.signal_operation_failed("Invalid data URL format") + return + + if P.upload_file_types and media_type not in P.upload_file_types: + await self.signal_operation_failed( + f"File type '{media_type}' not allowed for provider '{payload.provider}'" + ) + return + + extension = media_type.split("/")[1] + filename = f"{filename_no_ext}.{extension}" + + # Determine target directory and path + if not payload.as_scene_asset: + target_dir = P.default_voice_dir + else: + target_dir = Path(self.scene.assets.asset_directory) / "tts" + + os.makedirs(target_dir, exist_ok=True) + target_path = target_dir / filename + + log.debug( + "Target path", + target_path=target_path, + as_scene_asset=payload.as_scene_asset, + ) + + # Decode base64 data URL + try: + file_bytes = base64.b64decode(b64data) + except Exception as e: + await self.signal_operation_failed(f"Invalid base64 data: {e}") + return + + try: + with open(target_path, "wb") as f: + f.write(file_bytes) + except Exception as e: + await self.signal_operation_failed(f"Failed to save file: {e}") + return + + provider_id = str(target_path.relative_to(TALEMATE_ROOT)) + + # Send response back to frontend so it can set provider_id + self.websocket_handler.queue_put( + { + "type": self.router, + "action": "voice_file_uploaded", + "provider_id": provider_id, + } + ) + await self.signal_operation_done(signal_only=True) diff --git a/src/talemate/agents/visual/__init__.py b/src/talemate/agents/visual/__init__.py index 30c5f07d..a1a1e019 100644 --- a/src/talemate/agents/visual/__init__.py +++ b/src/talemate/agents/visual/__init__.py @@ -14,10 +14,9 @@ from talemate.agents.base import ( ) from talemate.agents.registry import register from talemate.agents.editor.revision import RevisionDisabled +from talemate.agents.summarize.analyze_scene import SceneAnalysisDisabled from talemate.client.base import ClientBase -from talemate.config import load_config from talemate.emit import emit -from talemate.emit.signals import handlers as signal_handlers from talemate.prompts.base import Prompt from .commands import * # noqa @@ -152,16 +151,13 @@ class VisualBase(Agent): return actions - def __init__(self, client: ClientBase, *kwargs): + def __init__(self, client: ClientBase | None = None, **kwargs): self.client = client self.is_enabled = False self.backend_ready = False self.initialized = False - self.config = load_config() self.actions = VisualBase.init_actions() - signal_handlers["config_saved"].connect(self.on_config_saved) - @property def enabled(self): return self.is_enabled @@ -231,6 +227,10 @@ class VisualBase(Agent): or f"{self.backend_name} is not ready for processing", ).model_dump() + backend_detail_fn = getattr(self, 
f"{self.backend.lower()}_agent_details", None) + if backend_detail_fn: + details.update(backend_detail_fn()) + return details @property @@ -241,11 +241,6 @@ class VisualBase(Agent): def allow_automatic_generation(self): return self.actions["automatic_generation"].enabled - def on_config_saved(self, event): - config = event.data - self.config = config - asyncio.create_task(self.emit_status()) - async def on_ready_check_success(self): prev_ready = self.backend_ready self.backend_ready = True @@ -406,7 +401,11 @@ class VisualBase(Agent): f"data:image/png;base64,{image}" ) character.cover_image = asset.id - self.scene.assets.cover_image = asset.id + + # Only set scene cover image if scene doesn't already have one + if not self.scene.assets.cover_image: + self.scene.assets.cover_image = asset.id + self.scene.emit_status() async def emit_image(self, image: str): @@ -538,7 +537,7 @@ class VisualBase(Agent): @set_processing async def generate_environment_prompt(self, instructions: str = None): - with RevisionDisabled(): + with RevisionDisabled(), SceneAnalysisDisabled(): response = await Prompt.request( "visual.generate-environment-prompt", self.client, @@ -557,7 +556,7 @@ class VisualBase(Agent): ): character = self.scene.get_character(character_name) - with RevisionDisabled(): + with RevisionDisabled(), SceneAnalysisDisabled(): response = await Prompt.request( "visual.generate-character-prompt", self.client, diff --git a/src/talemate/agents/visual/comfyui.py b/src/talemate/agents/visual/comfyui.py index 63be0185..ac6d71c2 100644 --- a/src/talemate/agents/visual/comfyui.py +++ b/src/talemate/agents/visual/comfyui.py @@ -10,7 +10,12 @@ import httpx import pydantic import structlog -from talemate.agents.base import AgentAction, AgentActionConditional, AgentActionConfig +from talemate.agents.base import ( + AgentAction, + AgentActionConditional, + AgentActionConfig, + AgentDetail, +) from .handlers import register from .schema import RenderSettings, Resolution @@ -164,11 +169,16 @@ class ComfyUIMixin: label="Checkpoint", choices=[], description="The main checkpoint to use.", + note="If the agent is enabled and connected, but the checkpoint list is empty, try closing this window and opening it again.", ), }, ) } + @property + def comfyui_checkpoint(self): + return self.actions["comfyui"].config["checkpoint"].value + @property def comfyui_workflow_filename(self): base_name = self.actions["comfyui"].config["workflow"].value @@ -219,10 +229,27 @@ class ComfyUIMixin: async def comfyui_checkpoints(self): loader_node = (await self.comfyui_object_info)["CheckpointLoaderSimple"] _checkpoints = loader_node["input"]["required"]["ckpt_name"][0] + log.debug("comfyui_checkpoints", _checkpoints=_checkpoints) return [ {"label": checkpoint, "value": checkpoint} for checkpoint in _checkpoints ] + def comfyui_agent_details(self): + checkpoint: str = self.comfyui_checkpoint + if not checkpoint: + return {} + + # remove .safetensors + checkpoint = checkpoint.replace(".safetensors", "") + + return { + "checkpoint": AgentDetail( + icon="mdi-brain", + value=checkpoint, + description="The checkpoint to use for comfyui", + ).model_dump() + } + async def comfyui_get_image(self, filename: str, subfolder: str, folder_type: str): data = {"filename": filename, "subfolder": subfolder, "type": folder_type} url_values = urllib.parse.urlencode(data) diff --git a/src/talemate/agents/visual/openai_image.py b/src/talemate/agents/visual/openai_image.py index c09bfc0d..40e2955b 100644 --- a/src/talemate/agents/visual/openai_image.py +++ 
b/src/talemate/agents/visual/openai_image.py @@ -55,7 +55,7 @@ class OpenAIImageMixin: @property def openai_api_key(self): - return self.config.get("openai", {}).get("api_key") + return self.config.openai.api_key @property def openai_model_type(self): diff --git a/src/talemate/agents/visual/websocket_handler.py b/src/talemate/agents/visual/websocket_handler.py index 8694ffdf..fd0a66e6 100644 --- a/src/talemate/agents/visual/websocket_handler.py +++ b/src/talemate/agents/visual/websocket_handler.py @@ -91,7 +91,7 @@ class VisualWebsocketHandler(Plugin): await visual.generate_character_portrait( payload.context.character_name, payload.context.instructions, - replace=True, + replace=payload.context.replace, prompt_only=payload.context.prompt_only, ) diff --git a/src/talemate/agents/world_state/__init__.py b/src/talemate/agents/world_state/__init__.py index acb0f8da..84129d0b 100644 --- a/src/talemate/agents/world_state/__init__.py +++ b/src/talemate/agents/world_state/__init__.py @@ -13,6 +13,7 @@ import talemate.util as util from talemate.emit import emit from talemate.events import GameLoopEvent from talemate.instance import get_agent +from talemate.client import ClientBase from talemate.prompts import Prompt from talemate.scene_message import ( ReinforcementMessage, @@ -125,7 +126,7 @@ class WorldStateAgent(CharacterProgressionMixin, Agent): CharacterProgressionMixin.add_actions(actions) return actions - def __init__(self, client, **kwargs): + def __init__(self, client: ClientBase | None = None, **kwargs): self.client = client self.is_enabled = True self.next_update = 0 diff --git a/src/talemate/character.py b/src/talemate/character.py index 9091e80b..b7e4eb0b 100644 --- a/src/talemate/character.py +++ b/src/talemate/character.py @@ -1,16 +1,539 @@ from typing import TYPE_CHECKING, Union +import pydantic +import structlog +import random +import re +import traceback -from talemate.instance import get_agent +import talemate.util as util +import talemate.instance as instance +import talemate.scene_message as scene_message +import talemate.agents.base as agent_base +from talemate.agents.tts.schema import Voice +import talemate.emit.async_signals as async_signals if TYPE_CHECKING: - from talemate.tale_mate import Character, Scene - + from talemate.tale_mate import Scene, Actor __all__ = [ + "Character", + "VoiceChangedEvent", "deactivate_character", "activate_character", + "set_voice", ] +log = structlog.get_logger("talemate.character") + +async_signals.register("character.voice_changed") + + +class Character(pydantic.BaseModel): + # core character information + name: str + description: str = "" + greeting_text: str = "" + color: str = "#fff" + is_player: bool = False + memory_dirty: bool = False + cover_image: str | None = None + voice: Voice | None = None + + # dialogue instructions and examples + dialogue_instructions: str | None = None + example_dialogue: list[str] = pydantic.Field(default_factory=list) + + # attribute and detail storage + base_attributes: dict[str, str | int | float | bool] = pydantic.Field( + default_factory=dict + ) + details: dict[str, str] = pydantic.Field(default_factory=dict) + + # helpful references + agent: agent_base.Agent | None = pydantic.Field(default=None, exclude=True) + actor: "Actor | None" = pydantic.Field(default=None, exclude=True) + + class Config: + arbitrary_types_allowed = True + + @property + def gender(self) -> str: + return self.base_attributes.get("gender", "") + + @property + def sheet(self) -> str: + sheet = self.base_attributes or { + "name": 
self.name,
+            "description": self.description,
+        }
+
+        sheet_list = []
+
+        for key, value in sheet.items():
+            sheet_list.append(f"{key}: {value}")
+
+        return "\n".join(sheet_list)
+
+    @property
+    def random_dialogue_example(self):
+        """
+        Get a random example dialogue line for this character.
+
+        Returns:
+            str: The random example dialogue line.
+        """
+        if not self.example_dialogue:
+            return ""
+
+        return random.choice(self.example_dialogue)
+
+    def __str__(self):
+        return f"Character: {self.name}"
+
+    def __repr__(self):
+        return str(self)
+
+    def __hash__(self):
+        return hash(self.name)
+
+    def set_color(self, color: str | None = None):
+        # if no color provided, choose a random color
+
+        if color is None:
+            color = util.random_color()
+        self.color = color
+
+    def set_cover_image(self, asset_id: str, initial_only: bool = False):
+        if self.cover_image and initial_only:
+            return
+
+        self.cover_image = asset_id
+
+    def sheet_filtered(self, *exclude):
+        sheet = self.base_attributes or {
+            "name": self.name,
+            "gender": self.gender,
+            "description": self.description,
+        }
+
+        sheet_list = []
+
+        for key, value in sheet.items():
+            if key not in exclude:
+                sheet_list.append(f"{key}: {value}")
+
+        return "\n".join(sheet_list)
+
+    def random_dialogue_examples(
+        self,
+        scene: "Scene",
+        num: int = 3,
+        strip_name: bool = False,
+        max_backlog: int = 250,
+        max_length: int = 192,
+        history_threshold: int = 15,
+    ) -> list[str]:
+        """
+        Get multiple random example dialogue lines for this character.
+
+        Will return up to `num` examples and not have any duplicates.
+        """
+
+        if len(scene.history) < history_threshold and self.example_dialogue:
+            # when history is too short, we just use the prepared
+            # examples
+            return self._random_dialogue_examples(num, strip_name)
+
+        history_examples = self._random_dialogue_examples_from_history(
+            scene, num, max_backlog
+        )
+
+        if len(history_examples) < num:
+            random_examples = self._random_dialogue_examples(
+                num - len(history_examples), strip_name
+            )
+
+            for example in random_examples:
+                history_examples.append(example)
+
+        # ensure sane example lengths
+
+        history_examples = [
+            util.strip_partial_sentences(example[:max_length])
+            for example in history_examples
+        ]
+
+        log.debug("random_dialogue_examples", history_examples=history_examples)
+        return history_examples
+
+    def _random_dialogue_examples_from_history(
+        self, scene: "Scene", num: int = 3, max_backlog: int = 250
+    ) -> list[str]:
+        """
+        Get multiple random example dialogue lines for this character from the scene's history.
+
+        Checks the last `max_backlog` messages in the scene's history and returns up to `num` examples.
+        """
+
+        history = scene.history[-max_backlog:]
+
+        examples = []
+
+        for message in history:
+            if not isinstance(message, scene_message.CharacterMessage):
+                continue
+
+            if message.character_name != self.name:
+                continue
+
+            examples.append(message.without_name.strip())
+
+        if not examples:
+            return []
+
+        return random.sample(examples, min(num, len(examples)))
+
+    def _random_dialogue_examples(
+        self, num: int = 3, strip_name: bool = False
+    ) -> list[str]:
+        """
+        Get multiple random example dialogue lines for this character.
+
+        Will return up to `num` examples and not have any duplicates.
+        """
+
+        if not self.example_dialogue:
+            return []
+
+        # create a copy of example_dialogue so we don't modify the original
+
+        examples = self.example_dialogue.copy()
+
+        # shuffle the examples so we get a random order
+
+        random.shuffle(examples)
+
+        # now pop examples until we have `num` examples or we run out of examples
+
+        if strip_name:
+            examples = [example.split(":", 1)[1].strip() for example in examples]
+
+        return [examples.pop() for _ in range(min(num, len(examples)))]
+
+    def filtered_sheet(self, attributes: list[str]):
+        """
+        Same as sheet but only returns the attributes in the given list
+
+        Attributes that don't exist will be ignored
+        """
+
+        sheet_list = []
+
+        for key, value in self.base_attributes.items():
+            if key.lower() not in attributes:
+                continue
+            sheet_list.append(f"{key}: {value}")
+
+        return "\n".join(sheet_list)
+
+    def rename(self, new_name: str):
+        """
+        Rename the character.
+
+        Args:
+            new_name (str): The new name of the character.
+
+        Returns:
+            None
+        """
+
+        orig_name = self.name
+        self.name = new_name
+
+        if orig_name.lower() == "you":
+            # we don't want to replace "you" in the description
+            # or anywhere else so we can just return here
+            return
+
+        if self.description:
+            self.description = self.description.replace(f"{orig_name}", self.name)
+        for k, v in self.base_attributes.items():
+            if isinstance(v, str):
+                self.base_attributes[k] = v.replace(f"{orig_name}", self.name)
+        for i, v in list(self.details.items()):
+            if isinstance(v, str):
+                self.details[i] = v.replace(f"{orig_name}", self.name)
+        self.memory_dirty = True
+
+    def introduce_main_character(self, character: "Character"):
+        """
+        Makes this character aware of the main character's name in the scene.
+
+        This will replace all occurrences of {{user}} (case-insensitive) in all of the character's properties
+        with the main character's name.
+        """
+
+        properties = ["description", "greeting_text"]
+
+        pattern = re.compile(re.escape("{{user}}"), re.IGNORECASE)
+
+        for prop in properties:
+            prop_value = getattr(self, prop)
+
+            try:
+                updated_prop_value = pattern.sub(character.name, prop_value)
+            except Exception as e:
+                log.error(
+                    "introduce_main_character",
+                    error=e,
+                    traceback=traceback.format_exc(),
+                )
+                updated_prop_value = prop_value
+            setattr(self, prop, updated_prop_value)
+
+        # also replace in all example dialogue
+
+        for i, dialogue in enumerate(self.example_dialogue):
+            self.example_dialogue[i] = pattern.sub(character.name, dialogue)
+
+    def update(self, **kwargs):
+        """
+        Update character properties with given key-value pairs.
+        """
+
+        for key, value in kwargs.items():
+            setattr(self, key, value)
+
+        self.memory_dirty = True
+
+    async def purge_from_memory(self):
+        """
+        Purges this character's details from memory.
+        """
+        memory_agent = instance.get_agent("memory")
+        await memory_agent.delete({"character": self.name})
+        log.info("purged character from memory", character=self.name)
+
+    async def commit_to_memory(self, memory_agent):
+        """
+        Commits this character's details to the memory agent.
(vectordb) + """ + + items = [] + + if not self.base_attributes or "description" not in self.base_attributes: + if not self.description: + self.description = "" + description_chunks = [ + chunk.strip() for chunk in self.description.split("\n") if chunk.strip() + ] + + for idx in range(len(description_chunks)): + chunk = description_chunks[idx] + + items.append( + { + "text": f"{self.name}: {chunk}", + "id": f"{self.name}.description.{idx}", + "meta": { + "character": self.name, + "attr": "description", + "typ": "base_attribute", + }, + } + ) + + seen_attributes = set() + + for attr, value in self.base_attributes.items(): + if attr.startswith("_"): + continue + + if attr.lower() in ["name", "scenario_context", "_prompt", "_template"]: + continue + + seen_attributes.add(attr) + + items.append( + { + "text": f"{self.name}'s {attr}: {value}", + "id": f"{self.name}.{attr}", + "meta": { + "character": self.name, + "attr": attr, + "typ": "base_attribute", + }, + } + ) + + for key, detail in self.details.items(): + # if colliding with attribute name, prefix with detail_ + if key in seen_attributes: + key = f"detail_{key}" + + items.append( + { + "text": f"{self.name} - {key}: {detail}", + "id": f"{self.name}.{key}", + "meta": { + "character": self.name, + "typ": "details", + "detail": key, + }, + } + ) + + if items: + await memory_agent.add_many(items) + + self.memory_dirty = False + + async def commit_single_attribute_to_memory( + self, memory_agent, attribute: str, value: str + ): + """ + Commits a single attribute to memory + """ + + items = [] + + # remove old attribute if it exists + + await memory_agent.delete( + {"character": self.name, "typ": "base_attribute", "attr": attribute} + ) + + self.base_attributes[attribute] = value + + items.append( + { + "text": f"{self.name}'s {attribute}: {self.base_attributes[attribute]}", + "id": f"{self.name}.{attribute}", + "meta": { + "character": self.name, + "attr": attribute, + "typ": "base_attribute", + }, + } + ) + + log.debug("commit_single_attribute_to_memory", items=items) + + await memory_agent.add_many(items) + + async def commit_single_detail_to_memory( + self, memory_agent, detail: str, value: str + ): + """ + Commits a single detail to memory + """ + + items = [] + + # remove old detail if it exists + + await memory_agent.delete( + {"character": self.name, "typ": "details", "detail": detail} + ) + + self.details[detail] = value + + items.append( + { + "text": f"{self.name} - {detail}: {value}", + "id": f"{self.name}.{detail}", + "meta": { + "character": self.name, + "typ": "details", + "detail": detail, + }, + } + ) + + log.debug("commit_single_detail_to_memory", items=items) + + await memory_agent.add_many(items) + + async def set_detail(self, name: str, value): + memory_agent = instance.get_agent("memory") + if not value: + try: + del self.details[name] + await memory_agent.delete( + {"character": self.name, "typ": "details", "detail": name} + ) + except KeyError: + pass + else: + self.details[name] = value + await self.commit_single_detail_to_memory(memory_agent, name, value) + + def set_detail_defer(self, name: str, value): + self.details[name] = value + self.memory_dirty = True + + def get_detail(self, name: str): + return self.details.get(name) + + async def set_base_attribute(self, name: str, value): + memory_agent = instance.get_agent("memory") + + if not value: + try: + del self.base_attributes[name] + await memory_agent.delete( + {"character": self.name, "typ": "base_attribute", "attr": name} + ) + except KeyError: + pass + else: + 
self.base_attributes[name] = value + await self.commit_single_attribute_to_memory(memory_agent, name, value) + + def set_base_attribute_defer(self, name: str, value): + self.base_attributes[name] = value + self.memory_dirty = True + + def get_base_attribute(self, name: str): + return self.base_attributes.get(name) + + async def set_description(self, description: str): + memory_agent = instance.get_agent("memory") + self.description = description + + items = [] + + await memory_agent.delete( + {"character": self.name, "typ": "base_attribute", "attr": "description"} + ) + + description_chunks = [ + chunk.strip() for chunk in self.description.split("\n") if chunk.strip() + ] + + for idx in range(len(description_chunks)): + chunk = description_chunks[idx] + + items.append( + { + "text": f"{self.name}: {chunk}", + "id": f"{self.name}.description.{idx}", + "meta": { + "character": self.name, + "attr": "description", + "typ": "base_attribute", + }, + } + ) + + await memory_agent.add_many(items) + + +class VoiceChangedEvent(pydantic.BaseModel): + character: "Character" + voice: Voice | None + auto: bool = False + async def deactivate_character(scene: "Scene", character: Union[str, "Character"]): """ @@ -51,9 +574,18 @@ async def activate_character(scene: "Scene", character: Union[str, "Character"]) return False if not character.is_player: - actor = scene.Actor(character, get_agent("conversation")) + actor = scene.Actor(character, instance.get_agent("conversation")) else: actor = scene.Player(character, None) await scene.add_actor(actor) del scene.inactive_characters[character.name] + + +async def set_voice(character: "Character", voice: Voice | None, auto: bool = False): + character.voice = voice + emission: VoiceChangedEvent = VoiceChangedEvent( + character=character, voice=voice, auto=auto + ) + await async_signals.get("character.voice_changed").send(emission) + return emission diff --git a/src/talemate/client/anthropic.py b/src/talemate/client/anthropic.py index 6b6546c8..860f16d0 100644 --- a/src/talemate/client/anthropic.py +++ b/src/talemate/client/anthropic.py @@ -9,10 +9,8 @@ from talemate.client.remote import ( EndpointOverrideMixin, endpoint_override_extra_fields, ) -from talemate.config import Client as BaseClientConfig -from talemate.config import load_config +from talemate.config.schema import Client as BaseClientConfig from talemate.emit import emit -from talemate.emit.signals import handlers __all__ = [ "AnthropicClient", @@ -33,10 +31,13 @@ SUPPORTED_MODELS = [ "claude-opus-4-20250514", ] +DEFAULT_MODEL = "claude-3-5-sonnet-latest" +MIN_THINKING_TOKENS = 1024 + class Defaults(EndpointOverride, CommonDefaults, pydantic.BaseModel): max_token_length: int = 16384 - model: str = "claude-3-5-sonnet-latest" + model: str = DEFAULT_MODEL double_coercion: str = None @@ -52,7 +53,6 @@ class AnthropicClient(EndpointOverrideMixin, ClientBase): client_type = "anthropic" conversation_retries = 0 - auto_break_repetition_enabled = False # TODO: make this configurable? 
decensor_enabled = False config_cls = ClientConfig @@ -66,22 +66,13 @@ class AnthropicClient(EndpointOverrideMixin, ClientBase): defaults: Defaults = Defaults() extra_fields: dict[str, ExtraField] = endpoint_override_extra_fields() - def __init__(self, model="claude-3-5-sonnet-latest", **kwargs): - self.model_name = model - self.api_key_status = None - self._reconfigure_endpoint_override(**kwargs) - self.config = load_config() - super().__init__(**kwargs) - - handlers["config_saved"].connect(self.on_config_saved) - @property def can_be_coerced(self) -> bool: - return True + return not self.reason_enabled @property def anthropic_api_key(self): - return self.config.get("anthropic", {}).get("api_key") + return self.config.anthropic.api_key @property def supported_parameters(self): @@ -92,17 +83,25 @@ class AnthropicClient(EndpointOverrideMixin, ClientBase): "max_tokens", ] + @property + def min_reason_tokens(self) -> int: + return MIN_THINKING_TOKENS + + @property + def requires_reasoning_pattern(self) -> bool: + return False + def emit_status(self, processing: bool = None): error_action = None + error_message: str | None = None if processing is not None: self.processing = processing if self.anthropic_api_key: status = "busy" if self.processing else "idle" - model_name = self.model_name else: status = "error" - model_name = "No API key set" + error_message = "No API key set" error_action = ErrorAction( title="Set API Key", action_name="openAppConfig", @@ -115,7 +114,7 @@ class AnthropicClient(EndpointOverrideMixin, ClientBase): if not self.model_name: status = "error" - model_name = "No model loaded" + error_message = "No model loaded" self.current_status = status @@ -124,73 +123,18 @@ class AnthropicClient(EndpointOverrideMixin, ClientBase): "double_coercion": self.double_coercion, "meta": self.Meta().model_dump(), "enabled": self.enabled, + "error_message": error_message, } data.update(self._common_status_data()) emit( "client_status", message=self.client_type, id=self.name, - details=model_name, + details=self.model_name, status=status if self.enabled else "disabled", data=data, ) - def set_client(self, max_token_length: int = None): - if ( - not self.anthropic_api_key - and not self.endpoint_override_base_url_configured - ): - self.client = AsyncAnthropic(api_key="sk-1111") - log.error("No anthropic API key set") - if self.api_key_status: - self.api_key_status = False - emit("request_client_status") - emit("request_agent_status") - return - - if not self.model_name: - self.model_name = "claude-3-opus-20240229" - - if max_token_length and not isinstance(max_token_length, int): - max_token_length = int(max_token_length) - - model = self.model_name - - self.client = AsyncAnthropic(api_key=self.api_key, base_url=self.base_url) - self.max_token_length = max_token_length or 16384 - - if not self.api_key_status: - if self.api_key_status is False: - emit("request_client_status") - emit("request_agent_status") - self.api_key_status = True - - log.info( - "anthropic set client", - max_token_length=self.max_token_length, - provided_max_token_length=max_token_length, - model=model, - ) - - def reconfigure(self, **kwargs): - if kwargs.get("model"): - self.model_name = kwargs["model"] - self.set_client(kwargs.get("max_token_length")) - - if "enabled" in kwargs: - self.enabled = bool(kwargs["enabled"]) - - if "double_coercion" in kwargs: - self.double_coercion = kwargs["double_coercion"] - - self._reconfigure_common_parameters(**kwargs) - self._reconfigure_endpoint_override(**kwargs) - - def 
on_config_saved(self, event): - config = event.data - self.config = config - self.set_client(max_token_length=self.max_token_length) - def response_tokens(self, response: str): return response.usage.output_tokens @@ -200,13 +144,6 @@ class AnthropicClient(EndpointOverrideMixin, ClientBase): async def status(self): self.emit_status() - def prompt_template(self, system_message: str, prompt: str): - """ - Anthropic handles the prompt template internally, so we just - give the prompt as is. - """ - return prompt - async def generate(self, prompt: str, parameters: dict, kind: str): """ Generates text from the given prompt and parameters. @@ -218,17 +155,35 @@ class AnthropicClient(EndpointOverrideMixin, ClientBase): ): raise Exception("No anthropic API key set") - prompt, coercion_prompt = self.split_prompt_for_coercion(prompt) + client = AsyncAnthropic(api_key=self.api_key, base_url=self.base_url) + + if self.can_be_coerced: + prompt, coercion_prompt = self.split_prompt_for_coercion(prompt) + else: + coercion_prompt = None system_message = self.get_system_message(kind) messages = [{"role": "user", "content": prompt.strip()}] if coercion_prompt: + log.debug("Adding coercion pre-fill", coercion_prompt=coercion_prompt) messages.append({"role": "assistant", "content": coercion_prompt.strip()}) + if self.reason_enabled: + parameters["thinking"] = { + "type": "enabled", + "budget_tokens": self.validated_reason_tokens, + } + # thinking doesn't support temperature, top_p, or top_k + # and the API will error if they are set + parameters.pop("temperature", None) + parameters.pop("top_p", None) + parameters.pop("top_k", None) + self.log.debug( "generate", + model=self.model_name, prompt=prompt[:128] + " ...", parameters=parameters, system_message=system_message, @@ -238,7 +193,7 @@ class AnthropicClient(EndpointOverrideMixin, ClientBase): prompt_tokens = 0 try: - stream = await self.client.messages.create( + stream = await client.messages.create( model=self.model_name, system=system_message, messages=messages, @@ -247,13 +202,25 @@ class AnthropicClient(EndpointOverrideMixin, ClientBase): ) response = "" + reasoning = "" async for event in stream: - if event.type == "content_block_delta": + if ( + event.type == "content_block_delta" + and event.delta.type == "text_delta" + ): content = event.delta.text response += content self.update_request_tokens(self.count_tokens(content)) + elif ( + event.type == "content_block_delta" + and event.delta.type == "thinking_delta" + ): + content = event.delta.thinking + reasoning += content + self.update_request_tokens(self.count_tokens(content)) + elif event.type == "message_start": prompt_tokens = event.message.usage.input_tokens @@ -262,8 +229,9 @@ class AnthropicClient(EndpointOverrideMixin, ClientBase): self._returned_prompt_tokens = prompt_tokens self._returned_response_tokens = completion_tokens + self._reasoning_response = reasoning - log.debug("generated response", response=response) + log.debug("generated response", response=response, reasoning=reasoning) return response except PermissionDeniedError as e: diff --git a/src/talemate/client/base.py b/src/talemate/client/base.py index ff1f1afb..6e8c9605 100644 --- a/src/talemate/client/base.py +++ b/src/talemate/client/base.py @@ -4,6 +4,7 @@ A unified client base, based on the openai API import ipaddress import logging +import re import random import time import traceback @@ -14,20 +15,27 @@ import pydantic import dataclasses import structlog import urllib3 -from openai import AsyncOpenAI, PermissionDeniedError 
+from openai import PermissionDeniedError import talemate.client.presets as presets import talemate.instance as instance import talemate.util as util from talemate.agents.context import active_agent from talemate.client.context import client_context_attribute -from talemate.client.model_prompts import model_prompt +from talemate.client.model_prompts import model_prompt, DEFAULT_TEMPLATE from talemate.client.ratelimit import CounterRateLimiter from talemate.context import active_scene +from talemate.prompts.base import Prompt from talemate.emit import emit -from talemate.config import load_config, save_config, EmbeddingFunctionPreset +from talemate.config import get_config, Config +from talemate.config.schema import EmbeddingFunctionPreset, Client as ClientConfig import talemate.emit.async_signals as async_signals -from talemate.exceptions import SceneInactiveError, GenerationCancelled +from talemate.exceptions import ( + SceneInactiveError, + GenerationCancelled, + GenerationProcessingError, + ReasoningResponseError, +) import talemate.ux.schema as ux_schema from talemate.client.system_prompts import SystemPrompts @@ -43,6 +51,11 @@ STOPPING_STRINGS = ["<|im_end|>", ""] REPLACE_SMART_QUOTES = True +INDIRECT_COERCION_PROMPT = "\nStart your response with: " + +DEFAULT_REASONING_PATTERN = r".*?" + + class ClientDisabledError(OSError): def __init__(self, client: "ClientBase"): self.client = client @@ -63,6 +76,7 @@ class PromptData(pydantic.BaseModel): generation_parameters: dict = pydantic.Field(default_factory=dict) inference_preset: str = None preset_group: str | None = None + reasoning: str | None = None class ErrorAction(pydantic.BaseModel): @@ -76,6 +90,9 @@ class CommonDefaults(pydantic.BaseModel): rate_limit: int | None = None data_format: Literal["yaml", "json"] | None = None preset_group: str | None = None + reason_enabled: bool = False + reason_tokens: int = 0 + reason_response_pattern: str | None = None class Defaults(CommonDefaults, pydantic.BaseModel): @@ -99,6 +116,7 @@ class ExtraField(pydantic.BaseModel): description: str group: FieldGroup | None = None note: ux_schema.Note | None = None + choices: list[str | int | float | bool] | None = None class ParameterReroute(pydantic.BaseModel): @@ -162,39 +180,36 @@ class RequestInformation(pydantic.BaseModel): class ClientEmbeddingsStatus: client: "ClientBase | None" = None embedding_name: str | None = None + seen: bool = False + + +@dataclasses.dataclass +class ClientStatus: + client: "ClientBase | None" = None + enabled: bool = False async_signals.register( "client.embeddings_available", + "client.enabled", + "client.disabled", ) class ClientBase: - api_url: str - model_name: str - api_key: str = None - name: str = None - enabled: bool = True + name: str + remote_model_name: str | None = None + remote_model_locked: bool = False current_status: str = None - max_token_length: int = 8192 processing: bool = False connected: bool = False conversation_retries: int = 0 - auto_break_repetition_enabled: bool = True decensor_enabled: bool = True auto_determine_prompt_template: bool = False finalizers: list[str] = [] - double_coercion: Union[str, None] = None - data_format: Literal["yaml", "json"] | None = None - rate_limit: int | None = None client_type = "base" request_information: RequestInformation | None = None - status_request_timeout: int = 2 - - system_prompts: SystemPrompts = SystemPrompts() - preset_group: str | None = "" - rate_limit_counter: CounterRateLimiter = None class Meta(pydantic.BaseModel): @@ -207,27 +222,92 @@ class 
ClientBase: def __init__( self, - api_url: str = None, name: str = None, **kwargs, ): - self.api_url = api_url self.name = name or self.client_type + self.remote_model_name = None self.auto_determine_prompt_template_attempt = None self.log = structlog.get_logger(f"client.{self.client_type}") - self.double_coercion = kwargs.get("double_coercion", None) - self._reconfigure_common_parameters(**kwargs) - self.enabled = kwargs.get("enabled", True) - if "max_token_length" in kwargs: - self.max_token_length = ( - int(kwargs["max_token_length"]) if kwargs["max_token_length"] else 8192 - ) - - self.set_client(max_token_length=self.max_token_length) def __str__(self): return f"{self.client_type}Client[{self.api_url}][{self.model_name or ''}]" + ##### + + # config getters + + @property + def config(self) -> Config: + return get_config() + + @property + def client_config(self) -> ClientConfig: + try: + return get_config().clients[self.name] + except KeyError: + return ClientConfig(type=self.client_type, name=self.name) + + @property + def model(self) -> str | None: + return self.client_config.model + + @property + def model_name(self) -> str | None: + if self.remote_model_locked: + return self.remote_model_name + return self.remote_model_name or self.model + + @property + def api_key(self) -> str | None: + return self.client_config.api_key + + @property + def api_url(self) -> str | None: + return self.client_config.api_url + + @property + def max_token_length(self) -> int: + return self.client_config.max_token_length + + @property + def double_coercion(self) -> str | None: + return self.client_config.double_coercion + + @property + def rate_limit(self) -> int | None: + return self.client_config.rate_limit + + @property + def data_format(self) -> Literal["yaml", "json"]: + return self.client_config.data_format + + @property + def enabled(self) -> bool: + return self.client_config.enabled + + @property + def system_prompts(self) -> SystemPrompts: + return self.client_config.system_prompts + + @property + def preset_group(self) -> str | None: + return self.client_config.preset_group + + @property + def reason_enabled(self) -> bool: + return self.client_config.reason_enabled + + @property + def reason_tokens(self) -> int: + return self.client_config.reason_tokens + + @property + def reason_response_pattern(self) -> str: + return self.client_config.reason_response_pattern or DEFAULT_REASONING_PATTERN + + ##### + @property def experimental(self): return False @@ -238,6 +318,9 @@ class ClientBase: Determines whether or not his client can pass LLM coercion. 
(e.g., is able to predefine partial LLM output in the prompt) """ + if self.reason_enabled: + # We are not able to coerce via pre-filling if reasoning is enabled + return False return self.Meta().requires_prompt_template @property @@ -283,7 +366,49 @@ class ClientBase: def embeddings_identifier(self) -> str: return f"client-api/{self.name}/{self.embeddings_model_name}" - async def destroy(self, config: dict): + @property + def reasoning_response(self) -> str | None: + return getattr(self, "_reasoning_response", None) + + @property + def min_reason_tokens(self) -> int: + return 0 + + @property + def validated_reason_tokens(self) -> int: + return max(self.reason_tokens, self.min_reason_tokens) + + @property + def default_prompt_template(self) -> str: + return DEFAULT_TEMPLATE + + @property + def requires_reasoning_pattern(self) -> bool: + return True + + async def enable(self): + self.client_config.enabled = True + self.emit_status() + + await self.config.set_dirty() + await self.status() + await async_signals.get("client.enabled").send( + ClientStatus(client=self, enabled=True) + ) + + async def disable(self): + self.client_config.enabled = False + self.emit_status() + + if self.supports_embeddings: + await self.reset_embeddings() + await self.config.set_dirty() + await self.status() + await async_signals.get("client.disabled").send( + ClientStatus(client=self, enabled=False) + ) + + async def destroy(self): """ This is called before the client is removed from talemate.instance.clients @@ -294,16 +419,13 @@ class ClientBase: """ if self.supports_embeddings: - self.remove_embeddings(config) + await self.remove_embeddings() - def reset_embeddings(self): + async def reset_embeddings(self): self._embeddings_model_name = None self._embeddings_status = False - def set_client(self, **kwargs): - self.client = AsyncOpenAI(base_url=self.api_url, api_key="sk-1111") - - def set_embeddings(self): + async def set_embeddings(self): log.debug( "setting embeddings", client=self.name, @@ -314,7 +436,7 @@ class ClientBase: if not self.supports_embeddings or not self.embeddings_status: return - config = load_config(as_model=True) + config: Config = get_config() key = self.embeddings_identifier @@ -334,30 +456,25 @@ class ClientBase: custom=True, ) - save_config(config) + await config.set_dirty() - def remove_embeddings(self, config: dict | None = None): + async def remove_embeddings(self): # remove all embeddings for this client - for key, value in list(config["presets"]["embeddings"].items()): - if value["client"] == self.name and value["embeddings"] == "client-api": + config: Config = get_config() + for key, value in list(config.presets.embeddings.items()): + if value.client == self.name and value.embeddings == "client-api": log.warning("!!! removing embeddings", client=self.name, key=key) - config["presets"]["embeddings"].pop(key) - - def set_system_prompts(self, system_prompts: dict | SystemPrompts): - if isinstance(system_prompts, dict): - self.system_prompts = SystemPrompts(**system_prompts) - elif not isinstance(system_prompts, SystemPrompts): - raise ValueError( - "system_prompts must be a `dict` or `SystemPrompts` instance" - ) - else: - self.system_prompts = system_prompts + config.presets.embeddings.pop(key) + await config.set_dirty() def prompt_template(self, sys_msg: str, prompt: str): """ Applies the appropriate prompt template for the model. 
""" + if not self.Meta().requires_prompt_template: + return prompt + if not self.model_name: self.log.warning("prompt template not applied", reason="no model loaded") return f"{sys_msg}\n{prompt}" @@ -372,13 +489,22 @@ class ClientBase: else: double_coercion = None - return model_prompt(self.model_name, sys_msg, prompt, double_coercion)[0] + return model_prompt( + self.model_name, + sys_msg, + prompt, + double_coercion, + default_template=self.default_prompt_template, + )[0] def prompt_template_example(self): if not getattr(self, "model_name", None): return None, None return model_prompt( - self.model_name, "{sysmsg}", "{prompt}<|BOT|>{LLM coercion}" + self.model_name, + "{sysmsg}", + "{prompt}<|BOT|>{LLM coercion}", + default_template=self.default_prompt_template, ) def split_prompt_for_coercion(self, prompt: str) -> tuple[str, str]: @@ -386,59 +512,29 @@ class ClientBase: Splits the prompt and the prefill/coercion prompt. """ if "<|BOT|>" in prompt: - _, right = prompt.split("<|BOT|>", 1) + prompt, coercion = prompt.split("<|BOT|>", 1) if self.double_coercion: - right = f"{self.double_coercion}\n\n{right}" + coercion = f"{self.double_coercion}\n\n{coercion}" - return prompt, right + return prompt, coercion return prompt, None - def reconfigure(self, **kwargs): + def rate_limit_update(self): """ - Reconfigures the client. + Updates the rate limit counter for the client. - Keyword Arguments: - - - api_url: the API URL to use - - max_token_length: the max token length to use - - enabled: whether the client is enabled + If the rate limit is set to 0, the rate limit counter is set to None. """ - - if "api_url" in kwargs: - self.api_url = kwargs["api_url"] - - if kwargs.get("max_token_length"): - self.max_token_length = int(kwargs["max_token_length"]) - - if "enabled" in kwargs: - self.enabled = bool(kwargs["enabled"]) - if not self.enabled and self.supports_embeddings and self.embeddings_status: - self.reset_embeddings() - - if "double_coercion" in kwargs: - self.double_coercion = kwargs["double_coercion"] - - self._reconfigure_common_parameters(**kwargs) - - def _reconfigure_common_parameters(self, **kwargs): - if "rate_limit" in kwargs: - self.rate_limit = kwargs["rate_limit"] - if self.rate_limit: - if not self.rate_limit_counter: - self.rate_limit_counter = CounterRateLimiter( - rate_per_minute=self.rate_limit - ) - else: - self.rate_limit_counter.update_rate_limit(self.rate_limit) + if self.rate_limit: + if not self.rate_limit_counter: + self.rate_limit_counter = CounterRateLimiter( + rate_per_minute=self.rate_limit + ) else: - self.rate_limit_counter = None - - if "data_format" in kwargs: - self.data_format = kwargs["data_format"] - - if "preset_group" in kwargs: - self.preset_group = kwargs["preset_group"] + self.rate_limit_counter.update_rate_limit(self.rate_limit) + else: + self.rate_limit_counter = None def host_is_remote(self, url: str) -> bool: """ @@ -491,43 +587,40 @@ class ClientBase: - kind: the kind of generation """ - - app_config_system_prompts = client_context_attribute( - "app_config_system_prompts" - ) - - if app_config_system_prompts: - self.system_prompts.parent = SystemPrompts(**app_config_system_prompts) - - return self.system_prompts.get(kind, self.decensor_enabled) + config: Config = get_config() + self.system_prompts.parent = config.system_prompts + sys_prompt = self.system_prompts.get(kind, self.decensor_enabled) + return sys_prompt def emit_status(self, processing: bool = None): """ Sets and emits the client status. 
""" + error_message: str | None = None if processing is not None: self.processing = processing if not self.enabled: status = "disabled" - model_name = "Disabled" + error_message = "Disabled" elif not self.connected: status = "error" - model_name = "Could not connect" + error_message = "Could not connect" elif self.model_name: status = "busy" if self.processing else "idle" - model_name = self.model_name else: - model_name = "No model loaded" + error_message = "No model loaded" status = "warning" status_change = status != self.current_status self.current_status = status + default_prompt_template = self.default_prompt_template + prompt_template_example, prompt_template_file = self.prompt_template_example() has_prompt_template = ( - prompt_template_file and prompt_template_file != "default.jinja2" + prompt_template_file and prompt_template_file != default_prompt_template ) if not has_prompt_template and self.auto_determine_prompt_template: @@ -545,21 +638,28 @@ class ClientBase: self.prompt_template_example() ) has_prompt_template = ( - prompt_template_file and prompt_template_file != "default.jinja2" + prompt_template_file + and prompt_template_file != default_prompt_template ) + dedicated_default_template = default_prompt_template != DEFAULT_TEMPLATE + data = { - "api_key": self.api_key, "prompt_template_example": prompt_template_example, "has_prompt_template": has_prompt_template, + "dedicated_default_template": dedicated_default_template, "template_file": prompt_template_file, "meta": self.Meta().model_dump(), "error_action": None, "double_coercion": self.double_coercion, "enabled": self.enabled, "system_prompts": self.system_prompts.model_dump(), + "error_message": error_message, } + if self.Meta().enable_api_auth: + data["api_key"] = self.api_key + data.update(self._common_status_data()) for field_name in getattr(self.Meta(), "extra_fields", {}).keys(): @@ -571,7 +671,7 @@ class ClientBase: "client_status", message=self.client_type, id=self.name, - details=model_name, + details=self.model_name, status=status, data=data, ) @@ -595,6 +695,11 @@ class ClientBase: "supports_embeddings": self.supports_embeddings, "embeddings_status": self.embeddings_status, "embeddings_model_name": self.embeddings_model_name, + "reason_enabled": self.reason_enabled, + "reason_tokens": self.reason_tokens, + "min_reason_tokens": self.min_reason_tokens, + "reason_response_pattern": self.reason_response_pattern, + "requires_reasoning_pattern": self.requires_reasoning_pattern, "request_information": self.request_information.model_dump() if self.request_information else None, @@ -646,20 +751,16 @@ class ClientBase: return try: - self.model_name = await self.get_model_name() + self.remote_model_name = await self.get_model_name() except Exception as e: self.log.warning("client status error", e=e, client=self.name) - self.model_name = None + self.remote_model_name = None self.connected = False self.emit_status() return self.connected = True - if not self.model_name or self.model_name == "None": - self.emit_status() - return - self.emit_status() def generate_prompt_parameters(self, kind: str): @@ -682,6 +783,15 @@ class ClientBase: parameters, kind, agent_context.action ) + if self.reason_enabled and self.reason_tokens > 0: + log.debug( + "padding for reasoning", + client=self.client_type, + reason_tokens=self.reason_tokens, + validated_reason_tokens=self.validated_reason_tokens, + ) + parameters["max_tokens"] += self.validated_reason_tokens + if client_context_attribute( "nuke_repetition" ) > 0.0 and 
self.jiggle_enabled_for(kind): @@ -838,12 +948,89 @@ class ClientBase: else: self.request_information.tokens += tokens + def strip_coercion_prompt(self, response: str, coercion_prompt: str = None) -> str: + """ + Strips the coercion prompt from the response if it is present. + """ + if not coercion_prompt or not response.startswith(coercion_prompt): + return response + + return response.replace(coercion_prompt, "").lstrip() + + def strip_reasoning(self, response: str) -> tuple[str, str]: + """ + Strips the reasoning from the response if the model is reasoning. + """ + + if not self.reason_enabled: + return response, None + + if not self.requires_reasoning_pattern: + # reasoning handled automatically during streaming + return response, None + + pattern = self.reason_response_pattern + if not pattern: + pattern = DEFAULT_REASONING_PATTERN + + log.debug("reasoning pattern", pattern=pattern) + + extract_reason = re.search(pattern, response, re.DOTALL) + + if extract_reason: + reasoning_response = extract_reason.group(0) + return response.replace(reasoning_response, ""), reasoning_response + + raise ReasoningResponseError() + + def attach_response_length_instruction( + self, prompt: str, response_length: int | None + ) -> str: + """ + Attaches the response length instruction to the prompt. + """ + + if not response_length or response_length < 0: + log.warning("response length instruction", response_length=response_length) + return prompt + + instructions_prompt = Prompt.get( + "common.response-length", + vars={ + "response_length": response_length, + "attach_response_length_instruction": True, + }, + ) + + instructions_prompt = instructions_prompt.render() + + if instructions_prompt.strip() in prompt: + log.debug( + "response length instruction already in prompt", + instructions_prompt=instructions_prompt, + ) + return prompt + + log.debug( + "response length instruction", instructions_prompt=instructions_prompt + ) + + if "<|RESPONSE_LENGTH_INSTRUCTIONS|>" in prompt: + return prompt.replace( + "<|RESPONSE_LENGTH_INSTRUCTIONS|>", instructions_prompt + ) + elif "<|BOT|>" in prompt: + return prompt.replace("<|BOT|>", f"{instructions_prompt}<|BOT|>") + else: + return f"{prompt}{instructions_prompt}" + async def send_prompt( self, prompt: str, kind: str = "conversation", finalize: Callable = lambda x: x, retries: int = 2, + data_expected: bool | None = None, ) -> str: """ Send a prompt to the AI and return its response. @@ -852,7 +1039,9 @@ class ClientBase: """ try: - return await self._send_prompt(prompt, kind, finalize, retries) + return await self._send_prompt( + prompt, kind, finalize, retries, data_expected + ) except GenerationCancelled: await self.abort_generation() raise @@ -863,6 +1052,7 @@ class ClientBase: kind: str = "conversation", finalize: Callable = lambda x: x, retries: int = 2, + data_expected: bool | None = None, ) -> str: """ Send a prompt to the AI and return its response. 
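For orientation, here is a minimal, self-contained sketch of the reasoning-stripping step that strip_reasoning performs above. The "<think>…</think>" pattern used below is only an assumed stand-in for DEFAULT_REASONING_PATTERN (whose literal value is not legible in this diff), and the sketch omits the reason_enabled / requires_reasoning_pattern checks and the ReasoningResponseError handling:

import re

# Assumed example pattern; the real client uses reason_response_pattern or
# DEFAULT_REASONING_PATTERN, which may differ.
EXAMPLE_PATTERN = r"<think>.*?</think>"

def strip_reasoning(response: str, pattern: str = EXAMPLE_PATTERN) -> tuple[str, str | None]:
    # re.DOTALL lets the reasoning block span multiple lines, matching the
    # re.search(pattern, response, re.DOTALL) call above.
    match = re.search(pattern, response, re.DOTALL)
    if match is None:
        return response, None
    reasoning = match.group(0)
    # Remove the matched reasoning block and hand back both pieces.
    return response.replace(reasoning, ""), reasoning

clean, thought = strip_reasoning("<think>outline the reply</think>The gate creaks open.")
# clean   -> "The gate creaks open."
# thought -> "<think>outline the reply</think>"
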
@@ -871,6 +1061,7 @@ class ClientBase: """ try: + self.rate_limit_update() if self.rate_limit_counter: aborted: bool = False while not self.rate_limit_counter.increment(): @@ -927,12 +1118,27 @@ class ClientBase: try: self._returned_prompt_tokens = None self._returned_response_tokens = None + self._reasoning_response = None self.emit_status(processing=True) await self.status() prompt_param = self.generate_prompt_parameters(kind) + if self.reason_enabled and not data_expected: + prompt = self.attach_response_length_instruction( + prompt, + (prompt_param.get(self.max_tokens_param_name) or 0) + - self.reason_tokens, + ) + + if not self.can_be_coerced: + prompt, coercion_prompt = self.split_prompt_for_coercion(prompt) + if coercion_prompt: + prompt += f"{INDIRECT_COERCION_PROMPT}{coercion_prompt}" + else: + coercion_prompt = None + finalized_prompt = self.prompt_template( self.get_system_message(kind), prompt ).strip(" ") @@ -954,11 +1160,26 @@ class ClientBase: max_token_length=self.max_token_length, parameters=prompt_param, ) - prompt_sent = self.repetition_adjustment(finalized_prompt) + + if "<|RESPONSE_LENGTH_INSTRUCTIONS|>" in finalized_prompt: + finalized_prompt = finalized_prompt.replace( + "\n<|RESPONSE_LENGTH_INSTRUCTIONS|>", "" + ) self.new_request() - response = await self._cancelable_generate(prompt_sent, prompt_param, kind) + response = await self._cancelable_generate( + finalized_prompt, prompt_param, kind + ) + + response, reasoning_response = self.strip_reasoning(response) + if reasoning_response: + self._reasoning_response = reasoning_response + + if coercion_prompt: + response = self.process_response_for_indirect_coercion( + finalized_prompt, response, coercion_prompt + ) self.end_request() @@ -966,14 +1187,13 @@ class ClientBase: # generation was cancelled raise response - # response = await self.generate(prompt_sent, prompt_param, kind) - - response, finalized_prompt = await self.auto_break_repetition( - finalized_prompt, prompt_param, response, kind, retries - ) - if REPLACE_SMART_QUOTES: - response = response.replace("“", '"').replace("”", '"') + response = ( + response.replace("“", '"') + .replace("”", '"') + .replace("‘", "'") + .replace("’", "'") + ) time_end = time.time() @@ -991,7 +1211,7 @@ class ClientBase: "prompt_sent", data=PromptData( kind=kind, - prompt=prompt_sent, + prompt=finalized_prompt, response=response, prompt_tokens=self._returned_prompt_tokens or token_length, response_tokens=self._returned_response_tokens @@ -1003,12 +1223,17 @@ class ClientBase: generation_parameters=prompt_param, inference_preset=client_context_attribute("inference_preset"), preset_group=self.preset_group, + reasoning=self._reasoning_response, ).model_dump(), ) return response except GenerationCancelled: raise + except GenerationProcessingError as e: + self.log.error("send_prompt error", e=e) + emit("status", message=str(e), status="error") + return "" except Exception: self.log.error("send_prompt error", e=traceback.format_exc()) emit( @@ -1023,130 +1248,6 @@ class ClientBase: if self.rate_limit_counter: self.rate_limit_counter.increment() - async def auto_break_repetition( - self, - finalized_prompt: str, - prompt_param: dict, - response: str, - kind: str, - retries: int, - pad_max_tokens: int = 32, - ) -> str: - """ - If repetition breaking is enabled, this will retry the prompt if its - response is too similar to other messages in the prompt - - This requires the agent to have the allow_repetition_break method - and the jiggle_enabled_for method and the client to have the - 
auto_break_repetition_enabled attribute set to True - - Arguments: - - - finalized_prompt: the prompt that was sent - - prompt_param: the parameters that were used - - response: the response that was received - - kind: the kind of generation - - retries: the number of retries left - - pad_max_tokens: increase response max_tokens by this amount per iteration - - Returns: - - - the response - """ - - if not self.auto_break_repetition_enabled or not response.strip(): - return response, finalized_prompt - - agent_context = active_agent.get() - if self.jiggle_enabled_for(kind, auto=True): - # check if the response is a repetition - # using the default similarity threshold of 98, meaning it needs - # to be really similar to be considered a repetition - - is_repetition, similarity_score, matched_line = util.similarity_score( - response, finalized_prompt.split("\n"), similarity_threshold=80 - ) - - if not is_repetition: - # not a repetition, return the response - - self.log.debug( - "send_prompt no similarity", similarity_score=similarity_score - ) - finalized_prompt = self.repetition_adjustment( - finalized_prompt, is_repetitive=False - ) - return response, finalized_prompt - - while is_repetition and retries > 0: - # it's a repetition, retry the prompt with adjusted parameters - - self.log.warn( - "send_prompt similarity retry", - agent=agent_context.agent.agent_type, - similarity_score=similarity_score, - retries=retries, - ) - - # first we apply the client's randomness jiggle which will adjust - # parameters like temperature and repetition_penalty, depending - # on the client - # - # this is a cumulative adjustment, so it will add to the previous - # iteration's adjustment, this also means retries should be kept low - # otherwise it will get out of hand and start generating nonsense - - self.jiggle_randomness(prompt_param, offset=0.5) - - # then we pad the max_tokens by the pad_max_tokens amount - - prompt_param[self.max_tokens_param_name] += pad_max_tokens - - # send the prompt again - # we use the repetition_adjustment method to further encourage - # the AI to break the repetition on its own as well. 
- - finalized_prompt = self.repetition_adjustment( - finalized_prompt, is_repetitive=True - ) - - response = retried_response = await self.generate( - finalized_prompt, prompt_param, kind - ) - - self.log.debug( - "send_prompt dedupe sentences", - response=response, - matched_line=matched_line, - ) - - # a lot of the times the response will now contain the repetition + something new - # so we dedupe the response to remove the repetition on sentences level - - response = util.dedupe_sentences( - response, matched_line, similarity_threshold=85, debug=True - ) - self.log.debug( - "send_prompt dedupe sentences (after)", response=response - ) - - # deduping may have removed the entire response, so we check for that - - if not util.strip_partial_sentences(response).strip(): - # if the response is empty, we set the response to the original - # and try again next loop - - response = retried_response - - # check if the response is a repetition again - - is_repetition, similarity_score, matched_line = util.similarity_score( - response, finalized_prompt.split("\n"), similarity_threshold=80 - ) - retries -= 1 - - return response, finalized_prompt - def count_tokens(self, content: str): return util.count_tokens(content) @@ -1169,31 +1270,9 @@ class ClientBase: return agent.allow_repetition_break(kind, agent_context.action, auto=auto) - def repetition_adjustment(self, prompt: str, is_repetitive: bool = False): - """ - Breaks the prompt into lines and checkse each line for a match with - [$REPETITION|{repetition_adjustment}]. - - On match and if is_repetitive is True, the line is removed from the prompt and - replaced with the repetition_adjustment. - - On match and if is_repetitive is False, the line is removed from the prompt. - """ - - lines = prompt.split("\n") - new_lines = [] - for line in lines: - if line.startswith("[$REPETITION|"): - if is_repetitive: - new_lines.append(line.split("|")[1][:-1]) - else: - new_lines.append("") - else: - new_lines.append(line) - - return "\n".join(new_lines) - - def process_response_for_indirect_coercion(self, prompt: str, response: str) -> str: + def process_response_for_indirect_coercion( + self, prompt: str, response: str, coercion_prompt: str + ) -> str: """ A lot of remote APIs don't let us control the prompt template and we cannot directly append the beginning of the desired response to the prompt. @@ -1202,13 +1281,19 @@ class ClientBase: and then hopefully it will adhere to it and we can strip it off the actual response. 
""" - _, right = prompt.split("\nStart your response with: ") - expected_response = right.strip() - if expected_response and expected_response.startswith("{"): + if coercion_prompt and coercion_prompt.startswith("{"): if response.startswith("```json") and response.endswith("```"): response = response[7:-3].strip() - if right and response.startswith(right): - response = response[len(right) :].strip() + log.debug( + "process_response_for_indirect_coercion", + response=f"|{response[:100]}...|", + coercion_prompt=f"|{coercion_prompt}|", + ) + + if coercion_prompt and response.startswith(coercion_prompt): + response = response[len(coercion_prompt) :].strip() + elif coercion_prompt and response.lstrip().startswith(coercion_prompt): + response = response.lstrip()[len(coercion_prompt) :].strip() return response diff --git a/src/talemate/client/cohere.py b/src/talemate/client/cohere.py index 06fe26c4..730e64c6 100644 --- a/src/talemate/client/cohere.py +++ b/src/talemate/client/cohere.py @@ -15,9 +15,8 @@ from talemate.client.remote import ( EndpointOverrideMixin, endpoint_override_extra_fields, ) -from talemate.config import Client as BaseClientConfig, load_config +from talemate.config.schema import Client as BaseClientConfig from talemate.emit import emit -from talemate.emit.signals import handlers from talemate.util import count_tokens __all__ = [ @@ -54,7 +53,6 @@ class CohereClient(EndpointOverrideMixin, ClientBase): client_type = "cohere" conversation_retries = 0 - auto_break_repetition_enabled = False decensor_enabled = True config_cls = ClientConfig @@ -67,18 +65,9 @@ class CohereClient(EndpointOverrideMixin, ClientBase): extra_fields: dict[str, ExtraField] = endpoint_override_extra_fields() defaults: Defaults = Defaults() - def __init__(self, model="command-r-plus", **kwargs): - self.model_name = model - self.api_key_status = None - self._reconfigure_endpoint_override(**kwargs) - self.config = load_config() - super().__init__(**kwargs) - - handlers["config_saved"].connect(self.on_config_saved) - @property def cohere_api_key(self): - return self.config.get("cohere", {}).get("api_key") + return self.config.cohere.api_key @property def supported_parameters(self): @@ -96,15 +85,15 @@ class CohereClient(EndpointOverrideMixin, ClientBase): def emit_status(self, processing: bool = None): error_action = None + error_message = None if processing is not None: self.processing = processing if self.cohere_api_key: status = "busy" if self.processing else "idle" - model_name = self.model_name else: status = "error" - model_name = "No API key set" + error_message = "No API key set" error_action = ErrorAction( title="Set API Key", action_name="openAppConfig", @@ -117,7 +106,7 @@ class CohereClient(EndpointOverrideMixin, ClientBase): if not self.model_name: status = "error" - model_name = "No model loaded" + error_message = "No model loaded" self.current_status = status @@ -125,67 +114,18 @@ class CohereClient(EndpointOverrideMixin, ClientBase): "error_action": error_action.model_dump() if error_action else None, "meta": self.Meta().model_dump(), "enabled": self.enabled, + "error_message": error_message, } data.update(self._common_status_data()) emit( "client_status", message=self.client_type, id=self.name, - details=model_name, + details=self.model_name, status=status if self.enabled else "disabled", data=data, ) - def set_client(self, max_token_length: int = None): - if not self.cohere_api_key and not self.endpoint_override_base_url_configured: - self.client = AsyncClientV2("sk-1111") - log.error("No 
cohere API key set") - if self.api_key_status: - self.api_key_status = False - emit("request_client_status") - emit("request_agent_status") - return - - if not self.model_name: - self.model_name = "command-r-plus" - - if max_token_length and not isinstance(max_token_length, int): - max_token_length = int(max_token_length) - - model = self.model_name - - self.client = AsyncClientV2(self.api_key, base_url=self.base_url) - self.max_token_length = max_token_length or 16384 - - if not self.api_key_status: - if self.api_key_status is False: - emit("request_client_status") - emit("request_agent_status") - self.api_key_status = True - - log.info( - "cohere set client", - max_token_length=self.max_token_length, - provided_max_token_length=max_token_length, - model=model, - ) - - def reconfigure(self, **kwargs): - if kwargs.get("model"): - self.model_name = kwargs["model"] - self.set_client(kwargs.get("max_token_length")) - - if "enabled" in kwargs: - self.enabled = bool(kwargs["enabled"]) - - self._reconfigure_common_parameters(**kwargs) - self._reconfigure_endpoint_override(**kwargs) - - def on_config_saved(self, event): - config = event.data - self.config = config - self.set_client(max_token_length=self.max_token_length) - def response_tokens(self, response: str): return count_tokens(response) @@ -195,16 +135,6 @@ class CohereClient(EndpointOverrideMixin, ClientBase): async def status(self): self.emit_status() - def prompt_template(self, system_message: str, prompt: str): - if "<|BOT|>" in prompt: - _, right = prompt.split("<|BOT|>", 1) - if right: - prompt = prompt.replace("<|BOT|>", "\nStart your response with: ") - else: - prompt = prompt.replace("<|BOT|>", "") - - return prompt - def clean_prompt_parameters(self, parameters: dict): super().clean_prompt_parameters(parameters) @@ -228,13 +158,7 @@ class CohereClient(EndpointOverrideMixin, ClientBase): if not self.cohere_api_key and not self.endpoint_override_base_url_configured: raise Exception("No cohere API key set") - right = None - expected_response = None - try: - _, right = prompt.split("\nStart your response with: ") - expected_response = right.strip() - except (IndexError, ValueError): - pass + client = AsyncClientV2(self.api_key, base_url=self.base_url) human_message = prompt.strip() system_message = self.get_system_message(kind) @@ -263,7 +187,7 @@ class CohereClient(EndpointOverrideMixin, ClientBase): # manager, so attempting to use `async with` raises a `TypeError` as seen # in issue logs. We therefore iterate over the generator directly. 
- stream = self.client.chat_stream( + stream = client.chat_stream( model=self.model_name, messages=messages, **parameters, @@ -283,13 +207,6 @@ class CohereClient(EndpointOverrideMixin, ClientBase): log.debug("generated response", response=response) - if expected_response and expected_response.startswith("{"): - if response.startswith("```json") and response.endswith("```"): - response = response[7:-3].strip() - - if right and response.startswith(right): - response = response[len(right) :].strip() - return response # except PermissionDeniedError as e: # self.log.error("generate error", e=e) diff --git a/src/talemate/client/deepseek.py b/src/talemate/client/deepseek.py index dfa536a5..302cc27a 100644 --- a/src/talemate/client/deepseek.py +++ b/src/talemate/client/deepseek.py @@ -4,9 +4,7 @@ from openai import AsyncOpenAI, PermissionDeniedError from talemate.client.base import ClientBase, ErrorAction, CommonDefaults from talemate.client.registry import register -from talemate.config import load_config from talemate.emit import emit -from talemate.emit.signals import handlers from talemate.util import count_tokens __all__ = [ @@ -40,7 +38,6 @@ class DeepSeekClient(ClientBase): client_type = "deepseek" conversation_retries = 0 - auto_break_repetition_enabled = False # TODO: make this configurable? decensor_enabled = False @@ -52,17 +49,9 @@ class DeepSeekClient(ClientBase): requires_prompt_template: bool = False defaults: Defaults = Defaults() - def __init__(self, model="deepseek-chat", **kwargs): - self.model_name = model - self.api_key_status = None - self.config = load_config() - super().__init__(**kwargs) - - handlers["config_saved"].connect(self.on_config_saved) - @property def deepseek_api_key(self): - return self.config.get("deepseek", {}).get("api_key") + return self.config.deepseek.api_key @property def supported_parameters(self): @@ -75,15 +64,15 @@ class DeepSeekClient(ClientBase): def emit_status(self, processing: bool = None): error_action = None + error_message = None if processing is not None: self.processing = processing if self.deepseek_api_key: status = "busy" if self.processing else "idle" - model_name = self.model_name else: status = "error" - model_name = "No API key set" + error_message = "No API key set" error_action = ErrorAction( title="Set API Key", action_name="openAppConfig", @@ -96,7 +85,7 @@ class DeepSeekClient(ClientBase): if not self.model_name: status = "error" - model_name = "No model loaded" + error_message = "No model loaded" self.current_status = status @@ -104,66 +93,18 @@ class DeepSeekClient(ClientBase): "error_action": error_action.model_dump() if error_action else None, "meta": self.Meta().model_dump(), "enabled": self.enabled, + "error_message": error_message, } data.update(self._common_status_data()) emit( "client_status", message=self.client_type, id=self.name, - details=model_name, + details=self.model_name, status=status if self.enabled else "disabled", data=data, ) - def set_client(self, max_token_length: int = None): - if not self.deepseek_api_key: - self.client = AsyncOpenAI(api_key="sk-1111", base_url=BASE_URL) - log.error("No DeepSeek API key set") - if self.api_key_status: - self.api_key_status = False - emit("request_client_status") - emit("request_agent_status") - return - - if not self.model_name: - self.model_name = "deepseek-chat" - - if max_token_length and not isinstance(max_token_length, int): - max_token_length = int(max_token_length) - - model = self.model_name - - self.client = AsyncOpenAI(api_key=self.deepseek_api_key, 
base_url=BASE_URL) - self.max_token_length = max_token_length or 16384 - - if not self.api_key_status: - if self.api_key_status is False: - emit("request_client_status") - emit("request_agent_status") - self.api_key_status = True - - log.info( - "deepseek set client", - max_token_length=self.max_token_length, - provided_max_token_length=max_token_length, - model=model, - ) - - def reconfigure(self, **kwargs): - if kwargs.get("model"): - self.model_name = kwargs["model"] - self.set_client(kwargs.get("max_token_length")) - - if "enabled" in kwargs: - self.enabled = bool(kwargs["enabled"]) - - self._reconfigure_common_parameters(**kwargs) - - def on_config_saved(self, event): - config = event.data - self.config = config - self.set_client(max_token_length=self.max_token_length) - def count_tokens(self, content: str): if not self.model_name: return 0 @@ -172,18 +113,6 @@ class DeepSeekClient(ClientBase): async def status(self): self.emit_status() - def prompt_template(self, system_message: str, prompt: str): - # only gpt-4-1106-preview supports json_object response coersion - - if "<|BOT|>" in prompt: - _, right = prompt.split("<|BOT|>", 1) - if right: - prompt = prompt.replace("<|BOT|>", "\nStart your response with: ") - else: - prompt = prompt.replace("<|BOT|>", "") - - return prompt - def response_tokens(self, response: str): # Count tokens in a response string using the util.count_tokens helper return self.count_tokens(response) @@ -200,20 +129,7 @@ class DeepSeekClient(ClientBase): if not self.deepseek_api_key: raise Exception("No DeepSeek API key set") - # only gpt-4-* supports enforcing json object - supports_json_object = ( - self.model_name.startswith("gpt-4-") - or self.model_name in JSON_OBJECT_RESPONSE_MODELS - ) - right = None - expected_response = None - try: - _, right = prompt.split("\nStart your response with: ") - expected_response = right.strip() - if expected_response.startswith("{") and supports_json_object: - parameters["response_format"] = {"type": "json_object"} - except (IndexError, ValueError): - pass + client = AsyncOpenAI(api_key=self.deepseek_api_key, base_url=BASE_URL) human_message = {"role": "user", "content": prompt.strip()} system_message = {"role": "system", "content": self.get_system_message(kind)} @@ -227,7 +143,7 @@ class DeepSeekClient(ClientBase): try: # Use streaming so we can update_Request_tokens incrementally - stream = await self.client.chat.completions.create( + stream = await client.chat.completions.create( model=self.model_name, messages=[system_message, human_message], stream=True, @@ -251,20 +167,6 @@ class DeepSeekClient(ClientBase): self._returned_prompt_tokens = self.prompt_tokens(prompt) self._returned_response_tokens = self.response_tokens(response) - # older models don't support json_object response coersion - # and often like to return the response wrapped in ```json - # so we strip that out if the expected response is a json object - if ( - not supports_json_object - and expected_response - and expected_response.startswith("{") - ): - if response.startswith("```json") and response.endswith("```"): - response = response[7:-3].strip() - - if right and response.startswith(right): - response = response[len(right) :].strip() - return response except PermissionDeniedError as e: self.log.error("generate error", e=e) diff --git a/src/talemate/client/google.py b/src/talemate/client/google.py index faebddc1..6f7fc14b 100644 --- a/src/talemate/client/google.py +++ b/src/talemate/client/google.py @@ -21,10 +21,8 @@ from talemate.client.remote import 
( EndpointOverrideMixin, endpoint_override_extra_fields, ) -from talemate.config import Client as BaseClientConfig -from talemate.config import load_config +from talemate.config.schema import Client as BaseClientConfig from talemate.emit import emit -from talemate.emit.signals import handlers from talemate.util import count_tokens __all__ = [ @@ -41,10 +39,11 @@ SUPPORTED_MODELS = [ "gemini-1.5-pro", "gemini-2.0-flash", "gemini-2.0-flash-lite", - "gemini-2.5-flash-preview-04-17", + "gemini-2.5-flash-lite-preview-06-17", "gemini-2.5-flash-preview-05-20", - "gemini-2.5-pro-preview-03-25", + "gemini-2.5-flash", "gemini-2.5-pro-preview-06-05", + "gemini-2.5-pro", ] @@ -59,6 +58,9 @@ class ClientConfig(EndpointOverride, BaseClientConfig): disable_safety_settings: bool = False +MIN_THINKING_TOKENS = 0 + + @register() class GoogleClient(EndpointOverrideMixin, RemoteServiceMixin, ClientBase): """ @@ -67,7 +69,6 @@ class GoogleClient(EndpointOverrideMixin, RemoteServiceMixin, ClientBase): client_type = "google" conversation_retries = 0 - auto_break_repetition_enabled = False decensor_enabled = True config_cls = ClientConfig @@ -90,21 +91,23 @@ class GoogleClient(EndpointOverrideMixin, RemoteServiceMixin, ClientBase): extra_fields.update(endpoint_override_extra_fields()) def __init__(self, model="gemini-2.0-flash", **kwargs): - self.model_name = model self.setup_status = None self.model_instance = None - self.disable_safety_settings = kwargs.get("disable_safety_settings", False) self.google_credentials_read = False self.google_project_id = None - self._reconfigure_endpoint_override(**kwargs) - self.config = load_config() super().__init__(**kwargs) - handlers["config_saved"].connect(self.on_config_saved) + @property + def disable_safety_settings(self): + return self.client_config.disable_safety_settings + + @property + def min_reason_tokens(self) -> int: + return MIN_THINKING_TOKENS @property def can_be_coerced(self) -> bool: - return True + return not self.reason_enabled @property def google_credentials(self): @@ -116,15 +119,15 @@ class GoogleClient(EndpointOverrideMixin, RemoteServiceMixin, ClientBase): @property def google_credentials_path(self): - return self.config.get("google").get("gcloud_credentials_path") + return self.config.google.gcloud_credentials_path @property def google_location(self): - return self.config.get("google").get("gcloud_location") + return self.config.google.gcloud_location @property def google_api_key(self): - return self.config.get("google").get("api_key") + return self.config.google.api_key @property def vertexai_ready(self) -> bool: @@ -197,6 +200,16 @@ class GoogleClient(EndpointOverrideMixin, RemoteServiceMixin, ClientBase): return genai_types.HttpOptions(base_url=self.base_url) + @property + def thinking_config(self) -> genai_types.ThinkingConfig | None: + if not self.reason_enabled: + return None + + return genai_types.ThinkingConfig( + thinking_budget=self.validated_reason_tokens, + include_thoughts=True, + ) + @property def supported_parameters(self): return [ @@ -211,6 +224,10 @@ class GoogleClient(EndpointOverrideMixin, RemoteServiceMixin, ClientBase): ), ] + @property + def requires_reasoning_pattern(self) -> bool: + return False + def emit_status(self, processing: bool = None): error_action = None if processing is not None: @@ -269,46 +286,20 @@ class GoogleClient(EndpointOverrideMixin, RemoteServiceMixin, ClientBase): "Error setting client base URL", error=e, client=self.client_type ) - def set_client(self, max_token_length: int = None, **kwargs): - if not 
self.ready: - log.error("Google cloud setup incomplete") - if self.setup_status: - self.setup_status = False - emit("request_client_status") - emit("request_agent_status") - return - - if not self.model_name: - self.model_name = "gemini-2.0-flash" - - if max_token_length and not isinstance(max_token_length, int): - max_token_length = int(max_token_length) - + def make_client(self) -> genai.Client: if self.google_credentials_path: os.environ["GOOGLE_APPLICATION_CREDENTIALS"] = self.google_credentials_path - - model = self.model_name - - self.max_token_length = max_token_length or 16384 - if self.vertexai_ready and not self.developer_api_ready: - self.client = genai.Client( + return genai.Client( vertexai=True, project=self.google_project_id, location=self.google_location, ) else: - self.client = genai.Client( + return genai.Client( api_key=self.api_key or None, http_options=self.http_options ) - log.info( - "google set client", - max_token_length=self.max_token_length, - provided_max_token_length=max_token_length, - model=model, - ) - def response_tokens(self, response: str): """Return token count for a response which may be a string or SDK object.""" return count_tokens(response) @@ -316,22 +307,6 @@ class GoogleClient(EndpointOverrideMixin, RemoteServiceMixin, ClientBase): def prompt_tokens(self, prompt: str): return count_tokens(prompt) - def reconfigure(self, **kwargs): - if kwargs.get("model"): - self.model_name = kwargs["model"] - self.set_client(kwargs.get("max_token_length")) - - if "disable_safety_settings" in kwargs: - self.disable_safety_settings = kwargs["disable_safety_settings"] - - if "enabled" in kwargs: - self.enabled = bool(kwargs["enabled"]) - - if "double_coercion" in kwargs: - self.double_coercion = kwargs["double_coercion"] - - self._reconfigure_common_parameters(**kwargs) - def clean_prompt_parameters(self, parameters: dict): super().clean_prompt_parameters(parameters) @@ -339,13 +314,6 @@ class GoogleClient(EndpointOverrideMixin, RemoteServiceMixin, ClientBase): if "top_k" in parameters and parameters["top_k"] == 0: del parameters["top_k"] - def prompt_template(self, system_message: str, prompt: str): - """ - Google handles the prompt template internally, so we just - give the prompt as is. - """ - return prompt - async def generate(self, prompt: str, parameters: dict, kind: str): """ Generates text from the given prompt and parameters. 
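A rough sketch of how the thinking budget from thinking_config plugs into a Gemini request, assuming the google-genai SDK imports already used in this file; the model name, budget value, and prompt below are illustrative only and not taken from the diff:

from google import genai
from google.genai import types as genai_types

# Developer API client; the Vertex AI branch instead passes
# vertexai=True, project=..., location=... as in make_client above.
client = genai.Client(api_key="YOUR_API_KEY")

config = genai_types.GenerateContentConfig(
    # include_thoughts=True makes streamed parts carry part.thought markers,
    # which the streaming loop below separates from the visible response.
    thinking_config=genai_types.ThinkingConfig(
        thinking_budget=1024,  # illustrative; the client uses validated_reason_tokens
        include_thoughts=True,
    ),
)

response = client.models.generate_content(
    model="gemini-2.5-flash",
    contents="Summarise the scene in two sentences.",
    config=config,
)
print(response.text)
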
@@ -354,7 +322,12 @@ class GoogleClient(EndpointOverrideMixin, RemoteServiceMixin, ClientBase): if not self.ready: raise Exception("Google setup incomplete") - prompt, coercion_prompt = self.split_prompt_for_coercion(prompt) + client = self.make_client() + + if self.can_be_coerced: + prompt, coercion_prompt = self.split_prompt_for_coercion(prompt) + else: + coercion_prompt = None human_message = prompt.strip() system_message = self.get_system_message(kind) @@ -371,6 +344,7 @@ class GoogleClient(EndpointOverrideMixin, RemoteServiceMixin, ClientBase): ] if coercion_prompt: + log.debug("Adding coercion pre-fill", coercion_prompt=coercion_prompt) contents.append( genai_types.Content( role="model", @@ -384,49 +358,57 @@ class GoogleClient(EndpointOverrideMixin, RemoteServiceMixin, ClientBase): self.log.debug( "generate", + model=self.model_name, base_url=self.base_url, prompt=prompt[:128] + " ...", parameters=parameters, system_message=system_message, disable_safety_settings=self.disable_safety_settings, safety_settings=self.safety_settings, + thinking_config=self.thinking_config, ) try: # Use streaming so we can update_Request_tokens incrementally - # stream = await chat.send_message_async( - # human_message, - # safety_settings=self.safety_settings, - # generation_config=parameters, - # stream=True - # ) - - stream = await self.client.aio.models.generate_content_stream( + stream = await client.aio.models.generate_content_stream( model=self.model_name, contents=contents, config=genai_types.GenerateContentConfig( safety_settings=self.safety_settings, http_options=self.http_options, + thinking_config=self.thinking_config, **parameters, ), ) response = "" - + reasoning = "" + # https://ai.google.dev/gemini-api/docs/thinking#summaries async for chunk in stream: - # For each streamed chunk, append content and update token counts - content_piece = getattr(chunk, "text", None) - if not content_piece: - # Some SDK versions wrap text under candidates[0].text - try: - content_piece = chunk.candidates[0].text # type: ignore - except Exception: - content_piece = None + try: + if not chunk: + continue - if content_piece: - response += content_piece - # Incrementally update token usage - self.update_request_tokens(count_tokens(content_piece)) + if not chunk.candidates: + continue + + if not chunk.candidates[0].content.parts: + continue + + for part in chunk.candidates[0].content.parts: + if not part.text: + continue + if part.thought: + reasoning += part.text + else: + response += part.text + self.update_request_tokens(count_tokens(part.text)) + except Exception as e: + log.error("error processing chunk", e=e, chunk=chunk) + continue + + if reasoning: + self._reasoning_response = reasoning # Store total token accounting for prompt/response self._returned_prompt_tokens = self.prompt_tokens(prompt) diff --git a/src/talemate/client/groq.py b/src/talemate/client/groq.py index b9e52569..8e42c886 100644 --- a/src/talemate/client/groq.py +++ b/src/talemate/client/groq.py @@ -4,9 +4,8 @@ from groq import AsyncGroq, PermissionDeniedError from talemate.client.base import ClientBase, ErrorAction, ParameterReroute, ExtraField from talemate.client.registry import register -from talemate.config import load_config +from talemate.config.schema import Client as BaseClientConfig from talemate.emit import emit -from talemate.emit.signals import handlers from talemate.client.remote import ( EndpointOverride, EndpointOverrideMixin, @@ -23,6 +22,10 @@ SUPPORTED_MODELS = [ "mixtral-8x7b-32768", "llama3-8b-8192", 
"llama3-70b-8192", + "llama-3.3-70b-versatile", + "qwen/qwen3-32b", + "moonshotai/kimi-k2-instruct", + "deepseek-r1-distill-llama-70b", ] JSON_OBJECT_RESPONSE_MODELS = [] @@ -30,7 +33,11 @@ JSON_OBJECT_RESPONSE_MODELS = [] class Defaults(EndpointOverride, pydantic.BaseModel): max_token_length: int = 8192 - model: str = "llama3-70b-8192" + model: str = "moonshotai/kimi-k2-instruct" + + +class ClientConfig(EndpointOverride, BaseClientConfig): + pass @register() @@ -41,9 +48,9 @@ class GroqClient(EndpointOverrideMixin, ClientBase): client_type = "groq" conversation_retries = 0 - auto_break_repetition_enabled = False # TODO: make this configurable? decensor_enabled = True + config_cls = ClientConfig class Meta(ClientBase.Meta): name_prefix: str = "Groq" @@ -54,19 +61,13 @@ class GroqClient(EndpointOverrideMixin, ClientBase): defaults: Defaults = Defaults() extra_fields: dict[str, ExtraField] = endpoint_override_extra_fields() - def __init__(self, model="llama3-70b-8192", **kwargs): - self.model_name = model - self.api_key_status = None - # Apply any endpoint override parameters provided via kwargs before creating client - self._reconfigure_endpoint_override(**kwargs) - self.config = load_config() - super().__init__(**kwargs) - - handlers["config_saved"].connect(self.on_config_saved) + @property + def can_be_coerced(self) -> bool: + return not self.reason_enabled @property def groq_api_key(self): - return self.config.get("groq", {}).get("api_key") + return self.config.groq.api_key @property def supported_parameters(self): @@ -83,15 +84,15 @@ class GroqClient(EndpointOverrideMixin, ClientBase): def emit_status(self, processing: bool = None): error_action = None + error_message = None if processing is not None: self.processing = processing if self.groq_api_key: status = "busy" if self.processing else "idle" - model_name = self.model_name else: status = "error" - model_name = "No API key set" + error_message = "No API key set" error_action = ErrorAction( title="Set API Key", action_name="openAppConfig", @@ -104,7 +105,7 @@ class GroqClient(EndpointOverrideMixin, ClientBase): if not self.model_name: status = "error" - model_name = "No model loaded" + error_message = "No model loaded" self.current_status = status @@ -112,6 +113,7 @@ class GroqClient(EndpointOverrideMixin, ClientBase): "error_action": error_action.model_dump() if error_action else None, "meta": self.Meta().model_dump(), "enabled": self.enabled, + "error_message": error_message, } # Include shared/common status data (rate limit, etc.) 
data.update(self._common_status_data()) @@ -120,66 +122,11 @@ class GroqClient(EndpointOverrideMixin, ClientBase): "client_status", message=self.client_type, id=self.name, - details=model_name, + details=self.model_name, status=status if self.enabled else "disabled", data=data, ) - def set_client(self, max_token_length: int = None): - # Determine if we should use the globally configured API key or the override key - if not self.groq_api_key and not self.endpoint_override_base_url_configured: - # No API key and no endpoint override – cannot initialize client correctly - self.client = AsyncGroq(api_key="sk-1111") - log.error("No groq.ai API key set") - if self.api_key_status: - self.api_key_status = False - emit("request_client_status") - emit("request_agent_status") - return - - if not self.model_name: - self.model_name = "llama3-70b-8192" - - if max_token_length and not isinstance(max_token_length, int): - max_token_length = int(max_token_length) - - model = self.model_name - - # Use the override values (if any) when constructing the Groq client - self.client = AsyncGroq(api_key=self.api_key, base_url=self.base_url) - self.max_token_length = max_token_length or 16384 - - if not self.api_key_status: - if self.api_key_status is False: - emit("request_client_status") - emit("request_agent_status") - self.api_key_status = True - - log.info( - "groq.ai set client", - max_token_length=self.max_token_length, - provided_max_token_length=max_token_length, - model=model, - ) - - def reconfigure(self, **kwargs): - if kwargs.get("model"): - self.model_name = kwargs["model"] - self.set_client(kwargs.get("max_token_length")) - - if "enabled" in kwargs: - self.enabled = bool(kwargs["enabled"]) - - # Allow dynamic reconfiguration of endpoint override parameters - self._reconfigure_endpoint_override(**kwargs) - # Reconfigure any common parameters (rate limit, data format, etc.) - self._reconfigure_common_parameters(**kwargs) - - def on_config_saved(self, event): - config = event.data - self.config = config - self.set_client(max_token_length=self.max_token_length) - def response_tokens(self, response: str): return response.usage.completion_tokens @@ -189,16 +136,6 @@ class GroqClient(EndpointOverrideMixin, ClientBase): async def status(self): self.emit_status() - def prompt_template(self, system_message: str, prompt: str): - if "<|BOT|>" in prompt: - _, right = prompt.split("<|BOT|>", 1) - if right: - prompt = prompt.replace("<|BOT|>", "\nStart your response with: ") - else: - prompt = prompt.replace("<|BOT|>", "") - - return prompt - async def generate(self, prompt: str, parameters: dict, kind: str): """ Generates text from the given prompt and parameters. 
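To make the coercion handling introduced across these clients easier to follow, here is a compact sketch of the two paths: when can_be_coerced holds, the text after <|BOT|> becomes an assistant pre-fill turn; otherwise it is appended to the user prompt via the "Start your response with:" instruction and stripped from the front of the reply afterwards. Function and variable names here are illustrative, not the diff's:

def split_for_coercion(prompt: str) -> tuple[str, str | None]:
    # Everything after <|BOT|> is the desired opening of the model's reply,
    # mirroring split_prompt_for_coercion.
    if "<|BOT|>" in prompt:
        prompt, coercion = prompt.split("<|BOT|>", 1)
        return prompt, coercion or None
    return prompt, None

prompt, coercion = split_for_coercion("Describe the cellar.<|BOT|>The cellar is")

# Pre-fill path: chat APIs that honour a partial assistant turn receive the
# coercion as the start of the assistant message.
messages = [{"role": "user", "content": prompt}]
if coercion:
    messages.append({"role": "assistant", "content": coercion.strip()})

# Indirect path: the coercion is folded into the prompt text instead,
# matching INDIRECT_COERCION_PROMPT above.
indirect_prompt = f"{prompt}\nStart your response with: {coercion}"

def strip_coercion(response: str, coercion: str) -> str:
    # Drop the echoed coercion text from the start of the response, as
    # process_response_for_indirect_coercion does.
    return response[len(coercion):].lstrip() if response.startswith(coercion) else response
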
@@ -207,16 +144,12 @@ class GroqClient(EndpointOverrideMixin, ClientBase): if not self.groq_api_key and not self.endpoint_override_base_url_configured: raise Exception("No groq.ai API key set") - supports_json_object = self.model_name in JSON_OBJECT_RESPONSE_MODELS - right = None - expected_response = None - try: - _, right = prompt.split("\nStart your response with: ") - expected_response = right.strip() - if expected_response.startswith("{") and supports_json_object: - parameters["response_format"] = {"type": "json_object"} - except (IndexError, ValueError): - pass + client = AsyncGroq(api_key=self.api_key, base_url=self.base_url) + + if self.can_be_coerced: + prompt, coercion_prompt = self.split_prompt_for_coercion(prompt) + else: + coercion_prompt = None system_message = self.get_system_message(kind) @@ -225,6 +158,10 @@ class GroqClient(EndpointOverrideMixin, ClientBase): {"role": "user", "content": prompt}, ] + if coercion_prompt: + log.debug("Adding coercion pre-fill", coercion_prompt=coercion_prompt) + messages.append({"role": "assistant", "content": coercion_prompt.strip()}) + self.log.debug( "generate", prompt=prompt[:128] + " ...", @@ -233,27 +170,25 @@ class GroqClient(EndpointOverrideMixin, ClientBase): ) try: - response = await self.client.chat.completions.create( + stream = await client.chat.completions.create( model=self.model_name, messages=messages, + stream=True, **parameters, ) - response = response.choices[0].message.content + response = "" - # older models don't support json_object response coersion - # and often like to return the response wrapped in ```json - # so we strip that out if the expected response is a json object - if ( - not supports_json_object - and expected_response - and expected_response.startswith("{") - ): - if response.startswith("```json") and response.endswith("```"): - response = response[7:-3].strip() - - if right and response.startswith(right): - response = response[len(right) :].strip() + # Iterate over streamed chunks + async for chunk in stream: + if not chunk.choices: + continue + delta = chunk.choices[0].delta + if delta and getattr(delta, "content", None): + content_piece = delta.content + response += content_piece + # Incrementally track token usage + self.update_request_tokens(self.count_tokens(content_piece)) return response except PermissionDeniedError as e: diff --git a/src/talemate/client/koboldcpp.py b/src/talemate/client/koboldcpp.py index 32b1e539..cedc927a 100644 --- a/src/talemate/client/koboldcpp.py +++ b/src/talemate/client/koboldcpp.py @@ -75,6 +75,7 @@ class KoboldEmbeddingFunction(EmbeddingFunction): class KoboldCppClient(ClientBase): auto_determine_prompt_template: bool = True client_type = "koboldcpp" + remote_model_locked: bool = True class Meta(ClientBase.Meta): name_prefix: str = "KoboldCpp" @@ -188,6 +189,10 @@ class KoboldCppClient(ClientBase): def embeddings_function(self): return KoboldEmbeddingFunction(self.embeddings_url, self.embeddings_model_name) + @property + def default_prompt_template(self) -> str: + return "KoboldAI.jinja2" + def api_endpoint_specified(self, url: str) -> bool: return "/v1" in self.api_url @@ -200,14 +205,9 @@ class KoboldCppClient(ClientBase): self.api_url += "/" def __init__(self, **kwargs): - self.api_key = kwargs.pop("api_key", "") super().__init__(**kwargs) self.ensure_api_endpoint_specified() - def set_client(self, **kwargs): - self.api_key = kwargs.get("api_key", self.api_key) - self.ensure_api_endpoint_specified() - async def get_embeddings_model_name(self): # if 
self._embeddings_model_name is set, return it if self.embeddings_model_name: @@ -245,15 +245,21 @@ class KoboldCppClient(ClientBase): model_name=self.embeddings_model_name, ) - self.set_embeddings() + await self.set_embeddings() - await async_signals.get("client.embeddings_available").send( - ClientEmbeddingsStatus( - client=self, - embedding_name=self.embeddings_model_name, - ) + emission = ClientEmbeddingsStatus( + client=self, + embedding_name=self.embeddings_model_name, ) + await async_signals.get("client.embeddings_available").send(emission) + + if not emission.seen: + # the suggestion has not been seen by the memory agent + # yet, so we unset the embeddings model name so it will + # get suggested again + self._embeddings_model_name = None + async def get_model_name(self): self.ensure_api_endpoint_specified() @@ -437,12 +443,6 @@ class KoboldCppClient(ClientBase): except KeyError: pass - def reconfigure(self, **kwargs): - if "api_key" in kwargs: - self.api_key = kwargs.pop("api_key") - - super().reconfigure(**kwargs) - async def visual_automatic1111_setup(self, visual_agent: "VisualBase") -> bool: """ Automatically configure the visual agent for automatic1111 diff --git a/src/talemate/client/lmstudio.py b/src/talemate/client/lmstudio.py index 76682519..5a475d2d 100644 --- a/src/talemate/client/lmstudio.py +++ b/src/talemate/client/lmstudio.py @@ -14,6 +14,7 @@ class Defaults(CommonDefaults, pydantic.BaseModel): class LMStudioClient(ClientBase): auto_determine_prompt_template: bool = True client_type = "lmstudio" + remote_model_locked: bool = True class Meta(ClientBase.Meta): name_prefix: str = "LMStudio" @@ -32,17 +33,16 @@ class LMStudioClient(ClientBase): ), ] - def set_client(self, **kwargs): - self.client = AsyncOpenAI(base_url=self.api_url + "/v1", api_key="sk-1111") - - def reconfigure(self, **kwargs): - super().reconfigure(**kwargs) - - if self.client and self.client.base_url != self.api_url: - self.set_client() + def make_client(self): + return AsyncOpenAI(base_url=self.api_url + "/v1", api_key=self.api_key) async def get_model_name(self): - model_name = await super().get_model_name() + client = self.make_client() + models = await client.models.list(timeout=self.status_request_timeout) + try: + model_name = models.data[0].id + except IndexError: + return None # model name comes back as a file path, so we need to extract the model name # the path could be windows or linux so it needs to handle both backslash and forward slash @@ -65,9 +65,11 @@ class LMStudioClient(ClientBase): parameters=parameters, ) + client = self.make_client() + try: # Send the request in streaming mode so we can update token counts - stream = await self.client.completions.create( + stream = await client.completions.create( model=self.model_name, prompt=prompt, stream=True, diff --git a/src/talemate/client/mistral.py b/src/talemate/client/mistral.py index 11276db8..d623c8e3 100644 --- a/src/talemate/client/mistral.py +++ b/src/talemate/client/mistral.py @@ -15,9 +15,8 @@ from talemate.client.remote import ( EndpointOverrideMixin, endpoint_override_extra_fields, ) -from talemate.config import Client as BaseClientConfig, load_config +from talemate.config.schema import Client as BaseClientConfig from talemate.emit import emit -from talemate.emit.signals import handlers __all__ = [ "MistralAIClient", @@ -33,14 +32,7 @@ SUPPORTED_MODELS = [ "mistral-small-latest", "mistral-medium-latest", "mistral-large-latest", -] - -JSON_OBJECT_RESPONSE_MODELS = [ - "open-mixtral-8x22b", - "open-mistral-nemo", - 
"mistral-small-latest", - "mistral-medium-latest", - "mistral-large-latest", + "magistral-medium-2506", ] @@ -61,7 +53,6 @@ class MistralAIClient(EndpointOverrideMixin, ClientBase): client_type = "mistral" conversation_retries = 0 - auto_break_repetition_enabled = False # TODO: make this configurable? decensor_enabled = True config_cls = ClientConfig @@ -75,17 +66,13 @@ class MistralAIClient(EndpointOverrideMixin, ClientBase): defaults: Defaults = Defaults() extra_fields: dict[str, ExtraField] = endpoint_override_extra_fields() - def __init__(self, model="open-mixtral-8x22b", **kwargs): - self.model_name = model - self.api_key_status = None - self._reconfigure_endpoint_override(**kwargs) - self.config = load_config() - super().__init__(**kwargs) - handlers["config_saved"].connect(self.on_config_saved) + @property + def can_be_coerced(self) -> bool: + return not self.reason_enabled @property def mistral_api_key(self): - return self.config.get("mistralai", {}).get("api_key") + return self.config.mistralai.api_key @property def supported_parameters(self): @@ -97,15 +84,15 @@ class MistralAIClient(EndpointOverrideMixin, ClientBase): def emit_status(self, processing: bool = None): error_action = None + error_message = None if processing is not None: self.processing = processing if self.mistral_api_key: status = "busy" if self.processing else "idle" - model_name = self.model_name else: status = "error" - model_name = "No API key set" + error_message = "No API key set" error_action = ErrorAction( title="Set API Key", action_name="openAppConfig", @@ -118,74 +105,25 @@ class MistralAIClient(EndpointOverrideMixin, ClientBase): if not self.model_name: status = "error" - model_name = "No model loaded" + error_message = "No model loaded" self.current_status = status data = { "error_action": error_action.model_dump() if error_action else None, "meta": self.Meta().model_dump(), "enabled": self.enabled, + "error_message": error_message, } data.update(self._common_status_data()) emit( "client_status", message=self.client_type, id=self.name, - details=model_name, + details=self.model_name, status=status if self.enabled else "disabled", data=data, ) - def set_client(self, max_token_length: int = None): - if not self.mistral_api_key and not self.endpoint_override_base_url_configured: - self.client = Mistral(api_key="sk-1111") - log.error("No mistral.ai API key set") - if self.api_key_status: - self.api_key_status = False - emit("request_client_status") - emit("request_agent_status") - return - - if not self.model_name: - self.model_name = "open-mixtral-8x22b" - - if max_token_length and not isinstance(max_token_length, int): - max_token_length = int(max_token_length) - - model = self.model_name - - self.client = Mistral(api_key=self.api_key, server_url=self.base_url) - self.max_token_length = max_token_length or 16384 - - if not self.api_key_status: - if self.api_key_status is False: - emit("request_client_status") - emit("request_agent_status") - self.api_key_status = True - - log.info( - "mistral.ai set client", - max_token_length=self.max_token_length, - provided_max_token_length=max_token_length, - model=model, - ) - - def reconfigure(self, **kwargs): - if "enabled" in kwargs: - self.enabled = bool(kwargs["enabled"]) - - self._reconfigure_common_parameters(**kwargs) - self._reconfigure_endpoint_override(**kwargs) - - if kwargs.get("model"): - self.model_name = kwargs["model"] - self.set_client(kwargs.get("max_token_length")) - - def on_config_saved(self, event): - config = event.data - self.config = 
config - self.set_client(max_token_length=self.max_token_length) - def response_tokens(self, response: str): return response.usage.completion_tokens @@ -195,16 +133,6 @@ class MistralAIClient(EndpointOverrideMixin, ClientBase): async def status(self): self.emit_status() - def prompt_template(self, system_message: str, prompt: str): - if "<|BOT|>" in prompt: - _, right = prompt.split("<|BOT|>", 1) - if right: - prompt = prompt.replace("<|BOT|>", "\nStart your response with: ") - else: - prompt = prompt.replace("<|BOT|>", "") - - return prompt - def clean_prompt_parameters(self, parameters: dict): super().clean_prompt_parameters(parameters) # clamp temperature to 0.1 and 1.0 @@ -220,16 +148,12 @@ class MistralAIClient(EndpointOverrideMixin, ClientBase): if not self.mistral_api_key: raise Exception("No mistral.ai API key set") - supports_json_object = self.model_name in JSON_OBJECT_RESPONSE_MODELS - right = None - expected_response = None - try: - _, right = prompt.split("\nStart your response with: ") - expected_response = right.strip() - if expected_response.startswith("{") and supports_json_object: - parameters["response_format"] = {"type": "json_object"} - except (IndexError, ValueError): - pass + client = Mistral(api_key=self.api_key, server_url=self.base_url) + + if self.can_be_coerced: + prompt, coercion_prompt = self.split_prompt_for_coercion(prompt) + else: + coercion_prompt = None system_message = self.get_system_message(kind) @@ -238,6 +162,16 @@ class MistralAIClient(EndpointOverrideMixin, ClientBase): {"role": "user", "content": prompt.strip()}, ] + if coercion_prompt: + log.debug("Adding coercion pre-fill", coercion_prompt=coercion_prompt) + messages.append( + { + "role": "assistant", + "content": coercion_prompt.strip(), + "prefix": True, + } + ) + self.log.debug( "generate", base_url=self.base_url, @@ -247,7 +181,7 @@ class MistralAIClient(EndpointOverrideMixin, ClientBase): ) try: - event_stream = await self.client.chat.stream_async( + event_stream = await client.chat.stream_async( model=self.model_name, messages=messages, **parameters, @@ -271,22 +205,6 @@ class MistralAIClient(EndpointOverrideMixin, ClientBase): self._returned_prompt_tokens = prompt_tokens self._returned_response_tokens = completion_tokens - # response = response.choices[0].message.content - - # older models don't support json_object response coersion - # and often like to return the response wrapped in ```json - # so we strip that out if the expected response is a json object - if ( - not supports_json_object - and expected_response - and expected_response.startswith("{") - ): - if response.startswith("```json") and response.endswith("```"): - response = response[7:-3].strip() - - if right and response.startswith(right): - response = response[len(right) :].strip() - return response except SDKError as e: self.log.error("generate error", e=e) diff --git a/src/talemate/client/model_prompts.py b/src/talemate/client/model_prompts.py index 042f1c9a..0482fcee 100644 --- a/src/talemate/client/model_prompts.py +++ b/src/talemate/client/model_prompts.py @@ -27,6 +27,8 @@ TALEMATE_TEMPLATE_PATH = os.path.join(BASE_TEMPLATE_PATH, "talemate") # user overrides USER_TEMPLATE_PATH = os.path.join(BASE_TEMPLATE_PATH, "user") +DEFAULT_TEMPLATE = "default.jinja2" + TEMPLATE_IDENTIFIERS = [] @@ -73,10 +75,11 @@ class ModelPrompt: system_message: str, prompt: str, double_coercion: str = None, + default_template: str = DEFAULT_TEMPLATE, ): template, template_file = self.get_template(model_name) if not template: - template_file = 
"default.jinja2" + template_file = default_template template = self.env.get_template(template_file) if not double_coercion: diff --git a/src/talemate/client/ollama.py b/src/talemate/client/ollama.py index 3836c19f..cf062acf 100644 --- a/src/talemate/client/ollama.py +++ b/src/talemate/client/ollama.py @@ -12,7 +12,7 @@ from talemate.client.base import ( ExtraField, ) from talemate.client.registry import register -from talemate.config import Client as BaseClientConfig +from talemate.config.schema import Client as BaseClientConfig log = structlog.get_logger("talemate.client.ollama") @@ -24,12 +24,10 @@ class OllamaClientDefaults(CommonDefaults): api_url: str = "http://localhost:11434" # Default Ollama URL model: str = "" # Allow empty default, will fetch from Ollama api_handles_prompt_template: bool = False - allow_thinking: bool = False class ClientConfig(BaseClientConfig): api_handles_prompt_template: bool = False - allow_thinking: bool = False @register() @@ -58,13 +56,6 @@ class OllamaClient(ClientBase): required=False, description="Let Ollama handle the prompt template. Only do this if you don't know which prompt template to use. Letting talemate handle the prompt template will generally lead to improved responses.", ), - "allow_thinking": ExtraField( - name="allow_thinking", - type="bool", - label="Allow thinking", - required=False, - description="Allow the model to think before responding. Talemate does not have a good way to deal with this yet, so it's recommended to leave this off.", - ), } @property @@ -90,51 +81,25 @@ class OllamaClient(ClientBase): "extra_stopping_strings", ] + def __init__( + self, + **kwargs, + ): + self._available_models = [] + self._models_last_fetched = 0 + super().__init__(**kwargs) + @property def can_be_coerced(self): """ Determines whether or not his client can pass LLM coercion. (e.g., is able to predefine partial LLM output in the prompt) """ - return not self.api_handles_prompt_template + return not self.api_handles_prompt_template and not self.reason_enabled @property - def can_think(self) -> bool: - """ - Allow reasoning models to think before responding. - """ - return self.allow_thinking - - def __init__( - self, - model=None, - api_handles_prompt_template=False, - allow_thinking=False, - **kwargs, - ): - self.model_name = model - self.api_handles_prompt_template = api_handles_prompt_template - self.allow_thinking = allow_thinking - self._available_models = [] - self._models_last_fetched = 0 - self.client = None - super().__init__(**kwargs) - - def set_client(self, **kwargs): - """ - Initialize the Ollama client with the API URL. 
- """ - # Update model if provided - if kwargs.get("model"): - self.model_name = kwargs["model"] - - # Create async client with the configured API URL - # Ollama's AsyncClient expects just the base URL without any path - self.client = ollama.AsyncClient(host=self.api_url) - self.api_handles_prompt_template = kwargs.get( - "api_handles_prompt_template", self.api_handles_prompt_template - ) - self.allow_thinking = kwargs.get("allow_thinking", self.allow_thinking) + def api_handles_prompt_template(self) -> bool: + return self.client_config.api_handles_prompt_template async def status(self): """ @@ -177,7 +142,9 @@ class OllamaClient(ClientBase): if time.time() - self._models_last_fetched < FETCH_MODELS_INTERVAL: return self._available_models - response = await self.client.list() + client = ollama.AsyncClient(host=self.api_url) + + response = await client.list() models = response.get("models", []) model_names = [model.model for model in models] self._available_models = sorted(model_names) @@ -192,19 +159,11 @@ class OllamaClient(ClientBase): return data async def get_model_name(self): - return self.model_name + return self.model def prompt_template(self, system_message: str, prompt: str): if not self.api_handles_prompt_template: return super().prompt_template(system_message, prompt) - - if "<|BOT|>" in prompt: - _, right = prompt.split("<|BOT|>", 1) - if right: - prompt = prompt.replace("<|BOT|>", "\nStart your response with: ") - else: - prompt = prompt.replace("<|BOT|>", "") - return prompt def tune_prompt_parameters(self, parameters: dict, kind: str): @@ -251,6 +210,8 @@ class OllamaClient(ClientBase): if not self.model_name: raise Exception("No model specified or available in Ollama") + client = ollama.AsyncClient(host=self.api_url) + # Prepare options for Ollama options = parameters @@ -258,12 +219,11 @@ class OllamaClient(ClientBase): try: # Use generate endpoint for completion - stream = await self.client.generate( + stream = await client.generate( model=self.model_name, prompt=prompt.strip(), options=options, raw=self.can_be_coerced, - think=self.can_think, stream=True, ) @@ -306,20 +266,3 @@ class OllamaClient(ClientBase): prompt_config["repetition_penalty"] = random.uniform( rep_pen + min_offset * 0.3, rep_pen + offset * 0.3 ) - - def reconfigure(self, **kwargs): - """ - Reconfigure the client with new settings. 
- """ - # Handle model update - if kwargs.get("model"): - self.model_name = kwargs["model"] - - super().reconfigure(**kwargs) - - # Re-initialize client if API URL changed or model changed - if "api_url" in kwargs or "model" in kwargs: - self.set_client(**kwargs) - - if "api_handles_prompt_template" in kwargs: - self.api_handles_prompt_template = kwargs["api_handles_prompt_template"] diff --git a/src/talemate/client/openai.py b/src/talemate/client/openai.py index 81f28289..858f50e3 100644 --- a/src/talemate/client/openai.py +++ b/src/talemate/client/openai.py @@ -12,9 +12,8 @@ from talemate.client.remote import ( EndpointOverrideMixin, endpoint_override_extra_fields, ) -from talemate.config import Client as BaseClientConfig, load_config +from talemate.config.schema import Client as BaseClientConfig from talemate.emit import emit -from talemate.emit.signals import handlers __all__ = [ "OpenAIClient", @@ -44,22 +43,15 @@ SUPPORTED_MODELS = [ "o1-preview", "o1-mini", "o3-mini", -] - -# any model starting with gpt-4- is assumed to support 'json_object' -# for others we need to explicitly state the model name -JSON_OBJECT_RESPONSE_MODELS = [ - "gpt-4o-2024-08-06", - "gpt-4o-2024-11-20", - "gpt-4o-realtime-preview", - "gpt-4o-mini-realtime-preview", - "gpt-4o", - "gpt-4o-mini", - "gpt-3.5-turbo-0125", + "gpt-5", + "gpt-5-mini", + "gpt-5-nano", ] def num_tokens_from_messages(messages: list[dict], model: str = "gpt-3.5-turbo-0613"): + # TODO this whole function probably needs to be rewritten at this point + """Return the number of tokens used by a list of messages.""" try: encoding = tiktoken.encoding_for_model(model) @@ -83,7 +75,7 @@ def num_tokens_from_messages(messages: list[dict], model: str = "gpt-3.5-turbo-0 tokens_per_name = -1 # if there's a name, the role is omitted elif "gpt-3.5-turbo" in model: return num_tokens_from_messages(messages, model="gpt-3.5-turbo-0613") - elif "gpt-4" in model or "o1" in model or "o3" in model: + elif "gpt-4" in model or "o1" in model or "o3" in model or "gpt-5" in model: return num_tokens_from_messages(messages, model="gpt-4-0613") else: raise NotImplementedError( @@ -104,9 +96,13 @@ def num_tokens_from_messages(messages: list[dict], model: str = "gpt-3.5-turbo-0 return num_tokens +DEFAULT_MODEL = "gpt-4o" + + class Defaults(EndpointOverride, CommonDefaults, pydantic.BaseModel): max_token_length: int = 16384 - model: str = "gpt-4o" + model: str = DEFAULT_MODEL + reason_tokens: int = 1024 class ClientConfig(EndpointOverride, BaseClientConfig): @@ -121,7 +117,6 @@ class OpenAIClient(EndpointOverrideMixin, ClientBase): client_type = "openai" conversation_retries = 0 - auto_break_repetition_enabled = False # TODO: make this configurable? 
decensor_enabled = False config_cls = ClientConfig @@ -135,18 +130,9 @@ class OpenAIClient(EndpointOverrideMixin, ClientBase): defaults: Defaults = Defaults() extra_fields: dict[str, ExtraField] = endpoint_override_extra_fields() - def __init__(self, model="gpt-4o", **kwargs): - self.model_name = model - self.api_key_status = None - self._reconfigure_endpoint_override(**kwargs) - self.config = load_config() - super().__init__(**kwargs) - - handlers["config_saved"].connect(self.on_config_saved) - @property def openai_api_key(self): - return self.config.get("openai", {}).get("api_key") + return self.config.openai.api_key @property def supported_parameters(self): @@ -157,17 +143,35 @@ class OpenAIClient(EndpointOverrideMixin, ClientBase): "max_tokens", ] + @property + def requires_reasoning_pattern(self) -> bool: + return False + def emit_status(self, processing: bool = None): error_action = None + error_message = None if processing is not None: self.processing = processing + # Auto-toggle reasoning based on selected model (OpenAI-specific) + # o1/o3/gpt-5 families are reasoning models + try: + if self.model_name: + is_reasoning_model = ( + "o1" in self.model_name + or "o3" in self.model_name + or "gpt-5" in self.model_name + ) + if self.client_config.reason_enabled != is_reasoning_model: + self.client_config.reason_enabled = is_reasoning_model + except Exception: + pass + if self.openai_api_key: status = "busy" if self.processing else "idle" - model_name = self.model_name else: status = "error" - model_name = "No API key set" + error_message = "No API key set" error_action = ErrorAction( title="Set API Key", action_name="openAppConfig", @@ -180,7 +184,7 @@ class OpenAIClient(EndpointOverrideMixin, ClientBase): if not self.model_name: status = "error" - model_name = "No model loaded" + error_message = "No model loaded" self.current_status = status @@ -188,6 +192,7 @@ class OpenAIClient(EndpointOverrideMixin, ClientBase): "error_action": error_action.model_dump() if error_action else None, "meta": self.Meta().model_dump(), "enabled": self.enabled, + "error_message": error_message, } data.update(self._common_status_data()) @@ -195,74 +200,11 @@ class OpenAIClient(EndpointOverrideMixin, ClientBase): "client_status", message=self.client_type, id=self.name, - details=model_name, + details=self.model_name, status=status if self.enabled else "disabled", data=data, ) - def set_client(self, max_token_length: int = None): - if not self.openai_api_key and not self.endpoint_override_base_url_configured: - self.client = AsyncOpenAI(api_key="sk-1111") - log.error("No OpenAI API key set") - if self.api_key_status: - self.api_key_status = False - emit("request_client_status") - emit("request_agent_status") - return - - if not self.model_name: - self.model_name = "gpt-3.5-turbo-16k" - - if max_token_length and not isinstance(max_token_length, int): - max_token_length = int(max_token_length) - - model = self.model_name - - self.client = AsyncOpenAI(api_key=self.api_key, base_url=self.base_url) - if model == "gpt-3.5-turbo": - self.max_token_length = min(max_token_length or 4096, 4096) - elif model == "gpt-4": - self.max_token_length = min(max_token_length or 8192, 8192) - elif model == "gpt-3.5-turbo-16k": - self.max_token_length = min(max_token_length or 16384, 16384) - elif model.startswith("gpt-4o") and model != "gpt-4o-2024-05-13": - self.max_token_length = min(max_token_length or 16384, 16384) - elif model == "gpt-4o-2024-05-13": - self.max_token_length = min(max_token_length or 4096, 4096) - elif model 
== "gpt-4-1106-preview": - self.max_token_length = min(max_token_length or 128000, 128000) - else: - self.max_token_length = max_token_length or 8192 - - if not self.api_key_status: - if self.api_key_status is False: - emit("request_client_status") - emit("request_agent_status") - self.api_key_status = True - - log.info( - "openai set client", - max_token_length=self.max_token_length, - provided_max_token_length=max_token_length, - model=model, - ) - - def reconfigure(self, **kwargs): - if kwargs.get("model"): - self.model_name = kwargs["model"] - self.set_client(kwargs.get("max_token_length")) - - if "enabled" in kwargs: - self.enabled = bool(kwargs["enabled"]) - - self._reconfigure_common_parameters(**kwargs) - self._reconfigure_endpoint_override(**kwargs) - - def on_config_saved(self, event): - config = event.data - self.config = config - self.set_client(max_token_length=self.max_token_length) - def count_tokens(self, content: str): if not self.model_name: return 0 @@ -271,18 +213,6 @@ class OpenAIClient(EndpointOverrideMixin, ClientBase): async def status(self): self.emit_status() - def prompt_template(self, system_message: str, prompt: str): - # only gpt-4-1106-preview supports json_object response coersion - - if "<|BOT|>" in prompt: - _, right = prompt.split("<|BOT|>", 1) - if right: - prompt = prompt.replace("<|BOT|>", "\nStart your response with: ") - else: - prompt = prompt.replace("<|BOT|>", "") - - return prompt - async def generate(self, prompt: str, parameters: dict, kind: str): """ Generates text from the given prompt and parameters. @@ -291,26 +221,17 @@ class OpenAIClient(EndpointOverrideMixin, ClientBase): if not self.openai_api_key and not self.endpoint_override_base_url_configured: raise Exception("No OpenAI API key set") - # only gpt-4-* supports enforcing json object - supports_json_object = ( - self.model_name.startswith("gpt-4-") - or self.model_name in JSON_OBJECT_RESPONSE_MODELS - ) - right = None - expected_response = None - try: - _, right = prompt.split("\nStart your response with: ") - expected_response = right.strip() - if expected_response.startswith("{") and supports_json_object: - parameters["response_format"] = {"type": "json_object"} - except (IndexError, ValueError): - pass + client = AsyncOpenAI(api_key=self.api_key, base_url=self.base_url) human_message = {"role": "user", "content": prompt.strip()} system_message = {"role": "system", "content": self.get_system_message(kind)} # o1 and o3 models don't support system_message - if "o1" in self.model_name or "o3" in self.model_name: + if ( + "o1" in self.model_name + or "o3" in self.model_name + or "gpt-5" in self.model_name + ): messages = [human_message] # paramters need to be munged # `max_tokens` becomes `max_completion_tokens` @@ -339,13 +260,20 @@ class OpenAIClient(EndpointOverrideMixin, ClientBase): self.log.debug( "generate", + model=self.model_name, prompt=prompt[:128] + " ...", parameters=parameters, system_message=system_message, ) + # GPT-5 models do not allow streaming for non-verified orgs; use non-streaming path + if "gpt-5" in self.model_name: + return await self._generate_non_streaming_completion( + client, messages, parameters + ) + try: - stream = await self.client.chat.completions.create( + stream = await client.chat.completions.create( model=self.model_name, messages=messages, stream=True, @@ -365,23 +293,6 @@ class OpenAIClient(EndpointOverrideMixin, ClientBase): # Incrementally track token usage self.update_request_tokens(self.count_tokens(content_piece)) - # 
self._returned_prompt_tokens = self.prompt_tokens(prompt) - # self._returned_response_tokens = self.response_tokens(response) - - # older models don't support json_object response coersion - # and often like to return the response wrapped in ```json - # so we strip that out if the expected response is a json object - if ( - not supports_json_object - and expected_response - and expected_response.startswith("{") - ): - if response.startswith("```json") and response.endswith("```"): - response = response[7:-3].strip() - - if right and response.startswith(right): - response = response[len(right) :].strip() - return response except PermissionDeniedError as e: self.log.error("generate error", e=e) @@ -389,3 +300,36 @@ class OpenAIClient(EndpointOverrideMixin, ClientBase): return "" except Exception: raise + + async def _generate_non_streaming_completion( + self, client: AsyncOpenAI, messages: list[dict], parameters: dict + ) -> str: + """Perform a non-streaming chat completion request and return the content. + + This is used for GPT-5 models which disallow streaming for non-verified orgs. + """ + try: + response = await client.chat.completions.create( + model=self.model_name, + messages=messages, + # No stream flag -> non-streaming + **parameters, + ) + + if not response.choices: + return "" + + message = response.choices[0].message + content = getattr(message, "content", "") or "" + + if content: + # Update token usage based on the full content + self.update_request_tokens(self.count_tokens(content)) + + return content + except PermissionDeniedError as e: + self.log.error("generate (non-streaming) error", e=e) + emit("status", message="OpenAI API: Permission Denied", status="error") + return "" + except Exception: + raise diff --git a/src/talemate/client/openai_compat.py b/src/talemate/client/openai_compat.py index d86a0a88..abe5c629 100644 --- a/src/talemate/client/openai_compat.py +++ b/src/talemate/client/openai_compat.py @@ -6,7 +6,7 @@ from openai import AsyncOpenAI, PermissionDeniedError from talemate.client.base import ClientBase, ExtraField from talemate.client.registry import register -from talemate.config import Client as BaseClientConfig +from talemate.config.schema import Client as BaseClientConfig from talemate.emit import emit log = structlog.get_logger("talemate.client.openai_compat") @@ -51,13 +51,9 @@ class OpenAICompatibleClient(ClientBase): ) } - def __init__( - self, model=None, api_key=None, api_handles_prompt_template=False, **kwargs - ): - self.model_name = model - self.api_key = api_key - self.api_handles_prompt_template = api_handles_prompt_template - super().__init__(**kwargs) + @property + def api_handles_prompt_template(self) -> bool: + return self.client_config.api_handles_prompt_template @property def experimental(self): @@ -69,7 +65,7 @@ class OpenAICompatibleClient(ClientBase): Determines whether or not his client can pass LLM coercion. 
(e.g., is able to predefine partial LLM output in the prompt) """ - return not self.api_handles_prompt_template + return not self.reason_enabled @property def supported_parameters(self): @@ -80,43 +76,21 @@ class OpenAICompatibleClient(ClientBase): "max_tokens", ] - def set_client(self, **kwargs): - self.api_key = kwargs.get("api_key", self.api_key) - self.api_handles_prompt_template = kwargs.get( - "api_handles_prompt_template", self.api_handles_prompt_template - ) - url = self.api_url - self.client = AsyncOpenAI(base_url=url, api_key=self.api_key) - self.model_name = ( - kwargs.get("model") or kwargs.get("model_name") or self.model_name - ) - def prompt_template(self, system_message: str, prompt: str): - log.debug( - "IS API HANDLING PROMPT TEMPLATE", - api_handles_prompt_template=self.api_handles_prompt_template, - ) - if not self.api_handles_prompt_template: return super().prompt_template(system_message, prompt) - - if "<|BOT|>" in prompt: - _, right = prompt.split("<|BOT|>", 1) - if right: - prompt = prompt.replace("<|BOT|>", "\nStart your response with: ") - else: - prompt = prompt.replace("<|BOT|>", "") - return prompt async def get_model_name(self): - return self.model_name + return self.model async def generate(self, prompt: str, parameters: dict, kind: str): """ Generates text from the given prompt and parameters. """ + client = AsyncOpenAI(base_url=self.api_url, api_key=self.api_key) + try: if self.api_handles_prompt_template: # OpenAI API handles prompt template @@ -126,15 +100,37 @@ class OpenAICompatibleClient(ClientBase): prompt=prompt[:128] + " ...", parameters=parameters, ) - human_message = {"role": "user", "content": prompt.strip()} - response = await self.client.chat.completions.create( + + if self.can_be_coerced: + prompt, coercion_prompt = self.split_prompt_for_coercion(prompt) + else: + coercion_prompt = None + + messages = [ + {"role": "system", "content": self.get_system_message(kind)}, + {"role": "user", "content": prompt.strip()}, + ] + + if coercion_prompt: + log.debug( + "Adding coercion pre-fill", coercion_prompt=coercion_prompt + ) + messages.append( + { + "role": "assistant", + "content": coercion_prompt.strip(), + "prefix": True, + } + ) + + response = await client.chat.completions.create( model=self.model_name, - messages=[human_message], + messages=messages, stream=False, **parameters, ) response = response.choices[0].message.content - return self.process_response_for_indirect_coercion(prompt, response) + return response else: # Talemate handles prompt template # Use the completions endpoint @@ -144,7 +140,7 @@ class OpenAICompatibleClient(ClientBase): parameters=parameters, ) parameters["prompt"] = prompt - response = await self.client.completions.create( + response = await client.completions.create( model=self.model_name, stream=False, **parameters ) return response.choices[0].text @@ -159,34 +155,6 @@ class OpenAICompatibleClient(ClientBase): ) return "" - def reconfigure(self, **kwargs): - if kwargs.get("model"): - self.model_name = kwargs["model"] - if "api_url" in kwargs: - self.api_url = kwargs["api_url"] - if "max_token_length" in kwargs: - self.max_token_length = ( - int(kwargs["max_token_length"]) if kwargs["max_token_length"] else 8192 - ) - if "api_key" in kwargs: - self.api_key = kwargs["api_key"] - if "api_handles_prompt_template" in kwargs: - self.api_handles_prompt_template = kwargs["api_handles_prompt_template"] - # TODO: why isn't this calling super()? 
- if "enabled" in kwargs: - self.enabled = bool(kwargs["enabled"]) - - if "double_coercion" in kwargs: - self.double_coercion = kwargs["double_coercion"] - - if "rate_limit" in kwargs: - self.rate_limit = kwargs["rate_limit"] - - if "enabled" in kwargs: - self.enabled = bool(kwargs["enabled"]) - - self.set_client(**kwargs) - def jiggle_randomness(self, prompt_config: dict, offset: float = 0.3) -> dict: """ adjusts temperature and presence penalty diff --git a/src/talemate/client/openrouter.py b/src/talemate/client/openrouter.py index 1878630f..0a125fa5 100644 --- a/src/talemate/client/openrouter.py +++ b/src/talemate/client/openrouter.py @@ -4,9 +4,17 @@ import httpx import asyncio import json -from talemate.client.base import ClientBase, ErrorAction, CommonDefaults +from talemate.client.base import ( + ClientBase, + ErrorAction, + CommonDefaults, + ExtraField, + FieldGroup, +) +from talemate.config.schema import Client as BaseClientConfig +from talemate.config import get_config + from talemate.client.registry import register -from talemate.config import load_config from talemate.emit import emit from talemate.emit.signals import handlers @@ -18,6 +26,91 @@ log = structlog.get_logger("talemate.client.openrouter") # Available models will be populated when first client with API key is initialized AVAILABLE_MODELS = [] + +# Static list of providers that are supported by OpenRouter +# https://openrouter.ai/docs/features/provider-routing#json-schema-for-provider-preferences + + +AVAILABLE_PROVIDERS = [ + "AnyScale", + "Cent-ML", + "HuggingFace", + "Hyperbolic 2", + "Lepton", + "Lynn 2", + "Lynn", + "Mancer", + "Modal", + "OctoAI", + "Recursal", + "Reflection", + "Replicate", + "SambaNova 2", + "SF Compute", + "Together 2", + "01.AI", + "AI21", + "AionLabs", + "Alibaba", + "Amazon Bedrock", + "Anthropic", + "AtlasCloud", + "Atoma", + "Avian", + "Azure", + "BaseTen", + "Cerebras", + "Chutes", + "Cloudflare", + "Cohere", + "CrofAI", + "Crusoe", + "DeepInfra", + "DeepSeek", + "Enfer", + "Featherless", + "Fireworks", + "Friendli", + "GMICloud", + "Google", + "Google AI Studio", + "Groq", + "Hyperbolic", + "Inception", + "InferenceNet", + "Infermatic", + "Inflection", + "InoCloud", + "Kluster", + "Lambda", + "Liquid", + "Mancer 2", + "Meta", + "Minimax", + "Mistral", + "Moonshot AI", + "Morph", + "NCompass", + "Nebius", + "NextBit", + "Nineteen", + "Novita", + "OpenAI", + "OpenInference", + "Parasail", + "Perplexity", + "Phala", + "SambaNova", + "Stealth", + "Switchpoint", + "Targon", + "Together", + "Ubicloud", + "Venice", + "xAI", +] +AVAILABLE_PROVIDERS.sort() + DEFAULT_MODEL = "" MODELS_FETCHED = False @@ -25,7 +118,6 @@ MODELS_FETCHED = False async def fetch_available_models(api_key: str = None): """Fetch available models from OpenRouter API""" global AVAILABLE_MODELS, DEFAULT_MODEL, MODELS_FETCHED - if not api_key: return [] @@ -37,6 +129,7 @@ async def fetch_available_models(api_key: str = None): return AVAILABLE_MODELS try: + log.debug("Fetching models from OpenRouter") async with httpx.AsyncClient() as client: response = await client.get( "https://openrouter.ai/api/v1/models", timeout=10.0 @@ -61,19 +154,36 @@ async def fetch_available_models(api_key: str = None): return AVAILABLE_MODELS -def fetch_models_sync(event): - api_key = event.data.get("openrouter", {}).get("api_key") +def fetch_models_sync(api_key: str): loop = asyncio.get_event_loop() loop.run_until_complete(fetch_available_models(api_key)) -handlers["config_saved"].connect(fetch_models_sync) 
-handlers["talemate_started"].connect(fetch_models_sync) +def on_talemate_started(event): + fetch_models_sync(get_config().openrouter.api_key) + + +handlers["talemate_started"].connect(on_talemate_started) class Defaults(CommonDefaults, pydantic.BaseModel): max_token_length: int = 16384 model: str = DEFAULT_MODEL + provider_only: list[str] = pydantic.Field(default_factory=list) + provider_ignore: list[str] = pydantic.Field(default_factory=list) + + +class ClientConfig(BaseClientConfig): + provider_only: list[str] = pydantic.Field(default_factory=list) + provider_ignore: list[str] = pydantic.Field(default_factory=list) + + +PROVIDER_FIELD_GROUP = FieldGroup( + name="provider", + label="Provider", + description="Configure OpenRouter provider routing.", + icon="mdi-server-network", +) @register() @@ -84,9 +194,9 @@ class OpenRouterClient(ClientBase): client_type = "openrouter" conversation_retries = 0 - auto_break_repetition_enabled = False # TODO: make this configurable? decensor_enabled = False + config_cls = ClientConfig class Meta(ClientBase.Meta): name_prefix: str = "OpenRouter" @@ -97,23 +207,46 @@ class OpenRouterClient(ClientBase): ) requires_prompt_template: bool = False defaults: Defaults = Defaults() + extra_fields: dict[str, ExtraField] = { + "provider_only": ExtraField( + name="provider_only", + type="flags", + label="Only use these providers", + choices=AVAILABLE_PROVIDERS, + description="Manually limit the providers to use for the selected model. This will override the default provider selection for this model.", + group=PROVIDER_FIELD_GROUP, + required=False, + ), + "provider_ignore": ExtraField( + name="provider_ignore", + type="flags", + label="Ignore these providers", + choices=AVAILABLE_PROVIDERS, + description="Ignore these providers for the selected model. 
This will override the default provider selection for this model.", + group=PROVIDER_FIELD_GROUP, + required=False, + ), + } - def __init__(self, model=None, **kwargs): - self.model_name = model or DEFAULT_MODEL - self.api_key_status = None - self.config = load_config() + def __init__(self, **kwargs): self._models_fetched = False super().__init__(**kwargs) - handlers["config_saved"].connect(self.on_config_saved) + @property + def provider_only(self) -> list[str]: + return self.client_config.provider_only + + @property + def provider_ignore(self) -> list[str]: + return self.client_config.provider_ignore @property def can_be_coerced(self) -> bool: - return True + return not self.reason_enabled @property def openrouter_api_key(self): - return self.config.get("openrouter", {}).get("api_key") + return self.config.openrouter.api_key @property def supported_parameters(self): @@ -130,15 +263,15 @@ class OpenRouterClient(ClientBase): def emit_status(self, processing: bool = None): error_action = None + error_message = None if processing is not None: self.processing = processing if self.openrouter_api_key: status = "busy" if self.processing else "idle" - model_name = self.model_name else: status = "error" - model_name = "No API key set" + error_message = "No API key set" error_action = ErrorAction( title="Set API Key", action_name="openAppConfig", @@ -151,7 +284,7 @@ class OpenRouterClient(ClientBase): if not self.model_name: status = "error" - model_name = "No model loaded" + error_message = "No model loaded" self.current_status = status @@ -159,6 +292,7 @@ class OpenRouterClient(ClientBase): "error_action": error_action.model_dump() if error_action else None, "meta": self.Meta().model_dump(), "enabled": self.enabled, + "error_message": error_message, } data.update(self._common_status_data()) @@ -166,60 +300,11 @@ class OpenRouterClient(ClientBase): "client_status", message=self.client_type, id=self.name, - details=model_name, + details=self.model_name, status=status if self.enabled else "disabled", data=data, ) - def set_client(self, max_token_length: int = None): - # Unlike other clients, we don't need to set up a client instance - # We'll use httpx directly in the generate method - - if not self.openrouter_api_key: - log.error("No OpenRouter API key set") - if self.api_key_status: - self.api_key_status = False - emit("request_client_status") - emit("request_agent_status") - return - - if not self.model_name: - self.model_name = DEFAULT_MODEL - - if max_token_length and not isinstance(max_token_length, int): - max_token_length = int(max_token_length) - - # Set max token length (default to 16k if not specified) - self.max_token_length = max_token_length or 16384 - - if not self.api_key_status: - if self.api_key_status is False: - emit("request_client_status") - emit("request_agent_status") - self.api_key_status = True - - log.info( - "openrouter set client", - max_token_length=self.max_token_length, - provided_max_token_length=max_token_length, - model=self.model_name, - ) - - def reconfigure(self, **kwargs): - if kwargs.get("model"): - self.model_name = kwargs["model"] - self.set_client(kwargs.get("max_token_length")) - - if "enabled" in kwargs: - self.enabled = bool(kwargs["enabled"]) - - self._reconfigure_common_parameters(**kwargs) - - def on_config_saved(self, event): - config = event.data - self.config = config - self.set_client(max_token_length=self.max_token_length) - async def status(self): # Fetch models if we have an API key and haven't fetched yet if self.openrouter_api_key and not 
self._models_fetched: @@ -229,13 +314,6 @@ class OpenRouterClient(ClientBase): self.emit_status() - def prompt_template(self, system_message: str, prompt: str): - """ - Open-router handles the prompt template internally, so we just - give the prompt as is. - """ - return prompt - async def generate(self, prompt: str, parameters: dict, kind: str): """ Generates text from the given prompt and parameters using OpenRouter API. @@ -244,7 +322,10 @@ class OpenRouterClient(ClientBase): if not self.openrouter_api_key: raise Exception("No OpenRouter API key set") - prompt, coercion_prompt = self.split_prompt_for_coercion(prompt) + if self.can_be_coerced: + prompt, coercion_prompt = self.split_prompt_for_coercion(prompt) + else: + coercion_prompt = None # Prepare messages for chat completion messages = [ @@ -253,7 +334,23 @@ class OpenRouterClient(ClientBase): ] if coercion_prompt: - messages.append({"role": "assistant", "content": coercion_prompt.strip()}) + log.debug("Adding coercion pre-fill", coercion_prompt=coercion_prompt) + messages.append( + { + "role": "assistant", + "content": coercion_prompt.strip(), + "prefix": True, + } + ) + + provider = {} + if self.provider_only: + provider["only"] = self.provider_only + if self.provider_ignore: + provider["ignore"] = self.provider_ignore + + if provider: + parameters["provider"] = provider # Prepare request payload payload = { @@ -320,7 +417,7 @@ class OpenRouterClient(ClientBase): self.count_tokens(content) ) - except json.JSONDecodeError: + except (json.JSONDecodeError, KeyError): pass # Extract the response content diff --git a/src/talemate/client/presets.py b/src/talemate/client/presets.py index 56d5b807..50fb85de 100644 --- a/src/talemate/client/presets.py +++ b/src/talemate/client/presets.py @@ -3,8 +3,7 @@ from typing import TYPE_CHECKING import structlog from talemate.client.context import set_client_context_attribute -from talemate.config import InferencePresets, InferencePresetGroup, load_config -from talemate.emit.signals import handlers +from talemate.config import get_config if TYPE_CHECKING: from talemate.client.base import ClientBase @@ -20,42 +19,19 @@ __all__ = [ log = structlog.get_logger("talemate.client.presets") -config = load_config(as_model=True) - - -# Load the config -CONFIG = { - "inference": config.presets.inference, - "inference_groups": config.presets.inference_groups, -} - - -# Sync the config when it is saved -def sync_config(event): - CONFIG["inference"] = InferencePresets( - **event.data.get("presets", {}).get("inference", {}) - ) - CONFIG["inference_groups"] = { - group: InferencePresetGroup(**data) - for group, data in event.data.get("presets", {}) - .get("inference_groups", {}) - .items() - } - - -handlers["config_saved"].connect(sync_config) - def get_inference_parameters(preset_name: str, group: str | None = None) -> dict: """ Returns the inference parameters for the given preset name. 
""" - presets = CONFIG["inference"].model_dump() + config = get_config() + + presets = config.presets.inference.model_dump() if group: try: - group_presets = CONFIG["inference_groups"].get(group).model_dump() + group_presets = config.presets.inference_groups.get(group).model_dump() presets.update(group_presets["presets"]) except AttributeError: log.warning( @@ -74,6 +50,7 @@ def configure(parameters: dict, kind: str, total_budget: int, client: "ClientBas """ set_preset(parameters, kind, client) set_max_tokens(parameters, kind, total_budget) + return parameters @@ -141,7 +118,7 @@ def preset_for_kind(kind: str, client: "ClientBase") -> dict: if not preset_name: log.warning( f"No preset found for kind {kind}, defaulting to 'scene_direction'", - presets=CONFIG["inference"], + presets=get_config().presets.inference, ) preset_name = "scene_direction" diff --git a/src/talemate/client/remote.py b/src/talemate/client/remote.py index ba9ff2ca..138cfe85 100644 --- a/src/talemate/client/remote.py +++ b/src/talemate/client/remote.py @@ -69,17 +69,13 @@ class EndpointOverrideAPIKeyField(EndpointOverrideField): class EndpointOverrideMixin: - override_base_url: str | None = None - override_api_key: str | None = None + @property + def override_base_url(self) -> str | None: + return self.client_config.override_base_url - def set_client_api_key(self, api_key: str | None): - if getattr(self, "client", None): - try: - self.client.api_key = api_key - except Exception as e: - log.error( - "Error setting client API key", error=e, client=self.client_type - ) + @property + def override_api_key(self) -> str | None: + return self.client_config.override_api_key @property def api_key(self) -> str | None: @@ -108,41 +104,7 @@ class EndpointOverrideMixin: and self.endpoint_override_api_key_configured ) - def _reconfigure_endpoint_override(self, **kwargs): - if "override_base_url" in kwargs: - orig = getattr(self, "override_base_url", None) - self.override_base_url = kwargs["override_base_url"] - if getattr(self, "client", None) and orig != self.override_base_url: - log.info("Reconfiguring client base URL", new=self.override_base_url) - self.set_client(kwargs.get("max_token_length")) - - if "override_api_key" in kwargs: - self.override_api_key = kwargs["override_api_key"] - self.set_client_api_key(self.override_api_key) - class RemoteServiceMixin: - def prompt_template(self, system_message: str, prompt: str): - if "<|BOT|>" in prompt: - _, right = prompt.split("<|BOT|>", 1) - if right: - prompt = prompt.replace("<|BOT|>", "\nStart your response with: ") - else: - prompt = prompt.replace("<|BOT|>", "") - - return prompt - - def reconfigure(self, **kwargs): - if kwargs.get("model"): - self.model_name = kwargs["model"] - self.set_client(kwargs.get("max_token_length")) - if "enabled" in kwargs: - self.enabled = bool(kwargs["enabled"]) - - def on_config_saved(self, event): - config = event.data - self.config = config - self.set_client(max_token_length=self.max_token_length) - async def status(self): self.emit_status() diff --git a/src/talemate/client/runpod.py b/src/talemate/client/runpod.py index b3109f8e..f2bb8ad9 100644 --- a/src/talemate/client/runpod.py +++ b/src/talemate/client/runpod.py @@ -9,7 +9,7 @@ import dotenv import runpod import structlog -from talemate.config import load_config +from talemate.config import get_config from .bootstrap import ClientBootstrap, ClientType, register_list @@ -17,7 +17,6 @@ log = structlog.get_logger("talemate.client.runpod") dotenv.load_dotenv() -runpod.api_key = 
load_config().get("runpod", {}).get("api_key", "") TEXTGEN_IDENTIFIERS = ["textgen", "thebloke llms", "text-generation-webui"] @@ -35,6 +34,7 @@ async def _async_get_pods(): """ asyncio wrapper around get_pods. """ + runpod.api_key = get_config().runpod.api_key loop = asyncio.get_event_loop() return await loop.run_in_executor(None, runpod.get_pods) @@ -44,6 +44,7 @@ async def get_textgen_pods(): """ Return a list of text generation pods. """ + runpod.api_key = get_config().runpod.api_key if not runpod.api_key: return @@ -60,6 +61,8 @@ async def get_automatic1111_pods(): Return a list of automatic1111 pods. """ + runpod.api_key = get_config().runpod.api_key + if not runpod.api_key: return diff --git a/src/talemate/client/tabbyapi.py b/src/talemate/client/tabbyapi.py index 65bc910e..58e8ccab 100644 --- a/src/talemate/client/tabbyapi.py +++ b/src/talemate/client/tabbyapi.py @@ -8,7 +8,7 @@ from openai import PermissionDeniedError from talemate.client.base import ClientBase, ExtraField, CommonDefaults from talemate.client.registry import register from talemate.client.utils import urljoin -from talemate.config import Client as BaseClientConfig +from talemate.config.schema import Client as BaseClientConfig from talemate.emit import emit log = structlog.get_logger("talemate.client.tabbyapi") @@ -34,6 +34,7 @@ class TabbyAPIClient(ClientBase): client_type = "tabbyapi" conversation_retries = 0 config_cls = ClientConfig + remote_model_locked: bool = True class Meta(ClientBase.Meta): title: str = "TabbyAPI" @@ -52,13 +53,9 @@ class TabbyAPIClient(ClientBase): ) } - def __init__( - self, model=None, api_key=None, api_handles_prompt_template=False, **kwargs - ): - self.model_name = model - self.api_key = api_key - self.api_handles_prompt_template = api_handles_prompt_template - super().__init__(**kwargs) + @property + def api_handles_prompt_template(self) -> bool: + return self.client_config.api_handles_prompt_template @property def experimental(self): @@ -69,7 +66,7 @@ class TabbyAPIClient(ClientBase): """ Determines whether or not this client can pass LLM coercion. 
(e.g., is able to predefine partial LLM output in the prompt) """ - return not self.api_handles_prompt_template + return not self.reason_enabled @property def supported_parameters(self): @@ -92,31 +89,9 @@ class TabbyAPIClient(ClientBase): "temperature", ] - def set_client(self, **kwargs): - self.api_key = kwargs.get("api_key", self.api_key) - self.api_handles_prompt_template = kwargs.get( - "api_handles_prompt_template", self.api_handles_prompt_template - ) - self.model_name = ( - kwargs.get("model") or kwargs.get("model_name") or self.model_name - ) - def prompt_template(self, system_message: str, prompt: str): - log.debug( - "IS API HANDLING PROMPT TEMPLATE", - api_handles_prompt_template=self.api_handles_prompt_template, - ) - if not self.api_handles_prompt_template: return super().prompt_template(system_message, prompt) - - if "<|BOT|>" in prompt: - _, right = prompt.split("<|BOT|>", 1) - if right: - prompt = prompt.replace("<|BOT|>", "\nStart your response with: ") - else: - prompt = prompt.replace("<|BOT|>", "") - return prompt async def get_model_name(self): @@ -152,11 +127,31 @@ class TabbyAPIClient(ClientBase): parameters=parameters, ) - human_message = {"role": "user", "content": prompt.strip()} + if self.can_be_coerced: + prompt, coercion_prompt = self.split_prompt_for_coercion(prompt) + else: + coercion_prompt = None + + messages = [ + {"role": "system", "content": self.get_system_message(kind)}, + {"role": "user", "content": prompt.strip()}, + ] + + if coercion_prompt: + log.debug( + "Adding coercion pre-fill", coercion_prompt=coercion_prompt + ) + messages.append( + { + "role": "assistant", + "content": coercion_prompt.strip(), + "prefix": True, + } + ) payload = { "model": self.model_name, - "messages": [human_message], + "messages": messages, "stream": True, "stream_options": { "include_usage": True, @@ -229,6 +224,10 @@ class TabbyAPIClient(ClientBase): ) usage = data_obj.get("usage", {}) + + if not usage: + continue + completion_tokens = usage.get( "completion_tokens", 0 ) @@ -239,7 +238,7 @@ class TabbyAPIClient(ClientBase): self.update_request_tokens( self.count_tokens(content) ) - except json.JSONDecodeError: + except (json.JSONDecodeError, IndexError): # ignore malformed json chunks pass @@ -247,12 +246,6 @@ class TabbyAPIClient(ClientBase): self._returned_prompt_tokens = prompt_tokens self._returned_response_tokens = completion_tokens - if is_chat: - # Process indirect coercion - response_text = self.process_response_for_indirect_coercion( - prompt, response_text - ) - return response_text except PermissionDeniedError as e: @@ -264,34 +257,15 @@ class TabbyAPIClient(ClientBase): emit("status", message="TabbyAPI: Request timed out", status="error") return "" except Exception as e: + import traceback + + print(traceback.format_exc()) self.log.error("generate error", e=e) emit( "status", message="Error during generation (check logs)", status="error" ) return "" - def reconfigure(self, **kwargs): - if kwargs.get("model"): - self.model_name = kwargs["model"] - if "api_url" in kwargs: - self.api_url = kwargs["api_url"] - if "max_token_length" in kwargs: - self.max_token_length = ( - int(kwargs["max_token_length"]) if kwargs["max_token_length"] else 8192 - ) - if "api_key" in kwargs: - self.api_key = kwargs["api_key"] - if "api_handles_prompt_template" in kwargs: - self.api_handles_prompt_template = kwargs["api_handles_prompt_template"] - if "enabled" in kwargs: - self.enabled = bool(kwargs["enabled"]) - if "double_coercion" in kwargs: - self.double_coercion = 
kwargs["double_coercion"] - - self._reconfigure_common_parameters(**kwargs) - - self.set_client(**kwargs) - def jiggle_randomness(self, prompt_config: dict, offset: float = 0.3) -> dict: """ adjusts temperature and presence penalty by random values using the base value as a center diff --git a/src/talemate/client/textgenwebui.py b/src/talemate/client/textgenwebui.py index 94c0a053..f25a74ac 100644 --- a/src/talemate/client/textgenwebui.py +++ b/src/talemate/client/textgenwebui.py @@ -6,7 +6,6 @@ import requests import asyncio import httpx import structlog -from openai import AsyncOpenAI from talemate.client.base import STOPPING_STRINGS, ClientBase, Defaults from talemate.client.registry import register @@ -21,6 +20,7 @@ class TextGeneratorWebuiClientDefaults(Defaults): @register() class TextGeneratorWebuiClient(ClientBase): auto_determine_prompt_template: bool = True + remote_model_locked: bool = True finalizers: list[str] = [ "finalize_llama3", "finalize_YI", @@ -81,10 +81,6 @@ class TextGeneratorWebuiClient(ClientBase): "extra_stopping_strings", ] - def __init__(self, **kwargs): - self.api_key = kwargs.pop("api_key", "") - super().__init__(**kwargs) - def tune_prompt_parameters(self, parameters: dict, kind: str): super().tune_prompt_parameters(parameters, kind) parameters["stopping_strings"] = STOPPING_STRINGS + parameters.get( @@ -98,10 +94,6 @@ class TextGeneratorWebuiClient(ClientBase): if parameters.get("min_p"): parameters["do_sample"] = True - def set_client(self, **kwargs): - self.api_key = kwargs.get("api_key", self.api_key) - self.client = AsyncOpenAI(base_url=self.api_url + "/v1", api_key="sk-1111") - def finalize_llama3(self, parameters: dict, prompt: str) -> tuple[str, bool]: if "<|eot_id|>" not in prompt: return prompt, False @@ -213,9 +205,3 @@ class TextGeneratorWebuiClient(ClientBase): prompt_config["repetition_penalty"] = random.uniform( rep_pen + min_offset * 0.3, rep_pen + offset * 0.3 ) - - def reconfigure(self, **kwargs): - if "api_key" in kwargs: - self.api_key = kwargs.pop("api_key") - - super().reconfigure(**kwargs) diff --git a/src/talemate/commands/__init__.py b/src/talemate/commands/__init__.py index 4465f439..1ca3ccc8 100644 --- a/src/talemate/commands/__init__.py +++ b/src/talemate/commands/__init__.py @@ -10,8 +10,6 @@ from .cmd_rebuild_archive import CmdRebuildArchive # noqa: F401 from .cmd_rename import CmdRename # noqa: F401 from .cmd_regenerate import CmdRegenerate # noqa: F401 from .cmd_reset import CmdReset # noqa: F401 -from .cmd_save import CmdSave # noqa: F401 -from .cmd_save_as import CmdSaveAs # noqa: F401 from .cmd_setenv import CmdSetEnvironmentToCreative, CmdSetEnvironmentToScene # noqa: F401 from .cmd_time_util import CmdAdvanceTime # noqa: F401 from .cmd_tts import CmdTestTTS # noqa: F401 diff --git a/src/talemate/commands/cmd_debug_tools.py b/src/talemate/commands/cmd_debug_tools.py index 1853e1cb..ea88adb8 100644 --- a/src/talemate/commands/cmd_debug_tools.py +++ b/src/talemate/commands/cmd_debug_tools.py @@ -51,7 +51,7 @@ class CmdLongTermMemoryStats(TalemateCommand): aliases = ["ltm_stats"] async def run(self): - memory = self.scene.get_helper("memory").agent + memory = get_agent("memory") count = await memory.count() db_name = memory.db_name diff --git a/src/talemate/commands/cmd_rebuild_archive.py b/src/talemate/commands/cmd_rebuild_archive.py index 0182c16f..be1f894b 100644 --- a/src/talemate/commands/cmd_rebuild_archive.py +++ b/src/talemate/commands/cmd_rebuild_archive.py @@ -1,6 +1,7 @@ from talemate.commands.base import 
TalemateCommand from talemate.commands.manager import register from talemate.emit import emit +from talemate.instance import get_agent @register @@ -14,8 +15,8 @@ class CmdRebuildArchive(TalemateCommand): aliases = ["rebuild"] async def run(self): - summarizer = self.scene.get_helper("summarizer") - memory = self.scene.get_helper("memory") + summarizer = get_agent("summarizer") + memory = get_agent("memory") if not summarizer: self.system_message("No summarizer found") diff --git a/src/talemate/commands/cmd_save.py b/src/talemate/commands/cmd_save.py deleted file mode 100644 index bb82cee5..00000000 --- a/src/talemate/commands/cmd_save.py +++ /dev/null @@ -1,17 +0,0 @@ -from talemate.commands.base import TalemateCommand -from talemate.commands.manager import register - - -@register -class CmdSave(TalemateCommand): - """ - Command class for the 'save' command - """ - - name = "save" - description = "Save the scene" - aliases = ["s"] - sets_scene_unsaved = False - - async def run(self): - await self.scene.save() diff --git a/src/talemate/commands/cmd_save_as.py b/src/talemate/commands/cmd_save_as.py deleted file mode 100644 index 53c7479d..00000000 --- a/src/talemate/commands/cmd_save_as.py +++ /dev/null @@ -1,17 +0,0 @@ -from talemate.commands.base import TalemateCommand -from talemate.commands.manager import register - - -@register -class CmdSaveAs(TalemateCommand): - """ - Command class for the 'save_as' command - """ - - name = "save_as" - description = "Save the scene with a new name" - aliases = ["sa"] - sets_scene_unsaved = False - - async def run(self): - await self.scene.save(save_as=True) diff --git a/src/talemate/config/__init__.py b/src/talemate/config/__init__.py new file mode 100644 index 00000000..7b48d0f3 --- /dev/null +++ b/src/talemate/config/__init__.py @@ -0,0 +1,11 @@ +from .state import get_config, save_config, cleanup, update_config, commit_config +from .schema import Config + +__all__ = [ + "get_config", + "save_config", + "cleanup", + "Config", + "update_config", + "commit_config", +] diff --git a/src/talemate/config.py b/src/talemate/config/schema.py similarity index 67% rename from src/talemate/config.py rename to src/talemate/config/schema.py index df2db8d5..b528a92a 100644 --- a/src/talemate/config.py +++ b/src/talemate/config/schema.py @@ -4,45 +4,66 @@ from typing import TYPE_CHECKING, Any, ClassVar, Dict, Optional, TypeVar, Union, import pydantic import structlog -import yaml -from pydantic import BaseModel from typing_extensions import Annotated -from talemate.agents.registry import get_agent_class +import talemate.emit.async_signals as async_signals + from talemate.client.registry import get_client_class from talemate.client.system_prompts import SystemPrompts -from talemate.emit import emit from talemate.scene_assets import Asset +from talemate.path import SCENES_DIR + if TYPE_CHECKING: from talemate.tale_mate import Scene log = structlog.get_logger("talemate.config") - -def scenes_dir(): - relative_path = os.path.join( - os.path.dirname(os.path.realpath(__file__)), - "..", - "..", - "scenes", - ) - return os.path.abspath(relative_path) +async_signals.register( + "config.changed", + "config.changed.follow", +) -class Client(BaseModel): +class Client(pydantic.BaseModel): + """ + LLM Client configuration + """ + + # clien type/provider (e.g., openai, anthropic, etc.) 
type: str name: str model: Union[str, None] = None api_url: Union[str, None] = None api_key: Union[str, None] = None + # max input tokens to send with a generation request max_token_length: int = 8192 + + # prefill text for ALL requests double_coercion: Union[str, None] = None + + # max requests per minute rate_limit: Union[int, None] = None + + # expected data structure format in responses data_format: Literal["json", "yaml"] | None = None + enabled: bool = True + # whether or not to enable reasoning + reason_enabled: bool = False + + # add extra allowance for response tokens + # this is useful for when the model generates visible thinking + # tokens. + reason_tokens: int = 0 + + # regex to strip from the response if the model is reasoning + reason_response_pattern: Union[str, None] = None + system_prompts: SystemPrompts = SystemPrompts() + + # inference preset group to use for this client preset_group: str | None = None class Config: @@ -52,16 +73,16 @@ class Client(BaseModel): ClientType = TypeVar("ClientType", bound=Client) -class AgentActionConfig(BaseModel): - value: Union[int, float, str, bool, list[bool | str | int | float], None] = None +class AgentActionConfig(pydantic.BaseModel): + value: Union[int, float, str, bool, list, None] = None -class AgentAction(BaseModel): +class AgentAction(pydantic.BaseModel): enabled: bool = True config: Union[dict[str, AgentActionConfig], None] = None -class Agent(BaseModel): +class Agent(pydantic.BaseModel): name: Union[str, None] = None client: Union[str, None] = None actions: Union[dict[str, AgentAction], None] = None @@ -77,7 +98,7 @@ class Agent(BaseModel): return super().model_dump(exclude_none=True) -class GamePlayerCharacter(BaseModel): +class GamePlayerCharacter(pydantic.BaseModel): name: str = "" color: str = "#3362bb" gender: str = "" @@ -87,14 +108,14 @@ class GamePlayerCharacter(BaseModel): extra = "ignore" -class General(BaseModel): +class General(pydantic.BaseModel): auto_save: bool = True auto_progress: bool = True - max_backscroll: int = 512 + max_backscroll: int = 100 add_default_character: bool = True -class StateReinforcementTemplate(BaseModel): +class StateReinforcementTemplate(pydantic.BaseModel): name: str query: str state_type: str = "npc" @@ -108,7 +129,7 @@ class StateReinforcementTemplate(BaseModel): type: ClassVar = "state_reinforcement" -class WorldStateTemplates(BaseModel): +class WorldStateTemplates(pydantic.BaseModel): state_reinforcement: dict[str, StateReinforcementTemplate] = pydantic.Field( default_factory=dict ) @@ -117,11 +138,11 @@ class WorldStateTemplates(BaseModel): return self.state_reinforcement.get(name) -class WorldState(BaseModel): +class WorldState(pydantic.BaseModel): templates: WorldStateTemplates = WorldStateTemplates() -class Game(BaseModel): +class Game(pydantic.BaseModel): default_player_character: GamePlayerCharacter = GamePlayerCharacter() general: General = General() world_state: WorldState = WorldState() @@ -130,71 +151,60 @@ class Game(BaseModel): extra = "ignore" -class CreatorConfig(BaseModel): +class CreatorConfig(pydantic.BaseModel): content_context: list[str] = [ "a fun and engaging slice of life story aimed at an adult audience." 
] -class OpenAIConfig(BaseModel): +class OpenAIConfig(pydantic.BaseModel): api_key: Union[str, None] = None -class MistralAIConfig(BaseModel): +class MistralAIConfig(pydantic.BaseModel): api_key: Union[str, None] = None -class AnthropicConfig(BaseModel): +class AnthropicConfig(pydantic.BaseModel): api_key: Union[str, None] = None -class CohereConfig(BaseModel): +class CohereConfig(pydantic.BaseModel): api_key: Union[str, None] = None -class GroqConfig(BaseModel): +class GroqConfig(pydantic.BaseModel): api_key: Union[str, None] = None -class DeepSeekConfig(BaseModel): +class DeepSeekConfig(pydantic.BaseModel): api_key: Union[str, None] = None -class OpenRouterConfig(BaseModel): +class OpenRouterConfig(pydantic.BaseModel): api_key: Union[str, None] = None -class RunPodConfig(BaseModel): +class RunPodConfig(pydantic.BaseModel): api_key: Union[str, None] = None -class ElevenLabsConfig(BaseModel): +class ElevenLabsConfig(pydantic.BaseModel): api_key: Union[str, None] = None model: str = "eleven_turbo_v2" -class CoquiConfig(BaseModel): +class CoquiConfig(pydantic.BaseModel): api_key: Union[str, None] = None -class GoogleConfig(BaseModel): +class GoogleConfig(pydantic.BaseModel): gcloud_credentials_path: Union[str, None] = None gcloud_location: Union[str, None] = None api_key: Union[str, None] = None -class TTSVoiceSamples(BaseModel): - label: str - value: str - - -class TTSConfig(BaseModel): - device: str = "cuda" - model: str = "tts_models/multilingual/multi-dataset/xtts_v2" - voices: list[TTSVoiceSamples] = pydantic.Field(default_factory=list) - - -class RecentScene(BaseModel): +class RecentScene(pydantic.BaseModel): name: str path: str filename: str @@ -202,7 +212,7 @@ class RecentScene(BaseModel): cover_image: Union[Asset, None] = None -class EmbeddingFunctionPreset(BaseModel): +class EmbeddingFunctionPreset(pydantic.BaseModel): embeddings: str = "sentence-transformer" model: str = "all-MiniLM-L6-v2" trust_remote_code: bool = False @@ -263,7 +273,7 @@ def generate_chromadb_presets() -> dict[str, EmbeddingFunctionPreset]: } -class InferenceParameters(BaseModel): +class InferenceParameters(pydantic.BaseModel): temperature: float = 1.0 temperature_last: bool = True top_p: float | None = 1.0 @@ -290,7 +300,7 @@ class InferenceParameters(BaseModel): changed: bool = False -class InferencePresets(BaseModel): +class InferencePresets(pydantic.BaseModel): analytical: InferenceParameters = InferenceParameters( temperature=0.7, presence_penalty=0, @@ -324,12 +334,12 @@ class InferencePresets(BaseModel): ) -class InferencePresetGroup(BaseModel): +class InferencePresetGroup(pydantic.BaseModel): name: str presets: InferencePresets -class Presets(BaseModel): +class Presets(pydantic.BaseModel): inference_defaults: InferencePresets = InferencePresets() inference: InferencePresets = InferencePresets() @@ -353,9 +363,7 @@ def gnerate_intro_scenes(): scenes = [ RecentScene( name="Simulation Suite V2", - path=os.path.join( - scenes_dir(), "simulation-suite-v2", "the-simulation-suite.json" - ), + path=str(SCENES_DIR / "simulation-suite-v2" / "the-simulation-suite.json"), filename="the-simulation-suite.json", date=datetime.datetime.now().isoformat(), cover_image=Asset( @@ -366,7 +374,7 @@ def gnerate_intro_scenes(): ), RecentScene( name="Infinity Quest", - path=os.path.join(scenes_dir(), "infinity-quest", "infinity-quest.json"), + path=str(SCENES_DIR / "infinity-quest" / "infinity-quest.json"), filename="infinity-quest.json", date=datetime.datetime.now().isoformat(), cover_image=Asset( @@ -377,8 +385,8 @@ def 
gnerate_intro_scenes(): ), RecentScene( name="Infinity Quest Dynamic Story", - path=os.path.join( - scenes_dir(), "infinity-quest-dynamic-story-v2", "infinity-quest.json" + path=str( + SCENES_DIR / "infinity-quest-dynamic-story-v2" / "infinity-quest.json" ), filename="infinity-quest.json", date=datetime.datetime.now().isoformat(), @@ -393,7 +401,7 @@ def gnerate_intro_scenes(): return scenes -class RecentScenes(BaseModel): +class RecentScenes(pydantic.BaseModel): scenes: list[RecentScene] = pydantic.Field(default_factory=gnerate_intro_scenes) max_entries: int = 10 @@ -473,7 +481,7 @@ AnnotatedClient = Annotated[ ] -class HistoryMessageStyle(BaseModel): +class HistoryMessageStyle(pydantic.BaseModel): italic: bool = False bold: bool = False @@ -486,7 +494,7 @@ class HidableHistoryMessageStyle(HistoryMessageStyle): show: bool = True -class SceneAppearance(BaseModel): +class SceneAppearance(pydantic.BaseModel): narrator_messages: HistoryMessageStyle = HistoryMessageStyle(italic=True) character_messages: HistoryMessageStyle = HistoryMessageStyle() director_messages: HidableHistoryMessageStyle = HidableHistoryMessageStyle() @@ -496,11 +504,11 @@ class SceneAppearance(BaseModel): ) -class Appearance(BaseModel): +class Appearance(pydantic.BaseModel): scene: SceneAppearance = SceneAppearance() -class Config(BaseModel): +class Config(pydantic.BaseModel): clients: Dict[str, AnnotatedClient] = {} game: Game = Game() @@ -531,8 +539,6 @@ class Config(BaseModel): coqui: CoquiConfig = CoquiConfig() - tts: TTSConfig = TTSConfig() - recent_scenes: RecentScenes = RecentScenes() presets: Presets = Presets() @@ -541,146 +547,18 @@ class Config(BaseModel): system_prompts: SystemPrompts = SystemPrompts() + dirty: bool = pydantic.Field(default=False, exclude=True) + class Config: extra = "ignore" - def save(self, file_path: str = "./config.yaml"): - save_config(self, file_path) + async def set_dirty(self): + self.dirty = True + await async_signals.get("config.changed").send(self) + await async_signals.get("config.changed.follow").send(self) -class SceneAssetUpload(BaseModel): +class SceneAssetUpload(pydantic.BaseModel): scene_cover_image: bool character_cover_image: str | None = None content: str = None - - -def load_config( - file_path: str = "./config.yaml", as_model: bool = False -) -> Union[dict, Config]: - """ - Load the config file from the given path. - - Should cache the config and only reload if the file modification time - has changed since the last load - """ - with open(file_path, "r") as file: - config_data = yaml.safe_load(file) - - try: - config = Config(**config_data) - config.recent_scenes.clean() - except pydantic.ValidationError as e: - log.error("config validation", error=e) - return None - - if as_model: - return config - - return config.model_dump() - - -def save_config(config, file_path: str = "./config.yaml"): - """ - Save the config file to the given path. 
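As context for the save-path changes above: `Config.save()` and the module-level `save_config(config, file_path)` are removed in favor of a dirty flag plus async signals, with the actual write deferred to `commit_config()` in the new `config/state.py` further down. A minimal usage sketch, assuming talemate is importable and using the signal API shown elsewhere in this diff (`on_config_changed_example` and `toggle_auto_save` are hypothetical names):

```python
# Sketch only: mark the shared config dirty and observe the change signal.
import talemate.emit.async_signals as async_signals
from talemate.config import get_config


async def on_config_changed_example(config):
    # emitted by Config.set_dirty(); persistence itself is deferred to
    # commit_config(), which only writes when config.dirty is True
    print("config changed, dirty =", config.dirty)


async_signals.get("config.changed").connect(on_config_changed_example)


async def toggle_auto_save(enabled: bool):
    config = get_config()
    config.game.general.auto_save = enabled
    await config.set_dirty()  # fires config.changed and config.changed.follow
```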
- """ - - log.debug("Saving config", file_path=file_path) - - # If config is a Config instance, convert it to a dictionary - if isinstance(config, Config): - config = config.model_dump(exclude_none=True) - elif isinstance(config, dict): - # validate - try: - config = Config(**config).model_dump(exclude_none=True) - except pydantic.ValidationError as e: - log.error("config validation", error=e) - return None - - # we dont want to persist the following, so we drop them: - # - presets.inference_defaults - # - presets.embeddings_defaults - - if "inference_defaults" in config["presets"]: - config["presets"].pop("inference_defaults") - - if "embeddings_defaults" in config["presets"]: - config["presets"].pop("embeddings_defaults") - - # for normal presets we only want to persist if they have changed - for preset_name, preset in list(config["presets"]["inference"].items()): - if not preset.get("changed"): - config["presets"]["inference"].pop(preset_name) - - # in inference groups also only keep if changed - for group_name, group in list(config["presets"]["inference_groups"].items()): - for preset_name, preset in list(group["presets"].items()): - if not preset.get("changed"): - group["presets"].pop(preset_name) - - # if presets is empty, remove it - if not config["presets"]["inference"]: - config["presets"].pop("inference") - - # if system_prompts is empty, remove it - if not config["system_prompts"]: - config.pop("system_prompts") - - # set any client preset_group to "" if it references an - # entry that no longer exists in inference_groups - for client in config["clients"].values(): - if not client.get("preset_group"): - continue - - if client["preset_group"] not in config["presets"].get("inference_groups", {}): - log.warning( - f"Client {client['name']} references non-existent preset group {client['preset_group']}, setting to default" - ) - client["preset_group"] = "" - - with open(file_path, "w") as file: - yaml.dump(config, file) - - emit("config_saved", data=config) - - -def cleanup() -> Config: - log.info("cleaning up config") - - config = load_config(as_model=True) - - cleanup_removed_clients(config) - cleanup_removed_agents(config) - - save_config(config) - - return config - - -def cleanup_removed_clients(config: Config): - """ - Will remove any clients that are no longer present - """ - - if not config: - return - - for client_in_config in list(config.clients.keys()): - client_config = config.clients[client_in_config] - if not get_client_class(client_config.type): - log.info("removing client from config", client=client_in_config) - del config.clients[client_in_config] - - -def cleanup_removed_agents(config: Config): - """ - Will remove any agents that are no longer present - """ - - if not config: - return - - for agent_in_config in list(config.agents.keys()): - if not get_agent_class(agent_in_config): - log.info("removing agent from config", agent=agent_in_config) - del config.agents[agent_in_config] diff --git a/src/talemate/config/state.py b/src/talemate/config/state.py new file mode 100644 index 00000000..56ff9d3e --- /dev/null +++ b/src/talemate/config/state.py @@ -0,0 +1,161 @@ +import structlog +import yaml +from talemate.path import CONFIG_FILE +import talemate.emit.async_signals as async_signals +from talemate.agents.registry import get_agent_class +from talemate.client.registry import get_client_class + +from .schema import Config + +log = structlog.get_logger("talemate.config") + +CONFIG = None + +async_signals.register( + "config.saved", + "config.saved.after", + 
"config.loaded", +) + + +def _load_config() -> Config: + log.debug("loading config", file_path=CONFIG_FILE) + with open(CONFIG_FILE, "r") as file: + yaml_data = yaml.safe_load(file) + return Config.model_validate(yaml_data) + + +def get_config() -> Config: + global CONFIG + if CONFIG is None: + CONFIG = _load_config() + return CONFIG + + +async def update_config(other_config: Config | dict): + if isinstance(other_config, dict): + keys = list(other_config.keys()) + other_config = Config.model_validate(other_config) + else: + keys = None + + config: Config = get_config() + + # if keys is None, do full update + if keys is None: + for field in Config.model_fields: + setattr(config, field.name, getattr(other_config, field.name)) + else: + for key in keys: + setattr(config, key, getattr(other_config, key)) + + await config.set_dirty() + + +def save_config(): + """ + Save the config file to the given path. + """ + + log.debug("Saving config", file_path=CONFIG_FILE) + + config = get_config().model_dump(exclude_none=True) + + # we dont want to persist the following, so we drop them: + # - presets.inference_defaults + # - presets.embeddings_defaults + + if "inference_defaults" in config["presets"]: + config["presets"].pop("inference_defaults") + + if "embeddings_defaults" in config["presets"]: + config["presets"].pop("embeddings_defaults") + + # for normal presets we only want to persist if they have changed + for preset_name, preset in list(config["presets"]["inference"].items()): + if not preset.get("changed"): + config["presets"]["inference"].pop(preset_name) + + # in inference groups also only keep if changed + for _, group in list(config["presets"]["inference_groups"].items()): + for preset_name, preset in list(group["presets"].items()): + if not preset.get("changed"): + group["presets"].pop(preset_name) + + # if presets is empty, remove it + if not config["presets"]["inference"]: + config["presets"].pop("inference") + + # if system_prompts is empty, remove it + if not config["system_prompts"]: + config.pop("system_prompts") + + # set any client preset_group to "" if it references an + # entry that no longer exists in inference_groups + for client in config["clients"].values(): + if not client.get("preset_group"): + continue + + if client["preset_group"] not in config["presets"].get("inference_groups", {}): + log.warning( + f"Client {client['name']} references non-existent preset group {client['preset_group']}, setting to default" + ) + client["preset_group"] = "" + + with open(CONFIG_FILE, "w") as file: + yaml.dump(config, file) + + +def cleanup_removed_clients(config: Config): + """ + Will remove any clients that are no longer present + """ + + if not config: + return + + for client_in_config in list(config.clients.keys()): + client_config = config.clients[client_in_config] + if not get_client_class(client_config.type): + log.info("removing client from config", client=client_in_config) + del config.clients[client_in_config] + + +def cleanup_removed_agents(config: Config): + """ + Will remove any agents that are no longer present + """ + + if not config: + return + + for agent_in_config in list(config.agents.keys()): + if not get_agent_class(agent_in_config): + log.info("removing agent from config", agent=agent_in_config) + del config.agents[agent_in_config] + + +def cleanup() -> Config: + log.info("cleaning up config") + + config = get_config() + + cleanup_removed_clients(config) + cleanup_removed_agents(config) + + save_config() + + return config + + +async def commit_config(): + """ + Will 
commit the config to the file + """ + + config = get_config() + if not config.dirty: + return + + save_config() + config.dirty = False diff --git a/src/talemate/emit/signals.py b/src/talemate/emit/signals.py index 35150312..635ab1fb 100644 --- a/src/talemate/emit/signals.py +++ b/src/talemate/emit/signals.py @@ -38,8 +38,6 @@ AudioQueue = signal("audio_queue") MessageEdited = signal("message_edited") -ConfigSaved = signal("config_saved") - ImageGenerated = signal("image_generated") ImageGenerationFailed = signal("image_generation_failed") @@ -79,7 +77,6 @@ handlers = { "message_edited": MessageEdited, "prompt_sent": PromptSent, "audio_queue": AudioQueue, - "config_saved": ConfigSaved, "status": StatusMessage, "image_generated": ImageGenerated, "image_generation_failed": ImageGenerationFailed, diff --git a/src/talemate/exceptions.py b/src/talemate/exceptions.py index adc10ef8..dd5a2555 100644 --- a/src/talemate/exceptions.py +++ b/src/talemate/exceptions.py @@ -1,3 +1,22 @@ +__all__ = [ + "TalemateError", + "TalemateInterrupt", + "ExitScene", + "RestartSceneLoop", + "ResetScene", + "GenerationCancelled", + "GenerationProcessingError", + "ReasoningResponseError", + "RenderPromptError", + "LLMAccuracyError", + "SceneInactiveError", + "UnknownDataSpec", + "ActedAsCharacter", + "AbortCommand", + "AbortWaitForInput", +] + + class TalemateError(Exception): pass @@ -42,6 +61,25 @@ class GenerationCancelled(TalemateInterrupt): pass +class GenerationProcessingError(TalemateError): + """ + Exception to raise when there is an error processing a generation + """ + + pass + + +class ReasoningResponseError(GenerationProcessingError): + """ + Exception to raise when there is an error processing a reasoning response + """ + + def __init__(self): + super().__init__( + "Reasoning response pattern not found in response - this means that either the pattern is wrong, the reasoning budget is too low or the model does not support reasoning." 
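A brief, hypothetical handling sketch for the new exception hierarchy introduced above (the `strip_reasoning` helper is illustrative only; the class names come from this diff):

```python
from talemate.exceptions import GenerationProcessingError, ReasoningResponseError


def strip_reasoning(response: str, pattern_matched: bool) -> str:
    # stand-in for the real post-processing step: raise when the configured
    # reason_response_pattern never matched the model output
    if not pattern_matched:
        raise ReasoningResponseError()
    return response


try:
    strip_reasoning("final answer", pattern_matched=False)
except GenerationProcessingError as exc:
    # ReasoningResponseError subclasses GenerationProcessingError,
    # so a broad handler catches both
    print(f"generation post-processing failed: {exc}")
```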
+ ) + + class RenderPromptError(TalemateError): """ Exception to raise when there is an error rendering a prompt diff --git a/src/talemate/export.py b/src/talemate/export.py index 0157ef91..8101ea24 100644 --- a/src/talemate/export.py +++ b/src/talemate/export.py @@ -4,21 +4,32 @@ Functions that facilitate exporting of a talemate scene import base64 import enum +import os +import shutil +import tempfile +import zipfile +from pathlib import Path +from typing import Union import pydantic +import structlog from talemate.tale_mate import Scene +log = structlog.get_logger("talemate.export") + __all__ = [ "ExportFormat", "ExportOptions", "export", "export_talemate", + "export_talemate_complete", ] class ExportFormat(str, enum.Enum): talemate = "talemate" + talemate_complete = "talemate_complete" class ExportOptions(pydantic.BaseModel): @@ -29,22 +40,28 @@ class ExportOptions(pydantic.BaseModel): name: str format: ExportFormat = ExportFormat.talemate reset_progress: bool = True + include_assets: bool = True + include_nodes: bool = True + include_info: bool = True + include_templates: bool = True -async def export(scene: Scene, options: ExportOptions): +async def export(scene: Scene, options: ExportOptions) -> Union[str, bytes]: """ Export a scene """ if options.format == ExportFormat.talemate: return await export_talemate(scene, options) + elif options.format == ExportFormat.talemate_complete: + return await export_talemate_complete(scene, options) raise ValueError(f"Unsupported export format: {options.format}") async def export_talemate(scene: Scene, options: ExportOptions) -> str: """ - Export a scene in talemate format + Export a scene in talemate format (JSON only, legacy format) """ # Reset progress if options.reset_progress: @@ -52,10 +69,164 @@ async def export_talemate(scene: Scene, options: ExportOptions) -> str: # Export scene - # json strng + # json string scene_json = scene.json # encode base64 scene_base64 = base64.b64encode(scene_json.encode()).decode() return scene_base64 + + +async def export_talemate_complete(scene: Scene, options: ExportOptions) -> bytes: + """ + Export a complete scene in ZIP format including all assets, nodes, info, and templates + """ + # Reset progress + if options.reset_progress: + scene.reset() + + log.info( + "Starting complete scene export", + scene_name=scene.name, + options=options.model_dump(), + ) + + # Create temporary directory for export + with tempfile.TemporaryDirectory() as temp_dir: + temp_path = Path(temp_dir) + + # Export main scene JSON + scene_json_path = temp_path / "scene.json" + with open(scene_json_path, "w", encoding="utf-8") as f: + f.write(scene.json) + + log.debug("Exported scene JSON", path=scene_json_path) + + # Copy assets directory if it exists and option is enabled + if options.include_assets and scene.assets: + try: + assets_source = Path(scene.assets.asset_directory) + if assets_source.exists(): + assets_dest = temp_path / "assets" + shutil.copytree(assets_source, assets_dest) + log.debug( + "Copied assets directory", + source=assets_source, + dest=assets_dest, + ) + else: + log.debug("Assets directory does not exist", path=assets_source) + except Exception as e: + log.warning("Failed to copy assets directory", error=str(e)) + + # Copy nodes directory if it exists and option is enabled + if options.include_nodes: + try: + nodes_source = Path(scene.save_dir) / "nodes" + if nodes_source.exists(): + nodes_dest = temp_path / "nodes" + shutil.copytree(nodes_source, nodes_dest) + log.debug( + "Copied nodes directory", 
source=nodes_source, dest=nodes_dest + ) + else: + log.debug("Nodes directory does not exist", path=nodes_source) + except Exception as e: + log.warning("Failed to copy nodes directory", error=str(e)) + + # Copy info directory if it exists and option is enabled + if options.include_info: + try: + info_source = Path(scene.save_dir) / "info" + if info_source.exists(): + info_dest = temp_path / "info" + shutil.copytree(info_source, info_dest) + log.debug( + "Copied info directory", source=info_source, dest=info_dest + ) + else: + log.debug("Info directory does not exist", path=info_source) + except Exception as e: + log.warning("Failed to copy info directory", error=str(e)) + + # Copy templates directory if it exists and option is enabled + if options.include_templates: + try: + templates_source = Path(scene.save_dir) / "templates" + if templates_source.exists(): + templates_dest = temp_path / "templates" + shutil.copytree(templates_source, templates_dest) + log.debug( + "Copied templates directory", + source=templates_source, + dest=templates_dest, + ) + else: + log.debug( + "Templates directory does not exist", path=templates_source + ) + except Exception as e: + log.warning("Failed to copy templates directory", error=str(e)) + + # Copy restore file if it exists and is set + if scene.restore_from: + try: + restore_source = Path(scene.save_dir) / scene.restore_from + if restore_source.exists(): + # Copy to root of ZIP (same level as scene.json) + restore_dest = temp_path / scene.restore_from + shutil.copy2(restore_source, restore_dest) + log.debug( + "Copied restore file", + source=restore_source, + dest=restore_dest, + filename=scene.restore_from, + ) + else: + log.warning( + "Restore file does not exist", + path=restore_source, + filename=scene.restore_from, + ) + except Exception as e: + log.warning( + "Failed to copy restore file", + error=str(e), + filename=scene.restore_from, + ) + + # Create ZIP file + with tempfile.NamedTemporaryFile(suffix=".zip", delete=False) as temp_zip: + temp_zip_path = temp_zip.name + + try: + with zipfile.ZipFile( + temp_zip_path, "w", zipfile.ZIP_DEFLATED, compresslevel=6 + ) as zipf: + # Add all files from temp directory to ZIP + for root, _, files in os.walk(temp_path): + for file in files: + file_path = Path(root) / file + # Calculate relative path from temp_path + arcname = file_path.relative_to(temp_path) + zipf.write(file_path, arcname) + log.debug("Added file to ZIP", file=arcname) + + # Read ZIP file into memory + with open(temp_zip_path, "rb") as f: + zip_bytes = f.read() + + log.info( + "Complete scene export finished", + scene_name=scene.name, + zip_size=len(zip_bytes), + files_count=len(list(temp_path.rglob("*"))), + ) + + return zip_bytes + + finally: + # Clean up temporary ZIP file + if os.path.exists(temp_zip_path): + os.unlink(temp_zip_path) diff --git a/src/talemate/game/engine/nodes/focal.py b/src/talemate/game/engine/nodes/focal.py index af24709d..15e773ea 100644 --- a/src/talemate/game/engine/nodes/focal.py +++ b/src/talemate/game/engine/nodes/focal.py @@ -55,6 +55,7 @@ class Focal(Node): Properties: - template: The prompt template name - max_calls: The maximum number of calls to make + - response_length: The maximum length of the response Outputs: - state: The current graph state @@ -90,6 +91,16 @@ class Focal(Node): max=10, ) + response_length = PropertyField( + name="response_length", + description="The maximum length of the response", + type="int", + default=1024, + step=128, + min=1, + max=8192, + ) + def __init__(self, title="AI 
Function Calling", **kwargs): super().__init__(title=title, **kwargs) @@ -104,6 +115,7 @@ class Focal(Node): self.set_property("template", UNRESOLVED) self.set_property("max_calls", 1) self.set_property("retries", 0) + self.set_property("response_length", 1024) self.add_output("state") self.add_output("calls", socket_type="list") @@ -119,6 +131,7 @@ class Focal(Node): template_vars = self.get_input_value("template_vars") max_calls = self.require_number_input("max_calls", types=(int,)) retries = self.require_number_input("retries", types=(int,)) + response_length = self.require_number_input("response_length", types=(int,)) if not hasattr(agent, "client"): raise InputValueError( @@ -144,9 +157,11 @@ class Focal(Node): max_calls=max_calls, scene=scene, retries=retries, + response_length=response_length, vars={ "scene_loop": state.shared.get("scene_loop", {}), "local": state.data, + "response_length": response_length, }, **template_vars, ) diff --git a/src/talemate/game/engine/nodes/history.py b/src/talemate/game/engine/nodes/history.py index 9578d51b..daf22976 100644 --- a/src/talemate/game/engine/nodes/history.py +++ b/src/talemate/game/engine/nodes/history.py @@ -147,6 +147,29 @@ class PopHistory(Node): self.set_output_values({"message": message}) +@register("scene/history/HasHistory") +class HasHistory(Node): + """ + Check if the scene has history + """ + + def __init__(self, title="Scene Has History", **kwargs): + super().__init__(title=title, **kwargs) + + def setup(self): + self.add_output("has_history", socket_type="bool") + + async def run(self, state: GraphState): + scene: "Scene" = active_scene.get() + + messages: scene_message.SceneMessage | None = scene.last_message_of_type( + ["character", "narrator", "context_investigation"], + max_iterations=100, + ) + + self.set_output_values({"has_history": messages is not None}) + + @register("scene/history/LastMessageOfType") class LastMessageOfType(Node): """ diff --git a/src/talemate/game/engine/nodes/modules/scene/dynamic-storyline.json b/src/talemate/game/engine/nodes/modules/scene/dynamic-storyline.json index 2e105b7e..fab4cee6 100644 --- a/src/talemate/game/engine/nodes/modules/scene/dynamic-storyline.json +++ b/src/talemate/game/engine/nodes/modules/scene/dynamic-storyline.json @@ -20,7 +20,7 @@ "scope": "game" }, "x": 733, - "y": 1031, + "y": 1066, "width": 254, "height": 146, "collapsed": false, @@ -51,8 +51,8 @@ "name": "topic", "scope": "local" }, - "x": 27, - "y": 1577, + "x": 26, + "y": 1612, "width": 210, "height": 122, "collapsed": false, @@ -60,34 +60,6 @@ "registry": "state/GetState", "base_type": "core/Node" }, - "db171bea-e4a0-48b9-9a98-a7cd354d8fcf": { - "title": "Get Scene State", - "id": "db171bea-e4a0-48b9-9a98-a7cd354d8fcf", - "properties": {}, - "x": 24, - "y": 79, - "width": 140, - "height": 106, - "collapsed": false, - "inherited": false, - "registry": "scene/GetSceneState", - "base_type": "core/Node" - }, - "97204757-5fb9-4d81-ac90-20825b9a1b32": { - "title": "GET obj.history", - "id": "97204757-5fb9-4d81-ac90-20825b9a1b32", - "properties": { - "attribute": "history" - }, - "x": 204, - "y": 79, - "width": 210, - "height": 98, - "collapsed": false, - "inherited": false, - "registry": "data/Get", - "base_type": "core/Node" - }, "a14b3571-899f-47e6-90cd-6bf0fec6e4bb": { "title": "Set Introduction", "id": "a14b3571-899f-47e6-90cd-6bf0fec6e4bb", @@ -95,8 +67,8 @@ "introduction": null, "emit_history": true }, - "x": 1049, - "y": 1464, + "x": 1048, + "y": 1499, "width": 210, "height": 102, "collapsed": false, @@ -108,8 
+80,8 @@ "title": "Theme", "id": "7438bc9b-4b38-4c5c-a9a8-df5ebf098691", "properties": {}, - "x": 1109, - "y": 1704, + "x": 1108, + "y": 1739, "width": 140, "height": 26, "collapsed": false, @@ -123,8 +95,8 @@ "properties": { "pass_through": false }, - "x": 279, - "y": 1434, + "x": 278, + "y": 1469, "width": 210, "height": 78, "collapsed": false, @@ -139,8 +111,8 @@ "name": "intro_generated", "scope": "game" }, - "x": 519, - "y": 1514, + "x": 518, + "y": 1549, "width": 210, "height": 122, "collapsed": false, @@ -155,39 +127,8 @@ "name": "intro_generated", "scope": "game" }, - "x": 27, - "y": 1387, - "width": 210, - "height": 122, - "collapsed": false, - "inherited": false, - "registry": "state/GetState", - "base_type": "core/Node" - }, - "be9bdeff-2232-4e00-894c-6476c4efdbc7": { - "title": "Switch", - "id": "be9bdeff-2232-4e00-894c-6476c4efdbc7", - "properties": { - "pass_through": false - }, - "x": 879, - "y": 76, - "width": 210, - "height": 78, - "collapsed": false, - "inherited": false, - "registry": "core/Switch", - "base_type": "core/Node" - }, - "912201d4-2678-45d4-a032-12707836c723": { - "title": "GET local.abort", - "id": "912201d4-2678-45d4-a032-12707836c723", - "properties": { - "name": "abort", - "scope": "local" - }, "x": 26, - "y": 608, + "y": 1422, "width": 210, "height": 122, "collapsed": false, @@ -202,7 +143,7 @@ "pass_through": false }, "x": 275, - "y": 628, + "y": 662, "width": 210, "height": 78, "collapsed": false, @@ -217,7 +158,7 @@ "exception": "StopGraphExecution" }, "x": 535, - "y": 638, + "y": 672, "width": 269, "height": 78, "collapsed": false, @@ -231,8 +172,8 @@ "properties": { "value": true }, - "x": 1004, - "y": 218, + "x": 526, + "y": 308, "width": 210, "height": 58, "collapsed": true, @@ -247,8 +188,8 @@ "name": "intro_generated", "scope": "game" }, - "x": 1164, - "y": 338, + "x": 684, + "y": 375, "width": 210, "height": 122, "collapsed": false, @@ -262,8 +203,8 @@ "properties": { "pass_through": false }, - "x": 1404, - "y": 348, + "x": 924, + "y": 385, "width": 210, "height": 78, "collapsed": true, @@ -278,8 +219,8 @@ "name": "intro_generated", "scope": "game" }, - "x": 1690, - "y": 239, + "x": 1210, + "y": 276, "width": 210, "height": 142, "collapsed": false, @@ -291,8 +232,8 @@ "title": "AND Router", "id": "99f3eba0-1baa-4f59-af90-920e8e6c1e93", "properties": {}, - "x": 1520, - "y": 249, + "x": 1040, + "y": 286, "width": 140, "height": 106, "collapsed": false, @@ -307,8 +248,8 @@ "name": "abort", "scope": "local" }, - "x": 1170, - "y": 139, + "x": 690, + "y": 176, "width": 210, "height": 142, "collapsed": false, @@ -324,8 +265,8 @@ "status": "error", "as_scene_message": true }, - "x": 1950, - "y": 239, + "x": 1470, + "y": 276, "width": 210, "height": 166, "collapsed": false, @@ -339,8 +280,8 @@ "properties": { "value": "Scene already has history, cannot generate random story premise." 
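For readability of the node-graph diff around this point: the removed GetSceneState → GET obj.history → Length → Compare chain is replaced by the single `scene/history/HasHistory` node registered earlier in this diff, wired straight into the existing Switch. Its check is roughly equivalent to the following sketch (mirrors the node's `run` method shown above):

```python
# Rough Python equivalent of the new HasHistory node's output (sketch only).
def scene_has_history(scene) -> bool:
    # look back through recent entries for any character, narrator
    # or context_investigation message
    message = scene.last_message_of_type(
        ["character", "narrator", "context_investigation"],
        max_iterations=100,
    )
    return message is not None
```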
}, - "x": 1690, - "y": 439, + "x": 1210, + "y": 476, "width": 210, "height": 58, "collapsed": false, @@ -348,21 +289,6 @@ "registry": "data/string/MakeText", "base_type": "core/Node" }, - "07c15b6d-a275-4156-8f98-1759db36e4d0": { - "title": "Stage 1", - "id": "07c15b6d-a275-4156-8f98-1759db36e4d0", - "properties": { - "stage": 1 - }, - "x": 2197, - "y": 230, - "width": 210, - "height": 118, - "collapsed": false, - "inherited": false, - "registry": "core/Stage", - "base_type": "core/Node" - }, "afbb4f0a-5662-432b-9bc7-d30609a7d5a2": { "title": "Stage 2", "id": "afbb4f0a-5662-432b-9bc7-d30609a7d5a2", @@ -370,7 +296,7 @@ "stage": 2 }, "x": 857, - "y": 634, + "y": 668, "width": 210, "height": 118, "collapsed": false, @@ -378,37 +304,6 @@ "registry": "core/Stage", "base_type": "core/Node" }, - "3eb98fe1-b1bd-4583-bd74-23bcc155e750": { - "title": "Length", - "id": "3eb98fe1-b1bd-4583-bd74-23bcc155e750", - "properties": {}, - "x": 450, - "y": 140, - "width": 140, - "height": 26, - "collapsed": false, - "inherited": false, - "registry": "data/Length", - "base_type": "core/Node" - }, - "331ca96e-1a3c-48ea-9cfe-5b6962448063": { - "title": "Compare", - "id": "331ca96e-1a3c-48ea-9cfe-5b6962448063", - "properties": { - "operation": "greater_than", - "tolerance": 0.0001, - "a": 0, - "b": 0 - }, - "x": 630, - "y": 140, - "width": 210, - "height": 150, - "collapsed": false, - "inherited": false, - "registry": "data/number/Compare", - "base_type": "core/Node" - }, "536380db-ae7a-4ab3-bc3f-c08a47896998": { "title": "Module Style", "id": "536380db-ae7a-4ab3-bc3f-c08a47896998", @@ -434,8 +329,8 @@ "name": "analysis_instructions", "scope": "local" }, - "x": 31, - "y": 1757, + "x": 30, + "y": 1792, "width": 260, "height": 122, "collapsed": false, @@ -450,8 +345,8 @@ "name": "analysis_enabled", "scope": "local" }, - "x": 31, - "y": 1937, + "x": 30, + "y": 1972, "width": 250, "height": 122, "collapsed": false, @@ -466,8 +361,8 @@ "name": "intro_length", "scope": "local" }, - "x": 31, - "y": 2117, + "x": 30, + "y": 2152, "width": 250, "height": 122, "collapsed": false, @@ -479,8 +374,8 @@ "title": "Generate Storyline", "id": "958c9db5-6df8-4d04-919d-c0a4b42fe3f8", "properties": {}, - "x": 771, - "y": 1676, + "x": 770, + "y": 1711, "width": 228, "height": 106, "collapsed": false, @@ -494,8 +389,8 @@ "properties": { "stage": 4 }, - "x": 1311, - "y": 1446, + "x": 1310, + "y": 1481, "width": 210, "height": 118, "collapsed": false, @@ -511,7 +406,7 @@ "emit_history": true }, "x": 1019, - "y": 1046, + "y": 1081, "width": 210, "height": 102, "collapsed": false, @@ -526,7 +421,7 @@ "pass_through": false }, "x": 266, - "y": 1133, + "y": 1168, "width": 210, "height": 78, "collapsed": false, @@ -542,7 +437,7 @@ "scope": "game" }, "x": 26, - "y": 1133, + "y": 1168, "width": 210, "height": 122, "collapsed": false, @@ -558,7 +453,7 @@ "scope": "local" }, "x": 26, - "y": 913, + "y": 948, "width": 210, "height": 122, "collapsed": false, @@ -573,7 +468,7 @@ "pass_through": true }, "x": 276, - "y": 933, + "y": 968, "width": 210, "height": 78, "collapsed": false, @@ -586,7 +481,7 @@ "id": "0c73a403-ada1-45fa-ac3d-9a4890f4c9d0", "properties": {}, "x": 546, - "y": 1033, + "y": 1068, "width": 140, "height": 106, "collapsed": false, @@ -601,7 +496,7 @@ "stage": 3 }, "x": 1266, - "y": 1043, + "y": 1078, "width": 210, "height": 118, "collapsed": false, @@ -802,6 +697,65 @@ "inherited": false, "registry": "core/ModuleProperty", "base_type": "core/Node" + }, + "2d28f380-7998-452a-be35-bdb0fc2367f5": { + "title": "Has History", + "id": 
"2d28f380-7998-452a-be35-bdb0fc2367f5", + "properties": {}, + "x": 65, + "y": 259, + "width": 140, + "height": 26, + "collapsed": false, + "inherited": false, + "registry": "scene/history/HasHistory", + "base_type": "core/Node" + }, + "be9bdeff-2232-4e00-894c-6476c4efdbc7": { + "title": "Switch", + "id": "be9bdeff-2232-4e00-894c-6476c4efdbc7", + "properties": { + "pass_through": false + }, + "x": 265, + "y": 249, + "width": 210, + "height": 78, + "collapsed": false, + "inherited": false, + "registry": "core/Switch", + "base_type": "core/Node" + }, + "07c15b6d-a275-4156-8f98-1759db36e4d0": { + "title": "Stage 1", + "id": "07c15b6d-a275-4156-8f98-1759db36e4d0", + "properties": { + "stage": 1 + }, + "x": 1714, + "y": 265, + "width": 210, + "height": 118, + "collapsed": false, + "inherited": false, + "registry": "core/Stage", + "base_type": "core/Node" + }, + "912201d4-2678-45d4-a032-12707836c723": { + "title": "GET local.abort", + "id": "912201d4-2678-45d4-a032-12707836c723", + "properties": { + "name": "abort", + "scope": "local" + }, + "x": 30, + "y": 644, + "width": 210, + "height": 122, + "collapsed": false, + "inherited": false, + "registry": "state/GetState", + "base_type": "core/Node" } }, "edges": { @@ -814,12 +768,6 @@ "2a4e0aed-e85a-4fba-ae30-630c71f91db9.value": [ "958c9db5-6df8-4d04-919d-c0a4b42fe3f8.topic" ], - "db171bea-e4a0-48b9-9a98-a7cd354d8fcf.scene": [ - "97204757-5fb9-4d81-ac90-20825b9a1b32.object" - ], - "97204757-5fb9-4d81-ac90-20825b9a1b32.value": [ - "3eb98fe1-b1bd-4583-bd74-23bcc155e750.object" - ], "a14b3571-899f-47e6-90cd-6bf0fec6e4bb.state": [ "ca241a90-daa3-40e4-9b7a-b857c334cd39.state" ], @@ -833,12 +781,6 @@ "37313611-7de1-478b-8840-e5ea7ba856fe.value": [ "11965433-d217-47d1-bc37-8808426798ce.value" ], - "be9bdeff-2232-4e00-894c-6476c4efdbc7.yes": [ - "626e25be-9d1f-460a-9611-7adc1a1fb28f.state" - ], - "912201d4-2678-45d4-a032-12707836c723.value": [ - "91636e7d-d05b-4f5b-a234-6e4a1e6a6412.value" - ], "91636e7d-d05b-4f5b-a234-6e4a1e6a6412.yes": [ "4637ce9d-ac09-4f68-85d9-5e9ac9639d3d.state" ], @@ -870,12 +812,6 @@ "c3398b66-a29a-4453-b17e-d9f8f5fb7efa.value": [ "514bede6-cfda-42eb-8285-8c11181e62f6.message" ], - "3eb98fe1-b1bd-4583-bd74-23bcc155e750.length": [ - "331ca96e-1a3c-48ea-9cfe-5b6962448063.a" - ], - "331ca96e-1a3c-48ea-9cfe-5b6962448063.result": [ - "be9bdeff-2232-4e00-894c-6476c4efdbc7.value" - ], "907bc1c7-c19b-4a28-ba0c-c386bbfd0aa9.value": [ "958c9db5-6df8-4d04-919d-c0a4b42fe3f8.analysis_instructions" ], @@ -950,13 +886,22 @@ ], "3f749c9f-71b0-4e9c-b99d-859e53f20569.value": [ "c327b0c4-900e-4d27-a0b9-0d47e8479732.value" + ], + "2d28f380-7998-452a-be35-bdb0fc2367f5.has_history": [ + "be9bdeff-2232-4e00-894c-6476c4efdbc7.value" + ], + "be9bdeff-2232-4e00-894c-6476c4efdbc7.yes": [ + "626e25be-9d1f-460a-9611-7adc1a1fb28f.state" + ], + "912201d4-2678-45d4-a032-12707836c723.value": [ + "91636e7d-d05b-4f5b-a234-6e4a1e6a6412.value" ] }, "groups": [ { "title": "Generate Premise - Stage 4", "x": 1, - "y": 1283, + "y": 1318, "width": 1557, "height": 980, "color": "#3f789e", @@ -976,9 +921,9 @@ { "title": "Scene valid for randomized premise? 
- Stage 1", "x": 1, - "y": -3, - "width": 2431, - "height": 528, + "y": -24, + "width": 1947, + "height": 583, "color": "#b58b2a", "font_size": 24, "inherited": false @@ -986,7 +931,7 @@ { "title": "Reset - Stage 3", "x": 1, - "y": 773, + "y": 808, "width": 1500, "height": 507, "color": "#88A", @@ -996,7 +941,7 @@ { "title": "Abort - Stage 2", "x": 1, - "y": 527, + "y": 562, "width": 1149, "height": 243, "color": "#b58b2a", @@ -1007,59 +952,59 @@ "comments": [ { "text": "SetState here so we don't infinitely try to generate premise if something goes wrong.", - "x": 519, - "y": 1364, + "x": 518, + "y": 1399, "width": 200, "inherited": false }, { "text": "The scene already has a history, we set the abort flag.", - "x": 1180, - "y": 19, + "x": 700, + "y": 56, "width": 200, "inherited": false }, { "text": "If this is the first attempt to generate a dynamic premise ...", - "x": 1480, - "y": 129, + "x": 1000, + "y": 166, "width": 200, "inherited": false }, { "text": "... flag that an intro generation attempt was made ...", - "x": 1700, - "y": 129, + "x": 1220, + "y": 166, "width": 200, "inherited": false }, { "text": "... and send a notification to the user.", - "x": 1950, - "y": 139, + "x": 1470, + "y": 176, "width": 200, "inherited": false }, - { - "text": "Checking if the scene already has messages in its history.", - "x": 470, - "y": 40, - "width": 343, - "inherited": false - }, - { - "text": "When a scene already has meessages in its history it does not make sense to generate a new dynamic premise. At least in the current iteration of this module. So this stage checks for that and initiates the abort flag if needed.", - "x": 120, - "y": 300, - "width": 408, - "inherited": false - }, { "text": "When we are generating the first introduction OR when we are resetting the process: Remove the current introductory text.", "x": 546, - "y": 853, + "y": 888, "width": 200, "inherited": false + }, + { + "text": "Checking if the scene already has messages in its history.", + "x": 45, + "y": 139, + "width": 343, + "inherited": false + }, + { + "text": "When a scene already has meessages in its history it does not make sense to generate a new dynamic premise. At least in the current iteration of this module. 
So this stage checks for that and initiates the abort flag if needed.", + "x": 37, + "y": 434, + "width": 408, + "inherited": false } ], "extends": null, diff --git a/src/talemate/game/engine/nodes/scene.py b/src/talemate/game/engine/nodes/scene.py index 9693440f..66533e51 100644 --- a/src/talemate/game/engine/nodes/scene.py +++ b/src/talemate/game/engine/nodes/scene.py @@ -223,11 +223,16 @@ class MakeCharacter(Node): actor = ActorCls(character, get_agent("conversation")) + log.warning( + "Make character", + character=character, + add_to_scene=add_to_scene, + is_active=is_active, + ) if add_to_scene: await scene.add_actor(actor) if not is_active: await deactivate_character(character) - self.set_output_values({"actor": actor, "character": character}) diff --git a/src/talemate/game/engine/nodes/string.py b/src/talemate/game/engine/nodes/string.py index bf5193c3..570a20a5 100644 --- a/src/talemate/game/engine/nodes/string.py +++ b/src/talemate/game/engine/nodes/string.py @@ -5,6 +5,24 @@ from .registry import register log = structlog.get_logger("talemate.game.engine.nodes.string") +@register("data/string/AsString") +class AsString(Node): + """ + Converts a value to a string + """ + + def __init__(self, title="As String", **kwargs): + super().__init__(title=title, **kwargs) + + def setup(self): + self.add_input("value", socket_type="any") + self.add_output("value", socket_type="str") + + async def run(self, state: GraphState): + value = self.normalized_input_value("value") + self.set_output_values({"value": str(value)}) + + @register("data/string/Make") class MakeString(Node): """Creates a string diff --git a/src/talemate/game/focal/__init__.py b/src/talemate/game/focal/__init__.py index 0aea270b..548778c3 100644 --- a/src/talemate/game/focal/__init__.py +++ b/src/talemate/game/focal/__init__.py @@ -8,7 +8,7 @@ This does NOT use API specific function calling (like openai or anthropic), but import structlog import traceback -from typing import Callable +from typing import Callable, TYPE_CHECKING from contextvars import ContextVar from talemate.client.base import ClientBase @@ -16,9 +16,14 @@ from talemate.prompts.base import Prompt from talemate.util.data import ( extract_data, ) +from talemate.instance import get_agent from .schema import Argument, Call, Callback, State + +if TYPE_CHECKING: + from talemate.agents.director import DirectorAgent + __all__ = [ "Argument", "Call", @@ -62,12 +67,14 @@ class Focal: max_calls: int = 5, retries: int = 0, schema_format: str = "json", + response_length: int = 1024, **kwargs, ): self.client = client self.context = kwargs self.max_calls = max_calls self.retries = retries + self.response_length = response_length self.state = State(schema_format=schema_format) self.callbacks = {callback.name: callback for callback in callbacks} @@ -101,7 +108,7 @@ class Focal: response = await Prompt.request( template_name, self.client, - "analyze_long", + f"analyze_{self.response_length}", vars={ **self.context, "focal": self, @@ -146,6 +153,8 @@ class Focal: calls_made = 0 + director: "DirectorAgent" = get_agent("director") + for call in calls: if calls_made >= self.max_calls: log.warning("focal.execute.max_calls_reached", max_calls=self.max_calls) @@ -165,6 +174,9 @@ class Focal: log.debug( f"focal.execute - Calling {callback.name}", arguments=call.arguments ) + + await director.log_function_call(call) + result = await callback.fn(**call.arguments) call.result = result call.called = True @@ -205,7 +217,7 @@ class Focal: _, calls_json = await Prompt.request( 
"focal.extract_calls", self.client, - "analyze_long", + f"analyze_{self.response_length}", vars={ **self.context, "text": response, diff --git a/src/talemate/instance.py b/src/talemate/instance.py index 0b1699c5..64a957c8 100644 --- a/src/talemate/instance.py +++ b/src/talemate/instance.py @@ -9,8 +9,11 @@ import structlog import talemate.agents as agents import talemate.client as clients import talemate.client.bootstrap as bootstrap +from talemate.client.base import ClientStatus from talemate.emit import emit from talemate.emit.signals import handlers +import talemate.emit.async_signals as async_signals +from talemate.config import get_config, Config log = structlog.get_logger("talemate") @@ -18,56 +21,29 @@ AGENTS = {} CLIENTS = {} -def get_agent(typ: str, *create_args, **create_kwargs): +def get_agent(typ: str): agent = AGENTS.get(typ) - if agent: - return agent + if not agent: + raise KeyError(f"Agent {typ} has not been instantiated") - if create_args or create_kwargs: - cls = agents.get_agent_class(typ) - agent = cls(*create_args, **create_kwargs) - set_agent(typ, agent) - return agent + return agent -def set_agent(typ, agent): - AGENTS[typ] = agent - - -async def destroy_client(name: str, config: dict): +async def destroy_client(name: str): client = CLIENTS.get(name) if client: - await client.destroy(config) + await client.destroy() del CLIENTS[name] -def get_client(name: str, *create_args, **create_kwargs): +def get_client(name: str): client = CLIENTS.get(name) - system_prompts = create_kwargs.pop("system_prompts", None) + if not client: + raise KeyError(f"Client {name} has not been instantiated") - if client: - if create_kwargs: - if system_prompts: - client.set_system_prompts(system_prompts) - client.reconfigure(**create_kwargs) - return client - - if "type" in create_kwargs: - typ = create_kwargs.get("type") - cls = clients.get_client_class(typ) - client = cls(name=name, *create_args, **create_kwargs) - - if system_prompts: - client.set_system_prompts(system_prompts) - - set_client(name, client) - return client - - -def set_client(name, client): - CLIENTS[name] = client + return client def agent_types(): @@ -198,3 +174,142 @@ async def agent_ready_checks(): await agent.ready_check() elif agent and not agent.enabled: await agent.setup_check() + + +def get_active_client(): + for client in CLIENTS.values(): + if client.enabled: + return client + return None + + +async def instantiate_agents(): + config: Config = get_config() + + for typ, cls in agents.AGENT_CLASSES.items(): + if typ in AGENTS: + continue + + agent_config = config.agents.get(typ) + if agent_config: + _agent_config = agent_config.model_dump() + + client_name = _agent_config.pop("client", None) + if client_name: + _agent_config["client"] = CLIENTS.get(client_name) + + _agent_config.pop("name", None) + actions = _agent_config.pop("actions", None) + enabled = _agent_config.pop("enabled", True) + + agent = cls(**_agent_config) + + if actions: + await agent.apply_config(actions=actions) + + if not enabled and agent.has_toggle: + agent.is_enabled = False + elif enabled is True and agent.has_toggle: + agent.is_enabled = True + + AGENTS[typ] = agent + await agent.emit_status() + else: + agent = cls() + AGENTS[typ] = agent + await agent.emit_status() + + await ensure_agent_llm_client() + + +async def instantiate_clients(): + config: Config = get_config() + for name, client_config in config.clients.items(): + if name in CLIENTS: + continue + + client = clients.get_client_class(client_config.type)( + **client_config.model_dump() 
+ ) + CLIENTS[name] = client + + await emit_clients_status() + + +async def configure_agents(): + config: Config = get_config() + for name, agent_config in config.agents.items(): + agent = AGENTS.get(name) + if not agent: + log.warn("agent not found", name=name) + continue + + await agent.apply_config(**agent_config.model_dump()) + await agent.emit_status() + + await ensure_agent_llm_client() + + +async def ensure_agent_llm_client(): + config: Config = get_config() + for name, agent in AGENTS.items(): + agent_config = config.agents.get(name) + + if not agent: + log.warn("agent not found", name=name) + continue + + if not agent.requires_llm_client: + continue + + client_name = agent_config.client if agent_config else None + + if not client_name: + client = get_active_client() + + elif not CLIENTS.get(client_name): + client = get_active_client() + + else: + client = CLIENTS.get(client_name) + if client and not client.enabled: + client = get_active_client() + + log.debug( + "ensure_agent_llm_client", + agent=agent.agent_type, + client=client.client_type if client else None, + ) + + if agent.client != client: + agent.client = client + await agent.emit_status() + + +async def purge_clients(): + """Checks for clients in CLIENTS that are not longer in the config + and removes them + """ + config: Config = get_config() + for name, _ in list(CLIENTS.items()): + if name in config.clients: + continue + await destroy_client(name) + + +async def on_config_changed(config: Config): + await emit_clients_status() + emit_agents_status() + + +async def on_client_disabled(client_status: ClientStatus): + await ensure_agent_llm_client() + + +async def on_client_enabled(client_status: ClientStatus): + await ensure_agent_llm_client() + + +async_signals.get("config.changed").connect(on_config_changed) +async_signals.get("client.disabled").connect(on_client_disabled) +async_signals.get("client.enabled").connect(on_client_enabled) diff --git a/src/talemate/load.py b/src/talemate/load.py index aaf7e0b9..90bbad16 100644 --- a/src/talemate/load.py +++ b/src/talemate/load.py @@ -1,6 +1,12 @@ import enum import json import os +import shutil +import tempfile +import uuid +import zipfile +from pathlib import Path +from typing import TYPE_CHECKING import structlog @@ -8,7 +14,8 @@ import talemate.instance as instance from talemate import Actor, Character, Player, Scene from talemate.instance import get_agent from talemate.character import deactivate_character -from talemate.config import load_config +from talemate.config import get_config, Config +from talemate.config.schema import GamePlayerCharacter from talemate.context import SceneIsLoading from talemate.exceptions import UnknownDataSpec from talemate.game.state import GameState @@ -27,9 +34,15 @@ from talemate.world_state import WorldState from talemate.game.engine.nodes.registry import import_scene_node_definitions from talemate.scene.intent import SceneIntent from talemate.history import validate_history +import talemate.agents.tts.voice_library as voice_library +from talemate.path import SCENES_DIR + +if TYPE_CHECKING: + from talemate.agents.director import DirectorAgent __all__ = [ "load_scene", + "load_scene_from_zip", "load_character_from_image", "load_character_from_json", "transfer_character", @@ -40,6 +53,7 @@ log = structlog.get_logger("talemate.load") class ImportSpec(str, enum.Enum): talemate = "talemate" + talemate_complete = "talemate_complete" chara_card_v0 = "chara_card_v0" chara_card_v2 = "chara_card_v2" chara_card_v1 = "chara_card_v1" @@ -47,7 +61,7 
@@ class ImportSpec(str, enum.Enum): @set_loading("Loading scene...") -async def load_scene(scene, file_path, conv_client, reset: bool = False): +async def load_scene(scene, file_path, reset: bool = False): """ Load the scene data from the given file path. """ @@ -56,7 +70,7 @@ async def load_scene(scene, file_path, conv_client, reset: bool = False): with SceneIsLoading(scene): if file_path == "$NEW_SCENE$": return await load_scene_from_data( - scene, new_scene(), conv_client, reset=True, empty=True + scene, new_scene(), reset=True, empty=True ) ext = os.path.splitext(file_path)[1].lower() @@ -66,6 +80,10 @@ async def load_scene(scene, file_path, conv_client, reset: bool = False): if ext in [".jpg", ".png", ".jpeg", ".webp"]: return await load_scene_from_character_card(scene, file_path) + # a zip file was uploaded, extract and load complete scene + if ext == ".zip": + return await load_scene_from_zip(scene, file_path, reset) + # a json file was uploaded, load the scene data with open(file_path, "r") as f: scene_data = json.load(f) @@ -79,9 +97,7 @@ async def load_scene(scene, file_path, conv_client, reset: bool = False): return await load_scene_from_character_card(scene, file_path) # if it is a talemate scene, load it - return await load_scene_from_data( - scene, scene_data, conv_client, reset, name=file_path - ) + return await load_scene_from_data(scene, scene_data, reset, name=file_path) finally: await scene.add_to_recent_scenes() @@ -115,10 +131,10 @@ async def load_scene_from_character_card(scene, file_path): Load a character card (tavern etc.) from the given file path. """ - director = get_agent("director") - LOADING_STEPS = 5 + director: "DirectorAgent" = get_agent("director") + LOADING_STEPS = 6 if director.auto_direct_enabled: - LOADING_STEPS += 3 + LOADING_STEPS += 2 loading_status = LoadingStatus(LOADING_STEPS) loading_status("Loading character card...") @@ -136,9 +152,9 @@ async def load_scene_from_character_card(scene, file_path): character = load_character_from_image(file_path, image_format) image = True - conversation = scene.get_helper("conversation").agent - creator = scene.get_helper("creator").agent - memory = scene.get_helper("memory").agent + conversation = instance.get_agent("conversation") + creator = instance.get_agent("creator") + memory = instance.get_agent("memory") actor = Actor(character, conversation) @@ -194,7 +210,7 @@ async def load_scene_from_character_card(scene, file_path): if character.base_attributes.get("description"): character.description = character.base_attributes.pop("description") - await character.commit_to_memory(scene.get_helper("memory").agent) + await character.commit_to_memory(memory) log.debug("base_attributes parsed", base_attributes=character.base_attributes) except Exception as e: @@ -206,17 +222,20 @@ async def load_scene_from_character_card(scene, file_path): scene.assets.set_cover_image_from_file_path(file_path) character.cover_image = scene.assets.cover_image + # assign tts voice to character + await director.assign_voice_to_character(character) + # if auto direct is enabled, generate a story intent # and then set the scene intent try: + loading_status("Generating story intent...") + creator = get_agent("creator") + story_intent = await creator.contextual_generate_from_args( + context="scene intent:overall", + length=256, + ) + scene.intent_state.intent = story_intent if director.auto_direct_enabled: - loading_status("Generating story intent...") - creator = get_agent("creator") - story_intent = await 
creator.contextual_generate_from_args( - context="story intent:overall", - length=256, - ) - scene.intent_state.intent = story_intent loading_status("Generating scene types...") await director.auto_direct_generate_scene_types( instructions=story_intent, @@ -229,15 +248,36 @@ async def load_scene_from_character_card(scene, file_path): scene.saved = False - await scene.save_restore("initial.json") - scene.restore_from = "initial.json" + restore_file = "initial.json" + + # check if restore_file exists already + if os.path.exists(Path(scene.save_dir) / restore_file): + uid = str(uuid.uuid4())[:8] + restore_file = f"initial-{uid}.json" + log.warning( + "Restore file already exists, creating a new one", + restore_file=restore_file, + ) + + await scene.save_restore(restore_file) + scene.restore_from = restore_file import_scene_node_definitions(scene) + save_file = f"{scene.project_name}.json" + + # check if save_file exists already + if os.path.exists(Path(scene.save_dir) / save_file): + uid = str(uuid.uuid4())[:8] + save_file = f"{scene.project_name}-{uid}.json" + log.warning( + "Save file already exists, creating a new one", + save_file=save_file, + ) await scene.save( save_as=True, auto=True, - copy_name=f"{scene.project_name}.json", + copy_name=save_file, ) return scene @@ -246,15 +286,15 @@ async def load_scene_from_character_card(scene, file_path): async def load_scene_from_data( scene, scene_data, - conv_client, reset: bool = False, name: str | None = None, empty: bool = False, ): loading_status = LoadingStatus(1) reset_message_id() + config: Config = get_config() - memory = scene.get_helper("memory").agent + memory = instance.get_agent("memory") scene.description = scene_data.get("description", "") scene.intro = scene_data.get("intro", "") or scene.description @@ -318,27 +358,199 @@ async def load_scene_from_data( scene.inactive_characters.pop(character.name) if not character.is_player: - agent = instance.get_agent("conversation", client=conv_client) - actor = Actor(character, agent) + agent = instance.get_agent("conversation") + actor = Actor(character=character, agent=agent) else: - actor = Player(character, None) + actor = Player(character=character, agent=None) await scene.add_actor(actor) # if there is nio player character, add the default player character await handle_no_player_character( scene, - add_default_character=scene.config.get("game", {}) - .get("general", {}) - .get("add_default_character", True), + add_default_character=config.game.general.add_default_character, ) # the scene has been saved before (since we just loaded it), so we set the saved flag to True # as long as the scene has a memory_id. 
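A side-by-side sketch of the config-access change applied throughout `load.py` above: dict-style lookups against the removed `load_config()` give way to attribute access on the cached, typed `Config` model (illustrative only; the default shown comes from the schema earlier in this diff):

```python
from talemate.config import get_config, Config

# Before (removed): untyped nested dict lookups with inline defaults
# add_default = load_config().get("game", {}).get("general", {}).get("add_default_character", True)

# After (this diff): typed attribute access; General.add_default_character defaults to True
config: Config = get_config()
add_default = config.game.general.add_default_character
```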
scene.saved = "memory_id" in scene_data + # load the scene voice library + scene.voice_library = await voice_library.load_scene_voice_library(scene) + log.debug("scene voice library", voice_library=scene.voice_library) + return scene +@set_loading("Importing scene archive...") +async def load_scene_from_zip(scene, zip_path, reset: bool = False): + """ + Load a complete scene from a ZIP file containing scene.json and all assets/nodes/info/templates + """ + log.info("Loading complete scene from ZIP", zip_path=zip_path, reset=reset) + + # Verify ZIP file + if not zipfile.is_zipfile(zip_path): + raise ValueError(f"File is not a valid ZIP archive: {zip_path}") + + # Extract ZIP to temporary directory + with tempfile.TemporaryDirectory() as temp_dir: + temp_path = Path(temp_dir) + + log.debug("Extracting ZIP archive", zip_path=zip_path, temp_dir=temp_dir) + + with zipfile.ZipFile(zip_path, "r") as zipf: + # Check if scene.json exists in ZIP + if "scene.json" not in zipf.namelist(): + raise ValueError( + "ZIP archive does not contain required scene.json file" + ) + + # Extract all files + zipf.extractall(temp_path) + log.debug("Extracted ZIP contents", files=len(zipf.namelist())) + + # Load scene.json + scene_json_path = temp_path / "scene.json" + with open(scene_json_path, "r", encoding="utf-8") as f: + scene_data = json.load(f) + + log.debug( + "Loaded scene JSON from ZIP", scene_name=scene_data.get("name", "Unknown") + ) + + # Generate unique scene name for ZIP imports to avoid conflicts + # The scene's save_dir is derived from its name, so we set the name + base_scene_name = scene_data.get("name", "imported-scene") + + # Handle directory name conflicts by adding suffix to the scene name + scene_name = base_scene_name + counter = 1 + + # Convert scene name to project name format (same as Scene.project_name property) + def to_project_name(name): + return name.replace(" ", "-").replace("'", "").lower() + + potential_dir = os.path.join(str(SCENES_DIR), to_project_name(scene_name)) + + while os.path.exists(potential_dir): + scene_name = f"{base_scene_name}-{counter}" + potential_dir = os.path.join(str(SCENES_DIR), to_project_name(scene_name)) + counter += 1 + if counter > 100: # Safety limit + scene_name = f"{base_scene_name}-{uuid.uuid4().hex[:8]}" + potential_dir = os.path.join( + str(SCENES_DIR), to_project_name(scene_name) + ) + break + + # Set the scene name (which will determine save_dir via the property) + scene.name = scene_name + + log.debug( + "Generated unique scene name for ZIP import", + original_name=base_scene_name, + final_name=scene_name, + project_name=to_project_name(scene_name), + save_dir=scene.save_dir, + ) + + # Create scene save directory (this happens automatically via the save_dir property) + # but we explicitly access it to trigger directory creation + actual_save_dir = scene.save_dir # This triggers directory creation + log.debug("Scene save directory prepared", save_dir=actual_save_dir) + + # Restore assets if they exist in ZIP + assets_source = temp_path / "assets" + if assets_source.exists(): + assets_dest = Path(scene.save_dir) / "assets" + shutil.copytree(assets_source, assets_dest) + log.debug("Loaded assets directory", source=assets_source, dest=assets_dest) + + # Restore nodes if they exist in ZIP + nodes_source = temp_path / "nodes" + if nodes_source.exists(): + nodes_dest = Path(scene.save_dir) / "nodes" + shutil.copytree(nodes_source, nodes_dest) + log.debug("Loaded nodes directory", source=nodes_source, dest=nodes_dest) + + # Restore info if it exists in ZIP 
+ info_source = temp_path / "info" + if info_source.exists(): + info_dest = Path(scene.save_dir) / "info" + shutil.copytree(info_source, info_dest) + log.debug("Loaded info directory", source=info_source, dest=info_dest) + + # Restore templates if they exist in ZIP + templates_source = temp_path / "templates" + if templates_source.exists(): + templates_dest = Path(scene.save_dir) / "templates" + shutil.copytree(templates_source, templates_dest) + log.debug( + "Loaded templates directory", + source=templates_source, + dest=templates_dest, + ) + + # Restore restore file if it exists in ZIP and is referenced in scene_data + restore_filename = scene_data.get("restore_from") + if restore_filename: + restore_source = temp_path / restore_filename + if restore_source.exists(): + restore_dest = Path(scene.save_dir) / restore_filename + shutil.copy2(restore_source, restore_dest) + log.debug( + "Restored restore file", + source=restore_source, + dest=restore_dest, + filename=restore_filename, + ) + else: + log.warning( + "Restore file referenced in scene data but not found in ZIP, unsetting restore_from", + filename=restore_filename, + ) + scene.restore_from = None + + # Update scene_data with the conflict-resolved name so saves go to the right directory + scene_data = scene_data.copy() # Don't modify the original + scene_data["name"] = scene.name # Use the conflict-resolved name + + log.info( + "Complete scene import finished", + final_scene_name=scene.name, + save_dir=scene.save_dir, + ) + + # Load the scene data with the updated name + # Use the scene name (without .zip extension) for the filename + zip_basename = os.path.basename(zip_path) + clean_name = ( + zip_basename.replace(".zip", "") + if zip_basename.endswith(".zip") + else zip_basename + ) + result = await load_scene_from_data(scene, scene_data, reset, name=clean_name) + + # If no restore_from is set, set it to the initial.json file + if not scene.restore_from: + scene.restore_from = "initial.json" + await scene.save_restore("initial.json") + log.debug( + "Set restore_from to initial.json", restore_from=scene.restore_from + ) + + # Save the scene to ensure the JSON file is written to the correct directory + # This ensures both the assets and the scene JSON are in the same place + await scene.save(auto=False, force=True) + log.debug( + "Saved imported scene to directory", + save_dir=scene.save_dir, + filename=scene.filename, + ) + + return result + + async def transfer_character(scene, scene_json_path, character_name): """ Load a character from a scene json file and add it to the current scene. @@ -351,7 +563,7 @@ async def transfer_character(scene, scene_json_path, character_name): with open(scene_json_path, "r") as f: scene_data = json.load(f) - agent = scene.get_helper("conversation").agent + agent = instance.get_agent("conversation") # Find the character in the characters list for character_data in scene_data["characters"]: @@ -461,8 +673,12 @@ def character_from_chara_data(data: dict) -> Character: Generates a barebones character from a character card data dictionary. """ - character = Character("", "", "") - character.color = "red" + character = Character( + name="UNKNOWN", + description="", + greeting_text="", + ) + if "name" in data: character.name = data["name"] @@ -519,21 +735,20 @@ def default_player_character() -> Player | None: Return a default player character. :return: Default player character. 
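Taken together, the `talemate_complete` export added in `export.py` and the ZIP loader above form a round trip. A hedged usage sketch — the API names come from this diff, while the file name and the two scene objects are assumptions:

```python
from talemate.export import ExportFormat, ExportOptions, export
from talemate.load import load_scene_from_zip


async def archive_and_reimport(scene, fresh_scene, archive_path: str = "scene-export.zip"):
    # export_talemate_complete returns raw ZIP bytes
    # (scene.json plus assets/nodes/info/templates and any restore file)
    zip_bytes = await export(
        scene,
        ExportOptions(name=scene.name, format=ExportFormat.talemate_complete),
    )
    with open(archive_path, "wb") as fh:
        fh.write(zip_bytes)

    # the loader verifies the archive, resolves scene-name collisions
    # and rebuilds the scene directory before loading scene.json
    return await load_scene_from_zip(fresh_scene, archive_path)
```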
""" - default_player_character = ( - load_config().get("game", {}).get("default_player_character", {}) - ) - name = default_player_character.get("name") + config: Config = get_config() + default_player_character: GamePlayerCharacter = config.game.default_player_character + name = default_player_character.name if not name: # We don't have a valid default player character, so we return None return None - color = default_player_character.get("color", "cyan") - description = default_player_character.get("description", "") + color = default_player_character.color + description = default_player_character.description return Player( Character( - name, + name=name, description=description, greeting_text="", color=color, diff --git a/src/talemate/path.py b/src/talemate/path.py new file mode 100644 index 00000000..6705b4ce --- /dev/null +++ b/src/talemate/path.py @@ -0,0 +1,17 @@ +from pathlib import Path + +__all__ = [ + "TALEMATE_ROOT", + "SCENES_DIR", + "TEMPLATES_DIR", + "TTS_DIR", + "CONFIG_FILE", +] + +TALEMATE_ROOT = Path(__file__).parent.parent.parent +SCENES_DIR = TALEMATE_ROOT / "scenes" +TEMPLATES_DIR = TALEMATE_ROOT / "templates" +TTS_DIR = TALEMATE_ROOT / "tts" + + +CONFIG_FILE = TALEMATE_ROOT / "config.yaml" diff --git a/src/talemate/prompts/base.py b/src/talemate/prompts/base.py index cb6c9428..a670588b 100644 --- a/src/talemate/prompts/base.py +++ b/src/talemate/prompts/base.py @@ -24,7 +24,7 @@ import structlog import talemate.instance as instance import talemate.thematic_generators as thematic_generators -from talemate.config import load_config +from talemate.config import get_config from talemate.context import regeneration_context, active_scene from talemate.emit import emit from talemate.exceptions import LLMAccuracyError, RenderPromptError @@ -311,7 +311,7 @@ class Prompt: @property def config(self): if not hasattr(self, "_config"): - self._config = load_config() + self._config = get_config() return self._config def __str__(self): @@ -863,6 +863,9 @@ class Prompt: # Extract YAML from markdown code blocks if "```yaml" in response and "```" in response.split("```yaml", 1)[1]: yaml_block = response.split("```yaml", 1)[1].split("```", 1)[0] + # Starts with ```yaml but has not ``` at the end + elif "```yaml" in response and "```" not in response.split("```yaml", 1)[1]: + yaml_block = response.split("```yaml", 1)[1] elif "```" in response: # Try any code block as fallback yaml_block = response.split("```", 1)[1].split("```", 1)[0] @@ -1027,7 +1030,9 @@ class Prompt: self.client = client - response = await client.send_prompt(str(self), kind=kind) + response = await client.send_prompt( + str(self), kind=kind, data_expected=self.data_response + ) # Handle prepared response prepending based on response format if not self.data_response: @@ -1041,22 +1046,32 @@ class Prompt: ) json_start = response.lstrip().startswith("{") - yaml_block = response.lstrip().startswith("```yaml") + yaml_block = "```yaml" in response + json_block = "```json" in response - # If response doesn't start with expected format markers, prepend the prepared response - if (format_type == "json" and not json_start) or ( - format_type == "yaml" and not yaml_block - ): - pad = " " if self.pad_prepended_response else "" - if format_type == "yaml": - if self.client.can_be_coerced: - response = self.prepared_response + response.rstrip() + if format_type == "json" and json_block: + response = response.split("```json", 1)[1].split("```", 1)[0] + elif format_type == "yaml" and yaml_block: + response = response.split("```yaml", 
1)[1].split("```", 1)[0].strip() + else: + # If response doesn't start with expected format markers, prepend the prepared response + if (format_type == "json" and not json_start) or ( + format_type == "yaml" and not yaml_block + ): + pad = " " if self.pad_prepended_response else "" + if format_type == "yaml": + if self.client.can_be_coerced: + response = self.prepared_response + response.rstrip() + else: + response = ( + self.prepared_response.rstrip() + + "\n " + + response.rstrip() + ) else: response = ( - self.prepared_response.rstrip() + "\n " + response.rstrip() + self.prepared_response.rstrip() + pad + response.strip() ) - else: - response = self.prepared_response.rstrip() + pad + response.strip() if self.eval_response: return await self.evaluate(response) diff --git a/src/talemate/prompts/templates/common/narrative-patterns.jinja2 b/src/talemate/prompts/templates/common/narrative-patterns.jinja2 new file mode 100644 index 00000000..e66f8f9d --- /dev/null +++ b/src/talemate/prompts/templates/common/narrative-patterns.jinja2 @@ -0,0 +1,66 @@ +{% set response_patterns = [ + "PROSE DIALOGUE PROSE", + "SHORT_DIALOGUE PROSE DIALOGUE", + "DIALOGUE PROSE SHORT_DIALOGUE", + "PROSE SHORT_DIALOGUE PROSE", + "SHORT_DIALOGUE PROSE SHORT_DIALOGUE", + "PROSE DIALOGUE SHORT_DIALOGUE", + "DIALOGUE PROSE DIALOGUE", + "SHORT_DIALOGUE PROSE PROSE", + "PROSE DIALOGUE PROSE DIALOGUE", + "DIALOGUE PROSE PROSE" +] %} +{% set selected_pattern = response_patterns[range(0, response_patterns|length)|random] %} + +**CRITICAL: Follow this exact response structure**: {{ selected_pattern }} + +**DO NOT include these labels in your response. These are structural guidelines only:** +- PROSE: 2-4 sentences of narrative description (actions, thoughts, environment, sensory details) +- DIALOGUE: 1-2 sentences of spoken words in quotes +- SHORT_DIALOGUE: Few words of spoken words in quotes + +**Format**: Write flowing narrative prose without character name prefixes, section labels, or formatting markers. The pattern shows the STRUCTURE to follow, not text to include. + +**Example for your selected pattern ({{ selected_pattern }})**: + +{% if selected_pattern == "PROSE DIALOGUE PROSE" %} +``` example +The rain drummed against the window as shadows danced across the empty room. A distant clock chimed midnight, its echo swallowed by the storm. "Something's not right about this place," she whispered, eyes scanning the peeling wallpaper for any sign of what might be wrong. The floorboards groaned beneath her feet as if the house itself was responding to her presence. +``` +{% elif selected_pattern == "SHORT_DIALOGUE PROSE DIALOGUE" %} +``` example +"Wait." She froze at the warehouse entrance, her instincts screaming danger. Dust motes swirled in the afternoon light streaming through broken windows, and somewhere in the distance, metal creaked against metal. "I need to call for backup first," she muttered, her voice steady despite the unease crawling up her spine. +``` +{% elif selected_pattern == "DIALOGUE PROSE SHORT_DIALOGUE" %} +``` example +"I've been waiting here for hours," she said, stepping out from behind the marble pillar. "Though I wasn't sure anyone would actually come." The museum's grand hall stretched endlessly around her, filled with ancient artifacts that seemed to watch from their glass cases. "Finally." +``` +{% elif selected_pattern == "PROSE SHORT_DIALOGUE PROSE" %} +``` example +The cafe bustled with morning energy as steam rose from countless coffee cups. 
Conversations blended into a comfortable hum of urban life around her. "One more chance," she muttered under her breath. The letter in her hands felt heavier than paper should, its contents potentially changing everything she thought she knew about her family's past. +``` +{% elif selected_pattern == "SHORT_DIALOGUE PROSE SHORT_DIALOGUE" %} +``` example +"Found it." Her flashlight beam illuminated ancient symbols carved deep into the stone wall. Centuries of dust coated everything in the buried chamber, and the air tasted of forgotten time on her tongue. "Incredible." +``` +{% elif selected_pattern == "PROSE DIALOGUE SHORT_DIALOGUE" %} +``` example +The library's silence was broken only by the soft whisper of pages turning under her fingers. Afternoon sunlight slanted through tall windows, casting long shadows between the towering bookshelves as she studied the ancient manuscript. "This changes everything," she breathed, hardly daring to believe what she was reading. "Incredible." +``` +{% elif selected_pattern == "DIALOGUE PROSE DIALOGUE" %} +``` example +"The coordinates match exactly," she announced, checking her instruments one final time. The ship rocked gently beneath her feet as fog began to roll in from the open sea, obscuring the horizon line she had been studying. "But I still think this is a mistake," she added, doubt creeping into her voice. +``` +{% elif selected_pattern == "SHORT_DIALOGUE PROSE PROSE" %} +``` example +"Ready." She stepped into the elevator as it descended into darkness, her equipment secured and her mind focused on the mission ahead. Twenty floors underground, the facility hummed with an energy that made the air itself feel electric against her skin. Somewhere in the distance, machinery operated with mechanical precision that spoke of purposes she was only beginning to understand. +``` +{% elif selected_pattern == "PROSE DIALOGUE PROSE DIALOGUE" %} +``` example +The garden party continued around her as if nothing had changed, but everything had. Laughter and champagne glasses created a perfect facade of normalcy that she had to maintain. "I need to find somewhere private," she whispered to herself, scanning the crowd for an escape route. The weight of what she had discovered pressed against her chest like a physical burden. "This could change everything," she murmured, her diplomatic smile never wavering despite the turmoil beneath. +``` +{% elif selected_pattern == "DIALOGUE PROSE PROSE" %} +``` example +"The test results came back positive," she said to the empty laboratory, setting down the folder with deliberate care. The silence stretched around her except for the gentle hum of machinery and the distant sound of traffic from the street below. Everything she had theorized, everything she had hoped to prove, was now undeniable reality staring back at her from printed charts and data. The implications would ripple through every aspect of her understanding, changing not just her research but potentially the course of her entire career. 
+``` +{% endif %} \ No newline at end of file diff --git a/src/talemate/prompts/templates/common/response-length.jinja2 b/src/talemate/prompts/templates/common/response-length.jinja2 index e00f6b8e..7e1b6a0b 100644 --- a/src/talemate/prompts/templates/common/response-length.jinja2 +++ b/src/talemate/prompts/templates/common/response-length.jinja2 @@ -1,9 +1,14 @@ {% if response_length -%} - {% if response_length <= 128 %}{% set response_length_units = "1 - 2 sentences" %} - {% elif response_length <= 256 %}{% set response_length_units = "2 - 4 sentences" %} - {% elif response_length <= 512 %}{% set response_length_units = "4 - 6 sentences" %} - {% elif response_length <= 1024 %}{% set response_length_units = "2 paragraphs" %} - {% else %}{% set response_length_units = "3 paragraphs" %} + {% if response_length <= 32 %}{% set response_length_units = "very few words" %} + {% elif response_length <= 64 %}{% set response_length_units = "1 - 2 sentences" %} + {% elif response_length <= 128 %}{% set response_length_units = "2 - 3 sentences" %} + {% elif response_length <= 256 %}{% set response_length_units = "3 - 5 sentences" %} + {% elif response_length <= 384 %}{% set response_length_units = "1 paragraph" %} + {% elif response_length <= 512 %}{% set response_length_units = "2 paragraphs" %} + {% elif response_length <= 1024 %}{% set response_length_units = "4 paragraphs" %} + {% else %}{% set response_length_units = "multiple paragraphs" %} {% endif%} {% endif -%} -{% if response_length_units -%}{{ prefix }}{{ response_length_units }}{{ suffix }}{% endif %} \ No newline at end of file +{% if response_length_units -%} +The length of your response must fit within {{ response_length_units }}. +{% endif %} \ No newline at end of file diff --git a/src/talemate/prompts/templates/conversation/dialogue-narrative.jinja2 b/src/talemate/prompts/templates/conversation/dialogue-narrative.jinja2 new file mode 100644 index 00000000..db05a959 --- /dev/null +++ b/src/talemate/prompts/templates/conversation/dialogue-narrative.jinja2 @@ -0,0 +1,80 @@ +{% extends "dialogue.jinja2" %} +{% block task_main_text %} +<|SECTION:TASK|> +You are writing a novel-style narrative continuation featuring {{ talking_character.name }} in a scene with {{ formatted_names }} in {{ scene.context }}. + +{% with exclude_phase_intent=false %}{% include "scene-intent.jinja2" %}{% endwith %} + +{% include "writing-style-instructions.jinja2" %} + +Your task is to write the next part of the story from {{ talking_character.name }}'s perspective, continuing the narrative in flowing, novel-like prose. + +{% set dialogue_examples -%} +{% for example in talking_character.random_dialogue_examples(scene, num=2, strip_name=True) -%} +``` example +{{ example }} +``` + +{% endfor -%} +{% endset %} + +## Writing Guidelines: + +**CRITICAL - Character Focus**: +- You are ONLY writing for {{ talking_character.name }} +- NEVER write dialogue for other characters +- NEVER describe other characters' actions, thoughts, or reactions +- NEVER make other characters speak or act +- Focus EXCLUSIVELY on {{ talking_character.name }}'s perspective, actions, thoughts, and words +- Other characters can exist in the scene but you cannot control them +- **ENVIRONMENTAL REACTIONS ARE ALLOWED**: You CAN describe how the environment or objects respond (e.g., "the door opened," "rain started," "the fire crackled") + +Really think about the above!!! 
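Editor's aside on the src/talemate/prompts/base.py hunks earlier in this diff: the response handling now prefers pulling the payload out of a fenced ```yaml / ```json block and only falls back to prepending the prepared response when no fence is present. Below is a minimal standalone sketch of that branch order; the function and variable names are chosen for illustration and are not Talemate's.

```python
# Illustrative sketch only, not Talemate code. It mirrors the branch order of
# the fence handling added to src/talemate/prompts/base.py: a closed fence for
# the expected format wins, an unterminated fence is taken to the end of the
# reply, and any other fence is a last resort.
def extract_fenced_block(response: str, fmt: str = "yaml") -> str:
    marker = f"```{fmt}"
    if marker in response:
        tail = response.split(marker, 1)[1]
        if "```" in tail:
            return tail.split("```", 1)[0].strip()
        return tail.strip()  # fence was opened but never closed
    if "```" in response:
        # generic code fence as a fallback
        return response.split("```", 1)[1].split("```", 1)[0].strip()
    return response.strip()  # no fence at all


assert extract_fenced_block("```yaml\nfoo: 1\n```", "yaml") == "foo: 1"
assert extract_fenced_block("```yaml\nfoo: 1", "yaml") == "foo: 1"
```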
+ +**Character Goals**: {% if talking_character.sheet %}Consider {{ talking_character.name }}'s character sheet and any goals, motivations, or personality traits that should influence their actions and decisions.{% else %}Stay true to {{ talking_character.name }}'s established personality and motivations.{% endif %} + +**Narrative Style**: +- Write in clear, natural prose +- Integrate dialogue smoothly into the narrative +- Include relevant internal thoughts and emotions +- Show character motivations through actions and brief inner monologue +- Use concise, focused descriptions +- **AVOID PURPLE PROSE**: Keep descriptions practical and avoid overly flowery or elaborate language. Prefer simple, direct descriptions over ornate ones +- **BE CONCISE**: Don't over-describe scenes, emotions, or actions. A few well-chosen details are better than lengthy descriptions +- **CRITICAL - Tense and Perspective Consistency**: Examine the existing conversation history carefully and maintain the EXACT same tense (past/present) and perspective (first/second/third person) used in previous messages. If previous messages use third person past tense ("He walked"), continue with third person past tense. If they use first person present ("I walk"), continue with first person present. Do NOT switch styles mid-conversation. + +**Scene Progression - PRIORITIZE MOVING FORWARD**: Always advance the story. Don't just react - make things happen. Consider: +- What {{ talking_character.name }} wants to achieve in this moment +- How they would naturally respond to the current situation +- What actions or words would move the story forward meaningfully +- How to maintain continuity with previous events +- **TAKE ACTION**: Have {{ talking_character.name }} do something new, make a decision, or change the situation rather than just describing the current state + +**Avoid Repetition**: +- Don't repeat phrases, actions, or descriptions from recent messages +- Vary your sentence structure and vocabulary +- If {{ talking_character.name }} has already expressed similar thoughts or performed similar actions recently, find a fresh angle or new development +- Move the story forward rather than rehashing previous moments +- **Vary your opening patterns**: Avoid starting consecutive responses with similar sentence structures (e.g., "{{ talking_character.name }}'s [object]..." or "{{ talking_character.name }} [verbed]...") +- **Focus on different aspects**: If you've recently described equipment/tools, shift to emotions, environment, or internal thoughts instead + + +Based on {{ talking_character.name}}'s established dialogue patterns, maintain consistency with their voice and speaking style. +{% if dialogue_examples.strip() %} +{{ dialogue_examples.strip() }} +{%- else -%} +{% include "narrative-patterns.jinja2" %} +{%- endif %} + +{{ task_instructions }} + +Remember: Write clear, engaging prose that captures {{ talking_character.name }}'s experience in this moment. Focus on their perspective, thoughts, and actions while maintaining the natural flow of the story. Keep it concise and avoid unnecessary embellishment. + +**FINAL REMINDER**: You are {{ talking_character.name }}. Write ONLY what {{ talking_character.name }} thinks, says, and does. Do not write for any other character. 
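Because dialogue-narrative.jinja2 works by extending dialogue.jinja2 and overriding its task_main_text and response_scaffolding blocks, a tiny self-contained demo of that Jinja mechanism may help readers unfamiliar with template inheritance. The toy templates and loader below are invented for illustration; Talemate's real Environment setup is not part of this diff.

```python
# Toy demonstration of {% extends %} / {% block %} overriding, the mechanism
# dialogue-narrative.jinja2 relies on. Template contents here are invented.
from jinja2 import Environment, DictLoader

env = Environment(
    loader=DictLoader(
        {
            "dialogue.jinja2": (
                "{% block task_main_text %}generic dialogue task{% endblock %}\n"
                "{% block response_scaffolding %}{% endblock %}"
            ),
            "dialogue-narrative.jinja2": (
                '{% extends "dialogue.jinja2" %}'
                "{% block task_main_text %}novel-style task for {{ name }}{% endblock %}"
            ),
        }
    )
)

print(env.get_template("dialogue-narrative.jinja2").render(name="Alice"))
# Prints the overridden task block; the untouched scaffolding block stays empty.
```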
+ +<|CLOSE_SECTION|> +{% endblock -%} + +{% block response_scaffolding %}{{ bot_token }}{% if partial_message -%} +{{ partial_message.strip() }} +{% endif %}{% endblock -%} \ No newline at end of file diff --git a/src/talemate/prompts/templates/conversation/dialogue.jinja2 b/src/talemate/prompts/templates/conversation/dialogue.jinja2 index 2187d483..0cf89f04 100644 --- a/src/talemate/prompts/templates/conversation/dialogue.jinja2 +++ b/src/talemate/prompts/templates/conversation/dialogue.jinja2 @@ -29,7 +29,7 @@ {% if not director_guidance -%} {# INSERT ACTING INSTRUCTIONS VIA OFFSET #} {%- if actor_instructions_offset > 0 and talking_character.dialogue_instructions and scene.count_messages() > actor_instructions_offset -%} - {%- set _ = scene_history.insert(-actor_instructions_offset, "(Internal acting instructions for "+talking_character.name+": "+talking_character.dialogue_instructions+" "+actor_instructions+")") -%} + {%- set _ = scene_history.insert(-actor_instructions_offset, "(Broad character guidance for "+talking_character.name+": "+talking_character.dialogue_instructions+" "+actor_instructions+")") -%} {% endif -%} {% endif -%} {# END INSERT ACTING INSTRUCTIONS VIA OFFSET #} @@ -78,6 +78,8 @@ {{ task_main_text }} +<|RESPONSE_LENGTH_INSTRUCTIONS|> + {% if scene_context_length < large_context_threshold %}{{ acting_instructions }}{% endif %}{# if scene context is relatively short, its beneficial to move the acting instructions into the task #} {% with _text=scene_context_text %}{% include "internal-note-help.jinja2" %}{% endwith %} diff --git a/src/talemate/prompts/templates/creator/determine-content-context.jinja2 b/src/talemate/prompts/templates/creator/determine-content-context.jinja2 index eb7ca083..e523c60e 100644 --- a/src/talemate/prompts/templates/creator/determine-content-context.jinja2 +++ b/src/talemate/prompts/templates/creator/determine-content-context.jinja2 @@ -22,7 +22,7 @@ Your response should be "Content context: a ..." 
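For reference, the rewritten response-length.jinja2 above (now also referenced from dialogue.jinja2 via the new <|RESPONSE_LENGTH_INSTRUCTIONS|> marker) maps a token budget to a plain-language length instruction. The same mapping spelled out in Python, purely as an editor's sketch:

```python
# Editor's sketch of the threshold table in the updated response-length.jinja2;
# the real logic lives in the template, this just restates it for clarity.
def response_length_units(response_length: int) -> str:
    thresholds = [
        (32, "very few words"),
        (64, "1 - 2 sentences"),
        (128, "2 - 3 sentences"),
        (256, "3 - 5 sentences"),
        (384, "1 paragraph"),
        (512, "2 paragraphs"),
        (1024, "4 paragraphs"),
    ]
    for limit, units in thresholds:
        if response_length <= limit:
            return units
    return "multiple paragraphs"


assert response_length_units(96) == "2 - 3 sentences"
assert response_length_units(2048) == "multiple paragraphs"
```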
Examples: -{% for content_context in config.get('creator', {}).get('content_context',[]) -%} +{% for content_context in config.creator.content_context -%} - {{ content_context }} {% endfor -%} <|CLOSE_SECTION|> diff --git a/src/talemate/prompts/templates/director/cm-assign-voice.jinja2 b/src/talemate/prompts/templates/director/cm-assign-voice.jinja2 new file mode 100644 index 00000000..7117a17f --- /dev/null +++ b/src/talemate/prompts/templates/director/cm-assign-voice.jinja2 @@ -0,0 +1,59 @@ +{% set extra_context_content -%} +{% include "extra-context.jinja2" %} +{# scene analysis exists #}{% if scene.agent_state.summarizer and scene.agent_state.summarizer.scene_analysis %}{{ scene.agent_state.summarizer.scene_analysis }} {% endif %} +{% endset %} +{% set character_context_content -%} +<|SECTION:CHARACTER DETAILS - {{ character.name }}|> +{{ character.description }} + +{{ character.sheet }} +<|CLOSE_SECTION|> +{% endset %} +{% set voices_content -%} +<|SECTION:VOICES|> +{% for voice in voices %} +{ + "voice_id": "{{ voice.id }}", + "name": "{{ voice.label }}", + "tags": {{ voice.tags|tojson }}, + "used": {{ voice.used }} +} +{% endfor %} +<|CLOSE_SECTION|> +{% endset %} +{{ extra_context_content }} +{{ character_context_content }} +{{ voices_content }} +{% set context_tokens = count_tokens(extra_context_content) + count_tokens(character_context_content) + count_tokens(voices_content) %} +{% set budget=min(max_tokens-300-context_tokens, 1024) %} +{% with budget=budget %}{% include "scene-context.jinja2" %}{% endwith %} + +<|SECTION:FUNCTION CALLING INSTRUCTIONS|> +{{ focal.render_instructions() }} + +{{ + focal.callbacks.assign_voice.render( + "Assign a voice to "+character.name+". Use the tags and the character information to determine the best voice.", + voice_id="The voice id", + examples=[ + { + "voice_id": "kokoro:af_heart", + }, + { + "voice_id": "elevenlabs:wBXNqKUATyqu0RtYt25i", + } + ] + ) +}} +<|CLOSE_SECTION|> +<|SECTION:TASK|> +Assign a voice to {{ character.name }}. Use the tags and the character information to determine the best voice. + +First analyze the character and the available voices then make your choice by calling the `assign_voice` function. + +Prefer unused voices over used ones, but the most important thing is that the voice is a good fit for the character, so you can reuse voices if needed. + +{% if narrator_voice %}The narrator voice is {{ narrator_voice.id }}.{% endif %} + +You MUST call the `assign_voice` function. +<|CLOSE_SECTION|> \ No newline at end of file diff --git a/src/talemate/prompts/templates/director/guide-conversation.jinja2 b/src/talemate/prompts/templates/director/guide-conversation.jinja2 index e0960e3f..ddc76d38 100644 --- a/src/talemate/prompts/templates/director/guide-conversation.jinja2 +++ b/src/talemate/prompts/templates/director/guide-conversation.jinja2 @@ -76,13 +76,7 @@ Focus solely on WHAT needs to be conveyed. Trust the writer to capture {{ charac Finally ALWAYS briefly state the formatting guidelines: Speech MUST go inside "". -{% if response_length < 200 %}{% set num_sentences="1-2" -%} -{% elif response_length < 300 %}{% set num_sentences="3-4" -%} -{% elif response_length < 500 %}{% set num_sentences="4-5" -%} -{% elif response_length < 700 %}{% set num_sentences="6-7" -%} -{% elif response_length < 1000 %}{% set num_sentences="7-8" -%} -{% else %}{% set num_sentences="8-10" -%} -{% endif %}Fit your guidance within {{ num_sentences }} sentences. +{% include "response-length.jinja2" %} Use terse, direct language. 
Cut all unnecessary words. Be blunt and brief like scribbles on a notepad. @@ -90,4 +84,4 @@ Provide your response in the following format: GUIDANCE: ... your guidance for the story writer ... <|CLOSE_SECTION|> -{{ bot_token }} GUIDANCE: \ No newline at end of file +{{ bot_token }}GUIDANCE: \ No newline at end of file diff --git a/src/talemate/prompts/templates/director/guide-narration.jinja2 b/src/talemate/prompts/templates/director/guide-narration.jinja2 index 45a4a87f..6326c69f 100644 --- a/src/talemate/prompts/templates/director/guide-narration.jinja2 +++ b/src/talemate/prompts/templates/director/guide-narration.jinja2 @@ -64,13 +64,8 @@ Content Classification: {{ scene.context }} {% include "guide-narration-writing-style.jinja2" %} ### Rules for your instructions -{% if response_length < 200 %}{% set num_sentences="1-2" -%} -{% elif response_length < 300 %}{% set num_sentences="2-3" -%} -{% elif response_length < 500 %}{% set num_sentences="3-4" -%} -{% elif response_length < 700 %}{% set num_sentences="4-5" -%} -{% elif response_length < 1000 %}{% set num_sentences="6-7" -%} -{% else %}{% set num_sentences="7-8" -%} -{% endif %}Fit your guidance within {{ num_sentences }} sentences. + +{% include "response-length.jinja2" %} Use terse, direct language. Cut all unnecessary words. Be blunt and brief like scribbles on a notepad. @@ -78,4 +73,4 @@ Provide your response in the following format: GUIDANCE: ... your guidance for the narrator ... <|CLOSE_SECTION|> -{{ bot_token }} GUIDANCE: \ No newline at end of file +{{ bot_token }}GUIDANCE: \ No newline at end of file diff --git a/src/talemate/prompts/templates/director/scene-context.jinja2 b/src/talemate/prompts/templates/director/scene-context.jinja2 index 123582c0..5bbd5167 100644 --- a/src/talemate/prompts/templates/director/scene-context.jinja2 +++ b/src/talemate/prompts/templates/director/scene-context.jinja2 @@ -11,7 +11,6 @@ {{ scene_context }} {% endfor %} {% endset %} -{% endif %} {% with memory_prompt = history %}{% include "memory-context.jinja2" %}{% endwith %} <|SECTION:SCENE|> {{ scene_context_text }} diff --git a/src/talemate/prompts/templates/narrator/narrate-character-entry.jinja2 b/src/talemate/prompts/templates/narrator/narrate-character-entry.jinja2 index e316b0d7..fa28f77c 100644 --- a/src/talemate/prompts/templates/narrator/narrate-character-entry.jinja2 +++ b/src/talemate/prompts/templates/narrator/narrate-character-entry.jinja2 @@ -17,5 +17,5 @@ Narrate the entrance of {{ character.name }} into the scene. {% if not narrative {% include "narrative-direction.jinja2" %} -Write 2 to 4 sentences. {{ extra_instructions }} +{{ extra_instructions }} <|CLOSE_SECTION|> \ No newline at end of file diff --git a/src/talemate/prompts/templates/narrator/narrate-character-exit.jinja2 b/src/talemate/prompts/templates/narrator/narrate-character-exit.jinja2 index 0c454c3c..3704c580 100644 --- a/src/talemate/prompts/templates/narrator/narrate-character-exit.jinja2 +++ b/src/talemate/prompts/templates/narrator/narrate-character-exit.jinja2 @@ -17,5 +17,5 @@ Narrate the exit of {{ character.name }} from the scene.{% if not narrative_dire {% include "narrative-direction.jinja2" %} -Write 2 to 4 sentences. 
{{ extra_instructions }} +{{ extra_instructions }} <|CLOSE_SECTION|> \ No newline at end of file diff --git a/src/talemate/prompts/templates/narrator/narrate-query.jinja2 b/src/talemate/prompts/templates/narrator/narrate-query.jinja2 index d4f4ac5e..0b2cc7d1 100644 --- a/src/talemate/prompts/templates/narrator/narrate-query.jinja2 +++ b/src/talemate/prompts/templates/narrator/narrate-query.jinja2 @@ -25,7 +25,6 @@ Use the established context to inform your responses, anchoring them to final li Provide information that maintains continuity with everything up to and including the final line. Respond as an omniscient, all-seeing narrator with deep knowledge of the story world. -Respond with 1-2 sentences of concise narration fitting the scene's context. Focus on descriptive prose and implied experiences. Embody the narrator's role completely, using a unique narrative voice. diff --git a/src/talemate/prompts/templates/narrator/narrate-time-passage.jinja2 b/src/talemate/prompts/templates/narrator/narrate-time-passage.jinja2 index d43f7247..95799723 100644 --- a/src/talemate/prompts/templates/narrator/narrate-time-passage.jinja2 +++ b/src/talemate/prompts/templates/narrator/narrate-time-passage.jinja2 @@ -7,6 +7,6 @@ Narrate the passage of time that just occured, subtly move the story forward, and set up the next scene. Your main goal is to fill in what happened during the time passage. {% include "narrative-direction.jinja2" %} -Write 2 to 4 sentences. {{ extra_instructions }} +{{ extra_instructions }} <|CLOSE_SECTION|> {{ bot_token }}{{ time_passed }}: \ No newline at end of file diff --git a/src/talemate/prompts/templates/narrator/narrative-direction.jinja2 b/src/talemate/prompts/templates/narrator/narrative-direction.jinja2 index 482d13d6..d21b630d 100644 --- a/src/talemate/prompts/templates/narrator/narrative-direction.jinja2 +++ b/src/talemate/prompts/templates/narrator/narrative-direction.jinja2 @@ -16,8 +16,7 @@ directions. {% else %} Maintain an informal, conversational tone. {# writing style and guidance END #}{% endif %} -{# scene analysis exists #}{% if agent_context_state["summarizer__scene_analysis"] %}Use the scene analysis to help -ground your narration.{% endif %} +{# scene analysis exists #}{% if agent_context_state["summarizer__scene_analysis"] %}Use the scene analysis to help ground your narration.{% endif %} {# context investigation exists #}{% if agent_context_state["summarizer__context_investigation"] %}Use the historical context to help ground your narration.{% endif %} {# regenerate-context #} \ No newline at end of file diff --git a/src/talemate/prompts/templates/summarizer/analyze-scene-for-next-conversation.jinja2 b/src/talemate/prompts/templates/summarizer/analyze-scene-for-next-conversation.jinja2 index 4c7e40a3..976a4d72 100644 --- a/src/talemate/prompts/templates/summarizer/analyze-scene-for-next-conversation.jinja2 +++ b/src/talemate/prompts/templates/summarizer/analyze-scene-for-next-conversation.jinja2 @@ -46,8 +46,6 @@ The information you write will be given to the other story editors to write {{ c Note that the 'Potentially relevant information' section has been filled in from a previous prompt and may not be relevant at all. {% endif %} -{% if length >= 1024 %}{{ li() }}. Carefully plan {{ character.name }}'s next action, taking your analysis into account. This must be a short list of instructions and considerations, not narrative text. 
(Aim for 2 - 3 items){% endif %} - {% if length <= 256 %}Your analysis should be 1 - 2 paragraphs long.{% elif length <= 512 %}Your analysis should be 2 - 3 paragraphs long.{% endif %} {% if length <= 512 %}Use terse, direct language. Cut all unnecessary words. Be blunt and brief like scribbles on a notepad.{% endif %} diff --git a/src/talemate/prompts/templates/summarizer/markup-context-for-tts.jinja2 b/src/talemate/prompts/templates/summarizer/markup-context-for-tts.jinja2 new file mode 100644 index 00000000..02f7085e --- /dev/null +++ b/src/talemate/prompts/templates/summarizer/markup-context-for-tts.jinja2 @@ -0,0 +1,135 @@ +<|SECTION:CHARACTERS|> +{% for character in scene.characters -%} +### {{ character.name }} +{{ character.description }} + +{% endfor %} +<|CLOSE_SECTION|> +{% include "content-classification.jinja2" %} + +<|SECTION:SCENE|> +{% include "extra-context.jinja2" -%} + +{% set scene_context = scene.context_history( + budget=1500, + min_dialogue=10, + sections=False, + keep_director=False, + chapter_labels=False + ) +-%} +{% for scene_line in scene_context -%} +{{ scene_line }} + +{% endfor %} +<|CLOSE_SECTION|> +<|SECTION:TASK|> +Your task is to break down the text into dialogue and narration, identifying the speaker for each line. + +Dialogue is any text that is between double quotes. Each piece of dialogue and narration should be on its own line, prefixed with the speaker name in square brackets. + +Use [Narrator] for ALL exposition and narrative text - this includes action descriptions, dialogue tags (like "he said"), character thoughts, and any text that is not spoken dialogue. +Use [Speaker Name] for dialogue ONLY, with normal capitalization (e.g., [John], not [JOHN]). + +Your response should contain: +1. First, an section with your reasoning about who is speaking each line +2. Then, a section with the text broken down line by line with speaker tags +<|CLOSE_SECTION|> +<|SECTION:EXAMPLES|> + +He stopped at the door and looked back at Jasmine. "I will be back soon." + + +First part is narration describing actions. The dialogue is spoken by "he" which from context refers to John. + + +[Narrator] He stopped at the door and looked back at Jasmine. +[John] I will be back soon. + + + +"Are you coming?" Mary asked. David shook his head. "Not today." + + +First quote is explicitly attributed to Mary through "Mary asked." Middle section is narration (including "Mary asked" and David's action). David speaks the second quote as indicated by the preceding action "David shook his head." + + +[Mary] Are you coming? +[Narrator] Mary asked. David shook his head. +[David] Not today. + + + +Sarah rushed into the room. "You'll never believe what happened!" + +Mike looked up from his book. "What now?" He set it aside. "Did you get the promotion?" + +"Even better!" She pulled out her phone. "I won that contest - first place!" + +"The photography one?" Mike leaned forward. "That's amazing!" + + +All non-quoted text is narration, regardless of whether it describes actions, thoughts, or dialogue attribution. Only the actual quoted speech is assigned to characters. + + +[Narrator] Sarah rushed into the room. +[Sarah] You'll never believe what happened! +[Narrator] Mike looked up from his book. +[Mike] What now? +[Narrator] He set it aside. +[Mike] Did you get the promotion? +[Sarah] Even better! +[Narrator] She pulled out her phone. +[Sarah] I won that contest - first place! +[Mike] The photography one? +[Narrator] Mike leaned forward. +[Mike] That's amazing! 
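The examples above pin down the output contract of markup-context-for-tts.jinja2: one utterance per line, prefixed with [Narrator] or [Speaker Name]. The consumer of that output is not part of this diff, but a parser for the format could look roughly like the following sketch (the regex and function name are the editor's, not Talemate's):

```python
import re

# Illustrative parser for the "[Speaker] text" line format produced by the
# markup-context-for-tts prompt. Not Talemate code; the actual consumer in the
# TTS pipeline is not shown in this diff.
LINE_RE = re.compile(r"^\[(?P<speaker>[^\]]+)\]\s*(?P<text>.+)$")


def parse_tts_markup(raw: str) -> list[tuple[str, str]]:
    """Return (speaker, text) pairs, skipping lines that don't match."""
    segments = []
    for line in raw.splitlines():
        match = LINE_RE.match(line.strip())
        if match:
            segments.append((match.group("speaker"), match.group("text").strip()))
    return segments


example = (
    "[Narrator] He stopped at the door and looked back at Jasmine.\n"
    "[John] I will be back soon."
)
assert parse_tts_markup(example) == [
    ("Narrator", "He stopped at the door and looked back at Jasmine."),
    ("John", "I will be back soon."),
]
```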
+ + + +"Listen," Tom said, his voice low. "I need to tell you something." He glanced around nervously. "But you can't tell anyone, okay?" His hands trembled as he continued. "I saw what happened that night." + + +All dialogue is Tom's - first quote has explicit attribution "Tom said," and subsequent pronouns "He" refer back to Tom. All descriptive text including "Tom said, his voice low" is narration. + + +[Tom] Listen, +[Narrator] Tom said, his voice low. +[Tom] I need to tell you something. +[Narrator] He glanced around nervously. +[Tom] But you can't tell anyone, okay? +[Narrator] His hands trembled as he continued. +[Tom] I saw what happened that night. + + + +Emma burst out laughing. "You actually believed him?" She wiped tears from her eyes. "Oh, that's priceless." + +"I don't see what's so funny," Marcus replied stiffly. He paused, then sighed. "Fine, maybe I was a bit naive." + + +All action descriptions and dialogue tags are narration. Only the quoted speech is attributed to the characters. + + +[Narrator] Emma burst out laughing. +[Emma] You actually believed him? +[Narrator] She wiped tears from her eyes. +[Emma] Oh, that's priceless. +[Marcus] I don't see what's so funny, +[Narrator] Marcus replied stiffly. He paused, then sighed. +[Marcus] Fine, maybe I was a bit naive. + +<|CLOSE_SECTION|> +<|SECTION:GUIDELINES|> +- Break down ALL text into separate lines for dialogue and narration +- Each line should start with [Speaker Name] or [Narrator] +- CRITICAL: Only actual spoken dialogue (text within quotes) should be attributed to characters +- EVERYTHING else is [Narrator]: action descriptions, dialogue tags ("he said"), thoughts, scene setting, etc. +- Use proper name casing (John, not JOHN) +- Use [Unknown] if speaker cannot be determined +- Do not include quotation marks in the dialogue lines +- Preserve all text exactly - only reorganize into the new format +<|CLOSE_SECTION|> +<|SECTION:TEXT|> +{{ text }} +{{ set_prepared_response("\n") }} \ No newline at end of file diff --git a/src/talemate/prompts/templates/world_state/analyze-history-and-follow-instructions.jinja2 b/src/talemate/prompts/templates/world_state/analyze-history-and-follow-instructions.jinja2 index e608eda1..a445e3be 100644 --- a/src/talemate/prompts/templates/world_state/analyze-history-and-follow-instructions.jinja2 +++ b/src/talemate/prompts/templates/world_state/analyze-history-and-follow-instructions.jinja2 @@ -16,8 +16,4 @@ Begin by always grounding your answer with a location, event and time, if possible. -{% if response_length < 512 %} -Your response should be 1 to 3 sentences long. Keep it concise but informative. -{% elif response_length < 1024 %} -Your response should be 2 to 4 sentences long. 
-{% endif %} \ No newline at end of file +{% include "response-length.jinja2" %} \ No newline at end of file diff --git a/src/talemate/prompts/templates/world_state/determine-content-context.jinja2 b/src/talemate/prompts/templates/world_state/determine-content-context.jinja2 index 565ece71..d75f0b4c 100644 --- a/src/talemate/prompts/templates/world_state/determine-content-context.jinja2 +++ b/src/talemate/prompts/templates/world_state/determine-content-context.jinja2 @@ -10,7 +10,7 @@ The content context should be a single short phrase classification that describe Choices: -{% for content_context in config.get('creator', {}).get('content_context',[]) -%} +{% for content_context in config.creator.content_context -%} - {{ content_context }} {% endfor -%} {% for content_context in extra_choices -%} diff --git a/src/talemate/prompts/templates/world_state/request-world-state-v2.jinja2 b/src/talemate/prompts/templates/world_state/request-world-state-v2.jinja2 index 597f2ad5..1d46e436 100644 --- a/src/talemate/prompts/templates/world_state/request-world-state-v2.jinja2 +++ b/src/talemate/prompts/templates/world_state/request-world-state-v2.jinja2 @@ -52,7 +52,7 @@ No dialogue so far {% endif -%} <|CLOSE_SECTION|> <|SECTION:TASK|> -Create a JSON object for the world state that reflects the scene progression so far. +Create {{ data_format_type() }} data for the world state that reflects the scene progression so far. The world state needs to include important concrete and material items present in the scene during line {{ final_line_number }}. The world state needs to include persons (characters) interacting during line {{ final_line_number }}. @@ -69,6 +69,8 @@ characters should have the following attributes: `emotion`, `snapshot` items should have the following attributes: `snapshot` item keys must be reader friendly, so "Item name" instead of "item_name". +Don't overthink it. + <|CLOSE_SECTION|> <|SECTION:UPDATED WORLD STATE|> {{ set_data_response(dict(characters={"name":{}}), cutoff=3) }} \ No newline at end of file diff --git a/src/talemate/prompts/templates/world_state/update-reinforcements.jinja2 b/src/talemate/prompts/templates/world_state/update-reinforcements.jinja2 index c8a84b3a..f9350593 100644 --- a/src/talemate/prompts/templates/world_state/update-reinforcements.jinja2 +++ b/src/talemate/prompts/templates/world_state/update-reinforcements.jinja2 @@ -26,6 +26,9 @@ No dialogue so far {% endif -%} <|CLOSE_SECTION|> + +{% include "writing-style-instructions.jinja2" %} + <|SECTION:TASK|> {# QUESTION #} {%- if question.strip()[-1] == '?' %} @@ -49,7 +52,7 @@ YOUR ANSWER IS CONFIDENT, MAKE CREATIVE CHOICES AS NEEDED. {% if instructions %} {{ instructions }} {% endif %} -The tone of your answer should be consistent with the tone of the story so far. +The tone of your answer must be consistent with the tone of the story so far. Question: {{ question }} (At line {{ final_line_number }} in the scene progression) {% if answer %}Previous Answer: {{ answer }} @@ -77,7 +80,7 @@ YOUR ANSWER IS CONFIDENT, MAKE CREATIVE CHOICES AS NEEDED. {% if instructions %} {{ instructions }} {% endif %} -The tone of your answer should be consistent with the tone of the story so far. +The tone of your answer must be consistent with the tone of the story so far. 
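A pattern that repeats across this diff is replacing dict-style lookups on load_config() with attribute access on a typed Config returned by get_config(), e.g. config.creator.content_context in the two determine-content-context templates and config.game.default_player_character in load.py. The gist is nested pydantic models; the sketch below reuses field names from the diff but is the editor's toy schema, not Talemate's actual config module.

```python
# Minimal sketch of why attribute access like config.creator.content_context
# works after the refactor: the config is a nested pydantic model rather than
# a plain dict. Field names mirror the diff; the structure/defaults are assumed.
from pydantic import BaseModel, Field


class CreatorConfig(BaseModel):
    content_context: list[str] = Field(default_factory=list)


class GamePlayerCharacter(BaseModel):
    name: str = ""
    color: str = "cyan"
    description: str = ""


class GameConfig(BaseModel):
    default_player_character: GamePlayerCharacter = Field(
        default_factory=GamePlayerCharacter
    )


class Config(BaseModel):
    creator: CreatorConfig = Field(default_factory=CreatorConfig)
    game: GameConfig = Field(default_factory=GameConfig)


config = Config(creator={"content_context": ["a scifi adventure"]})
print(config.creator.content_context)               # ["a scifi adventure"]
print(config.game.default_player_character.color)   # "cyan", no .get() needed
```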
{% if answer %}Previous Value: {{ answer }} {% endif-%} diff --git a/src/talemate/scene_assets.py b/src/talemate/scene_assets.py index 3ca25bbf..2ebd45b8 100644 --- a/src/talemate/scene_assets.py +++ b/src/talemate/scene_assets.py @@ -40,7 +40,7 @@ class SceneAssets: self.cover_image = None @property - def asset_directory(self): + def asset_directory(self) -> str: """ Returns the scene's asset path """ diff --git a/src/talemate/scene_message.py b/src/talemate/scene_message.py index 40132655..3883e103 100644 --- a/src/talemate/scene_message.py +++ b/src/talemate/scene_message.py @@ -36,8 +36,8 @@ class Flags(enum.IntFlag): Flags for messages """ - NONE = 0 - HIDDEN = 1 + NONE = 0x0 + HIDDEN = 0x1 @dataclass @@ -146,6 +146,8 @@ class SceneMessage: def as_format(self, format: str, **kwargs) -> str: if format == "movie_script": return self.message.rstrip("\n") + "\n" + elif format == "narrative": + return self.message.strip() return self.message def set_source(self, agent: str, function: str, **kwargs): @@ -218,6 +220,8 @@ class CharacterMessage(SceneMessage): def as_format(self, format: str, **kwargs) -> str: if format == "movie_script": return self.as_movie_script + elif format == "narrative": + return self.without_name.strip() return self.message @@ -267,6 +271,7 @@ class DirectorMessage(SceneMessage): action: str = "actor_instruction" source: str = "ai" typ = "director" + subtype: str | None = None @property def character_name(self) -> str: @@ -343,7 +348,7 @@ class DirectorMessage(SceneMessage): return "" mode = kwargs.get("mode", "direction") - if format == "movie_script": + if format in ["movie_script", "narrative"]: if mode == "internal_monologue": return f"\n({self.as_inner_monologue})\n" else: @@ -384,7 +389,7 @@ class ReinforcementMessage(SceneMessage): return f"# Internal note for {self.character_name} - {self.question}\n{self.message}" def as_format(self, format: str, **kwargs) -> str: - if format == "movie_script": + if format in ["movie_script", "narrative"]: message = str(self)[2:] return f"\n({message})\n" return f"\n{self.message}\n" @@ -452,7 +457,7 @@ class ContextInvestigationMessage(SceneMessage): return rv def as_format(self, format: str, **kwargs) -> str: - if format == "movie_script": + if format in ["movie_script", "narrative"]: message = str(self)[2:] return f"\n({message})\n".replace("*", "") return f"\n{self.message}\n".replace("*", "") diff --git a/src/talemate/server/api.py b/src/talemate/server/api.py index 66f85c88..39e4afc2 100644 --- a/src/talemate/server/api.py +++ b/src/talemate/server/api.py @@ -8,7 +8,7 @@ import websockets import talemate.instance as instance from talemate import VERSION -from talemate.config import load_config +from talemate.config import get_config, Config, commit_config, update_config from talemate.client.system_prompts import RENDER_CACHE as SYSTEM_PROMPTS_CACHE from talemate.server.websocket_server import WebsocketHandler from talemate.util.data import JSONEncoder @@ -18,8 +18,34 @@ from talemate.game.engine.nodes.registry import import_initial_node_definitions log = structlog.get_logger("talemate") +# Only one active frontend websocket is allowed at a time. +# We keep a reference to the active websocket here and reject subsequent +# connection attempts while it is still open. +_active_frontend_websocket = None + async def websocket_endpoint(websocket): + global _active_frontend_websocket + + # Reject the connection if another frontend is already connected. 
+ if _active_frontend_websocket is not None: + try: + await websocket.send( + json.dumps( + { + "type": "system", + "status": "error", + "message": "Another Talemate frontend is already connected. Only one connection is allowed.", + } + ) + ) + finally: + # Close the websocket with a normal closure code. + await websocket.close() + return + + # Mark this websocket as the active frontend connection. + _active_frontend_websocket = websocket # Create a queue for outgoing messages message_queue = asyncio.Queue() handler = WebsocketHandler(websocket, message_queue) @@ -30,6 +56,7 @@ async def websocket_endpoint(websocket): import_initial_node_definitions() async def frontend_disconnect(exc): + global _active_frontend_websocket nonlocal scene_task log.warning(f"frontend disconnected: {exc}") @@ -44,6 +71,9 @@ async def websocket_endpoint(websocket): handler.scene.continue_scene = False if scene_task: scene_task.cancel() + # Clear the active websocket reference so a new frontend can connect. + if _active_frontend_websocket is websocket: + _active_frontend_websocket = None # Create a task to send messages from the queue async def send_messages(): @@ -61,6 +91,7 @@ async def websocket_endpoint(websocket): while True: await instance.emit_clients_status() await instance.agent_ready_checks() + await commit_config() await asyncio.sleep(3) # create a task that will retriece client boostrap information @@ -130,6 +161,7 @@ async def websocket_endpoint(websocket): }, } ) + instance.emit_agents_status() if scene_data and filename: file_path = handler.handle_character_card_upload( @@ -156,9 +188,14 @@ async def websocket_endpoint(websocket): query = data.get("query", "") handler.request_scenes_list(query) elif action_type == "configure_clients": - await handler.configure_clients(data.get("clients")) + await update_config({"clients": data.get("clients")}) + await instance.instantiate_clients() + await instance.purge_clients() + await instance.emit_clients_status() + await instance.ensure_agent_llm_client() elif action_type == "configure_agents": - await handler.configure_agents(data.get("agents")) + await update_config({"agents": data.get("agents")}) + await instance.configure_agents() elif action_type == "request_client_status": await handler.request_client_status() elif action_type == "delete_message": @@ -184,7 +221,7 @@ async def websocket_endpoint(websocket): elif action_type == "request_app_config": log.info("request_app_config") - config = load_config() + config: Config = get_config().model_dump() config.update(system_prompt_defaults=SYSTEM_PROMPTS_CACHE) await message_queue.put( diff --git a/src/talemate/server/assistant.py b/src/talemate/server/assistant.py index 1abd187d..4f7860ee 100644 --- a/src/talemate/server/assistant.py +++ b/src/talemate/server/assistant.py @@ -5,6 +5,7 @@ import traceback from talemate.agents.creator.assistant import ContentGenerationContext from talemate.emit import emit from talemate.instance import get_agent +from talemate.server.websocket_plugin import Plugin log = structlog.get_logger("talemate.server.assistant") @@ -14,26 +15,12 @@ class ForkScenePayload(pydantic.BaseModel): save_name: str | None = None -class AssistantPlugin: +class AssistantPlugin(Plugin): router = "assistant" - @property - def scene(self): - return self.websocket_handler.scene - def __init__(self, websocket_handler): self.websocket_handler = websocket_handler - async def handle(self, data: dict): - log.info("assistant action", action=data.get("action")) - - fn = getattr(self, 
f"handle_{data.get('action')}", None) - - if fn is None: - return - - await fn(data) - async def handle_contextual_generate(self, data: dict): payload = ContentGenerationContext(**data) creator = get_agent("creator") @@ -61,7 +48,7 @@ class AssistantPlugin: async def handle_autocomplete(self, data: dict): data = ContentGenerationContext(**data) try: - creator = self.scene.get_helper("creator").agent + creator = get_agent("creator") context_type, context_name = data.computed_context if context_type == "dialogue": diff --git a/src/talemate/server/config.py b/src/talemate/server/config.py index d368c2a5..5a8cdda6 100644 --- a/src/talemate/server/config.py +++ b/src/talemate/server/config.py @@ -5,8 +5,9 @@ import os from talemate import VERSION from talemate.client.model_prompts import model_prompt from talemate.client.registry import CLIENT_CLASSES +from talemate.client.base import ClientBase from talemate.config import Config as AppConfigData -from talemate.config import load_config, save_config +from talemate.config import get_config, Config, update_config from talemate.emit import emit from talemate.instance import emit_clients_status, get_client @@ -60,15 +61,13 @@ class ConfigPlugin: async def handle_save(self, data): app_config_data = ConfigPayload(**data) - current_config = load_config() - - current_config.update(app_config_data.dict().get("config")) - - save_config(current_config) - - self.websocket_handler.config = current_config + await update_config(app_config_data.config.model_dump()) self.websocket_handler.queue_put( - {"type": "app_config", "data": load_config(), "version": VERSION} + { + "type": "app_config", + "data": get_config().model_dump(), + "version": VERSION, + } ) self.websocket_handler.queue_put( { @@ -82,20 +81,18 @@ class ConfigPlugin: payload = DefaultCharacterPayload(**data["data"]) - current_config = load_config() - - current_config["game"]["default_player_character"] = payload.model_dump() + config: Config = get_config() + config.game.default_player_character = payload.model_dump() log.info( "Saving default character", - character=current_config["game"]["default_player_character"], + character=config.game.default_player_character, ) - save_config(current_config) + await config.set_dirty() - self.websocket_handler.config = current_config self.websocket_handler.queue_put( - {"type": "app_config", "data": load_config(), "version": VERSION} + {"type": "app_config", "data": config.model_dump(), "version": VERSION} ) self.websocket_handler.queue_put( { @@ -204,9 +201,15 @@ class ConfigPlugin: payload = ToggleClientPayload(**data) log.info("Toggling client", name=payload.name, state=payload.state) - client = get_client(payload.name) + client: ClientBase = get_client(payload.name) - client.enabled = payload.state + current_state = client.enabled + + if current_state != payload.state: + if not payload.state: + await client.disable() + else: + await client.enable() self.websocket_handler.queue_put( { @@ -226,13 +229,13 @@ class ConfigPlugin: log.info("Removing scene from recents", path=payload.path) - current_config = load_config(as_model=True) + config: Config = get_config() - for recent_scene in list(current_config.recent_scenes.scenes): + for recent_scene in list(config.recent_scenes.scenes): if recent_scene.path == payload.path: - current_config.recent_scenes.scenes.remove(recent_scene) + config.recent_scenes.scenes.remove(recent_scene) - save_config(current_config) + await config.set_dirty() self.websocket_handler.queue_put( { @@ -245,12 +248,14 @@ class 
ConfigPlugin: ) self.websocket_handler.queue_put( - {"type": "app_config", "data": load_config(), "version": VERSION} + {"type": "app_config", "data": config.model_dump(), "version": VERSION} ) async def handle_delete_scene(self, data): payload = DeleteScenePayload(**data) + await self.handle_remove_scene_from_recents(data) + log.info("Deleting scene", path=payload.path) # remove the file @@ -269,6 +274,8 @@ class ConfigPlugin: } ) + config: Config = get_config() + self.websocket_handler.queue_put( - {"type": "app_config", "data": load_config(), "version": VERSION} + {"type": "app_config", "data": config.model_dump(), "version": VERSION} ) diff --git a/src/talemate/server/devtools.py b/src/talemate/server/devtools.py index 3bc17ffc..4c3d93bc 100644 --- a/src/talemate/server/devtools.py +++ b/src/talemate/server/devtools.py @@ -1,5 +1,7 @@ import pydantic import structlog +from talemate.instance import get_client +from talemate.client.base import ClientBase from talemate.scene.state_editor import SceneStateEditor from talemate.scene.schema import SceneState from talemate.server.websocket_plugin import Plugin @@ -39,7 +41,7 @@ class DevToolsPlugin(Plugin): async def handle_test_prompt(self, data): payload = TestPromptPayload(**data) - client = self.websocket_handler.llm_clients[payload.client_name]["client"] + client: ClientBase = get_client(payload.client_name) log.info( "Testing prompt", @@ -66,6 +68,7 @@ class DevToolsPlugin(Plugin): "client_name": payload.client_name, "kind": payload.kind, "response": response, + "reasoning": client.reasoning_response, }, } ) diff --git a/src/talemate/server/quick_settings.py b/src/talemate/server/quick_settings.py index 10784fda..0e58ac97 100644 --- a/src/talemate/server/quick_settings.py +++ b/src/talemate/server/quick_settings.py @@ -3,7 +3,7 @@ from typing import Any import pydantic import structlog -from talemate.config import save_config +from talemate.config import get_config, Config log = structlog.get_logger("talemate.server.quick_settings") @@ -35,15 +35,16 @@ class QuickSettingsPlugin: async def handle_set(self, data: dict): payload = SetQuickSettingsPayload(**data) + config: Config = get_config() if payload.setting == "auto_save": - self.scene.config["game"]["general"]["auto_save"] = payload.value + config.game.general.auto_save = payload.value elif payload.setting == "auto_progress": - self.scene.config["game"]["general"]["auto_progress"] = payload.value + config.game.general.auto_progress = payload.value else: raise NotImplementedError(f"Setting {payload.setting} not implemented.") - save_config(self.scene.config) + await config.set_dirty() self.websocket_handler.queue_put( {"type": self.router, "action": "set_done", "data": payload.model_dump()} diff --git a/src/talemate/server/run.py b/src/talemate/server/run.py index 712d7498..6f9694e4 100644 --- a/src/talemate/server/run.py +++ b/src/talemate/server/run.py @@ -1,5 +1,8 @@ print("Talemate starting.") -print("Startup may take a moment to download some dependencies, please be patient ...") +print("Startup may take a moment to initialize some dependencies, please be patient...") +import time + +t_import_start = time.perf_counter() import os import logging @@ -12,9 +15,11 @@ import sys import websockets -from talemate.server.api import websocket_endpoint +import talemate.config # noqa: F401 from talemate.version import VERSION +print("Initialization time", time.perf_counter() - t_import_start) + TALEMATE_DEBUG = os.environ.get("TALEMATE_DEBUG", "0") log_level = logging.DEBUG if 
TALEMATE_DEBUG == "1" else logging.INFO @@ -115,6 +120,7 @@ def run_server(args): :param args: command line arguments parsed by argparse """ + import talemate.client.registry import talemate.agents.custom import talemate.client.custom import talemate.agents @@ -123,9 +129,13 @@ def run_server(args): from talemate.prompts.overrides import get_template_overrides import talemate.client.system_prompts as system_prompts from talemate.emit.base import emit + import talemate.agents.tts.voice_library as voice_library # import node libraries import talemate.game.engine.nodes.load_definitions + import talemate.config + import talemate.instance + from talemate.server.api import websocket_endpoint config = talemate.config.cleanup() @@ -150,6 +160,10 @@ def run_server(args): # Get (or create) the asyncio event loop loop = asyncio.get_event_loop() + loop.run_until_complete(voice_library.require_instance()) + loop.run_until_complete(talemate.instance.instantiate_clients()) + loop.run_until_complete(talemate.instance.instantiate_agents()) + # websockets>=12 requires ``websockets.serve`` to be called from within a # running event-loop (it uses ``asyncio.get_running_loop()`` internally). # Calling it directly, before the loop is running, raises @@ -180,7 +194,7 @@ def run_server(args): frontend_task = None log.info("talemate backend started", host=args.host, port=args.port) - emit("talemate_started", data=config.model_dump()) + emit("talemate_started", data={"config": config}) try: loop.run_forever() diff --git a/src/talemate/server/websocket_plugin.py b/src/talemate/server/websocket_plugin.py index 86cf5943..60bd5ecc 100644 --- a/src/talemate/server/websocket_plugin.py +++ b/src/talemate/server/websocket_plugin.py @@ -1,5 +1,10 @@ import structlog +from typing import TYPE_CHECKING from talemate.emit import emit +import traceback + +if TYPE_CHECKING: + from talemate.tale_mate import Scene __all__ = [ "Plugin", @@ -9,10 +14,10 @@ log = structlog.get_logger("talemate.server.visual") class Plugin: - router = "router" + router: str = "router" @property - def scene(self): + def scene(self) -> "Scene | None": return self.websocket_handler.scene def __init__(self, websocket_handler): @@ -58,4 +63,14 @@ class Plugin: if fn is None: return - await fn(data) + try: + await fn(data) + except Exception as e: + action_name = data.get("action") + log.error( + "Error handling action", + action=action_name, + error=e, + traceback=traceback.format_exc(), + ) + await self.signal_operation_failed(f"Error during {action_name}: {e}") diff --git a/src/talemate/server/websocket_server.py b/src/talemate/server/websocket_server.py index f6de03a7..ab1149d1 100644 --- a/src/talemate/server/websocket_server.py +++ b/src/talemate/server/websocket_server.py @@ -6,13 +6,13 @@ import traceback import structlog import talemate.instance as instance -from talemate import Helper, Scene -from talemate.client.base import ClientBase -from talemate.client.registry import CLIENT_CLASSES +from talemate import Scene from talemate.client.system_prompts import RENDER_CACHE as SYSTEM_PROMPTS_CACHE -from talemate.config import SceneAssetUpload, load_config, save_config +from talemate.config.schema import SceneAssetUpload +from talemate.config import get_config, Config from talemate.context import ActiveScene from talemate.emit import Emission, Receiver, abort_wait_for_input, emit +import talemate.emit.async_signals as async_signals from talemate.files import list_scenes_directory from talemate.load import load_scene from talemate.scene_assets import 
Asset @@ -39,20 +39,11 @@ AGENT_INSTANCES = {} class WebsocketHandler(Receiver): def __init__(self, socket, out_queue, llm_clients=dict()): - self.agents = {typ: {"name": typ} for typ in instance.agent_types()} self.socket = socket self.waiting_for_input = False self.input = None self.scene = Scene() self.out_queue = out_queue - self.config = load_config() - - for name, agent_config in self.config.get("agents", {}).items(): - self.agents[name] = agent_config - - self.llm_clients = self.config.get("clients", llm_clients) - - instance.get_agent("memory", self.scene) self.routes = { assistant.AssistantPlugin.router: assistant.AssistantPlugin(self), @@ -77,15 +68,13 @@ class WebsocketHandler(Receiver): # to connect signals handlers to the websocket handler self.connect() - # connect LLM clients - loop = asyncio.get_event_loop() - loop.run_until_complete(self.connect_llm_clients()) - self.set_agent_routers() - # self.request_scenes_list() + instance.emit_agents_status() - # instance.emit_clients_status() + @property + def config(self) -> Config: + return get_config() def set_agent_routers(self): for agent_type, agent in instance.AGENTS.items(): @@ -110,89 +99,22 @@ class WebsocketHandler(Receiver): if hasattr(plugin, "disconnect"): plugin.disconnect() - async def connect_llm_clients(self): - client = None - - for client_name, client_config in self.llm_clients.items(): - try: - client = self.llm_clients[client_name]["client"] = instance.get_client( - **client_config - ) - except TypeError as e: - raise - log.error("Error connecting to client", client_name=client_name, e=e) - continue - - log.info( - "Configured client", - client_name=client_name, - client_type=client.client_type, - ) - - await self.connect_agents() - - async def connect_agents(self): - if not self.llm_clients: - instance.emit_agents_status() - return - - self.set_agent_routers() - - for agent_typ, agent_config in self.agents.items(): - try: - client = self.llm_clients.get(agent_config.get("client"))["client"] - except TypeError: - client = None - - if not client or not client.enabled: - # select first enabled client - try: - client = self.get_first_enabled_client() - agent_config["client"] = client.name - except IndexError: - client = None - - if not client: - agent_config["client"] = None - - if client: - log.debug("Linked agent", agent_typ=agent_typ, client=client.name) - else: - log.warning("No client available for agent", agent_typ=agent_typ) - - agent = instance.get_agent(agent_typ, client=client) - agent.client = client - await agent.apply_config(**agent_config) - - instance.emit_agents_status() - - def get_first_enabled_client(self) -> ClientBase: - """ - Will return the first enabled client available - - If no enabled clients are available, an IndexError will be raised - """ - for client in self.llm_clients.values(): - if client and client["client"].enabled: - return client["client"] - raise IndexError("No enabled clients available") + def connect(self): + super().connect() + async_signals.get("config.changed").connect(self.on_config_changed) def init_scene(self): # Setup scene scene = Scene() # Init helper agents - for agent_typ, agent_config in self.agents.items(): + for agent_typ in instance.agent_types(): + agent = instance.get_agent(agent_typ) + agent.connect(scene) + agent.scene = scene if agent_typ == "memory": - agent_config["scene"] = scene - - log.debug("init agent", agent_typ=agent_typ, agent_config=agent_config) - agent = instance.get_agent(agent_typ, **agent_config) - - # if getattr(agent, "client", None): - 
# self.llm_clients[agent.client.name] = agent.client - - scene.add_helper(Helper(agent)) + continue + log.debug("init agent", agent_typ=agent_typ) return scene @@ -211,8 +133,6 @@ class WebsocketHandler(Receiver): await asyncio.sleep(0.1) return - conversation_helper = scene.get_helper("conversation") - scene.active = True with ActiveScene(scene): @@ -220,7 +140,6 @@ class WebsocketHandler(Receiver): scene = await load_scene( scene, path_or_data, - conversation_helper.agent.client, reset=reset, ) except MemoryAgentError as e: @@ -246,145 +165,6 @@ class WebsocketHandler(Receiver): # Schedule the put coroutine to run as soon as possible loop.call_soon_threadsafe(lambda: self.out_queue.put_nowait(data)) - async def configure_clients(self, clients): - existing = set(self.llm_clients.keys()) - - self.llm_clients = {} - - # log.info("Configuring clients", clients=clients) - - for client in clients: - client.pop("status", None) - client_cls = CLIENT_CLASSES.get(client["type"]) - - # so hacky, such sad - ignore_model_names = [ - "Disabled", - "No model loaded", - "Could not connect", - "No API key set", - ] - if client.get("model") in ignore_model_names: - # if client instance exists copy model_name from it - _client = instance.get_client(client["name"]) - if _client: - client["model"] = getattr(_client, "model_name", None) - else: - client.pop("model", None) - - if not client_cls: - log.error("Client type not found", client=client) - continue - - client_config = self.llm_clients[client["name"]] = { - "name": client["name"], - "type": client["type"], - "enabled": client.get("enabled", True), - "system_prompts": client.get("system_prompts", {}), - "preset_group": client.get("preset_group", ""), - } - for dfl_key in client_cls.Meta().defaults.dict().keys(): - client_config[dfl_key] = client.get( - dfl_key, client.get("data", {}).get(dfl_key) - ) - - # find clients that have been removed - removed = existing - set(self.llm_clients.keys()) - if removed: - for agent_typ, agent_config in self.agents.items(): - if ( - "client" - in instance.agents.AGENT_CLASSES[agent_typ].config_options() - ): - agent = instance.get_agent(agent_typ) - if agent and agent.client and agent.client.name in removed: - agent_config["client"] = None - agent.client = None - instance.emit_agent_status(agent.__class__, agent) - - for name in removed: - log.debug("Destroying client", name=name) - await instance.destroy_client(name, self.config) - - self.config["clients"] = self.llm_clients - - await self.connect_llm_clients() - save_config(self.config) - - instance.sync_emit_clients_status() - - async def configure_agents(self, agents): - self.agents = {typ: {} for typ in instance.agent_types()} - - log.debug("Configuring agents") - - for agent in agents: - name = agent["name"] - - # special case for memory agent - if name == "memory" or name == "tts": - self.agents[name] = { - "name": name, - } - agent_instance = instance.get_agent(name, **self.agents[name]) - if agent_instance.has_toggle: - self.agents[name]["enabled"] = agent["enabled"] - - if getattr(agent_instance, "actions", None): - self.agents[name]["actions"] = agent.get("actions", {}) - - await agent_instance.apply_config(**self.agents[name]) - log.debug("Configured agent", name=name) - continue - - if name not in self.agents: - continue - - if isinstance(agent["client"], dict): - try: - client_name = agent["client"]["client"]["value"] - except KeyError: - continue - else: - client_name = agent["client"] - - if client_name not in self.llm_clients: - continue - - 
self.agents[name] = { - "client": self.llm_clients[client_name]["name"], - "name": name, - } - - agent_instance = instance.get_agent(name, **self.agents[name]) - - try: - agent_instance.client = self.llm_clients[client_name]["client"] - except KeyError: - self.llm_clients[client_name]["client"] = agent_instance.client = ( - instance.get_client(client_name) - ) - - if agent_instance.has_toggle: - self.agents[name]["enabled"] = agent["enabled"] - - if getattr(agent_instance, "actions", None): - self.agents[name]["actions"] = agent.get("actions", {}) - - await agent_instance.apply_config(**self.agents[name]) - - log.debug( - "Configured agent", - name=name, - client_name=self.llm_clients[client_name]["name"], - client=self.llm_clients[client_name]["client"], - ) - - self.config["agents"] = self.agents - save_config(self.config) - - instance.emit_agents_status() - def handle(self, emission: Emission): called = super().handle(emission) @@ -454,6 +234,8 @@ class WebsocketHandler(Receiver): "character": character, "action": emission.message_object.action, "direction_mode": direction_mode, + "subtype": emission.message_object.subtype, + "data": emission.data, "flags": ( int(emission.message_object.flags) if emission.message_object else 0 ), @@ -553,13 +335,15 @@ class WebsocketHandler(Receiver): } ) - def handle_config_saved(self, emission: Emission): - emission.data.update(system_prompt_defaults=SYSTEM_PROMPTS_CACHE) + async def on_config_changed(self, config: Config): + data = config.model_dump() + + data.update(system_prompt_defaults=SYSTEM_PROMPTS_CACHE) self.queue_put( { "type": "app_config", - "data": emission.data, + "data": data, } ) @@ -582,7 +366,12 @@ class WebsocketHandler(Receiver): ) def handle_client_status(self, emission: Emission): - client = instance.get_client(emission.id) + try: + client = instance.get_client(emission.id) + except KeyError: + return + + enable_api_auth = client.Meta().enable_api_auth if client else False self.queue_put( { "type": "client_status", @@ -593,7 +382,9 @@ class WebsocketHandler(Receiver): "data": emission.data, "max_token_length": client.max_token_length if client else 8192, "api_url": getattr(client, "api_url", None) if client else None, - "api_key": getattr(client, "api_key", None) if client else None, + "api_key": getattr(client, "api_key", None) + if enable_api_auth + else None, } ) diff --git a/src/talemate/server/world_state_manager/__init__.py b/src/talemate/server/world_state_manager/__init__.py index c0028450..72d99ddb 100644 --- a/src/talemate/server/world_state_manager/__init__.py +++ b/src/talemate/server/world_state_manager/__init__.py @@ -1,6 +1,7 @@ import asyncio import uuid from typing import Any, Union +import base64 import pydantic import structlog @@ -15,6 +16,7 @@ from talemate.server.websocket_plugin import Plugin from .scene_intent import SceneIntentMixin from .history import HistoryMixin +from .character import CharacterMixin log = structlog.get_logger("talemate.server.world_state_manager") @@ -184,7 +186,7 @@ class SuggestionPayload(pydantic.BaseModel): proposal_uid: str | None = None -class WorldStateManagerPlugin(SceneIntentMixin, HistoryMixin, Plugin): +class WorldStateManagerPlugin(SceneIntentMixin, HistoryMixin, CharacterMixin, Plugin): router = "world_state_manager" @property @@ -988,11 +990,23 @@ class WorldStateManagerPlugin(SceneIntentMixin, HistoryMixin, Plugin): payload = ExportOptions(**data) scene_data = await export(self.scene, payload) + # Handle different export formats + if isinstance(scene_data, bytes): + # 
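# Illustrative sketch (not part of this changeset): ZIP exports are raw bytes
# and cannot be placed into a JSON websocket frame directly, which is why the
# scene_exported handler base64-encodes binary payloads before queueing the
# message. The round trip in plain Python (the byte string is a made-up
# placeholder):

import base64

zip_bytes = b"PK\x03\x04..."                     # whatever export() returned
encoded = base64.b64encode(zip_bytes).decode()   # JSON-safe ASCII string
assert base64.b64decode(encoded) == zip_bytes    # client side: back to bytes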
ZIP export - encode as base64 for websocket transmission + exported_data = base64.b64encode(scene_data).decode() + file_extension = "zip" + else: + # Legacy JSON export - already base64 encoded + exported_data = scene_data + file_extension = "json" + self.websocket_handler.queue_put( { "type": "world_state_manager", "action": "scene_exported", - "data": scene_data, + "data": exported_data, + "format": payload.format.value, + "file_extension": file_extension, } ) await self.signal_operation_done() diff --git a/src/talemate/server/world_state_manager/character.py b/src/talemate/server/world_state_manager/character.py new file mode 100644 index 00000000..96485dea --- /dev/null +++ b/src/talemate/server/world_state_manager/character.py @@ -0,0 +1,62 @@ +import pydantic +import structlog + +log = structlog.get_logger("talemate.server.world_state_manager.character") + + +class UpdateCharacterVoicePayload(pydantic.BaseModel): + """Payload for updating a character voice.""" + + name: str + voice_id: str | None = None + + +class CharacterMixin: + """Mixin adding websocket handlers for character voice assignment.""" + + async def handle_update_character_voice(self, data: dict): + """Assign or clear a voice for a character. + + Expected payload + ----------------- + { + "type": "world_state_manager", + "action": "update_character_voice", + "name": "", + "voice_id": "" | null + } + """ + try: + payload = UpdateCharacterVoicePayload(**data) + except pydantic.ValidationError as e: + log.error("Invalid payload for update_character_voice", error=e) + await self.signal_operation_failed(str(e)) + return + + # Persist change via world state manager helper + try: + await self.world_state_manager.update_character_voice( + payload.name, payload.voice_id + ) + except Exception as e: + log.error( + "Failed to update character voice", character=payload.name, error=e + ) + await self.signal_operation_failed("Failed to update character voice") + return + + # Notify frontend + self.websocket_handler.queue_put( + { + "type": "world_state_manager", + "action": "character_voice_updated", + "data": payload.model_dump(), + } + ) + + # Re-emit updated character details so UI stays in sync + if hasattr(self, "handle_get_character_details"): + await self.handle_get_character_details({"name": payload.name}) + + await self.signal_operation_done() + self.scene.emit_status() diff --git a/src/talemate/tale_mate.py b/src/talemate/tale_mate.py index 6eede35b..6ce43d49 100644 --- a/src/talemate/tale_mate.py +++ b/src/talemate/tale_mate.py @@ -1,11 +1,10 @@ import asyncio import json import os -import random import re import traceback import uuid -from typing import Dict, Generator, List, Callable +from typing import Generator, Callable import isodate import structlog @@ -20,10 +19,9 @@ import talemate.save as save import talemate.util as util import talemate.world_state.templates as world_state_templates from talemate.agents.context import active_agent -from talemate.config import load_config +from talemate.config import Config, get_config from talemate.context import interaction -from talemate.emit import Emitter, emit, wait_for_input -from talemate.emit.signals import handlers +from talemate.emit import Emitter, emit from talemate.exceptions import ( ExitScene, LLMAccuracyError, @@ -34,7 +32,6 @@ from talemate.exceptions import ( GenerationCancelled, ) from talemate.game.state import GameState -from talemate.instance import get_agent from talemate.scene_assets import SceneAssets from talemate.scene_message import ( 
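# Illustrative sketch (not part of this changeset): the CharacterMixin defined
# above adds an "update_character_voice" action to the world_state_manager
# websocket router. A request a client could send, and the reply it can expect,
# following the handler's docstring; the character name and voice id below are
# hypothetical placeholders in the documented "provider:voice_id" form:

request = {
    "type": "world_state_manager",
    "action": "update_character_voice",
    "name": "Kaira",
    "voice_id": "piper:example-voice",   # or None to clear the assignment
}

expected_reply = {
    "type": "world_state_manager",
    "action": "character_voice_updated",
    "data": {"name": "Kaira", "voice_id": "piper:example-voice"},
}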
CharacterMessage, @@ -54,569 +51,29 @@ from talemate.game.engine.nodes.layout import load_graph from talemate.game.engine.nodes.packaging import initialize_packages from talemate.scene.intent import SceneIntent from talemate.history import emit_archive_add, ArchiveEntry +from talemate.character import Character +from talemate.agents.tts.schema import VoiceLibrary +from talemate.instance import get_agent __all__ = [ "Character", "Actor", "Scene", - "Helper", "Player", ] log = structlog.get_logger("talemate") -async_signals.register("scene_init") -async_signals.register("game_loop_start") -async_signals.register("game_loop") -async_signals.register("game_loop_actor_iter") -async_signals.register("game_loop_new_message") -async_signals.register("player_turn_start") - - -class Character: - """ - A character for the AI to roleplay, with a name, description, and greeting text. - """ - - def __init__( - self, - name: str, - description: str = "", - greeting_text: str = "", - gender: str = "female", - color: str = "cyan", - example_dialogue: List[str] = [], - is_player: bool = False, - history_events: list[dict] = None, - base_attributes: dict = None, - details: dict[str, str] = None, - **kwargs, - ): - self.name = name - self.description = description - self.greeting_text = greeting_text - self.example_dialogue = example_dialogue - self.gender = gender - self.color = color - self.is_player = is_player - self.history_events = history_events or [] - self.base_attributes = base_attributes or {} - self.details = details or {} - self.cover_image = kwargs.get("cover_image") - self.dialogue_instructions = kwargs.get("dialogue_instructions") - - self.memory_dirty = False - - @property - def persona(self): - return self.description - - @property - def serialize(self) -> Dict[str, str]: - return { - "name": self.name, - "description": self.description, - "greeting_text": self.greeting_text, - "base_attributes": self.base_attributes, - "details": self.details, - "gender": self.gender, - "color": self.color, - "example_dialogue": self.example_dialogue, - "history_events": self.history_events, - "is_player": self.is_player, - "cover_image": self.cover_image, - "dialogue_instructions": self.dialogue_instructions, - } - - @property - def sheet(self) -> str: - sheet = self.base_attributes or { - "name": self.name, - "gender": self.gender, - "description": self.description, - } - - sheet_list = [] - - for key, value in sheet.items(): - sheet_list.append(f"{key}: {value}") - - return "\n".join(sheet_list) - - @property - def random_dialogue_example(self): - """ - Get a random example dialogue line for this character. - - Returns: - str: The random example dialogue line. 
- """ - if not self.example_dialogue: - return "" - - return random.choice(self.example_dialogue) - - def __repr__(self): - return f"Character: {self.name}" - - def set_color(self, color: str = None): - # if no color provided, chose a random color - - if color is None: - color = util.random_color() - self.color = color - - def set_cover_image(self, asset_id: str, initial_only: bool = False): - if self.cover_image and initial_only: - return - - self.cover_image = asset_id - - def sheet_filtered(self, *exclude): - sheet = self.base_attributes or { - "name": self.name, - "gender": self.gender, - "description": self.description, - } - - sheet_list = [] - - for key, value in sheet.items(): - if key not in exclude: - sheet_list.append(f"{key}: {value}") - - return "\n".join(sheet_list) - - def random_dialogue_examples( - self, - scene: "Scene", - num: int = 3, - strip_name: bool = False, - max_backlog: int = 250, - max_length: int = 192, - ) -> list[str]: - """ - Get multiple random example dialogue lines for this character. - - Will return up to `num` examples and not have any duplicates. - """ - - history_examples = self._random_dialogue_examples_from_history( - scene, num, max_backlog - ) - - if len(history_examples) < num: - random_examples = self._random_dialogue_examples( - num - len(history_examples), strip_name - ) - - for example in random_examples: - history_examples.append(example) - - # ensure sane example lengths - - history_examples = [ - util.strip_partial_sentences(example[:max_length]) - for example in history_examples - ] - - log.debug("random_dialogue_examples", history_examples=history_examples) - return history_examples - - def _random_dialogue_examples_from_history( - self, scene: "Scene", num: int = 3, max_backlog: int = 250 - ) -> list[str]: - """ - Get multiple random example dialogue lines for this character from the scene's history. - - Will checks the last `max_backlog` messages in the scene's history and returns up to `num` examples. - """ - - history = scene.history[-max_backlog:] - - examples = [] - - for message in history: - if not isinstance(message, CharacterMessage): - continue - - if message.character_name != self.name: - continue - - examples.append(message.without_name.strip()) - - if not examples: - return [] - - return random.sample(examples, min(num, len(examples))) - - def _random_dialogue_examples( - self, num: int = 3, strip_name: bool = False - ) -> list[str]: - """ - Get multiple random example dialogue lines for this character. - - Will return up to `num` examples and not have any duplicates. - """ - - if not self.example_dialogue: - return [] - - # create copy of example_dialogue so we dont modify the original - - examples = self.example_dialogue.copy() - - # shuffle the examples so we get a random order - - random.shuffle(examples) - - # now pop examples until we have `num` examples or we run out of examples - - if strip_name: - examples = [example.split(":", 1)[1].strip() for example in examples] - - return [examples.pop() for _ in range(min(num, len(examples)))] - - def filtered_sheet(self, attributes: list[str]): - """ - Same as sheet but only returns the attributes in the given list - - Attributes that dont exist will be ignored - """ - - sheet_list = [] - - for key, value in self.base_attributes.items(): - if key.lower() not in attributes: - continue - sheet_list.append(f"{key}: {value}") - - return "\n".join(sheet_list) - - def rename(self, new_name: str): - """ - Rename the character. 
- - Args: - new_name (str): The new name of the character. - - Returns: - None - """ - - orig_name = self.name - self.name = new_name - - if orig_name.lower() == "you": - # we dont want to replace "you" in the description - # or anywhere else so we can just return here - return - - if self.description: - self.description = self.description.replace(f"{orig_name}", self.name) - for k, v in self.base_attributes.items(): - if isinstance(v, str): - self.base_attributes[k] = v.replace(f"{orig_name}", self.name) - for i, v in list(self.details.items()): - if isinstance(v, str): - self.details[i] = v.replace(f"{orig_name}", self.name) - self.memory_dirty = True - - def introduce_main_character(self, character): - """ - Makes this character aware of the main character's name in the scene. - - This will replace all occurrences of {{user}} (case-insensitive) in all of the character's properties - with the main character's name. - """ - - properties = ["description", "greeting_text"] - - pattern = re.compile(re.escape("{{user}}"), re.IGNORECASE) - - for prop in properties: - prop_value = getattr(self, prop) - - try: - updated_prop_value = pattern.sub(character.name, prop_value) - except Exception as e: - log.error( - "introduce_main_character", - error=e, - traceback=traceback.format_exc(), - ) - updated_prop_value = prop_value - setattr(self, prop, updated_prop_value) - - # also replace in all example dialogue - - for i, dialogue in enumerate(self.example_dialogue): - self.example_dialogue[i] = pattern.sub(character.name, dialogue) - - def update(self, **kwargs): - """ - Update character properties with given key-value pairs. - """ - - for key, value in kwargs.items(): - setattr(self, key, value) - - self.memory_dirty = True - - async def purge_from_memory(self): - """ - Purges this character's details from memory. - """ - memory_agent = get_agent("memory") - await memory_agent.delete({"character": self.name}) - log.info("purged character from memory", character=self.name) - - async def commit_to_memory(self, memory_agent): - """ - Commits this character's details to the memory agent. 
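# Illustrative sketch (not part of this changeset): commit_to_memory flattens a
# character into a list of records for the memory agent's vector store. Each
# record carries the text to embed, a stable id, and filterable metadata, as
# built in the code below; the concrete values here are made up:

example_memory_item = {
    "text": "Kaira's eye color: green",   # f"{name}'s {attr}: {value}"
    "id": "Kaira.eye color",              # f"{name}.{attr}"
    "meta": {
        "character": "Kaira",
        "attr": "eye color",
        "typ": "base_attribute",
    },
}
# the accumulated list is then passed to memory_agent.add_many(items)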
(vectordb) - """ - - items = [] - - if not self.base_attributes or "description" not in self.base_attributes: - if not self.description: - self.description = "" - description_chunks = [ - chunk.strip() for chunk in self.description.split("\n") if chunk.strip() - ] - - for idx in range(len(description_chunks)): - chunk = description_chunks[idx] - - items.append( - { - "text": f"{self.name}: {chunk}", - "id": f"{self.name}.description.{idx}", - "meta": { - "character": self.name, - "attr": "description", - "typ": "base_attribute", - }, - } - ) - - seen_attributes = set() - - for attr, value in self.base_attributes.items(): - if attr.startswith("_"): - continue - - if attr.lower() in ["name", "scenario_context", "_prompt", "_template"]: - continue - - seen_attributes.add(attr) - - items.append( - { - "text": f"{self.name}'s {attr}: {value}", - "id": f"{self.name}.{attr}", - "meta": { - "character": self.name, - "attr": attr, - "typ": "base_attribute", - }, - } - ) - - for key, detail in self.details.items(): - # if colliding with attribute name, prefix with detail_ - if key in seen_attributes: - key = f"detail_{key}" - - items.append( - { - "text": f"{self.name} - {key}: {detail}", - "id": f"{self.name}.{key}", - "meta": { - "character": self.name, - "typ": "details", - "detail": key, - }, - } - ) - - # await memory_agent.add(detail, None) - - for history_event in self.history_events: - if not history_event or not history_event["summary"]: - continue - - items.append( - { - "text": history_event["summary"], - "meta": { - "character": self.name, - "typ": "history_event", - }, - } - ) - - # await memory_agent.add(history_event["summary"], None) - - if items: - await memory_agent.add_many(items) - - self.memory_dirty = False - - async def commit_single_attribute_to_memory( - self, memory_agent, attribute: str, value: str - ): - """ - Commits a single attribute to memory - """ - - items = [] - - # remove old attribute if it exists - - await memory_agent.delete( - {"character": self.name, "typ": "base_attribute", "attr": attribute} - ) - - self.base_attributes[attribute] = value - - items.append( - { - "text": f"{self.name}'s {attribute}: {self.base_attributes[attribute]}", - "id": f"{self.name}.{attribute}", - "meta": { - "character": self.name, - "attr": attribute, - "typ": "base_attribute", - }, - } - ) - - log.debug("commit_single_attribute_to_memory", items=items) - - await memory_agent.add_many(items) - - async def commit_single_detail_to_memory( - self, memory_agent, detail: str, value: str - ): - """ - Commits a single detail to memory - """ - - items = [] - - # remove old detail if it exists - - await memory_agent.delete( - {"character": self.name, "typ": "details", "detail": detail} - ) - - self.details[detail] = value - - items.append( - { - "text": f"{self.name} - {detail}: {value}", - "id": f"{self.name}.{detail}", - "meta": { - "character": self.name, - "typ": "details", - "detail": detail, - }, - } - ) - - log.debug("commit_single_detail_to_memory", items=items) - - await memory_agent.add_many(items) - - async def set_detail(self, name: str, value): - memory_agent = get_agent("memory") - if not value: - try: - del self.details[name] - await memory_agent.delete( - {"character": self.name, "typ": "details", "detail": name} - ) - except KeyError: - pass - else: - self.details[name] = value - await self.commit_single_detail_to_memory(memory_agent, name, value) - - def set_detail_defer(self, name: str, value): - self.details[name] = value - self.memory_dirty = True - - def 
get_detail(self, name: str): - return self.details.get(name) - - async def set_base_attribute(self, name: str, value): - memory_agent = get_agent("memory") - - if not value: - try: - del self.base_attributes[name] - await memory_agent.delete( - {"character": self.name, "typ": "base_attribute", "attr": name} - ) - except KeyError: - pass - else: - self.base_attributes[name] = value - await self.commit_single_attribute_to_memory(memory_agent, name, value) - - def set_base_attribute_defer(self, name: str, value): - self.base_attributes[name] = value - self.memory_dirty = True - - def get_base_attribute(self, name: str): - return self.base_attributes.get(name) - - async def set_description(self, description: str): - memory_agent = get_agent("memory") - self.description = description - - items = [] - - await memory_agent.delete( - {"character": self.name, "typ": "base_attribute", "attr": "description"} - ) - - description_chunks = [ - chunk.strip() for chunk in self.description.split("\n") if chunk.strip() - ] - - for idx in range(len(description_chunks)): - chunk = description_chunks[idx] - - items.append( - { - "text": f"{self.name}: {chunk}", - "id": f"{self.name}.description.{idx}", - "meta": { - "character": self.name, - "attr": "description", - "typ": "base_attribute", - }, - } - ) - - await memory_agent.add_many(items) - - -class Helper: - """ - Wrapper for non-conversational agents, such as summarization agents - """ - - def __init__(self, agent: agents.Agent, **options): - self.agent = agent - self.options = options - - @property - def agent_type(self): - return self.agent.agent_type +async_signals.register( + "scene_init", + "scene_init_after", + "game_loop_start", + "game_loop", + "game_loop_actor_iter", + "game_loop_new_message", + "player_turn_start", +) class Actor: @@ -670,6 +127,7 @@ class Scene(Emitter): self.inactive_characters = {} self.layered_history = [] self.assets = SceneAssets(scene=self) + self.voice_library: VoiceLibrary = VoiceLibrary() self.description = "" self.intro = "" self.outline = "" @@ -695,8 +153,6 @@ class Scene(Emitter): # happen as save-as and not overwrite the original self.immutable_save = False - self.config = load_config() - self.context = "" self.commands = commands.Manager(self) self.environment = "scene" @@ -735,12 +191,17 @@ class Scene(Emitter): "game_loop_new_message": async_signals.get("game_loop_new_message"), "scene_init": async_signals.get("scene_init"), "player_turn_start": async_signals.get("player_turn_start"), + "config.changed": async_signals.get("config.changed"), } self.setup_emitter(scene=self) self.world_state.emit() + @property + def config(self) -> Config: + return get_config() + @property def main_character(self) -> Actor | None: try: @@ -761,6 +222,22 @@ class Scene(Emitter): for actor in self.actors: yield actor.character + @property + def all_characters(self) -> Generator[Character, None, None]: + """ + Returns all characters in the scene, including inactive characters + """ + + for actor in self.actors: + yield actor.character + + for character in self.inactive_characters.values(): + yield character + + @property + def all_character_names(self): + return [character.name for character in self.all_characters] + @property def npcs(self): for actor in self.actors: @@ -853,11 +330,11 @@ class Scene(Emitter): @property def auto_save(self): - return self.config.get("game", {}).get("general", {}).get("auto_save", True) + return self.config.game.general.auto_save @property def auto_progress(self): - return self.config.get("game", 
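# Illustrative sketch (not part of this changeset): with configuration exposed
# as a pydantic model (get_config() -> Config), nested dict lookups like the
# one being replaced here collapse into plain attribute access, e.g.
# config.game.general.auto_save. A minimal sketch of the shape that implies;
# only auto_save, auto_progress and max_backscroll (with the old fallback
# defaults) are taken from this diff, the rest is assumed:

import pydantic


class GeneralGameSettings(pydantic.BaseModel):
    auto_save: bool = True
    auto_progress: bool = True
    max_backscroll: int = 512


class GameSettings(pydantic.BaseModel):
    general: GeneralGameSettings = pydantic.Field(default_factory=GeneralGameSettings)


class Config(pydantic.BaseModel):
    game: GameSettings = pydantic.Field(default_factory=GameSettings)


config = Config()
assert config.game.general.auto_save is True
assert config.game.general.max_backscroll == 512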
{}).get("general", {}).get("auto_progress", True) + return self.config.game.general.auto_progress @property def world_state_manager(self) -> WorldStateManager: @@ -865,7 +342,7 @@ class Scene(Emitter): @property def conversation_format(self): - return self.get_helper("conversation").agent.conversation_format + return get_agent("conversation").conversation_format @property def writing_style(self) -> world_state_templates.WritingStyle | None: @@ -880,7 +357,7 @@ class Scene(Emitter): @property def max_backscroll(self): - return self.config.get("game", {}).get("general", {}).get("max_backscroll", 512) + return self.config.game.general.max_backscroll @property def nodes_filename(self): @@ -934,19 +411,18 @@ class Scene(Emitter): """ connect scenes to signals """ - handlers["config_saved"].connect(self.on_config_saved) + self.signals["config.changed"].connect(self.on_config_changed) def disconnect(self): """ disconnect scenes from signals """ - handlers["config_saved"].disconnect(self.on_config_saved) + self.signals["config.changed"].disconnect(self.on_config_changed) def __del__(self): self.disconnect() - def on_config_saved(self, event): - self.config = event.data + async def on_config_changed(self, event): self.emit_status() def recent_history(self, max_tokens: int = 2048): @@ -1341,9 +817,8 @@ class Scene(Emitter): if actor.character.base_attributes.get("scenario overview"): self.description = actor.character.base_attributes["scenario overview"] - memory_helper = self.get_helper("memory") - if memory_helper: - await actor.character.commit_to_memory(memory_helper.agent) + memory = get_agent("memory") + await actor.character.commit_to_memory(memory) async def remove_character( self, character: Character, purge_from_memory: bool = True @@ -1376,22 +851,6 @@ class Scene(Emitter): actor.character = None - def add_helper(self, helper: Helper): - """ - Add a helper to the scene - """ - self.helpers.append(helper) - helper.agent.connect(self) - - def get_helper(self, agent_type): - """ - Returns the helper of the given agent class if it exists - """ - - for helper in self.helpers: - if helper.agent_type == agent_type: - return helper - def get_character(self, character_name: str, partial: bool = False): """ Returns the character with the given name if it exists @@ -1496,7 +955,7 @@ class Scene(Emitter): except AttributeError: intro = self.intro - editor = self.get_helper("editor").agent + editor = get_agent("editor") if editor.fix_exposition_enabled and editor.fix_exposition_narrator: if '"' not in intro and "*" not in intro: @@ -1544,10 +1003,8 @@ class Scene(Emitter): show_hidden = kwargs.get("show_hidden", False) conversation_format = self.conversation_format - actor_direction_mode = self.get_helper("director").agent.actor_direction_mode - layered_history_enabled = self.get_helper( - "summarizer" - ).agent.layered_history_enabled + actor_direction_mode = get_agent("director").actor_direction_mode + layered_history_enabled = get_agent("summarizer").layered_history_enabled include_reinforcements = kwargs.get("include_reinforcements", True) assured_dialogue_num = kwargs.get("assured_dialogue_num", 5) @@ -1808,7 +1265,7 @@ class Scene(Emitter): "inactive_characters": list(self.inactive_characters.keys()), "context": self.context, "assets": self.assets.dict(), - "characters": [actor.character.serialize for actor in self.actors], + "characters": [actor.character.model_dump() for actor in self.actors], "character_colors": { character.name: character.color for character in self.get_characters() @@ 
-2014,7 +1471,7 @@ class Scene(Emitter): self.active_pins = list(_active_pins.pins.values()) async def ensure_memory_db(self): - memory = self.get_helper("memory").agent + memory = get_agent("memory") if not memory.db: await memory.set_db() @@ -2196,11 +1653,10 @@ class Scene(Emitter): self.filename = copy_name if not self.name and not auto: - self.name = await wait_for_input("Enter scenario name: ") - self.filename = "base.json" + raise TalemateError("Scene has no name, cannot save") elif not self.filename and not auto: - self.filename = await wait_for_input("Enter save name: ") + self.filename = str(uuid.uuid4())[:10] self.filename = self.filename.replace(" ", "-").lower() + ".json" if self.filename and not self.filename.endswith(".json"): @@ -2212,7 +1668,7 @@ class Scene(Emitter): if save_as: self.immutable_save = False - memory_agent = self.get_helper("memory").agent + memory_agent = get_agent("memory") memory_agent.close_db(self) self.memory_id = str(uuid.uuid4())[:10] await self.commit_to_memory() @@ -2264,14 +1720,14 @@ class Scene(Emitter): async def add_to_recent_scenes(self): log.debug("add_to_recent_scenes", filename=self.filename) - config = load_config(as_model=True) + config = get_config() config.recent_scenes.push(self) - config.save() + await config.set_dirty() async def commit_to_memory(self): # will recommit scene to long term memory - memory = self.get_helper("memory").agent + memory = get_agent("memory") memory.drop_db() await memory.set_db() @@ -2308,7 +1764,7 @@ class Scene(Emitter): self.actors = [] async def reset_memory(self): - memory_agent = self.get_helper("memory").agent + memory_agent = get_agent("memory") memory_agent.close_db(self) self.memory_id = str(uuid.uuid4())[:10] await self.commit_to_memory() @@ -2334,7 +1790,7 @@ class Scene(Emitter): await load_scene( self, os.path.join(self.save_dir, self.restore_from), - self.get_helper("conversation").agent.client, + get_agent("conversation").client, ) await self.reset_memory() @@ -2371,9 +1827,9 @@ class Scene(Emitter): "environment": scene.environment, "archived_history": scene.archived_history, "layered_history": scene.layered_history, - "characters": [actor.character.serialize for actor in scene.actors], + "characters": [actor.character.model_dump() for actor in scene.actors], "inactive_characters": { - name: character.serialize + name: character.model_dump() for name, character in scene.inactive_characters.items() }, "context": scene.context, @@ -2406,3 +1862,6 @@ class Scene(Emitter): if self.cancel_requested: self.cancel_requested = False raise GenerationCancelled("action cancelled") + + +Character.model_rebuild() diff --git a/src/talemate/util/dialogue.py b/src/talemate/util/dialogue.py index bed08833..8219160e 100644 --- a/src/talemate/util/dialogue.py +++ b/src/talemate/util/dialogue.py @@ -1,5 +1,8 @@ import re import structlog +import pydantic +from typing import Literal +from nltk.tokenize import sent_tokenize __all__ = [ "handle_endofline_special_delimiter", @@ -14,11 +17,21 @@ __all__ = [ "ensure_dialog_line_format", "clean_uneven_markers", "split_anchor_text", + "separate_dialogue_from_exposition", + "parse_tts_markup", + "separate_sentences", + "DialogueChunk", ] log = structlog.get_logger("talemate.util.dialogue") +class DialogueChunk(pydantic.BaseModel): + text: str + type: Literal["dialogue", "exposition"] + speaker: str | None = None + + def handle_endofline_special_delimiter(content: str) -> str: # END-OF-LINE is a custom delimter that can exist 0 to n times # it should split total_result 
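# Illustrative sketch (not part of this changeset): Character now lives in
# talemate.character and, judging by the switch from .serialize to
# .model_dump() and the trailing Character.model_rebuild() call, it appears to
# be a pydantic model (model_rebuild presumably resolves forward references
# such as the voice-related annotations). A toy model showing why that removes
# the hand-written serialize property; only name/description/is_player are
# taken from the old class, everything else about the real model is unknown:

import pydantic


class ToyCharacter(pydantic.BaseModel):
    name: str
    description: str = ""
    is_player: bool = False


print(ToyCharacter(name="Kaira").model_dump())
# -> {'name': 'Kaira', 'description': '', 'is_player': False}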
on the last one, take the left side @@ -428,3 +441,132 @@ def split_anchor_text(text: str, anchor_length: int = 10) -> tuple[str, str]: anchor = " ".join(words[mid_point:]) return non_anchor, anchor + + +def separate_sentences(text: str) -> str: + """ + Separates a text into sentences and joins them with double newlines. + """ + return "\n\n".join(sent_tokenize(text)) + + +def parse_tts_markup(markup: str) -> list[DialogueChunk]: + """ + Parses TTS markup in the format: + [Speaker Name] text content + [Narrator] narrative text + + Returns a list of DialogueChunk objects. + """ + if not markup: + return [] + + chunks = [] + lines = markup.strip().split("\n") + + for line in lines: + line = line.strip() + if not line: + continue + + # Match pattern [Speaker Name] followed by text + match = re.match(r"^\[([^\]]+)\]\s*(.*)", line) + if match: + speaker_name = match.group(1).strip() + text_content = match.group(2).strip() + + if not text_content: + continue + + # Determine if this is dialogue or exposition + if speaker_name.lower() == "narrator": + chunk_type = "exposition" + speaker = None + else: + chunk_type = "dialogue" + speaker = speaker_name + + chunks.append( + DialogueChunk(text=text_content, type=chunk_type, speaker=speaker) + ) + else: + # Line doesn't match expected format, treat as exposition + if line: + chunks.append(DialogueChunk(text=line, type="exposition", speaker=None)) + + return chunks + + +def separate_dialogue_from_exposition(text: str) -> list[DialogueChunk]: + """ + Separates dialogue from exposition in a text. + + Returns a list of DialogueChunk objects, where each chunk is either dialogue or exposition. + Dialogue is defined as any text between double quotes (""). + Everything else is exposition (regardless of asterisks or other markers). + + Speakers may be identified by curly braces, e.g. {John} - if so they are always at the beginning of a dialogue segment. + + For example: + + "{John}I am leaving now." - speaker is John + "I am leaving now." - speaker is narrator (or not identified`) + """ + if not text: + return [] + + chunks = [] + current_segment = "" + in_dialogue = False + prev_speaker: str | None = None + + for i, char in enumerate(text): + if char == '"': + # Quote marks are transition points + if in_dialogue: + # We're ending a dialogue segment - include the closing quote + current_segment += char + + # Detect speaker names of the form "{Name}" that immediately follow the opening quote. + # Example: "{John}Hello." 
-> speaker="John", text='"Hello."' + + speaker = None + speaker_match = re.match(r'^"\{([^}]+)\}', current_segment) + if speaker_match: + # speaker_match.group(1) contains the name without braces + speaker = speaker_match.group(1) + # Remove the curly-braced name but keep the leading quote + current_segment = current_segment.replace(f"{{{speaker}}}", "", 1) + + prev_speaker = speaker + elif prev_speaker: + speaker = prev_speaker + + chunks.append( + DialogueChunk( + text=current_segment, + type="dialogue", + speaker=speaker, + ) + ) + current_segment = "" + in_dialogue = False + else: + # We're starting a dialogue segment + if current_segment: + # Save any exposition before the dialogue + chunks.append( + DialogueChunk(text=current_segment, type="exposition") + ) + current_segment = char + in_dialogue = True + else: + # Regular character - add to current segment + current_segment += char + + # Don't forget the last segment if it exists + if current_segment: + chunk_type = "dialogue" if in_dialogue else "exposition" + chunks.append(DialogueChunk(text=current_segment, type=chunk_type)) + + return chunks diff --git a/src/talemate/ux/schema.py b/src/talemate/ux/schema.py index c4cba71a..9835e4ef 100644 --- a/src/talemate/ux/schema.py +++ b/src/talemate/ux/schema.py @@ -2,11 +2,42 @@ import pydantic __all__ = [ "Note", + "Field", + "Column", ] +class Action(pydantic.BaseModel): + action_name: str + arguments: list[str | int | float | bool] = pydantic.Field(default_factory=list) + label: str = None + icon: str = None + + class Note(pydantic.BaseModel): text: str title: str = None color: str = "muted" icon: str = "mdi-information-outline" + + actions: list[Action] = pydantic.Field(default_factory=list) + + +class Field(pydantic.BaseModel): + name: str + label: str + type: str + value: int | float | str | bool | list | None = None + choices: list[dict[str, str | int | float | bool]] = pydantic.Field( + default_factory=list + ) + max: int | float | None = None + min: int | float | None = None + step: int | float | None = None + description: str = "" + + required: bool = False + + +class Column(Field): + pass diff --git a/src/talemate/version.py b/src/talemate/version.py index b2053a32..5b77e36e 100644 --- a/src/talemate/version.py +++ b/src/talemate/version.py @@ -1,3 +1,3 @@ __all__ = ["VERSION"] -VERSION = "0.31.0" +VERSION = "0.32.0" diff --git a/src/talemate/world_state/manager.py b/src/talemate/world_state/manager.py index b5f753a6..7f680735 100644 --- a/src/talemate/world_state/manager.py +++ b/src/talemate/world_state/manager.py @@ -4,7 +4,8 @@ import pydantic import structlog import talemate.world_state.templates as world_state_templates -from talemate.character import activate_character, deactivate_character +from talemate.agents.tts.util import get_voice +from talemate.character import activate_character, deactivate_character, set_voice from talemate.instance import get_agent from talemate.emit import emit from talemate.world_state import ( @@ -13,6 +14,7 @@ from talemate.world_state import ( Reinforcement, Suggestion, ) +from talemate.agents.tts.schema import Voice if TYPE_CHECKING: from talemate.tale_mate import Character, Scene @@ -52,6 +54,7 @@ class CharacterDetails(pydantic.BaseModel): actor: CharacterActor = pydantic.Field(default_factory=CharacterActor) cover_image: Union[str, None] = None color: Union[str, None] = None + voice: Union[Voice, None] = None class World(pydantic.BaseModel): @@ -163,6 +166,7 @@ class WorldStateManager: ), cover_image=character.cover_image, 
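# Illustrative usage sketch (not part of this changeset) for the two new
# helpers in talemate/util/dialogue.py, assuming the talemate package and its
# nltk dependency are importable; the sample strings and names are made up:

from talemate.util.dialogue import (
    parse_tts_markup,
    separate_dialogue_from_exposition,
)

chunks = parse_tts_markup("[Kaira] We made it.\n[Narrator] The airlock opens.")
assert chunks[0].type == "dialogue" and chunks[0].speaker == "Kaira"
assert chunks[1].type == "exposition" and chunks[1].speaker is None

chunks = separate_dialogue_from_exposition('"{John}I am leaving now." He stood up.')
assert chunks[0].type == "dialogue" and chunks[0].speaker == "John"
assert chunks[0].text == '"I am leaving now."'   # curly-brace tag stripped, quotes kept
assert chunks[1].type == "exposition"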
color=character.color, + voice=character.voice, ) # sorted base attributes @@ -317,6 +321,29 @@ class WorldStateManager: character.set_color(color) + async def update_character_voice(self, character_name: str, voice_id: str | None): + """Assign or clear a voice for the given character. + + Args: + character_name: Name of the character to update. + voice_id: The unique id of the voice in the voice library ("provider:voice_id"). + If *None* or empty string, the character voice assignment is cleared. + """ + character = self.scene.get_character(character_name) + if not character: + log.error("character not found", character_name=character_name) + return + + if voice_id: + voice = get_voice(self.scene, voice_id) + if not voice: + log.warning("voice not found in library", voice_id=voice_id) + + await set_voice(character, voice) + else: + # Clear voice assignment + await set_voice(character, None) + async def update_character_actor( self, character_name: str, diff --git a/start-frontend.sh b/start-frontend.sh index aa5ebc26..71d9a4db 100755 --- a/start-frontend.sh +++ b/start-frontend.sh @@ -1,2 +1,2 @@ cd talemate_frontend -npm run serve \ No newline at end of file +npm run serve -- --host 0.0.0.0 --port 8080 \ No newline at end of file diff --git a/talemate_frontend/README.md b/talemate_frontend/README.md index 18a618c5..0467f1cd 100644 --- a/talemate_frontend/README.md +++ b/talemate_frontend/README.md @@ -21,4 +21,4 @@ npm run lint ``` ### Customize configuration -See [Configuration Reference](https://cli.vuejs.org/config/). +See [Configuration Reference](https://vite.dev/config/). diff --git a/talemate_frontend/babel.config.js b/talemate_frontend/babel.config.js deleted file mode 100644 index e9558405..00000000 --- a/talemate_frontend/babel.config.js +++ /dev/null @@ -1,5 +0,0 @@ -module.exports = { - presets: [ - '@vue/cli-plugin-babel/preset' - ] -} diff --git a/talemate_frontend/eslint.config.js b/talemate_frontend/eslint.config.js new file mode 100644 index 00000000..11de21eb --- /dev/null +++ b/talemate_frontend/eslint.config.js @@ -0,0 +1,23 @@ +import js from "@eslint/js"; +import pluginVue from "eslint-plugin-vue"; +import globals from "globals"; + +export default [ + { + ignores: ["dist/**/*", "node_modules/**/*", "*.min.js"], + }, + js.configs.recommended, + ...pluginVue.configs["flat/essential"], + { + languageOptions: { + ecmaVersion: "latest", + sourceType: "module", + globals: { + node: true, + ...globals.browser, + ...globals.node, + }, + }, + rules: {}, + }, +]; diff --git a/talemate_frontend/example.env.development.local b/talemate_frontend/example.env.development.local index 5cae18be..5a1597e4 100644 --- a/talemate_frontend/example.env.development.local +++ b/talemate_frontend/example.env.development.local @@ -1,3 +1,3 @@ ALLOWED_HOSTS=example.com # wss if behind ssl, ws if not -VUE_APP_TALEMATE_BACKEND_WEBSOCKET_URL=wss://example.com:5050 \ No newline at end of file +VITE_TALEMATE_BACKEND_WEBSOCKET_URL=wss://example.com:5050 \ No newline at end of file diff --git a/talemate_frontend/public/index.html b/talemate_frontend/index.html similarity index 55% rename from talemate_frontend/public/index.html rename to talemate_frontend/index.html index 03536544..1de8925d 100644 --- a/talemate_frontend/public/index.html +++ b/talemate_frontend/index.html @@ -4,14 +4,14 @@ - + Talemate
- + diff --git a/talemate_frontend/package-lock.json b/talemate_frontend/package-lock.json index 36af26a0..067e29c0 100644 --- a/talemate_frontend/package-lock.json +++ b/talemate_frontend/package-lock.json @@ -1,12 +1,12 @@ { "name": "talemate_frontend", - "version": "0.31.0", + "version": "0.32.0", "lockfileVersion": 3, "requires": true, "packages": { "": { "name": "talemate_frontend", - "version": "0.31.0", + "version": "0.32.0", "dependencies": { "@codemirror/lang-json": "^6.0.1", "@codemirror/lang-markdown": "^6.2.5", @@ -19,330 +19,22 @@ "core-js": "^3.37.1", "dot-prop": "^9.0.0", "litegraph.js": "^0.7.18", + "lodash": "^4.17.21", "marked": "^15.0.12", "roboto-fontface": "*", "uuid": "^10.0.0", "vue": "^3.5", "vue-codemirror": "^6.1.1", - "vuetify": "^3.8", + "vuetify": "^3.9", "webfontloader": "^1.6.28" }, "devDependencies": { - "@babel/core": "^7", - "@babel/eslint-parser": "^7", - "@vue/cli-plugin-babel": "~5.0.8", + "@vitejs/plugin-vue": "^6.0.0", "eslint": "^9.0.0", "eslint-plugin-vue": "^10.0.0", "postcss": "^8.5.3", - "vue-cli-plugin-vuetify": "~2.5.8", - "webpack-plugin-vuetify": "^3.1" - } - }, - "node_modules/@achrinza/node-ipc": { - "version": "9.2.9", - "resolved": "https://registry.npmjs.org/@achrinza/node-ipc/-/node-ipc-9.2.9.tgz", - "integrity": "sha512-7s0VcTwiK/0tNOVdSX9FWMeFdOEcsAOz9HesBldXxFMaGvIak7KC2z9tV9EgsQXn6KUsWsfIkViMNuIo0GoZDQ==", - "dev": true, - "dependencies": { - "@node-ipc/js-queue": "2.0.3", - "event-pubsub": "4.3.0", - "js-message": "1.0.7" - }, - "engines": { - "node": "8 || 9 || 10 || 11 || 12 || 13 || 14 || 15 || 16 || 17 || 18 || 19 || 20 || 21 || 22" - } - }, - "node_modules/@ampproject/remapping": { - "version": "2.3.0", - "resolved": "https://registry.npmjs.org/@ampproject/remapping/-/remapping-2.3.0.tgz", - "integrity": "sha512-30iZtAPgz+LTIYoeivqYo853f02jBYSd5uGnGpkFV0M3xOt9aN73erkgYAmZU43x4VfqcnLxW9Kpg3R5LC4YYw==", - "dev": true, - "dependencies": { - "@jridgewell/gen-mapping": "^0.3.5", - "@jridgewell/trace-mapping": "^0.3.24" - }, - "engines": { - "node": ">=6.0.0" - } - }, - "node_modules/@babel/code-frame": { - "version": "7.27.1", - "resolved": "https://registry.npmjs.org/@babel/code-frame/-/code-frame-7.27.1.tgz", - "integrity": "sha512-cjQ7ZlQ0Mv3b47hABuTevyTuYN4i+loJKGeV9flcCgIK37cCXRh+L1bd3iBHlynerhQ7BhCkn2BPbQUL+rGqFg==", - "dev": true, - "dependencies": { - "@babel/helper-validator-identifier": "^7.27.1", - "js-tokens": "^4.0.0", - "picocolors": "^1.1.1" - }, - "engines": { - "node": ">=6.9.0" - } - }, - "node_modules/@babel/compat-data": { - "version": "7.27.5", - "resolved": "https://registry.npmjs.org/@babel/compat-data/-/compat-data-7.27.5.tgz", - "integrity": "sha512-KiRAp/VoJaWkkte84TvUd9qjdbZAdiqyvMxrGl1N6vzFogKmaLgoM3L1kgtLicp2HP5fBJS8JrZKLVIZGVJAVg==", - "dev": true, - "engines": { - "node": ">=6.9.0" - } - }, - "node_modules/@babel/core": { - "version": "7.27.4", - "resolved": "https://registry.npmjs.org/@babel/core/-/core-7.27.4.tgz", - "integrity": "sha512-bXYxrXFubeYdvB0NhD/NBB3Qi6aZeV20GOWVI47t2dkecCEoneR4NPVcb7abpXDEvejgrUfFtG6vG/zxAKmg+g==", - "dev": true, - "dependencies": { - "@ampproject/remapping": "^2.2.0", - "@babel/code-frame": "^7.27.1", - "@babel/generator": "^7.27.3", - "@babel/helper-compilation-targets": "^7.27.2", - "@babel/helper-module-transforms": "^7.27.3", - "@babel/helpers": "^7.27.4", - "@babel/parser": "^7.27.4", - "@babel/template": "^7.27.2", - "@babel/traverse": "^7.27.4", - "@babel/types": "^7.27.3", - "convert-source-map": "^2.0.0", - "debug": "^4.1.0", - "gensync": "^1.0.0-beta.2", - 
"json5": "^2.2.3", - "semver": "^6.3.1" - }, - "engines": { - "node": ">=6.9.0" - }, - "funding": { - "type": "opencollective", - "url": "https://opencollective.com/babel" - } - }, - "node_modules/@babel/eslint-parser": { - "version": "7.27.5", - "resolved": "https://registry.npmjs.org/@babel/eslint-parser/-/eslint-parser-7.27.5.tgz", - "integrity": "sha512-HLkYQfRICudzcOtjGwkPvGc5nF1b4ljLZh1IRDj50lRZ718NAKVgQpIAUX8bfg6u/yuSKY3L7E0YzIV+OxrB8Q==", - "dev": true, - "dependencies": { - "@nicolo-ribaudo/eslint-scope-5-internals": "5.1.1-v1", - "eslint-visitor-keys": "^2.1.0", - "semver": "^6.3.1" - }, - "engines": { - "node": "^10.13.0 || ^12.13.0 || >=14.0.0" - }, - "peerDependencies": { - "@babel/core": "^7.11.0", - "eslint": "^7.5.0 || ^8.0.0 || ^9.0.0" - } - }, - "node_modules/@babel/generator": { - "version": "7.27.5", - "resolved": "https://registry.npmjs.org/@babel/generator/-/generator-7.27.5.tgz", - "integrity": "sha512-ZGhA37l0e/g2s1Cnzdix0O3aLYm66eF8aufiVteOgnwxgnRP8GoyMj7VWsgWnQbVKXyge7hqrFh2K2TQM6t1Hw==", - "dev": true, - "dependencies": { - "@babel/parser": "^7.27.5", - "@babel/types": "^7.27.3", - "@jridgewell/gen-mapping": "^0.3.5", - "@jridgewell/trace-mapping": "^0.3.25", - "jsesc": "^3.0.2" - }, - "engines": { - "node": ">=6.9.0" - } - }, - "node_modules/@babel/helper-annotate-as-pure": { - "version": "7.27.3", - "resolved": "https://registry.npmjs.org/@babel/helper-annotate-as-pure/-/helper-annotate-as-pure-7.27.3.tgz", - "integrity": "sha512-fXSwMQqitTGeHLBC08Eq5yXz2m37E4pJX1qAU1+2cNedz/ifv/bVXft90VeSav5nFO61EcNgwr0aJxbyPaWBPg==", - "dev": true, - "dependencies": { - "@babel/types": "^7.27.3" - }, - "engines": { - "node": ">=6.9.0" - } - }, - "node_modules/@babel/helper-compilation-targets": { - "version": "7.27.2", - "resolved": "https://registry.npmjs.org/@babel/helper-compilation-targets/-/helper-compilation-targets-7.27.2.tgz", - "integrity": "sha512-2+1thGUUWWjLTYTHZWK1n8Yga0ijBz1XAhUXcKy81rd5g6yh7hGqMp45v7cadSbEHc9G3OTv45SyneRN3ps4DQ==", - "dev": true, - "dependencies": { - "@babel/compat-data": "^7.27.2", - "@babel/helper-validator-option": "^7.27.1", - "browserslist": "^4.24.0", - "lru-cache": "^5.1.1", - "semver": "^6.3.1" - }, - "engines": { - "node": ">=6.9.0" - } - }, - "node_modules/@babel/helper-create-class-features-plugin": { - "version": "7.27.1", - "resolved": "https://registry.npmjs.org/@babel/helper-create-class-features-plugin/-/helper-create-class-features-plugin-7.27.1.tgz", - "integrity": "sha512-QwGAmuvM17btKU5VqXfb+Giw4JcN0hjuufz3DYnpeVDvZLAObloM77bhMXiqry3Iio+Ai4phVRDwl6WU10+r5A==", - "dev": true, - "dependencies": { - "@babel/helper-annotate-as-pure": "^7.27.1", - "@babel/helper-member-expression-to-functions": "^7.27.1", - "@babel/helper-optimise-call-expression": "^7.27.1", - "@babel/helper-replace-supers": "^7.27.1", - "@babel/helper-skip-transparent-expression-wrappers": "^7.27.1", - "@babel/traverse": "^7.27.1", - "semver": "^6.3.1" - }, - "engines": { - "node": ">=6.9.0" - }, - "peerDependencies": { - "@babel/core": "^7.0.0" - } - }, - "node_modules/@babel/helper-create-regexp-features-plugin": { - "version": "7.27.1", - "resolved": "https://registry.npmjs.org/@babel/helper-create-regexp-features-plugin/-/helper-create-regexp-features-plugin-7.27.1.tgz", - "integrity": "sha512-uVDC72XVf8UbrH5qQTc18Agb8emwjTiZrQE11Nv3CuBEZmVvTwwE9CBUEvHku06gQCAyYf8Nv6ja1IN+6LMbxQ==", - "dev": true, - "dependencies": { - "@babel/helper-annotate-as-pure": "^7.27.1", - "regexpu-core": "^6.2.0", - "semver": "^6.3.1" - }, - "engines": { - "node": ">=6.9.0" - }, 
- "peerDependencies": { - "@babel/core": "^7.0.0" - } - }, - "node_modules/@babel/helper-define-polyfill-provider": { - "version": "0.6.4", - "resolved": "https://registry.npmjs.org/@babel/helper-define-polyfill-provider/-/helper-define-polyfill-provider-0.6.4.tgz", - "integrity": "sha512-jljfR1rGnXXNWnmQg2K3+bvhkxB51Rl32QRaOTuwwjviGrHzIbSc8+x9CpraDtbT7mfyjXObULP4w/adunNwAw==", - "dev": true, - "dependencies": { - "@babel/helper-compilation-targets": "^7.22.6", - "@babel/helper-plugin-utils": "^7.22.5", - "debug": "^4.1.1", - "lodash.debounce": "^4.0.8", - "resolve": "^1.14.2" - }, - "peerDependencies": { - "@babel/core": "^7.4.0 || ^8.0.0-0 <8.0.0" - } - }, - "node_modules/@babel/helper-member-expression-to-functions": { - "version": "7.27.1", - "resolved": "https://registry.npmjs.org/@babel/helper-member-expression-to-functions/-/helper-member-expression-to-functions-7.27.1.tgz", - "integrity": "sha512-E5chM8eWjTp/aNoVpcbfM7mLxu9XGLWYise2eBKGQomAk/Mb4XoxyqXTZbuTohbsl8EKqdlMhnDI2CCLfcs9wA==", - "dev": true, - "dependencies": { - "@babel/traverse": "^7.27.1", - "@babel/types": "^7.27.1" - }, - "engines": { - "node": ">=6.9.0" - } - }, - "node_modules/@babel/helper-module-imports": { - "version": "7.27.1", - "resolved": "https://registry.npmjs.org/@babel/helper-module-imports/-/helper-module-imports-7.27.1.tgz", - "integrity": "sha512-0gSFWUPNXNopqtIPQvlD5WgXYI5GY2kP2cCvoT8kczjbfcfuIljTbcWrulD1CIPIX2gt1wghbDy08yE1p+/r3w==", - "dev": true, - "dependencies": { - "@babel/traverse": "^7.27.1", - "@babel/types": "^7.27.1" - }, - "engines": { - "node": ">=6.9.0" - } - }, - "node_modules/@babel/helper-module-transforms": { - "version": "7.27.3", - "resolved": "https://registry.npmjs.org/@babel/helper-module-transforms/-/helper-module-transforms-7.27.3.tgz", - "integrity": "sha512-dSOvYwvyLsWBeIRyOeHXp5vPj5l1I011r52FM1+r1jCERv+aFXYk4whgQccYEGYxK2H3ZAIA8nuPkQ0HaUo3qg==", - "dev": true, - "dependencies": { - "@babel/helper-module-imports": "^7.27.1", - "@babel/helper-validator-identifier": "^7.27.1", - "@babel/traverse": "^7.27.3" - }, - "engines": { - "node": ">=6.9.0" - }, - "peerDependencies": { - "@babel/core": "^7.0.0" - } - }, - "node_modules/@babel/helper-optimise-call-expression": { - "version": "7.27.1", - "resolved": "https://registry.npmjs.org/@babel/helper-optimise-call-expression/-/helper-optimise-call-expression-7.27.1.tgz", - "integrity": "sha512-URMGH08NzYFhubNSGJrpUEphGKQwMQYBySzat5cAByY1/YgIRkULnIy3tAMeszlL/so2HbeilYloUmSpd7GdVw==", - "dev": true, - "dependencies": { - "@babel/types": "^7.27.1" - }, - "engines": { - "node": ">=6.9.0" - } - }, - "node_modules/@babel/helper-plugin-utils": { - "version": "7.27.1", - "resolved": "https://registry.npmjs.org/@babel/helper-plugin-utils/-/helper-plugin-utils-7.27.1.tgz", - "integrity": "sha512-1gn1Up5YXka3YYAHGKpbideQ5Yjf1tDa9qYcgysz+cNCXukyLl6DjPXhD3VRwSb8c0J9tA4b2+rHEZtc6R0tlw==", - "dev": true, - "engines": { - "node": ">=6.9.0" - } - }, - "node_modules/@babel/helper-remap-async-to-generator": { - "version": "7.27.1", - "resolved": "https://registry.npmjs.org/@babel/helper-remap-async-to-generator/-/helper-remap-async-to-generator-7.27.1.tgz", - "integrity": "sha512-7fiA521aVw8lSPeI4ZOD3vRFkoqkJcS+z4hFo82bFSH/2tNd6eJ5qCVMS5OzDmZh/kaHQeBaeyxK6wljcPtveA==", - "dev": true, - "dependencies": { - "@babel/helper-annotate-as-pure": "^7.27.1", - "@babel/helper-wrap-function": "^7.27.1", - "@babel/traverse": "^7.27.1" - }, - "engines": { - "node": ">=6.9.0" - }, - "peerDependencies": { - "@babel/core": "^7.0.0" - } - }, - 
"node_modules/@babel/helper-replace-supers": { - "version": "7.27.1", - "resolved": "https://registry.npmjs.org/@babel/helper-replace-supers/-/helper-replace-supers-7.27.1.tgz", - "integrity": "sha512-7EHz6qDZc8RYS5ElPoShMheWvEgERonFCs7IAonWLLUTXW59DP14bCZt89/GKyreYn8g3S83m21FelHKbeDCKA==", - "dev": true, - "dependencies": { - "@babel/helper-member-expression-to-functions": "^7.27.1", - "@babel/helper-optimise-call-expression": "^7.27.1", - "@babel/traverse": "^7.27.1" - }, - "engines": { - "node": ">=6.9.0" - }, - "peerDependencies": { - "@babel/core": "^7.0.0" - } - }, - "node_modules/@babel/helper-skip-transparent-expression-wrappers": { - "version": "7.27.1", - "resolved": "https://registry.npmjs.org/@babel/helper-skip-transparent-expression-wrappers/-/helper-skip-transparent-expression-wrappers-7.27.1.tgz", - "integrity": "sha512-Tub4ZKEXqbPjXgWLl2+3JpQAYBJ8+ikpQ2Ocj/q/r0LwE3UhENh7EUabyHjz2kCEsrRY83ew2DQdHluuiDQFzg==", - "dev": true, - "dependencies": { - "@babel/traverse": "^7.27.1", - "@babel/types": "^7.27.1" - }, - "engines": { - "node": ">=6.9.0" + "vite": "^7.0.4", + "vite-plugin-vuetify": "^2.1.1" } }, "node_modules/@babel/helper-string-parser": { @@ -361,48 +53,12 @@ "node": ">=6.9.0" } }, - "node_modules/@babel/helper-validator-option": { - "version": "7.27.1", - "resolved": "https://registry.npmjs.org/@babel/helper-validator-option/-/helper-validator-option-7.27.1.tgz", - "integrity": "sha512-YvjJow9FxbhFFKDSuFnVCe2WxXk1zWc22fFePVNEaWJEu8IrZVlda6N0uHwzZrUM1il7NC9Mlp4MaJYbYd9JSg==", - "dev": true, - "engines": { - "node": ">=6.9.0" - } - }, - "node_modules/@babel/helper-wrap-function": { - "version": "7.27.1", - "resolved": "https://registry.npmjs.org/@babel/helper-wrap-function/-/helper-wrap-function-7.27.1.tgz", - "integrity": "sha512-NFJK2sHUvrjo8wAU/nQTWU890/zB2jj0qBcCbZbbf+005cAsv6tMjXz31fBign6M5ov1o0Bllu+9nbqkfsjjJQ==", - "dev": true, - "dependencies": { - "@babel/template": "^7.27.1", - "@babel/traverse": "^7.27.1", - "@babel/types": "^7.27.1" - }, - "engines": { - "node": ">=6.9.0" - } - }, - "node_modules/@babel/helpers": { - "version": "7.27.6", - "resolved": "https://registry.npmjs.org/@babel/helpers/-/helpers-7.27.6.tgz", - "integrity": "sha512-muE8Tt8M22638HU31A3CgfSUciwz1fhATfoVai05aPXGor//CdWDCbnlY1yvBPo07njuVOCNGCSp/GTt12lIug==", - "dev": true, - "dependencies": { - "@babel/template": "^7.27.2", - "@babel/types": "^7.27.6" - }, - "engines": { - "node": ">=6.9.0" - } - }, "node_modules/@babel/parser": { - "version": "7.27.5", - "resolved": "https://registry.npmjs.org/@babel/parser/-/parser-7.27.5.tgz", - "integrity": "sha512-OsQd175SxWkGlzbny8J3K8TnnDD0N3lrIUtB92xwyRpzaenGZhxDvxN/JgU00U3CDZNj9tPuDJ5H0WS4Nt3vKg==", + "version": "7.28.0", + "resolved": "https://registry.npmjs.org/@babel/parser/-/parser-7.28.0.tgz", + "integrity": "sha512-jVZGvOxOuNSsuQuLRTh13nU0AogFlw32w/MT+LV6D3sP5WdbW61E77RnkbaO2dUvmPAYrBDJXGn5gGS6tH4j8g==", "dependencies": { - "@babel/types": "^7.27.3" + "@babel/types": "^7.28.0" }, "bin": { "parser": "bin/babel-parser.js" @@ -411,1168 +67,10 @@ "node": ">=6.0.0" } }, - "node_modules/@babel/plugin-bugfix-firefox-class-in-computed-class-key": { - "version": "7.27.1", - "resolved": "https://registry.npmjs.org/@babel/plugin-bugfix-firefox-class-in-computed-class-key/-/plugin-bugfix-firefox-class-in-computed-class-key-7.27.1.tgz", - "integrity": "sha512-QPG3C9cCVRQLxAVwmefEmwdTanECuUBMQZ/ym5kiw3XKCGA7qkuQLcjWWHcrD/GKbn/WmJwaezfuuAOcyKlRPA==", - "dev": true, - "dependencies": { - "@babel/helper-plugin-utils": "^7.27.1", - "@babel/traverse": 
"^7.27.1" - }, - "engines": { - "node": ">=6.9.0" - }, - "peerDependencies": { - "@babel/core": "^7.0.0" - } - }, - "node_modules/@babel/plugin-bugfix-safari-class-field-initializer-scope": { - "version": "7.27.1", - "resolved": "https://registry.npmjs.org/@babel/plugin-bugfix-safari-class-field-initializer-scope/-/plugin-bugfix-safari-class-field-initializer-scope-7.27.1.tgz", - "integrity": "sha512-qNeq3bCKnGgLkEXUuFry6dPlGfCdQNZbn7yUAPCInwAJHMU7THJfrBSozkcWq5sNM6RcF3S8XyQL2A52KNR9IA==", - "dev": true, - "dependencies": { - "@babel/helper-plugin-utils": "^7.27.1" - }, - "engines": { - "node": ">=6.9.0" - }, - "peerDependencies": { - "@babel/core": "^7.0.0" - } - }, - "node_modules/@babel/plugin-bugfix-safari-id-destructuring-collision-in-function-expression": { - "version": "7.27.1", - "resolved": "https://registry.npmjs.org/@babel/plugin-bugfix-safari-id-destructuring-collision-in-function-expression/-/plugin-bugfix-safari-id-destructuring-collision-in-function-expression-7.27.1.tgz", - "integrity": "sha512-g4L7OYun04N1WyqMNjldFwlfPCLVkgB54A/YCXICZYBsvJJE3kByKv9c9+R/nAfmIfjl2rKYLNyMHboYbZaWaA==", - "dev": true, - "dependencies": { - "@babel/helper-plugin-utils": "^7.27.1" - }, - "engines": { - "node": ">=6.9.0" - }, - "peerDependencies": { - "@babel/core": "^7.0.0" - } - }, - "node_modules/@babel/plugin-bugfix-v8-spread-parameters-in-optional-chaining": { - "version": "7.27.1", - "resolved": "https://registry.npmjs.org/@babel/plugin-bugfix-v8-spread-parameters-in-optional-chaining/-/plugin-bugfix-v8-spread-parameters-in-optional-chaining-7.27.1.tgz", - "integrity": "sha512-oO02gcONcD5O1iTLi/6frMJBIwWEHceWGSGqrpCmEL8nogiS6J9PBlE48CaK20/Jx1LuRml9aDftLgdjXT8+Cw==", - "dev": true, - "dependencies": { - "@babel/helper-plugin-utils": "^7.27.1", - "@babel/helper-skip-transparent-expression-wrappers": "^7.27.1", - "@babel/plugin-transform-optional-chaining": "^7.27.1" - }, - "engines": { - "node": ">=6.9.0" - }, - "peerDependencies": { - "@babel/core": "^7.13.0" - } - }, - "node_modules/@babel/plugin-bugfix-v8-static-class-fields-redefine-readonly": { - "version": "7.27.1", - "resolved": "https://registry.npmjs.org/@babel/plugin-bugfix-v8-static-class-fields-redefine-readonly/-/plugin-bugfix-v8-static-class-fields-redefine-readonly-7.27.1.tgz", - "integrity": "sha512-6BpaYGDavZqkI6yT+KSPdpZFfpnd68UKXbcjI9pJ13pvHhPrCKWOOLp+ysvMeA+DxnhuPpgIaRpxRxo5A9t5jw==", - "dev": true, - "dependencies": { - "@babel/helper-plugin-utils": "^7.27.1", - "@babel/traverse": "^7.27.1" - }, - "engines": { - "node": ">=6.9.0" - }, - "peerDependencies": { - "@babel/core": "^7.0.0" - } - }, - "node_modules/@babel/plugin-proposal-class-properties": { - "version": "7.18.6", - "resolved": "https://registry.npmjs.org/@babel/plugin-proposal-class-properties/-/plugin-proposal-class-properties-7.18.6.tgz", - "integrity": "sha512-cumfXOF0+nzZrrN8Rf0t7M+tF6sZc7vhQwYQck9q1/5w2OExlD+b4v4RpMJFaV1Z7WcDRgO6FqvxqxGlwo+RHQ==", - "deprecated": "This proposal has been merged to the ECMAScript standard and thus this plugin is no longer maintained. 
Please use @babel/plugin-transform-class-properties instead.", - "dev": true, - "dependencies": { - "@babel/helper-create-class-features-plugin": "^7.18.6", - "@babel/helper-plugin-utils": "^7.18.6" - }, - "engines": { - "node": ">=6.9.0" - }, - "peerDependencies": { - "@babel/core": "^7.0.0-0" - } - }, - "node_modules/@babel/plugin-proposal-decorators": { - "version": "7.27.1", - "resolved": "https://registry.npmjs.org/@babel/plugin-proposal-decorators/-/plugin-proposal-decorators-7.27.1.tgz", - "integrity": "sha512-DTxe4LBPrtFdsWzgpmbBKevg3e9PBy+dXRt19kSbucbZvL2uqtdqwwpluL1jfxYE0wIDTFp1nTy/q6gNLsxXrg==", - "dev": true, - "dependencies": { - "@babel/helper-create-class-features-plugin": "^7.27.1", - "@babel/helper-plugin-utils": "^7.27.1", - "@babel/plugin-syntax-decorators": "^7.27.1" - }, - "engines": { - "node": ">=6.9.0" - }, - "peerDependencies": { - "@babel/core": "^7.0.0-0" - } - }, - "node_modules/@babel/plugin-proposal-private-property-in-object": { - "version": "7.21.0-placeholder-for-preset-env.2", - "resolved": "https://registry.npmjs.org/@babel/plugin-proposal-private-property-in-object/-/plugin-proposal-private-property-in-object-7.21.0-placeholder-for-preset-env.2.tgz", - "integrity": "sha512-SOSkfJDddaM7mak6cPEpswyTRnuRltl429hMraQEglW+OkovnCzsiszTmsrlY//qLFjCpQDFRvjdm2wA5pPm9w==", - "dev": true, - "engines": { - "node": ">=6.9.0" - }, - "peerDependencies": { - "@babel/core": "^7.0.0-0" - } - }, - "node_modules/@babel/plugin-syntax-decorators": { - "version": "7.27.1", - "resolved": "https://registry.npmjs.org/@babel/plugin-syntax-decorators/-/plugin-syntax-decorators-7.27.1.tgz", - "integrity": "sha512-YMq8Z87Lhl8EGkmb0MwYkt36QnxC+fzCgrl66ereamPlYToRpIk5nUjKUY3QKLWq8mwUB1BgbeXcTJhZOCDg5A==", - "dev": true, - "dependencies": { - "@babel/helper-plugin-utils": "^7.27.1" - }, - "engines": { - "node": ">=6.9.0" - }, - "peerDependencies": { - "@babel/core": "^7.0.0-0" - } - }, - "node_modules/@babel/plugin-syntax-dynamic-import": { - "version": "7.8.3", - "resolved": "https://registry.npmjs.org/@babel/plugin-syntax-dynamic-import/-/plugin-syntax-dynamic-import-7.8.3.tgz", - "integrity": "sha512-5gdGbFon+PszYzqs83S3E5mpi7/y/8M9eC90MRTZfduQOYW76ig6SOSPNe41IG5LoP3FGBn2N0RjVDSQiS94kQ==", - "dev": true, - "dependencies": { - "@babel/helper-plugin-utils": "^7.8.0" - }, - "peerDependencies": { - "@babel/core": "^7.0.0-0" - } - }, - "node_modules/@babel/plugin-syntax-import-assertions": { - "version": "7.27.1", - "resolved": "https://registry.npmjs.org/@babel/plugin-syntax-import-assertions/-/plugin-syntax-import-assertions-7.27.1.tgz", - "integrity": "sha512-UT/Jrhw57xg4ILHLFnzFpPDlMbcdEicaAtjPQpbj9wa8T4r5KVWCimHcL/460g8Ht0DMxDyjsLgiWSkVjnwPFg==", - "dev": true, - "dependencies": { - "@babel/helper-plugin-utils": "^7.27.1" - }, - "engines": { - "node": ">=6.9.0" - }, - "peerDependencies": { - "@babel/core": "^7.0.0-0" - } - }, - "node_modules/@babel/plugin-syntax-import-attributes": { - "version": "7.27.1", - "resolved": "https://registry.npmjs.org/@babel/plugin-syntax-import-attributes/-/plugin-syntax-import-attributes-7.27.1.tgz", - "integrity": "sha512-oFT0FrKHgF53f4vOsZGi2Hh3I35PfSmVs4IBFLFj4dnafP+hIWDLg3VyKmUHfLoLHlyxY4C7DGtmHuJgn+IGww==", - "dev": true, - "dependencies": { - "@babel/helper-plugin-utils": "^7.27.1" - }, - "engines": { - "node": ">=6.9.0" - }, - "peerDependencies": { - "@babel/core": "^7.0.0-0" - } - }, - "node_modules/@babel/plugin-syntax-jsx": { - "version": "7.27.1", - "resolved": 
"https://registry.npmjs.org/@babel/plugin-syntax-jsx/-/plugin-syntax-jsx-7.27.1.tgz", - "integrity": "sha512-y8YTNIeKoyhGd9O0Jiyzyyqk8gdjnumGTQPsz0xOZOQ2RmkVJeZ1vmmfIvFEKqucBG6axJGBZDE/7iI5suUI/w==", - "dev": true, - "dependencies": { - "@babel/helper-plugin-utils": "^7.27.1" - }, - "engines": { - "node": ">=6.9.0" - }, - "peerDependencies": { - "@babel/core": "^7.0.0-0" - } - }, - "node_modules/@babel/plugin-syntax-unicode-sets-regex": { - "version": "7.18.6", - "resolved": "https://registry.npmjs.org/@babel/plugin-syntax-unicode-sets-regex/-/plugin-syntax-unicode-sets-regex-7.18.6.tgz", - "integrity": "sha512-727YkEAPwSIQTv5im8QHz3upqp92JTWhidIC81Tdx4VJYIte/VndKf1qKrfnnhPLiPghStWfvC/iFaMCQu7Nqg==", - "dev": true, - "dependencies": { - "@babel/helper-create-regexp-features-plugin": "^7.18.6", - "@babel/helper-plugin-utils": "^7.18.6" - }, - "engines": { - "node": ">=6.9.0" - }, - "peerDependencies": { - "@babel/core": "^7.0.0" - } - }, - "node_modules/@babel/plugin-transform-arrow-functions": { - "version": "7.27.1", - "resolved": "https://registry.npmjs.org/@babel/plugin-transform-arrow-functions/-/plugin-transform-arrow-functions-7.27.1.tgz", - "integrity": "sha512-8Z4TGic6xW70FKThA5HYEKKyBpOOsucTOD1DjU3fZxDg+K3zBJcXMFnt/4yQiZnf5+MiOMSXQ9PaEK/Ilh1DeA==", - "dev": true, - "dependencies": { - "@babel/helper-plugin-utils": "^7.27.1" - }, - "engines": { - "node": ">=6.9.0" - }, - "peerDependencies": { - "@babel/core": "^7.0.0-0" - } - }, - "node_modules/@babel/plugin-transform-async-generator-functions": { - "version": "7.27.1", - "resolved": "https://registry.npmjs.org/@babel/plugin-transform-async-generator-functions/-/plugin-transform-async-generator-functions-7.27.1.tgz", - "integrity": "sha512-eST9RrwlpaoJBDHShc+DS2SG4ATTi2MYNb4OxYkf3n+7eb49LWpnS+HSpVfW4x927qQwgk8A2hGNVaajAEw0EA==", - "dev": true, - "dependencies": { - "@babel/helper-plugin-utils": "^7.27.1", - "@babel/helper-remap-async-to-generator": "^7.27.1", - "@babel/traverse": "^7.27.1" - }, - "engines": { - "node": ">=6.9.0" - }, - "peerDependencies": { - "@babel/core": "^7.0.0-0" - } - }, - "node_modules/@babel/plugin-transform-async-to-generator": { - "version": "7.27.1", - "resolved": "https://registry.npmjs.org/@babel/plugin-transform-async-to-generator/-/plugin-transform-async-to-generator-7.27.1.tgz", - "integrity": "sha512-NREkZsZVJS4xmTr8qzE5y8AfIPqsdQfRuUiLRTEzb7Qii8iFWCyDKaUV2c0rCuh4ljDZ98ALHP/PetiBV2nddA==", - "dev": true, - "dependencies": { - "@babel/helper-module-imports": "^7.27.1", - "@babel/helper-plugin-utils": "^7.27.1", - "@babel/helper-remap-async-to-generator": "^7.27.1" - }, - "engines": { - "node": ">=6.9.0" - }, - "peerDependencies": { - "@babel/core": "^7.0.0-0" - } - }, - "node_modules/@babel/plugin-transform-block-scoped-functions": { - "version": "7.27.1", - "resolved": "https://registry.npmjs.org/@babel/plugin-transform-block-scoped-functions/-/plugin-transform-block-scoped-functions-7.27.1.tgz", - "integrity": "sha512-cnqkuOtZLapWYZUYM5rVIdv1nXYuFVIltZ6ZJ7nIj585QsjKM5dhL2Fu/lICXZ1OyIAFc7Qy+bvDAtTXqGrlhg==", - "dev": true, - "dependencies": { - "@babel/helper-plugin-utils": "^7.27.1" - }, - "engines": { - "node": ">=6.9.0" - }, - "peerDependencies": { - "@babel/core": "^7.0.0-0" - } - }, - "node_modules/@babel/plugin-transform-block-scoping": { - "version": "7.27.5", - "resolved": "https://registry.npmjs.org/@babel/plugin-transform-block-scoping/-/plugin-transform-block-scoping-7.27.5.tgz", - "integrity": 
"sha512-JF6uE2s67f0y2RZcm2kpAUEbD50vH62TyWVebxwHAlbSdM49VqPz8t4a1uIjp4NIOIZ4xzLfjY5emt/RCyC7TQ==", - "dev": true, - "dependencies": { - "@babel/helper-plugin-utils": "^7.27.1" - }, - "engines": { - "node": ">=6.9.0" - }, - "peerDependencies": { - "@babel/core": "^7.0.0-0" - } - }, - "node_modules/@babel/plugin-transform-class-properties": { - "version": "7.27.1", - "resolved": "https://registry.npmjs.org/@babel/plugin-transform-class-properties/-/plugin-transform-class-properties-7.27.1.tgz", - "integrity": "sha512-D0VcalChDMtuRvJIu3U/fwWjf8ZMykz5iZsg77Nuj821vCKI3zCyRLwRdWbsuJ/uRwZhZ002QtCqIkwC/ZkvbA==", - "dev": true, - "dependencies": { - "@babel/helper-create-class-features-plugin": "^7.27.1", - "@babel/helper-plugin-utils": "^7.27.1" - }, - "engines": { - "node": ">=6.9.0" - }, - "peerDependencies": { - "@babel/core": "^7.0.0-0" - } - }, - "node_modules/@babel/plugin-transform-class-static-block": { - "version": "7.27.1", - "resolved": "https://registry.npmjs.org/@babel/plugin-transform-class-static-block/-/plugin-transform-class-static-block-7.27.1.tgz", - "integrity": "sha512-s734HmYU78MVzZ++joYM+NkJusItbdRcbm+AGRgJCt3iA+yux0QpD9cBVdz3tKyrjVYWRl7j0mHSmv4lhV0aoA==", - "dev": true, - "dependencies": { - "@babel/helper-create-class-features-plugin": "^7.27.1", - "@babel/helper-plugin-utils": "^7.27.1" - }, - "engines": { - "node": ">=6.9.0" - }, - "peerDependencies": { - "@babel/core": "^7.12.0" - } - }, - "node_modules/@babel/plugin-transform-classes": { - "version": "7.27.1", - "resolved": "https://registry.npmjs.org/@babel/plugin-transform-classes/-/plugin-transform-classes-7.27.1.tgz", - "integrity": "sha512-7iLhfFAubmpeJe/Wo2TVuDrykh/zlWXLzPNdL0Jqn/Xu8R3QQ8h9ff8FQoISZOsw74/HFqFI7NX63HN7QFIHKA==", - "dev": true, - "dependencies": { - "@babel/helper-annotate-as-pure": "^7.27.1", - "@babel/helper-compilation-targets": "^7.27.1", - "@babel/helper-plugin-utils": "^7.27.1", - "@babel/helper-replace-supers": "^7.27.1", - "@babel/traverse": "^7.27.1", - "globals": "^11.1.0" - }, - "engines": { - "node": ">=6.9.0" - }, - "peerDependencies": { - "@babel/core": "^7.0.0-0" - } - }, - "node_modules/@babel/plugin-transform-computed-properties": { - "version": "7.27.1", - "resolved": "https://registry.npmjs.org/@babel/plugin-transform-computed-properties/-/plugin-transform-computed-properties-7.27.1.tgz", - "integrity": "sha512-lj9PGWvMTVksbWiDT2tW68zGS/cyo4AkZ/QTp0sQT0mjPopCmrSkzxeXkznjqBxzDI6TclZhOJbBmbBLjuOZUw==", - "dev": true, - "dependencies": { - "@babel/helper-plugin-utils": "^7.27.1", - "@babel/template": "^7.27.1" - }, - "engines": { - "node": ">=6.9.0" - }, - "peerDependencies": { - "@babel/core": "^7.0.0-0" - } - }, - "node_modules/@babel/plugin-transform-destructuring": { - "version": "7.27.3", - "resolved": "https://registry.npmjs.org/@babel/plugin-transform-destructuring/-/plugin-transform-destructuring-7.27.3.tgz", - "integrity": "sha512-s4Jrok82JpiaIprtY2nHsYmrThKvvwgHwjgd7UMiYhZaN0asdXNLr0y+NjTfkA7SyQE5i2Fb7eawUOZmLvyqOA==", - "dev": true, - "dependencies": { - "@babel/helper-plugin-utils": "^7.27.1" - }, - "engines": { - "node": ">=6.9.0" - }, - "peerDependencies": { - "@babel/core": "^7.0.0-0" - } - }, - "node_modules/@babel/plugin-transform-dotall-regex": { - "version": "7.27.1", - "resolved": "https://registry.npmjs.org/@babel/plugin-transform-dotall-regex/-/plugin-transform-dotall-regex-7.27.1.tgz", - "integrity": "sha512-gEbkDVGRvjj7+T1ivxrfgygpT7GUd4vmODtYpbs0gZATdkX8/iSnOtZSxiZnsgm1YjTgjI6VKBGSJJevkrclzw==", - "dev": true, - "dependencies": { - 
"@babel/helper-create-regexp-features-plugin": "^7.27.1", - "@babel/helper-plugin-utils": "^7.27.1" - }, - "engines": { - "node": ">=6.9.0" - }, - "peerDependencies": { - "@babel/core": "^7.0.0-0" - } - }, - "node_modules/@babel/plugin-transform-duplicate-keys": { - "version": "7.27.1", - "resolved": "https://registry.npmjs.org/@babel/plugin-transform-duplicate-keys/-/plugin-transform-duplicate-keys-7.27.1.tgz", - "integrity": "sha512-MTyJk98sHvSs+cvZ4nOauwTTG1JeonDjSGvGGUNHreGQns+Mpt6WX/dVzWBHgg+dYZhkC4X+zTDfkTU+Vy9y7Q==", - "dev": true, - "dependencies": { - "@babel/helper-plugin-utils": "^7.27.1" - }, - "engines": { - "node": ">=6.9.0" - }, - "peerDependencies": { - "@babel/core": "^7.0.0-0" - } - }, - "node_modules/@babel/plugin-transform-duplicate-named-capturing-groups-regex": { - "version": "7.27.1", - "resolved": "https://registry.npmjs.org/@babel/plugin-transform-duplicate-named-capturing-groups-regex/-/plugin-transform-duplicate-named-capturing-groups-regex-7.27.1.tgz", - "integrity": "sha512-hkGcueTEzuhB30B3eJCbCYeCaaEQOmQR0AdvzpD4LoN0GXMWzzGSuRrxR2xTnCrvNbVwK9N6/jQ92GSLfiZWoQ==", - "dev": true, - "dependencies": { - "@babel/helper-create-regexp-features-plugin": "^7.27.1", - "@babel/helper-plugin-utils": "^7.27.1" - }, - "engines": { - "node": ">=6.9.0" - }, - "peerDependencies": { - "@babel/core": "^7.0.0" - } - }, - "node_modules/@babel/plugin-transform-dynamic-import": { - "version": "7.27.1", - "resolved": "https://registry.npmjs.org/@babel/plugin-transform-dynamic-import/-/plugin-transform-dynamic-import-7.27.1.tgz", - "integrity": "sha512-MHzkWQcEmjzzVW9j2q8LGjwGWpG2mjwaaB0BNQwst3FIjqsg8Ct/mIZlvSPJvfi9y2AC8mi/ktxbFVL9pZ1I4A==", - "dev": true, - "dependencies": { - "@babel/helper-plugin-utils": "^7.27.1" - }, - "engines": { - "node": ">=6.9.0" - }, - "peerDependencies": { - "@babel/core": "^7.0.0-0" - } - }, - "node_modules/@babel/plugin-transform-exponentiation-operator": { - "version": "7.27.1", - "resolved": "https://registry.npmjs.org/@babel/plugin-transform-exponentiation-operator/-/plugin-transform-exponentiation-operator-7.27.1.tgz", - "integrity": "sha512-uspvXnhHvGKf2r4VVtBpeFnuDWsJLQ6MF6lGJLC89jBR1uoVeqM416AZtTuhTezOfgHicpJQmoD5YUakO/YmXQ==", - "dev": true, - "dependencies": { - "@babel/helper-plugin-utils": "^7.27.1" - }, - "engines": { - "node": ">=6.9.0" - }, - "peerDependencies": { - "@babel/core": "^7.0.0-0" - } - }, - "node_modules/@babel/plugin-transform-export-namespace-from": { - "version": "7.27.1", - "resolved": "https://registry.npmjs.org/@babel/plugin-transform-export-namespace-from/-/plugin-transform-export-namespace-from-7.27.1.tgz", - "integrity": "sha512-tQvHWSZ3/jH2xuq/vZDy0jNn+ZdXJeM8gHvX4lnJmsc3+50yPlWdZXIc5ay+umX+2/tJIqHqiEqcJvxlmIvRvQ==", - "dev": true, - "dependencies": { - "@babel/helper-plugin-utils": "^7.27.1" - }, - "engines": { - "node": ">=6.9.0" - }, - "peerDependencies": { - "@babel/core": "^7.0.0-0" - } - }, - "node_modules/@babel/plugin-transform-for-of": { - "version": "7.27.1", - "resolved": "https://registry.npmjs.org/@babel/plugin-transform-for-of/-/plugin-transform-for-of-7.27.1.tgz", - "integrity": "sha512-BfbWFFEJFQzLCQ5N8VocnCtA8J1CLkNTe2Ms2wocj75dd6VpiqS5Z5quTYcUoo4Yq+DN0rtikODccuv7RU81sw==", - "dev": true, - "dependencies": { - "@babel/helper-plugin-utils": "^7.27.1", - "@babel/helper-skip-transparent-expression-wrappers": "^7.27.1" - }, - "engines": { - "node": ">=6.9.0" - }, - "peerDependencies": { - "@babel/core": "^7.0.0-0" - } - }, - "node_modules/@babel/plugin-transform-function-name": { - "version": "7.27.1", - 
"resolved": "https://registry.npmjs.org/@babel/plugin-transform-function-name/-/plugin-transform-function-name-7.27.1.tgz", - "integrity": "sha512-1bQeydJF9Nr1eBCMMbC+hdwmRlsv5XYOMu03YSWFwNs0HsAmtSxxF1fyuYPqemVldVyFmlCU7w8UE14LupUSZQ==", - "dev": true, - "dependencies": { - "@babel/helper-compilation-targets": "^7.27.1", - "@babel/helper-plugin-utils": "^7.27.1", - "@babel/traverse": "^7.27.1" - }, - "engines": { - "node": ">=6.9.0" - }, - "peerDependencies": { - "@babel/core": "^7.0.0-0" - } - }, - "node_modules/@babel/plugin-transform-json-strings": { - "version": "7.27.1", - "resolved": "https://registry.npmjs.org/@babel/plugin-transform-json-strings/-/plugin-transform-json-strings-7.27.1.tgz", - "integrity": "sha512-6WVLVJiTjqcQauBhn1LkICsR2H+zm62I3h9faTDKt1qP4jn2o72tSvqMwtGFKGTpojce0gJs+76eZ2uCHRZh0Q==", - "dev": true, - "dependencies": { - "@babel/helper-plugin-utils": "^7.27.1" - }, - "engines": { - "node": ">=6.9.0" - }, - "peerDependencies": { - "@babel/core": "^7.0.0-0" - } - }, - "node_modules/@babel/plugin-transform-literals": { - "version": "7.27.1", - "resolved": "https://registry.npmjs.org/@babel/plugin-transform-literals/-/plugin-transform-literals-7.27.1.tgz", - "integrity": "sha512-0HCFSepIpLTkLcsi86GG3mTUzxV5jpmbv97hTETW3yzrAij8aqlD36toB1D0daVFJM8NK6GvKO0gslVQmm+zZA==", - "dev": true, - "dependencies": { - "@babel/helper-plugin-utils": "^7.27.1" - }, - "engines": { - "node": ">=6.9.0" - }, - "peerDependencies": { - "@babel/core": "^7.0.0-0" - } - }, - "node_modules/@babel/plugin-transform-logical-assignment-operators": { - "version": "7.27.1", - "resolved": "https://registry.npmjs.org/@babel/plugin-transform-logical-assignment-operators/-/plugin-transform-logical-assignment-operators-7.27.1.tgz", - "integrity": "sha512-SJvDs5dXxiae4FbSL1aBJlG4wvl594N6YEVVn9e3JGulwioy6z3oPjx/sQBO3Y4NwUu5HNix6KJ3wBZoewcdbw==", - "dev": true, - "dependencies": { - "@babel/helper-plugin-utils": "^7.27.1" - }, - "engines": { - "node": ">=6.9.0" - }, - "peerDependencies": { - "@babel/core": "^7.0.0-0" - } - }, - "node_modules/@babel/plugin-transform-member-expression-literals": { - "version": "7.27.1", - "resolved": "https://registry.npmjs.org/@babel/plugin-transform-member-expression-literals/-/plugin-transform-member-expression-literals-7.27.1.tgz", - "integrity": "sha512-hqoBX4dcZ1I33jCSWcXrP+1Ku7kdqXf1oeah7ooKOIiAdKQ+uqftgCFNOSzA5AMS2XIHEYeGFg4cKRCdpxzVOQ==", - "dev": true, - "dependencies": { - "@babel/helper-plugin-utils": "^7.27.1" - }, - "engines": { - "node": ">=6.9.0" - }, - "peerDependencies": { - "@babel/core": "^7.0.0-0" - } - }, - "node_modules/@babel/plugin-transform-modules-amd": { - "version": "7.27.1", - "resolved": "https://registry.npmjs.org/@babel/plugin-transform-modules-amd/-/plugin-transform-modules-amd-7.27.1.tgz", - "integrity": "sha512-iCsytMg/N9/oFq6n+gFTvUYDZQOMK5kEdeYxmxt91fcJGycfxVP9CnrxoliM0oumFERba2i8ZtwRUCMhvP1LnA==", - "dev": true, - "dependencies": { - "@babel/helper-module-transforms": "^7.27.1", - "@babel/helper-plugin-utils": "^7.27.1" - }, - "engines": { - "node": ">=6.9.0" - }, - "peerDependencies": { - "@babel/core": "^7.0.0-0" - } - }, - "node_modules/@babel/plugin-transform-modules-commonjs": { - "version": "7.27.1", - "resolved": "https://registry.npmjs.org/@babel/plugin-transform-modules-commonjs/-/plugin-transform-modules-commonjs-7.27.1.tgz", - "integrity": "sha512-OJguuwlTYlN0gBZFRPqwOGNWssZjfIUdS7HMYtN8c1KmwpwHFBwTeFZrg9XZa+DFTitWOW5iTAG7tyCUPsCCyw==", - "dev": true, - "dependencies": { - "@babel/helper-module-transforms": "^7.27.1", - 
"@babel/helper-plugin-utils": "^7.27.1" - }, - "engines": { - "node": ">=6.9.0" - }, - "peerDependencies": { - "@babel/core": "^7.0.0-0" - } - }, - "node_modules/@babel/plugin-transform-modules-systemjs": { - "version": "7.27.1", - "resolved": "https://registry.npmjs.org/@babel/plugin-transform-modules-systemjs/-/plugin-transform-modules-systemjs-7.27.1.tgz", - "integrity": "sha512-w5N1XzsRbc0PQStASMksmUeqECuzKuTJer7kFagK8AXgpCMkeDMO5S+aaFb7A51ZYDF7XI34qsTX+fkHiIm5yA==", - "dev": true, - "dependencies": { - "@babel/helper-module-transforms": "^7.27.1", - "@babel/helper-plugin-utils": "^7.27.1", - "@babel/helper-validator-identifier": "^7.27.1", - "@babel/traverse": "^7.27.1" - }, - "engines": { - "node": ">=6.9.0" - }, - "peerDependencies": { - "@babel/core": "^7.0.0-0" - } - }, - "node_modules/@babel/plugin-transform-modules-umd": { - "version": "7.27.1", - "resolved": "https://registry.npmjs.org/@babel/plugin-transform-modules-umd/-/plugin-transform-modules-umd-7.27.1.tgz", - "integrity": "sha512-iQBE/xC5BV1OxJbp6WG7jq9IWiD+xxlZhLrdwpPkTX3ydmXdvoCpyfJN7acaIBZaOqTfr76pgzqBJflNbeRK+w==", - "dev": true, - "dependencies": { - "@babel/helper-module-transforms": "^7.27.1", - "@babel/helper-plugin-utils": "^7.27.1" - }, - "engines": { - "node": ">=6.9.0" - }, - "peerDependencies": { - "@babel/core": "^7.0.0-0" - } - }, - "node_modules/@babel/plugin-transform-named-capturing-groups-regex": { - "version": "7.27.1", - "resolved": "https://registry.npmjs.org/@babel/plugin-transform-named-capturing-groups-regex/-/plugin-transform-named-capturing-groups-regex-7.27.1.tgz", - "integrity": "sha512-SstR5JYy8ddZvD6MhV0tM/j16Qds4mIpJTOd1Yu9J9pJjH93bxHECF7pgtc28XvkzTD6Pxcm/0Z73Hvk7kb3Ng==", - "dev": true, - "dependencies": { - "@babel/helper-create-regexp-features-plugin": "^7.27.1", - "@babel/helper-plugin-utils": "^7.27.1" - }, - "engines": { - "node": ">=6.9.0" - }, - "peerDependencies": { - "@babel/core": "^7.0.0" - } - }, - "node_modules/@babel/plugin-transform-new-target": { - "version": "7.27.1", - "resolved": "https://registry.npmjs.org/@babel/plugin-transform-new-target/-/plugin-transform-new-target-7.27.1.tgz", - "integrity": "sha512-f6PiYeqXQ05lYq3TIfIDu/MtliKUbNwkGApPUvyo6+tc7uaR4cPjPe7DFPr15Uyycg2lZU6btZ575CuQoYh7MQ==", - "dev": true, - "dependencies": { - "@babel/helper-plugin-utils": "^7.27.1" - }, - "engines": { - "node": ">=6.9.0" - }, - "peerDependencies": { - "@babel/core": "^7.0.0-0" - } - }, - "node_modules/@babel/plugin-transform-nullish-coalescing-operator": { - "version": "7.27.1", - "resolved": "https://registry.npmjs.org/@babel/plugin-transform-nullish-coalescing-operator/-/plugin-transform-nullish-coalescing-operator-7.27.1.tgz", - "integrity": "sha512-aGZh6xMo6q9vq1JGcw58lZ1Z0+i0xB2x0XaauNIUXd6O1xXc3RwoWEBlsTQrY4KQ9Jf0s5rgD6SiNkaUdJegTA==", - "dev": true, - "dependencies": { - "@babel/helper-plugin-utils": "^7.27.1" - }, - "engines": { - "node": ">=6.9.0" - }, - "peerDependencies": { - "@babel/core": "^7.0.0-0" - } - }, - "node_modules/@babel/plugin-transform-numeric-separator": { - "version": "7.27.1", - "resolved": "https://registry.npmjs.org/@babel/plugin-transform-numeric-separator/-/plugin-transform-numeric-separator-7.27.1.tgz", - "integrity": "sha512-fdPKAcujuvEChxDBJ5c+0BTaS6revLV7CJL08e4m3de8qJfNIuCc2nc7XJYOjBoTMJeqSmwXJ0ypE14RCjLwaw==", - "dev": true, - "dependencies": { - "@babel/helper-plugin-utils": "^7.27.1" - }, - "engines": { - "node": ">=6.9.0" - }, - "peerDependencies": { - "@babel/core": "^7.0.0-0" - } - }, - 
"node_modules/@babel/plugin-transform-object-rest-spread": { - "version": "7.27.3", - "resolved": "https://registry.npmjs.org/@babel/plugin-transform-object-rest-spread/-/plugin-transform-object-rest-spread-7.27.3.tgz", - "integrity": "sha512-7ZZtznF9g4l2JCImCo5LNKFHB5eXnN39lLtLY5Tg+VkR0jwOt7TBciMckuiQIOIW7L5tkQOCh3bVGYeXgMx52Q==", - "dev": true, - "dependencies": { - "@babel/helper-compilation-targets": "^7.27.2", - "@babel/helper-plugin-utils": "^7.27.1", - "@babel/plugin-transform-destructuring": "^7.27.3", - "@babel/plugin-transform-parameters": "^7.27.1" - }, - "engines": { - "node": ">=6.9.0" - }, - "peerDependencies": { - "@babel/core": "^7.0.0-0" - } - }, - "node_modules/@babel/plugin-transform-object-super": { - "version": "7.27.1", - "resolved": "https://registry.npmjs.org/@babel/plugin-transform-object-super/-/plugin-transform-object-super-7.27.1.tgz", - "integrity": "sha512-SFy8S9plRPbIcxlJ8A6mT/CxFdJx/c04JEctz4jf8YZaVS2px34j7NXRrlGlHkN/M2gnpL37ZpGRGVFLd3l8Ng==", - "dev": true, - "dependencies": { - "@babel/helper-plugin-utils": "^7.27.1", - "@babel/helper-replace-supers": "^7.27.1" - }, - "engines": { - "node": ">=6.9.0" - }, - "peerDependencies": { - "@babel/core": "^7.0.0-0" - } - }, - "node_modules/@babel/plugin-transform-optional-catch-binding": { - "version": "7.27.1", - "resolved": "https://registry.npmjs.org/@babel/plugin-transform-optional-catch-binding/-/plugin-transform-optional-catch-binding-7.27.1.tgz", - "integrity": "sha512-txEAEKzYrHEX4xSZN4kJ+OfKXFVSWKB2ZxM9dpcE3wT7smwkNmXo5ORRlVzMVdJbD+Q8ILTgSD7959uj+3Dm3Q==", - "dev": true, - "dependencies": { - "@babel/helper-plugin-utils": "^7.27.1" - }, - "engines": { - "node": ">=6.9.0" - }, - "peerDependencies": { - "@babel/core": "^7.0.0-0" - } - }, - "node_modules/@babel/plugin-transform-optional-chaining": { - "version": "7.27.1", - "resolved": "https://registry.npmjs.org/@babel/plugin-transform-optional-chaining/-/plugin-transform-optional-chaining-7.27.1.tgz", - "integrity": "sha512-BQmKPPIuc8EkZgNKsv0X4bPmOoayeu4F1YCwx2/CfmDSXDbp7GnzlUH+/ul5VGfRg1AoFPsrIThlEBj2xb4CAg==", - "dev": true, - "dependencies": { - "@babel/helper-plugin-utils": "^7.27.1", - "@babel/helper-skip-transparent-expression-wrappers": "^7.27.1" - }, - "engines": { - "node": ">=6.9.0" - }, - "peerDependencies": { - "@babel/core": "^7.0.0-0" - } - }, - "node_modules/@babel/plugin-transform-parameters": { - "version": "7.27.1", - "resolved": "https://registry.npmjs.org/@babel/plugin-transform-parameters/-/plugin-transform-parameters-7.27.1.tgz", - "integrity": "sha512-018KRk76HWKeZ5l4oTj2zPpSh+NbGdt0st5S6x0pga6HgrjBOJb24mMDHorFopOOd6YHkLgOZ+zaCjZGPO4aKg==", - "dev": true, - "dependencies": { - "@babel/helper-plugin-utils": "^7.27.1" - }, - "engines": { - "node": ">=6.9.0" - }, - "peerDependencies": { - "@babel/core": "^7.0.0-0" - } - }, - "node_modules/@babel/plugin-transform-private-methods": { - "version": "7.27.1", - "resolved": "https://registry.npmjs.org/@babel/plugin-transform-private-methods/-/plugin-transform-private-methods-7.27.1.tgz", - "integrity": "sha512-10FVt+X55AjRAYI9BrdISN9/AQWHqldOeZDUoLyif1Kn05a56xVBXb8ZouL8pZ9jem8QpXaOt8TS7RHUIS+GPA==", - "dev": true, - "dependencies": { - "@babel/helper-create-class-features-plugin": "^7.27.1", - "@babel/helper-plugin-utils": "^7.27.1" - }, - "engines": { - "node": ">=6.9.0" - }, - "peerDependencies": { - "@babel/core": "^7.0.0-0" - } - }, - "node_modules/@babel/plugin-transform-private-property-in-object": { - "version": "7.27.1", - "resolved": 
"https://registry.npmjs.org/@babel/plugin-transform-private-property-in-object/-/plugin-transform-private-property-in-object-7.27.1.tgz", - "integrity": "sha512-5J+IhqTi1XPa0DXF83jYOaARrX+41gOewWbkPyjMNRDqgOCqdffGh8L3f/Ek5utaEBZExjSAzcyjmV9SSAWObQ==", - "dev": true, - "dependencies": { - "@babel/helper-annotate-as-pure": "^7.27.1", - "@babel/helper-create-class-features-plugin": "^7.27.1", - "@babel/helper-plugin-utils": "^7.27.1" - }, - "engines": { - "node": ">=6.9.0" - }, - "peerDependencies": { - "@babel/core": "^7.0.0-0" - } - }, - "node_modules/@babel/plugin-transform-property-literals": { - "version": "7.27.1", - "resolved": "https://registry.npmjs.org/@babel/plugin-transform-property-literals/-/plugin-transform-property-literals-7.27.1.tgz", - "integrity": "sha512-oThy3BCuCha8kDZ8ZkgOg2exvPYUlprMukKQXI1r1pJ47NCvxfkEy8vK+r/hT9nF0Aa4H1WUPZZjHTFtAhGfmQ==", - "dev": true, - "dependencies": { - "@babel/helper-plugin-utils": "^7.27.1" - }, - "engines": { - "node": ">=6.9.0" - }, - "peerDependencies": { - "@babel/core": "^7.0.0-0" - } - }, - "node_modules/@babel/plugin-transform-regenerator": { - "version": "7.27.5", - "resolved": "https://registry.npmjs.org/@babel/plugin-transform-regenerator/-/plugin-transform-regenerator-7.27.5.tgz", - "integrity": "sha512-uhB8yHerfe3MWnuLAhEbeQ4afVoqv8BQsPqrTv7e/jZ9y00kJL6l9a/f4OWaKxotmjzewfEyXE1vgDJenkQ2/Q==", - "dev": true, - "dependencies": { - "@babel/helper-plugin-utils": "^7.27.1" - }, - "engines": { - "node": ">=6.9.0" - }, - "peerDependencies": { - "@babel/core": "^7.0.0-0" - } - }, - "node_modules/@babel/plugin-transform-regexp-modifiers": { - "version": "7.27.1", - "resolved": "https://registry.npmjs.org/@babel/plugin-transform-regexp-modifiers/-/plugin-transform-regexp-modifiers-7.27.1.tgz", - "integrity": "sha512-TtEciroaiODtXvLZv4rmfMhkCv8jx3wgKpL68PuiPh2M4fvz5jhsA7697N1gMvkvr/JTF13DrFYyEbY9U7cVPA==", - "dev": true, - "dependencies": { - "@babel/helper-create-regexp-features-plugin": "^7.27.1", - "@babel/helper-plugin-utils": "^7.27.1" - }, - "engines": { - "node": ">=6.9.0" - }, - "peerDependencies": { - "@babel/core": "^7.0.0" - } - }, - "node_modules/@babel/plugin-transform-reserved-words": { - "version": "7.27.1", - "resolved": "https://registry.npmjs.org/@babel/plugin-transform-reserved-words/-/plugin-transform-reserved-words-7.27.1.tgz", - "integrity": "sha512-V2ABPHIJX4kC7HegLkYoDpfg9PVmuWy/i6vUM5eGK22bx4YVFD3M5F0QQnWQoDs6AGsUWTVOopBiMFQgHaSkVw==", - "dev": true, - "dependencies": { - "@babel/helper-plugin-utils": "^7.27.1" - }, - "engines": { - "node": ">=6.9.0" - }, - "peerDependencies": { - "@babel/core": "^7.0.0-0" - } - }, - "node_modules/@babel/plugin-transform-runtime": { - "version": "7.27.4", - "resolved": "https://registry.npmjs.org/@babel/plugin-transform-runtime/-/plugin-transform-runtime-7.27.4.tgz", - "integrity": "sha512-D68nR5zxU64EUzV8i7T3R5XP0Xhrou/amNnddsRQssx6GrTLdZl1rLxyjtVZBd+v/NVX4AbTPOB5aU8thAZV1A==", - "dev": true, - "dependencies": { - "@babel/helper-module-imports": "^7.27.1", - "@babel/helper-plugin-utils": "^7.27.1", - "babel-plugin-polyfill-corejs2": "^0.4.10", - "babel-plugin-polyfill-corejs3": "^0.11.0", - "babel-plugin-polyfill-regenerator": "^0.6.1", - "semver": "^6.3.1" - }, - "engines": { - "node": ">=6.9.0" - }, - "peerDependencies": { - "@babel/core": "^7.0.0-0" - } - }, - "node_modules/@babel/plugin-transform-shorthand-properties": { - "version": "7.27.1", - "resolved": "https://registry.npmjs.org/@babel/plugin-transform-shorthand-properties/-/plugin-transform-shorthand-properties-7.27.1.tgz", 
- "integrity": "sha512-N/wH1vcn4oYawbJ13Y/FxcQrWk63jhfNa7jef0ih7PHSIHX2LB7GWE1rkPrOnka9kwMxb6hMl19p7lidA+EHmQ==", - "dev": true, - "dependencies": { - "@babel/helper-plugin-utils": "^7.27.1" - }, - "engines": { - "node": ">=6.9.0" - }, - "peerDependencies": { - "@babel/core": "^7.0.0-0" - } - }, - "node_modules/@babel/plugin-transform-spread": { - "version": "7.27.1", - "resolved": "https://registry.npmjs.org/@babel/plugin-transform-spread/-/plugin-transform-spread-7.27.1.tgz", - "integrity": "sha512-kpb3HUqaILBJcRFVhFUs6Trdd4mkrzcGXss+6/mxUd273PfbWqSDHRzMT2234gIg2QYfAjvXLSquP1xECSg09Q==", - "dev": true, - "dependencies": { - "@babel/helper-plugin-utils": "^7.27.1", - "@babel/helper-skip-transparent-expression-wrappers": "^7.27.1" - }, - "engines": { - "node": ">=6.9.0" - }, - "peerDependencies": { - "@babel/core": "^7.0.0-0" - } - }, - "node_modules/@babel/plugin-transform-sticky-regex": { - "version": "7.27.1", - "resolved": "https://registry.npmjs.org/@babel/plugin-transform-sticky-regex/-/plugin-transform-sticky-regex-7.27.1.tgz", - "integrity": "sha512-lhInBO5bi/Kowe2/aLdBAawijx+q1pQzicSgnkB6dUPc1+RC8QmJHKf2OjvU+NZWitguJHEaEmbV6VWEouT58g==", - "dev": true, - "dependencies": { - "@babel/helper-plugin-utils": "^7.27.1" - }, - "engines": { - "node": ">=6.9.0" - }, - "peerDependencies": { - "@babel/core": "^7.0.0-0" - } - }, - "node_modules/@babel/plugin-transform-template-literals": { - "version": "7.27.1", - "resolved": "https://registry.npmjs.org/@babel/plugin-transform-template-literals/-/plugin-transform-template-literals-7.27.1.tgz", - "integrity": "sha512-fBJKiV7F2DxZUkg5EtHKXQdbsbURW3DZKQUWphDum0uRP6eHGGa/He9mc0mypL680pb+e/lDIthRohlv8NCHkg==", - "dev": true, - "dependencies": { - "@babel/helper-plugin-utils": "^7.27.1" - }, - "engines": { - "node": ">=6.9.0" - }, - "peerDependencies": { - "@babel/core": "^7.0.0-0" - } - }, - "node_modules/@babel/plugin-transform-typeof-symbol": { - "version": "7.27.1", - "resolved": "https://registry.npmjs.org/@babel/plugin-transform-typeof-symbol/-/plugin-transform-typeof-symbol-7.27.1.tgz", - "integrity": "sha512-RiSILC+nRJM7FY5srIyc4/fGIwUhyDuuBSdWn4y6yT6gm652DpCHZjIipgn6B7MQ1ITOUnAKWixEUjQRIBIcLw==", - "dev": true, - "dependencies": { - "@babel/helper-plugin-utils": "^7.27.1" - }, - "engines": { - "node": ">=6.9.0" - }, - "peerDependencies": { - "@babel/core": "^7.0.0-0" - } - }, - "node_modules/@babel/plugin-transform-unicode-escapes": { - "version": "7.27.1", - "resolved": "https://registry.npmjs.org/@babel/plugin-transform-unicode-escapes/-/plugin-transform-unicode-escapes-7.27.1.tgz", - "integrity": "sha512-Ysg4v6AmF26k9vpfFuTZg8HRfVWzsh1kVfowA23y9j/Gu6dOuahdUVhkLqpObp3JIv27MLSii6noRnuKN8H0Mg==", - "dev": true, - "dependencies": { - "@babel/helper-plugin-utils": "^7.27.1" - }, - "engines": { - "node": ">=6.9.0" - }, - "peerDependencies": { - "@babel/core": "^7.0.0-0" - } - }, - "node_modules/@babel/plugin-transform-unicode-property-regex": { - "version": "7.27.1", - "resolved": "https://registry.npmjs.org/@babel/plugin-transform-unicode-property-regex/-/plugin-transform-unicode-property-regex-7.27.1.tgz", - "integrity": "sha512-uW20S39PnaTImxp39O5qFlHLS9LJEmANjMG7SxIhap8rCHqu0Ik+tLEPX5DKmHn6CsWQ7j3lix2tFOa5YtL12Q==", - "dev": true, - "dependencies": { - "@babel/helper-create-regexp-features-plugin": "^7.27.1", - "@babel/helper-plugin-utils": "^7.27.1" - }, - "engines": { - "node": ">=6.9.0" - }, - "peerDependencies": { - "@babel/core": "^7.0.0-0" - } - }, - "node_modules/@babel/plugin-transform-unicode-regex": { - "version": "7.27.1", - 
"resolved": "https://registry.npmjs.org/@babel/plugin-transform-unicode-regex/-/plugin-transform-unicode-regex-7.27.1.tgz", - "integrity": "sha512-xvINq24TRojDuyt6JGtHmkVkrfVV3FPT16uytxImLeBZqW3/H52yN+kM1MGuyPkIQxrzKwPHs5U/MP3qKyzkGw==", - "dev": true, - "dependencies": { - "@babel/helper-create-regexp-features-plugin": "^7.27.1", - "@babel/helper-plugin-utils": "^7.27.1" - }, - "engines": { - "node": ">=6.9.0" - }, - "peerDependencies": { - "@babel/core": "^7.0.0-0" - } - }, - "node_modules/@babel/plugin-transform-unicode-sets-regex": { - "version": "7.27.1", - "resolved": "https://registry.npmjs.org/@babel/plugin-transform-unicode-sets-regex/-/plugin-transform-unicode-sets-regex-7.27.1.tgz", - "integrity": "sha512-EtkOujbc4cgvb0mlpQefi4NTPBzhSIevblFevACNLUspmrALgmEBdL/XfnyyITfd8fKBZrZys92zOWcik7j9Tw==", - "dev": true, - "dependencies": { - "@babel/helper-create-regexp-features-plugin": "^7.27.1", - "@babel/helper-plugin-utils": "^7.27.1" - }, - "engines": { - "node": ">=6.9.0" - }, - "peerDependencies": { - "@babel/core": "^7.0.0" - } - }, - "node_modules/@babel/preset-env": { - "version": "7.27.2", - "resolved": "https://registry.npmjs.org/@babel/preset-env/-/preset-env-7.27.2.tgz", - "integrity": "sha512-Ma4zSuYSlGNRlCLO+EAzLnCmJK2vdstgv+n7aUP+/IKZrOfWHOJVdSJtuub8RzHTj3ahD37k5OKJWvzf16TQyQ==", - "dev": true, - "dependencies": { - "@babel/compat-data": "^7.27.2", - "@babel/helper-compilation-targets": "^7.27.2", - "@babel/helper-plugin-utils": "^7.27.1", - "@babel/helper-validator-option": "^7.27.1", - "@babel/plugin-bugfix-firefox-class-in-computed-class-key": "^7.27.1", - "@babel/plugin-bugfix-safari-class-field-initializer-scope": "^7.27.1", - "@babel/plugin-bugfix-safari-id-destructuring-collision-in-function-expression": "^7.27.1", - "@babel/plugin-bugfix-v8-spread-parameters-in-optional-chaining": "^7.27.1", - "@babel/plugin-bugfix-v8-static-class-fields-redefine-readonly": "^7.27.1", - "@babel/plugin-proposal-private-property-in-object": "7.21.0-placeholder-for-preset-env.2", - "@babel/plugin-syntax-import-assertions": "^7.27.1", - "@babel/plugin-syntax-import-attributes": "^7.27.1", - "@babel/plugin-syntax-unicode-sets-regex": "^7.18.6", - "@babel/plugin-transform-arrow-functions": "^7.27.1", - "@babel/plugin-transform-async-generator-functions": "^7.27.1", - "@babel/plugin-transform-async-to-generator": "^7.27.1", - "@babel/plugin-transform-block-scoped-functions": "^7.27.1", - "@babel/plugin-transform-block-scoping": "^7.27.1", - "@babel/plugin-transform-class-properties": "^7.27.1", - "@babel/plugin-transform-class-static-block": "^7.27.1", - "@babel/plugin-transform-classes": "^7.27.1", - "@babel/plugin-transform-computed-properties": "^7.27.1", - "@babel/plugin-transform-destructuring": "^7.27.1", - "@babel/plugin-transform-dotall-regex": "^7.27.1", - "@babel/plugin-transform-duplicate-keys": "^7.27.1", - "@babel/plugin-transform-duplicate-named-capturing-groups-regex": "^7.27.1", - "@babel/plugin-transform-dynamic-import": "^7.27.1", - "@babel/plugin-transform-exponentiation-operator": "^7.27.1", - "@babel/plugin-transform-export-namespace-from": "^7.27.1", - "@babel/plugin-transform-for-of": "^7.27.1", - "@babel/plugin-transform-function-name": "^7.27.1", - "@babel/plugin-transform-json-strings": "^7.27.1", - "@babel/plugin-transform-literals": "^7.27.1", - "@babel/plugin-transform-logical-assignment-operators": "^7.27.1", - "@babel/plugin-transform-member-expression-literals": "^7.27.1", - "@babel/plugin-transform-modules-amd": "^7.27.1", - 
"@babel/plugin-transform-modules-commonjs": "^7.27.1", - "@babel/plugin-transform-modules-systemjs": "^7.27.1", - "@babel/plugin-transform-modules-umd": "^7.27.1", - "@babel/plugin-transform-named-capturing-groups-regex": "^7.27.1", - "@babel/plugin-transform-new-target": "^7.27.1", - "@babel/plugin-transform-nullish-coalescing-operator": "^7.27.1", - "@babel/plugin-transform-numeric-separator": "^7.27.1", - "@babel/plugin-transform-object-rest-spread": "^7.27.2", - "@babel/plugin-transform-object-super": "^7.27.1", - "@babel/plugin-transform-optional-catch-binding": "^7.27.1", - "@babel/plugin-transform-optional-chaining": "^7.27.1", - "@babel/plugin-transform-parameters": "^7.27.1", - "@babel/plugin-transform-private-methods": "^7.27.1", - "@babel/plugin-transform-private-property-in-object": "^7.27.1", - "@babel/plugin-transform-property-literals": "^7.27.1", - "@babel/plugin-transform-regenerator": "^7.27.1", - "@babel/plugin-transform-regexp-modifiers": "^7.27.1", - "@babel/plugin-transform-reserved-words": "^7.27.1", - "@babel/plugin-transform-shorthand-properties": "^7.27.1", - "@babel/plugin-transform-spread": "^7.27.1", - "@babel/plugin-transform-sticky-regex": "^7.27.1", - "@babel/plugin-transform-template-literals": "^7.27.1", - "@babel/plugin-transform-typeof-symbol": "^7.27.1", - "@babel/plugin-transform-unicode-escapes": "^7.27.1", - "@babel/plugin-transform-unicode-property-regex": "^7.27.1", - "@babel/plugin-transform-unicode-regex": "^7.27.1", - "@babel/plugin-transform-unicode-sets-regex": "^7.27.1", - "@babel/preset-modules": "0.1.6-no-external-plugins", - "babel-plugin-polyfill-corejs2": "^0.4.10", - "babel-plugin-polyfill-corejs3": "^0.11.0", - "babel-plugin-polyfill-regenerator": "^0.6.1", - "core-js-compat": "^3.40.0", - "semver": "^6.3.1" - }, - "engines": { - "node": ">=6.9.0" - }, - "peerDependencies": { - "@babel/core": "^7.0.0-0" - } - }, - "node_modules/@babel/preset-modules": { - "version": "0.1.6-no-external-plugins", - "resolved": "https://registry.npmjs.org/@babel/preset-modules/-/preset-modules-0.1.6-no-external-plugins.tgz", - "integrity": "sha512-HrcgcIESLm9aIR842yhJ5RWan/gebQUJ6E/E5+rf0y9o6oj7w0Br+sWuL6kEQ/o/AdfvR1Je9jG18/gnpwjEyA==", - "dev": true, - "dependencies": { - "@babel/helper-plugin-utils": "^7.0.0", - "@babel/types": "^7.4.4", - "esutils": "^2.0.2" - }, - "peerDependencies": { - "@babel/core": "^7.0.0-0 || ^8.0.0-0 <8.0.0" - } - }, - "node_modules/@babel/runtime": { - "version": "7.27.6", - "resolved": "https://registry.npmjs.org/@babel/runtime/-/runtime-7.27.6.tgz", - "integrity": "sha512-vbavdySgbTTrmFE+EsiqUTzlOr5bzlnJtUv9PynGCAKvfQqjIXbvFdumPM/GxMDfyuGMJaJAU6TO4zc1Jf1i8Q==", - "dev": true, - "engines": { - "node": ">=6.9.0" - } - }, - "node_modules/@babel/template": { - "version": "7.27.2", - "resolved": "https://registry.npmjs.org/@babel/template/-/template-7.27.2.tgz", - "integrity": "sha512-LPDZ85aEJyYSd18/DkjNh4/y1ntkE5KwUHWTiqgRxruuZL2F1yuHligVHLvcHY2vMHXttKFpJn6LwfI7cw7ODw==", - "dev": true, - "dependencies": { - "@babel/code-frame": "^7.27.1", - "@babel/parser": "^7.27.2", - "@babel/types": "^7.27.1" - }, - "engines": { - "node": ">=6.9.0" - } - }, - "node_modules/@babel/traverse": { - "version": "7.27.4", - "resolved": "https://registry.npmjs.org/@babel/traverse/-/traverse-7.27.4.tgz", - "integrity": "sha512-oNcu2QbHqts9BtOWJosOVJapWjBDSxGCpFvikNR5TGDYDQf3JwpIoMzIKrvfoti93cLfPJEG4tH9SPVeyCGgdA==", - "dev": true, - "dependencies": { - "@babel/code-frame": "^7.27.1", - "@babel/generator": "^7.27.3", - "@babel/parser": "^7.27.4", - 
"@babel/template": "^7.27.2", - "@babel/types": "^7.27.3", - "debug": "^4.3.1", - "globals": "^11.1.0" - }, - "engines": { - "node": ">=6.9.0" - } - }, "node_modules/@babel/types": { - "version": "7.27.6", - "resolved": "https://registry.npmjs.org/@babel/types/-/types-7.27.6.tgz", - "integrity": "sha512-ETyHEk2VHHvl9b9jZP5IHPavHYk57EhanlRRuae9XCpb/j5bDCbPPMOBfCWhnl/7EDJz0jEMCi/RhccCE8r1+Q==", + "version": "7.28.2", + "resolved": "https://registry.npmjs.org/@babel/types/-/types-7.28.2.tgz", + "integrity": "sha512-ruv7Ae4J5dUYULmeXw1gmb7rYRz57OWCPM57pHojnLq/3Z1CK2lNSLTCVjxVk1F/TZHwOZZrOWi0ur95BbLxNQ==", "dependencies": { "@babel/helper-string-parser": "^7.27.1", "@babel/helper-validator-identifier": "^7.27.1" @@ -1617,9 +115,9 @@ } }, "node_modules/@codemirror/lang-cpp": { - "version": "6.0.2", - "resolved": "https://registry.npmjs.org/@codemirror/lang-cpp/-/lang-cpp-6.0.2.tgz", - "integrity": "sha512-6oYEYUKHvrnacXxWxYa6t4puTlbN3dgV662BDfSH8+MfjQjVmP697/KYTDOqpxgerkvoNm7q5wlFMBeX8ZMocg==", + "version": "6.0.3", + "resolved": "https://registry.npmjs.org/@codemirror/lang-cpp/-/lang-cpp-6.0.3.tgz", + "integrity": "sha512-URM26M3vunFFn9/sm6rzqrBzDgfWuDixp85uTY49wKudToc2jTHUrKIGGKs+QWND+YLofNNZpxcNGRynFJfvgA==", "dependencies": { "@codemirror/language": "^6.0.0", "@lezer/cpp": "^1.0.0" @@ -1666,9 +164,9 @@ } }, "node_modules/@codemirror/lang-java": { - "version": "6.0.1", - "resolved": "https://registry.npmjs.org/@codemirror/lang-java/-/lang-java-6.0.1.tgz", - "integrity": "sha512-OOnmhH67h97jHzCuFaIEspbmsT98fNdhVhmA3zCxW0cn7l8rChDhZtwiwJ/JOKXgfm4J+ELxQihxaI7bj7mJRg==", + "version": "6.0.2", + "resolved": "https://registry.npmjs.org/@codemirror/lang-java/-/lang-java-6.0.2.tgz", + "integrity": "sha512-m5Nt1mQ/cznJY7tMfQTJchmrjdjQ71IDs+55d1GAa8DGaB8JXWsVCkVT284C3RTASaY43YknrK2X3hPO/J3MOQ==", "dependencies": { "@codemirror/language": "^6.0.0", "@lezer/java": "^1.0.0" @@ -1689,9 +187,9 @@ } }, "node_modules/@codemirror/lang-json": { - "version": "6.0.1", - "resolved": "https://registry.npmjs.org/@codemirror/lang-json/-/lang-json-6.0.1.tgz", - "integrity": "sha512-+T1flHdgpqDDlJZ2Lkil/rLiRy684WMLc74xUnjJH48GQdfJo/pudlTRreZmKwzP8/tGdKf83wlbAdOCzlJOGQ==", + "version": "6.0.2", + "resolved": "https://registry.npmjs.org/@codemirror/lang-json/-/lang-json-6.0.2.tgz", + "integrity": "sha512-x2OtO+AvwEHrEwR0FyyPtfDUiloG3rnVTSZV1W8UteaLL8/MajQd8DpvUb2YVzC+/T18aSDv0H9mu+xw0EStoQ==", "dependencies": { "@codemirror/language": "^6.0.0", "@lezer/json": "^1.0.0" @@ -1725,9 +223,9 @@ } }, "node_modules/@codemirror/lang-markdown": { - "version": "6.3.2", - "resolved": "https://registry.npmjs.org/@codemirror/lang-markdown/-/lang-markdown-6.3.2.tgz", - "integrity": "sha512-c/5MYinGbFxYl4itE9q/rgN/sMTjOr8XL5OWnC+EaRMLfCbVUmmubTJfdgpfcSS2SCaT7b+Q+xi3l6CgoE+BsA==", + "version": "6.3.3", + "resolved": "https://registry.npmjs.org/@codemirror/lang-markdown/-/lang-markdown-6.3.3.tgz", + "integrity": "sha512-1fn1hQAPWlSSMCvnF810AkhWpNLkJpl66CRfIy3vVl20Sl4NwChkorCHqpMtNbXr1EuMJsrDnhEpjZxKZ2UX3A==", "dependencies": { "@codemirror/autocomplete": "^6.7.1", "@codemirror/lang-html": "^6.0.0", @@ -1739,9 +237,9 @@ } }, "node_modules/@codemirror/lang-php": { - "version": "6.0.1", - "resolved": "https://registry.npmjs.org/@codemirror/lang-php/-/lang-php-6.0.1.tgz", - "integrity": "sha512-ublojMdw/PNWa7qdN5TMsjmqkNuTBD3k6ndZ4Z0S25SBAiweFGyY68AS3xNcIOlb6DDFDvKlinLQ40vSLqf8xA==", + "version": "6.0.2", + "resolved": "https://registry.npmjs.org/@codemirror/lang-php/-/lang-php-6.0.2.tgz", + "integrity": 
"sha512-ZKy2v1n8Fc8oEXj0Th0PUMXzQJ0AIR6TaZU+PbDHExFwdu+guzOA4jmCHS1Nz4vbFezwD7LyBdDnddSJeScMCA==", "dependencies": { "@codemirror/lang-html": "^6.0.0", "@codemirror/language": "^6.0.0", @@ -1763,9 +261,9 @@ } }, "node_modules/@codemirror/lang-rust": { - "version": "6.0.1", - "resolved": "https://registry.npmjs.org/@codemirror/lang-rust/-/lang-rust-6.0.1.tgz", - "integrity": "sha512-344EMWFBzWArHWdZn/NcgkwMvZIWUR1GEBdwG8FEp++6o6vT6KL9V7vGs2ONsKxxFUPXKI0SPcWhyYyl2zPYxQ==", + "version": "6.0.2", + "resolved": "https://registry.npmjs.org/@codemirror/lang-rust/-/lang-rust-6.0.2.tgz", + "integrity": "sha512-EZaGjCUegtiU7kSMvOfEZpaCReowEf3yNidYu7+vfuGTm9ow4mthAparY5hisJqOHmJowVH3Upu+eJlUji6qqA==", "dependencies": { "@codemirror/language": "^6.0.0", "@lezer/rust": "^1.0.0" @@ -1784,9 +282,9 @@ } }, "node_modules/@codemirror/lang-sql": { - "version": "6.9.0", - "resolved": "https://registry.npmjs.org/@codemirror/lang-sql/-/lang-sql-6.9.0.tgz", - "integrity": "sha512-xmtpWqKSgum1B1J3Ro6rf7nuPqf2+kJQg5SjrofCAcyCThOe0ihSktSoXfXuhQBnwx1QbmreBbLJM5Jru6zitg==", + "version": "6.9.1", + "resolved": "https://registry.npmjs.org/@codemirror/lang-sql/-/lang-sql-6.9.1.tgz", + "integrity": "sha512-ecSk3gm/mlINcURMcvkCZmXgdzPSq8r/yfCtTB4vgqGGIbBC2IJIAy7GqYTy5pgBEooTVmHP2GZK6Z7h63CDGg==", "dependencies": { "@codemirror/autocomplete": "^6.0.0", "@codemirror/language": "^6.0.0", @@ -1848,9 +346,9 @@ } }, "node_modules/@codemirror/language": { - "version": "6.11.1", - "resolved": "https://registry.npmjs.org/@codemirror/language/-/language-6.11.1.tgz", - "integrity": "sha512-5kS1U7emOGV84vxC+ruBty5sUgcD0te6dyupyRVG2zaSjhTDM73LhVKUtVwiqSe6QwmEoA4SCiU8AKPFyumAWQ==", + "version": "6.11.2", + "resolved": "https://registry.npmjs.org/@codemirror/language/-/language-6.11.2.tgz", + "integrity": "sha512-p44TsNArL4IVXDTbapUmEkAlvWs2CFQbcfc0ymDsis1kH2wh0gcY96AS29c/vp2d0y2Tquk1EDSaawpzilUiAw==", "dependencies": { "@codemirror/state": "^6.0.0", "@codemirror/view": "^6.23.0", @@ -1927,9 +425,9 @@ } }, "node_modules/@codemirror/theme-one-dark": { - "version": "6.1.2", - "resolved": "https://registry.npmjs.org/@codemirror/theme-one-dark/-/theme-one-dark-6.1.2.tgz", - "integrity": "sha512-F+sH0X16j/qFLMAfbciKTxVOwkdAS336b7AXTKOZhy8BR3eH/RelsnLgLFINrpST63mmN2OuwUt0W2ndUgYwUA==", + "version": "6.1.3", + "resolved": "https://registry.npmjs.org/@codemirror/theme-one-dark/-/theme-one-dark-6.1.3.tgz", + "integrity": "sha512-NzBdIvEJmx6fjeremiGp3t/okrLPYT0d9orIc7AFun8oZcRk58aejkqhv6spnz4MLAevrKNPMQYXEWMg4s+sKA==", "dependencies": { "@codemirror/language": "^6.0.0", "@codemirror/state": "^6.0.0", @@ -1938,9 +436,9 @@ } }, "node_modules/@codemirror/view": { - "version": "6.37.1", - "resolved": "https://registry.npmjs.org/@codemirror/view/-/view-6.37.1.tgz", - "integrity": "sha512-Qy4CAUwngy/VQkEz0XzMKVRcckQuqLYWKqVpDDDghBe5FSXSqfVrJn49nw3ePZHxRUz4nRmb05Lgi+9csWo4eg==", + "version": "6.38.1", + "resolved": "https://registry.npmjs.org/@codemirror/view/-/view-6.38.1.tgz", + "integrity": "sha512-RmTOkE7hRU3OVREqFVITWHz6ocgBjv08GoePscAakgVQfciA3SGCEk7mb9IzwW61cKKmlTpHXG6DUE5Ubx+MGQ==", "dependencies": { "@codemirror/state": "^6.5.0", "crelt": "^1.0.6", @@ -1948,14 +446,394 @@ "w3c-keyname": "^2.2.4" } }, - "node_modules/@discoveryjs/json-ext": { - "version": "0.5.7", - "resolved": "https://registry.npmjs.org/@discoveryjs/json-ext/-/json-ext-0.5.7.tgz", - "integrity": "sha512-dBVuXR082gk3jsFp7Rd/JI4kytwGHecnCoTtXFb7DB6CNHp4rg5k1bhg0nWdLGLnOV71lmDzGQaLMy8iPLY0pw==", - "dev": true, - "peer": true, + "node_modules/@esbuild/aix-ppc64": { + "version": 
"0.25.8", + "resolved": "https://registry.npmjs.org/@esbuild/aix-ppc64/-/aix-ppc64-0.25.8.tgz", + "integrity": "sha512-urAvrUedIqEiFR3FYSLTWQgLu5tb+m0qZw0NBEasUeo6wuqatkMDaRT+1uABiGXEu5vqgPd7FGE1BhsAIy9QVA==", + "cpu": [ + "ppc64" + ], + "optional": true, + "os": [ + "aix" + ], "engines": { - "node": ">=10.0.0" + "node": ">=18" + } + }, + "node_modules/@esbuild/android-arm": { + "version": "0.25.8", + "resolved": "https://registry.npmjs.org/@esbuild/android-arm/-/android-arm-0.25.8.tgz", + "integrity": "sha512-RONsAvGCz5oWyePVnLdZY/HHwA++nxYWIX1atInlaW6SEkwq6XkP3+cb825EUcRs5Vss/lGh/2YxAb5xqc07Uw==", + "cpu": [ + "arm" + ], + "optional": true, + "os": [ + "android" + ], + "engines": { + "node": ">=18" + } + }, + "node_modules/@esbuild/android-arm64": { + "version": "0.25.8", + "resolved": "https://registry.npmjs.org/@esbuild/android-arm64/-/android-arm64-0.25.8.tgz", + "integrity": "sha512-OD3p7LYzWpLhZEyATcTSJ67qB5D+20vbtr6vHlHWSQYhKtzUYrETuWThmzFpZtFsBIxRvhO07+UgVA9m0i/O1w==", + "cpu": [ + "arm64" + ], + "optional": true, + "os": [ + "android" + ], + "engines": { + "node": ">=18" + } + }, + "node_modules/@esbuild/android-x64": { + "version": "0.25.8", + "resolved": "https://registry.npmjs.org/@esbuild/android-x64/-/android-x64-0.25.8.tgz", + "integrity": "sha512-yJAVPklM5+4+9dTeKwHOaA+LQkmrKFX96BM0A/2zQrbS6ENCmxc4OVoBs5dPkCCak2roAD+jKCdnmOqKszPkjA==", + "cpu": [ + "x64" + ], + "optional": true, + "os": [ + "android" + ], + "engines": { + "node": ">=18" + } + }, + "node_modules/@esbuild/darwin-arm64": { + "version": "0.25.8", + "resolved": "https://registry.npmjs.org/@esbuild/darwin-arm64/-/darwin-arm64-0.25.8.tgz", + "integrity": "sha512-Jw0mxgIaYX6R8ODrdkLLPwBqHTtYHJSmzzd+QeytSugzQ0Vg4c5rDky5VgkoowbZQahCbsv1rT1KW72MPIkevw==", + "cpu": [ + "arm64" + ], + "optional": true, + "os": [ + "darwin" + ], + "engines": { + "node": ">=18" + } + }, + "node_modules/@esbuild/darwin-x64": { + "version": "0.25.8", + "resolved": "https://registry.npmjs.org/@esbuild/darwin-x64/-/darwin-x64-0.25.8.tgz", + "integrity": "sha512-Vh2gLxxHnuoQ+GjPNvDSDRpoBCUzY4Pu0kBqMBDlK4fuWbKgGtmDIeEC081xi26PPjn+1tct+Bh8FjyLlw1Zlg==", + "cpu": [ + "x64" + ], + "optional": true, + "os": [ + "darwin" + ], + "engines": { + "node": ">=18" + } + }, + "node_modules/@esbuild/freebsd-arm64": { + "version": "0.25.8", + "resolved": "https://registry.npmjs.org/@esbuild/freebsd-arm64/-/freebsd-arm64-0.25.8.tgz", + "integrity": "sha512-YPJ7hDQ9DnNe5vxOm6jaie9QsTwcKedPvizTVlqWG9GBSq+BuyWEDazlGaDTC5NGU4QJd666V0yqCBL2oWKPfA==", + "cpu": [ + "arm64" + ], + "optional": true, + "os": [ + "freebsd" + ], + "engines": { + "node": ">=18" + } + }, + "node_modules/@esbuild/freebsd-x64": { + "version": "0.25.8", + "resolved": "https://registry.npmjs.org/@esbuild/freebsd-x64/-/freebsd-x64-0.25.8.tgz", + "integrity": "sha512-MmaEXxQRdXNFsRN/KcIimLnSJrk2r5H8v+WVafRWz5xdSVmWLoITZQXcgehI2ZE6gioE6HirAEToM/RvFBeuhw==", + "cpu": [ + "x64" + ], + "optional": true, + "os": [ + "freebsd" + ], + "engines": { + "node": ">=18" + } + }, + "node_modules/@esbuild/linux-arm": { + "version": "0.25.8", + "resolved": "https://registry.npmjs.org/@esbuild/linux-arm/-/linux-arm-0.25.8.tgz", + "integrity": "sha512-FuzEP9BixzZohl1kLf76KEVOsxtIBFwCaLupVuk4eFVnOZfU+Wsn+x5Ryam7nILV2pkq2TqQM9EZPsOBuMC+kg==", + "cpu": [ + "arm" + ], + "optional": true, + "os": [ + "linux" + ], + "engines": { + "node": ">=18" + } + }, + "node_modules/@esbuild/linux-arm64": { + "version": "0.25.8", + "resolved": "https://registry.npmjs.org/@esbuild/linux-arm64/-/linux-arm64-0.25.8.tgz", + 
"integrity": "sha512-WIgg00ARWv/uYLU7lsuDK00d/hHSfES5BzdWAdAig1ioV5kaFNrtK8EqGcUBJhYqotlUByUKz5Qo6u8tt7iD/w==", + "cpu": [ + "arm64" + ], + "optional": true, + "os": [ + "linux" + ], + "engines": { + "node": ">=18" + } + }, + "node_modules/@esbuild/linux-ia32": { + "version": "0.25.8", + "resolved": "https://registry.npmjs.org/@esbuild/linux-ia32/-/linux-ia32-0.25.8.tgz", + "integrity": "sha512-A1D9YzRX1i+1AJZuFFUMP1E9fMaYY+GnSQil9Tlw05utlE86EKTUA7RjwHDkEitmLYiFsRd9HwKBPEftNdBfjg==", + "cpu": [ + "ia32" + ], + "optional": true, + "os": [ + "linux" + ], + "engines": { + "node": ">=18" + } + }, + "node_modules/@esbuild/linux-loong64": { + "version": "0.25.8", + "resolved": "https://registry.npmjs.org/@esbuild/linux-loong64/-/linux-loong64-0.25.8.tgz", + "integrity": "sha512-O7k1J/dwHkY1RMVvglFHl1HzutGEFFZ3kNiDMSOyUrB7WcoHGf96Sh+64nTRT26l3GMbCW01Ekh/ThKM5iI7hQ==", + "cpu": [ + "loong64" + ], + "optional": true, + "os": [ + "linux" + ], + "engines": { + "node": ">=18" + } + }, + "node_modules/@esbuild/linux-mips64el": { + "version": "0.25.8", + "resolved": "https://registry.npmjs.org/@esbuild/linux-mips64el/-/linux-mips64el-0.25.8.tgz", + "integrity": "sha512-uv+dqfRazte3BzfMp8PAQXmdGHQt2oC/y2ovwpTteqrMx2lwaksiFZ/bdkXJC19ttTvNXBuWH53zy/aTj1FgGw==", + "cpu": [ + "mips64el" + ], + "optional": true, + "os": [ + "linux" + ], + "engines": { + "node": ">=18" + } + }, + "node_modules/@esbuild/linux-ppc64": { + "version": "0.25.8", + "resolved": "https://registry.npmjs.org/@esbuild/linux-ppc64/-/linux-ppc64-0.25.8.tgz", + "integrity": "sha512-GyG0KcMi1GBavP5JgAkkstMGyMholMDybAf8wF5A70CALlDM2p/f7YFE7H92eDeH/VBtFJA5MT4nRPDGg4JuzQ==", + "cpu": [ + "ppc64" + ], + "optional": true, + "os": [ + "linux" + ], + "engines": { + "node": ">=18" + } + }, + "node_modules/@esbuild/linux-riscv64": { + "version": "0.25.8", + "resolved": "https://registry.npmjs.org/@esbuild/linux-riscv64/-/linux-riscv64-0.25.8.tgz", + "integrity": "sha512-rAqDYFv3yzMrq7GIcen3XP7TUEG/4LK86LUPMIz6RT8A6pRIDn0sDcvjudVZBiiTcZCY9y2SgYX2lgK3AF+1eg==", + "cpu": [ + "riscv64" + ], + "optional": true, + "os": [ + "linux" + ], + "engines": { + "node": ">=18" + } + }, + "node_modules/@esbuild/linux-s390x": { + "version": "0.25.8", + "resolved": "https://registry.npmjs.org/@esbuild/linux-s390x/-/linux-s390x-0.25.8.tgz", + "integrity": "sha512-Xutvh6VjlbcHpsIIbwY8GVRbwoviWT19tFhgdA7DlenLGC/mbc3lBoVb7jxj9Z+eyGqvcnSyIltYUrkKzWqSvg==", + "cpu": [ + "s390x" + ], + "optional": true, + "os": [ + "linux" + ], + "engines": { + "node": ">=18" + } + }, + "node_modules/@esbuild/linux-x64": { + "version": "0.25.8", + "resolved": "https://registry.npmjs.org/@esbuild/linux-x64/-/linux-x64-0.25.8.tgz", + "integrity": "sha512-ASFQhgY4ElXh3nDcOMTkQero4b1lgubskNlhIfJrsH5OKZXDpUAKBlNS0Kx81jwOBp+HCeZqmoJuihTv57/jvQ==", + "cpu": [ + "x64" + ], + "optional": true, + "os": [ + "linux" + ], + "engines": { + "node": ">=18" + } + }, + "node_modules/@esbuild/netbsd-arm64": { + "version": "0.25.8", + "resolved": "https://registry.npmjs.org/@esbuild/netbsd-arm64/-/netbsd-arm64-0.25.8.tgz", + "integrity": "sha512-d1KfruIeohqAi6SA+gENMuObDbEjn22olAR7egqnkCD9DGBG0wsEARotkLgXDu6c4ncgWTZJtN5vcgxzWRMzcw==", + "cpu": [ + "arm64" + ], + "optional": true, + "os": [ + "netbsd" + ], + "engines": { + "node": ">=18" + } + }, + "node_modules/@esbuild/netbsd-x64": { + "version": "0.25.8", + "resolved": "https://registry.npmjs.org/@esbuild/netbsd-x64/-/netbsd-x64-0.25.8.tgz", + "integrity": "sha512-nVDCkrvx2ua+XQNyfrujIG38+YGyuy2Ru9kKVNyh5jAys6n+l44tTtToqHjino2My8VAY6Lw9H7RI73XFi66Cg==", + 
"cpu": [ + "x64" + ], + "optional": true, + "os": [ + "netbsd" + ], + "engines": { + "node": ">=18" + } + }, + "node_modules/@esbuild/openbsd-arm64": { + "version": "0.25.8", + "resolved": "https://registry.npmjs.org/@esbuild/openbsd-arm64/-/openbsd-arm64-0.25.8.tgz", + "integrity": "sha512-j8HgrDuSJFAujkivSMSfPQSAa5Fxbvk4rgNAS5i3K+r8s1X0p1uOO2Hl2xNsGFppOeHOLAVgYwDVlmxhq5h+SQ==", + "cpu": [ + "arm64" + ], + "optional": true, + "os": [ + "openbsd" + ], + "engines": { + "node": ">=18" + } + }, + "node_modules/@esbuild/openbsd-x64": { + "version": "0.25.8", + "resolved": "https://registry.npmjs.org/@esbuild/openbsd-x64/-/openbsd-x64-0.25.8.tgz", + "integrity": "sha512-1h8MUAwa0VhNCDp6Af0HToI2TJFAn1uqT9Al6DJVzdIBAd21m/G0Yfc77KDM3uF3T/YaOgQq3qTJHPbTOInaIQ==", + "cpu": [ + "x64" + ], + "optional": true, + "os": [ + "openbsd" + ], + "engines": { + "node": ">=18" + } + }, + "node_modules/@esbuild/openharmony-arm64": { + "version": "0.25.8", + "resolved": "https://registry.npmjs.org/@esbuild/openharmony-arm64/-/openharmony-arm64-0.25.8.tgz", + "integrity": "sha512-r2nVa5SIK9tSWd0kJd9HCffnDHKchTGikb//9c7HX+r+wHYCpQrSgxhlY6KWV1nFo1l4KFbsMlHk+L6fekLsUg==", + "cpu": [ + "arm64" + ], + "optional": true, + "os": [ + "openharmony" + ], + "engines": { + "node": ">=18" + } + }, + "node_modules/@esbuild/sunos-x64": { + "version": "0.25.8", + "resolved": "https://registry.npmjs.org/@esbuild/sunos-x64/-/sunos-x64-0.25.8.tgz", + "integrity": "sha512-zUlaP2S12YhQ2UzUfcCuMDHQFJyKABkAjvO5YSndMiIkMimPmxA+BYSBikWgsRpvyxuRnow4nS5NPnf9fpv41w==", + "cpu": [ + "x64" + ], + "optional": true, + "os": [ + "sunos" + ], + "engines": { + "node": ">=18" + } + }, + "node_modules/@esbuild/win32-arm64": { + "version": "0.25.8", + "resolved": "https://registry.npmjs.org/@esbuild/win32-arm64/-/win32-arm64-0.25.8.tgz", + "integrity": "sha512-YEGFFWESlPva8hGL+zvj2z/SaK+pH0SwOM0Nc/d+rVnW7GSTFlLBGzZkuSU9kFIGIo8q9X3ucpZhu8PDN5A2sQ==", + "cpu": [ + "arm64" + ], + "optional": true, + "os": [ + "win32" + ], + "engines": { + "node": ">=18" + } + }, + "node_modules/@esbuild/win32-ia32": { + "version": "0.25.8", + "resolved": "https://registry.npmjs.org/@esbuild/win32-ia32/-/win32-ia32-0.25.8.tgz", + "integrity": "sha512-hiGgGC6KZ5LZz58OL/+qVVoZiuZlUYlYHNAmczOm7bs2oE1XriPFi5ZHHrS8ACpV5EjySrnoCKmcbQMN+ojnHg==", + "cpu": [ + "ia32" + ], + "optional": true, + "os": [ + "win32" + ], + "engines": { + "node": ">=18" + } + }, + "node_modules/@esbuild/win32-x64": { + "version": "0.25.8", + "resolved": "https://registry.npmjs.org/@esbuild/win32-x64/-/win32-x64-0.25.8.tgz", + "integrity": "sha512-cn3Yr7+OaaZq1c+2pe+8yxC8E144SReCQjN6/2ynubzYjvyqZjTXfQJpAcQpsdJq3My7XADANiYGHoFC69pLQw==", + "cpu": [ + "x64" + ], + "optional": true, + "os": [ + "win32" + ], + "engines": { + "node": ">=18" } }, "node_modules/@eslint-community/eslint-utils": { @@ -1998,9 +876,9 @@ } }, "node_modules/@eslint/config-array": { - "version": "0.20.0", - "resolved": "https://registry.npmjs.org/@eslint/config-array/-/config-array-0.20.0.tgz", - "integrity": "sha512-fxlS1kkIjx8+vy2SjuCB94q3htSNrufYTXubwiBFeaQHbH6Ipi43gFJq2zCMt6PHhImH3Xmr0NksKDvchWlpQQ==", + "version": "0.21.0", + "resolved": "https://registry.npmjs.org/@eslint/config-array/-/config-array-0.21.0.tgz", + "integrity": "sha512-ENIdc4iLu0d93HeYirvKmrzshzofPw6VkZRKQGe9Nv46ZnWUzcF1xV01dcvEg/1wXUR61OmmlSfyeyO7EvjLxQ==", "dev": true, "dependencies": { "@eslint/object-schema": "^2.1.6", @@ -2012,18 +890,18 @@ } }, "node_modules/@eslint/config-helpers": { - "version": "0.2.2", - "resolved": 
"https://registry.npmjs.org/@eslint/config-helpers/-/config-helpers-0.2.2.tgz", - "integrity": "sha512-+GPzk8PlG0sPpzdU5ZvIRMPidzAnZDl/s9L+y13iodqvb8leL53bTannOrQ/Im7UkpsmFU5Ily5U60LWixnmLg==", + "version": "0.3.0", + "resolved": "https://registry.npmjs.org/@eslint/config-helpers/-/config-helpers-0.3.0.tgz", + "integrity": "sha512-ViuymvFmcJi04qdZeDc2whTHryouGcDlaxPqarTD0ZE10ISpxGUVZGZDx4w01upyIynL3iu6IXH2bS1NhclQMw==", "dev": true, "engines": { "node": "^18.18.0 || ^20.9.0 || >=21.1.0" } }, "node_modules/@eslint/core": { - "version": "0.14.0", - "resolved": "https://registry.npmjs.org/@eslint/core/-/core-0.14.0.tgz", - "integrity": "sha512-qIbV0/JZr7iSDjqAc60IqbLdsj9GDt16xQtWD+B78d/HAlvysGdZZ6rpJHGAc2T0FQx1X6thsSPdnoiGKdNtdg==", + "version": "0.15.1", + "resolved": "https://registry.npmjs.org/@eslint/core/-/core-0.15.1.tgz", + "integrity": "sha512-bkOp+iumZCCbt1K1CmWf0R9pM5yKpDv+ZXtvSyQpudrI9kuFLp+bM2WOPXImuD/ceQuaa8f5pj93Y7zyECIGNA==", "dev": true, "dependencies": { "@types/json-schema": "^7.0.15" @@ -2055,22 +933,10 @@ "url": "https://opencollective.com/eslint" } }, - "node_modules/@eslint/eslintrc/node_modules/globals": { - "version": "14.0.0", - "resolved": "https://registry.npmjs.org/globals/-/globals-14.0.0.tgz", - "integrity": "sha512-oahGvuMGQlPw/ivIYBjVSrWAfWLBeku5tpPE2fOPLi+WHffIWbuh2tCjhyQhTBPMf5E9jDEH4FOmTYgYwbKwtQ==", - "dev": true, - "engines": { - "node": ">=18" - }, - "funding": { - "url": "https://github.com/sponsors/sindresorhus" - } - }, "node_modules/@eslint/js": { - "version": "9.28.0", - "resolved": "https://registry.npmjs.org/@eslint/js/-/js-9.28.0.tgz", - "integrity": "sha512-fnqSjGWd/CoIp4EXIxWVK/sHA6DOHN4+8Ix2cX5ycOY7LG0UY8nHCU5pIp2eaE1Mc7Qd8kHspYNzYXT2ojPLzg==", + "version": "9.32.0", + "resolved": "https://registry.npmjs.org/@eslint/js/-/js-9.32.0.tgz", + "integrity": "sha512-BBpRFZK3eX6uMLKz8WxFOBIFFcGFJ/g8XuwjTHCqHROSIsopI+ddn/d5Cfh36+7+e5edVS8dbSHnBNhrLEX0zg==", "dev": true, "engines": { "node": "^18.18.0 || ^20.9.0 || >=21.1.0" @@ -2089,33 +955,18 @@ } }, "node_modules/@eslint/plugin-kit": { - "version": "0.3.1", - "resolved": "https://registry.npmjs.org/@eslint/plugin-kit/-/plugin-kit-0.3.1.tgz", - "integrity": "sha512-0J+zgWxHN+xXONWIyPWKFMgVuJoZuGiIFu8yxk7RJjxkzpGmyja5wRFqZIVtjDVOQpV+Rw0iOAjYPE2eQyjr0w==", + "version": "0.3.4", + "resolved": "https://registry.npmjs.org/@eslint/plugin-kit/-/plugin-kit-0.3.4.tgz", + "integrity": "sha512-Ul5l+lHEcw3L5+k8POx6r74mxEYKG5kOb6Xpy2gCRW6zweT6TEhAf8vhxGgjhqrd/VO/Dirhsb+1hNpD1ue9hw==", "dev": true, "dependencies": { - "@eslint/core": "^0.14.0", + "@eslint/core": "^0.15.1", "levn": "^0.4.1" }, "engines": { "node": "^18.18.0 || ^20.9.0 || >=21.1.0" } }, - "node_modules/@hapi/hoek": { - "version": "9.3.0", - "resolved": "https://registry.npmjs.org/@hapi/hoek/-/hoek-9.3.0.tgz", - "integrity": "sha512-/c6rf4UJlmHlC9b5BaNvzAcFv7HZ2QHaV0D4/HNlBdvFnvQq8RI4kYdhyPCl7Xj+oWvTWQ8ujhqS53LIgAe6KQ==", - "dev": true - }, - "node_modules/@hapi/topo": { - "version": "5.1.0", - "resolved": "https://registry.npmjs.org/@hapi/topo/-/topo-5.1.0.tgz", - "integrity": "sha512-foQZKJig7Ob0BMAYBfcJk8d77QtOe7Wo4ox7ff1lQYoNNAb6jwcY1ncdoy2e9wQZzvNy7ODZCYJkK8kzmcAnAg==", - "dev": true, - "dependencies": { - "@hapi/hoek": "^9.0.0" - } - }, "node_modules/@humanfs/core": { "version": "0.19.1", "resolved": "https://registry.npmjs.org/@humanfs/core/-/core-0.19.1.tgz", @@ -2177,69 +1028,10 @@ "url": "https://github.com/sponsors/nzakas" } }, - "node_modules/@jridgewell/gen-mapping": { - "version": "0.3.8", - "resolved": 
"https://registry.npmjs.org/@jridgewell/gen-mapping/-/gen-mapping-0.3.8.tgz", - "integrity": "sha512-imAbBGkb+ebQyxKgzv5Hu2nmROxoDOXHh80evxdoXNOrvAnVx7zimzc1Oo5h9RlfV4vPXaE2iM5pOFbvOCClWA==", - "devOptional": true, - "dependencies": { - "@jridgewell/set-array": "^1.2.1", - "@jridgewell/sourcemap-codec": "^1.4.10", - "@jridgewell/trace-mapping": "^0.3.24" - }, - "engines": { - "node": ">=6.0.0" - } - }, - "node_modules/@jridgewell/resolve-uri": { - "version": "3.1.2", - "resolved": "https://registry.npmjs.org/@jridgewell/resolve-uri/-/resolve-uri-3.1.2.tgz", - "integrity": "sha512-bRISgCIjP20/tbWSPWMEi54QVPRZExkuD9lJL+UIxUKtwVJA8wW1Trb1jMs1RFXo1CBTNZ/5hpC9QvmKWdopKw==", - "devOptional": true, - "engines": { - "node": ">=6.0.0" - } - }, - "node_modules/@jridgewell/set-array": { - "version": "1.2.1", - "resolved": "https://registry.npmjs.org/@jridgewell/set-array/-/set-array-1.2.1.tgz", - "integrity": "sha512-R8gLRTZeyp03ymzP/6Lil/28tGeGEzhx1q2k703KGWRAI1VdvPIXdG70VJc2pAMw3NA6JKL5hhFu1sJX0Mnn/A==", - "devOptional": true, - "engines": { - "node": ">=6.0.0" - } - }, - "node_modules/@jridgewell/source-map": { - "version": "0.3.6", - "resolved": "https://registry.npmjs.org/@jridgewell/source-map/-/source-map-0.3.6.tgz", - "integrity": "sha512-1ZJTZebgqllO79ue2bm3rIGud/bOe0pP5BjSRCRxxYkEZS8STV7zN84UBbiYu7jy+eCKSnVIUgoWWE/tt+shMQ==", - "devOptional": true, - "dependencies": { - "@jridgewell/gen-mapping": "^0.3.5", - "@jridgewell/trace-mapping": "^0.3.25" - } - }, "node_modules/@jridgewell/sourcemap-codec": { - "version": "1.5.0", - "resolved": "https://registry.npmjs.org/@jridgewell/sourcemap-codec/-/sourcemap-codec-1.5.0.tgz", - "integrity": "sha512-gv3ZRaISU3fjPAgNsriBRqGWQL6quFx04YMPW/zD8XMLsU32mhCCbfbO6KZFLjvYpCZ8zyDEgqsgf+PwPaM7GQ==" - }, - "node_modules/@jridgewell/trace-mapping": { - "version": "0.3.25", - "resolved": "https://registry.npmjs.org/@jridgewell/trace-mapping/-/trace-mapping-0.3.25.tgz", - "integrity": "sha512-vNk6aEwybGtawWmy/PzwnGDOjCkLWSD2wqvjGGAgOAwCGWySYXfYoxt00IJkTF+8Lb57DwOb3Aa0o9CApepiYQ==", - "devOptional": true, - "dependencies": { - "@jridgewell/resolve-uri": "^3.1.0", - "@jridgewell/sourcemap-codec": "^1.4.14" - } - }, - "node_modules/@leichtgewicht/ip-codec": { - "version": "2.0.5", - "resolved": "https://registry.npmjs.org/@leichtgewicht/ip-codec/-/ip-codec-2.0.5.tgz", - "integrity": "sha512-Vo+PSpZG2/fmgmiNzYK9qWRh8h/CHrwD0mo1h1DzL4yzHNSfWYujGTYsWGreD000gcgmZ7K4Ys6Tx9TxtsKdDw==", - "dev": true, - "peer": true + "version": "1.5.4", + "resolved": "https://registry.npmjs.org/@jridgewell/sourcemap-codec/-/sourcemap-codec-1.5.4.tgz", + "integrity": "sha512-VT2+G1VQs/9oz078bLrYbecdZKs912zQlkelYpuf+SXF+QvZDYJlbx/LSx+meSAwdDFnF8FVXW92AVjjkVmgFw==" }, "node_modules/@lezer/common": { "version": "1.2.3", @@ -2257,9 +1049,9 @@ } }, "node_modules/@lezer/css": { - "version": "1.2.1", - "resolved": "https://registry.npmjs.org/@lezer/css/-/css-1.2.1.tgz", - "integrity": "sha512-2F5tOqzKEKbCUNraIXc0f6HKeyKlmMWJnBB0i4XW6dJgssrZO/YlZ2pY5xgyqDleqqhiNJ3dQhbrV2aClZQMvg==", + "version": "1.3.0", + "resolved": "https://registry.npmjs.org/@lezer/css/-/css-1.3.0.tgz", + "integrity": "sha512-pBL7hup88KbI7hXnZV3PQsn43DHy6TWyzuyk2AO9UyoXcDltvIdqWKE1dLL/45JVZ+YZkHe1WVHqO6wugZZWcw==", "dependencies": { "@lezer/common": "^1.2.0", "@lezer/highlight": "^1.0.0", @@ -2342,9 +1134,9 @@ } }, "node_modules/@lezer/php": { - "version": "1.0.2", - "resolved": "https://registry.npmjs.org/@lezer/php/-/php-1.0.2.tgz", - "integrity": 
"sha512-GN7BnqtGRpFyeoKSEqxvGvhJQiI4zkgmYnDk/JIyc7H7Ifc1tkPnUn/R2R8meH3h/aBf5rzjvU8ZQoyiNDtDrA==", + "version": "1.0.4", + "resolved": "https://registry.npmjs.org/@lezer/php/-/php-1.0.4.tgz", + "integrity": "sha512-D2dJ0t8Z28/G1guztRczMFvPDUqzeMLSQbdWQmaiHV7urc8NlEOnjYk9UrZ531OcLiRxD4Ihcbv7AsDpNKDRaQ==", "dependencies": { "@lezer/common": "^1.2.0", "@lezer/highlight": "^1.0.0", @@ -2411,190 +1203,251 @@ "resolved": "https://registry.npmjs.org/@mdi/font/-/font-7.4.47.tgz", "integrity": "sha512-43MtGpd585SNzHZPcYowu/84Vz2a2g31TvPMTm9uTiCSWzaheQySUcSyUH/46fPnuPQWof2yd0pGBtzee/IQWw==" }, - "node_modules/@nicolo-ribaudo/eslint-scope-5-internals": { - "version": "5.1.1-v1", - "resolved": "https://registry.npmjs.org/@nicolo-ribaudo/eslint-scope-5-internals/-/eslint-scope-5-internals-5.1.1-v1.tgz", - "integrity": "sha512-54/JRvkLIzzDWshCWfuhadfrfZVPiElY8Fcgmg1HroEly/EDSszzhBAsarCux+D/kOslTRquNzuyGSmUSTTHGg==", - "dev": true, - "dependencies": { - "eslint-scope": "5.1.1" - } - }, - "node_modules/@node-ipc/js-queue": { - "version": "2.0.3", - "resolved": "https://registry.npmjs.org/@node-ipc/js-queue/-/js-queue-2.0.3.tgz", - "integrity": "sha512-fL1wpr8hhD5gT2dA1qifeVaoDFlQR5es8tFuKqjHX+kdOtdNHnxkVZbtIrR2rxnMFvehkjaZRNV2H/gPXlb0hw==", - "dev": true, - "dependencies": { - "easy-stack": "1.0.1" - }, - "engines": { - "node": ">=1.0.0" - } - }, - "node_modules/@nodelib/fs.scandir": { - "version": "2.1.5", - "resolved": "https://registry.npmjs.org/@nodelib/fs.scandir/-/fs.scandir-2.1.5.tgz", - "integrity": "sha512-vq24Bq3ym5HEQm2NKCr3yXDwjc7vTsEThRDnkp2DK9p1uqLR+DHurm/NOTo0KG7HYHU7eppKZj3MyqYuMBf62g==", - "dev": true, - "peer": true, - "dependencies": { - "@nodelib/fs.stat": "2.0.5", - "run-parallel": "^1.1.9" - }, - "engines": { - "node": ">= 8" - } - }, - "node_modules/@nodelib/fs.stat": { - "version": "2.0.5", - "resolved": "https://registry.npmjs.org/@nodelib/fs.stat/-/fs.stat-2.0.5.tgz", - "integrity": "sha512-RkhPPp2zrqDAQA/2jNhnztcPAlv64XdhIp7a7454A5ovI7Bukxgt7MX7udwAu3zg1DcpPU0rz3VV1SeaqvY4+A==", - "dev": true, - "peer": true, - "engines": { - "node": ">= 8" - } - }, - "node_modules/@nodelib/fs.walk": { - "version": "1.2.8", - "resolved": "https://registry.npmjs.org/@nodelib/fs.walk/-/fs.walk-1.2.8.tgz", - "integrity": "sha512-oGB+UxlgWcgQkgwo8GcEGwemoTFt3FIO9ababBmaGwXIoBKZ+GTy0pP185beGg7Llih/NSHSV2XAs1lnznocSg==", - "dev": true, - "peer": true, - "dependencies": { - "@nodelib/fs.scandir": "2.1.5", - "fastq": "^1.6.0" - }, - "engines": { - "node": ">= 8" - } - }, - "node_modules/@polka/url": { - "version": "1.0.0-next.29", - "resolved": "https://registry.npmjs.org/@polka/url/-/url-1.0.0-next.29.tgz", - "integrity": "sha512-wwQAWhWSuHaag8c4q/KN/vCoeOJYshAIvMQwD4GpSb3OiZklFfvAgmj0VCBBImRpuF/aFgIRzllXlVX93Jevww==", - "dev": true, - "peer": true - }, - "node_modules/@sideway/address": { - "version": "4.1.5", - "resolved": "https://registry.npmjs.org/@sideway/address/-/address-4.1.5.tgz", - "integrity": "sha512-IqO/DUQHUkPeixNQ8n0JA6102hT9CmaljNTPmQ1u8MEhBo/R4Q8eKLN/vGZxuebwOroDB4cbpjheD4+/sKFK4Q==", - "dev": true, - "dependencies": { - "@hapi/hoek": "^9.0.0" - } - }, - "node_modules/@sideway/formula": { - "version": "3.0.1", - "resolved": "https://registry.npmjs.org/@sideway/formula/-/formula-3.0.1.tgz", - "integrity": "sha512-/poHZJJVjx3L+zVD6g9KgHfYnb443oi7wLu/XKojDviHy6HOEOA6z1Trk5aR1dGcmPenJEgb2sK2I80LeS3MIg==", + "node_modules/@rolldown/pluginutils": { + "version": "1.0.0-beta.29", + "resolved": "https://registry.npmjs.org/@rolldown/pluginutils/-/pluginutils-1.0.0-beta.29.tgz", + "integrity": 
"sha512-NIJgOsMjbxAXvoGq/X0gD7VPMQ8j9g0BiDaNjVNVjvl+iKXxL3Jre0v31RmBYeLEmkbj2s02v8vFTbUXi5XS2Q==", "dev": true }, - "node_modules/@sideway/pinpoint": { - "version": "2.0.0", - "resolved": "https://registry.npmjs.org/@sideway/pinpoint/-/pinpoint-2.0.0.tgz", - "integrity": "sha512-RNiOoTPkptFtSVzQevY/yWtZwf/RxyVnPy/OcA9HBM3MlGDnBEYL5B41H0MTn0Uec8Hi+2qUtTfG2WWZBmMejQ==", - "dev": true + "node_modules/@rollup/rollup-android-arm-eabi": { + "version": "4.46.2", + "resolved": "https://registry.npmjs.org/@rollup/rollup-android-arm-eabi/-/rollup-android-arm-eabi-4.46.2.tgz", + "integrity": "sha512-Zj3Hl6sN34xJtMv7Anwb5Gu01yujyE/cLBDB2gnHTAHaWS1Z38L7kuSG+oAh0giZMqG060f/YBStXtMH6FvPMA==", + "cpu": [ + "arm" + ], + "optional": true, + "os": [ + "android" + ] }, - "node_modules/@soda/friendly-errors-webpack-plugin": { - "version": "1.8.1", - "resolved": "https://registry.npmjs.org/@soda/friendly-errors-webpack-plugin/-/friendly-errors-webpack-plugin-1.8.1.tgz", - "integrity": "sha512-h2ooWqP8XuFqTXT+NyAFbrArzfQA7R6HTezADrvD9Re8fxMLTPPniLdqVTdDaO0eIoLaAwKT+d6w+5GeTk7Vbg==", - "dev": true, - "peer": true, - "dependencies": { - "chalk": "^3.0.0", - "error-stack-parser": "^2.0.6", - "string-width": "^4.2.3", - "strip-ansi": "^6.0.1" - }, - "engines": { - "node": ">=8.0.0" - }, - "peerDependencies": { - "webpack": "^4.0.0 || ^5.0.0" - } + "node_modules/@rollup/rollup-android-arm64": { + "version": "4.46.2", + "resolved": "https://registry.npmjs.org/@rollup/rollup-android-arm64/-/rollup-android-arm64-4.46.2.tgz", + "integrity": "sha512-nTeCWY83kN64oQ5MGz3CgtPx8NSOhC5lWtsjTs+8JAJNLcP3QbLCtDDgUKQc/Ro/frpMq4SHUaHN6AMltcEoLQ==", + "cpu": [ + "arm64" + ], + "optional": true, + "os": [ + "android" + ] }, - "node_modules/@soda/get-current-script": { - "version": "1.0.2", - "resolved": "https://registry.npmjs.org/@soda/get-current-script/-/get-current-script-1.0.2.tgz", - "integrity": "sha512-T7VNNlYVM1SgQ+VsMYhnDkcGmWhQdL0bDyGm5TlQ3GBXnJscEClUUOKduWTmm2zCnvNLC1hc3JpuXjs/nFOc5w==", - "dev": true, - "peer": true + "node_modules/@rollup/rollup-darwin-arm64": { + "version": "4.46.2", + "resolved": "https://registry.npmjs.org/@rollup/rollup-darwin-arm64/-/rollup-darwin-arm64-4.46.2.tgz", + "integrity": "sha512-HV7bW2Fb/F5KPdM/9bApunQh68YVDU8sO8BvcW9OngQVN3HHHkw99wFupuUJfGR9pYLLAjcAOA6iO+evsbBaPQ==", + "cpu": [ + "arm64" + ], + "optional": true, + "os": [ + "darwin" + ] }, - "node_modules/@trysound/sax": { - "version": "0.2.0", - "resolved": "https://registry.npmjs.org/@trysound/sax/-/sax-0.2.0.tgz", - "integrity": "sha512-L7z9BgrNEcYyUYtF+HaEfiS5ebkh9jXqbszz7pC0hRBPaatV0XjSD3+eHrpqFemQfgwiFF0QPIarnIihIDn7OA==", - "dev": true, - "peer": true, - "engines": { - "node": ">=10.13.0" - } + "node_modules/@rollup/rollup-darwin-x64": { + "version": "4.46.2", + "resolved": "https://registry.npmjs.org/@rollup/rollup-darwin-x64/-/rollup-darwin-x64-4.46.2.tgz", + "integrity": "sha512-SSj8TlYV5nJixSsm/y3QXfhspSiLYP11zpfwp6G/YDXctf3Xkdnk4woJIF5VQe0of2OjzTt8EsxnJDCdHd2xMA==", + "cpu": [ + "x64" + ], + "optional": true, + "os": [ + "darwin" + ] }, - "node_modules/@types/body-parser": { - "version": "1.19.5", - "resolved": "https://registry.npmjs.org/@types/body-parser/-/body-parser-1.19.5.tgz", - "integrity": "sha512-fB3Zu92ucau0iQ0JMCFQE7b/dv8Ot07NI3KaZIkIUNXq82k4eBAqUaneXfleGY9JWskeS9y+u0nXMyspcuQrCg==", - "dev": true, - "peer": true, - "dependencies": { - "@types/connect": "*", - "@types/node": "*" - } + "node_modules/@rollup/rollup-freebsd-arm64": { + "version": "4.46.2", + "resolved": 
"https://registry.npmjs.org/@rollup/rollup-freebsd-arm64/-/rollup-freebsd-arm64-4.46.2.tgz", + "integrity": "sha512-ZyrsG4TIT9xnOlLsSSi9w/X29tCbK1yegE49RYm3tu3wF1L/B6LVMqnEWyDB26d9Ecx9zrmXCiPmIabVuLmNSg==", + "cpu": [ + "arm64" + ], + "optional": true, + "os": [ + "freebsd" + ] }, - "node_modules/@types/bonjour": { - "version": "3.5.13", - "resolved": "https://registry.npmjs.org/@types/bonjour/-/bonjour-3.5.13.tgz", - "integrity": "sha512-z9fJ5Im06zvUL548KvYNecEVlA7cVDkGUi6kZusb04mpyEFKCIZJvloCcmpmLaIahDpOQGHaHmG6imtPMmPXGQ==", - "dev": true, - "peer": true, - "dependencies": { - "@types/node": "*" - } + "node_modules/@rollup/rollup-freebsd-x64": { + "version": "4.46.2", + "resolved": "https://registry.npmjs.org/@rollup/rollup-freebsd-x64/-/rollup-freebsd-x64-4.46.2.tgz", + "integrity": "sha512-pCgHFoOECwVCJ5GFq8+gR8SBKnMO+xe5UEqbemxBpCKYQddRQMgomv1104RnLSg7nNvgKy05sLsY51+OVRyiVw==", + "cpu": [ + "x64" + ], + "optional": true, + "os": [ + "freebsd" + ] }, - "node_modules/@types/connect": { - "version": "3.4.38", - "resolved": "https://registry.npmjs.org/@types/connect/-/connect-3.4.38.tgz", - "integrity": "sha512-K6uROf1LD88uDQqJCktA4yzL1YYAK6NgfsI0v/mTgyPKWsX1CnJ0XPSDhViejru1GcRkLWb8RlzFYJRqGUbaug==", - "dev": true, - "peer": true, - "dependencies": { - "@types/node": "*" - } + "node_modules/@rollup/rollup-linux-arm-gnueabihf": { + "version": "4.46.2", + "resolved": "https://registry.npmjs.org/@rollup/rollup-linux-arm-gnueabihf/-/rollup-linux-arm-gnueabihf-4.46.2.tgz", + "integrity": "sha512-EtP8aquZ0xQg0ETFcxUbU71MZlHaw9MChwrQzatiE8U/bvi5uv/oChExXC4mWhjiqK7azGJBqU0tt5H123SzVA==", + "cpu": [ + "arm" + ], + "optional": true, + "os": [ + "linux" + ] }, - "node_modules/@types/connect-history-api-fallback": { - "version": "1.5.4", - "resolved": "https://registry.npmjs.org/@types/connect-history-api-fallback/-/connect-history-api-fallback-1.5.4.tgz", - "integrity": "sha512-n6Cr2xS1h4uAulPRdlw6Jl6s1oG8KrVilPN2yUITEs+K48EzMJJ3W1xy8K5eWuFvjp3R74AOIGSmp2UfBJ8HFw==", - "dev": true, - "peer": true, - "dependencies": { - "@types/express-serve-static-core": "*", - "@types/node": "*" - } + "node_modules/@rollup/rollup-linux-arm-musleabihf": { + "version": "4.46.2", + "resolved": "https://registry.npmjs.org/@rollup/rollup-linux-arm-musleabihf/-/rollup-linux-arm-musleabihf-4.46.2.tgz", + "integrity": "sha512-qO7F7U3u1nfxYRPM8HqFtLd+raev2K137dsV08q/LRKRLEc7RsiDWihUnrINdsWQxPR9jqZ8DIIZ1zJJAm5PjQ==", + "cpu": [ + "arm" + ], + "optional": true, + "os": [ + "linux" + ] }, - "node_modules/@types/eslint": { - "version": "9.6.1", - "resolved": "https://registry.npmjs.org/@types/eslint/-/eslint-9.6.1.tgz", - "integrity": "sha512-FXx2pKgId/WyYo2jXw63kk7/+TY7u7AziEJxJAnSFzHlqTAS3Ync6SvgYAN/k4/PQpnnVuzoMuVnByKK2qp0ag==", - "devOptional": true, - "dependencies": { - "@types/estree": "*", - "@types/json-schema": "*" - } + "node_modules/@rollup/rollup-linux-arm64-gnu": { + "version": "4.46.2", + "resolved": "https://registry.npmjs.org/@rollup/rollup-linux-arm64-gnu/-/rollup-linux-arm64-gnu-4.46.2.tgz", + "integrity": "sha512-3dRaqLfcOXYsfvw5xMrxAk9Lb1f395gkoBYzSFcc/scgRFptRXL9DOaDpMiehf9CO8ZDRJW2z45b6fpU5nwjng==", + "cpu": [ + "arm64" + ], + "optional": true, + "os": [ + "linux" + ] }, - "node_modules/@types/eslint-scope": { - "version": "3.7.7", - "resolved": "https://registry.npmjs.org/@types/eslint-scope/-/eslint-scope-3.7.7.tgz", - "integrity": "sha512-MzMFlSLBqNF2gcHWO0G1vP/YQyfvrxZ0bF+u7mzUdZ1/xK4A4sru+nraZz5i3iEIk1l1uyicaDVTB4QbbEkAYg==", - "devOptional": true, - "dependencies": { - "@types/eslint": "*", - 
"@types/estree": "*" - } + "node_modules/@rollup/rollup-linux-arm64-musl": { + "version": "4.46.2", + "resolved": "https://registry.npmjs.org/@rollup/rollup-linux-arm64-musl/-/rollup-linux-arm64-musl-4.46.2.tgz", + "integrity": "sha512-fhHFTutA7SM+IrR6lIfiHskxmpmPTJUXpWIsBXpeEwNgZzZZSg/q4i6FU4J8qOGyJ0TR+wXBwx/L7Ho9z0+uDg==", + "cpu": [ + "arm64" + ], + "optional": true, + "os": [ + "linux" + ] + }, + "node_modules/@rollup/rollup-linux-loongarch64-gnu": { + "version": "4.46.2", + "resolved": "https://registry.npmjs.org/@rollup/rollup-linux-loongarch64-gnu/-/rollup-linux-loongarch64-gnu-4.46.2.tgz", + "integrity": "sha512-i7wfGFXu8x4+FRqPymzjD+Hyav8l95UIZ773j7J7zRYc3Xsxy2wIn4x+llpunexXe6laaO72iEjeeGyUFmjKeA==", + "cpu": [ + "loong64" + ], + "optional": true, + "os": [ + "linux" + ] + }, + "node_modules/@rollup/rollup-linux-ppc64-gnu": { + "version": "4.46.2", + "resolved": "https://registry.npmjs.org/@rollup/rollup-linux-ppc64-gnu/-/rollup-linux-ppc64-gnu-4.46.2.tgz", + "integrity": "sha512-B/l0dFcHVUnqcGZWKcWBSV2PF01YUt0Rvlurci5P+neqY/yMKchGU8ullZvIv5e8Y1C6wOn+U03mrDylP5q9Yw==", + "cpu": [ + "ppc64" + ], + "optional": true, + "os": [ + "linux" + ] + }, + "node_modules/@rollup/rollup-linux-riscv64-gnu": { + "version": "4.46.2", + "resolved": "https://registry.npmjs.org/@rollup/rollup-linux-riscv64-gnu/-/rollup-linux-riscv64-gnu-4.46.2.tgz", + "integrity": "sha512-32k4ENb5ygtkMwPMucAb8MtV8olkPT03oiTxJbgkJa7lJ7dZMr0GCFJlyvy+K8iq7F/iuOr41ZdUHaOiqyR3iQ==", + "cpu": [ + "riscv64" + ], + "optional": true, + "os": [ + "linux" + ] + }, + "node_modules/@rollup/rollup-linux-riscv64-musl": { + "version": "4.46.2", + "resolved": "https://registry.npmjs.org/@rollup/rollup-linux-riscv64-musl/-/rollup-linux-riscv64-musl-4.46.2.tgz", + "integrity": "sha512-t5B2loThlFEauloaQkZg9gxV05BYeITLvLkWOkRXogP4qHXLkWSbSHKM9S6H1schf/0YGP/qNKtiISlxvfmmZw==", + "cpu": [ + "riscv64" + ], + "optional": true, + "os": [ + "linux" + ] + }, + "node_modules/@rollup/rollup-linux-s390x-gnu": { + "version": "4.46.2", + "resolved": "https://registry.npmjs.org/@rollup/rollup-linux-s390x-gnu/-/rollup-linux-s390x-gnu-4.46.2.tgz", + "integrity": "sha512-YKjekwTEKgbB7n17gmODSmJVUIvj8CX7q5442/CK80L8nqOUbMtf8b01QkG3jOqyr1rotrAnW6B/qiHwfcuWQA==", + "cpu": [ + "s390x" + ], + "optional": true, + "os": [ + "linux" + ] + }, + "node_modules/@rollup/rollup-linux-x64-gnu": { + "version": "4.46.2", + "resolved": "https://registry.npmjs.org/@rollup/rollup-linux-x64-gnu/-/rollup-linux-x64-gnu-4.46.2.tgz", + "integrity": "sha512-Jj5a9RUoe5ra+MEyERkDKLwTXVu6s3aACP51nkfnK9wJTraCC8IMe3snOfALkrjTYd2G1ViE1hICj0fZ7ALBPA==", + "cpu": [ + "x64" + ], + "optional": true, + "os": [ + "linux" + ] + }, + "node_modules/@rollup/rollup-linux-x64-musl": { + "version": "4.46.2", + "resolved": "https://registry.npmjs.org/@rollup/rollup-linux-x64-musl/-/rollup-linux-x64-musl-4.46.2.tgz", + "integrity": "sha512-7kX69DIrBeD7yNp4A5b81izs8BqoZkCIaxQaOpumcJ1S/kmqNFjPhDu1LHeVXv0SexfHQv5cqHsxLOjETuqDuA==", + "cpu": [ + "x64" + ], + "optional": true, + "os": [ + "linux" + ] + }, + "node_modules/@rollup/rollup-win32-arm64-msvc": { + "version": "4.46.2", + "resolved": "https://registry.npmjs.org/@rollup/rollup-win32-arm64-msvc/-/rollup-win32-arm64-msvc-4.46.2.tgz", + "integrity": "sha512-wiJWMIpeaak/jsbaq2HMh/rzZxHVW1rU6coyeNNpMwk5isiPjSTx0a4YLSlYDwBH/WBvLz+EtsNqQScZTLJy3g==", + "cpu": [ + "arm64" + ], + "optional": true, + "os": [ + "win32" + ] + }, + "node_modules/@rollup/rollup-win32-ia32-msvc": { + "version": "4.46.2", + "resolved": 
"https://registry.npmjs.org/@rollup/rollup-win32-ia32-msvc/-/rollup-win32-ia32-msvc-4.46.2.tgz", + "integrity": "sha512-gBgaUDESVzMgWZhcyjfs9QFK16D8K6QZpwAaVNJxYDLHWayOta4ZMjGm/vsAEy3hvlS2GosVFlBlP9/Wb85DqQ==", + "cpu": [ + "ia32" + ], + "optional": true, + "os": [ + "win32" + ] + }, + "node_modules/@rollup/rollup-win32-x64-msvc": { + "version": "4.46.2", + "resolved": "https://registry.npmjs.org/@rollup/rollup-win32-x64-msvc/-/rollup-win32-x64-msvc-4.46.2.tgz", + "integrity": "sha512-CvUo2ixeIQGtF6WvuB87XWqPQkoFAFqW+HUo/WzHwuHDvIwZCtjdWXoYCcr06iKGydiqTclC4jU/TNObC/xKZg==", + "cpu": [ + "x64" + ], + "optional": true, + "os": [ + "win32" + ] }, "node_modules/@types/estree": { "version": "1.0.8", @@ -2602,843 +1455,123 @@ "integrity": "sha512-dWHzHa2WqEXI/O1E9OjrocMTKJl2mSrEolh1Iomrv6U+JuNwaHXsXx9bLu5gG7BUWFIN0skIQJQ/L1rIex4X6w==", "devOptional": true }, - "node_modules/@types/express": { - "version": "4.17.22", - "resolved": "https://registry.npmjs.org/@types/express/-/express-4.17.22.tgz", - "integrity": "sha512-eZUmSnhRX9YRSkplpz0N+k6NljUUn5l3EWZIKZvYzhvMphEuNiyyy1viH/ejgt66JWgALwC/gtSUAeQKtSwW/w==", - "dev": true, - "peer": true, - "dependencies": { - "@types/body-parser": "*", - "@types/express-serve-static-core": "^4.17.33", - "@types/qs": "*", - "@types/serve-static": "*" - } - }, - "node_modules/@types/express-serve-static-core": { - "version": "5.0.6", - "resolved": "https://registry.npmjs.org/@types/express-serve-static-core/-/express-serve-static-core-5.0.6.tgz", - "integrity": "sha512-3xhRnjJPkULekpSzgtoNYYcTWgEZkp4myc+Saevii5JPnHNvHMRlBSHDbs7Bh1iPPoVTERHEZXyhyLbMEsExsA==", - "dev": true, - "peer": true, - "dependencies": { - "@types/node": "*", - "@types/qs": "*", - "@types/range-parser": "*", - "@types/send": "*" - } - }, - "node_modules/@types/express/node_modules/@types/express-serve-static-core": { - "version": "4.19.6", - "resolved": "https://registry.npmjs.org/@types/express-serve-static-core/-/express-serve-static-core-4.19.6.tgz", - "integrity": "sha512-N4LZ2xG7DatVqhCZzOGb1Yi5lMbXSZcmdLDe9EzSndPV2HpWYWzRbaerl2n27irrm94EPpprqa8KpskPT085+A==", - "dev": true, - "peer": true, - "dependencies": { - "@types/node": "*", - "@types/qs": "*", - "@types/range-parser": "*", - "@types/send": "*" - } - }, - "node_modules/@types/html-minifier-terser": { - "version": "6.1.0", - "resolved": "https://registry.npmjs.org/@types/html-minifier-terser/-/html-minifier-terser-6.1.0.tgz", - "integrity": "sha512-oh/6byDPnL1zeNXFrDXFLyZjkr1MsBG667IM792caf1L2UPOOMf65NFzjUH/ltyfwjAGfs1rsX1eftK0jC/KIg==", - "dev": true, - "peer": true - }, - "node_modules/@types/http-errors": { - "version": "2.0.4", - "resolved": "https://registry.npmjs.org/@types/http-errors/-/http-errors-2.0.4.tgz", - "integrity": "sha512-D0CFMMtydbJAegzOyHjtiKPLlvnm3iTZyZRSZoLq2mRhDdmLfIWOCYPfQJ4cu2erKghU++QvjcUjp/5h7hESpA==", - "dev": true, - "peer": true - }, - "node_modules/@types/http-proxy": { - "version": "1.17.16", - "resolved": "https://registry.npmjs.org/@types/http-proxy/-/http-proxy-1.17.16.tgz", - "integrity": "sha512-sdWoUajOB1cd0A8cRRQ1cfyWNbmFKLAqBB89Y8x5iYyG/mkJHc0YUH8pdWBy2omi9qtCpiIgGjuwO0dQST2l5w==", - "dev": true, - "peer": true, - "dependencies": { - "@types/node": "*" - } - }, "node_modules/@types/json-schema": { "version": "7.0.15", "resolved": "https://registry.npmjs.org/@types/json-schema/-/json-schema-7.0.15.tgz", "integrity": "sha512-5+fP8P8MFNC+AyZCDxrB2pkZFPGzqQWUzpSeuuVLvm8VMcorNYavBqoFcxK8bQz4Qsbn4oUEEem4wDLfcysGHA==", - "devOptional": true - }, - "node_modules/@types/mime": { - "version": "1.3.5", - 
"resolved": "https://registry.npmjs.org/@types/mime/-/mime-1.3.5.tgz", - "integrity": "sha512-/pyBZWSLD2n0dcHE3hq8s8ZvcETHtEuF+3E7XVt0Ig2nvsVQXdghHVcEkIWjy9A0wKfTn97a/PSDYohKIlnP/w==", - "dev": true, - "peer": true - }, - "node_modules/@types/minimist": { - "version": "1.2.5", - "resolved": "https://registry.npmjs.org/@types/minimist/-/minimist-1.2.5.tgz", - "integrity": "sha512-hov8bUuiLiyFPGyFPE1lwWhmzYbirOXQNNo40+y3zow8aFVTeyn3VWL0VFFfdNddA8S4Vf0Tc062rzyNr7Paag==", - "dev": true, - "peer": true - }, - "node_modules/@types/node": { - "version": "22.15.30", - "resolved": "https://registry.npmjs.org/@types/node/-/node-22.15.30.tgz", - "integrity": "sha512-6Q7lr06bEHdlfplU6YRbgG1SFBdlsfNC4/lX+SkhiTs0cpJkOElmWls8PxDFv4yY/xKb8Y6SO0OmSX4wgqTZbA==", - "devOptional": true, - "dependencies": { - "undici-types": "~6.21.0" - } - }, - "node_modules/@types/node-forge": { - "version": "1.3.11", - "resolved": "https://registry.npmjs.org/@types/node-forge/-/node-forge-1.3.11.tgz", - "integrity": "sha512-FQx220y22OKNTqaByeBGqHWYz4cl94tpcxeFdvBo3wjG6XPBuZ0BNgNZRV5J5TFmmcsJ4IzsLkmGRiQbnYsBEQ==", - "dev": true, - "peer": true, - "dependencies": { - "@types/node": "*" - } - }, - "node_modules/@types/normalize-package-data": { - "version": "2.4.4", - "resolved": "https://registry.npmjs.org/@types/normalize-package-data/-/normalize-package-data-2.4.4.tgz", - "integrity": "sha512-37i+OaWTh9qeK4LSHPsyRC7NahnGotNuZvjLSgcPzblpHB3rrCJxAOgI5gCdKm7coonsaX1Of0ILiTcnZjbfxA==", "dev": true }, - "node_modules/@types/parse-json": { - "version": "4.0.2", - "resolved": "https://registry.npmjs.org/@types/parse-json/-/parse-json-4.0.2.tgz", - "integrity": "sha512-dISoDXWWQwUquiKsyZ4Ng+HX2KsPL7LyHKHQwgGFEA3IaKac4Obd+h2a/a6waisAoepJlBcx9paWqjA8/HVjCw==", - "dev": true, - "peer": true - }, - "node_modules/@types/qs": { - "version": "6.14.0", - "resolved": "https://registry.npmjs.org/@types/qs/-/qs-6.14.0.tgz", - "integrity": "sha512-eOunJqu0K1923aExK6y8p6fsihYEn/BYuQ4g0CxAAgFc4b/ZLN4CrsRZ55srTdqoiLzU2B2evC+apEIxprEzkQ==", - "dev": true, - "peer": true - }, - "node_modules/@types/range-parser": { - "version": "1.2.7", - "resolved": "https://registry.npmjs.org/@types/range-parser/-/range-parser-1.2.7.tgz", - "integrity": "sha512-hKormJbkJqzQGhziax5PItDUTMAM9uE2XXQmM37dyd4hVM+5aVl7oVxMVUiVQn2oCQFN/LKCZdvSM0pFRqbSmQ==", - "dev": true, - "peer": true - }, - "node_modules/@types/retry": { - "version": "0.12.0", - "resolved": "https://registry.npmjs.org/@types/retry/-/retry-0.12.0.tgz", - "integrity": "sha512-wWKOClTTiizcZhXnPY4wikVAwmdYHp8q6DmC+EJUzAMsycb7HB32Kh9RN4+0gExjmPmZSAQjgURXIGATPegAvA==", - "dev": true, - "peer": true - }, - "node_modules/@types/send": { - "version": "0.17.4", - "resolved": "https://registry.npmjs.org/@types/send/-/send-0.17.4.tgz", - "integrity": "sha512-x2EM6TJOybec7c52BX0ZspPodMsQUd5L6PRwOunVyVUhXiBSKf3AezDL8Dgvgt5o0UfKNfuA0eMLr2wLT4AiBA==", - "dev": true, - "peer": true, - "dependencies": { - "@types/mime": "^1", - "@types/node": "*" - } - }, - "node_modules/@types/serve-index": { - "version": "1.9.4", - "resolved": "https://registry.npmjs.org/@types/serve-index/-/serve-index-1.9.4.tgz", - "integrity": "sha512-qLpGZ/c2fhSs5gnYsQxtDEq3Oy8SXPClIXkW5ghvAvsNuVSA8k+gCONcUCS/UjLEYvYps+e8uBtfgXgvhwfNug==", - "dev": true, - "peer": true, - "dependencies": { - "@types/express": "*" - } - }, - "node_modules/@types/serve-static": { - "version": "1.15.7", - "resolved": "https://registry.npmjs.org/@types/serve-static/-/serve-static-1.15.7.tgz", - "integrity": 
"sha512-W8Ym+h8nhuRwaKPaDw34QUkwsGi6Rc4yYqvKFo5rm2FUEhCFbzVWrxXUxuKK8TASjWsysJY0nsmNCGhCOIsrOw==", - "dev": true, - "peer": true, - "dependencies": { - "@types/http-errors": "*", - "@types/node": "*", - "@types/send": "*" - } - }, - "node_modules/@types/sockjs": { - "version": "0.3.36", - "resolved": "https://registry.npmjs.org/@types/sockjs/-/sockjs-0.3.36.tgz", - "integrity": "sha512-MK9V6NzAS1+Ud7JV9lJLFqW85VbC9dq3LmwZCuBe4wBDgKC0Kj/jd8Xl+nSviU+Qc3+m7umHHyHg//2KSa0a0Q==", - "dev": true, - "peer": true, - "dependencies": { - "@types/node": "*" - } - }, - "node_modules/@types/ws": { - "version": "8.18.1", - "resolved": "https://registry.npmjs.org/@types/ws/-/ws-8.18.1.tgz", - "integrity": "sha512-ThVF6DCVhA8kUGy+aazFQ4kXQ7E1Ty7A3ypFOe0IcJV8O/M511G99AW24irKrW56Wt44yG9+ij8FaqoBGkuBXg==", - "dev": true, - "peer": true, - "dependencies": { - "@types/node": "*" - } - }, - "node_modules/@vue/babel-helper-vue-jsx-merge-props": { - "version": "1.4.0", - "resolved": "https://registry.npmjs.org/@vue/babel-helper-vue-jsx-merge-props/-/babel-helper-vue-jsx-merge-props-1.4.0.tgz", - "integrity": "sha512-JkqXfCkUDp4PIlFdDQ0TdXoIejMtTHP67/pvxlgeY+u5k3LEdKuWZ3LK6xkxo52uDoABIVyRwqVkfLQJhk7VBA==", - "dev": true - }, - "node_modules/@vue/babel-helper-vue-transform-on": { - "version": "1.4.0", - "resolved": "https://registry.npmjs.org/@vue/babel-helper-vue-transform-on/-/babel-helper-vue-transform-on-1.4.0.tgz", - "integrity": "sha512-mCokbouEQ/ocRce/FpKCRItGo+013tHg7tixg3DUNS+6bmIchPt66012kBMm476vyEIJPafrvOf4E5OYj3shSw==", - "dev": true - }, - "node_modules/@vue/babel-plugin-jsx": { - "version": "1.4.0", - "resolved": "https://registry.npmjs.org/@vue/babel-plugin-jsx/-/babel-plugin-jsx-1.4.0.tgz", - "integrity": "sha512-9zAHmwgMWlaN6qRKdrg1uKsBKHvnUU+Py+MOCTuYZBoZsopa90Di10QRjB+YPnVss0BZbG/H5XFwJY1fTxJWhA==", + "node_modules/@vitejs/plugin-vue": { + "version": "6.0.1", + "resolved": "https://registry.npmjs.org/@vitejs/plugin-vue/-/plugin-vue-6.0.1.tgz", + "integrity": "sha512-+MaE752hU0wfPFJEUAIxqw18+20euHHdxVtMvbFcOEpjEyfqXH/5DCoTHiVJ0J29EhTJdoTkjEv5YBKU9dnoTw==", "dev": true, "dependencies": { - "@babel/helper-module-imports": "^7.25.9", - "@babel/helper-plugin-utils": "^7.26.5", - "@babel/plugin-syntax-jsx": "^7.25.9", - "@babel/template": "^7.26.9", - "@babel/traverse": "^7.26.9", - "@babel/types": "^7.26.9", - "@vue/babel-helper-vue-transform-on": "1.4.0", - "@vue/babel-plugin-resolve-type": "1.4.0", - "@vue/shared": "^3.5.13" - }, - "peerDependencies": { - "@babel/core": "^7.0.0-0" - }, - "peerDependenciesMeta": { - "@babel/core": { - "optional": true - } - } - }, - "node_modules/@vue/babel-plugin-resolve-type": { - "version": "1.4.0", - "resolved": "https://registry.npmjs.org/@vue/babel-plugin-resolve-type/-/babel-plugin-resolve-type-1.4.0.tgz", - "integrity": "sha512-4xqDRRbQQEWHQyjlYSgZsWj44KfiF6D+ktCuXyZ8EnVDYV3pztmXJDf1HveAjUAXxAnR8daCQT51RneWWxtTyQ==", - "dev": true, - "dependencies": { - "@babel/code-frame": "^7.26.2", - "@babel/helper-module-imports": "^7.25.9", - "@babel/helper-plugin-utils": "^7.26.5", - "@babel/parser": "^7.26.9", - "@vue/compiler-sfc": "^3.5.13" - }, - "funding": { - "url": "https://github.com/sponsors/sxzz" - }, - "peerDependencies": { - "@babel/core": "^7.0.0-0" - } - }, - "node_modules/@vue/babel-plugin-transform-vue-jsx": { - "version": "1.4.0", - "resolved": "https://registry.npmjs.org/@vue/babel-plugin-transform-vue-jsx/-/babel-plugin-transform-vue-jsx-1.4.0.tgz", - "integrity": 
"sha512-Fmastxw4MMx0vlgLS4XBX0XiBbUFzoMGeVXuMV08wyOfXdikAFqBTuYPR0tlk+XskL19EzHc39SgjrPGY23JnA==", - "dev": true, - "dependencies": { - "@babel/helper-module-imports": "^7.0.0", - "@babel/plugin-syntax-jsx": "^7.2.0", - "@vue/babel-helper-vue-jsx-merge-props": "^1.4.0", - "html-tags": "^2.0.0", - "lodash.kebabcase": "^4.1.1", - "svg-tags": "^1.0.0" - }, - "peerDependencies": { - "@babel/core": "^7.0.0-0" - } - }, - "node_modules/@vue/babel-preset-app": { - "version": "5.0.8", - "resolved": "https://registry.npmjs.org/@vue/babel-preset-app/-/babel-preset-app-5.0.8.tgz", - "integrity": "sha512-yl+5qhpjd8e1G4cMXfORkkBlvtPCIgmRf3IYCWYDKIQ7m+PPa5iTm4feiNmCMD6yGqQWMhhK/7M3oWGL9boKwg==", - "dev": true, - "dependencies": { - "@babel/core": "^7.12.16", - "@babel/helper-compilation-targets": "^7.12.16", - "@babel/helper-module-imports": "^7.12.13", - "@babel/plugin-proposal-class-properties": "^7.12.13", - "@babel/plugin-proposal-decorators": "^7.12.13", - "@babel/plugin-syntax-dynamic-import": "^7.8.3", - "@babel/plugin-syntax-jsx": "^7.12.13", - "@babel/plugin-transform-runtime": "^7.12.15", - "@babel/preset-env": "^7.12.16", - "@babel/runtime": "^7.12.13", - "@vue/babel-plugin-jsx": "^1.0.3", - "@vue/babel-preset-jsx": "^1.1.2", - "babel-plugin-dynamic-import-node": "^2.3.3", - "core-js": "^3.8.3", - "core-js-compat": "^3.8.3", - "semver": "^7.3.4" - }, - "peerDependencies": { - "@babel/core": "*", - "core-js": "^3", - "vue": "^2 || ^3.2.13" - }, - "peerDependenciesMeta": { - "core-js": { - "optional": true - }, - "vue": { - "optional": true - } - } - }, - "node_modules/@vue/babel-preset-app/node_modules/semver": { - "version": "7.7.2", - "resolved": "https://registry.npmjs.org/semver/-/semver-7.7.2.tgz", - "integrity": "sha512-RF0Fw+rO5AMf9MAyaRXI4AV0Ulj5lMHqVxxdSgiVbixSCXoEmmX/jk0CuJw4+3SqroYO9VoUh+HcuJivvtJemA==", - "dev": true, - "bin": { - "semver": "bin/semver.js" + "@rolldown/pluginutils": "1.0.0-beta.29" }, "engines": { - "node": ">=10" - } - }, - "node_modules/@vue/babel-preset-jsx": { - "version": "1.4.0", - "resolved": "https://registry.npmjs.org/@vue/babel-preset-jsx/-/babel-preset-jsx-1.4.0.tgz", - "integrity": "sha512-QmfRpssBOPZWL5xw7fOuHNifCQcNQC1PrOo/4fu6xlhlKJJKSA3HqX92Nvgyx8fqHZTUGMPHmFA+IDqwXlqkSA==", - "dev": true, - "dependencies": { - "@vue/babel-helper-vue-jsx-merge-props": "^1.4.0", - "@vue/babel-plugin-transform-vue-jsx": "^1.4.0", - "@vue/babel-sugar-composition-api-inject-h": "^1.4.0", - "@vue/babel-sugar-composition-api-render-instance": "^1.4.0", - "@vue/babel-sugar-functional-vue": "^1.4.0", - "@vue/babel-sugar-inject-h": "^1.4.0", - "@vue/babel-sugar-v-model": "^1.4.0", - "@vue/babel-sugar-v-on": "^1.4.0" + "node": "^20.19.0 || >=22.12.0" }, "peerDependencies": { - "@babel/core": "^7.0.0-0", - "vue": "*" - }, - "peerDependenciesMeta": { - "vue": { - "optional": true - } + "vite": "^5.0.0 || ^6.0.0 || ^7.0.0", + "vue": "^3.2.25" } }, - "node_modules/@vue/babel-sugar-composition-api-inject-h": { - "version": "1.4.0", - "resolved": "https://registry.npmjs.org/@vue/babel-sugar-composition-api-inject-h/-/babel-sugar-composition-api-inject-h-1.4.0.tgz", - "integrity": "sha512-VQq6zEddJHctnG4w3TfmlVp5FzDavUSut/DwR0xVoe/mJKXyMcsIibL42wPntozITEoY90aBV0/1d2KjxHU52g==", - "dev": true, - "dependencies": { - "@babel/plugin-syntax-jsx": "^7.2.0" - }, - "peerDependencies": { - "@babel/core": "^7.0.0-0" - } - }, - "node_modules/@vue/babel-sugar-composition-api-render-instance": { - "version": "1.4.0", - "resolved": 
"https://registry.npmjs.org/@vue/babel-sugar-composition-api-render-instance/-/babel-sugar-composition-api-render-instance-1.4.0.tgz", - "integrity": "sha512-6ZDAzcxvy7VcnCjNdHJ59mwK02ZFuP5CnucloidqlZwVQv5CQLijc3lGpR7MD3TWFi78J7+a8J56YxbCtHgT9Q==", - "dev": true, - "dependencies": { - "@babel/plugin-syntax-jsx": "^7.2.0" - }, - "peerDependencies": { - "@babel/core": "^7.0.0-0" - } - }, - "node_modules/@vue/babel-sugar-functional-vue": { - "version": "1.4.0", - "resolved": "https://registry.npmjs.org/@vue/babel-sugar-functional-vue/-/babel-sugar-functional-vue-1.4.0.tgz", - "integrity": "sha512-lTEB4WUFNzYt2In6JsoF9sAYVTo84wC4e+PoZWSgM6FUtqRJz7wMylaEhSRgG71YF+wfLD6cc9nqVeXN2rwBvw==", - "dev": true, - "dependencies": { - "@babel/plugin-syntax-jsx": "^7.2.0" - }, - "peerDependencies": { - "@babel/core": "^7.0.0-0" - } - }, - "node_modules/@vue/babel-sugar-inject-h": { - "version": "1.4.0", - "resolved": "https://registry.npmjs.org/@vue/babel-sugar-inject-h/-/babel-sugar-inject-h-1.4.0.tgz", - "integrity": "sha512-muwWrPKli77uO2fFM7eA3G1lAGnERuSz2NgAxuOLzrsTlQl8W4G+wwbM4nB6iewlKbwKRae3nL03UaF5ffAPMA==", - "dev": true, - "dependencies": { - "@babel/plugin-syntax-jsx": "^7.2.0" - }, - "peerDependencies": { - "@babel/core": "^7.0.0-0" - } - }, - "node_modules/@vue/babel-sugar-v-model": { - "version": "1.4.0", - "resolved": "https://registry.npmjs.org/@vue/babel-sugar-v-model/-/babel-sugar-v-model-1.4.0.tgz", - "integrity": "sha512-0t4HGgXb7WHYLBciZzN5s0Hzqan4Ue+p/3FdQdcaHAb7s5D9WZFGoSxEZHrR1TFVZlAPu1bejTKGeAzaaG3NCQ==", - "dev": true, - "dependencies": { - "@babel/plugin-syntax-jsx": "^7.2.0", - "@vue/babel-helper-vue-jsx-merge-props": "^1.4.0", - "@vue/babel-plugin-transform-vue-jsx": "^1.4.0", - "camelcase": "^5.0.0", - "html-tags": "^2.0.0", - "svg-tags": "^1.0.0" - }, - "peerDependencies": { - "@babel/core": "^7.0.0-0" - } - }, - "node_modules/@vue/babel-sugar-v-on": { - "version": "1.4.0", - "resolved": "https://registry.npmjs.org/@vue/babel-sugar-v-on/-/babel-sugar-v-on-1.4.0.tgz", - "integrity": "sha512-m+zud4wKLzSKgQrWwhqRObWzmTuyzl6vOP7024lrpeJM4x2UhQtRDLgYjXAw9xBXjCwS0pP9kXjg91F9ZNo9JA==", - "dev": true, - "dependencies": { - "@babel/plugin-syntax-jsx": "^7.2.0", - "@vue/babel-plugin-transform-vue-jsx": "^1.4.0", - "camelcase": "^5.0.0" - }, - "peerDependencies": { - "@babel/core": "^7.0.0-0" - } - }, - "node_modules/@vue/cli-overlay": { - "version": "5.0.8", - "resolved": "https://registry.npmjs.org/@vue/cli-overlay/-/cli-overlay-5.0.8.tgz", - "integrity": "sha512-KmtievE/B4kcXp6SuM2gzsnSd8WebkQpg3XaB6GmFh1BJGRqa1UiW9up7L/Q67uOdTigHxr5Ar2lZms4RcDjwQ==", - "dev": true, - "peer": true - }, - "node_modules/@vue/cli-plugin-babel": { - "version": "5.0.8", - "resolved": "https://registry.npmjs.org/@vue/cli-plugin-babel/-/cli-plugin-babel-5.0.8.tgz", - "integrity": "sha512-a4qqkml3FAJ3auqB2kN2EMPocb/iu0ykeELwed+9B1c1nQ1HKgslKMHMPavYx3Cd/QAx2mBD4hwKBqZXEI/CsQ==", - "dev": true, - "dependencies": { - "@babel/core": "^7.12.16", - "@vue/babel-preset-app": "^5.0.8", - "@vue/cli-shared-utils": "^5.0.8", - "babel-loader": "^8.2.2", - "thread-loader": "^3.0.0", - "webpack": "^5.54.0" - }, - "peerDependencies": { - "@vue/cli-service": "^3.0.0 || ^4.0.0 || ^5.0.0-0" - } - }, - "node_modules/@vue/cli-plugin-router": { - "version": "5.0.8", - "resolved": "https://registry.npmjs.org/@vue/cli-plugin-router/-/cli-plugin-router-5.0.8.tgz", - "integrity": "sha512-Gmv4dsGdAsWPqVijz3Ux2OS2HkMrWi1ENj2cYL75nUeL+Xj5HEstSqdtfZ0b1q9NCce+BFB6QnHfTBXc/fCvMg==", - "dev": true, - "peer": true, - "dependencies": { - 
"@vue/cli-shared-utils": "^5.0.8" - }, - "peerDependencies": { - "@vue/cli-service": "^3.0.0 || ^4.0.0 || ^5.0.0-0" - } - }, - "node_modules/@vue/cli-plugin-vuex": { - "version": "5.0.8", - "resolved": "https://registry.npmjs.org/@vue/cli-plugin-vuex/-/cli-plugin-vuex-5.0.8.tgz", - "integrity": "sha512-HSYWPqrunRE5ZZs8kVwiY6oWcn95qf/OQabwLfprhdpFWAGtLStShjsGED2aDpSSeGAskQETrtR/5h7VqgIlBA==", - "dev": true, - "peer": true, - "peerDependencies": { - "@vue/cli-service": "^3.0.0 || ^4.0.0 || ^5.0.0-0" - } - }, - "node_modules/@vue/cli-service": { - "version": "5.0.8", - "resolved": "https://registry.npmjs.org/@vue/cli-service/-/cli-service-5.0.8.tgz", - "integrity": "sha512-nV7tYQLe7YsTtzFrfOMIHc5N2hp5lHG2rpYr0aNja9rNljdgcPZLyQRb2YRivTHqTv7lI962UXFURcpStHgyFw==", - "dev": true, - "peer": true, - "dependencies": { - "@babel/helper-compilation-targets": "^7.12.16", - "@soda/friendly-errors-webpack-plugin": "^1.8.0", - "@soda/get-current-script": "^1.0.2", - "@types/minimist": "^1.2.0", - "@vue/cli-overlay": "^5.0.8", - "@vue/cli-plugin-router": "^5.0.8", - "@vue/cli-plugin-vuex": "^5.0.8", - "@vue/cli-shared-utils": "^5.0.8", - "@vue/component-compiler-utils": "^3.3.0", - "@vue/vue-loader-v15": "npm:vue-loader@^15.9.7", - "@vue/web-component-wrapper": "^1.3.0", - "acorn": "^8.0.5", - "acorn-walk": "^8.0.2", - "address": "^1.1.2", - "autoprefixer": "^10.2.4", - "browserslist": "^4.16.3", - "case-sensitive-paths-webpack-plugin": "^2.3.0", - "cli-highlight": "^2.1.10", - "clipboardy": "^2.3.0", - "cliui": "^7.0.4", - "copy-webpack-plugin": "^9.0.1", - "css-loader": "^6.5.0", - "css-minimizer-webpack-plugin": "^3.0.2", - "cssnano": "^5.0.0", - "debug": "^4.1.1", - "default-gateway": "^6.0.3", - "dotenv": "^10.0.0", - "dotenv-expand": "^5.1.0", - "fs-extra": "^9.1.0", - "globby": "^11.0.2", - "hash-sum": "^2.0.0", - "html-webpack-plugin": "^5.1.0", - "is-file-esm": "^1.0.0", - "launch-editor-middleware": "^2.2.1", - "lodash.defaultsdeep": "^4.6.1", - "lodash.mapvalues": "^4.6.0", - "mini-css-extract-plugin": "^2.5.3", - "minimist": "^1.2.5", - "module-alias": "^2.2.2", - "portfinder": "^1.0.26", - "postcss": "^8.2.6", - "postcss-loader": "^6.1.1", - "progress-webpack-plugin": "^1.0.12", - "ssri": "^8.0.1", - "terser-webpack-plugin": "^5.1.1", - "thread-loader": "^3.0.0", - "vue-loader": "^17.0.0", - "vue-style-loader": "^4.1.3", - "webpack": "^5.54.0", - "webpack-bundle-analyzer": "^4.4.0", - "webpack-chain": "^6.5.1", - "webpack-dev-server": "^4.7.3", - "webpack-merge": "^5.7.3", - "webpack-virtual-modules": "^0.4.2", - "whatwg-fetch": "^3.6.2" - }, - "bin": { - "vue-cli-service": "bin/vue-cli-service.js" - }, - "engines": { - "node": "^12.0.0 || >= 14.0.0" - }, - "peerDependencies": { - "vue-template-compiler": "^2.0.0", - "webpack-sources": "*" - }, - "peerDependenciesMeta": { - "cache-loader": { - "optional": true - }, - "less-loader": { - "optional": true - }, - "pug-plain-loader": { - "optional": true - }, - "raw-loader": { - "optional": true - }, - "sass-loader": { - "optional": true - }, - "stylus-loader": { - "optional": true - }, - "vue-template-compiler": { - "optional": true - }, - "webpack-sources": { - "optional": true - } - } - }, - "node_modules/@vue/cli-shared-utils": { - "version": "5.0.8", - "resolved": "https://registry.npmjs.org/@vue/cli-shared-utils/-/cli-shared-utils-5.0.8.tgz", - "integrity": "sha512-uK2YB7bBVuQhjOJF+O52P9yFMXeJVj7ozqJkwYE9PlMHL1LMHjtCYm4cSdOebuPzyP+/9p0BimM/OqxsevIopQ==", - "dev": true, - "dependencies": { - "@achrinza/node-ipc": "^9.2.5", - "chalk": 
"^4.1.2", - "execa": "^1.0.0", - "joi": "^17.4.0", - "launch-editor": "^2.2.1", - "lru-cache": "^6.0.0", - "node-fetch": "^2.6.7", - "open": "^8.0.2", - "ora": "^5.3.0", - "read-pkg": "^5.1.1", - "semver": "^7.3.4", - "strip-ansi": "^6.0.0" - } - }, - "node_modules/@vue/cli-shared-utils/node_modules/chalk": { - "version": "4.1.2", - "resolved": "https://registry.npmjs.org/chalk/-/chalk-4.1.2.tgz", - "integrity": "sha512-oKnbhFyRIXpUuez8iBMmyEa4nbj4IOQyuhc/wy9kY7/WVPcwIO9VA668Pu8RkO7+0G76SLROeyw9CpQ061i4mA==", - "dev": true, - "dependencies": { - "ansi-styles": "^4.1.0", - "supports-color": "^7.1.0" - }, - "engines": { - "node": ">=10" - }, - "funding": { - "url": "https://github.com/chalk/chalk?sponsor=1" - } - }, - "node_modules/@vue/cli-shared-utils/node_modules/lru-cache": { - "version": "6.0.0", - "resolved": "https://registry.npmjs.org/lru-cache/-/lru-cache-6.0.0.tgz", - "integrity": "sha512-Jo6dJ04CmSjuznwJSS3pUeWmd/H0ffTlkXXgwZi+eq1UCmqQwCh+eLsYOYCwY991i2Fah4h1BEMCx4qThGbsiA==", - "dev": true, - "dependencies": { - "yallist": "^4.0.0" - }, - "engines": { - "node": ">=10" - } - }, - "node_modules/@vue/cli-shared-utils/node_modules/semver": { - "version": "7.7.2", - "resolved": "https://registry.npmjs.org/semver/-/semver-7.7.2.tgz", - "integrity": "sha512-RF0Fw+rO5AMf9MAyaRXI4AV0Ulj5lMHqVxxdSgiVbixSCXoEmmX/jk0CuJw4+3SqroYO9VoUh+HcuJivvtJemA==", - "dev": true, - "bin": { - "semver": "bin/semver.js" - }, - "engines": { - "node": ">=10" - } - }, - "node_modules/@vue/cli-shared-utils/node_modules/yallist": { - "version": "4.0.0", - "resolved": "https://registry.npmjs.org/yallist/-/yallist-4.0.0.tgz", - "integrity": "sha512-3wdGidZyq5PB084XLES5TpOSRA3wjXAlIWMhum2kRcv/41Sn2emQ0dycQW4uZXLejwKvg6EsvbdlVL+FYEct7A==", - "dev": true - }, "node_modules/@vue/compiler-core": { - "version": "3.5.16", - "resolved": "https://registry.npmjs.org/@vue/compiler-core/-/compiler-core-3.5.16.tgz", - "integrity": "sha512-AOQS2eaQOaaZQoL1u+2rCJIKDruNXVBZSiUD3chnUrsoX5ZTQMaCvXlWNIfxBJuU15r1o7+mpo5223KVtIhAgQ==", + "version": "3.5.18", + "resolved": "https://registry.npmjs.org/@vue/compiler-core/-/compiler-core-3.5.18.tgz", + "integrity": "sha512-3slwjQrrV1TO8MoXgy3aynDQ7lslj5UqDxuHnrzHtpON5CBinhWjJETciPngpin/T3OuW3tXUf86tEurusnztw==", "dependencies": { - "@babel/parser": "^7.27.2", - "@vue/shared": "3.5.16", + "@babel/parser": "^7.28.0", + "@vue/shared": "3.5.18", "entities": "^4.5.0", "estree-walker": "^2.0.2", "source-map-js": "^1.2.1" } }, "node_modules/@vue/compiler-dom": { - "version": "3.5.16", - "resolved": "https://registry.npmjs.org/@vue/compiler-dom/-/compiler-dom-3.5.16.tgz", - "integrity": "sha512-SSJIhBr/teipXiXjmWOVWLnxjNGo65Oj/8wTEQz0nqwQeP75jWZ0n4sF24Zxoht1cuJoWopwj0J0exYwCJ0dCQ==", + "version": "3.5.18", + "resolved": "https://registry.npmjs.org/@vue/compiler-dom/-/compiler-dom-3.5.18.tgz", + "integrity": "sha512-RMbU6NTU70++B1JyVJbNbeFkK+A+Q7y9XKE2EM4NLGm2WFR8x9MbAtWxPPLdm0wUkuZv9trpwfSlL6tjdIa1+A==", "dependencies": { - "@vue/compiler-core": "3.5.16", - "@vue/shared": "3.5.16" + "@vue/compiler-core": "3.5.18", + "@vue/shared": "3.5.18" } }, "node_modules/@vue/compiler-sfc": { - "version": "3.5.16", - "resolved": "https://registry.npmjs.org/@vue/compiler-sfc/-/compiler-sfc-3.5.16.tgz", - "integrity": "sha512-rQR6VSFNpiinDy/DVUE0vHoIDUF++6p910cgcZoaAUm3POxgNOOdS/xgoll3rNdKYTYPnnbARDCZOyZ+QSe6Pw==", + "version": "3.5.18", + "resolved": "https://registry.npmjs.org/@vue/compiler-sfc/-/compiler-sfc-3.5.18.tgz", + "integrity": 
"sha512-5aBjvGqsWs+MoxswZPoTB9nSDb3dhd1x30xrrltKujlCxo48j8HGDNj3QPhF4VIS0VQDUrA1xUfp2hEa+FNyXA==", "dependencies": { - "@babel/parser": "^7.27.2", - "@vue/compiler-core": "3.5.16", - "@vue/compiler-dom": "3.5.16", - "@vue/compiler-ssr": "3.5.16", - "@vue/shared": "3.5.16", + "@babel/parser": "^7.28.0", + "@vue/compiler-core": "3.5.18", + "@vue/compiler-dom": "3.5.18", + "@vue/compiler-ssr": "3.5.18", + "@vue/shared": "3.5.18", "estree-walker": "^2.0.2", "magic-string": "^0.30.17", - "postcss": "^8.5.3", + "postcss": "^8.5.6", "source-map-js": "^1.2.1" } }, "node_modules/@vue/compiler-ssr": { - "version": "3.5.16", - "resolved": "https://registry.npmjs.org/@vue/compiler-ssr/-/compiler-ssr-3.5.16.tgz", - "integrity": "sha512-d2V7kfxbdsjrDSGlJE7my1ZzCXViEcqN6w14DOsDrUCHEA6vbnVCpRFfrc4ryCP/lCKzX2eS1YtnLE/BuC9f/A==", + "version": "3.5.18", + "resolved": "https://registry.npmjs.org/@vue/compiler-ssr/-/compiler-ssr-3.5.18.tgz", + "integrity": "sha512-xM16Ak7rSWHkM3m22NlmcdIM+K4BMyFARAfV9hYFl+SFuRzrZ3uGMNW05kA5pmeMa0X9X963Kgou7ufdbpOP9g==", "dependencies": { - "@vue/compiler-dom": "3.5.16", - "@vue/shared": "3.5.16" + "@vue/compiler-dom": "3.5.18", + "@vue/shared": "3.5.18" } }, - "node_modules/@vue/component-compiler-utils": { - "version": "3.3.0", - "resolved": "https://registry.npmjs.org/@vue/component-compiler-utils/-/component-compiler-utils-3.3.0.tgz", - "integrity": "sha512-97sfH2mYNU+2PzGrmK2haqffDpVASuib9/w2/noxiFi31Z54hW+q3izKQXXQZSNhtiUpAI36uSuYepeBe4wpHQ==", - "dev": true, - "peer": true, - "dependencies": { - "consolidate": "^0.15.1", - "hash-sum": "^1.0.2", - "lru-cache": "^4.1.2", - "merge-source-map": "^1.1.0", - "postcss": "^7.0.36", - "postcss-selector-parser": "^6.0.2", - "source-map": "~0.6.1", - "vue-template-es2015-compiler": "^1.9.0" - }, - "optionalDependencies": { - "prettier": "^1.18.2 || ^2.0.0" - } - }, - "node_modules/@vue/component-compiler-utils/node_modules/hash-sum": { - "version": "1.0.2", - "resolved": "https://registry.npmjs.org/hash-sum/-/hash-sum-1.0.2.tgz", - "integrity": "sha512-fUs4B4L+mlt8/XAtSOGMUO1TXmAelItBPtJG7CyHJfYTdDjwisntGO2JQz7oUsatOY9o68+57eziUVNw/mRHmA==", - "dev": true, - "peer": true - }, - "node_modules/@vue/component-compiler-utils/node_modules/lru-cache": { - "version": "4.1.5", - "resolved": "https://registry.npmjs.org/lru-cache/-/lru-cache-4.1.5.tgz", - "integrity": "sha512-sWZlbEP2OsHNkXrMl5GYk/jKk70MBng6UU4YI/qGDYbgf6YbP4EvmqISbXCoJiRKs+1bSpFHVgQxvJ17F2li5g==", - "dev": true, - "peer": true, - "dependencies": { - "pseudomap": "^1.0.2", - "yallist": "^2.1.2" - } - }, - "node_modules/@vue/component-compiler-utils/node_modules/picocolors": { - "version": "0.2.1", - "resolved": "https://registry.npmjs.org/picocolors/-/picocolors-0.2.1.tgz", - "integrity": "sha512-cMlDqaLEqfSaW8Z7N5Jw+lyIW869EzT73/F5lhtY9cLGoVxSXznfgfXMO0Z5K0o0Q2TkTXq+0KFsdnSe3jDViA==", - "dev": true, - "peer": true - }, - "node_modules/@vue/component-compiler-utils/node_modules/postcss": { - "version": "7.0.39", - "resolved": "https://registry.npmjs.org/postcss/-/postcss-7.0.39.tgz", - "integrity": "sha512-yioayjNbHn6z1/Bywyb2Y4s3yvDAeXGOyxqD+LnVOinq6Mdmd++SW2wUNVzavyyHxd6+DxzWGIuosg6P1Rj8uA==", - "dev": true, - "peer": true, - "dependencies": { - "picocolors": "^0.2.1", - "source-map": "^0.6.1" - }, - "engines": { - "node": ">=6.0.0" - }, - "funding": { - "type": "opencollective", - "url": "https://opencollective.com/postcss/" - } - }, - "node_modules/@vue/component-compiler-utils/node_modules/yallist": { - "version": "2.1.2", - "resolved": 
"https://registry.npmjs.org/yallist/-/yallist-2.1.2.tgz", - "integrity": "sha512-ncTzHV7NvsQZkYe1DW7cbDLm0YpzHmZF5r/iyP3ZnQtMiJ+pjzisCiMNI+Sj+xQF5pXhSHxSB3uDbsBTzY/c2A==", - "dev": true, - "peer": true - }, "node_modules/@vue/reactivity": { - "version": "3.5.16", - "resolved": "https://registry.npmjs.org/@vue/reactivity/-/reactivity-3.5.16.tgz", - "integrity": "sha512-FG5Q5ee/kxhIm1p2bykPpPwqiUBV3kFySsHEQha5BJvjXdZTUfmya7wP7zC39dFuZAcf/PD5S4Lni55vGLMhvA==", + "version": "3.5.18", + "resolved": "https://registry.npmjs.org/@vue/reactivity/-/reactivity-3.5.18.tgz", + "integrity": "sha512-x0vPO5Imw+3sChLM5Y+B6G1zPjwdOri9e8V21NnTnlEvkxatHEH5B5KEAJcjuzQ7BsjGrKtfzuQ5eQwXh8HXBg==", "dependencies": { - "@vue/shared": "3.5.16" + "@vue/shared": "3.5.18" } }, "node_modules/@vue/runtime-core": { - "version": "3.5.16", - "resolved": "https://registry.npmjs.org/@vue/runtime-core/-/runtime-core-3.5.16.tgz", - "integrity": "sha512-bw5Ykq6+JFHYxrQa7Tjr+VSzw7Dj4ldR/udyBZbq73fCdJmyy5MPIFR9IX/M5Qs+TtTjuyUTCnmK3lWWwpAcFQ==", + "version": "3.5.18", + "resolved": "https://registry.npmjs.org/@vue/runtime-core/-/runtime-core-3.5.18.tgz", + "integrity": "sha512-DUpHa1HpeOQEt6+3nheUfqVXRog2kivkXHUhoqJiKR33SO4x+a5uNOMkV487WPerQkL0vUuRvq/7JhRgLW3S+w==", "dependencies": { - "@vue/reactivity": "3.5.16", - "@vue/shared": "3.5.16" + "@vue/reactivity": "3.5.18", + "@vue/shared": "3.5.18" } }, "node_modules/@vue/runtime-dom": { - "version": "3.5.16", - "resolved": "https://registry.npmjs.org/@vue/runtime-dom/-/runtime-dom-3.5.16.tgz", - "integrity": "sha512-T1qqYJsG2xMGhImRUV9y/RseB9d0eCYZQ4CWca9ztCuiPj/XWNNN+lkNBuzVbia5z4/cgxdL28NoQCvC0Xcfww==", + "version": "3.5.18", + "resolved": "https://registry.npmjs.org/@vue/runtime-dom/-/runtime-dom-3.5.18.tgz", + "integrity": "sha512-YwDj71iV05j4RnzZnZtGaXwPoUWeRsqinblgVJwR8XTXYZ9D5PbahHQgsbmzUvCWNF6x7siQ89HgnX5eWkr3mw==", "dependencies": { - "@vue/reactivity": "3.5.16", - "@vue/runtime-core": "3.5.16", - "@vue/shared": "3.5.16", + "@vue/reactivity": "3.5.18", + "@vue/runtime-core": "3.5.18", + "@vue/shared": "3.5.18", "csstype": "^3.1.3" } }, "node_modules/@vue/server-renderer": { - "version": "3.5.16", - "resolved": "https://registry.npmjs.org/@vue/server-renderer/-/server-renderer-3.5.16.tgz", - "integrity": "sha512-BrX0qLiv/WugguGsnQUJiYOE0Fe5mZTwi6b7X/ybGB0vfrPH9z0gD/Y6WOR1sGCgX4gc25L1RYS5eYQKDMoNIg==", + "version": "3.5.18", + "resolved": "https://registry.npmjs.org/@vue/server-renderer/-/server-renderer-3.5.18.tgz", + "integrity": "sha512-PvIHLUoWgSbDG7zLHqSqaCoZvHi6NNmfVFOqO+OnwvqMz/tqQr3FuGWS8ufluNddk7ZLBJYMrjcw1c6XzR12mA==", "dependencies": { - "@vue/compiler-ssr": "3.5.16", - "@vue/shared": "3.5.16" + "@vue/compiler-ssr": "3.5.18", + "@vue/shared": "3.5.18" }, "peerDependencies": { - "vue": "3.5.16" + "vue": "3.5.18" } }, "node_modules/@vue/shared": { - "version": "3.5.16", - "resolved": "https://registry.npmjs.org/@vue/shared/-/shared-3.5.16.tgz", - "integrity": "sha512-c/0fWy3Jw6Z8L9FmTyYfkpM5zklnqqa9+a6dz3DvONRKW2NEbh46BP0FHuLFSWi2TnQEtp91Z6zOWNrU6QiyPg==" - }, - "node_modules/@vue/vue-loader-v15": { - "name": "vue-loader", - "version": "15.11.1", - "resolved": "https://registry.npmjs.org/vue-loader/-/vue-loader-15.11.1.tgz", - "integrity": "sha512-0iw4VchYLePqJfJu9s62ACWUXeSqM30SQqlIftbYWM3C+jpPcEHKSPUZBLjSF9au4HTHQ/naF6OGnO3Q/qGR3Q==", - "dev": true, - "peer": true, - "dependencies": { - "@vue/component-compiler-utils": "^3.1.0", - "hash-sum": "^1.0.2", - "loader-utils": "^1.1.0", - "vue-hot-reload-api": "^2.3.0", - "vue-style-loader": "^4.1.0" - }, - 
"peerDependencies": { - "css-loader": "*", - "webpack": "^3.0.0 || ^4.1.0 || ^5.0.0-0" - }, - "peerDependenciesMeta": { - "cache-loader": { - "optional": true - }, - "prettier": { - "optional": true - }, - "vue-template-compiler": { - "optional": true - } - } - }, - "node_modules/@vue/vue-loader-v15/node_modules/hash-sum": { - "version": "1.0.2", - "resolved": "https://registry.npmjs.org/hash-sum/-/hash-sum-1.0.2.tgz", - "integrity": "sha512-fUs4B4L+mlt8/XAtSOGMUO1TXmAelItBPtJG7CyHJfYTdDjwisntGO2JQz7oUsatOY9o68+57eziUVNw/mRHmA==", - "dev": true, - "peer": true - }, - "node_modules/@vue/web-component-wrapper": { - "version": "1.3.0", - "resolved": "https://registry.npmjs.org/@vue/web-component-wrapper/-/web-component-wrapper-1.3.0.tgz", - "integrity": "sha512-Iu8Tbg3f+emIIMmI2ycSI8QcEuAUgPTgHwesDU1eKMLE4YC/c/sFbGc70QgMq31ijRftV0R7vCm9co6rldCeOA==", - "dev": true, - "peer": true + "version": "3.5.18", + "resolved": "https://registry.npmjs.org/@vue/shared/-/shared-3.5.18.tgz", + "integrity": "sha512-cZy8Dq+uuIXbxCZpuLd2GJdeSO/lIzIspC2WtkqIpje5QyFbvLaI5wZtdUjLHjGZrlVX6GilejatWwVYYRc8tA==" }, "node_modules/@vuetify/loader-shared": { - "version": "2.1.0", - "resolved": "https://registry.npmjs.org/@vuetify/loader-shared/-/loader-shared-2.1.0.tgz", - "integrity": "sha512-dNE6Ceym9ijFsmJKB7YGW0cxs7xbYV8+1LjU6jd4P14xOt/ji4Igtgzt0rJFbxu+ZhAzqz853lhB0z8V9Dy9cQ==", + "version": "2.1.1", + "resolved": "https://registry.npmjs.org/@vuetify/loader-shared/-/loader-shared-2.1.1.tgz", + "integrity": "sha512-jSZTzTYaoiv8iwonFCVZQ0YYX/M+Uyl4ng+C4egMJT0Hcmh9gIxJL89qfZICDeo3g0IhqrvipW2FFKKRDMtVcA==", "devOptional": true, "dependencies": { "upath": "^2.0.1" @@ -3448,193 +1581,11 @@ "vuetify": "^3.0.0" } }, - "node_modules/@webassemblyjs/ast": { - "version": "1.14.1", - "resolved": "https://registry.npmjs.org/@webassemblyjs/ast/-/ast-1.14.1.tgz", - "integrity": "sha512-nuBEDgQfm1ccRp/8bCQrx1frohyufl4JlbMMZ4P1wpeOfDhF6FQkxZJ1b/e+PLwr6X1Nhw6OLme5usuBWYBvuQ==", - "devOptional": true, - "dependencies": { - "@webassemblyjs/helper-numbers": "1.13.2", - "@webassemblyjs/helper-wasm-bytecode": "1.13.2" - } - }, - "node_modules/@webassemblyjs/floating-point-hex-parser": { - "version": "1.13.2", - "resolved": "https://registry.npmjs.org/@webassemblyjs/floating-point-hex-parser/-/floating-point-hex-parser-1.13.2.tgz", - "integrity": "sha512-6oXyTOzbKxGH4steLbLNOu71Oj+C8Lg34n6CqRvqfS2O71BxY6ByfMDRhBytzknj9yGUPVJ1qIKhRlAwO1AovA==", - "devOptional": true - }, - "node_modules/@webassemblyjs/helper-api-error": { - "version": "1.13.2", - "resolved": "https://registry.npmjs.org/@webassemblyjs/helper-api-error/-/helper-api-error-1.13.2.tgz", - "integrity": "sha512-U56GMYxy4ZQCbDZd6JuvvNV/WFildOjsaWD3Tzzvmw/mas3cXzRJPMjP83JqEsgSbyrmaGjBfDtV7KDXV9UzFQ==", - "devOptional": true - }, - "node_modules/@webassemblyjs/helper-buffer": { - "version": "1.14.1", - "resolved": "https://registry.npmjs.org/@webassemblyjs/helper-buffer/-/helper-buffer-1.14.1.tgz", - "integrity": "sha512-jyH7wtcHiKssDtFPRB+iQdxlDf96m0E39yb0k5uJVhFGleZFoNw1c4aeIcVUPPbXUVJ94wwnMOAqUHyzoEPVMA==", - "devOptional": true - }, - "node_modules/@webassemblyjs/helper-numbers": { - "version": "1.13.2", - "resolved": "https://registry.npmjs.org/@webassemblyjs/helper-numbers/-/helper-numbers-1.13.2.tgz", - "integrity": "sha512-FE8aCmS5Q6eQYcV3gI35O4J789wlQA+7JrqTTpJqn5emA4U2hvwJmvFRC0HODS+3Ye6WioDklgd6scJ3+PLnEA==", - "devOptional": true, - "dependencies": { - "@webassemblyjs/floating-point-hex-parser": "1.13.2", - "@webassemblyjs/helper-api-error": "1.13.2", - "@xtuc/long": 
"4.2.2" - } - }, - "node_modules/@webassemblyjs/helper-wasm-bytecode": { - "version": "1.13.2", - "resolved": "https://registry.npmjs.org/@webassemblyjs/helper-wasm-bytecode/-/helper-wasm-bytecode-1.13.2.tgz", - "integrity": "sha512-3QbLKy93F0EAIXLh0ogEVR6rOubA9AoZ+WRYhNbFyuB70j3dRdwH9g+qXhLAO0kiYGlg3TxDV+I4rQTr/YNXkA==", - "devOptional": true - }, - "node_modules/@webassemblyjs/helper-wasm-section": { - "version": "1.14.1", - "resolved": "https://registry.npmjs.org/@webassemblyjs/helper-wasm-section/-/helper-wasm-section-1.14.1.tgz", - "integrity": "sha512-ds5mXEqTJ6oxRoqjhWDU83OgzAYjwsCV8Lo/N+oRsNDmx/ZDpqalmrtgOMkHwxsG0iI//3BwWAErYRHtgn0dZw==", - "devOptional": true, - "dependencies": { - "@webassemblyjs/ast": "1.14.1", - "@webassemblyjs/helper-buffer": "1.14.1", - "@webassemblyjs/helper-wasm-bytecode": "1.13.2", - "@webassemblyjs/wasm-gen": "1.14.1" - } - }, - "node_modules/@webassemblyjs/ieee754": { - "version": "1.13.2", - "resolved": "https://registry.npmjs.org/@webassemblyjs/ieee754/-/ieee754-1.13.2.tgz", - "integrity": "sha512-4LtOzh58S/5lX4ITKxnAK2USuNEvpdVV9AlgGQb8rJDHaLeHciwG4zlGr0j/SNWlr7x3vO1lDEsuePvtcDNCkw==", - "devOptional": true, - "dependencies": { - "@xtuc/ieee754": "^1.2.0" - } - }, - "node_modules/@webassemblyjs/leb128": { - "version": "1.13.2", - "resolved": "https://registry.npmjs.org/@webassemblyjs/leb128/-/leb128-1.13.2.tgz", - "integrity": "sha512-Lde1oNoIdzVzdkNEAWZ1dZ5orIbff80YPdHx20mrHwHrVNNTjNr8E3xz9BdpcGqRQbAEa+fkrCb+fRFTl/6sQw==", - "devOptional": true, - "dependencies": { - "@xtuc/long": "4.2.2" - } - }, - "node_modules/@webassemblyjs/utf8": { - "version": "1.13.2", - "resolved": "https://registry.npmjs.org/@webassemblyjs/utf8/-/utf8-1.13.2.tgz", - "integrity": "sha512-3NQWGjKTASY1xV5m7Hr0iPeXD9+RDobLll3T9d2AO+g3my8xy5peVyjSag4I50mR1bBSN/Ct12lo+R9tJk0NZQ==", - "devOptional": true - }, - "node_modules/@webassemblyjs/wasm-edit": { - "version": "1.14.1", - "resolved": "https://registry.npmjs.org/@webassemblyjs/wasm-edit/-/wasm-edit-1.14.1.tgz", - "integrity": "sha512-RNJUIQH/J8iA/1NzlE4N7KtyZNHi3w7at7hDjvRNm5rcUXa00z1vRz3glZoULfJ5mpvYhLybmVcwcjGrC1pRrQ==", - "devOptional": true, - "dependencies": { - "@webassemblyjs/ast": "1.14.1", - "@webassemblyjs/helper-buffer": "1.14.1", - "@webassemblyjs/helper-wasm-bytecode": "1.13.2", - "@webassemblyjs/helper-wasm-section": "1.14.1", - "@webassemblyjs/wasm-gen": "1.14.1", - "@webassemblyjs/wasm-opt": "1.14.1", - "@webassemblyjs/wasm-parser": "1.14.1", - "@webassemblyjs/wast-printer": "1.14.1" - } - }, - "node_modules/@webassemblyjs/wasm-gen": { - "version": "1.14.1", - "resolved": "https://registry.npmjs.org/@webassemblyjs/wasm-gen/-/wasm-gen-1.14.1.tgz", - "integrity": "sha512-AmomSIjP8ZbfGQhumkNvgC33AY7qtMCXnN6bL2u2Js4gVCg8fp735aEiMSBbDR7UQIj90n4wKAFUSEd0QN2Ukg==", - "devOptional": true, - "dependencies": { - "@webassemblyjs/ast": "1.14.1", - "@webassemblyjs/helper-wasm-bytecode": "1.13.2", - "@webassemblyjs/ieee754": "1.13.2", - "@webassemblyjs/leb128": "1.13.2", - "@webassemblyjs/utf8": "1.13.2" - } - }, - "node_modules/@webassemblyjs/wasm-opt": { - "version": "1.14.1", - "resolved": "https://registry.npmjs.org/@webassemblyjs/wasm-opt/-/wasm-opt-1.14.1.tgz", - "integrity": "sha512-PTcKLUNvBqnY2U6E5bdOQcSM+oVP/PmrDY9NzowJjislEjwP/C4an2303MCVS2Mg9d3AJpIGdUFIQQWbPds0Sw==", - "devOptional": true, - "dependencies": { - "@webassemblyjs/ast": "1.14.1", - "@webassemblyjs/helper-buffer": "1.14.1", - "@webassemblyjs/wasm-gen": "1.14.1", - "@webassemblyjs/wasm-parser": "1.14.1" - } - }, - 
"node_modules/@webassemblyjs/wasm-parser": { - "version": "1.14.1", - "resolved": "https://registry.npmjs.org/@webassemblyjs/wasm-parser/-/wasm-parser-1.14.1.tgz", - "integrity": "sha512-JLBl+KZ0R5qB7mCnud/yyX08jWFw5MsoalJ1pQ4EdFlgj9VdXKGuENGsiCIjegI1W7p91rUlcB/LB5yRJKNTcQ==", - "devOptional": true, - "dependencies": { - "@webassemblyjs/ast": "1.14.1", - "@webassemblyjs/helper-api-error": "1.13.2", - "@webassemblyjs/helper-wasm-bytecode": "1.13.2", - "@webassemblyjs/ieee754": "1.13.2", - "@webassemblyjs/leb128": "1.13.2", - "@webassemblyjs/utf8": "1.13.2" - } - }, - "node_modules/@webassemblyjs/wast-printer": { - "version": "1.14.1", - "resolved": "https://registry.npmjs.org/@webassemblyjs/wast-printer/-/wast-printer-1.14.1.tgz", - "integrity": "sha512-kPSSXE6De1XOR820C90RIo2ogvZG+c3KiHzqUoO/F34Y2shGzesfqv7o57xrxovZJH/MetF5UjroJ/R/3isoiw==", - "devOptional": true, - "dependencies": { - "@webassemblyjs/ast": "1.14.1", - "@xtuc/long": "4.2.2" - } - }, - "node_modules/@xtuc/ieee754": { - "version": "1.2.0", - "resolved": "https://registry.npmjs.org/@xtuc/ieee754/-/ieee754-1.2.0.tgz", - "integrity": "sha512-DX8nKgqcGwsc0eJSqYt5lwP4DH5FlHnmuWWBRy7X0NcaGR0ZtuyeESgMwTYVEtxmsNGY+qit4QYT/MIYTOTPeA==", - "devOptional": true - }, - "node_modules/@xtuc/long": { - "version": "4.2.2", - "resolved": "https://registry.npmjs.org/@xtuc/long/-/long-4.2.2.tgz", - "integrity": "sha512-NuHqBY1PB/D8xU6s/thBgOAiAP7HOYDQ32+BFZILJ8ivkUkAHQnWfn6WhL79Owj1qmUnoN/YPhktdIoucipkAQ==", - "devOptional": true - }, - "node_modules/accepts": { - "version": "1.3.8", - "resolved": "https://registry.npmjs.org/accepts/-/accepts-1.3.8.tgz", - "integrity": "sha512-PYAthTa2m2VKxuvSD3DPC/Gy+U+sOA1LAuT8mkmRuvw+NACSaeXEQ+NHcVF7rONl6qcaxV3Uuemwawk+7+SJLw==", - "dev": true, - "peer": true, - "dependencies": { - "mime-types": "~2.1.34", - "negotiator": "0.6.3" - }, - "engines": { - "node": ">= 0.6" - } - }, - "node_modules/accepts/node_modules/negotiator": { - "version": "0.6.3", - "resolved": "https://registry.npmjs.org/negotiator/-/negotiator-0.6.3.tgz", - "integrity": "sha512-+EUsqGPLsM+j/zdChZjsnX51g4XrHFOIXwfnCVPGlQk/k5giakcKsuxCObBRu6DSm9opw/O6slWbJdghQM4bBg==", - "dev": true, - "peer": true, - "engines": { - "node": ">= 0.6" - } - }, "node_modules/acorn": { - "version": "8.14.1", - "resolved": "https://registry.npmjs.org/acorn/-/acorn-8.14.1.tgz", - "integrity": "sha512-OvQ/2pUDKmgfCg++xsTX1wGxfTaszcHVcTctW4UJB4hibJx2HXxxO5UmVgyjMa+ZDsiaf5wWLXYpRWMmBI0QHg==", - "devOptional": true, + "version": "8.15.0", + "resolved": "https://registry.npmjs.org/acorn/-/acorn-8.15.0.tgz", + "integrity": "sha512-NZyJarBfL7nWwIq+FDL6Zp/yHEhePMNnnJ0y3qfieCrmNvYct8uvtiV41UvlSe6apAfk0fY1FbWx+NwfmpvtTg==", + "dev": true, "bin": { "acorn": "bin/acorn" }, @@ -3651,34 +1602,11 @@ "acorn": "^6.0.0 || ^7.0.0 || ^8.0.0" } }, - "node_modules/acorn-walk": { - "version": "8.3.4", - "resolved": "https://registry.npmjs.org/acorn-walk/-/acorn-walk-8.3.4.tgz", - "integrity": "sha512-ueEepnujpqee2o5aIYnvHU6C0A42MNdsIDeqy5BydrkuC5R1ZuUFnm27EeFJGoEHJQgn3uleRvmTXaJgfXbt4g==", - "dev": true, - "peer": true, - "dependencies": { - "acorn": "^8.11.0" - }, - "engines": { - "node": ">=0.4.0" - } - }, - "node_modules/address": { - "version": "1.2.2", - "resolved": "https://registry.npmjs.org/address/-/address-1.2.2.tgz", - "integrity": "sha512-4B/qKCfeE/ODUaAUpSwfzazo5x29WD4r3vXiWsB7I2mSDAihwEqKO+g8GELZUQSSAo5e1XTYh3ZVfLyxBc12nA==", - "dev": true, - "peer": true, - "engines": { - "node": ">= 10.0.0" - } - }, "node_modules/ajv": { "version": "6.12.6", "resolved": 
"https://registry.npmjs.org/ajv/-/ajv-6.12.6.tgz", "integrity": "sha512-j3fVLgvTo527anyYyJOGTYJbG+vnnQYvE0m5mmkc1TK+nxAppkCLMIL0aZ4dblVCNoGShhm+kzE4ZUykBoMg4g==", - "devOptional": true, + "dev": true, "dependencies": { "fast-deep-equal": "^3.1.1", "fast-json-stable-stringify": "^2.0.0", @@ -3690,86 +1618,6 @@ "url": "https://github.com/sponsors/epoberezkin" } }, - "node_modules/ajv-formats": { - "version": "2.1.1", - "resolved": "https://registry.npmjs.org/ajv-formats/-/ajv-formats-2.1.1.tgz", - "integrity": "sha512-Wx0Kx52hxE7C18hkMEggYlEifqWZtYaRgouJor+WMdPnQyEK13vgEWyVNup7SoeeoLMsr4kf5h6dOW11I15MUA==", - "devOptional": true, - "dependencies": { - "ajv": "^8.0.0" - }, - "peerDependencies": { - "ajv": "^8.0.0" - }, - "peerDependenciesMeta": { - "ajv": { - "optional": true - } - } - }, - "node_modules/ajv-formats/node_modules/ajv": { - "version": "8.17.1", - "resolved": "https://registry.npmjs.org/ajv/-/ajv-8.17.1.tgz", - "integrity": "sha512-B/gBuNg5SiMTrPkC+A2+cW0RszwxYmn6VYxB/inlBStS5nx6xHIt/ehKRhIMhqusl7a8LjQoZnjCs5vhwxOQ1g==", - "devOptional": true, - "dependencies": { - "fast-deep-equal": "^3.1.3", - "fast-uri": "^3.0.1", - "json-schema-traverse": "^1.0.0", - "require-from-string": "^2.0.2" - }, - "funding": { - "type": "github", - "url": "https://github.com/sponsors/epoberezkin" - } - }, - "node_modules/ajv-formats/node_modules/json-schema-traverse": { - "version": "1.0.0", - "resolved": "https://registry.npmjs.org/json-schema-traverse/-/json-schema-traverse-1.0.0.tgz", - "integrity": "sha512-NM8/P9n3XjXhIZn1lLhkFaACTOURQXjWhV4BA/RnOv8xvgqtqpAX9IO4mRQxSx1Rlo4tqzeqb0sOlruaOy3dug==", - "devOptional": true - }, - "node_modules/ajv-keywords": { - "version": "3.5.2", - "resolved": "https://registry.npmjs.org/ajv-keywords/-/ajv-keywords-3.5.2.tgz", - "integrity": "sha512-5p6WTN0DdTGVQk6VjcEju19IgaHudalcfabD7yhDGeA6bcQnmL+CpveLJq/3hvfwd1aof6L386Ougkx6RfyMIQ==", - "devOptional": true, - "peerDependencies": { - "ajv": "^6.9.1" - } - }, - "node_modules/ansi-escapes": { - "version": "3.2.0", - "resolved": "https://registry.npmjs.org/ansi-escapes/-/ansi-escapes-3.2.0.tgz", - "integrity": "sha512-cBhpre4ma+U0T1oM5fXg7Dy1Jw7zzwv7lt/GoCpr+hDQJoYnKVPLL4dCvSEFMmQurOQvSrwT7SL/DAlhBI97RQ==", - "dev": true, - "peer": true, - "engines": { - "node": ">=4" - } - }, - "node_modules/ansi-html-community": { - "version": "0.0.8", - "resolved": "https://registry.npmjs.org/ansi-html-community/-/ansi-html-community-0.0.8.tgz", - "integrity": "sha512-1APHAyr3+PCamwNw3bXCPp4HFLONZt/yIH0sZp0/469KWNTEy+qN5jQ3GVX6DMZ1UXAi34yVwtTeaG/HpBuuzw==", - "dev": true, - "engines": [ - "node >= 0.8.0" - ], - "peer": true, - "bin": { - "ansi-html": "bin/ansi-html" - } - }, - "node_modules/ansi-regex": { - "version": "5.0.1", - "resolved": "https://registry.npmjs.org/ansi-regex/-/ansi-regex-5.0.1.tgz", - "integrity": "sha512-quJQXlTSUGL2LH9SUXo8VwsY4soanhgo6LNSm84E1LBcE8s3O0wpdiRzyR9z/ZZJMlMWv37qOOb9pdJlMUEKFQ==", - "dev": true, - "engines": { - "node": ">=8" - } - }, "node_modules/ansi-styles": { "version": "4.3.0", "resolved": "https://registry.npmjs.org/ansi-styles/-/ansi-styles-4.3.0.tgz", @@ -3785,333 +1633,18 @@ "url": "https://github.com/chalk/ansi-styles?sponsor=1" } }, - "node_modules/any-promise": { - "version": "1.3.0", - "resolved": "https://registry.npmjs.org/any-promise/-/any-promise-1.3.0.tgz", - "integrity": "sha512-7UvmKalWRt1wgjL1RrGxoSJW/0QZFIegpeGvZG9kjp8vrRu55XTHbwnqq2GpXm9uLbcuhxm3IqX9OB4MZR1b2A==", - "dev": true, - "peer": true - }, - "node_modules/anymatch": { - "version": "3.1.3", - "resolved": 
"https://registry.npmjs.org/anymatch/-/anymatch-3.1.3.tgz", - "integrity": "sha512-KMReFUr0B4t+D+OBkjR3KYqvocp2XaSzO55UcB6mgQMd3KbcE+mWTyvVV7D/zsdEbNnV6acZUutkiHQXvTr1Rw==", - "dev": true, - "peer": true, - "dependencies": { - "normalize-path": "^3.0.0", - "picomatch": "^2.0.4" - }, - "engines": { - "node": ">= 8" - } - }, - "node_modules/arch": { - "version": "2.2.0", - "resolved": "https://registry.npmjs.org/arch/-/arch-2.2.0.tgz", - "integrity": "sha512-Of/R0wqp83cgHozfIYLbBMnej79U/SVGOOyuB3VVFv1NRM/PSFMK12x9KVtiYzJqmnU5WR2qp0Z5rHb7sWGnFQ==", - "dev": true, - "funding": [ - { - "type": "github", - "url": "https://github.com/sponsors/feross" - }, - { - "type": "patreon", - "url": "https://www.patreon.com/feross" - }, - { - "type": "consulting", - "url": "https://feross.org/support" - } - ], - "peer": true - }, "node_modules/argparse": { "version": "2.0.1", "resolved": "https://registry.npmjs.org/argparse/-/argparse-2.0.1.tgz", "integrity": "sha512-8+9WqebbFzpX9OR+Wa6O29asIogeRMzcGtAINdpMHHyAg10f05aSFVBbcEqGf/PXw1EjAZ+q2/bEBg3DvurK3Q==", "dev": true }, - "node_modules/array-flatten": { - "version": "1.1.1", - "resolved": "https://registry.npmjs.org/array-flatten/-/array-flatten-1.1.1.tgz", - "integrity": "sha512-PCVAQswWemu6UdxsDFFX/+gVeYqKAod3D3UVm91jHwynguOwAvYPhx8nNlM++NqRcK6CxxpUafjmhIdKiHibqg==", - "dev": true, - "peer": true - }, - "node_modules/array-union": { - "version": "2.1.0", - "resolved": "https://registry.npmjs.org/array-union/-/array-union-2.1.0.tgz", - "integrity": "sha512-HGyxoOTYUyCM6stUe6EJgnd4EoewAI7zMdfqO+kGjnlZmBDz/cR5pf8r/cR4Wq60sL/p0IkcjUEEPwS3GFrIyw==", - "dev": true, - "peer": true, - "engines": { - "node": ">=8" - } - }, - "node_modules/async": { - "version": "3.2.6", - "resolved": "https://registry.npmjs.org/async/-/async-3.2.6.tgz", - "integrity": "sha512-htCUDlxyyCLMgaM3xXg0C0LW2xqfuQ6p05pCEIsXuyQ+a1koYKTuBMzRNwmybfLgvJDMd0r1LTn4+E0Ti6C2AA==", - "dev": true, - "peer": true - }, - "node_modules/at-least-node": { - "version": "1.0.0", - "resolved": "https://registry.npmjs.org/at-least-node/-/at-least-node-1.0.0.tgz", - "integrity": "sha512-+q/t7Ekv1EDY2l6Gda6LLiX14rU9TV20Wa3ofeQmwPFZbOMo9DXrLbOjFaaclkXKWidIaopwAObQDqwWtGUjqg==", - "dev": true, - "peer": true, - "engines": { - "node": ">= 4.0.0" - } - }, - "node_modules/autoprefixer": { - "version": "10.4.21", - "resolved": "https://registry.npmjs.org/autoprefixer/-/autoprefixer-10.4.21.tgz", - "integrity": "sha512-O+A6LWV5LDHSJD3LjHYoNi4VLsj/Whi7k6zG12xTYaU4cQ8oxQGckXNX8cRHK5yOZ/ppVHe0ZBXGzSV9jXdVbQ==", - "dev": true, - "funding": [ - { - "type": "opencollective", - "url": "https://opencollective.com/postcss/" - }, - { - "type": "tidelift", - "url": "https://tidelift.com/funding/github/npm/autoprefixer" - }, - { - "type": "github", - "url": "https://github.com/sponsors/ai" - } - ], - "peer": true, - "dependencies": { - "browserslist": "^4.24.4", - "caniuse-lite": "^1.0.30001702", - "fraction.js": "^4.3.7", - "normalize-range": "^0.1.2", - "picocolors": "^1.1.1", - "postcss-value-parser": "^4.2.0" - }, - "bin": { - "autoprefixer": "bin/autoprefixer" - }, - "engines": { - "node": "^10 || ^12 || >=14" - }, - "peerDependencies": { - "postcss": "^8.1.0" - } - }, - "node_modules/babel-loader": { - "version": "8.4.1", - "resolved": "https://registry.npmjs.org/babel-loader/-/babel-loader-8.4.1.tgz", - "integrity": "sha512-nXzRChX+Z1GoE6yWavBQg6jDslyFF3SDjl2paADuoQtQW10JqShJt62R6eJQ5m/pjJFDT8xgKIWSP85OY8eXeA==", - "dev": true, - "dependencies": { - "find-cache-dir": "^3.3.1", - "loader-utils": "^2.0.4", - "make-dir": 
"^3.1.0", - "schema-utils": "^2.6.5" - }, - "engines": { - "node": ">= 8.9" - }, - "peerDependencies": { - "@babel/core": "^7.0.0", - "webpack": ">=2" - } - }, - "node_modules/babel-loader/node_modules/loader-utils": { - "version": "2.0.4", - "resolved": "https://registry.npmjs.org/loader-utils/-/loader-utils-2.0.4.tgz", - "integrity": "sha512-xXqpXoINfFhgua9xiqD8fPFHgkoq1mmmpE92WlDbm9rNRd/EbRb+Gqf908T2DMfuHjjJlksiK2RbHVOdD/MqSw==", - "dev": true, - "dependencies": { - "big.js": "^5.2.2", - "emojis-list": "^3.0.0", - "json5": "^2.1.2" - }, - "engines": { - "node": ">=8.9.0" - } - }, - "node_modules/babel-plugin-dynamic-import-node": { - "version": "2.3.3", - "resolved": "https://registry.npmjs.org/babel-plugin-dynamic-import-node/-/babel-plugin-dynamic-import-node-2.3.3.tgz", - "integrity": "sha512-jZVI+s9Zg3IqA/kdi0i6UDCybUI3aSBLnglhYbSSjKlV7yF1F/5LWv8MakQmvYpnbJDS6fcBL2KzHSxNCMtWSQ==", - "dev": true, - "dependencies": { - "object.assign": "^4.1.0" - } - }, - "node_modules/babel-plugin-polyfill-corejs2": { - "version": "0.4.13", - "resolved": "https://registry.npmjs.org/babel-plugin-polyfill-corejs2/-/babel-plugin-polyfill-corejs2-0.4.13.tgz", - "integrity": "sha512-3sX/eOms8kd3q2KZ6DAhKPc0dgm525Gqq5NtWKZ7QYYZEv57OQ54KtblzJzH1lQF/eQxO8KjWGIK9IPUJNus5g==", - "dev": true, - "dependencies": { - "@babel/compat-data": "^7.22.6", - "@babel/helper-define-polyfill-provider": "^0.6.4", - "semver": "^6.3.1" - }, - "peerDependencies": { - "@babel/core": "^7.4.0 || ^8.0.0-0 <8.0.0" - } - }, - "node_modules/babel-plugin-polyfill-corejs3": { - "version": "0.11.1", - "resolved": "https://registry.npmjs.org/babel-plugin-polyfill-corejs3/-/babel-plugin-polyfill-corejs3-0.11.1.tgz", - "integrity": "sha512-yGCqvBT4rwMczo28xkH/noxJ6MZ4nJfkVYdoDaC/utLtWrXxv27HVrzAeSbqR8SxDsp46n0YF47EbHoixy6rXQ==", - "dev": true, - "dependencies": { - "@babel/helper-define-polyfill-provider": "^0.6.3", - "core-js-compat": "^3.40.0" - }, - "peerDependencies": { - "@babel/core": "^7.4.0 || ^8.0.0-0 <8.0.0" - } - }, - "node_modules/babel-plugin-polyfill-regenerator": { - "version": "0.6.4", - "resolved": "https://registry.npmjs.org/babel-plugin-polyfill-regenerator/-/babel-plugin-polyfill-regenerator-0.6.4.tgz", - "integrity": "sha512-7gD3pRadPrbjhjLyxebmx/WrFYcuSjZ0XbdUujQMZ/fcE9oeewk2U/7PCvez84UeuK3oSjmPZ0Ch0dlupQvGzw==", - "dev": true, - "dependencies": { - "@babel/helper-define-polyfill-provider": "^0.6.4" - }, - "peerDependencies": { - "@babel/core": "^7.4.0 || ^8.0.0-0 <8.0.0" - } - }, "node_modules/balanced-match": { "version": "1.0.2", "resolved": "https://registry.npmjs.org/balanced-match/-/balanced-match-1.0.2.tgz", "integrity": "sha512-3oSeUO0TMV67hN1AmbXsK4yaqU7tjiHlbxRDZOpH0KW9+CeX4bRAaX0Anxt0tx2MrpRpWwQaPwIlISEJhYU5Pw==", "dev": true }, - "node_modules/base64-js": { - "version": "1.5.1", - "resolved": "https://registry.npmjs.org/base64-js/-/base64-js-1.5.1.tgz", - "integrity": "sha512-AKpaYlHn8t4SVbOHCy+b5+KKgvR4vrsD8vbvrbiQJps7fKDTkjkDry6ji0rUJjC0kzbNePLwzxq8iypo41qeWA==", - "dev": true, - "funding": [ - { - "type": "github", - "url": "https://github.com/sponsors/feross" - }, - { - "type": "patreon", - "url": "https://www.patreon.com/feross" - }, - { - "type": "consulting", - "url": "https://feross.org/support" - } - ] - }, - "node_modules/batch": { - "version": "0.6.1", - "resolved": "https://registry.npmjs.org/batch/-/batch-0.6.1.tgz", - "integrity": "sha512-x+VAiMRL6UPkx+kudNvxTl6hB2XNNCG2r+7wixVfIYwu/2HKRXimwQyaumLjMveWvT2Hkd/cAJw+QBMfJ/EKVw==", - "dev": true, - "peer": true - }, - "node_modules/big.js": { - 
"version": "5.2.2", - "resolved": "https://registry.npmjs.org/big.js/-/big.js-5.2.2.tgz", - "integrity": "sha512-vyL2OymJxmarO8gxMr0mhChsO9QGwhynfuu4+MHTAW6czfq9humCB7rKpUjDd9YUiDPU4mzpyupFSvOClAwbmQ==", - "devOptional": true, - "engines": { - "node": "*" - } - }, - "node_modules/binary-extensions": { - "version": "2.3.0", - "resolved": "https://registry.npmjs.org/binary-extensions/-/binary-extensions-2.3.0.tgz", - "integrity": "sha512-Ceh+7ox5qe7LJuLHoY0feh3pHuUDHAcRUeyL2VYghZwfpkNIy/+8Ocg0a3UuSoYzavmylwuLWQOf3hl0jjMMIw==", - "dev": true, - "peer": true, - "engines": { - "node": ">=8" - }, - "funding": { - "url": "https://github.com/sponsors/sindresorhus" - } - }, - "node_modules/bl": { - "version": "4.1.0", - "resolved": "https://registry.npmjs.org/bl/-/bl-4.1.0.tgz", - "integrity": "sha512-1W07cM9gS6DcLperZfFSj+bWLtaPGSOHWhPiGzXmvVJbRLdG82sH/Kn8EtW1VqWVA54AKf2h5k5BbnIbwF3h6w==", - "dev": true, - "dependencies": { - "buffer": "^5.5.0", - "inherits": "^2.0.4", - "readable-stream": "^3.4.0" - } - }, - "node_modules/bluebird": { - "version": "3.7.2", - "resolved": "https://registry.npmjs.org/bluebird/-/bluebird-3.7.2.tgz", - "integrity": "sha512-XpNj6GDQzdfW+r2Wnn7xiSAd7TM3jzkxGXBGTtWKuSXv1xUV+azxAm8jdWZN06QTQk+2N2XB9jRDkvbmQmcRtg==", - "dev": true, - "peer": true - }, - "node_modules/body-parser": { - "version": "1.20.3", - "resolved": "https://registry.npmjs.org/body-parser/-/body-parser-1.20.3.tgz", - "integrity": "sha512-7rAxByjUMqQ3/bHJy7D6OGXvx/MMc4IqBn/X0fcM1QUcAItpZrBEYhWGem+tzXH90c+G01ypMcYJBO9Y30203g==", - "dev": true, - "peer": true, - "dependencies": { - "bytes": "3.1.2", - "content-type": "~1.0.5", - "debug": "2.6.9", - "depd": "2.0.0", - "destroy": "1.2.0", - "http-errors": "2.0.0", - "iconv-lite": "0.4.24", - "on-finished": "2.4.1", - "qs": "6.13.0", - "raw-body": "2.5.2", - "type-is": "~1.6.18", - "unpipe": "1.0.0" - }, - "engines": { - "node": ">= 0.8", - "npm": "1.2.8000 || >= 1.4.16" - } - }, - "node_modules/body-parser/node_modules/debug": { - "version": "2.6.9", - "resolved": "https://registry.npmjs.org/debug/-/debug-2.6.9.tgz", - "integrity": "sha512-bC7ElrdJaJnPbAP+1EotYvqZsb3ecl5wi6Bfi6BJTUcNowp6cvspg0jXznRTKDjm/E7AdgFBVeAPVMNcKGsHMA==", - "dev": true, - "peer": true, - "dependencies": { - "ms": "2.0.0" - } - }, - "node_modules/body-parser/node_modules/ms": { - "version": "2.0.0", - "resolved": "https://registry.npmjs.org/ms/-/ms-2.0.0.tgz", - "integrity": "sha512-Tpp60P6IUJDTuOq/5Z8cdskzJujfwqfOTkrwIwj7IRISpnkJnT6SyJ4PCPnGMoFjC9ddhal5KVIYtAt97ix05A==", - "dev": true, - "peer": true - }, - "node_modules/bonjour-service": { - "version": "1.3.0", - "resolved": "https://registry.npmjs.org/bonjour-service/-/bonjour-service-1.3.0.tgz", - "integrity": "sha512-3YuAUiSkWykd+2Azjgyxei8OWf8thdn8AITIog2M4UICzoqfjlqr64WIjEXZllf/W6vK1goqleSR6brGomxQqA==", - "dev": true, - "peer": true, - "dependencies": { - "fast-deep-equal": "^3.1.3", - "multicast-dns": "^7.2.5" - } - }, "node_modules/boolbase": { "version": "1.0.0", "resolved": "https://registry.npmjs.org/boolbase/-/boolbase-1.0.0.tgz", @@ -4128,147 +1661,6 @@ "concat-map": "0.0.1" } }, - "node_modules/braces": { - "version": "3.0.3", - "resolved": "https://registry.npmjs.org/braces/-/braces-3.0.3.tgz", - "integrity": "sha512-yQbXgO/OSZVD2IsiLlro+7Hf6Q18EJrKSEsdoMzKePKXct3gvD8oLcOQdIzGupr5Fj+EDe8gO/lxc1BzfMpxvA==", - "dev": true, - "peer": true, - "dependencies": { - "fill-range": "^7.1.1" - }, - "engines": { - "node": ">=8" - } - }, - "node_modules/browserslist": { - "version": "4.25.0", - "resolved": 
"https://registry.npmjs.org/browserslist/-/browserslist-4.25.0.tgz", - "integrity": "sha512-PJ8gYKeS5e/whHBh8xrwYK+dAvEj7JXtz6uTucnMRB8OiGTsKccFekoRrjajPBHV8oOY+2tI4uxeceSimKwMFA==", - "devOptional": true, - "funding": [ - { - "type": "opencollective", - "url": "https://opencollective.com/browserslist" - }, - { - "type": "tidelift", - "url": "https://tidelift.com/funding/github/npm/browserslist" - }, - { - "type": "github", - "url": "https://github.com/sponsors/ai" - } - ], - "dependencies": { - "caniuse-lite": "^1.0.30001718", - "electron-to-chromium": "^1.5.160", - "node-releases": "^2.0.19", - "update-browserslist-db": "^1.1.3" - }, - "bin": { - "browserslist": "cli.js" - }, - "engines": { - "node": "^6 || ^7 || ^8 || ^9 || ^10 || ^11 || ^12 || >=13.7" - } - }, - "node_modules/buffer": { - "version": "5.7.1", - "resolved": "https://registry.npmjs.org/buffer/-/buffer-5.7.1.tgz", - "integrity": "sha512-EHcyIPBQ4BSGlvjB16k5KgAJ27CIsHY/2JBmCRReo48y9rQ3MaUzWX3KVlBa4U7MyX02HdVj0K7C3WaB3ju7FQ==", - "dev": true, - "funding": [ - { - "type": "github", - "url": "https://github.com/sponsors/feross" - }, - { - "type": "patreon", - "url": "https://www.patreon.com/feross" - }, - { - "type": "consulting", - "url": "https://feross.org/support" - } - ], - "dependencies": { - "base64-js": "^1.3.1", - "ieee754": "^1.1.13" - } - }, - "node_modules/buffer-from": { - "version": "1.1.2", - "resolved": "https://registry.npmjs.org/buffer-from/-/buffer-from-1.1.2.tgz", - "integrity": "sha512-E+XQCRwSbaaiChtv6k6Dwgc+bx+Bs6vuKJHHl5kox/BaKbhiXzqQOwK4cO22yElGp2OCmjwVhT3HmxgyPGnJfQ==", - "devOptional": true - }, - "node_modules/bytes": { - "version": "3.1.2", - "resolved": "https://registry.npmjs.org/bytes/-/bytes-3.1.2.tgz", - "integrity": "sha512-/Nf7TyzTx6S3yRJObOAV7956r8cr2+Oj8AC5dt8wSP3BQAoeX58NoHyCU8P8zGkNXStjTSi6fzO6F0pBdcYbEg==", - "dev": true, - "peer": true, - "engines": { - "node": ">= 0.8" - } - }, - "node_modules/call-bind": { - "version": "1.0.8", - "resolved": "https://registry.npmjs.org/call-bind/-/call-bind-1.0.8.tgz", - "integrity": "sha512-oKlSFMcMwpUg2ednkhQ454wfWiU/ul3CkJe/PEHcTKuiX6RpbehUiFMXu13HalGZxfUwCQzZG747YXBn1im9ww==", - "dev": true, - "dependencies": { - "call-bind-apply-helpers": "^1.0.0", - "es-define-property": "^1.0.0", - "get-intrinsic": "^1.2.4", - "set-function-length": "^1.2.2" - }, - "engines": { - "node": ">= 0.4" - }, - "funding": { - "url": "https://github.com/sponsors/ljharb" - } - }, - "node_modules/call-bind-apply-helpers": { - "version": "1.0.2", - "resolved": "https://registry.npmjs.org/call-bind-apply-helpers/-/call-bind-apply-helpers-1.0.2.tgz", - "integrity": "sha512-Sp1ablJ0ivDkSzjcaJdxEunN5/XvksFJ2sMBFfq6x0ryhQV/2b/KwFe21cMpmHtPOSij8K99/wSfoEuTObmuMQ==", - "dev": true, - "dependencies": { - "es-errors": "^1.3.0", - "function-bind": "^1.1.2" - }, - "engines": { - "node": ">= 0.4" - } - }, - "node_modules/call-bound": { - "version": "1.0.4", - "resolved": "https://registry.npmjs.org/call-bound/-/call-bound-1.0.4.tgz", - "integrity": "sha512-+ys997U96po4Kx/ABpBCqhA9EuxJaQWDQg7295H4hBphv3IZg0boBKuwYpt4YXp6MZ5AmZQnU/tyMTlRpaSejg==", - "dev": true, - "dependencies": { - "call-bind-apply-helpers": "^1.0.2", - "get-intrinsic": "^1.3.0" - }, - "engines": { - "node": ">= 0.4" - }, - "funding": { - "url": "https://github.com/sponsors/ljharb" - } - }, - "node_modules/callsite": { - "version": "1.0.0", - "resolved": "https://registry.npmjs.org/callsite/-/callsite-1.0.0.tgz", - "integrity": 
"sha512-0vdNRFXn5q+dtOqjfFtmtlI9N2eVZ7LMyEV2iKC5mEEFvSg/69Ml6b/WU2qF8W1nLRa0wiSrDT3Y5jOHZCwKPQ==", - "devOptional": true, - "engines": { - "node": "*" - } - }, "node_modules/callsites": { "version": "3.1.0", "resolved": "https://registry.npmjs.org/callsites/-/callsites-3.1.0.tgz", @@ -4278,183 +1670,11 @@ "node": ">=6" } }, - "node_modules/camel-case": { - "version": "4.1.2", - "resolved": "https://registry.npmjs.org/camel-case/-/camel-case-4.1.2.tgz", - "integrity": "sha512-gxGWBrTT1JuMx6R+o5PTXMmUnhnVzLQ9SNutD4YqKtI6ap897t3tKECYla6gCWEkplXnlNybEkZg9GEGxKFCgw==", - "dev": true, - "peer": true, - "dependencies": { - "pascal-case": "^3.1.2", - "tslib": "^2.0.3" - } - }, - "node_modules/camelcase": { - "version": "5.3.1", - "resolved": "https://registry.npmjs.org/camelcase/-/camelcase-5.3.1.tgz", - "integrity": "sha512-L28STB170nwWS63UjtlEOE3dldQApaJXZkOI1uMFfzf3rRuPegHaHesyee+YxQ+W6SvRDQV6UrdOdRiR153wJg==", - "dev": true, - "engines": { - "node": ">=6" - } - }, - "node_modules/caniuse-api": { - "version": "3.0.0", - "resolved": "https://registry.npmjs.org/caniuse-api/-/caniuse-api-3.0.0.tgz", - "integrity": "sha512-bsTwuIg/BZZK/vreVTYYbSWoe2F+71P7K5QGEX+pT250DZbfU1MQ5prOKpPR+LL6uWKK3KMwMCAS74QB3Um1uw==", - "dev": true, - "peer": true, - "dependencies": { - "browserslist": "^4.0.0", - "caniuse-lite": "^1.0.0", - "lodash.memoize": "^4.1.2", - "lodash.uniq": "^4.5.0" - } - }, - "node_modules/caniuse-lite": { - "version": "1.0.30001721", - "resolved": "https://registry.npmjs.org/caniuse-lite/-/caniuse-lite-1.0.30001721.tgz", - "integrity": "sha512-cOuvmUVtKrtEaoKiO0rSc29jcjwMwX5tOHDy4MgVFEWiUXj4uBMJkwI8MDySkgXidpMiHUcviogAvFi4pA2hDQ==", - "devOptional": true, - "funding": [ - { - "type": "opencollective", - "url": "https://opencollective.com/browserslist" - }, - { - "type": "tidelift", - "url": "https://tidelift.com/funding/github/npm/caniuse-lite" - }, - { - "type": "github", - "url": "https://github.com/sponsors/ai" - } - ] - }, - "node_modules/case-sensitive-paths-webpack-plugin": { - "version": "2.4.0", - "resolved": "https://registry.npmjs.org/case-sensitive-paths-webpack-plugin/-/case-sensitive-paths-webpack-plugin-2.4.0.tgz", - "integrity": "sha512-roIFONhcxog0JSSWbvVAh3OocukmSgpqOH6YpMkCvav/ySIV3JKg4Dc8vYtQjYi/UxpNE36r/9v+VqTQqgkYmw==", - "dev": true, - "peer": true, - "engines": { - "node": ">=4" - } - }, "node_modules/chalk": { - "version": "3.0.0", - "resolved": "https://registry.npmjs.org/chalk/-/chalk-3.0.0.tgz", - "integrity": "sha512-4D3B6Wf41KOYRFdszmDqMCGq5VV/uMAB273JILmO+3jAlh8X4qDtdtgCR3fxtbLEMzSx22QdhnDcJvu2u1fVwg==", - "dev": true, - "peer": true, - "dependencies": { - "ansi-styles": "^4.1.0", - "supports-color": "^7.1.0" - }, - "engines": { - "node": ">=8" - } - }, - "node_modules/chokidar": { - "version": "3.6.0", - "resolved": "https://registry.npmjs.org/chokidar/-/chokidar-3.6.0.tgz", - "integrity": "sha512-7VT13fmjotKpGipCW9JEQAusEPE+Ei8nl6/g4FBAmIm0GOOLMua9NDDo/DWp0ZAxCr3cPq5ZpBqmPAQgDda2Pw==", - "dev": true, - "peer": true, - "dependencies": { - "anymatch": "~3.1.2", - "braces": "~3.0.2", - "glob-parent": "~5.1.2", - "is-binary-path": "~2.1.0", - "is-glob": "~4.0.1", - "normalize-path": "~3.0.0", - "readdirp": "~3.6.0" - }, - "engines": { - "node": ">= 8.10.0" - }, - "funding": { - "url": "https://paulmillr.com/funding/" - }, - "optionalDependencies": { - "fsevents": "~2.3.2" - } - }, - "node_modules/chokidar/node_modules/glob-parent": { - "version": "5.1.2", - "resolved": "https://registry.npmjs.org/glob-parent/-/glob-parent-5.1.2.tgz", - "integrity": 
"sha512-AOIgSQCepiJYwP3ARnGx+5VnTu2HBYdzbGP45eLw1vr3zB3vZLeyed1sC9hnbcOc9/SrMyM5RPQrkGz4aS9Zow==", - "dev": true, - "peer": true, - "dependencies": { - "is-glob": "^4.0.1" - }, - "engines": { - "node": ">= 6" - } - }, - "node_modules/chrome-trace-event": { - "version": "1.0.4", - "resolved": "https://registry.npmjs.org/chrome-trace-event/-/chrome-trace-event-1.0.4.tgz", - "integrity": "sha512-rNjApaLzuwaOTjCiT8lSDdGN1APCiqkChLMJxJPWLunPAt5fy8xgU9/jNOchV84wfIxrA0lRQB7oCT8jrn/wrQ==", - "devOptional": true, - "engines": { - "node": ">=6.0" - } - }, - "node_modules/clean-css": { - "version": "5.3.3", - "resolved": "https://registry.npmjs.org/clean-css/-/clean-css-5.3.3.tgz", - "integrity": "sha512-D5J+kHaVb/wKSFcyyV75uCn8fiY4sV38XJoe4CUyGQ+mOU/fMVYUdH1hJC+CJQ5uY3EnW27SbJYS4X8BiLrAFg==", - "dev": true, - "peer": true, - "dependencies": { - "source-map": "~0.6.0" - }, - "engines": { - "node": ">= 10.0" - } - }, - "node_modules/cli-cursor": { - "version": "3.1.0", - "resolved": "https://registry.npmjs.org/cli-cursor/-/cli-cursor-3.1.0.tgz", - "integrity": "sha512-I/zHAwsKf9FqGoXM4WWRACob9+SNukZTd94DWF57E4toouRulbCxcUh6RKUEOQlYTHJnzkPMySvPNaaSLNfLZw==", - "dev": true, - "dependencies": { - "restore-cursor": "^3.1.0" - }, - "engines": { - "node": ">=8" - } - }, - "node_modules/cli-highlight": { - "version": "2.1.11", - "resolved": "https://registry.npmjs.org/cli-highlight/-/cli-highlight-2.1.11.tgz", - "integrity": "sha512-9KDcoEVwyUXrjcJNvHD0NFc/hiwe/WPVYIleQh2O1N2Zro5gWJZ/K+3DGn8w8P/F6FxOgzyC5bxDyHIgCSPhGg==", - "dev": true, - "peer": true, - "dependencies": { - "chalk": "^4.0.0", - "highlight.js": "^10.7.1", - "mz": "^2.4.0", - "parse5": "^5.1.1", - "parse5-htmlparser2-tree-adapter": "^6.0.0", - "yargs": "^16.0.0" - }, - "bin": { - "highlight": "bin/highlight" - }, - "engines": { - "node": ">=8.0.0", - "npm": ">=5.0.0" - } - }, - "node_modules/cli-highlight/node_modules/chalk": { "version": "4.1.2", "resolved": "https://registry.npmjs.org/chalk/-/chalk-4.1.2.tgz", "integrity": "sha512-oKnbhFyRIXpUuez8iBMmyEa4nbj4IOQyuhc/wy9kY7/WVPcwIO9VA668Pu8RkO7+0G76SLROeyw9CpQ061i4mA==", "dev": true, - "peer": true, "dependencies": { "ansi-styles": "^4.1.0", "supports-color": "^7.1.0" @@ -4466,73 +1686,10 @@ "url": "https://github.com/chalk/chalk?sponsor=1" } }, - "node_modules/cli-spinners": { - "version": "2.9.2", - "resolved": "https://registry.npmjs.org/cli-spinners/-/cli-spinners-2.9.2.tgz", - "integrity": "sha512-ywqV+5MmyL4E7ybXgKys4DugZbX0FC6LnwrhjuykIjnK9k8OQacQ7axGKnjDXWNhns0xot3bZI5h55H8yo9cJg==", - "dev": true, - "engines": { - "node": ">=6" - }, - "funding": { - "url": "https://github.com/sponsors/sindresorhus" - } - }, - "node_modules/clipboardy": { - "version": "2.3.0", - "resolved": "https://registry.npmjs.org/clipboardy/-/clipboardy-2.3.0.tgz", - "integrity": "sha512-mKhiIL2DrQIsuXMgBgnfEHOZOryC7kY7YO//TN6c63wlEm3NG5tz+YgY5rVi29KCmq/QQjKYvM7a19+MDOTHOQ==", - "dev": true, - "peer": true, - "dependencies": { - "arch": "^2.1.1", - "execa": "^1.0.0", - "is-wsl": "^2.1.1" - }, - "engines": { - "node": ">=8" - } - }, - "node_modules/cliui": { - "version": "7.0.4", - "resolved": "https://registry.npmjs.org/cliui/-/cliui-7.0.4.tgz", - "integrity": "sha512-OcRE68cOsVMXp1Yvonl/fzkQOyjLSu/8bhPDfQt0e0/Eb283TKP20Fs2MqoPsr9SwA595rRCA+QMzYc9nBP+JQ==", - "dev": true, - "peer": true, - "dependencies": { - "string-width": "^4.2.0", - "strip-ansi": "^6.0.0", - "wrap-ansi": "^7.0.0" - } - }, - "node_modules/clone": { - "version": "1.0.4", - "resolved": "https://registry.npmjs.org/clone/-/clone-1.0.4.tgz", - 
"integrity": "sha512-JQHZ2QMW6l3aH/j6xCqQThY/9OH4D/9ls34cgkUBiEeocRTU04tHfKPBsUK1PqZCUQM7GiA0IIXJSuXHI64Kbg==", - "dev": true, - "engines": { - "node": ">=0.8" - } - }, - "node_modules/clone-deep": { - "version": "4.0.1", - "resolved": "https://registry.npmjs.org/clone-deep/-/clone-deep-4.0.1.tgz", - "integrity": "sha512-neHB9xuzh/wk0dIHweyAXv2aPGZIVk3pLMe+/RNzINf17fe0OG96QroktYAUm7SM1PBnzTabaLboqqxDyMU+SQ==", - "dev": true, - "peer": true, - "dependencies": { - "is-plain-object": "^2.0.4", - "kind-of": "^6.0.2", - "shallow-clone": "^3.0.0" - }, - "engines": { - "node": ">=6" - } - }, "node_modules/codemirror": { - "version": "6.0.1", - "resolved": "https://registry.npmjs.org/codemirror/-/codemirror-6.0.1.tgz", - "integrity": "sha512-J8j+nZ+CdWmIeFIGXEFbFPtpiYacFMDR8GlHK3IyHQJMCaVRfGx9NT+Hxivv1ckLWPvNdZqndbr/7lVhrf/Svg==", + "version": "6.0.2", + "resolved": "https://registry.npmjs.org/codemirror/-/codemirror-6.0.2.tgz", + "integrity": "sha512-VhydHotNW5w1UGK0Qj96BwSk/Zqbp9WbnyK2W/eVMv4QyF41INRGpjUhFJY7/uDNuudSc33a/PKr4iDqRduvHw==", "peer": true, "dependencies": { "@codemirror/autocomplete": "^6.0.0", @@ -4562,258 +1719,22 @@ "integrity": "sha512-dOy+3AuW3a2wNbZHIuMZpTcgjGuLU/uBL/ubcZF9OXbDo8ff4O8yVp5Bf0efS8uEoYo5q4Fx7dY9OgQGXgAsQA==", "dev": true }, - "node_modules/colord": { - "version": "2.9.3", - "resolved": "https://registry.npmjs.org/colord/-/colord-2.9.3.tgz", - "integrity": "sha512-jeC1axXpnb0/2nn/Y1LPuLdgXBLH7aDcHu4KEKfqw3CUhX7ZpfBSlPKyqXE6btIgEzfWtrX3/tyBCaCvXvMkOw==", - "dev": true, - "peer": true - }, - "node_modules/colorette": { - "version": "2.0.20", - "resolved": "https://registry.npmjs.org/colorette/-/colorette-2.0.20.tgz", - "integrity": "sha512-IfEDxwoWIjkeXL1eXcDiow4UbKjhLdq6/EuSVR9GMN7KVH3r9gQ83e73hsz1Nd1T3ijd5xv1wcWRYO+D6kCI2w==", - "dev": true, - "peer": true - }, - "node_modules/commander": { - "version": "8.3.0", - "resolved": "https://registry.npmjs.org/commander/-/commander-8.3.0.tgz", - "integrity": "sha512-OkTL9umf+He2DZkUq8f8J9of7yL6RJKI24dVITBmNfZBmri9zYZQrKkuXiKhyfPSu8tUhnVBB1iKXevvnlR4Ww==", - "dev": true, - "peer": true, - "engines": { - "node": ">= 12" - } - }, - "node_modules/common-path-prefix": { - "version": "3.0.0", - "resolved": "https://registry.npmjs.org/common-path-prefix/-/common-path-prefix-3.0.0.tgz", - "integrity": "sha512-QE33hToZseCH3jS0qN96O/bSh3kaw/h+Tq7ngyY9eWDUnTlTNUyqfqvCXioLe5Na5jFsL78ra/wuBU4iuEgd4w==", - "devOptional": true - }, - "node_modules/commondir": { - "version": "1.0.1", - "resolved": "https://registry.npmjs.org/commondir/-/commondir-1.0.1.tgz", - "integrity": "sha512-W9pAhw0ja1Edb5GVdIF1mjZw/ASI0AlShXM83UUGe2DVr5TdAPEA1OA8m/g8zWp9x6On7gqufY+FatDbC3MDQg==", - "dev": true - }, - "node_modules/compressible": { - "version": "2.0.18", - "resolved": "https://registry.npmjs.org/compressible/-/compressible-2.0.18.tgz", - "integrity": "sha512-AF3r7P5dWxL8MxyITRMlORQNaOA2IkAFaTr4k7BUumjPtRpGDTZpl0Pb1XCO6JeDCBdp126Cgs9sMxqSjgYyRg==", - "dev": true, - "peer": true, - "dependencies": { - "mime-db": ">= 1.43.0 < 2" - }, - "engines": { - "node": ">= 0.6" - } - }, - "node_modules/compression": { - "version": "1.8.0", - "resolved": "https://registry.npmjs.org/compression/-/compression-1.8.0.tgz", - "integrity": "sha512-k6WLKfunuqCYD3t6AsuPGvQWaKwuLLh2/xHNcX4qE+vIfDNXpSqnrhwA7O53R7WVQUnt8dVAIW+YHr7xTgOgGA==", - "dev": true, - "peer": true, - "dependencies": { - "bytes": "3.1.2", - "compressible": "~2.0.18", - "debug": "2.6.9", - "negotiator": "~0.6.4", - "on-headers": "~1.0.2", - "safe-buffer": "5.2.1", - "vary": "~1.1.2" - }, - "engines": 
{ - "node": ">= 0.8.0" - } - }, - "node_modules/compression/node_modules/debug": { - "version": "2.6.9", - "resolved": "https://registry.npmjs.org/debug/-/debug-2.6.9.tgz", - "integrity": "sha512-bC7ElrdJaJnPbAP+1EotYvqZsb3ecl5wi6Bfi6BJTUcNowp6cvspg0jXznRTKDjm/E7AdgFBVeAPVMNcKGsHMA==", - "dev": true, - "peer": true, - "dependencies": { - "ms": "2.0.0" - } - }, - "node_modules/compression/node_modules/ms": { - "version": "2.0.0", - "resolved": "https://registry.npmjs.org/ms/-/ms-2.0.0.tgz", - "integrity": "sha512-Tpp60P6IUJDTuOq/5Z8cdskzJujfwqfOTkrwIwj7IRISpnkJnT6SyJ4PCPnGMoFjC9ddhal5KVIYtAt97ix05A==", - "dev": true, - "peer": true - }, "node_modules/concat-map": { "version": "0.0.1", "resolved": "https://registry.npmjs.org/concat-map/-/concat-map-0.0.1.tgz", "integrity": "sha512-/Srv4dswyQNBfohGpz9o6Yb3Gz3SrUDqBH5rTuhGR7ahtlbYKnVxw2bCFMRljaA7EXHaXZ8wsHdodFvbkhKmqg==", "dev": true }, - "node_modules/connect-history-api-fallback": { - "version": "2.0.0", - "resolved": "https://registry.npmjs.org/connect-history-api-fallback/-/connect-history-api-fallback-2.0.0.tgz", - "integrity": "sha512-U73+6lQFmfiNPrYbXqr6kZ1i1wiRqXnp2nhMsINseWXO8lDau0LGEffJ8kQi4EjLZympVgRdvqjAgiZ1tgzDDA==", - "dev": true, - "peer": true, - "engines": { - "node": ">=0.8" - } - }, - "node_modules/consolidate": { - "version": "0.15.1", - "resolved": "https://registry.npmjs.org/consolidate/-/consolidate-0.15.1.tgz", - "integrity": "sha512-DW46nrsMJgy9kqAbPt5rKaCr7uFtpo4mSUvLHIUbJEjm0vo+aY5QLwBUq3FK4tRnJr/X0Psc0C4jf/h+HtXSMw==", - "deprecated": "Please upgrade to consolidate v1.0.0+ as it has been modernized with several long-awaited fixes implemented. Maintenance is supported by Forward Email at https://forwardemail.net ; follow/watch https://github.com/ladjs/consolidate for updates and release changelog", - "dev": true, - "peer": true, - "dependencies": { - "bluebird": "^3.1.1" - }, - "engines": { - "node": ">= 0.10.0" - } - }, - "node_modules/content-disposition": { - "version": "0.5.4", - "resolved": "https://registry.npmjs.org/content-disposition/-/content-disposition-0.5.4.tgz", - "integrity": "sha512-FveZTNuGw04cxlAiWbzi6zTAL/lhehaWbTtgluJh4/E95DqMwTmha3KZN1aAWA8cFIhHzMZUvLevkw5Rqk+tSQ==", - "dev": true, - "peer": true, - "dependencies": { - "safe-buffer": "5.2.1" - }, - "engines": { - "node": ">= 0.6" - } - }, - "node_modules/content-type": { - "version": "1.0.5", - "resolved": "https://registry.npmjs.org/content-type/-/content-type-1.0.5.tgz", - "integrity": "sha512-nTjqfcBFEipKdXCv4YDQWCfmcLZKm81ldF0pAopTvyrFGVbcR6P/VAAd5G7N+0tTr8QqiU0tFadD6FK4NtJwOA==", - "dev": true, - "peer": true, - "engines": { - "node": ">= 0.6" - } - }, - "node_modules/convert-source-map": { - "version": "2.0.0", - "resolved": "https://registry.npmjs.org/convert-source-map/-/convert-source-map-2.0.0.tgz", - "integrity": "sha512-Kvp459HrV2FEJ1CAsi1Ku+MY3kasH19TFykTz2xWmMeq6bk2NU3XXvfJ+Q61m0xktWwt+1HSYf3JZsTms3aRJg==", - "dev": true - }, - "node_modules/cookie": { - "version": "0.7.1", - "resolved": "https://registry.npmjs.org/cookie/-/cookie-0.7.1.tgz", - "integrity": "sha512-6DnInpx7SJ2AK3+CTUE/ZM0vWTUboZCegxhC2xiIydHR9jNuTAASBrfEpHhiGOZw/nX51bHt6YQl8jsGo4y/0w==", - "dev": true, - "peer": true, - "engines": { - "node": ">= 0.6" - } - }, - "node_modules/cookie-signature": { - "version": "1.0.6", - "resolved": "https://registry.npmjs.org/cookie-signature/-/cookie-signature-1.0.6.tgz", - "integrity": "sha512-QADzlaHc8icV8I7vbaJXJwod9HWYp8uCqf1xa4OfNu1T7JVxQIrUgOWtHdNDtPiywmFbiS12VjotIXLrKM3orQ==", - "dev": true, - "peer": true - }, - 
"node_modules/copy-webpack-plugin": { - "version": "9.1.0", - "resolved": "https://registry.npmjs.org/copy-webpack-plugin/-/copy-webpack-plugin-9.1.0.tgz", - "integrity": "sha512-rxnR7PaGigJzhqETHGmAcxKnLZSR5u1Y3/bcIv/1FnqXedcL/E2ewK7ZCNrArJKCiSv8yVXhTqetJh8inDvfsA==", - "dev": true, - "peer": true, - "dependencies": { - "fast-glob": "^3.2.7", - "glob-parent": "^6.0.1", - "globby": "^11.0.3", - "normalize-path": "^3.0.0", - "schema-utils": "^3.1.1", - "serialize-javascript": "^6.0.0" - }, - "engines": { - "node": ">= 12.13.0" - }, - "funding": { - "type": "opencollective", - "url": "https://opencollective.com/webpack" - }, - "peerDependencies": { - "webpack": "^5.1.0" - } - }, - "node_modules/copy-webpack-plugin/node_modules/schema-utils": { - "version": "3.3.0", - "resolved": "https://registry.npmjs.org/schema-utils/-/schema-utils-3.3.0.tgz", - "integrity": "sha512-pN/yOAvcC+5rQ5nERGuwrjLlYvLTbCibnZ1I7B1LaiAz9BRBlE9GMgE/eqV30P7aJQUf7Ddimy/RsbYO/GrVGg==", - "dev": true, - "peer": true, - "dependencies": { - "@types/json-schema": "^7.0.8", - "ajv": "^6.12.5", - "ajv-keywords": "^3.5.2" - }, - "engines": { - "node": ">= 10.13.0" - }, - "funding": { - "type": "opencollective", - "url": "https://opencollective.com/webpack" - } - }, "node_modules/core-js": { - "version": "3.42.0", - "resolved": "https://registry.npmjs.org/core-js/-/core-js-3.42.0.tgz", - "integrity": "sha512-Sz4PP4ZA+Rq4II21qkNqOEDTDrCvcANId3xpIgB34NDkWc3UduWj2dqEtN9yZIq8Dk3HyPI33x9sqqU5C8sr0g==", + "version": "3.44.0", + "resolved": "https://registry.npmjs.org/core-js/-/core-js-3.44.0.tgz", + "integrity": "sha512-aFCtd4l6GvAXwVEh3XbbVqJGHDJt0OZRa+5ePGx3LLwi12WfexqQxcsohb2wgsa/92xtl19Hd66G/L+TaAxDMw==", "hasInstallScript": true, "funding": { "type": "opencollective", "url": "https://opencollective.com/core-js" } }, - "node_modules/core-js-compat": { - "version": "3.42.0", - "resolved": "https://registry.npmjs.org/core-js-compat/-/core-js-compat-3.42.0.tgz", - "integrity": "sha512-bQasjMfyDGyaeWKBIu33lHh9qlSR0MFE/Nmc6nMjf/iU9b3rSMdAYz1Baxrv4lPdGUsTqZudHA4jIGSJy0SWZQ==", - "dev": true, - "dependencies": { - "browserslist": "^4.24.4" - }, - "funding": { - "type": "opencollective", - "url": "https://opencollective.com/core-js" - } - }, - "node_modules/core-util-is": { - "version": "1.0.3", - "resolved": "https://registry.npmjs.org/core-util-is/-/core-util-is-1.0.3.tgz", - "integrity": "sha512-ZQBvi1DcpJ4GDqanjucZ2Hj3wEO5pZDS89BWbkcrvdxksJorwUDDZamX9ldFkp9aw2lmBDLgkObEA4DWNJ9FYQ==", - "dev": true, - "peer": true - }, - "node_modules/cosmiconfig": { - "version": "7.1.0", - "resolved": "https://registry.npmjs.org/cosmiconfig/-/cosmiconfig-7.1.0.tgz", - "integrity": "sha512-AdmX6xUzdNASswsFtmwSt7Vj8po9IuqXm0UXz7QKPuEUmPB4XyjGfaAr2PSuELMwkRMVH1EpIkX5bTZGRB3eCA==", - "dev": true, - "peer": true, - "dependencies": { - "@types/parse-json": "^4.0.0", - "import-fresh": "^3.2.1", - "parse-json": "^5.0.0", - "path-type": "^4.0.0", - "yaml": "^1.10.0" - }, - "engines": { - "node": ">=10" - } - }, "node_modules/crelt": { "version": "1.0.6", "resolved": "https://registry.npmjs.org/crelt/-/crelt-1.0.6.tgz", @@ -4833,208 +1754,6 @@ "node": ">= 8" } }, - "node_modules/css-declaration-sorter": { - "version": "6.4.1", - "resolved": "https://registry.npmjs.org/css-declaration-sorter/-/css-declaration-sorter-6.4.1.tgz", - "integrity": "sha512-rtdthzxKuyq6IzqX6jEcIzQF/YqccluefyCYheovBOLhFT/drQA9zj/UbRAa9J7C0o6EG6u3E6g+vKkay7/k3g==", - "dev": true, - "peer": true, - "engines": { - "node": "^10 || ^12 || >=14" - }, - "peerDependencies": { - "postcss": 
"^8.0.9" - } - }, - "node_modules/css-loader": { - "version": "6.11.0", - "resolved": "https://registry.npmjs.org/css-loader/-/css-loader-6.11.0.tgz", - "integrity": "sha512-CTJ+AEQJjq5NzLga5pE39qdiSV56F8ywCIsqNIRF0r7BDgWsN25aazToqAFg7ZrtA/U016xudB3ffgweORxX7g==", - "dev": true, - "peer": true, - "dependencies": { - "icss-utils": "^5.1.0", - "postcss": "^8.4.33", - "postcss-modules-extract-imports": "^3.1.0", - "postcss-modules-local-by-default": "^4.0.5", - "postcss-modules-scope": "^3.2.0", - "postcss-modules-values": "^4.0.0", - "postcss-value-parser": "^4.2.0", - "semver": "^7.5.4" - }, - "engines": { - "node": ">= 12.13.0" - }, - "funding": { - "type": "opencollective", - "url": "https://opencollective.com/webpack" - }, - "peerDependencies": { - "@rspack/core": "0.x || 1.x", - "webpack": "^5.0.0" - }, - "peerDependenciesMeta": { - "@rspack/core": { - "optional": true - }, - "webpack": { - "optional": true - } - } - }, - "node_modules/css-loader/node_modules/semver": { - "version": "7.7.2", - "resolved": "https://registry.npmjs.org/semver/-/semver-7.7.2.tgz", - "integrity": "sha512-RF0Fw+rO5AMf9MAyaRXI4AV0Ulj5lMHqVxxdSgiVbixSCXoEmmX/jk0CuJw4+3SqroYO9VoUh+HcuJivvtJemA==", - "dev": true, - "peer": true, - "bin": { - "semver": "bin/semver.js" - }, - "engines": { - "node": ">=10" - } - }, - "node_modules/css-minimizer-webpack-plugin": { - "version": "3.4.1", - "resolved": "https://registry.npmjs.org/css-minimizer-webpack-plugin/-/css-minimizer-webpack-plugin-3.4.1.tgz", - "integrity": "sha512-1u6D71zeIfgngN2XNRJefc/hY7Ybsxd74Jm4qngIXyUEk7fss3VUzuHxLAq/R8NAba4QU9OUSaMZlbpRc7bM4Q==", - "dev": true, - "peer": true, - "dependencies": { - "cssnano": "^5.0.6", - "jest-worker": "^27.0.2", - "postcss": "^8.3.5", - "schema-utils": "^4.0.0", - "serialize-javascript": "^6.0.0", - "source-map": "^0.6.1" - }, - "engines": { - "node": ">= 12.13.0" - }, - "funding": { - "type": "opencollective", - "url": "https://opencollective.com/webpack" - }, - "peerDependencies": { - "webpack": "^5.0.0" - }, - "peerDependenciesMeta": { - "@parcel/css": { - "optional": true - }, - "clean-css": { - "optional": true - }, - "csso": { - "optional": true - }, - "esbuild": { - "optional": true - } - } - }, - "node_modules/css-minimizer-webpack-plugin/node_modules/ajv": { - "version": "8.17.1", - "resolved": "https://registry.npmjs.org/ajv/-/ajv-8.17.1.tgz", - "integrity": "sha512-B/gBuNg5SiMTrPkC+A2+cW0RszwxYmn6VYxB/inlBStS5nx6xHIt/ehKRhIMhqusl7a8LjQoZnjCs5vhwxOQ1g==", - "dev": true, - "peer": true, - "dependencies": { - "fast-deep-equal": "^3.1.3", - "fast-uri": "^3.0.1", - "json-schema-traverse": "^1.0.0", - "require-from-string": "^2.0.2" - }, - "funding": { - "type": "github", - "url": "https://github.com/sponsors/epoberezkin" - } - }, - "node_modules/css-minimizer-webpack-plugin/node_modules/ajv-keywords": { - "version": "5.1.0", - "resolved": "https://registry.npmjs.org/ajv-keywords/-/ajv-keywords-5.1.0.tgz", - "integrity": "sha512-YCS/JNFAUyr5vAuhk1DWm1CBxRHW9LbJ2ozWeemrIqpbsqKjHVxYPyi5GC0rjZIT5JxJ3virVTS8wk4i/Z+krw==", - "dev": true, - "peer": true, - "dependencies": { - "fast-deep-equal": "^3.1.3" - }, - "peerDependencies": { - "ajv": "^8.8.2" - } - }, - "node_modules/css-minimizer-webpack-plugin/node_modules/json-schema-traverse": { - "version": "1.0.0", - "resolved": "https://registry.npmjs.org/json-schema-traverse/-/json-schema-traverse-1.0.0.tgz", - "integrity": "sha512-NM8/P9n3XjXhIZn1lLhkFaACTOURQXjWhV4BA/RnOv8xvgqtqpAX9IO4mRQxSx1Rlo4tqzeqb0sOlruaOy3dug==", - "dev": true, - "peer": true - }, - 
"node_modules/css-minimizer-webpack-plugin/node_modules/schema-utils": { - "version": "4.3.2", - "resolved": "https://registry.npmjs.org/schema-utils/-/schema-utils-4.3.2.tgz", - "integrity": "sha512-Gn/JaSk/Mt9gYubxTtSn/QCV4em9mpAPiR1rqy/Ocu19u/G9J5WWdNoUT4SiV6mFC3y6cxyFcFwdzPM3FgxGAQ==", - "dev": true, - "peer": true, - "dependencies": { - "@types/json-schema": "^7.0.9", - "ajv": "^8.9.0", - "ajv-formats": "^2.1.1", - "ajv-keywords": "^5.1.0" - }, - "engines": { - "node": ">= 10.13.0" - }, - "funding": { - "type": "opencollective", - "url": "https://opencollective.com/webpack" - } - }, - "node_modules/css-select": { - "version": "4.3.0", - "resolved": "https://registry.npmjs.org/css-select/-/css-select-4.3.0.tgz", - "integrity": "sha512-wPpOYtnsVontu2mODhA19JrqWxNsfdatRKd64kmpRbQgh1KtItko5sTnEpPdpSaJszTOhEMlF/RPz28qj4HqhQ==", - "dev": true, - "peer": true, - "dependencies": { - "boolbase": "^1.0.0", - "css-what": "^6.0.1", - "domhandler": "^4.3.1", - "domutils": "^2.8.0", - "nth-check": "^2.0.1" - }, - "funding": { - "url": "https://github.com/sponsors/fb55" - } - }, - "node_modules/css-tree": { - "version": "1.1.3", - "resolved": "https://registry.npmjs.org/css-tree/-/css-tree-1.1.3.tgz", - "integrity": "sha512-tRpdppF7TRazZrjJ6v3stzv93qxRcSsFmW6cX0Zm2NVKpxE1WV1HblnghVv9TreireHkqI/VDEsfolRF1p6y7Q==", - "dev": true, - "peer": true, - "dependencies": { - "mdn-data": "2.0.14", - "source-map": "^0.6.1" - }, - "engines": { - "node": ">=8.0.0" - } - }, - "node_modules/css-what": { - "version": "6.1.0", - "resolved": "https://registry.npmjs.org/css-what/-/css-what-6.1.0.tgz", - "integrity": "sha512-HTUrgRJ7r4dsZKU6GjmpfRK1O76h97Z8MfS1G0FozR+oF2kG6Vfe8JE6zwrkbxigziPHinCJ+gCPjA9EaBDtRw==", - "dev": true, - "peer": true, - "engines": { - "node": ">= 6" - }, - "funding": { - "url": "https://github.com/sponsors/fb55" - } - }, "node_modules/cssesc": { "version": "3.0.0", "resolved": "https://registry.npmjs.org/cssesc/-/cssesc-3.0.0.tgz", @@ -5047,115 +1766,16 @@ "node": ">=4" } }, - "node_modules/cssnano": { - "version": "5.1.15", - "resolved": "https://registry.npmjs.org/cssnano/-/cssnano-5.1.15.tgz", - "integrity": "sha512-j+BKgDcLDQA+eDifLx0EO4XSA56b7uut3BQFH+wbSaSTuGLuiyTa/wbRYthUXX8LC9mLg+WWKe8h+qJuwTAbHw==", - "dev": true, - "peer": true, - "dependencies": { - "cssnano-preset-default": "^5.2.14", - "lilconfig": "^2.0.3", - "yaml": "^1.10.2" - }, - "engines": { - "node": "^10 || ^12 || >=14.0" - }, - "funding": { - "type": "opencollective", - "url": "https://opencollective.com/cssnano" - }, - "peerDependencies": { - "postcss": "^8.2.15" - } - }, - "node_modules/cssnano-preset-default": { - "version": "5.2.14", - "resolved": "https://registry.npmjs.org/cssnano-preset-default/-/cssnano-preset-default-5.2.14.tgz", - "integrity": "sha512-t0SFesj/ZV2OTylqQVOrFgEh5uanxbO6ZAdeCrNsUQ6fVuXwYTxJPNAGvGTxHbD68ldIJNec7PyYZDBrfDQ+6A==", - "dev": true, - "peer": true, - "dependencies": { - "css-declaration-sorter": "^6.3.1", - "cssnano-utils": "^3.1.0", - "postcss-calc": "^8.2.3", - "postcss-colormin": "^5.3.1", - "postcss-convert-values": "^5.1.3", - "postcss-discard-comments": "^5.1.2", - "postcss-discard-duplicates": "^5.1.0", - "postcss-discard-empty": "^5.1.1", - "postcss-discard-overridden": "^5.1.0", - "postcss-merge-longhand": "^5.1.7", - "postcss-merge-rules": "^5.1.4", - "postcss-minify-font-values": "^5.1.0", - "postcss-minify-gradients": "^5.1.1", - "postcss-minify-params": "^5.1.4", - "postcss-minify-selectors": "^5.2.1", - "postcss-normalize-charset": "^5.1.0", - 
"postcss-normalize-display-values": "^5.1.0", - "postcss-normalize-positions": "^5.1.1", - "postcss-normalize-repeat-style": "^5.1.1", - "postcss-normalize-string": "^5.1.0", - "postcss-normalize-timing-functions": "^5.1.0", - "postcss-normalize-unicode": "^5.1.1", - "postcss-normalize-url": "^5.1.0", - "postcss-normalize-whitespace": "^5.1.1", - "postcss-ordered-values": "^5.1.3", - "postcss-reduce-initial": "^5.1.2", - "postcss-reduce-transforms": "^5.1.0", - "postcss-svgo": "^5.1.0", - "postcss-unique-selectors": "^5.1.1" - }, - "engines": { - "node": "^10 || ^12 || >=14.0" - }, - "peerDependencies": { - "postcss": "^8.2.15" - } - }, - "node_modules/cssnano-utils": { - "version": "3.1.0", - "resolved": "https://registry.npmjs.org/cssnano-utils/-/cssnano-utils-3.1.0.tgz", - "integrity": "sha512-JQNR19/YZhz4psLX/rQ9M83e3z2Wf/HdJbryzte4a3NSuafyp9w/I4U+hx5C2S9g41qlstH7DEWnZaaj83OuEA==", - "dev": true, - "peer": true, - "engines": { - "node": "^10 || ^12 || >=14.0" - }, - "peerDependencies": { - "postcss": "^8.2.15" - } - }, - "node_modules/csso": { - "version": "4.2.0", - "resolved": "https://registry.npmjs.org/csso/-/csso-4.2.0.tgz", - "integrity": "sha512-wvlcdIbf6pwKEk7vHj8/Bkc0B4ylXZruLvOgs9doS5eOsOpuodOV2zJChSpkp+pRpYQLQMeF04nr3Z68Sta9jA==", - "dev": true, - "peer": true, - "dependencies": { - "css-tree": "^1.1.2" - }, - "engines": { - "node": ">=8.0.0" - } - }, "node_modules/csstype": { "version": "3.1.3", "resolved": "https://registry.npmjs.org/csstype/-/csstype-3.1.3.tgz", "integrity": "sha512-M1uQkMl8rQK/szD0LNhtqxIPLpimGm8sOBwU7lLnCpSbTyY3yeU1Vc7l4KT5zT4s/yOxHH5O7tIuuLOCnLADRw==" }, - "node_modules/debounce": { - "version": "1.2.1", - "resolved": "https://registry.npmjs.org/debounce/-/debounce-1.2.1.tgz", - "integrity": "sha512-XRRe6Glud4rd/ZGQfiV1ruXSfbvfJedlV9Y6zOlP+2K04vBYiJEte6stfFkCP03aMnY5tsipamumUjL14fofug==", - "dev": true, - "peer": true - }, "node_modules/debug": { "version": "4.4.1", "resolved": "https://registry.npmjs.org/debug/-/debug-4.4.1.tgz", "integrity": "sha512-KcKCqiftBJcZr++7ykoDIEwSa3XWowTfNPo92BYxjXiyYEVrUQh2aLyhxBCwww+heortUFxEJYcRzosstTEBYQ==", - "dev": true, + "devOptional": true, "dependencies": { "ms": "^2.1.3" }, @@ -5168,306 +1788,12 @@ } } }, - "node_modules/decache": { - "version": "4.6.2", - "resolved": "https://registry.npmjs.org/decache/-/decache-4.6.2.tgz", - "integrity": "sha512-2LPqkLeu8XWHU8qNCS3kcF6sCcb5zIzvWaAHYSvPfwhdd7mHuah29NssMzrTYyHN4F5oFy2ko9OBYxegtU0FEw==", - "devOptional": true, - "dependencies": { - "callsite": "^1.0.0" - } - }, "node_modules/deep-is": { "version": "0.1.4", "resolved": "https://registry.npmjs.org/deep-is/-/deep-is-0.1.4.tgz", "integrity": "sha512-oIPzksmTg4/MriiaYGO+okXDT7ztn/w3Eptv/+gSIdMdKsJo0u4CfYNFJPy+4SKMuCqGw2wxnA+URMg3t8a/bQ==", "dev": true }, - "node_modules/deepmerge": { - "version": "1.5.2", - "resolved": "https://registry.npmjs.org/deepmerge/-/deepmerge-1.5.2.tgz", - "integrity": "sha512-95k0GDqvBjZavkuvzx/YqVLv/6YYa17fz6ILMSf7neqQITCPbnfEnQvEgMPNjH4kgobe7+WIL0yJEHku+H3qtQ==", - "dev": true, - "peer": true, - "engines": { - "node": ">=0.10.0" - } - }, - "node_modules/default-gateway": { - "version": "6.0.3", - "resolved": "https://registry.npmjs.org/default-gateway/-/default-gateway-6.0.3.tgz", - "integrity": "sha512-fwSOJsbbNzZ/CUFpqFBqYfYNLj1NbMPm8MMCIzHjC83iSJRBEGmDUxU+WP661BaBQImeC2yHwXtz+P/O9o+XEg==", - "dev": true, - "peer": true, - "dependencies": { - "execa": "^5.0.0" - }, - "engines": { - "node": ">= 10" - } - }, - "node_modules/default-gateway/node_modules/execa": { - "version": "5.1.1", - 
"resolved": "https://registry.npmjs.org/execa/-/execa-5.1.1.tgz", - "integrity": "sha512-8uSpZZocAZRBAPIEINJj3Lo9HyGitllczc27Eh5YYojjMFMn8yHMDMaUHE2Jqfq05D/wucwI4JGURyXt1vchyg==", - "dev": true, - "peer": true, - "dependencies": { - "cross-spawn": "^7.0.3", - "get-stream": "^6.0.0", - "human-signals": "^2.1.0", - "is-stream": "^2.0.0", - "merge-stream": "^2.0.0", - "npm-run-path": "^4.0.1", - "onetime": "^5.1.2", - "signal-exit": "^3.0.3", - "strip-final-newline": "^2.0.0" - }, - "engines": { - "node": ">=10" - }, - "funding": { - "url": "https://github.com/sindresorhus/execa?sponsor=1" - } - }, - "node_modules/default-gateway/node_modules/get-stream": { - "version": "6.0.1", - "resolved": "https://registry.npmjs.org/get-stream/-/get-stream-6.0.1.tgz", - "integrity": "sha512-ts6Wi+2j3jQjqi70w5AlN8DFnkSwC+MqmxEzdEALB2qXZYV3X/b1CTfgPLGJNMeAWxdPfU8FO1ms3NUfaHCPYg==", - "dev": true, - "peer": true, - "engines": { - "node": ">=10" - }, - "funding": { - "url": "https://github.com/sponsors/sindresorhus" - } - }, - "node_modules/default-gateway/node_modules/is-stream": { - "version": "2.0.1", - "resolved": "https://registry.npmjs.org/is-stream/-/is-stream-2.0.1.tgz", - "integrity": "sha512-hFoiJiTl63nn+kstHGBtewWSKnQLpyb155KHheA1l39uvtO9nWIop1p3udqPcUd/xbF1VLMO4n7OI6p7RbngDg==", - "dev": true, - "peer": true, - "engines": { - "node": ">=8" - }, - "funding": { - "url": "https://github.com/sponsors/sindresorhus" - } - }, - "node_modules/default-gateway/node_modules/npm-run-path": { - "version": "4.0.1", - "resolved": "https://registry.npmjs.org/npm-run-path/-/npm-run-path-4.0.1.tgz", - "integrity": "sha512-S48WzZW777zhNIrn7gxOlISNAqi9ZC/uQFnRdbeIHhZhCA6UqpkOT8T1G7BvfdgP4Er8gF4sUbaS0i7QvIfCWw==", - "dev": true, - "peer": true, - "dependencies": { - "path-key": "^3.0.0" - }, - "engines": { - "node": ">=8" - } - }, - "node_modules/defaults": { - "version": "1.0.4", - "resolved": "https://registry.npmjs.org/defaults/-/defaults-1.0.4.tgz", - "integrity": "sha512-eFuaLoy/Rxalv2kr+lqMlUnrDWV+3j4pljOIJgLIhI058IQfWJ7vXhyEIHu+HtC738klGALYxOKDO0bQP3tg8A==", - "dev": true, - "dependencies": { - "clone": "^1.0.2" - }, - "funding": { - "url": "https://github.com/sponsors/sindresorhus" - } - }, - "node_modules/define-data-property": { - "version": "1.1.4", - "resolved": "https://registry.npmjs.org/define-data-property/-/define-data-property-1.1.4.tgz", - "integrity": "sha512-rBMvIzlpA8v6E+SJZoo++HAYqsLrkg7MSfIinMPFhmkorw7X+dOXVJQs+QT69zGkzMyfDnIMN2Wid1+NbL3T+A==", - "dev": true, - "dependencies": { - "es-define-property": "^1.0.0", - "es-errors": "^1.3.0", - "gopd": "^1.0.1" - }, - "engines": { - "node": ">= 0.4" - }, - "funding": { - "url": "https://github.com/sponsors/ljharb" - } - }, - "node_modules/define-lazy-prop": { - "version": "2.0.0", - "resolved": "https://registry.npmjs.org/define-lazy-prop/-/define-lazy-prop-2.0.0.tgz", - "integrity": "sha512-Ds09qNh8yw3khSjiJjiUInaGX9xlqZDY7JVryGxdxV7NPeuqQfplOpQ66yJFZut3jLa5zOwkXw1g9EI2uKh4Og==", - "dev": true, - "engines": { - "node": ">=8" - } - }, - "node_modules/define-properties": { - "version": "1.2.1", - "resolved": "https://registry.npmjs.org/define-properties/-/define-properties-1.2.1.tgz", - "integrity": "sha512-8QmQKqEASLd5nx0U1B1okLElbUuuttJ/AnYmRXbbbGDWh6uS208EjD4Xqq/I9wK7u0v6O08XhTWnt5XtEbR6Dg==", - "dev": true, - "dependencies": { - "define-data-property": "^1.0.1", - "has-property-descriptors": "^1.0.0", - "object-keys": "^1.1.1" - }, - "engines": { - "node": ">= 0.4" - }, - "funding": { - "url": "https://github.com/sponsors/ljharb" - } - }, - 
"node_modules/depd": { - "version": "2.0.0", - "resolved": "https://registry.npmjs.org/depd/-/depd-2.0.0.tgz", - "integrity": "sha512-g7nH6P6dyDioJogAAGprGpCtVImJhpPk/roCzdb3fIh61/s/nPsfR6onyMwkCAR/OlC3yBC0lESvUoQEAssIrw==", - "dev": true, - "peer": true, - "engines": { - "node": ">= 0.8" - } - }, - "node_modules/destroy": { - "version": "1.2.0", - "resolved": "https://registry.npmjs.org/destroy/-/destroy-1.2.0.tgz", - "integrity": "sha512-2sJGJTaXIIaR1w4iJSNoN0hnMY7Gpc/n8D4qSCJw8QqFWXf7cuAgnEHxBpweaVcPevC2l3KpjYCx3NypQQgaJg==", - "dev": true, - "peer": true, - "engines": { - "node": ">= 0.8", - "npm": "1.2.8000 || >= 1.4.16" - } - }, - "node_modules/detect-node": { - "version": "2.1.0", - "resolved": "https://registry.npmjs.org/detect-node/-/detect-node-2.1.0.tgz", - "integrity": "sha512-T0NIuQpnTvFDATNuHN5roPwSBG83rFsuO+MXXH9/3N1eFbn4wcPjttvjMLEPWJ0RGUYgQE7cGgS3tNxbqCGM7g==", - "dev": true, - "peer": true - }, - "node_modules/dir-glob": { - "version": "3.0.1", - "resolved": "https://registry.npmjs.org/dir-glob/-/dir-glob-3.0.1.tgz", - "integrity": "sha512-WkrWp9GR4KXfKGYzOLmTuGVi1UWFfws377n9cc55/tb6DuqyF6pcQ5AbiHEshaDpY9v6oaSr2XCDidGmMwdzIA==", - "dev": true, - "peer": true, - "dependencies": { - "path-type": "^4.0.0" - }, - "engines": { - "node": ">=8" - } - }, - "node_modules/dns-packet": { - "version": "5.6.1", - "resolved": "https://registry.npmjs.org/dns-packet/-/dns-packet-5.6.1.tgz", - "integrity": "sha512-l4gcSouhcgIKRvyy99RNVOgxXiicE+2jZoNmaNmZ6JXiGajBOJAesk1OBlJuM5k2c+eudGdLxDqXuPCKIj6kpw==", - "dev": true, - "peer": true, - "dependencies": { - "@leichtgewicht/ip-codec": "^2.0.1" - }, - "engines": { - "node": ">=6" - } - }, - "node_modules/dom-converter": { - "version": "0.2.0", - "resolved": "https://registry.npmjs.org/dom-converter/-/dom-converter-0.2.0.tgz", - "integrity": "sha512-gd3ypIPfOMr9h5jIKq8E3sHOTCjeirnl0WK5ZdS1AW0Odt0b1PaWaHdJ4Qk4klv+YB9aJBS7mESXjFoDQPu6DA==", - "dev": true, - "peer": true, - "dependencies": { - "utila": "~0.4" - } - }, - "node_modules/dom-serializer": { - "version": "1.4.1", - "resolved": "https://registry.npmjs.org/dom-serializer/-/dom-serializer-1.4.1.tgz", - "integrity": "sha512-VHwB3KfrcOOkelEG2ZOfxqLZdfkil8PtJi4P8N2MMXucZq2yLp75ClViUlOVwyoHEDjYU433Aq+5zWP61+RGag==", - "dev": true, - "peer": true, - "dependencies": { - "domelementtype": "^2.0.1", - "domhandler": "^4.2.0", - "entities": "^2.0.0" - }, - "funding": { - "url": "https://github.com/cheeriojs/dom-serializer?sponsor=1" - } - }, - "node_modules/dom-serializer/node_modules/entities": { - "version": "2.2.0", - "resolved": "https://registry.npmjs.org/entities/-/entities-2.2.0.tgz", - "integrity": "sha512-p92if5Nz619I0w+akJrLZH0MX0Pb5DX39XOwQTtXSdQQOaYH03S1uIQp4mhOZtAXrxq4ViO67YTiLBo2638o9A==", - "dev": true, - "peer": true, - "funding": { - "url": "https://github.com/fb55/entities?sponsor=1" - } - }, - "node_modules/domelementtype": { - "version": "2.3.0", - "resolved": "https://registry.npmjs.org/domelementtype/-/domelementtype-2.3.0.tgz", - "integrity": "sha512-OLETBj6w0OsagBwdXnPdN0cnMfF9opN69co+7ZrbfPGrdpPVNBUj02spi6B1N7wChLQiPn4CSH/zJvXw56gmHw==", - "dev": true, - "funding": [ - { - "type": "github", - "url": "https://github.com/sponsors/fb55" - } - ], - "peer": true - }, - "node_modules/domhandler": { - "version": "4.3.1", - "resolved": "https://registry.npmjs.org/domhandler/-/domhandler-4.3.1.tgz", - "integrity": "sha512-GrwoxYN+uWlzO8uhUXRl0P+kHE4GtVPfYzVLcUxPL7KNdHKj66vvlhiweIHqYYXWlw+T8iLMp42Lm67ghw4WMQ==", - "dev": true, - "peer": true, - "dependencies": { - "domelementtype": 
"^2.2.0" - }, - "engines": { - "node": ">= 4" - }, - "funding": { - "url": "https://github.com/fb55/domhandler?sponsor=1" - } - }, - "node_modules/domutils": { - "version": "2.8.0", - "resolved": "https://registry.npmjs.org/domutils/-/domutils-2.8.0.tgz", - "integrity": "sha512-w96Cjofp72M5IIhpjgobBimYEfoPjx1Vx0BSX9P30WBdZW2WIKU0T1Bd0kz2eNZ9ikjKgHbEyKx8BB6H1L3h3A==", - "dev": true, - "peer": true, - "dependencies": { - "dom-serializer": "^1.0.1", - "domelementtype": "^2.2.0", - "domhandler": "^4.2.0" - }, - "funding": { - "url": "https://github.com/fb55/domutils?sponsor=1" - } - }, - "node_modules/dot-case": { - "version": "3.0.4", - "resolved": "https://registry.npmjs.org/dot-case/-/dot-case-3.0.4.tgz", - "integrity": "sha512-Kv5nKlh6yRrdrGvxeJ2e5y2eRUpkUosIW4A2AS38zwSz27zu7ufDwQPi5Jhs3XAlGNetl3bmnGhQsMtkKJnj3w==", - "dev": true, - "peer": true, - "dependencies": { - "no-case": "^3.0.4", - "tslib": "^2.0.3" - } - }, "node_modules/dot-prop": { "version": "9.0.0", "resolved": "https://registry.npmjs.org/dot-prop/-/dot-prop-9.0.0.tgz", @@ -5482,114 +1808,6 @@ "url": "https://github.com/sponsors/sindresorhus" } }, - "node_modules/dotenv": { - "version": "10.0.0", - "resolved": "https://registry.npmjs.org/dotenv/-/dotenv-10.0.0.tgz", - "integrity": "sha512-rlBi9d8jpv9Sf1klPjNfFAuWDjKLwTIJJ/VxtoTwIR6hnZxcEOQCZg2oIL3MWBYw5GpUDKOEnND7LXTbIpQ03Q==", - "dev": true, - "peer": true, - "engines": { - "node": ">=10" - } - }, - "node_modules/dotenv-expand": { - "version": "5.1.0", - "resolved": "https://registry.npmjs.org/dotenv-expand/-/dotenv-expand-5.1.0.tgz", - "integrity": "sha512-YXQl1DSa4/PQyRfgrv6aoNjhasp/p4qs9FjJ4q4cQk+8m4r6k4ZSiEyytKG8f8W9gi8WsQtIObNmKd+tMzNTmA==", - "dev": true, - "peer": true - }, - "node_modules/dunder-proto": { - "version": "1.0.1", - "resolved": "https://registry.npmjs.org/dunder-proto/-/dunder-proto-1.0.1.tgz", - "integrity": "sha512-KIN/nDJBQRcXw0MLVhZE9iQHmG68qAVIBg9CqmUYjmQIhgij9U5MFvrqkUL5FbtyyzZuOeOt0zdeRe4UY7ct+A==", - "dev": true, - "dependencies": { - "call-bind-apply-helpers": "^1.0.1", - "es-errors": "^1.3.0", - "gopd": "^1.2.0" - }, - "engines": { - "node": ">= 0.4" - } - }, - "node_modules/duplexer": { - "version": "0.1.2", - "resolved": "https://registry.npmjs.org/duplexer/-/duplexer-0.1.2.tgz", - "integrity": "sha512-jtD6YG370ZCIi/9GTaJKQxWTZD045+4R4hTk/x1UyoqadyJ9x9CgSi1RlVDQF8U2sxLLSnFkCaMihqljHIWgMg==", - "dev": true, - "peer": true - }, - "node_modules/easy-stack": { - "version": "1.0.1", - "resolved": "https://registry.npmjs.org/easy-stack/-/easy-stack-1.0.1.tgz", - "integrity": "sha512-wK2sCs4feiiJeFXn3zvY0p41mdU5VUgbgs1rNsc/y5ngFUijdWd+iIN8eoyuZHKB8xN6BL4PdWmzqFmxNg6V2w==", - "dev": true, - "engines": { - "node": ">=6.0.0" - } - }, - "node_modules/ee-first": { - "version": "1.1.1", - "resolved": "https://registry.npmjs.org/ee-first/-/ee-first-1.1.1.tgz", - "integrity": "sha512-WMwm9LhRUo+WUaRN+vRuETqG89IgZphVSNkdFgeb6sS/E4OrDIN7t48CAewSHXc6C8lefD8KKfr5vY61brQlow==", - "dev": true, - "peer": true - }, - "node_modules/electron-to-chromium": { - "version": "1.5.165", - "resolved": "https://registry.npmjs.org/electron-to-chromium/-/electron-to-chromium-1.5.165.tgz", - "integrity": "sha512-naiMx1Z6Nb2TxPU6fiFrUrDTjyPMLdTtaOd2oLmG8zVSg2hCWGkhPyxwk+qRmZ1ytwVqUv0u7ZcDA5+ALhaUtw==", - "devOptional": true - }, - "node_modules/emoji-regex": { - "version": "8.0.0", - "resolved": "https://registry.npmjs.org/emoji-regex/-/emoji-regex-8.0.0.tgz", - "integrity": "sha512-MSjYzcWNOA0ewAHpz0MxpYFvwg6yjy1NG3xteoqz644VCo/RPgnr1/GGt+ic3iJTzQ8Eu3TdM14SawnVUmGE6A==", - 
"dev": true, - "peer": true - }, - "node_modules/emojis-list": { - "version": "3.0.0", - "resolved": "https://registry.npmjs.org/emojis-list/-/emojis-list-3.0.0.tgz", - "integrity": "sha512-/kyM18EfinwXZbno9FyUGeFh87KC8HRQBQGildHZbEuRyWFOmv1U10o9BBp8XVZDVNNuQKyIGIu5ZYAAXJ0V2Q==", - "devOptional": true, - "engines": { - "node": ">= 4" - } - }, - "node_modules/encodeurl": { - "version": "2.0.0", - "resolved": "https://registry.npmjs.org/encodeurl/-/encodeurl-2.0.0.tgz", - "integrity": "sha512-Q0n9HRi4m6JuGIV1eFlmvJB7ZEVxu93IrMyiMsGC0lrMJMWzRgx6WGquyfQgZVb31vhGgXnfmPNNXmxnOkRBrg==", - "dev": true, - "peer": true, - "engines": { - "node": ">= 0.8" - } - }, - "node_modules/end-of-stream": { - "version": "1.4.4", - "resolved": "https://registry.npmjs.org/end-of-stream/-/end-of-stream-1.4.4.tgz", - "integrity": "sha512-+uw1inIHVPQoaVuHzRyXd21icM+cnt4CzD5rW+NC1wjOUSTOs+Te7FOv7AhN7vS9x/oIyhLP5PR1H+phQAHu5Q==", - "dev": true, - "dependencies": { - "once": "^1.4.0" - } - }, - "node_modules/enhanced-resolve": { - "version": "5.18.1", - "resolved": "https://registry.npmjs.org/enhanced-resolve/-/enhanced-resolve-5.18.1.tgz", - "integrity": "sha512-ZSW3ma5GkcQBIpwZTSRAI8N71Uuwgs93IezB7mf7R60tC8ZbJideoDNKjHn2O9KIlx6rkGTTEk1xUCK2E1Y2Yg==", - "devOptional": true, - "dependencies": { - "graceful-fs": "^4.2.4", - "tapable": "^2.2.0" - }, - "engines": { - "node": ">=10.13.0" - } - }, "node_modules/entities": { "version": "4.5.0", "resolved": "https://registry.npmjs.org/entities/-/entities-4.5.0.tgz", @@ -5601,77 +1819,47 @@ "url": "https://github.com/fb55/entities?sponsor=1" } }, - "node_modules/error-ex": { - "version": "1.3.2", - "resolved": "https://registry.npmjs.org/error-ex/-/error-ex-1.3.2.tgz", - "integrity": "sha512-7dFHNmqeFSEt2ZBsCriorKnn3Z2pj+fd9kmI6QoWw4//DL+icEBfc0U7qJCisqrTsKTjw4fNFy2pW9OqStD84g==", - "dev": true, - "dependencies": { - "is-arrayish": "^0.2.1" - } - }, - "node_modules/error-stack-parser": { - "version": "2.1.4", - "resolved": "https://registry.npmjs.org/error-stack-parser/-/error-stack-parser-2.1.4.tgz", - "integrity": "sha512-Sk5V6wVazPhq5MhpO+AUxJn5x7XSXGl1R93Vn7i+zS15KDVxQijejNCrz8340/2bgLBjR9GtEG8ZVKONDjcqGQ==", - "dev": true, - "peer": true, - "dependencies": { - "stackframe": "^1.3.4" - } - }, - "node_modules/es-define-property": { - "version": "1.0.1", - "resolved": "https://registry.npmjs.org/es-define-property/-/es-define-property-1.0.1.tgz", - "integrity": "sha512-e3nRfgfUZ4rNGL232gUgX06QNyyez04KdjFrF+LTRoOXmrOgFKDg4BCdsjW8EnT69eqdYGmRpJwiPVYNrCaW3g==", - "dev": true, - "engines": { - "node": ">= 0.4" - } - }, - "node_modules/es-errors": { - "version": "1.3.0", - "resolved": "https://registry.npmjs.org/es-errors/-/es-errors-1.3.0.tgz", - "integrity": "sha512-Zf5H2Kxt2xjTvbJvP2ZWLEICxA6j+hAmMzIlypy4xcBg1vKVnx89Wy0GbS+kf5cwCVFFzdCFh2XSCFNULS6csw==", - "dev": true, - "engines": { - "node": ">= 0.4" - } - }, - "node_modules/es-module-lexer": { - "version": "1.7.0", - "resolved": "https://registry.npmjs.org/es-module-lexer/-/es-module-lexer-1.7.0.tgz", - "integrity": "sha512-jEQoCwk8hyb2AZziIOLhDqpm5+2ww5uIE6lkO/6jcOCusfk6LhMHpXXfBLXTZ7Ydyt0j4VoUQv6uGNYbdW+kBA==", - "devOptional": true - }, - "node_modules/es-object-atoms": { - "version": "1.1.1", - "resolved": "https://registry.npmjs.org/es-object-atoms/-/es-object-atoms-1.1.1.tgz", - "integrity": "sha512-FGgH2h8zKNim9ljj7dankFPcICIK9Cp5bm+c2gQSYePhpaG5+esrLODihIorn+Pe6FGJzWhXQotPv73jTaldXA==", - "dev": true, - "dependencies": { - "es-errors": "^1.3.0" + "node_modules/esbuild": { + "version": "0.25.8", + "resolved": 
"https://registry.npmjs.org/esbuild/-/esbuild-0.25.8.tgz", + "integrity": "sha512-vVC0USHGtMi8+R4Kz8rt6JhEWLxsv9Rnu/lGYbPR8u47B+DCBksq9JarW0zOO7bs37hyOK1l2/oqtbciutL5+Q==", + "devOptional": true, + "hasInstallScript": true, + "bin": { + "esbuild": "bin/esbuild" }, "engines": { - "node": ">= 0.4" + "node": ">=18" + }, + "optionalDependencies": { + "@esbuild/aix-ppc64": "0.25.8", + "@esbuild/android-arm": "0.25.8", + "@esbuild/android-arm64": "0.25.8", + "@esbuild/android-x64": "0.25.8", + "@esbuild/darwin-arm64": "0.25.8", + "@esbuild/darwin-x64": "0.25.8", + "@esbuild/freebsd-arm64": "0.25.8", + "@esbuild/freebsd-x64": "0.25.8", + "@esbuild/linux-arm": "0.25.8", + "@esbuild/linux-arm64": "0.25.8", + "@esbuild/linux-ia32": "0.25.8", + "@esbuild/linux-loong64": "0.25.8", + "@esbuild/linux-mips64el": "0.25.8", + "@esbuild/linux-ppc64": "0.25.8", + "@esbuild/linux-riscv64": "0.25.8", + "@esbuild/linux-s390x": "0.25.8", + "@esbuild/linux-x64": "0.25.8", + "@esbuild/netbsd-arm64": "0.25.8", + "@esbuild/netbsd-x64": "0.25.8", + "@esbuild/openbsd-arm64": "0.25.8", + "@esbuild/openbsd-x64": "0.25.8", + "@esbuild/openharmony-arm64": "0.25.8", + "@esbuild/sunos-x64": "0.25.8", + "@esbuild/win32-arm64": "0.25.8", + "@esbuild/win32-ia32": "0.25.8", + "@esbuild/win32-x64": "0.25.8" } }, - "node_modules/escalade": { - "version": "3.2.0", - "resolved": "https://registry.npmjs.org/escalade/-/escalade-3.2.0.tgz", - "integrity": "sha512-WUj2qlxaQtO4g6Pq5c29GTcWGDyd8itL8zTlipgECz3JesAiiOKotd8JU6otB3PACgG6xkJUyVhboMS+bje/jA==", - "devOptional": true, - "engines": { - "node": ">=6" - } - }, - "node_modules/escape-html": { - "version": "1.0.3", - "resolved": "https://registry.npmjs.org/escape-html/-/escape-html-1.0.3.tgz", - "integrity": "sha512-NiSupZ4OeuGwr68lGIeym/ksIZMJodUGOSCZ/FSnTxcrekbvqrgdUxlJOMpijaKZVjAJrWrGs/6Jy8OMuyj9ow==", - "dev": true, - "peer": true - }, "node_modules/escape-string-regexp": { "version": "4.0.0", "resolved": "https://registry.npmjs.org/escape-string-regexp/-/escape-string-regexp-4.0.0.tgz", @@ -5685,19 +1873,19 @@ } }, "node_modules/eslint": { - "version": "9.28.0", - "resolved": "https://registry.npmjs.org/eslint/-/eslint-9.28.0.tgz", - "integrity": "sha512-ocgh41VhRlf9+fVpe7QKzwLj9c92fDiqOj8Y3Sd4/ZmVA4Btx4PlUYPq4pp9JDyupkf1upbEXecxL2mwNV7jPQ==", + "version": "9.32.0", + "resolved": "https://registry.npmjs.org/eslint/-/eslint-9.32.0.tgz", + "integrity": "sha512-LSehfdpgMeWcTZkWZVIJl+tkZ2nuSkyyB9C27MZqFWXuph7DvaowgcTvKqxvpLW1JZIk8PN7hFY3Rj9LQ7m7lg==", "dev": true, "dependencies": { "@eslint-community/eslint-utils": "^4.2.0", "@eslint-community/regexpp": "^4.12.1", - "@eslint/config-array": "^0.20.0", - "@eslint/config-helpers": "^0.2.1", - "@eslint/core": "^0.14.0", + "@eslint/config-array": "^0.21.0", + "@eslint/config-helpers": "^0.3.0", + "@eslint/core": "^0.15.0", "@eslint/eslintrc": "^3.3.1", - "@eslint/js": "9.28.0", - "@eslint/plugin-kit": "^0.3.1", + "@eslint/js": "9.32.0", + "@eslint/plugin-kit": "^0.3.4", "@humanfs/node": "^0.16.6", "@humanwhocodes/module-importer": "^1.0.1", "@humanwhocodes/retry": "^0.4.2", @@ -5708,9 +1896,9 @@ "cross-spawn": "^7.0.6", "debug": "^4.3.2", "escape-string-regexp": "^4.0.0", - "eslint-scope": "^8.3.0", - "eslint-visitor-keys": "^4.2.0", - "espree": "^10.3.0", + "eslint-scope": "^8.4.0", + "eslint-visitor-keys": "^4.2.1", + "espree": "^10.4.0", "esquery": "^1.5.0", "esutils": "^2.0.2", "fast-deep-equal": "^3.1.3", @@ -5745,9 +1933,9 @@ } }, "node_modules/eslint-plugin-vue": { - "version": "10.2.0", - "resolved": 
"https://registry.npmjs.org/eslint-plugin-vue/-/eslint-plugin-vue-10.2.0.tgz", - "integrity": "sha512-tl9s+KN3z0hN2b8fV2xSs5ytGl7Esk1oSCxULLwFcdaElhZ8btYYZFrWxvh4En+czrSDtuLCeCOGa8HhEZuBdQ==", + "version": "10.4.0", + "resolved": "https://registry.npmjs.org/eslint-plugin-vue/-/eslint-plugin-vue-10.4.0.tgz", + "integrity": "sha512-K6tP0dW8FJVZLQxa2S7LcE1lLw3X8VvB3t887Q6CLrFVxHYBXGANbXvwNzYIu6Ughx1bSJ5BDT0YB3ybPT39lw==", "dev": true, "dependencies": { "@eslint-community/eslint-utils": "^4.4.0", @@ -5761,64 +1949,20 @@ "node": "^18.18.0 || ^20.9.0 || >=21.1.0" }, "peerDependencies": { + "@typescript-eslint/parser": "^7.0.0 || ^8.0.0", "eslint": "^8.57.0 || ^9.0.0", "vue-eslint-parser": "^10.0.0" - } - }, - "node_modules/eslint-plugin-vue/node_modules/semver": { - "version": "7.7.2", - "resolved": "https://registry.npmjs.org/semver/-/semver-7.7.2.tgz", - "integrity": "sha512-RF0Fw+rO5AMf9MAyaRXI4AV0Ulj5lMHqVxxdSgiVbixSCXoEmmX/jk0CuJw4+3SqroYO9VoUh+HcuJivvtJemA==", - "dev": true, - "bin": { - "semver": "bin/semver.js" }, - "engines": { - "node": ">=10" + "peerDependenciesMeta": { + "@typescript-eslint/parser": { + "optional": true + } } }, "node_modules/eslint-scope": { - "version": "5.1.1", - "resolved": "https://registry.npmjs.org/eslint-scope/-/eslint-scope-5.1.1.tgz", - "integrity": "sha512-2NxwbF/hZ0KpepYN0cNbo+FN6XoK7GaHlQhgx/hIZl6Va0bF45RQOOwhLIy8lQDbuCiadSLCBnH2CFYquit5bw==", - "devOptional": true, - "dependencies": { - "esrecurse": "^4.3.0", - "estraverse": "^4.1.1" - }, - "engines": { - "node": ">=8.0.0" - } - }, - "node_modules/eslint-visitor-keys": { - "version": "2.1.0", - "resolved": "https://registry.npmjs.org/eslint-visitor-keys/-/eslint-visitor-keys-2.1.0.tgz", - "integrity": "sha512-0rSmRBzXgDzIsD6mGdJgevzgezI534Cer5L/vyMX0kHzT/jiB43jRhd9YUlMGYLQy2zprNmoT8qasCGtY+QaKw==", - "dev": true, - "engines": { - "node": ">=10" - } - }, - "node_modules/eslint/node_modules/chalk": { - "version": "4.1.2", - "resolved": "https://registry.npmjs.org/chalk/-/chalk-4.1.2.tgz", - "integrity": "sha512-oKnbhFyRIXpUuez8iBMmyEa4nbj4IOQyuhc/wy9kY7/WVPcwIO9VA668Pu8RkO7+0G76SLROeyw9CpQ061i4mA==", - "dev": true, - "dependencies": { - "ansi-styles": "^4.1.0", - "supports-color": "^7.1.0" - }, - "engines": { - "node": ">=10" - }, - "funding": { - "url": "https://github.com/chalk/chalk?sponsor=1" - } - }, - "node_modules/eslint/node_modules/eslint-scope": { - "version": "8.3.0", - "resolved": "https://registry.npmjs.org/eslint-scope/-/eslint-scope-8.3.0.tgz", - "integrity": "sha512-pUNxi75F8MJ/GdeKtVLSbYg4ZI34J6C0C7sbL4YOp2exGwen7ZsuBqKzUhXd0qMQ362yET3z+uPwKeg/0C2XCQ==", + "version": "8.4.0", + "resolved": "https://registry.npmjs.org/eslint-scope/-/eslint-scope-8.4.0.tgz", + "integrity": "sha512-sNXOfKCn74rt8RICKMvJS7XKV/Xk9kA7DyJr8mJik3S7Cwgy3qlkkmyS2uQB3jiJg6VNdZd/pDBJu0nvG2NlTg==", "dev": true, "dependencies": { "esrecurse": "^4.3.0", @@ -5831,10 +1975,10 @@ "url": "https://opencollective.com/eslint" } }, - "node_modules/eslint/node_modules/eslint-visitor-keys": { - "version": "4.2.0", - "resolved": "https://registry.npmjs.org/eslint-visitor-keys/-/eslint-visitor-keys-4.2.0.tgz", - "integrity": "sha512-UyLnSehNt62FFhSwjZlHmeokpRK59rcz29j+F1/aDgbkbRTk7wIc9XzdoasMUbRNKDM0qQt/+BJ4BrpFeABemw==", + "node_modules/eslint-visitor-keys": { + "version": "4.2.1", + "resolved": "https://registry.npmjs.org/eslint-visitor-keys/-/eslint-visitor-keys-4.2.1.tgz", + "integrity": "sha512-Uhdk5sfqcee/9H/rCOJikYz67o0a2Tw2hGRPOG2Y1R2dg7brRe1uG0yaNQDHu+TO/uQPF/5eCapvYSmHUjt7JQ==", "dev": true, "engines": { "node": "^18.18.0 
|| ^20.9.0 || >=21.1.0" @@ -5843,24 +1987,15 @@ "url": "https://opencollective.com/eslint" } }, - "node_modules/eslint/node_modules/estraverse": { - "version": "5.3.0", - "resolved": "https://registry.npmjs.org/estraverse/-/estraverse-5.3.0.tgz", - "integrity": "sha512-MMdARuVEQziNTeJD8DgMqmhwR11BRQ/cBP+pLtYdSTnf3MIO8fFeiINEbX36ZdNlfU/7A9f3gUw49B3oQsvwBA==", - "dev": true, - "engines": { - "node": ">=4.0" - } - }, "node_modules/espree": { - "version": "10.3.0", - "resolved": "https://registry.npmjs.org/espree/-/espree-10.3.0.tgz", - "integrity": "sha512-0QYC8b24HWY8zjRnDTL6RiHfDbAWn63qb4LMj1Z4b076A4une81+z03Kg7l7mn/48PUTqoLptSXez8oknU8Clg==", + "version": "10.4.0", + "resolved": "https://registry.npmjs.org/espree/-/espree-10.4.0.tgz", + "integrity": "sha512-j6PAQ2uUr79PZhBjP5C5fhl8e39FmRnOjsD5lGnWrFU8i2G776tBK7+nP8KuQUTTyAZUwfQqXAgrVH5MbH9CYQ==", "dev": true, "dependencies": { - "acorn": "^8.14.0", + "acorn": "^8.15.0", "acorn-jsx": "^5.3.2", - "eslint-visitor-keys": "^4.2.0" + "eslint-visitor-keys": "^4.2.1" }, "engines": { "node": "^18.18.0 || ^20.9.0 || >=21.1.0" @@ -5869,18 +2004,6 @@ "url": "https://opencollective.com/eslint" } }, - "node_modules/espree/node_modules/eslint-visitor-keys": { - "version": "4.2.0", - "resolved": "https://registry.npmjs.org/eslint-visitor-keys/-/eslint-visitor-keys-4.2.0.tgz", - "integrity": "sha512-UyLnSehNt62FFhSwjZlHmeokpRK59rcz29j+F1/aDgbkbRTk7wIc9XzdoasMUbRNKDM0qQt/+BJ4BrpFeABemw==", - "dev": true, - "engines": { - "node": "^18.18.0 || ^20.9.0 || >=21.1.0" - }, - "funding": { - "url": "https://opencollective.com/eslint" - } - }, "node_modules/esquery": { "version": "1.6.0", "resolved": "https://registry.npmjs.org/esquery/-/esquery-1.6.0.tgz", @@ -5893,20 +2016,11 @@ "node": ">=0.10" } }, - "node_modules/esquery/node_modules/estraverse": { - "version": "5.3.0", - "resolved": "https://registry.npmjs.org/estraverse/-/estraverse-5.3.0.tgz", - "integrity": "sha512-MMdARuVEQziNTeJD8DgMqmhwR11BRQ/cBP+pLtYdSTnf3MIO8fFeiINEbX36ZdNlfU/7A9f3gUw49B3oQsvwBA==", - "dev": true, - "engines": { - "node": ">=4.0" - } - }, "node_modules/esrecurse": { "version": "4.3.0", "resolved": "https://registry.npmjs.org/esrecurse/-/esrecurse-4.3.0.tgz", "integrity": "sha512-KmfKL3b6G+RXvP8N1vr3Tq1kL/oCFgn2NYXEtqP8/L3pKapUA4G8cFVaoF3SU323CD4XypR/ffioHmkti6/Tag==", - "devOptional": true, + "dev": true, "dependencies": { "estraverse": "^5.2.0" }, @@ -5914,20 +2028,11 @@ "node": ">=4.0" } }, - "node_modules/esrecurse/node_modules/estraverse": { + "node_modules/estraverse": { "version": "5.3.0", "resolved": "https://registry.npmjs.org/estraverse/-/estraverse-5.3.0.tgz", "integrity": "sha512-MMdARuVEQziNTeJD8DgMqmhwR11BRQ/cBP+pLtYdSTnf3MIO8fFeiINEbX36ZdNlfU/7A9f3gUw49B3oQsvwBA==", - "devOptional": true, - "engines": { - "node": ">=4.0" - } - }, - "node_modules/estraverse": { - "version": "4.3.0", - "resolved": "https://registry.npmjs.org/estraverse/-/estraverse-4.3.0.tgz", - "integrity": "sha512-39nnKffWz8xN1BU/2c79n9nB9HDzo0niYUqx6xyqUnyoAnQyyWpOTdZEeiCch8BBu515t4wp9ZmgVfVhn9EBpw==", - "devOptional": true, + "dev": true, "engines": { "node": ">=4.0" } @@ -5946,231 +2051,17 @@ "node": ">=0.10.0" } }, - "node_modules/etag": { - "version": "1.8.1", - "resolved": "https://registry.npmjs.org/etag/-/etag-1.8.1.tgz", - "integrity": "sha512-aIL5Fx7mawVa300al2BnEE4iNvo1qETxLrPI/o05L7z6go7fCw1J6EQmbK4FmJ2AS7kgVF/KEZWufBfdClMcPg==", - "dev": true, - "peer": true, - "engines": { - "node": ">= 0.6" - } - }, - "node_modules/event-pubsub": { - "version": "4.3.0", - "resolved": 
"https://registry.npmjs.org/event-pubsub/-/event-pubsub-4.3.0.tgz", - "integrity": "sha512-z7IyloorXvKbFx9Bpie2+vMJKKx1fH1EN5yiTfp8CiLOTptSYy1g8H4yDpGlEdshL1PBiFtBHepF2cNsqeEeFQ==", - "dev": true, - "engines": { - "node": ">=4.0.0" - } - }, - "node_modules/eventemitter3": { - "version": "4.0.7", - "resolved": "https://registry.npmjs.org/eventemitter3/-/eventemitter3-4.0.7.tgz", - "integrity": "sha512-8guHBZCwKnFhYdHr2ysuRWErTwhoN2X8XELRlrRwpmfeY2jjuUN4taQMsULKUVo1K4DvZl+0pgfyoysHxvmvEw==", - "dev": true, - "peer": true - }, - "node_modules/events": { - "version": "3.3.0", - "resolved": "https://registry.npmjs.org/events/-/events-3.3.0.tgz", - "integrity": "sha512-mQw+2fkQbALzQ7V0MY0IqdnXNOeTtP4r0lN9z7AAawCXgqea7bDii20AYrIBrFd/Hx0M2Ocz6S111CaFkUcb0Q==", - "devOptional": true, - "engines": { - "node": ">=0.8.x" - } - }, - "node_modules/execa": { - "version": "1.0.0", - "resolved": "https://registry.npmjs.org/execa/-/execa-1.0.0.tgz", - "integrity": "sha512-adbxcyWV46qiHyvSp50TKt05tB4tK3HcmF7/nxfAdhnox83seTDbwnaqKO4sXRy7roHAIFqJP/Rw/AuEbX61LA==", - "dev": true, - "dependencies": { - "cross-spawn": "^6.0.0", - "get-stream": "^4.0.0", - "is-stream": "^1.1.0", - "npm-run-path": "^2.0.0", - "p-finally": "^1.0.0", - "signal-exit": "^3.0.0", - "strip-eof": "^1.0.0" - }, - "engines": { - "node": ">=6" - } - }, - "node_modules/execa/node_modules/cross-spawn": { - "version": "6.0.6", - "resolved": "https://registry.npmjs.org/cross-spawn/-/cross-spawn-6.0.6.tgz", - "integrity": "sha512-VqCUuhcd1iB+dsv8gxPttb5iZh/D0iubSP21g36KXdEuf6I5JiioesUVjpCdHV9MZRUfVFlvwtIUyPfxo5trtw==", - "dev": true, - "dependencies": { - "nice-try": "^1.0.4", - "path-key": "^2.0.1", - "semver": "^5.5.0", - "shebang-command": "^1.2.0", - "which": "^1.2.9" - }, - "engines": { - "node": ">=4.8" - } - }, - "node_modules/execa/node_modules/path-key": { - "version": "2.0.1", - "resolved": "https://registry.npmjs.org/path-key/-/path-key-2.0.1.tgz", - "integrity": "sha512-fEHGKCSmUSDPv4uoj8AlD+joPlq3peND+HRYyxFz4KPw4z926S/b8rIuFs2FYJg3BwsxJf6A9/3eIdLaYC+9Dw==", - "dev": true, - "engines": { - "node": ">=4" - } - }, - "node_modules/execa/node_modules/semver": { - "version": "5.7.2", - "resolved": "https://registry.npmjs.org/semver/-/semver-5.7.2.tgz", - "integrity": "sha512-cBznnQ9KjJqU67B52RMC65CMarK2600WFnbkcaiwWq3xy/5haFJlshgnpjovMVJ+Hff49d8GEn0b87C5pDQ10g==", - "dev": true, - "bin": { - "semver": "bin/semver" - } - }, - "node_modules/execa/node_modules/shebang-command": { - "version": "1.2.0", - "resolved": "https://registry.npmjs.org/shebang-command/-/shebang-command-1.2.0.tgz", - "integrity": "sha512-EV3L1+UQWGor21OmnvojK36mhg+TyIKDh3iFBKBohr5xeXIhNBcx8oWdgkTEEQ+BEFFYdLRuqMfd5L84N1V5Vg==", - "dev": true, - "dependencies": { - "shebang-regex": "^1.0.0" - }, - "engines": { - "node": ">=0.10.0" - } - }, - "node_modules/execa/node_modules/shebang-regex": { - "version": "1.0.0", - "resolved": "https://registry.npmjs.org/shebang-regex/-/shebang-regex-1.0.0.tgz", - "integrity": "sha512-wpoSFAxys6b2a2wHZ1XpDSgD7N9iVjg29Ph9uV/uaP9Ex/KXlkTZTeddxDPSYQpgvzKLGJke2UU0AzoGCjNIvQ==", - "dev": true, - "engines": { - "node": ">=0.10.0" - } - }, - "node_modules/execa/node_modules/which": { - "version": "1.3.1", - "resolved": "https://registry.npmjs.org/which/-/which-1.3.1.tgz", - "integrity": "sha512-HxJdYWq1MTIQbJ3nw0cqssHoTNU267KlrDuGZ1WYlxDStUtKUhOaJmh112/TZmHxxUfuJqPXSOm7tDyas0OSIQ==", - "dev": true, - "dependencies": { - "isexe": "^2.0.0" - }, - "bin": { - "which": "bin/which" - } - }, - "node_modules/express": { - "version": "4.21.2", - 
"resolved": "https://registry.npmjs.org/express/-/express-4.21.2.tgz", - "integrity": "sha512-28HqgMZAmih1Czt9ny7qr6ek2qddF4FclbMzwhCREB6OFfH+rXAnuNCwo1/wFvrtbgsQDb4kSbX9de9lFbrXnA==", - "dev": true, - "peer": true, - "dependencies": { - "accepts": "~1.3.8", - "array-flatten": "1.1.1", - "body-parser": "1.20.3", - "content-disposition": "0.5.4", - "content-type": "~1.0.4", - "cookie": "0.7.1", - "cookie-signature": "1.0.6", - "debug": "2.6.9", - "depd": "2.0.0", - "encodeurl": "~2.0.0", - "escape-html": "~1.0.3", - "etag": "~1.8.1", - "finalhandler": "1.3.1", - "fresh": "0.5.2", - "http-errors": "2.0.0", - "merge-descriptors": "1.0.3", - "methods": "~1.1.2", - "on-finished": "2.4.1", - "parseurl": "~1.3.3", - "path-to-regexp": "0.1.12", - "proxy-addr": "~2.0.7", - "qs": "6.13.0", - "range-parser": "~1.2.1", - "safe-buffer": "5.2.1", - "send": "0.19.0", - "serve-static": "1.16.2", - "setprototypeof": "1.2.0", - "statuses": "2.0.1", - "type-is": "~1.6.18", - "utils-merge": "1.0.1", - "vary": "~1.1.2" - }, - "engines": { - "node": ">= 0.10.0" - }, - "funding": { - "type": "opencollective", - "url": "https://opencollective.com/express" - } - }, - "node_modules/express/node_modules/debug": { - "version": "2.6.9", - "resolved": "https://registry.npmjs.org/debug/-/debug-2.6.9.tgz", - "integrity": "sha512-bC7ElrdJaJnPbAP+1EotYvqZsb3ecl5wi6Bfi6BJTUcNowp6cvspg0jXznRTKDjm/E7AdgFBVeAPVMNcKGsHMA==", - "dev": true, - "peer": true, - "dependencies": { - "ms": "2.0.0" - } - }, - "node_modules/express/node_modules/ms": { - "version": "2.0.0", - "resolved": "https://registry.npmjs.org/ms/-/ms-2.0.0.tgz", - "integrity": "sha512-Tpp60P6IUJDTuOq/5Z8cdskzJujfwqfOTkrwIwj7IRISpnkJnT6SyJ4PCPnGMoFjC9ddhal5KVIYtAt97ix05A==", - "dev": true, - "peer": true - }, "node_modules/fast-deep-equal": { "version": "3.1.3", "resolved": "https://registry.npmjs.org/fast-deep-equal/-/fast-deep-equal-3.1.3.tgz", "integrity": "sha512-f3qQ9oQy9j2AhBe/H9VC91wLmKBCCU/gDOnKNAYG5hswO7BLKj09Hc5HYNz9cGI++xlpDCIgDaitVs03ATR84Q==", - "devOptional": true - }, - "node_modules/fast-glob": { - "version": "3.3.3", - "resolved": "https://registry.npmjs.org/fast-glob/-/fast-glob-3.3.3.tgz", - "integrity": "sha512-7MptL8U0cqcFdzIzwOTHoilX9x5BrNqye7Z/LuC7kCMRio1EMSyqRK3BEAUD7sXRq4iT4AzTVuZdhgQ2TCvYLg==", - "dev": true, - "peer": true, - "dependencies": { - "@nodelib/fs.stat": "^2.0.2", - "@nodelib/fs.walk": "^1.2.3", - "glob-parent": "^5.1.2", - "merge2": "^1.3.0", - "micromatch": "^4.0.8" - }, - "engines": { - "node": ">=8.6.0" - } - }, - "node_modules/fast-glob/node_modules/glob-parent": { - "version": "5.1.2", - "resolved": "https://registry.npmjs.org/glob-parent/-/glob-parent-5.1.2.tgz", - "integrity": "sha512-AOIgSQCepiJYwP3ARnGx+5VnTu2HBYdzbGP45eLw1vr3zB3vZLeyed1sC9hnbcOc9/SrMyM5RPQrkGz4aS9Zow==", - "dev": true, - "peer": true, - "dependencies": { - "is-glob": "^4.0.1" - }, - "engines": { - "node": ">= 6" - } + "dev": true }, "node_modules/fast-json-stable-stringify": { "version": "2.1.0", "resolved": "https://registry.npmjs.org/fast-json-stable-stringify/-/fast-json-stable-stringify-2.1.0.tgz", "integrity": "sha512-lhd/wF+Lk98HZoTCtlVraHtfh5XYijIjalXck7saUtuanSDyLMxnHhSXEDJqHxD7msR8D0uCmqlkwjCV8xvwHw==", - "devOptional": true + "dev": true }, "node_modules/fast-levenshtein": { "version": "2.0.6", @@ -6178,66 +2069,18 @@ "integrity": "sha512-DCXu6Ifhqcks7TZKY3Hxp3y6qphY5SJZmrWMDrKcERSOXWQdMhU9Ig/PYrzyw/ul9jOIyh0N4M0tbC5hodg8dw==", "dev": true }, - "node_modules/fast-uri": { - "version": "3.0.6", - "resolved": 
"https://registry.npmjs.org/fast-uri/-/fast-uri-3.0.6.tgz", - "integrity": "sha512-Atfo14OibSv5wAp4VWNsFYE1AchQRTv9cBGWET4pZWHzYshFSS9NQI6I57rdKn9croWVMbYFbLhJ+yJvmZIIHw==", + "node_modules/fdir": { + "version": "6.4.6", + "resolved": "https://registry.npmjs.org/fdir/-/fdir-6.4.6.tgz", + "integrity": "sha512-hiFoqpyZcfNm1yc4u8oWCf9A2c4D3QjCrks3zmoVKVxpQRzmPNar1hUJcBG2RQHvEVGDN+Jm81ZheVLAQMK6+w==", "devOptional": true, - "funding": [ - { - "type": "github", - "url": "https://github.com/sponsors/fastify" - }, - { - "type": "opencollective", - "url": "https://opencollective.com/fastify" + "peerDependencies": { + "picomatch": "^3 || ^4" + }, + "peerDependenciesMeta": { + "picomatch": { + "optional": true } - ] - }, - "node_modules/fastq": { - "version": "1.19.1", - "resolved": "https://registry.npmjs.org/fastq/-/fastq-1.19.1.tgz", - "integrity": "sha512-GwLTyxkCXjXbxqIhTsMI2Nui8huMPtnxg7krajPJAjnEG/iiOS7i+zCtWGZR9G0NBKbXKh6X9m9UIsYX/N6vvQ==", - "dev": true, - "peer": true, - "dependencies": { - "reusify": "^1.0.4" - } - }, - "node_modules/faye-websocket": { - "version": "0.11.4", - "resolved": "https://registry.npmjs.org/faye-websocket/-/faye-websocket-0.11.4.tgz", - "integrity": "sha512-CzbClwlXAuiRQAlUyfqPgvPoNKTckTPGfwZV4ZdAhVcP2lh9KUxJg2b5GkE7XbjKQ3YJnQ9z6D9ntLAlB+tP8g==", - "dev": true, - "peer": true, - "dependencies": { - "websocket-driver": ">=0.5.1" - }, - "engines": { - "node": ">=0.8.0" - } - }, - "node_modules/figures": { - "version": "2.0.0", - "resolved": "https://registry.npmjs.org/figures/-/figures-2.0.0.tgz", - "integrity": "sha512-Oa2M9atig69ZkfwiApY8F2Yy+tzMbazyvqv21R0NsSC8floSOC09BbT1ITWAdoMGQvJ/aZnR1KMwdx9tvHnTNA==", - "dev": true, - "peer": true, - "dependencies": { - "escape-string-regexp": "^1.0.5" - }, - "engines": { - "node": ">=4" - } - }, - "node_modules/figures/node_modules/escape-string-regexp": { - "version": "1.0.5", - "resolved": "https://registry.npmjs.org/escape-string-regexp/-/escape-string-regexp-1.0.5.tgz", - "integrity": "sha512-vbRorB5FUQWvla16U8R/qgaFIya2qGzwDrNmCZuYKrbdSUMG6I1ZCGQRefkRVhuOkIGVne7BQ35DSfo1qvJqFg==", - "dev": true, - "peer": true, - "engines": { - "node": ">=0.8.0" } }, "node_modules/file-entry-cache": { @@ -6252,124 +2095,6 @@ "node": ">=16.0.0" } }, - "node_modules/file-loader": { - "version": "6.2.0", - "resolved": "https://registry.npmjs.org/file-loader/-/file-loader-6.2.0.tgz", - "integrity": "sha512-qo3glqyTa61Ytg4u73GultjHGjdRyig3tG6lPtyX/jOEJvHif9uB0/OCI2Kif6ctF3caQTW2G5gym21oAsI4pw==", - "devOptional": true, - "dependencies": { - "loader-utils": "^2.0.0", - "schema-utils": "^3.0.0" - }, - "engines": { - "node": ">= 10.13.0" - }, - "funding": { - "type": "opencollective", - "url": "https://opencollective.com/webpack" - }, - "peerDependencies": { - "webpack": "^4.0.0 || ^5.0.0" - } - }, - "node_modules/file-loader/node_modules/loader-utils": { - "version": "2.0.4", - "resolved": "https://registry.npmjs.org/loader-utils/-/loader-utils-2.0.4.tgz", - "integrity": "sha512-xXqpXoINfFhgua9xiqD8fPFHgkoq1mmmpE92WlDbm9rNRd/EbRb+Gqf908T2DMfuHjjJlksiK2RbHVOdD/MqSw==", - "devOptional": true, - "dependencies": { - "big.js": "^5.2.2", - "emojis-list": "^3.0.0", - "json5": "^2.1.2" - }, - "engines": { - "node": ">=8.9.0" - } - }, - "node_modules/file-loader/node_modules/schema-utils": { - "version": "3.3.0", - "resolved": "https://registry.npmjs.org/schema-utils/-/schema-utils-3.3.0.tgz", - "integrity": "sha512-pN/yOAvcC+5rQ5nERGuwrjLlYvLTbCibnZ1I7B1LaiAz9BRBlE9GMgE/eqV30P7aJQUf7Ddimy/RsbYO/GrVGg==", - "devOptional": true, - "dependencies": { - 
"@types/json-schema": "^7.0.8", - "ajv": "^6.12.5", - "ajv-keywords": "^3.5.2" - }, - "engines": { - "node": ">= 10.13.0" - }, - "funding": { - "type": "opencollective", - "url": "https://opencollective.com/webpack" - } - }, - "node_modules/fill-range": { - "version": "7.1.1", - "resolved": "https://registry.npmjs.org/fill-range/-/fill-range-7.1.1.tgz", - "integrity": "sha512-YsGpe3WHLK8ZYi4tWDg2Jy3ebRz2rXowDxnld4bkQB00cc/1Zw9AWnC0i9ztDJitivtQvaI9KaLyKrc+hBW0yg==", - "dev": true, - "peer": true, - "dependencies": { - "to-regex-range": "^5.0.1" - }, - "engines": { - "node": ">=8" - } - }, - "node_modules/finalhandler": { - "version": "1.3.1", - "resolved": "https://registry.npmjs.org/finalhandler/-/finalhandler-1.3.1.tgz", - "integrity": "sha512-6BN9trH7bp3qvnrRyzsBz+g3lZxTNZTbVO2EV1CS0WIcDbawYVdYvGflME/9QP0h0pYlCDBCTjYa9nZzMDpyxQ==", - "dev": true, - "peer": true, - "dependencies": { - "debug": "2.6.9", - "encodeurl": "~2.0.0", - "escape-html": "~1.0.3", - "on-finished": "2.4.1", - "parseurl": "~1.3.3", - "statuses": "2.0.1", - "unpipe": "~1.0.0" - }, - "engines": { - "node": ">= 0.8" - } - }, - "node_modules/finalhandler/node_modules/debug": { - "version": "2.6.9", - "resolved": "https://registry.npmjs.org/debug/-/debug-2.6.9.tgz", - "integrity": "sha512-bC7ElrdJaJnPbAP+1EotYvqZsb3ecl5wi6Bfi6BJTUcNowp6cvspg0jXznRTKDjm/E7AdgFBVeAPVMNcKGsHMA==", - "dev": true, - "peer": true, - "dependencies": { - "ms": "2.0.0" - } - }, - "node_modules/finalhandler/node_modules/ms": { - "version": "2.0.0", - "resolved": "https://registry.npmjs.org/ms/-/ms-2.0.0.tgz", - "integrity": "sha512-Tpp60P6IUJDTuOq/5Z8cdskzJujfwqfOTkrwIwj7IRISpnkJnT6SyJ4PCPnGMoFjC9ddhal5KVIYtAt97ix05A==", - "dev": true, - "peer": true - }, - "node_modules/find-cache-dir": { - "version": "3.3.2", - "resolved": "https://registry.npmjs.org/find-cache-dir/-/find-cache-dir-3.3.2.tgz", - "integrity": "sha512-wXZV5emFEjrridIgED11OoUKLxiYjAcqot/NJdAkOhlJ+vGzwhOAfcG5OX1jP+S0PcjEn8bdMJv+g2jwQ3Onig==", - "dev": true, - "dependencies": { - "commondir": "^1.0.1", - "make-dir": "^3.0.2", - "pkg-dir": "^4.1.0" - }, - "engines": { - "node": ">=8" - }, - "funding": { - "url": "https://github.com/avajs/find-cache-dir?sponsor=1" - } - }, "node_modules/find-up": { "version": "5.0.0", "resolved": "https://registry.npmjs.org/find-up/-/find-up-5.0.0.tgz", @@ -6386,16 +2111,6 @@ "url": "https://github.com/sponsors/sindresorhus" } }, - "node_modules/flat": { - "version": "5.0.2", - "resolved": "https://registry.npmjs.org/flat/-/flat-5.0.2.tgz", - "integrity": "sha512-b6suED+5/3rTpUBdG1gupIl8MPFCAMA0QXwmljLhvCUKcUvdE4gWky9zpuGCcXHOsz4J9wPGNWq6OKpmIzz3hQ==", - "dev": true, - "peer": true, - "bin": { - "flat": "cli.js" - } - }, "node_modules/flat-cache": { "version": "4.0.1", "resolved": "https://registry.npmjs.org/flat-cache/-/flat-cache-4.0.1.tgz", @@ -6415,203 +2130,19 @@ "integrity": "sha512-GX+ysw4PBCz0PzosHDepZGANEuFCMLrnRTiEy9McGjmkCQYwRq4A/X786G/fjM/+OjsWSU1ZrY5qyARZmO/uwg==", "dev": true }, - "node_modules/follow-redirects": { - "version": "1.15.9", - "resolved": "https://registry.npmjs.org/follow-redirects/-/follow-redirects-1.15.9.tgz", - "integrity": "sha512-gew4GsXizNgdoRyqmyfMHyAmXsZDk6mHkSxZFCzW9gwlbtOW44CDtYavM+y+72qD/Vq2l550kMF52DT8fOLJqQ==", - "dev": true, - "funding": [ - { - "type": "individual", - "url": "https://github.com/sponsors/RubenVerborgh" - } - ], - "peer": true, - "engines": { - "node": ">=4.0" - }, - "peerDependenciesMeta": { - "debug": { - "optional": true - } - } - }, - "node_modules/forwarded": { - "version": "0.2.0", - 
"resolved": "https://registry.npmjs.org/forwarded/-/forwarded-0.2.0.tgz", - "integrity": "sha512-buRG0fpBtRHSTCOASe6hD258tEubFoRLb4ZNA6NxMVHNw2gOcwHo9wyablzMzOA5z9xA9L1KNjk/Nt6MT9aYow==", - "dev": true, - "peer": true, - "engines": { - "node": ">= 0.6" - } - }, - "node_modules/fraction.js": { - "version": "4.3.7", - "resolved": "https://registry.npmjs.org/fraction.js/-/fraction.js-4.3.7.tgz", - "integrity": "sha512-ZsDfxO51wGAXREY55a7la9LScWpwv9RxIrYABrlvOFBlH/ShPnrtsXeuUIfXKKOVicNxQ+o8JTbJvjS4M89yew==", - "dev": true, - "peer": true, - "engines": { - "node": "*" - }, - "funding": { - "type": "patreon", - "url": "https://github.com/sponsors/rawify" - } - }, - "node_modules/fresh": { - "version": "0.5.2", - "resolved": "https://registry.npmjs.org/fresh/-/fresh-0.5.2.tgz", - "integrity": "sha512-zJ2mQYM18rEFOudeV4GShTGIQ7RbzA7ozbU9I/XBpm7kqgMywgmylMwXHxZJmkVoYkna9d2pVXVXPdYTP9ej8Q==", - "dev": true, - "peer": true, - "engines": { - "node": ">= 0.6" - } - }, - "node_modules/fs-extra": { - "version": "9.1.0", - "resolved": "https://registry.npmjs.org/fs-extra/-/fs-extra-9.1.0.tgz", - "integrity": "sha512-hcg3ZmepS30/7BSFqRvoo3DOMQu7IjqxO5nCDt+zM9XWjb33Wg7ziNT+Qvqbuc3+gWpzO02JubVyk2G4Zvo1OQ==", - "dev": true, - "peer": true, - "dependencies": { - "at-least-node": "^1.0.0", - "graceful-fs": "^4.2.0", - "jsonfile": "^6.0.1", - "universalify": "^2.0.0" - }, - "engines": { - "node": ">=10" - } - }, - "node_modules/fs-monkey": { - "version": "1.0.6", - "resolved": "https://registry.npmjs.org/fs-monkey/-/fs-monkey-1.0.6.tgz", - "integrity": "sha512-b1FMfwetIKymC0eioW7mTywihSQE4oLzQn1dB6rZB5fx/3NpNEdAWeCSMB+60/AeT0TCXsxzAlcYVEFCTAksWg==", - "dev": true, - "peer": true - }, - "node_modules/fs.realpath": { - "version": "1.0.0", - "resolved": "https://registry.npmjs.org/fs.realpath/-/fs.realpath-1.0.0.tgz", - "integrity": "sha512-OO0pH2lK6a0hZnAdau5ItzHPI6pUlvI7jMVnxUQRtw4owF2wk8lOSabtGDCTP4Ggrg2MbGnWO9X8K1t4+fGMDw==", - "dev": true - }, "node_modules/fsevents": { "version": "2.3.3", "resolved": "https://registry.npmjs.org/fsevents/-/fsevents-2.3.3.tgz", "integrity": "sha512-5xoDfX+fL7faATnagmWPpbFtwh/R77WmMMqqHGS65C3vvB0YHrgF+B1YmZ3441tMj5n63k0212XNoJwzlhffQw==", - "dev": true, "hasInstallScript": true, "optional": true, "os": [ "darwin" ], - "peer": true, "engines": { "node": "^8.16.0 || ^10.6.0 || >=11.0.0" } }, - "node_modules/function-bind": { - "version": "1.1.2", - "resolved": "https://registry.npmjs.org/function-bind/-/function-bind-1.1.2.tgz", - "integrity": "sha512-7XHNxH7qX9xG5mIwxkhumTox/MIRNcOgDrxWsMt2pAr23WHp6MrRlN7FBSFpCpr+oVO0F744iUgR82nJMfG2SA==", - "dev": true, - "funding": { - "url": "https://github.com/sponsors/ljharb" - } - }, - "node_modules/gensync": { - "version": "1.0.0-beta.2", - "resolved": "https://registry.npmjs.org/gensync/-/gensync-1.0.0-beta.2.tgz", - "integrity": "sha512-3hN7NaskYvMDLQY55gnW3NQ+mesEAepTqlg+VEbj7zzqEMBVNhzcGYYeqFo/TlYz6eQiFcp1HcsCZO+nGgS8zg==", - "dev": true, - "engines": { - "node": ">=6.9.0" - } - }, - "node_modules/get-caller-file": { - "version": "2.0.5", - "resolved": "https://registry.npmjs.org/get-caller-file/-/get-caller-file-2.0.5.tgz", - "integrity": "sha512-DyFP3BM/3YHTQOCUL/w0OZHR0lpKeGrxotcHWcqNEdnltqFwXVfhEBQ94eIo34AfQpo0rGki4cyIiftY06h2Fg==", - "dev": true, - "peer": true, - "engines": { - "node": "6.* || 8.* || >= 10.*" - } - }, - "node_modules/get-intrinsic": { - "version": "1.3.0", - "resolved": "https://registry.npmjs.org/get-intrinsic/-/get-intrinsic-1.3.0.tgz", - "integrity": 
"sha512-9fSjSaos/fRIVIp+xSJlE6lfwhES7LNtKaCBIamHsjr2na1BiABJPo0mOjjz8GJDURarmCPGqaiVg5mfjb98CQ==", - "dev": true, - "dependencies": { - "call-bind-apply-helpers": "^1.0.2", - "es-define-property": "^1.0.1", - "es-errors": "^1.3.0", - "es-object-atoms": "^1.1.1", - "function-bind": "^1.1.2", - "get-proto": "^1.0.1", - "gopd": "^1.2.0", - "has-symbols": "^1.1.0", - "hasown": "^2.0.2", - "math-intrinsics": "^1.1.0" - }, - "engines": { - "node": ">= 0.4" - }, - "funding": { - "url": "https://github.com/sponsors/ljharb" - } - }, - "node_modules/get-proto": { - "version": "1.0.1", - "resolved": "https://registry.npmjs.org/get-proto/-/get-proto-1.0.1.tgz", - "integrity": "sha512-sTSfBjoXBp89JvIKIefqw7U2CCebsc74kiY6awiGogKtoSGbgjYE/G/+l9sF3MWFPNc9IcoOC4ODfKHfxFmp0g==", - "dev": true, - "dependencies": { - "dunder-proto": "^1.0.1", - "es-object-atoms": "^1.0.0" - }, - "engines": { - "node": ">= 0.4" - } - }, - "node_modules/get-stream": { - "version": "4.1.0", - "resolved": "https://registry.npmjs.org/get-stream/-/get-stream-4.1.0.tgz", - "integrity": "sha512-GMat4EJ5161kIy2HevLlr4luNjBgvmj413KaQA7jt4V8B4RDsfpHk7WQ9GVqfYyyx8OS/L66Kox+rJRNklLK7w==", - "dev": true, - "dependencies": { - "pump": "^3.0.0" - }, - "engines": { - "node": ">=6" - } - }, - "node_modules/glob": { - "version": "7.2.3", - "resolved": "https://registry.npmjs.org/glob/-/glob-7.2.3.tgz", - "integrity": "sha512-nFR0zLpU2YCaRxwoCJvL6UvCH2JFyFVIvwTLsIf21AuHlMskA1hhTdk+LlYJtOlYt9v6dvszD2BGRqBL+iQK9Q==", - "deprecated": "Glob versions prior to v9 are no longer supported", - "dev": true, - "dependencies": { - "fs.realpath": "^1.0.0", - "inflight": "^1.0.4", - "inherits": "2", - "minimatch": "^3.1.1", - "once": "^1.3.0", - "path-is-absolute": "^1.0.0" - }, - "engines": { - "node": "*" - }, - "funding": { - "url": "https://github.com/sponsors/isaacs" - } - }, "node_modules/glob-parent": { "version": "6.0.2", "resolved": "https://registry.npmjs.org/glob-parent/-/glob-parent-6.0.2.tgz", @@ -6624,452 +2155,27 @@ "node": ">=10.13.0" } }, - "node_modules/glob-to-regexp": { - "version": "0.4.1", - "resolved": "https://registry.npmjs.org/glob-to-regexp/-/glob-to-regexp-0.4.1.tgz", - "integrity": "sha512-lkX1HJXwyMcprw/5YUZc2s7DrpAiHB21/V+E1rHUrVNokkvB6bqMzT0VfV6/86ZNabt1k14YOIaT7nDvOX3Iiw==", - "devOptional": true - }, "node_modules/globals": { - "version": "11.12.0", - "resolved": "https://registry.npmjs.org/globals/-/globals-11.12.0.tgz", - "integrity": "sha512-WOBp/EEGUiIsJSp7wcv/y6MO+lV9UoncWqxuFfm8eBwzWNgyfBd6Gz+IeKQ9jCmyhoH99g15M3T+QaVHFjizVA==", + "version": "14.0.0", + "resolved": "https://registry.npmjs.org/globals/-/globals-14.0.0.tgz", + "integrity": "sha512-oahGvuMGQlPw/ivIYBjVSrWAfWLBeku5tpPE2fOPLi+WHffIWbuh2tCjhyQhTBPMf5E9jDEH4FOmTYgYwbKwtQ==", "dev": true, "engines": { - "node": ">=4" - } - }, - "node_modules/globby": { - "version": "11.1.0", - "resolved": "https://registry.npmjs.org/globby/-/globby-11.1.0.tgz", - "integrity": "sha512-jhIXaOzy1sb8IyocaruWSn1TjmnBVs8Ayhcy83rmxNJ8q2uWKCAj3CnJY+KpGSXCueAPc0i05kVvVKtP1t9S3g==", - "dev": true, - "peer": true, - "dependencies": { - "array-union": "^2.1.0", - "dir-glob": "^3.0.1", - "fast-glob": "^3.2.9", - "ignore": "^5.2.0", - "merge2": "^1.4.1", - "slash": "^3.0.0" - }, - "engines": { - "node": ">=10" + "node": ">=18" }, "funding": { "url": "https://github.com/sponsors/sindresorhus" } }, - "node_modules/gopd": { - "version": "1.2.0", - "resolved": "https://registry.npmjs.org/gopd/-/gopd-1.2.0.tgz", - "integrity": 
"sha512-ZUKRh6/kUFoAiTAtTYPZJ3hw9wNxx+BIBOijnlG9PnrJsCcSjs1wyyD6vJpaYtgnzDrKYRSqf3OO6Rfa93xsRg==", - "dev": true, - "engines": { - "node": ">= 0.4" - }, - "funding": { - "url": "https://github.com/sponsors/ljharb" - } - }, - "node_modules/graceful-fs": { - "version": "4.2.11", - "resolved": "https://registry.npmjs.org/graceful-fs/-/graceful-fs-4.2.11.tgz", - "integrity": "sha512-RbJ5/jmFcNNCcDV5o9eTnBLJ/HszWV0P73bc+Ff4nS/rJj+YaS6IGyiOL0VoBYX+l1Wrl3k63h/KrH+nhJ0XvQ==", - "devOptional": true - }, - "node_modules/gzip-size": { - "version": "6.0.0", - "resolved": "https://registry.npmjs.org/gzip-size/-/gzip-size-6.0.0.tgz", - "integrity": "sha512-ax7ZYomf6jqPTQ4+XCpUGyXKHk5WweS+e05MBO4/y3WJ5RkmPXNKvX+bx1behVILVwr6JSQvZAku021CHPXG3Q==", - "dev": true, - "peer": true, - "dependencies": { - "duplexer": "^0.1.2" - }, - "engines": { - "node": ">=10" - }, - "funding": { - "url": "https://github.com/sponsors/sindresorhus" - } - }, - "node_modules/handle-thing": { - "version": "2.0.1", - "resolved": "https://registry.npmjs.org/handle-thing/-/handle-thing-2.0.1.tgz", - "integrity": "sha512-9Qn4yBxelxoh2Ow62nP+Ka/kMnOXRi8BXnRaUwezLNhqelnN49xKz4F/dPP8OYLxLxq6JDtZb2i9XznUQbNPTg==", - "dev": true, - "peer": true - }, "node_modules/has-flag": { "version": "4.0.0", "resolved": "https://registry.npmjs.org/has-flag/-/has-flag-4.0.0.tgz", "integrity": "sha512-EykJT/Q1KjTWctppgIAgfSO0tKVuZUjhgMr17kqTumMl6Afv3EISleU7qZUzoXDFTAHTDC4NOoG/ZxU3EvlMPQ==", - "devOptional": true, + "dev": true, "engines": { "node": ">=8" } }, - "node_modules/has-property-descriptors": { - "version": "1.0.2", - "resolved": "https://registry.npmjs.org/has-property-descriptors/-/has-property-descriptors-1.0.2.tgz", - "integrity": "sha512-55JNKuIW+vq4Ke1BjOTjM2YctQIvCT7GFzHwmfZPGo5wnrgkid0YQtnAleFSqumZm4az3n2BS+erby5ipJdgrg==", - "dev": true, - "dependencies": { - "es-define-property": "^1.0.0" - }, - "funding": { - "url": "https://github.com/sponsors/ljharb" - } - }, - "node_modules/has-symbols": { - "version": "1.1.0", - "resolved": "https://registry.npmjs.org/has-symbols/-/has-symbols-1.1.0.tgz", - "integrity": "sha512-1cDNdwJ2Jaohmb3sg4OmKaMBwuC48sYni5HUw2DvsC8LjGTLK9h+eb1X6RyuOHe4hT0ULCW68iomhjUoKUqlPQ==", - "dev": true, - "engines": { - "node": ">= 0.4" - }, - "funding": { - "url": "https://github.com/sponsors/ljharb" - } - }, - "node_modules/hash-sum": { - "version": "2.0.0", - "resolved": "https://registry.npmjs.org/hash-sum/-/hash-sum-2.0.0.tgz", - "integrity": "sha512-WdZTbAByD+pHfl/g9QSsBIIwy8IT+EsPiKDs0KNX+zSHhdDLFKdZu0BQHljvO+0QI/BasbMSUa8wYNCZTvhslg==", - "dev": true, - "peer": true - }, - "node_modules/hasown": { - "version": "2.0.2", - "resolved": "https://registry.npmjs.org/hasown/-/hasown-2.0.2.tgz", - "integrity": "sha512-0hJU9SCPvmMzIBdZFqNPXWa6dqh7WdH0cII9y+CyS8rG3nL48Bclra9HmKhVVUHyPWNH5Y7xDwAB7bfgSjkUMQ==", - "dev": true, - "dependencies": { - "function-bind": "^1.1.2" - }, - "engines": { - "node": ">= 0.4" - } - }, - "node_modules/he": { - "version": "1.2.0", - "resolved": "https://registry.npmjs.org/he/-/he-1.2.0.tgz", - "integrity": "sha512-F/1DnUGPopORZi0ni+CvrCgHQ5FyEAHRLSApuYWMmrbSwoN2Mn/7k+Gl38gJnR7yyDZk6WLXwiGod1JOWNDKGw==", - "dev": true, - "peer": true, - "bin": { - "he": "bin/he" - } - }, - "node_modules/highlight.js": { - "version": "10.7.3", - "resolved": "https://registry.npmjs.org/highlight.js/-/highlight.js-10.7.3.tgz", - "integrity": "sha512-tzcUFauisWKNHaRkN4Wjl/ZA07gENAjFl3J/c480dprkGTg5EQstgaNFqBfUqCq54kZRIEcreTsAgF/m2quD7A==", - "dev": true, - "peer": true, - "engines": { - "node": "*" - } - }, - 
"node_modules/hosted-git-info": { - "version": "2.8.9", - "resolved": "https://registry.npmjs.org/hosted-git-info/-/hosted-git-info-2.8.9.tgz", - "integrity": "sha512-mxIDAb9Lsm6DoOJ7xH+5+X4y1LU/4Hi50L9C5sIswK3JzULS4bwk1FvjdBgvYR4bzT4tuUQiC15FE2f5HbLvYw==", - "dev": true - }, - "node_modules/hpack.js": { - "version": "2.1.6", - "resolved": "https://registry.npmjs.org/hpack.js/-/hpack.js-2.1.6.tgz", - "integrity": "sha512-zJxVehUdMGIKsRaNt7apO2Gqp0BdqW5yaiGHXXmbpvxgBYVZnAql+BJb4RO5ad2MgpbZKn5G6nMnegrH1FcNYQ==", - "dev": true, - "peer": true, - "dependencies": { - "inherits": "^2.0.1", - "obuf": "^1.0.0", - "readable-stream": "^2.0.1", - "wbuf": "^1.1.0" - } - }, - "node_modules/hpack.js/node_modules/readable-stream": { - "version": "2.3.8", - "resolved": "https://registry.npmjs.org/readable-stream/-/readable-stream-2.3.8.tgz", - "integrity": "sha512-8p0AUk4XODgIewSi0l8Epjs+EVnWiK7NoDIEGU0HhE7+ZyY8D1IMY7odu5lRrFXGg71L15KG8QrPmum45RTtdA==", - "dev": true, - "peer": true, - "dependencies": { - "core-util-is": "~1.0.0", - "inherits": "~2.0.3", - "isarray": "~1.0.0", - "process-nextick-args": "~2.0.0", - "safe-buffer": "~5.1.1", - "string_decoder": "~1.1.1", - "util-deprecate": "~1.0.1" - } - }, - "node_modules/hpack.js/node_modules/safe-buffer": { - "version": "5.1.2", - "resolved": "https://registry.npmjs.org/safe-buffer/-/safe-buffer-5.1.2.tgz", - "integrity": "sha512-Gd2UZBJDkXlY7GbJxfsE8/nvKkUEU1G38c1siN6QP6a9PT9MmHB8GnpscSmMJSoF8LOIrt8ud/wPtojys4G6+g==", - "dev": true, - "peer": true - }, - "node_modules/hpack.js/node_modules/string_decoder": { - "version": "1.1.1", - "resolved": "https://registry.npmjs.org/string_decoder/-/string_decoder-1.1.1.tgz", - "integrity": "sha512-n/ShnvDi6FHbbVfviro+WojiFzv+s8MPMHBczVePfUpDJLwoLT0ht1l4YwBCbi8pJAveEEdnkHyPyTP/mzRfwg==", - "dev": true, - "peer": true, - "dependencies": { - "safe-buffer": "~5.1.0" - } - }, - "node_modules/html-entities": { - "version": "2.6.0", - "resolved": "https://registry.npmjs.org/html-entities/-/html-entities-2.6.0.tgz", - "integrity": "sha512-kig+rMn/QOVRvr7c86gQ8lWXq+Hkv6CbAH1hLu+RG338StTpE8Z0b44SDVaqVu7HGKf27frdmUYEs9hTUX/cLQ==", - "dev": true, - "funding": [ - { - "type": "github", - "url": "https://github.com/sponsors/mdevils" - }, - { - "type": "patreon", - "url": "https://patreon.com/mdevils" - } - ], - "peer": true - }, - "node_modules/html-escaper": { - "version": "2.0.2", - "resolved": "https://registry.npmjs.org/html-escaper/-/html-escaper-2.0.2.tgz", - "integrity": "sha512-H2iMtd0I4Mt5eYiapRdIDjp+XzelXQ0tFE4JS7YFwFevXXMmOp9myNrUvCg0D6ws8iqkRPBfKHgbwig1SmlLfg==", - "dev": true, - "peer": true - }, - "node_modules/html-minifier-terser": { - "version": "6.1.0", - "resolved": "https://registry.npmjs.org/html-minifier-terser/-/html-minifier-terser-6.1.0.tgz", - "integrity": "sha512-YXxSlJBZTP7RS3tWnQw74ooKa6L9b9i9QYXY21eUEvhZ3u9XLfv6OnFsQq6RxkhHygsaUMvYsZRV5rU/OVNZxw==", - "dev": true, - "peer": true, - "dependencies": { - "camel-case": "^4.1.2", - "clean-css": "^5.2.2", - "commander": "^8.3.0", - "he": "^1.2.0", - "param-case": "^3.0.4", - "relateurl": "^0.2.7", - "terser": "^5.10.0" - }, - "bin": { - "html-minifier-terser": "cli.js" - }, - "engines": { - "node": ">=12" - } - }, - "node_modules/html-tags": { - "version": "2.0.0", - "resolved": "https://registry.npmjs.org/html-tags/-/html-tags-2.0.0.tgz", - "integrity": "sha512-+Il6N8cCo2wB/Vd3gqy/8TZhTD3QvcVeQLCnZiGkGCH3JP28IgGAY41giccp2W4R3jfyJPAP318FQTa1yU7K7g==", - "dev": true, - "engines": { - "node": ">=4" - } - }, - "node_modules/html-webpack-plugin": { - 
"version": "5.6.3", - "resolved": "https://registry.npmjs.org/html-webpack-plugin/-/html-webpack-plugin-5.6.3.tgz", - "integrity": "sha512-QSf1yjtSAsmf7rYBV7XX86uua4W/vkhIt0xNXKbsi2foEeW7vjJQz4bhnpL3xH+l1ryl1680uNv968Z+X6jSYg==", - "dev": true, - "peer": true, - "dependencies": { - "@types/html-minifier-terser": "^6.0.0", - "html-minifier-terser": "^6.0.2", - "lodash": "^4.17.21", - "pretty-error": "^4.0.0", - "tapable": "^2.0.0" - }, - "engines": { - "node": ">=10.13.0" - }, - "funding": { - "type": "opencollective", - "url": "https://opencollective.com/html-webpack-plugin" - }, - "peerDependencies": { - "@rspack/core": "0.x || 1.x", - "webpack": "^5.20.0" - }, - "peerDependenciesMeta": { - "@rspack/core": { - "optional": true - }, - "webpack": { - "optional": true - } - } - }, - "node_modules/htmlparser2": { - "version": "6.1.0", - "resolved": "https://registry.npmjs.org/htmlparser2/-/htmlparser2-6.1.0.tgz", - "integrity": "sha512-gyyPk6rgonLFEDGoeRgQNaEUvdJ4ktTmmUh/h2t7s+M8oPpIPxgNACWa+6ESR57kXstwqPiCut0V8NRpcwgU7A==", - "dev": true, - "funding": [ - "https://github.com/fb55/htmlparser2?sponsor=1", - { - "type": "github", - "url": "https://github.com/sponsors/fb55" - } - ], - "peer": true, - "dependencies": { - "domelementtype": "^2.0.1", - "domhandler": "^4.0.0", - "domutils": "^2.5.2", - "entities": "^2.0.0" - } - }, - "node_modules/htmlparser2/node_modules/entities": { - "version": "2.2.0", - "resolved": "https://registry.npmjs.org/entities/-/entities-2.2.0.tgz", - "integrity": "sha512-p92if5Nz619I0w+akJrLZH0MX0Pb5DX39XOwQTtXSdQQOaYH03S1uIQp4mhOZtAXrxq4ViO67YTiLBo2638o9A==", - "dev": true, - "peer": true, - "funding": { - "url": "https://github.com/fb55/entities?sponsor=1" - } - }, - "node_modules/http-deceiver": { - "version": "1.2.7", - "resolved": "https://registry.npmjs.org/http-deceiver/-/http-deceiver-1.2.7.tgz", - "integrity": "sha512-LmpOGxTfbpgtGVxJrj5k7asXHCgNZp5nLfp+hWc8QQRqtb7fUy6kRY3BO1h9ddF6yIPYUARgxGOwB42DnxIaNw==", - "dev": true, - "peer": true - }, - "node_modules/http-errors": { - "version": "2.0.0", - "resolved": "https://registry.npmjs.org/http-errors/-/http-errors-2.0.0.tgz", - "integrity": "sha512-FtwrG/euBzaEjYeRqOgly7G0qviiXoJWnvEH2Z1plBdXgbyjv34pHTSb9zoeHMyDy33+DWy5Wt9Wo+TURtOYSQ==", - "dev": true, - "peer": true, - "dependencies": { - "depd": "2.0.0", - "inherits": "2.0.4", - "setprototypeof": "1.2.0", - "statuses": "2.0.1", - "toidentifier": "1.0.1" - }, - "engines": { - "node": ">= 0.8" - } - }, - "node_modules/http-parser-js": { - "version": "0.5.10", - "resolved": "https://registry.npmjs.org/http-parser-js/-/http-parser-js-0.5.10.tgz", - "integrity": "sha512-Pysuw9XpUq5dVc/2SMHpuTY01RFl8fttgcyunjL7eEMhGM3cI4eOmiCycJDVCo/7O7ClfQD3SaI6ftDzqOXYMA==", - "dev": true, - "peer": true - }, - "node_modules/http-proxy": { - "version": "1.18.1", - "resolved": "https://registry.npmjs.org/http-proxy/-/http-proxy-1.18.1.tgz", - "integrity": "sha512-7mz/721AbnJwIVbnaSv1Cz3Am0ZLT/UBwkC92VlxhXv/k/BBQfM2fXElQNC27BVGr0uwUpplYPQM9LnaBMR5NQ==", - "dev": true, - "peer": true, - "dependencies": { - "eventemitter3": "^4.0.0", - "follow-redirects": "^1.0.0", - "requires-port": "^1.0.0" - }, - "engines": { - "node": ">=8.0.0" - } - }, - "node_modules/http-proxy-middleware": { - "version": "2.0.9", - "resolved": "https://registry.npmjs.org/http-proxy-middleware/-/http-proxy-middleware-2.0.9.tgz", - "integrity": "sha512-c1IyJYLYppU574+YI7R4QyX2ystMtVXZwIdzazUIPIJsHuWNd+mho2j+bKoHftndicGj9yh+xjd+l0yj7VeT1Q==", - "dev": true, - "peer": true, - "dependencies": { - "@types/http-proxy": 
"^1.17.8", - "http-proxy": "^1.18.1", - "is-glob": "^4.0.1", - "is-plain-obj": "^3.0.0", - "micromatch": "^4.0.2" - }, - "engines": { - "node": ">=12.0.0" - }, - "peerDependencies": { - "@types/express": "^4.17.13" - }, - "peerDependenciesMeta": { - "@types/express": { - "optional": true - } - } - }, - "node_modules/human-signals": { - "version": "2.1.0", - "resolved": "https://registry.npmjs.org/human-signals/-/human-signals-2.1.0.tgz", - "integrity": "sha512-B4FFZ6q/T2jhhksgkbEW3HBvWIfDW85snkQgawt07S7J5QXTk6BkNV+0yAeZrM5QpMAdYlocGoljn0sJ/WQkFw==", - "dev": true, - "peer": true, - "engines": { - "node": ">=10.17.0" - } - }, - "node_modules/iconv-lite": { - "version": "0.4.24", - "resolved": "https://registry.npmjs.org/iconv-lite/-/iconv-lite-0.4.24.tgz", - "integrity": "sha512-v3MXnZAcvnywkTUEZomIActle7RXXeedOR31wwl7VlyoXO4Qi9arvSenNQWne1TcRwhCL1HwLI21bEqdpj8/rA==", - "dev": true, - "peer": true, - "dependencies": { - "safer-buffer": ">= 2.1.2 < 3" - }, - "engines": { - "node": ">=0.10.0" - } - }, - "node_modules/icss-utils": { - "version": "5.1.0", - "resolved": "https://registry.npmjs.org/icss-utils/-/icss-utils-5.1.0.tgz", - "integrity": "sha512-soFhflCVWLfRNOPU3iv5Z9VUdT44xFRbzjLsEzSr5AQmgqPMTHdU3PMT1Cf1ssx8fLNJDA1juftYl+PUcv3MqA==", - "dev": true, - "peer": true, - "engines": { - "node": "^10 || ^12 || >= 14" - }, - "peerDependencies": { - "postcss": "^8.1.0" - } - }, - "node_modules/ieee754": { - "version": "1.2.1", - "resolved": "https://registry.npmjs.org/ieee754/-/ieee754-1.2.1.tgz", - "integrity": "sha512-dcyqhDvX1C46lXZcVqCpK+FtMRQVdIMN6/Df5js2zouUsqG7I6sFxitIC+7KYK29KdXOLHdu9zL4sFnoVQnqaA==", - "dev": true, - "funding": [ - { - "type": "github", - "url": "https://github.com/sponsors/feross" - }, - { - "type": "patreon", - "url": "https://www.patreon.com/feross" - }, - { - "type": "consulting", - "url": "https://feross.org/support" - } - ] - }, "node_modules/ignore": { "version": "5.3.2", "resolved": "https://registry.npmjs.org/ignore/-/ignore-5.3.2.tgz", @@ -7104,91 +2210,6 @@ "node": ">=0.8.19" } }, - "node_modules/inflight": { - "version": "1.0.6", - "resolved": "https://registry.npmjs.org/inflight/-/inflight-1.0.6.tgz", - "integrity": "sha512-k92I/b08q4wvFscXCLvqfsHCrjrF7yiXsQuIVvVE7N82W3+aqpzuUdBbfhWcy/FZR3/4IgflMgKLOsvPDrGCJA==", - "deprecated": "This module is not supported, and leaks memory. Do not use it. 
Check out lru-cache if you want a good and tested way to coalesce async requests by a key value, which is much more comprehensive and powerful.", - "dev": true, - "dependencies": { - "once": "^1.3.0", - "wrappy": "1" - } - }, - "node_modules/inherits": { - "version": "2.0.4", - "resolved": "https://registry.npmjs.org/inherits/-/inherits-2.0.4.tgz", - "integrity": "sha512-k/vGaX4/Yla3WzyMCvTQOXYeIHvqOKtnqBduzTHpzpQZzAskKMhZ2K+EnBiSM9zGSoIFeMpXKxa4dYeZIQqewQ==", - "dev": true - }, - "node_modules/interpret": { - "version": "1.4.0", - "resolved": "https://registry.npmjs.org/interpret/-/interpret-1.4.0.tgz", - "integrity": "sha512-agE4QfB2Lkp9uICn7BAqoscw4SZP9kTE2hxiFI3jBPmXJfdqiahTbUuKGsMoN2GtqL9AxhYioAcVvgsb1HvRbA==", - "dev": true, - "engines": { - "node": ">= 0.10" - } - }, - "node_modules/ipaddr.js": { - "version": "2.2.0", - "resolved": "https://registry.npmjs.org/ipaddr.js/-/ipaddr.js-2.2.0.tgz", - "integrity": "sha512-Ag3wB2o37wslZS19hZqorUnrnzSkpOVy+IiiDEiTqNubEYpYuHWIf6K4psgN2ZWKExS4xhVCrRVfb/wfW8fWJA==", - "dev": true, - "peer": true, - "engines": { - "node": ">= 10" - } - }, - "node_modules/is-arrayish": { - "version": "0.2.1", - "resolved": "https://registry.npmjs.org/is-arrayish/-/is-arrayish-0.2.1.tgz", - "integrity": "sha512-zz06S8t0ozoDXMG+ube26zeCTNXcKIPJZJi8hBrF4idCLms4CG9QtK7qBl1boi5ODzFpjswb5JPmHCbMpjaYzg==", - "dev": true - }, - "node_modules/is-binary-path": { - "version": "2.1.0", - "resolved": "https://registry.npmjs.org/is-binary-path/-/is-binary-path-2.1.0.tgz", - "integrity": "sha512-ZMERYes6pDydyuGidse7OsHxtbI7WVeUEozgR/g7rd0xUimYNlvZRE/K2MgZTjWy725IfelLeVcEM97mmtRGXw==", - "dev": true, - "peer": true, - "dependencies": { - "binary-extensions": "^2.0.0" - }, - "engines": { - "node": ">=8" - } - }, - "node_modules/is-core-module": { - "version": "2.16.1", - "resolved": "https://registry.npmjs.org/is-core-module/-/is-core-module-2.16.1.tgz", - "integrity": "sha512-UfoeMA6fIJ8wTYFEUjelnaGI67v6+N7qXJEvQuIGa99l4xsCruSYOVSQ0uPANn4dAzm8lkYPaKLrrijLq7x23w==", - "dev": true, - "dependencies": { - "hasown": "^2.0.2" - }, - "engines": { - "node": ">= 0.4" - }, - "funding": { - "url": "https://github.com/sponsors/ljharb" - } - }, - "node_modules/is-docker": { - "version": "2.2.1", - "resolved": "https://registry.npmjs.org/is-docker/-/is-docker-2.2.1.tgz", - "integrity": "sha512-F+i2BKsFrH66iaUFc0woD8sLy8getkwTwtOBjvs56Cx4CgJDeKQeqfz8wAYiSb8JOprWhHH5p77PbmYCvvUuXQ==", - "dev": true, - "bin": { - "is-docker": "cli.js" - }, - "engines": { - "node": ">=8" - }, - "funding": { - "url": "https://github.com/sponsors/sindresorhus" - } - }, "node_modules/is-extglob": { "version": "2.1.1", "resolved": "https://registry.npmjs.org/is-extglob/-/is-extglob-2.1.1.tgz", @@ -7198,26 +2219,6 @@ "node": ">=0.10.0" } }, - "node_modules/is-file-esm": { - "version": "1.0.0", - "resolved": "https://registry.npmjs.org/is-file-esm/-/is-file-esm-1.0.0.tgz", - "integrity": "sha512-rZlaNKb4Mr8WlRu2A9XdeoKgnO5aA53XdPHgCKVyCrQ/rWi89RET1+bq37Ru46obaQXeiX4vmFIm1vks41hoSA==", - "dev": true, - "peer": true, - "dependencies": { - "read-pkg-up": "^7.0.1" - } - }, - "node_modules/is-fullwidth-code-point": { - "version": "3.0.0", - "resolved": "https://registry.npmjs.org/is-fullwidth-code-point/-/is-fullwidth-code-point-3.0.0.tgz", - "integrity": "sha512-zymm5+u+sCsSWyD9qNaejV3DFvhCKclKdizYaJUuHA83RLjb7nSuGnddCHGv0hk+KY7BMAlsWeK4Ueg6EV6XQg==", - "dev": true, - "peer": true, - "engines": { - "node": ">=8" - } - }, "node_modules/is-glob": { "version": "4.0.3", "resolved": 
"https://registry.npmjs.org/is-glob/-/is-glob-4.0.3.tgz", @@ -7230,171 +2231,12 @@ "node": ">=0.10.0" } }, - "node_modules/is-interactive": { - "version": "1.0.0", - "resolved": "https://registry.npmjs.org/is-interactive/-/is-interactive-1.0.0.tgz", - "integrity": "sha512-2HvIEKRoqS62guEC+qBjpvRubdX910WCMuJTZ+I9yvqKU2/12eSL549HMwtabb4oupdj2sMP50k+XJfB/8JE6w==", - "dev": true, - "engines": { - "node": ">=8" - } - }, - "node_modules/is-number": { - "version": "7.0.0", - "resolved": "https://registry.npmjs.org/is-number/-/is-number-7.0.0.tgz", - "integrity": "sha512-41Cifkg6e8TylSpdtTpeLVMqvSBEVzTttHvERD741+pnZ8ANv0004MRL43QKPDlK9cGvNp6NZWZUBlbGXYxxng==", - "dev": true, - "peer": true, - "engines": { - "node": ">=0.12.0" - } - }, - "node_modules/is-plain-obj": { - "version": "3.0.0", - "resolved": "https://registry.npmjs.org/is-plain-obj/-/is-plain-obj-3.0.0.tgz", - "integrity": "sha512-gwsOE28k+23GP1B6vFl1oVh/WOzmawBrKwo5Ev6wMKzPkaXaCDIQKzLnvsA42DRlbVTWorkgTKIviAKCWkfUwA==", - "dev": true, - "peer": true, - "engines": { - "node": ">=10" - }, - "funding": { - "url": "https://github.com/sponsors/sindresorhus" - } - }, - "node_modules/is-plain-object": { - "version": "2.0.4", - "resolved": "https://registry.npmjs.org/is-plain-object/-/is-plain-object-2.0.4.tgz", - "integrity": "sha512-h5PpgXkWitc38BBMYawTYMWJHFZJVnBquFE57xFpjB8pJFiF6gZ+bU+WyI/yqXiFR5mdLsgYNaPe8uao6Uv9Og==", - "dev": true, - "peer": true, - "dependencies": { - "isobject": "^3.0.1" - }, - "engines": { - "node": ">=0.10.0" - } - }, - "node_modules/is-stream": { - "version": "1.1.0", - "resolved": "https://registry.npmjs.org/is-stream/-/is-stream-1.1.0.tgz", - "integrity": "sha512-uQPm8kcs47jx38atAcWTVxyltQYoPT68y9aWYdV6yWXSyW8mzSat0TL6CiWdZeCdF3KrAvpVtnHbTv4RN+rqdQ==", - "dev": true, - "engines": { - "node": ">=0.10.0" - } - }, - "node_modules/is-unicode-supported": { - "version": "0.1.0", - "resolved": "https://registry.npmjs.org/is-unicode-supported/-/is-unicode-supported-0.1.0.tgz", - "integrity": "sha512-knxG2q4UC3u8stRGyAVJCOdxFmv5DZiRcdlIaAQXAbSfJya+OhopNotLQrstBhququ4ZpuKbDc/8S6mgXgPFPw==", - "dev": true, - "engines": { - "node": ">=10" - }, - "funding": { - "url": "https://github.com/sponsors/sindresorhus" - } - }, - "node_modules/is-wsl": { - "version": "2.2.0", - "resolved": "https://registry.npmjs.org/is-wsl/-/is-wsl-2.2.0.tgz", - "integrity": "sha512-fKzAra0rGJUUBwGBgNkHZuToZcn+TtXHpeCgmkMJMMYx1sQDYaCSyjJBSCa2nH1DGm7s3n1oBnohoVTBaN7Lww==", - "dev": true, - "dependencies": { - "is-docker": "^2.0.0" - }, - "engines": { - "node": ">=8" - } - }, - "node_modules/isarray": { - "version": "1.0.0", - "resolved": "https://registry.npmjs.org/isarray/-/isarray-1.0.0.tgz", - "integrity": "sha512-VLghIWNM6ELQzo7zwmcg0NmTVyWKYjvIeM83yjp0wRDTmUnrM678fQbcKBo6n2CJEF0szoG//ytg+TKla89ALQ==", - "dev": true, - "peer": true - }, "node_modules/isexe": { "version": "2.0.0", "resolved": "https://registry.npmjs.org/isexe/-/isexe-2.0.0.tgz", "integrity": "sha512-RHxMLp9lnKHGHRng9QFhRCMbYAcVpn69smSGcq3f36xjgVVWThj4qqLbTLlq7Ssj8B+fIQ1EuCEGI2lKsyQeIw==", "dev": true }, - "node_modules/isobject": { - "version": "3.0.1", - "resolved": "https://registry.npmjs.org/isobject/-/isobject-3.0.1.tgz", - "integrity": "sha512-WhB9zCku7EGTj/HQQRz5aUQEUeoQZH2bWcltRErOpymJ4boYE6wL9Tbr23krRPSZ+C5zqNSrSw+Cc7sZZ4b7vg==", - "dev": true, - "peer": true, - "engines": { - "node": ">=0.10.0" - } - }, - "node_modules/javascript-stringify": { - "version": "2.1.0", - "resolved": "https://registry.npmjs.org/javascript-stringify/-/javascript-stringify-2.1.0.tgz", - 
"integrity": "sha512-JVAfqNPTvNq3sB/VHQJAFxN/sPgKnsKrCwyRt15zwNCdrMMJDdcEOdubuy+DuJYYdm0ox1J4uzEuYKkN+9yhVg==", - "dev": true, - "peer": true - }, - "node_modules/jest-worker": { - "version": "27.5.1", - "resolved": "https://registry.npmjs.org/jest-worker/-/jest-worker-27.5.1.tgz", - "integrity": "sha512-7vuh85V5cdDofPyxn58nrPjBktZo0u9x1g8WtjQol+jZDaE+fhN+cIvTj11GndBnMnyfrUOG1sZQxCdjKh+DKg==", - "devOptional": true, - "dependencies": { - "@types/node": "*", - "merge-stream": "^2.0.0", - "supports-color": "^8.0.0" - }, - "engines": { - "node": ">= 10.13.0" - } - }, - "node_modules/jest-worker/node_modules/supports-color": { - "version": "8.1.1", - "resolved": "https://registry.npmjs.org/supports-color/-/supports-color-8.1.1.tgz", - "integrity": "sha512-MpUEN2OodtUzxvKQl72cUF7RQ5EiHsGvSsVG0ia9c5RbWGL2CI4C7EpPS8UTBIplnlzZiNuV56w+FuNxy3ty2Q==", - "devOptional": true, - "dependencies": { - "has-flag": "^4.0.0" - }, - "engines": { - "node": ">=10" - }, - "funding": { - "url": "https://github.com/chalk/supports-color?sponsor=1" - } - }, - "node_modules/joi": { - "version": "17.13.3", - "resolved": "https://registry.npmjs.org/joi/-/joi-17.13.3.tgz", - "integrity": "sha512-otDA4ldcIx+ZXsKHWmp0YizCweVRZG96J10b0FevjfuncLO1oX59THoAmHkNubYJ+9gWsYsp5k8v4ib6oDv1fA==", - "dev": true, - "dependencies": { - "@hapi/hoek": "^9.3.0", - "@hapi/topo": "^5.1.0", - "@sideway/address": "^4.1.5", - "@sideway/formula": "^3.0.1", - "@sideway/pinpoint": "^2.0.0" - } - }, - "node_modules/js-message": { - "version": "1.0.7", - "resolved": "https://registry.npmjs.org/js-message/-/js-message-1.0.7.tgz", - "integrity": "sha512-efJLHhLjIyKRewNS9EGZ4UpI8NguuL6fKkhRxVuMmrGV2xN/0APGdQYwLFky5w9naebSZ0OwAGp0G6/2Cg90rA==", - "dev": true, - "engines": { - "node": ">=0.6.0" - } - }, - "node_modules/js-tokens": { - "version": "4.0.0", - "resolved": "https://registry.npmjs.org/js-tokens/-/js-tokens-4.0.0.tgz", - "integrity": "sha512-RdJUflcE3cUzKiMqQgsCu06FPu9UdIJO0beYbPhHN4k6apgJtifcoCtT9bcxOpYBtpD2kCM6Sbzg4CausW/PKQ==", - "dev": true - }, "node_modules/js-yaml": { "version": "4.1.0", "resolved": "https://registry.npmjs.org/js-yaml/-/js-yaml-4.1.0.tgz", @@ -7407,41 +2249,17 @@ "js-yaml": "bin/js-yaml.js" } }, - "node_modules/jsesc": { - "version": "3.1.0", - "resolved": "https://registry.npmjs.org/jsesc/-/jsesc-3.1.0.tgz", - "integrity": "sha512-/sM3dO2FOzXjKQhJuo0Q173wf2KOo8t4I8vHy6lF9poUp7bKT0/NHE8fPX23PwfhnykfqnC2xRxOnVw5XuGIaA==", - "dev": true, - "bin": { - "jsesc": "bin/jsesc" - }, - "engines": { - "node": ">=6" - } - }, "node_modules/json-buffer": { "version": "3.0.1", "resolved": "https://registry.npmjs.org/json-buffer/-/json-buffer-3.0.1.tgz", "integrity": "sha512-4bV5BfR2mqfQTJm+V5tPPdf+ZpuhiIvTuAB5g8kcrXOZpTT/QwwVRWBywX1ozr6lEuPdbHxwaJlm9G6mI2sfSQ==", "dev": true }, - "node_modules/json-parse-better-errors": { - "version": "1.0.2", - "resolved": "https://registry.npmjs.org/json-parse-better-errors/-/json-parse-better-errors-1.0.2.tgz", - "integrity": "sha512-mrqyZKfX5EhL7hvqcV6WG1yYjnjeuYDzDhhcAAUrq8Po85NBQBJP+ZDUT75qZQ98IkUoBqdkExkukOU7Ts2wrw==", - "dev": true - }, - "node_modules/json-parse-even-better-errors": { - "version": "2.3.1", - "resolved": "https://registry.npmjs.org/json-parse-even-better-errors/-/json-parse-even-better-errors-2.3.1.tgz", - "integrity": "sha512-xyFwyhro/JEof6Ghe2iz2NcXoj2sloNsWr/XsERDK/oiPCfaNhl5ONfp+jQdAZRQQ0IJWNzH9zIZF7li91kh2w==", - "devOptional": true - }, "node_modules/json-schema-traverse": { "version": "0.4.1", "resolved": 
"https://registry.npmjs.org/json-schema-traverse/-/json-schema-traverse-0.4.1.tgz", "integrity": "sha512-xbbCH5dCYU5T8LcEhhuh7HJ88HXuW3qsI3Y0zOZFKfZEHcpWiHU/Jxzk629Brsab/mMiHQti9wMP+845RPe3Vg==", - "devOptional": true + "dev": true }, "node_modules/json-stable-stringify-without-jsonify": { "version": "1.0.1", @@ -7449,31 +2267,6 @@ "integrity": "sha512-Bdboy+l7tA3OGW6FjyFHWkP5LuByj1Tk33Ljyq0axyzdk9//JSi2u3fP1QSmd1KNwq6VOKYGlAu87CisVir6Pw==", "dev": true }, - "node_modules/json5": { - "version": "2.2.3", - "resolved": "https://registry.npmjs.org/json5/-/json5-2.2.3.tgz", - "integrity": "sha512-XmOWe7eyHYH14cLdVPoyg+GOH3rYX++KpzrylJwSW98t3Nk+U8XOl8FWKOgwtzdb8lXGf6zYwDUzeHMWfxasyg==", - "devOptional": true, - "bin": { - "json5": "lib/cli.js" - }, - "engines": { - "node": ">=6" - } - }, - "node_modules/jsonfile": { - "version": "6.1.0", - "resolved": "https://registry.npmjs.org/jsonfile/-/jsonfile-6.1.0.tgz", - "integrity": "sha512-5dgndWOriYSm5cnYaJNhalLNDKOqFwyDB/rr1E9ZsGciGvKPs8R2xYGCacuf3z6K1YKDz182fd+fY3cn3pMqXQ==", - "dev": true, - "peer": true, - "dependencies": { - "universalify": "^2.0.0" - }, - "optionalDependencies": { - "graceful-fs": "^4.1.6" - } - }, "node_modules/keyv": { "version": "4.5.4", "resolved": "https://registry.npmjs.org/keyv/-/keyv-4.5.4.tgz", @@ -7483,46 +2276,6 @@ "json-buffer": "3.0.1" } }, - "node_modules/kind-of": { - "version": "6.0.3", - "resolved": "https://registry.npmjs.org/kind-of/-/kind-of-6.0.3.tgz", - "integrity": "sha512-dcS1ul+9tmeD95T+x28/ehLgd9mENa3LsvDTtzm3vyBEO7RPptvAD+t44WVXaUjTBRcrpFeFlC8WCruUR456hw==", - "dev": true, - "peer": true, - "engines": { - "node": ">=0.10.0" - } - }, - "node_modules/klona": { - "version": "2.0.6", - "resolved": "https://registry.npmjs.org/klona/-/klona-2.0.6.tgz", - "integrity": "sha512-dhG34DXATL5hSxJbIexCft8FChFXtmskoZYnoPWjXQuebWYCNkVeV3KkGegCK9CP1oswI/vQibS2GY7Em/sJJA==", - "dev": true, - "peer": true, - "engines": { - "node": ">= 8" - } - }, - "node_modules/launch-editor": { - "version": "2.10.0", - "resolved": "https://registry.npmjs.org/launch-editor/-/launch-editor-2.10.0.tgz", - "integrity": "sha512-D7dBRJo/qcGX9xlvt/6wUYzQxjh5G1RvZPgPv8vi4KRU99DVQL/oW7tnVOCCTm2HGeo3C5HvGE5Yrh6UBoZ0vA==", - "dev": true, - "dependencies": { - "picocolors": "^1.0.0", - "shell-quote": "^1.8.1" - } - }, - "node_modules/launch-editor-middleware": { - "version": "2.10.0", - "resolved": "https://registry.npmjs.org/launch-editor-middleware/-/launch-editor-middleware-2.10.0.tgz", - "integrity": "sha512-RzZu7MeVlE3p1H6Sadc2BhuDGAj7bkeDCBpNq/zSENP4ohJGhso00k5+iYaRwKshIpiOAhMmimce+5D389xmSg==", - "dev": true, - "peer": true, - "dependencies": { - "launch-editor": "^2.10.0" - } - }, "node_modules/levn": { "version": "0.4.1", "resolved": "https://registry.npmjs.org/levn/-/levn-0.4.1.tgz", @@ -7536,64 +2289,11 @@ "node": ">= 0.8.0" } }, - "node_modules/lilconfig": { - "version": "2.1.0", - "resolved": "https://registry.npmjs.org/lilconfig/-/lilconfig-2.1.0.tgz", - "integrity": "sha512-utWOt/GHzuUxnLKxB6dk81RoOeoNeHgbrXiuGk4yyF5qlRz+iIVWu56E2fqGHFrXz0QNUhLB/8nKqvRH66JKGQ==", - "dev": true, - "peer": true, - "engines": { - "node": ">=10" - } - }, - "node_modules/lines-and-columns": { - "version": "1.2.4", - "resolved": "https://registry.npmjs.org/lines-and-columns/-/lines-and-columns-1.2.4.tgz", - "integrity": "sha512-7ylylesZQ/PV29jhEDl3Ufjo6ZX7gCqJr5F7PKrqc93v7fzSymt1BpwEU8nAUXs8qzzvqhbjhK5QZg6Mt/HkBg==", - "dev": true - }, "node_modules/litegraph.js": { "version": "0.7.18", "resolved": 
"https://registry.npmjs.org/litegraph.js/-/litegraph.js-0.7.18.tgz", "integrity": "sha512-1WEwjOO58j4FcLX8DvsuMXM371MEq4Y+8pBr3q2pBhJ9nDkwBtBd9Gj6bxArBKhW6i42bSOyv9ybeuez6NAxoQ==" }, - "node_modules/loader-runner": { - "version": "4.3.0", - "resolved": "https://registry.npmjs.org/loader-runner/-/loader-runner-4.3.0.tgz", - "integrity": "sha512-3R/1M+yS3j5ou80Me59j7F9IMs4PXs3VqRrm0TU3AbKPxlmpoY1TNscJV/oGJXo8qCatFGTfDbY6W6ipGOYXfg==", - "devOptional": true, - "engines": { - "node": ">=6.11.5" - } - }, - "node_modules/loader-utils": { - "version": "1.4.2", - "resolved": "https://registry.npmjs.org/loader-utils/-/loader-utils-1.4.2.tgz", - "integrity": "sha512-I5d00Pd/jwMD2QCduo657+YM/6L3KZu++pmX9VFncxaxvHcru9jx1lBaFft+r4Mt2jK0Yhp41XlRAihzPxHNCg==", - "dev": true, - "peer": true, - "dependencies": { - "big.js": "^5.2.2", - "emojis-list": "^3.0.0", - "json5": "^1.0.1" - }, - "engines": { - "node": ">=4.0.0" - } - }, - "node_modules/loader-utils/node_modules/json5": { - "version": "1.0.2", - "resolved": "https://registry.npmjs.org/json5/-/json5-1.0.2.tgz", - "integrity": "sha512-g1MWMLBiz8FKi1e4w0UyVL3w+iJceWAFBAaBnnGKOpNa5f8TLktkbre1+s6oICydWAm+HRUGTmI+//xv2hvXYA==", - "dev": true, - "peer": true, - "dependencies": { - "minimist": "^1.2.0" - }, - "bin": { - "json5": "lib/cli.js" - } - }, "node_modules/locate-path": { "version": "6.0.0", "resolved": "https://registry.npmjs.org/locate-path/-/locate-path-6.0.0.tgz", @@ -7612,42 +2312,7 @@ "node_modules/lodash": { "version": "4.17.21", "resolved": "https://registry.npmjs.org/lodash/-/lodash-4.17.21.tgz", - "integrity": "sha512-v2kDEe57lecTulaDIuNTPy3Ry4gLGJ6Z1O3vE1krgXZNrsQ+LFTGHVxVjcXPs17LhbZVGedAJv8XZ1tvj5FvSg==", - "dev": true, - "peer": true - }, - "node_modules/lodash.debounce": { - "version": "4.0.8", - "resolved": "https://registry.npmjs.org/lodash.debounce/-/lodash.debounce-4.0.8.tgz", - "integrity": "sha512-FT1yDzDYEoYWhnSGnpE/4Kj1fLZkDFyqRb7fNt6FdYOSxlUWAtp42Eh6Wb0rGIv/m9Bgo7x4GhQbm5Ys4SG5ow==", - "dev": true - }, - "node_modules/lodash.defaultsdeep": { - "version": "4.6.1", - "resolved": "https://registry.npmjs.org/lodash.defaultsdeep/-/lodash.defaultsdeep-4.6.1.tgz", - "integrity": "sha512-3j8wdDzYuWO3lM3Reg03MuQR957t287Rpcxp1njpEa8oDrikb+FwGdW3n+FELh/A6qib6yPit0j/pv9G/yeAqA==", - "dev": true, - "peer": true - }, - "node_modules/lodash.kebabcase": { - "version": "4.1.1", - "resolved": "https://registry.npmjs.org/lodash.kebabcase/-/lodash.kebabcase-4.1.1.tgz", - "integrity": "sha512-N8XRTIMMqqDgSy4VLKPnJ/+hpGZN+PHQiJnSenYqPaVV/NCqEogTnAdZLQiGKhxX+JCs8waWq2t1XHWKOmlY8g==", - "dev": true - }, - "node_modules/lodash.mapvalues": { - "version": "4.6.0", - "resolved": "https://registry.npmjs.org/lodash.mapvalues/-/lodash.mapvalues-4.6.0.tgz", - "integrity": "sha512-JPFqXFeZQ7BfS00H58kClY7SPVeHertPE0lNuCyZ26/XlN8TvakYD7b9bGyNmXbT/D3BbtPAAmq90gPWqLkxlQ==", - "dev": true, - "peer": true - }, - "node_modules/lodash.memoize": { - "version": "4.1.2", - "resolved": "https://registry.npmjs.org/lodash.memoize/-/lodash.memoize-4.1.2.tgz", - "integrity": "sha512-t7j+NzmgnQzTAYXcsHYLgimltOV1MXHtlOWf6GjL9Kj8GK5FInw5JotxvbOs+IvV1/Dzo04/fCGfLVs7aXb4Ag==", - "dev": true, - "peer": true + "integrity": "sha512-v2kDEe57lecTulaDIuNTPy3Ry4gLGJ6Z1O3vE1krgXZNrsQ+LFTGHVxVjcXPs17LhbZVGedAJv8XZ1tvj5FvSg==" }, "node_modules/lodash.merge": { "version": "4.6.2", @@ -7655,190 +2320,6 @@ "integrity": "sha512-0KpjqXRVvrYyCsX1swR/XTK0va6VQkQM6MNo7PqW77ByjAhoARA8EfrP1N4+KlKj8YS0ZUCtRT/YUuhyYDujIQ==", "dev": true }, - "node_modules/lodash.uniq": { - "version": "4.5.0", - 
"resolved": "https://registry.npmjs.org/lodash.uniq/-/lodash.uniq-4.5.0.tgz", - "integrity": "sha512-xfBaXQd9ryd9dlSDvnvI0lvxfLJlYAZzXomUYzLKtUeOQvOP5piqAWuGtrhWeqaXK9hhoM/iyJc5AV+XfsX3HQ==", - "dev": true, - "peer": true - }, - "node_modules/log-symbols": { - "version": "4.1.0", - "resolved": "https://registry.npmjs.org/log-symbols/-/log-symbols-4.1.0.tgz", - "integrity": "sha512-8XPvpAA8uyhfteu8pIvQxpJZ7SYYdpUivZpGy6sFsBuKRY/7rQGavedeB8aK+Zkyq6upMFVL/9AW6vOYzfRyLg==", - "dev": true, - "dependencies": { - "chalk": "^4.1.0", - "is-unicode-supported": "^0.1.0" - }, - "engines": { - "node": ">=10" - }, - "funding": { - "url": "https://github.com/sponsors/sindresorhus" - } - }, - "node_modules/log-symbols/node_modules/chalk": { - "version": "4.1.2", - "resolved": "https://registry.npmjs.org/chalk/-/chalk-4.1.2.tgz", - "integrity": "sha512-oKnbhFyRIXpUuez8iBMmyEa4nbj4IOQyuhc/wy9kY7/WVPcwIO9VA668Pu8RkO7+0G76SLROeyw9CpQ061i4mA==", - "dev": true, - "dependencies": { - "ansi-styles": "^4.1.0", - "supports-color": "^7.1.0" - }, - "engines": { - "node": ">=10" - }, - "funding": { - "url": "https://github.com/chalk/chalk?sponsor=1" - } - }, - "node_modules/log-update": { - "version": "2.3.0", - "resolved": "https://registry.npmjs.org/log-update/-/log-update-2.3.0.tgz", - "integrity": "sha512-vlP11XfFGyeNQlmEn9tJ66rEW1coA/79m5z6BCkudjbAGE83uhAcGYrBFwfs3AdLiLzGRusRPAbSPK9xZteCmg==", - "dev": true, - "peer": true, - "dependencies": { - "ansi-escapes": "^3.0.0", - "cli-cursor": "^2.0.0", - "wrap-ansi": "^3.0.1" - }, - "engines": { - "node": ">=4" - } - }, - "node_modules/log-update/node_modules/ansi-regex": { - "version": "3.0.1", - "resolved": "https://registry.npmjs.org/ansi-regex/-/ansi-regex-3.0.1.tgz", - "integrity": "sha512-+O9Jct8wf++lXxxFc4hc8LsjaSq0HFzzL7cVsw8pRDIPdjKD2mT4ytDZlLuSBZ4cLKZFXIrMGO7DbQCtMJJMKw==", - "dev": true, - "peer": true, - "engines": { - "node": ">=4" - } - }, - "node_modules/log-update/node_modules/cli-cursor": { - "version": "2.1.0", - "resolved": "https://registry.npmjs.org/cli-cursor/-/cli-cursor-2.1.0.tgz", - "integrity": "sha512-8lgKz8LmCRYZZQDpRyT2m5rKJ08TnU4tR9FFFW2rxpxR1FzWi4PQ/NfyODchAatHaUgnSPVcx/R5w6NuTBzFiw==", - "dev": true, - "peer": true, - "dependencies": { - "restore-cursor": "^2.0.0" - }, - "engines": { - "node": ">=4" - } - }, - "node_modules/log-update/node_modules/is-fullwidth-code-point": { - "version": "2.0.0", - "resolved": "https://registry.npmjs.org/is-fullwidth-code-point/-/is-fullwidth-code-point-2.0.0.tgz", - "integrity": "sha512-VHskAKYM8RfSFXwee5t5cbN5PZeq1Wrh6qd5bkyiXIf6UQcN6w/A0eXM9r6t8d+GYOh+o6ZhiEnb88LN/Y8m2w==", - "dev": true, - "peer": true, - "engines": { - "node": ">=4" - } - }, - "node_modules/log-update/node_modules/mimic-fn": { - "version": "1.2.0", - "resolved": "https://registry.npmjs.org/mimic-fn/-/mimic-fn-1.2.0.tgz", - "integrity": "sha512-jf84uxzwiuiIVKiOLpfYk7N46TSy8ubTonmneY9vrpHNAnp0QBt2BxWV9dO3/j+BoVAb+a5G6YDPW3M5HOdMWQ==", - "dev": true, - "peer": true, - "engines": { - "node": ">=4" - } - }, - "node_modules/log-update/node_modules/onetime": { - "version": "2.0.1", - "resolved": "https://registry.npmjs.org/onetime/-/onetime-2.0.1.tgz", - "integrity": "sha512-oyyPpiMaKARvvcgip+JV+7zci5L8D1W9RZIz2l1o08AM3pfspitVWnPt3mzHcBPp12oYMTy0pqrFs/C+m3EwsQ==", - "dev": true, - "peer": true, - "dependencies": { - "mimic-fn": "^1.0.0" - }, - "engines": { - "node": ">=4" - } - }, - "node_modules/log-update/node_modules/restore-cursor": { - "version": "2.0.0", - "resolved": 
"https://registry.npmjs.org/restore-cursor/-/restore-cursor-2.0.0.tgz", - "integrity": "sha512-6IzJLuGi4+R14vwagDHX+JrXmPVtPpn4mffDJ1UdR7/Edm87fl6yi8mMBIVvFtJaNTUvjughmW4hwLhRG7gC1Q==", - "dev": true, - "peer": true, - "dependencies": { - "onetime": "^2.0.0", - "signal-exit": "^3.0.2" - }, - "engines": { - "node": ">=4" - } - }, - "node_modules/log-update/node_modules/string-width": { - "version": "2.1.1", - "resolved": "https://registry.npmjs.org/string-width/-/string-width-2.1.1.tgz", - "integrity": "sha512-nOqH59deCq9SRHlxq1Aw85Jnt4w6KvLKqWVik6oA9ZklXLNIOlqg4F2yrT1MVaTjAqvVwdfeZ7w7aCvJD7ugkw==", - "dev": true, - "peer": true, - "dependencies": { - "is-fullwidth-code-point": "^2.0.0", - "strip-ansi": "^4.0.0" - }, - "engines": { - "node": ">=4" - } - }, - "node_modules/log-update/node_modules/strip-ansi": { - "version": "4.0.0", - "resolved": "https://registry.npmjs.org/strip-ansi/-/strip-ansi-4.0.0.tgz", - "integrity": "sha512-4XaJ2zQdCzROZDivEVIDPkcQn8LMFSa8kj8Gxb/Lnwzv9A8VctNZ+lfivC/sV3ivW8ElJTERXZoPBRrZKkNKow==", - "dev": true, - "peer": true, - "dependencies": { - "ansi-regex": "^3.0.0" - }, - "engines": { - "node": ">=4" - } - }, - "node_modules/log-update/node_modules/wrap-ansi": { - "version": "3.0.1", - "resolved": "https://registry.npmjs.org/wrap-ansi/-/wrap-ansi-3.0.1.tgz", - "integrity": "sha512-iXR3tDXpbnTpzjKSylUJRkLuOrEC7hwEB221cgn6wtF8wpmz28puFXAEfPT5zrjM3wahygB//VuWEr1vTkDcNQ==", - "dev": true, - "peer": true, - "dependencies": { - "string-width": "^2.1.1", - "strip-ansi": "^4.0.0" - }, - "engines": { - "node": ">=4" - } - }, - "node_modules/lower-case": { - "version": "2.0.2", - "resolved": "https://registry.npmjs.org/lower-case/-/lower-case-2.0.2.tgz", - "integrity": "sha512-7fm3l3NAF9WfN6W3JOmf5drwpVqX78JtoGJ3A6W0a6ZnldM41w2fV5D490psKFTpMds8TJse/eHLFFsNHHjHgg==", - "dev": true, - "peer": true, - "dependencies": { - "tslib": "^2.0.3" - } - }, - "node_modules/lru-cache": { - "version": "5.1.1", - "resolved": "https://registry.npmjs.org/lru-cache/-/lru-cache-5.1.1.tgz", - "integrity": "sha512-KpNARQA3Iwv+jTA0utUVVbrh+Jlrr1Fv0e56GGzAFOXN7dk/FviaDW8LHmK52DlcH4WP2n6gI8vN1aesBFgo9w==", - "dev": true, - "dependencies": { - "yallist": "^3.0.2" - } - }, "node_modules/magic-string": { "version": "0.30.17", "resolved": "https://registry.npmjs.org/magic-string/-/magic-string-0.30.17.tgz", @@ -7847,21 +2328,6 @@ "@jridgewell/sourcemap-codec": "^1.5.0" } }, - "node_modules/make-dir": { - "version": "3.1.0", - "resolved": "https://registry.npmjs.org/make-dir/-/make-dir-3.1.0.tgz", - "integrity": "sha512-g3FeP20LNwhALb/6Cz6Dd4F2ngze0jz7tbzrD2wAV+o9FeNHe4rL+yK2md0J/fiSf1sa1ADhXqi5+oVwOM/eGw==", - "dev": true, - "dependencies": { - "semver": "^6.0.0" - }, - "engines": { - "node": ">=8" - }, - "funding": { - "url": "https://github.com/sponsors/sindresorhus" - } - }, "node_modules/marked": { "version": "15.0.12", "resolved": "https://registry.npmjs.org/marked/-/marked-15.0.12.tgz", @@ -7873,233 +2339,6 @@ "node": ">= 18" } }, - "node_modules/math-intrinsics": { - "version": "1.1.0", - "resolved": "https://registry.npmjs.org/math-intrinsics/-/math-intrinsics-1.1.0.tgz", - "integrity": "sha512-/IXtbwEk5HTPyEwyKX6hGkYXxM9nbj64B+ilVJnC/R6B0pH5G4V3b0pVbL7DBj4tkhBAppbQUlf6F6Xl9LHu1g==", - "dev": true, - "engines": { - "node": ">= 0.4" - } - }, - "node_modules/mdn-data": { - "version": "2.0.14", - "resolved": "https://registry.npmjs.org/mdn-data/-/mdn-data-2.0.14.tgz", - "integrity": "sha512-dn6wd0uw5GsdswPFfsgMp5NSB0/aDe6fK94YJV/AJDYXL6HVLWBsxeq7js7Ad+mU2K9LAlwpk6kN2D5mwCPVow==", - "dev": true, 
- "peer": true - }, - "node_modules/media-typer": { - "version": "0.3.0", - "resolved": "https://registry.npmjs.org/media-typer/-/media-typer-0.3.0.tgz", - "integrity": "sha512-dq+qelQ9akHpcOl/gUVRTxVIOkAJ1wR3QAvb4RsVjS8oVoFjDGTc679wJYmUmknUF5HwMLOgb5O+a3KxfWapPQ==", - "dev": true, - "peer": true, - "engines": { - "node": ">= 0.6" - } - }, - "node_modules/memfs": { - "version": "3.5.3", - "resolved": "https://registry.npmjs.org/memfs/-/memfs-3.5.3.tgz", - "integrity": "sha512-UERzLsxzllchadvbPs5aolHh65ISpKpM+ccLbOJ8/vvpBKmAWf+la7dXFy7Mr0ySHbdHrFv5kGFCUHHe6GFEmw==", - "dev": true, - "peer": true, - "dependencies": { - "fs-monkey": "^1.0.4" - }, - "engines": { - "node": ">= 4.0.0" - } - }, - "node_modules/merge-descriptors": { - "version": "1.0.3", - "resolved": "https://registry.npmjs.org/merge-descriptors/-/merge-descriptors-1.0.3.tgz", - "integrity": "sha512-gaNvAS7TZ897/rVaZ0nMtAyxNyi/pdbjbAwUpFQpN70GqnVfOiXpeUUMKRBmzXaSQ8DdTX4/0ms62r2K+hE6mQ==", - "dev": true, - "peer": true, - "funding": { - "url": "https://github.com/sponsors/sindresorhus" - } - }, - "node_modules/merge-source-map": { - "version": "1.1.0", - "resolved": "https://registry.npmjs.org/merge-source-map/-/merge-source-map-1.1.0.tgz", - "integrity": "sha512-Qkcp7P2ygktpMPh2mCQZaf3jhN6D3Z/qVZHSdWvQ+2Ef5HgRAPBO57A77+ENm0CPx2+1Ce/MYKi3ymqdfuqibw==", - "dev": true, - "peer": true, - "dependencies": { - "source-map": "^0.6.1" - } - }, - "node_modules/merge-stream": { - "version": "2.0.0", - "resolved": "https://registry.npmjs.org/merge-stream/-/merge-stream-2.0.0.tgz", - "integrity": "sha512-abv/qOcuPfk3URPfDzmZU1LKmuw8kT+0nIHvKrKgFrwifol/doWcdA4ZqsWQ8ENrFKkd67Mfpo/LovbIUsbt3w==", - "devOptional": true - }, - "node_modules/merge2": { - "version": "1.4.1", - "resolved": "https://registry.npmjs.org/merge2/-/merge2-1.4.1.tgz", - "integrity": "sha512-8q7VEgMJW4J8tcfVPy8g09NcQwZdbwFEqhe/WZkoIzjn/3TGDwtOCYtXGxA3O8tPzpczCCDgv+P2P5y00ZJOOg==", - "dev": true, - "peer": true, - "engines": { - "node": ">= 8" - } - }, - "node_modules/methods": { - "version": "1.1.2", - "resolved": "https://registry.npmjs.org/methods/-/methods-1.1.2.tgz", - "integrity": "sha512-iclAHeNqNm68zFtnZ0e+1L2yUIdvzNoauKU4WBA3VvH/vPFieF7qfRlwUZU+DA9P9bPXIS90ulxoUoCH23sV2w==", - "dev": true, - "peer": true, - "engines": { - "node": ">= 0.6" - } - }, - "node_modules/micromatch": { - "version": "4.0.8", - "resolved": "https://registry.npmjs.org/micromatch/-/micromatch-4.0.8.tgz", - "integrity": "sha512-PXwfBhYu0hBCPw8Dn0E+WDYb7af3dSLVWKi3HGv84IdF4TyFoC0ysxFd0Goxw7nSv4T/PzEJQxsYsEiFCKo2BA==", - "dev": true, - "peer": true, - "dependencies": { - "braces": "^3.0.3", - "picomatch": "^2.3.1" - }, - "engines": { - "node": ">=8.6" - } - }, - "node_modules/mime": { - "version": "1.6.0", - "resolved": "https://registry.npmjs.org/mime/-/mime-1.6.0.tgz", - "integrity": "sha512-x0Vn8spI+wuJ1O6S7gnbaQg8Pxh4NNHb7KSINmEWKiPE4RKOplvijn+NkmYmmRgP68mc70j2EbeTFRsrswaQeg==", - "dev": true, - "peer": true, - "bin": { - "mime": "cli.js" - }, - "engines": { - "node": ">=4" - } - }, - "node_modules/mime-db": { - "version": "1.52.0", - "resolved": "https://registry.npmjs.org/mime-db/-/mime-db-1.52.0.tgz", - "integrity": "sha512-sPU4uV7dYlvtWJxwwxHD0PuihVNiE7TyAbQ5SWxDCB9mUYvOgroQOwYQQOKPJ8CIbE+1ETVlOoK1UC2nU3gYvg==", - "devOptional": true, - "engines": { - "node": ">= 0.6" - } - }, - "node_modules/mime-types": { - "version": "2.1.35", - "resolved": "https://registry.npmjs.org/mime-types/-/mime-types-2.1.35.tgz", - "integrity": 
"sha512-ZDY+bPm5zTTF+YpCrAU9nK0UgICYPT0QtT1NZWFv4s++TNkcgVaT0g6+4R2uI4MjQjzysHB1zxuWL50hzaeXiw==", - "devOptional": true, - "dependencies": { - "mime-db": "1.52.0" - }, - "engines": { - "node": ">= 0.6" - } - }, - "node_modules/mimic-fn": { - "version": "2.1.0", - "resolved": "https://registry.npmjs.org/mimic-fn/-/mimic-fn-2.1.0.tgz", - "integrity": "sha512-OqbOk5oEQeAZ8WXWydlu9HJjz9WVdEIvamMCcXmuqUYjTknH/sqsWvhQ3vgwKFRR1HpjvNBKQ37nbJgYzGqGcg==", - "dev": true, - "engines": { - "node": ">=6" - } - }, - "node_modules/mini-css-extract-plugin": { - "version": "2.9.2", - "resolved": "https://registry.npmjs.org/mini-css-extract-plugin/-/mini-css-extract-plugin-2.9.2.tgz", - "integrity": "sha512-GJuACcS//jtq4kCtd5ii/M0SZf7OZRH+BxdqXZHaJfb8TJiVl+NgQRPwiYt2EuqeSkNydn/7vP+bcE27C5mb9w==", - "dev": true, - "peer": true, - "dependencies": { - "schema-utils": "^4.0.0", - "tapable": "^2.2.1" - }, - "engines": { - "node": ">= 12.13.0" - }, - "funding": { - "type": "opencollective", - "url": "https://opencollective.com/webpack" - }, - "peerDependencies": { - "webpack": "^5.0.0" - } - }, - "node_modules/mini-css-extract-plugin/node_modules/ajv": { - "version": "8.17.1", - "resolved": "https://registry.npmjs.org/ajv/-/ajv-8.17.1.tgz", - "integrity": "sha512-B/gBuNg5SiMTrPkC+A2+cW0RszwxYmn6VYxB/inlBStS5nx6xHIt/ehKRhIMhqusl7a8LjQoZnjCs5vhwxOQ1g==", - "dev": true, - "peer": true, - "dependencies": { - "fast-deep-equal": "^3.1.3", - "fast-uri": "^3.0.1", - "json-schema-traverse": "^1.0.0", - "require-from-string": "^2.0.2" - }, - "funding": { - "type": "github", - "url": "https://github.com/sponsors/epoberezkin" - } - }, - "node_modules/mini-css-extract-plugin/node_modules/ajv-keywords": { - "version": "5.1.0", - "resolved": "https://registry.npmjs.org/ajv-keywords/-/ajv-keywords-5.1.0.tgz", - "integrity": "sha512-YCS/JNFAUyr5vAuhk1DWm1CBxRHW9LbJ2ozWeemrIqpbsqKjHVxYPyi5GC0rjZIT5JxJ3virVTS8wk4i/Z+krw==", - "dev": true, - "peer": true, - "dependencies": { - "fast-deep-equal": "^3.1.3" - }, - "peerDependencies": { - "ajv": "^8.8.2" - } - }, - "node_modules/mini-css-extract-plugin/node_modules/json-schema-traverse": { - "version": "1.0.0", - "resolved": "https://registry.npmjs.org/json-schema-traverse/-/json-schema-traverse-1.0.0.tgz", - "integrity": "sha512-NM8/P9n3XjXhIZn1lLhkFaACTOURQXjWhV4BA/RnOv8xvgqtqpAX9IO4mRQxSx1Rlo4tqzeqb0sOlruaOy3dug==", - "dev": true, - "peer": true - }, - "node_modules/mini-css-extract-plugin/node_modules/schema-utils": { - "version": "4.3.2", - "resolved": "https://registry.npmjs.org/schema-utils/-/schema-utils-4.3.2.tgz", - "integrity": "sha512-Gn/JaSk/Mt9gYubxTtSn/QCV4em9mpAPiR1rqy/Ocu19u/G9J5WWdNoUT4SiV6mFC3y6cxyFcFwdzPM3FgxGAQ==", - "dev": true, - "peer": true, - "dependencies": { - "@types/json-schema": "^7.0.9", - "ajv": "^8.9.0", - "ajv-formats": "^2.1.1", - "ajv-keywords": "^5.1.0" - }, - "engines": { - "node": ">= 10.13.0" - }, - "funding": { - "type": "opencollective", - "url": "https://opencollective.com/webpack" - } - }, - "node_modules/minimalistic-assert": { - "version": "1.0.1", - "resolved": "https://registry.npmjs.org/minimalistic-assert/-/minimalistic-assert-1.0.1.tgz", - "integrity": "sha512-UtJcAD4yEaGtjPezWuO9wC4nwUnVH/8/Im3yEHQP4b67cXlD/Qr9hdITCU1xDbSEXg2XKNaP8jsReV7vQd00/A==", - "dev": true, - "peer": true - }, "node_modules/minimatch": { "version": "3.1.2", "resolved": "https://registry.npmjs.org/minimatch/-/minimatch-3.1.2.tgz", @@ -8112,96 +2351,11 @@ "node": "*" } }, - "node_modules/minimist": { - "version": "1.2.8", - "resolved": 
"https://registry.npmjs.org/minimist/-/minimist-1.2.8.tgz", - "integrity": "sha512-2yyAR8qBkN3YuheJanUpWC5U3bb5osDywNB8RzDVlDwDHbocAJveqqj1u8+SVD7jkWT4yvsHCpWqqWqAxb0zCA==", - "dev": true, - "peer": true, - "funding": { - "url": "https://github.com/sponsors/ljharb" - } - }, - "node_modules/minipass": { - "version": "3.3.6", - "resolved": "https://registry.npmjs.org/minipass/-/minipass-3.3.6.tgz", - "integrity": "sha512-DxiNidxSEK+tHG6zOIklvNOwm3hvCrbUrdtzY74U6HKTJxvIDfOUL5W5P2Ghd3DTkhhKPYGqeNUIh5qcM4YBfw==", - "dev": true, - "peer": true, - "dependencies": { - "yallist": "^4.0.0" - }, - "engines": { - "node": ">=8" - } - }, - "node_modules/minipass/node_modules/yallist": { - "version": "4.0.0", - "resolved": "https://registry.npmjs.org/yallist/-/yallist-4.0.0.tgz", - "integrity": "sha512-3wdGidZyq5PB084XLES5TpOSRA3wjXAlIWMhum2kRcv/41Sn2emQ0dycQW4uZXLejwKvg6EsvbdlVL+FYEct7A==", - "dev": true, - "peer": true - }, - "node_modules/mkdirp": { - "version": "1.0.4", - "resolved": "https://registry.npmjs.org/mkdirp/-/mkdirp-1.0.4.tgz", - "integrity": "sha512-vVqVZQyf3WLx2Shd0qJ9xuvqgAyKPLAiqITEtqW0oIUjzo3PePDd6fW9iFz30ef7Ysp/oiWqbhszeGWW2T6Gzw==", - "devOptional": true, - "bin": { - "mkdirp": "bin/cmd.js" - }, - "engines": { - "node": ">=10" - } - }, - "node_modules/module-alias": { - "version": "2.2.3", - "resolved": "https://registry.npmjs.org/module-alias/-/module-alias-2.2.3.tgz", - "integrity": "sha512-23g5BFj4zdQL/b6tor7Ji+QY4pEfNH784BMslY9Qb0UnJWRAt+lQGLYmRaM0KDBwIG23ffEBELhZDP2rhi9f/Q==", - "dev": true, - "peer": true - }, - "node_modules/mrmime": { - "version": "2.0.1", - "resolved": "https://registry.npmjs.org/mrmime/-/mrmime-2.0.1.tgz", - "integrity": "sha512-Y3wQdFg2Va6etvQ5I82yUhGdsKrcYox6p7FfL1LbK2J4V01F9TGlepTIhnK24t7koZibmg82KGglhA1XK5IsLQ==", - "dev": true, - "peer": true, - "engines": { - "node": ">=10" - } - }, "node_modules/ms": { "version": "2.1.3", "resolved": "https://registry.npmjs.org/ms/-/ms-2.1.3.tgz", "integrity": "sha512-6FlzubTLZG3J2a/NVCAleEhjzq5oxgHyaCU9yYXvcLsvoVaHJq/s5xXI6/XXP6tz7R9xAOtHnSO/tXtF3WRTlA==", - "dev": true - }, - "node_modules/multicast-dns": { - "version": "7.2.5", - "resolved": "https://registry.npmjs.org/multicast-dns/-/multicast-dns-7.2.5.tgz", - "integrity": "sha512-2eznPJP8z2BFLX50tf0LuODrpINqP1RVIm/CObbTcBRITQgmC/TjcREF1NeTBzIcR5XO/ukWo+YHOjBbFwIupg==", - "dev": true, - "peer": true, - "dependencies": { - "dns-packet": "^5.2.2", - "thunky": "^1.0.2" - }, - "bin": { - "multicast-dns": "cli.js" - } - }, - "node_modules/mz": { - "version": "2.7.0", - "resolved": "https://registry.npmjs.org/mz/-/mz-2.7.0.tgz", - "integrity": "sha512-z81GNO7nnYMEhrGh9LeymoE4+Yr0Wn5McHIZMK5cfQCl+NDX08sCZgUc9/6MHni9IWuFLm1Z3HTCXu2z9fN62Q==", - "dev": true, - "peer": true, - "dependencies": { - "any-promise": "^1.0.0", - "object-assign": "^4.0.1", - "thenify-all": "^1.0.0" - } + "devOptional": true }, "node_modules/nanoid": { "version": "3.3.11", @@ -8226,150 +2380,6 @@ "integrity": "sha512-OWND8ei3VtNC9h7V60qff3SVobHr996CTwgxubgyQYEpg290h9J0buyECNNJexkFm5sOajh5G116RYA1c8ZMSw==", "dev": true }, - "node_modules/negotiator": { - "version": "0.6.4", - "resolved": "https://registry.npmjs.org/negotiator/-/negotiator-0.6.4.tgz", - "integrity": "sha512-myRT3DiWPHqho5PrJaIRyaMv2kgYf0mUVgBNOYMuCH5Ki1yEiQaf/ZJuQ62nvpc44wL5WDbTX7yGJi1Neevw8w==", - "dev": true, - "peer": true, - "engines": { - "node": ">= 0.6" - } - }, - "node_modules/neo-async": { - "version": "2.6.2", - "resolved": "https://registry.npmjs.org/neo-async/-/neo-async-2.6.2.tgz", - "integrity": 
"sha512-Yd3UES5mWCSqR+qNT93S3UoYUkqAZ9lLg8a7g9rimsWmYGK8cVToA4/sF3RrshdyV3sAGMXVUmpMYOw+dLpOuw==", - "devOptional": true - }, - "node_modules/nice-try": { - "version": "1.0.5", - "resolved": "https://registry.npmjs.org/nice-try/-/nice-try-1.0.5.tgz", - "integrity": "sha512-1nh45deeb5olNY7eX82BkPO7SSxR5SSYJiPTrTdFUVYwAl8CKMA5N9PjTYkHiRjisVcxcQ1HXdLhx2qxxJzLNQ==", - "dev": true - }, - "node_modules/no-case": { - "version": "3.0.4", - "resolved": "https://registry.npmjs.org/no-case/-/no-case-3.0.4.tgz", - "integrity": "sha512-fgAN3jGAh+RoxUGZHTSOLJIqUc2wmoBwGR4tbpNAKmmovFoWq0OdRkb0VkldReO2a2iBT/OEulG9XSUc10r3zg==", - "dev": true, - "peer": true, - "dependencies": { - "lower-case": "^2.0.2", - "tslib": "^2.0.3" - } - }, - "node_modules/node-fetch": { - "version": "2.7.0", - "resolved": "https://registry.npmjs.org/node-fetch/-/node-fetch-2.7.0.tgz", - "integrity": "sha512-c4FRfUm/dbcWZ7U+1Wq0AwCyFL+3nt2bEw05wfxSz+DWpWsitgmSgYmy2dQdWyKC1694ELPqMs/YzUSNozLt8A==", - "dev": true, - "dependencies": { - "whatwg-url": "^5.0.0" - }, - "engines": { - "node": "4.x || >=6.0.0" - }, - "peerDependencies": { - "encoding": "^0.1.0" - }, - "peerDependenciesMeta": { - "encoding": { - "optional": true - } - } - }, - "node_modules/node-forge": { - "version": "1.3.1", - "resolved": "https://registry.npmjs.org/node-forge/-/node-forge-1.3.1.tgz", - "integrity": "sha512-dPEtOeMvF9VMcYV/1Wb8CPoVAXtp6MKMlcbAt4ddqmGqUJ6fQZFXkNZNkNlfevtNkGtaSoXf/vNNNSvgrdXwtA==", - "dev": true, - "peer": true, - "engines": { - "node": ">= 6.13.0" - } - }, - "node_modules/node-releases": { - "version": "2.0.19", - "resolved": "https://registry.npmjs.org/node-releases/-/node-releases-2.0.19.tgz", - "integrity": "sha512-xxOWJsBKtzAq7DY0J+DTzuz58K8e7sJbdgwkbMWQe8UYB6ekmsQ45q0M/tJDsGaZmbC+l7n57UV8Hl5tHxO9uw==", - "devOptional": true - }, - "node_modules/normalize-package-data": { - "version": "2.5.0", - "resolved": "https://registry.npmjs.org/normalize-package-data/-/normalize-package-data-2.5.0.tgz", - "integrity": "sha512-/5CMN3T0R4XTj4DcGaexo+roZSdSFW/0AOOTROrjxzCG1wrWXEsGbRKevjlIL+ZDE4sZlJr5ED4YW0yqmkK+eA==", - "dev": true, - "dependencies": { - "hosted-git-info": "^2.1.4", - "resolve": "^1.10.0", - "semver": "2 || 3 || 4 || 5", - "validate-npm-package-license": "^3.0.1" - } - }, - "node_modules/normalize-package-data/node_modules/semver": { - "version": "5.7.2", - "resolved": "https://registry.npmjs.org/semver/-/semver-5.7.2.tgz", - "integrity": "sha512-cBznnQ9KjJqU67B52RMC65CMarK2600WFnbkcaiwWq3xy/5haFJlshgnpjovMVJ+Hff49d8GEn0b87C5pDQ10g==", - "dev": true, - "bin": { - "semver": "bin/semver" - } - }, - "node_modules/normalize-path": { - "version": "3.0.0", - "resolved": "https://registry.npmjs.org/normalize-path/-/normalize-path-3.0.0.tgz", - "integrity": "sha512-6eZs5Ls3WtCisHWp9S2GUy8dqkpGi4BVSz3GaqiE6ezub0512ESztXUwUB6C6IKbQkY2Pnb/mD4WYojCRwcwLA==", - "dev": true, - "peer": true, - "engines": { - "node": ">=0.10.0" - } - }, - "node_modules/normalize-range": { - "version": "0.1.2", - "resolved": "https://registry.npmjs.org/normalize-range/-/normalize-range-0.1.2.tgz", - "integrity": "sha512-bdok/XvKII3nUpklnV6P2hxtMNrCboOjAcyBuQnWEhO665FwrSNRxU+AqpsyvO6LgGYPspN+lu5CLtw4jPRKNA==", - "dev": true, - "peer": true, - "engines": { - "node": ">=0.10.0" - } - }, - "node_modules/normalize-url": { - "version": "6.1.0", - "resolved": "https://registry.npmjs.org/normalize-url/-/normalize-url-6.1.0.tgz", - "integrity": "sha512-DlL+XwOy3NxAQ8xuC0okPgK46iuVNAK01YN7RueYBqqFeGsBjV9XmCAzAdgt+667bCl5kPh9EqKKDwnaPG1I7A==", - "dev": true, - "peer": true, 
- "engines": { - "node": ">=10" - }, - "funding": { - "url": "https://github.com/sponsors/sindresorhus" - } - }, - "node_modules/npm-run-path": { - "version": "2.0.2", - "resolved": "https://registry.npmjs.org/npm-run-path/-/npm-run-path-2.0.2.tgz", - "integrity": "sha512-lJxZYlT4DW/bRUtFh1MQIWqmLwQfAxnqWG4HhEdjMlkrJYnJn0Jrr2u3mgxqaWsdiBc76TYkTG/mhrnYTuzfHw==", - "dev": true, - "dependencies": { - "path-key": "^2.0.0" - }, - "engines": { - "node": ">=4" - } - }, - "node_modules/npm-run-path/node_modules/path-key": { - "version": "2.0.1", - "resolved": "https://registry.npmjs.org/path-key/-/path-key-2.0.1.tgz", - "integrity": "sha512-fEHGKCSmUSDPv4uoj8AlD+joPlq3peND+HRYyxFz4KPw4z926S/b8rIuFs2FYJg3BwsxJf6A9/3eIdLaYC+9Dw==", - "dev": true, - "engines": { - "node": ">=4" - } - }, "node_modules/nth-check": { "version": "2.1.1", "resolved": "https://registry.npmjs.org/nth-check/-/nth-check-2.1.1.tgz", @@ -8382,191 +2392,6 @@ "url": "https://github.com/fb55/nth-check?sponsor=1" } }, - "node_modules/null-loader": { - "version": "4.0.1", - "resolved": "https://registry.npmjs.org/null-loader/-/null-loader-4.0.1.tgz", - "integrity": "sha512-pxqVbi4U6N26lq+LmgIbB5XATP0VdZKOG25DhHi8btMmJJefGArFyDg1yc4U3hWCJbMqSrw0qyrz1UQX+qYXqg==", - "devOptional": true, - "dependencies": { - "loader-utils": "^2.0.0", - "schema-utils": "^3.0.0" - }, - "engines": { - "node": ">= 10.13.0" - }, - "funding": { - "type": "opencollective", - "url": "https://opencollective.com/webpack" - }, - "peerDependencies": { - "webpack": "^4.0.0 || ^5.0.0" - } - }, - "node_modules/null-loader/node_modules/loader-utils": { - "version": "2.0.4", - "resolved": "https://registry.npmjs.org/loader-utils/-/loader-utils-2.0.4.tgz", - "integrity": "sha512-xXqpXoINfFhgua9xiqD8fPFHgkoq1mmmpE92WlDbm9rNRd/EbRb+Gqf908T2DMfuHjjJlksiK2RbHVOdD/MqSw==", - "devOptional": true, - "dependencies": { - "big.js": "^5.2.2", - "emojis-list": "^3.0.0", - "json5": "^2.1.2" - }, - "engines": { - "node": ">=8.9.0" - } - }, - "node_modules/null-loader/node_modules/schema-utils": { - "version": "3.3.0", - "resolved": "https://registry.npmjs.org/schema-utils/-/schema-utils-3.3.0.tgz", - "integrity": "sha512-pN/yOAvcC+5rQ5nERGuwrjLlYvLTbCibnZ1I7B1LaiAz9BRBlE9GMgE/eqV30P7aJQUf7Ddimy/RsbYO/GrVGg==", - "devOptional": true, - "dependencies": { - "@types/json-schema": "^7.0.8", - "ajv": "^6.12.5", - "ajv-keywords": "^3.5.2" - }, - "engines": { - "node": ">= 10.13.0" - }, - "funding": { - "type": "opencollective", - "url": "https://opencollective.com/webpack" - } - }, - "node_modules/object-assign": { - "version": "4.1.1", - "resolved": "https://registry.npmjs.org/object-assign/-/object-assign-4.1.1.tgz", - "integrity": "sha512-rJgTQnkUnH1sFw8yT6VSU3zD3sWmu6sZhIseY8VX+GRu3P6F7Fu+JNDoXfklElbLJSnc3FUQHVe4cU5hj+BcUg==", - "dev": true, - "peer": true, - "engines": { - "node": ">=0.10.0" - } - }, - "node_modules/object-inspect": { - "version": "1.13.4", - "resolved": "https://registry.npmjs.org/object-inspect/-/object-inspect-1.13.4.tgz", - "integrity": "sha512-W67iLl4J2EXEGTbfeHCffrjDfitvLANg0UlX3wFUUSTx92KXRFegMHUVgSqE+wvhAbi4WqjGg9czysTV2Epbew==", - "dev": true, - "peer": true, - "engines": { - "node": ">= 0.4" - }, - "funding": { - "url": "https://github.com/sponsors/ljharb" - } - }, - "node_modules/object-keys": { - "version": "1.1.1", - "resolved": "https://registry.npmjs.org/object-keys/-/object-keys-1.1.1.tgz", - "integrity": "sha512-NuAESUOUMrlIXOfHKzD6bpPu3tYt3xvjNdRIQ+FeT0lNb4K8WR70CaDxhuNguS2XG+GjkyMwOzsN5ZktImfhLA==", - "dev": true, - "engines": { - "node": ">= 0.4" - 
} - }, - "node_modules/object.assign": { - "version": "4.1.7", - "resolved": "https://registry.npmjs.org/object.assign/-/object.assign-4.1.7.tgz", - "integrity": "sha512-nK28WOo+QIjBkDduTINE4JkF/UJJKyf2EJxvJKfblDpyg0Q+pkOHNTL0Qwy6NP6FhE/EnzV73BxxqcJaXY9anw==", - "dev": true, - "dependencies": { - "call-bind": "^1.0.8", - "call-bound": "^1.0.3", - "define-properties": "^1.2.1", - "es-object-atoms": "^1.0.0", - "has-symbols": "^1.1.0", - "object-keys": "^1.1.1" - }, - "engines": { - "node": ">= 0.4" - }, - "funding": { - "url": "https://github.com/sponsors/ljharb" - } - }, - "node_modules/obuf": { - "version": "1.1.2", - "resolved": "https://registry.npmjs.org/obuf/-/obuf-1.1.2.tgz", - "integrity": "sha512-PX1wu0AmAdPqOL1mWhqmlOd8kOIZQwGZw6rh7uby9fTc5lhaOWFLX3I6R1hrF9k3zUY40e6igsLGkDXK92LJNg==", - "dev": true, - "peer": true - }, - "node_modules/on-finished": { - "version": "2.4.1", - "resolved": "https://registry.npmjs.org/on-finished/-/on-finished-2.4.1.tgz", - "integrity": "sha512-oVlzkg3ENAhCk2zdv7IJwd/QUD4z2RxRwpkcGY8psCVcCYZNq4wYnVWALHM+brtuJjePWiYF/ClmuDr8Ch5+kg==", - "dev": true, - "peer": true, - "dependencies": { - "ee-first": "1.1.1" - }, - "engines": { - "node": ">= 0.8" - } - }, - "node_modules/on-headers": { - "version": "1.0.2", - "resolved": "https://registry.npmjs.org/on-headers/-/on-headers-1.0.2.tgz", - "integrity": "sha512-pZAE+FJLoyITytdqK0U5s+FIpjN0JP3OzFi/u8Rx+EV5/W+JTWGXG8xFzevE7AjBfDqHv/8vL8qQsIhHnqRkrA==", - "dev": true, - "peer": true, - "engines": { - "node": ">= 0.8" - } - }, - "node_modules/once": { - "version": "1.4.0", - "resolved": "https://registry.npmjs.org/once/-/once-1.4.0.tgz", - "integrity": "sha512-lNaJgI+2Q5URQBkccEKHTQOPaXdUxnZZElQTZY0MFUAuaEqe1E+Nyvgdz/aIyNi6Z9MzO5dv1H8n58/GELp3+w==", - "dev": true, - "dependencies": { - "wrappy": "1" - } - }, - "node_modules/onetime": { - "version": "5.1.2", - "resolved": "https://registry.npmjs.org/onetime/-/onetime-5.1.2.tgz", - "integrity": "sha512-kbpaSSGJTWdAY5KPVeMOKXSrPtr8C8C7wodJbcsd51jRnmD+GZu8Y0VoU6Dm5Z4vWr0Ig/1NKuWRKf7j5aaYSg==", - "dev": true, - "dependencies": { - "mimic-fn": "^2.1.0" - }, - "engines": { - "node": ">=6" - }, - "funding": { - "url": "https://github.com/sponsors/sindresorhus" - } - }, - "node_modules/open": { - "version": "8.4.2", - "resolved": "https://registry.npmjs.org/open/-/open-8.4.2.tgz", - "integrity": "sha512-7x81NCL719oNbsq/3mh+hVrAWmFuEYUqrq/Iw3kUzH8ReypT9QQ0BLoJS7/G9k6N81XjW4qHWtjWwe/9eLy1EQ==", - "dev": true, - "dependencies": { - "define-lazy-prop": "^2.0.0", - "is-docker": "^2.1.1", - "is-wsl": "^2.2.0" - }, - "engines": { - "node": ">=12" - }, - "funding": { - "url": "https://github.com/sponsors/sindresorhus" - } - }, - "node_modules/opener": { - "version": "1.5.2", - "resolved": "https://registry.npmjs.org/opener/-/opener-1.5.2.tgz", - "integrity": "sha512-ur5UIdyw5Y7yEj9wLzhqXiy6GZ3Mwx0yGI+5sMn2r0N0v3cKJvUmFH5yPP+WXh9e0xfyzyJX95D8l088DNFj7A==", - "dev": true, - "peer": true, - "bin": { - "opener": "bin/opener-bin.js" - } - }, "node_modules/optionator": { "version": "0.9.4", "resolved": "https://registry.npmjs.org/optionator/-/optionator-0.9.4.tgz", @@ -8584,54 +2409,6 @@ "node": ">= 0.8.0" } }, - "node_modules/ora": { - "version": "5.4.1", - "resolved": "https://registry.npmjs.org/ora/-/ora-5.4.1.tgz", - "integrity": "sha512-5b6Y85tPxZZ7QytO+BQzysW31HJku27cRIlkbAXaNx+BdcVi+LlRFmVXzeF6a7JCwJpyw5c4b+YSVImQIrBpuQ==", - "dev": true, - "dependencies": { - "bl": "^4.1.0", - "chalk": "^4.1.0", - "cli-cursor": "^3.1.0", - "cli-spinners": "^2.5.0", - "is-interactive": "^1.0.0", 
- "is-unicode-supported": "^0.1.0", - "log-symbols": "^4.1.0", - "strip-ansi": "^6.0.0", - "wcwidth": "^1.0.1" - }, - "engines": { - "node": ">=10" - }, - "funding": { - "url": "https://github.com/sponsors/sindresorhus" - } - }, - "node_modules/ora/node_modules/chalk": { - "version": "4.1.2", - "resolved": "https://registry.npmjs.org/chalk/-/chalk-4.1.2.tgz", - "integrity": "sha512-oKnbhFyRIXpUuez8iBMmyEa4nbj4IOQyuhc/wy9kY7/WVPcwIO9VA668Pu8RkO7+0G76SLROeyw9CpQ061i4mA==", - "dev": true, - "dependencies": { - "ansi-styles": "^4.1.0", - "supports-color": "^7.1.0" - }, - "engines": { - "node": ">=10" - }, - "funding": { - "url": "https://github.com/chalk/chalk?sponsor=1" - } - }, - "node_modules/p-finally": { - "version": "1.0.0", - "resolved": "https://registry.npmjs.org/p-finally/-/p-finally-1.0.0.tgz", - "integrity": "sha512-LICb2p9CB7FS+0eR1oqWnHhp0FljGLZCWBE9aix0Uye9W8LTQPwMTYVGWQWIw9RdQiDg4+epXQODwIYJtSJaow==", - "dev": true, - "engines": { - "node": ">=4" - } - }, "node_modules/p-limit": { "version": "3.1.0", "resolved": "https://registry.npmjs.org/p-limit/-/p-limit-3.1.0.tgz", @@ -8662,40 +2439,6 @@ "url": "https://github.com/sponsors/sindresorhus" } }, - "node_modules/p-retry": { - "version": "4.6.2", - "resolved": "https://registry.npmjs.org/p-retry/-/p-retry-4.6.2.tgz", - "integrity": "sha512-312Id396EbJdvRONlngUx0NydfrIQ5lsYu0znKVUzVvArzEIt08V1qhtyESbGVd1FGX7UKtiFp5uwKZdM8wIuQ==", - "dev": true, - "peer": true, - "dependencies": { - "@types/retry": "0.12.0", - "retry": "^0.13.1" - }, - "engines": { - "node": ">=8" - } - }, - "node_modules/p-try": { - "version": "2.2.0", - "resolved": "https://registry.npmjs.org/p-try/-/p-try-2.2.0.tgz", - "integrity": "sha512-R4nPAVTAU0B9D35/Gk3uJf/7XYbQcyohSKdvAxIRSNghFl4e71hVoGnBNQz9cWaXxO2I10KTC+3jMdvvoKw6dQ==", - "dev": true, - "engines": { - "node": ">=6" - } - }, - "node_modules/param-case": { - "version": "3.0.4", - "resolved": "https://registry.npmjs.org/param-case/-/param-case-3.0.4.tgz", - "integrity": "sha512-RXlj7zCYokReqWpOPH9oYivUzLYZ5vAPIfEmCTNViosC78F8F0H9y7T7gG2M39ymgutxF5gcFEsyZQSph9Bp3A==", - "dev": true, - "peer": true, - "dependencies": { - "dot-case": "^3.0.4", - "tslib": "^2.0.3" - } - }, "node_modules/parent-module": { "version": "1.0.1", "resolved": "https://registry.npmjs.org/parent-module/-/parent-module-1.0.1.tgz", @@ -8708,69 +2451,6 @@ "node": ">=6" } }, - "node_modules/parse-json": { - "version": "5.2.0", - "resolved": "https://registry.npmjs.org/parse-json/-/parse-json-5.2.0.tgz", - "integrity": "sha512-ayCKvm/phCGxOkYRSCM82iDwct8/EonSEgCSxWxD7ve6jHggsFl4fZVQBPRNgQoKiuV/odhFrGzQXZwbifC8Rg==", - "dev": true, - "dependencies": { - "@babel/code-frame": "^7.0.0", - "error-ex": "^1.3.1", - "json-parse-even-better-errors": "^2.3.0", - "lines-and-columns": "^1.1.6" - }, - "engines": { - "node": ">=8" - }, - "funding": { - "url": "https://github.com/sponsors/sindresorhus" - } - }, - "node_modules/parse5": { - "version": "5.1.1", - "resolved": "https://registry.npmjs.org/parse5/-/parse5-5.1.1.tgz", - "integrity": "sha512-ugq4DFI0Ptb+WWjAdOK16+u/nHfiIrcE+sh8kZMaM0WllQKLI9rOUq6c2b7cwPkXdzfQESqvoqK6ug7U/Yyzug==", - "dev": true, - "peer": true - }, - "node_modules/parse5-htmlparser2-tree-adapter": { - "version": "6.0.1", - "resolved": "https://registry.npmjs.org/parse5-htmlparser2-tree-adapter/-/parse5-htmlparser2-tree-adapter-6.0.1.tgz", - "integrity": "sha512-qPuWvbLgvDGilKc5BoicRovlT4MtYT6JfJyBOMDsKoiT+GiuP5qyrPCnR9HcPECIJJmZh5jRndyNThnhhb/vlA==", - "dev": true, - "peer": true, - "dependencies": { - "parse5": "^6.0.1" - } - }, 
- "node_modules/parse5-htmlparser2-tree-adapter/node_modules/parse5": { - "version": "6.0.1", - "resolved": "https://registry.npmjs.org/parse5/-/parse5-6.0.1.tgz", - "integrity": "sha512-Ofn/CTFzRGTTxwpNEs9PP93gXShHcTq255nzRYSKe8AkVpZY7e1fpmTfOyoIvjP5HG7Z2ZM7VS9PPhQGW2pOpw==", - "dev": true, - "peer": true - }, - "node_modules/parseurl": { - "version": "1.3.3", - "resolved": "https://registry.npmjs.org/parseurl/-/parseurl-1.3.3.tgz", - "integrity": "sha512-CiyeOxFT/JZyN5m0z9PfXw4SCBJ6Sygz1Dpl0wqjlhDEGGBP1GnsUVEL0p63hoG1fcj3fHynXi9NYO4nWOL+qQ==", - "dev": true, - "peer": true, - "engines": { - "node": ">= 0.8" - } - }, - "node_modules/pascal-case": { - "version": "3.1.2", - "resolved": "https://registry.npmjs.org/pascal-case/-/pascal-case-3.1.2.tgz", - "integrity": "sha512-uWlGT3YSnK9x3BQJaOdcZwrnV6hPpd8jFH1/ucpiLRPh/2zCVJKS19E4GvYHvaCcACn3foXZ0cLB9Wrx1KGe5g==", - "dev": true, - "peer": true, - "dependencies": { - "no-case": "^3.0.4", - "tslib": "^2.0.3" - } - }, "node_modules/path-exists": { "version": "4.0.0", "resolved": "https://registry.npmjs.org/path-exists/-/path-exists-4.0.0.tgz", @@ -8780,15 +2460,6 @@ "node": ">=8" } }, - "node_modules/path-is-absolute": { - "version": "1.0.1", - "resolved": "https://registry.npmjs.org/path-is-absolute/-/path-is-absolute-1.0.1.tgz", - "integrity": "sha512-AVbw3UJ2e9bq64vSaS9Am0fje1Pa8pbGqTTsmXfaIiMpnr5DlDhfJOuLj9Sf95ZPVDAUerDfEk88MPmPe7UCQg==", - "dev": true, - "engines": { - "node": ">=0.10.0" - } - }, "node_modules/path-key": { "version": "3.1.1", "resolved": "https://registry.npmjs.org/path-key/-/path-key-3.1.1.tgz", @@ -8798,129 +2469,27 @@ "node": ">=8" } }, - "node_modules/path-parse": { - "version": "1.0.7", - "resolved": "https://registry.npmjs.org/path-parse/-/path-parse-1.0.7.tgz", - "integrity": "sha512-LDJzPVEEEPR+y48z93A0Ed0yXb8pAByGWo/k5YYdYgpY2/2EsOsksJrq7lOHxryrVOn1ejG6oAp8ahvOIQD8sw==", - "dev": true - }, - "node_modules/path-to-regexp": { - "version": "0.1.12", - "resolved": "https://registry.npmjs.org/path-to-regexp/-/path-to-regexp-0.1.12.tgz", - "integrity": "sha512-RA1GjUVMnvYFxuqovrEqZoxxW5NUZqbwKtYz/Tt7nXerk0LbLblQmrsgdeOxV5SFHf0UDggjS/bSeOZwt1pmEQ==", - "dev": true, - "peer": true - }, - "node_modules/path-type": { - "version": "4.0.0", - "resolved": "https://registry.npmjs.org/path-type/-/path-type-4.0.0.tgz", - "integrity": "sha512-gDKb8aZMDeD/tZWs9P6+q0J9Mwkdl6xMV8TjnGP3qJVJ06bdMgkbBlLU8IdfOsIsFz2BW1rNVT3XuNEl8zPAvw==", - "dev": true, - "peer": true, - "engines": { - "node": ">=8" - } - }, "node_modules/picocolors": { "version": "1.1.1", "resolved": "https://registry.npmjs.org/picocolors/-/picocolors-1.1.1.tgz", "integrity": "sha512-xceH2snhtb5M9liqDsmEw56le376mTZkEX/jEb/RxNFyegNul7eNslCXP9FDj/Lcu0X8KEyMceP2ntpaHrDEVA==" }, "node_modules/picomatch": { - "version": "2.3.1", - "resolved": "https://registry.npmjs.org/picomatch/-/picomatch-2.3.1.tgz", - "integrity": "sha512-JU3teHTNjmE2VCGFzuY8EXzCDVwEqB2a8fsIvwaStHhAWJEeVd1o1QD80CU6+ZdEXXSLbSsuLwJjkCBWqRQUVA==", - "dev": true, - "peer": true, + "version": "4.0.3", + "resolved": "https://registry.npmjs.org/picomatch/-/picomatch-4.0.3.tgz", + "integrity": "sha512-5gTmgEY/sqK6gFXLIsQNH19lWb4ebPDLA4SdLP7dsWkIXHWlG66oPuVvXSGFPppYZz8ZDZq0dYYrbHfBCVUb1Q==", + "devOptional": true, "engines": { - "node": ">=8.6" + "node": ">=12" }, "funding": { "url": "https://github.com/sponsors/jonschlinkert" } }, - "node_modules/pkg-dir": { - "version": "4.2.0", - "resolved": "https://registry.npmjs.org/pkg-dir/-/pkg-dir-4.2.0.tgz", - "integrity": 
"sha512-HRDzbaKjC+AOWVXxAU/x54COGeIv9eb+6CkDSQoNTt4XyWoIJvuPsXizxu/Fr23EiekbtZwmh1IcIG/l/a10GQ==", - "dev": true, - "dependencies": { - "find-up": "^4.0.0" - }, - "engines": { - "node": ">=8" - } - }, - "node_modules/pkg-dir/node_modules/find-up": { - "version": "4.1.0", - "resolved": "https://registry.npmjs.org/find-up/-/find-up-4.1.0.tgz", - "integrity": "sha512-PpOwAdQ/YlXQ2vj8a3h8IipDuYRi3wceVQQGYWxNINccq40Anw7BlsEXCMbt1Zt+OLA6Fq9suIpIWD0OsnISlw==", - "dev": true, - "dependencies": { - "locate-path": "^5.0.0", - "path-exists": "^4.0.0" - }, - "engines": { - "node": ">=8" - } - }, - "node_modules/pkg-dir/node_modules/locate-path": { - "version": "5.0.0", - "resolved": "https://registry.npmjs.org/locate-path/-/locate-path-5.0.0.tgz", - "integrity": "sha512-t7hw9pI+WvuwNJXwk5zVHpyhIqzg2qTlklJOf0mVxGSbe3Fp2VieZcduNYjaLDoy6p9uGpQEGWG87WpMKlNq8g==", - "dev": true, - "dependencies": { - "p-locate": "^4.1.0" - }, - "engines": { - "node": ">=8" - } - }, - "node_modules/pkg-dir/node_modules/p-limit": { - "version": "2.3.0", - "resolved": "https://registry.npmjs.org/p-limit/-/p-limit-2.3.0.tgz", - "integrity": "sha512-//88mFWSJx8lxCzwdAABTJL2MyWB12+eIY7MDL2SqLmAkeKU9qxRvWuSyTjm3FUmpBEMuFfckAIqEaVGUDxb6w==", - "dev": true, - "dependencies": { - "p-try": "^2.0.0" - }, - "engines": { - "node": ">=6" - }, - "funding": { - "url": "https://github.com/sponsors/sindresorhus" - } - }, - "node_modules/pkg-dir/node_modules/p-locate": { - "version": "4.1.0", - "resolved": "https://registry.npmjs.org/p-locate/-/p-locate-4.1.0.tgz", - "integrity": "sha512-R79ZZ/0wAxKGu3oYMlz8jy/kbhsNrS7SKZ7PxEHBgJ5+F2mtFW2fK2cOtBh1cHYkQsbzFV7I+EoRKe6Yt0oK7A==", - "dev": true, - "dependencies": { - "p-limit": "^2.2.0" - }, - "engines": { - "node": ">=8" - } - }, - "node_modules/portfinder": { - "version": "1.0.37", - "resolved": "https://registry.npmjs.org/portfinder/-/portfinder-1.0.37.tgz", - "integrity": "sha512-yuGIEjDAYnnOex9ddMnKZEMFE0CcGo6zbfzDklkmT1m5z734ss6JMzN9rNB3+RR7iS+F10D4/BVIaXOyh8PQKw==", - "dev": true, - "peer": true, - "dependencies": { - "async": "^3.2.6", - "debug": "^4.3.6" - }, - "engines": { - "node": ">= 10.12" - } - }, "node_modules/postcss": { - "version": "8.5.4", - "resolved": "https://registry.npmjs.org/postcss/-/postcss-8.5.4.tgz", - "integrity": "sha512-QSa9EBe+uwlGTFmHsPKokv3B/oEMQZxfqW0QqNCyhpa6mB1afzulwn8hihglqAb2pOw+BJgNlmXQ8la2VeHB7w==", + "version": "8.5.6", + "resolved": "https://registry.npmjs.org/postcss/-/postcss-8.5.6.tgz", + "integrity": "sha512-3Ybi1tAuwAP9s0r1UQ2J4n5Y0G05bJkpUIO0/bI9MhwmD70S5aTWbXGBwxHrelT+XM1k6dM0pk+SwNkpTRN7Pg==", "funding": [ { "type": "opencollective", @@ -8944,532 +2513,6 @@ "node": "^10 || ^12 || >=14" } }, - "node_modules/postcss-calc": { - "version": "8.2.4", - "resolved": "https://registry.npmjs.org/postcss-calc/-/postcss-calc-8.2.4.tgz", - "integrity": "sha512-SmWMSJmB8MRnnULldx0lQIyhSNvuDl9HfrZkaqqE/WHAhToYsAvDq+yAsA/kIyINDszOp3Rh0GFoNuH5Ypsm3Q==", - "dev": true, - "peer": true, - "dependencies": { - "postcss-selector-parser": "^6.0.9", - "postcss-value-parser": "^4.2.0" - }, - "peerDependencies": { - "postcss": "^8.2.2" - } - }, - "node_modules/postcss-colormin": { - "version": "5.3.1", - "resolved": "https://registry.npmjs.org/postcss-colormin/-/postcss-colormin-5.3.1.tgz", - "integrity": "sha512-UsWQG0AqTFQmpBegeLLc1+c3jIqBNB0zlDGRWR+dQ3pRKJL1oeMzyqmH3o2PIfn9MBdNrVPWhDbT769LxCTLJQ==", - "dev": true, - "peer": true, - "dependencies": { - "browserslist": "^4.21.4", - "caniuse-api": "^3.0.0", - "colord": "^2.9.1", - "postcss-value-parser": "^4.2.0" - }, - 
"engines": { - "node": "^10 || ^12 || >=14.0" - }, - "peerDependencies": { - "postcss": "^8.2.15" - } - }, - "node_modules/postcss-convert-values": { - "version": "5.1.3", - "resolved": "https://registry.npmjs.org/postcss-convert-values/-/postcss-convert-values-5.1.3.tgz", - "integrity": "sha512-82pC1xkJZtcJEfiLw6UXnXVXScgtBrjlO5CBmuDQc+dlb88ZYheFsjTn40+zBVi3DkfF7iezO0nJUPLcJK3pvA==", - "dev": true, - "peer": true, - "dependencies": { - "browserslist": "^4.21.4", - "postcss-value-parser": "^4.2.0" - }, - "engines": { - "node": "^10 || ^12 || >=14.0" - }, - "peerDependencies": { - "postcss": "^8.2.15" - } - }, - "node_modules/postcss-discard-comments": { - "version": "5.1.2", - "resolved": "https://registry.npmjs.org/postcss-discard-comments/-/postcss-discard-comments-5.1.2.tgz", - "integrity": "sha512-+L8208OVbHVF2UQf1iDmRcbdjJkuBF6IS29yBDSiWUIzpYaAhtNl6JYnYm12FnkeCwQqF5LeklOu6rAqgfBZqQ==", - "dev": true, - "peer": true, - "engines": { - "node": "^10 || ^12 || >=14.0" - }, - "peerDependencies": { - "postcss": "^8.2.15" - } - }, - "node_modules/postcss-discard-duplicates": { - "version": "5.1.0", - "resolved": "https://registry.npmjs.org/postcss-discard-duplicates/-/postcss-discard-duplicates-5.1.0.tgz", - "integrity": "sha512-zmX3IoSI2aoenxHV6C7plngHWWhUOV3sP1T8y2ifzxzbtnuhk1EdPwm0S1bIUNaJ2eNbWeGLEwzw8huPD67aQw==", - "dev": true, - "peer": true, - "engines": { - "node": "^10 || ^12 || >=14.0" - }, - "peerDependencies": { - "postcss": "^8.2.15" - } - }, - "node_modules/postcss-discard-empty": { - "version": "5.1.1", - "resolved": "https://registry.npmjs.org/postcss-discard-empty/-/postcss-discard-empty-5.1.1.tgz", - "integrity": "sha512-zPz4WljiSuLWsI0ir4Mcnr4qQQ5e1Ukc3i7UfE2XcrwKK2LIPIqE5jxMRxO6GbI3cv//ztXDsXwEWT3BHOGh3A==", - "dev": true, - "peer": true, - "engines": { - "node": "^10 || ^12 || >=14.0" - }, - "peerDependencies": { - "postcss": "^8.2.15" - } - }, - "node_modules/postcss-discard-overridden": { - "version": "5.1.0", - "resolved": "https://registry.npmjs.org/postcss-discard-overridden/-/postcss-discard-overridden-5.1.0.tgz", - "integrity": "sha512-21nOL7RqWR1kasIVdKs8HNqQJhFxLsyRfAnUDm4Fe4t4mCWL9OJiHvlHPjcd8zc5Myu89b/7wZDnOSjFgeWRtw==", - "dev": true, - "peer": true, - "engines": { - "node": "^10 || ^12 || >=14.0" - }, - "peerDependencies": { - "postcss": "^8.2.15" - } - }, - "node_modules/postcss-loader": { - "version": "6.2.1", - "resolved": "https://registry.npmjs.org/postcss-loader/-/postcss-loader-6.2.1.tgz", - "integrity": "sha512-WbbYpmAaKcux/P66bZ40bpWsBucjx/TTgVVzRZ9yUO8yQfVBlameJ0ZGVaPfH64hNSBh63a+ICP5nqOpBA0w+Q==", - "dev": true, - "peer": true, - "dependencies": { - "cosmiconfig": "^7.0.0", - "klona": "^2.0.5", - "semver": "^7.3.5" - }, - "engines": { - "node": ">= 12.13.0" - }, - "funding": { - "type": "opencollective", - "url": "https://opencollective.com/webpack" - }, - "peerDependencies": { - "postcss": "^7.0.0 || ^8.0.1", - "webpack": "^5.0.0" - } - }, - "node_modules/postcss-loader/node_modules/semver": { - "version": "7.7.2", - "resolved": "https://registry.npmjs.org/semver/-/semver-7.7.2.tgz", - "integrity": "sha512-RF0Fw+rO5AMf9MAyaRXI4AV0Ulj5lMHqVxxdSgiVbixSCXoEmmX/jk0CuJw4+3SqroYO9VoUh+HcuJivvtJemA==", - "dev": true, - "peer": true, - "bin": { - "semver": "bin/semver.js" - }, - "engines": { - "node": ">=10" - } - }, - "node_modules/postcss-merge-longhand": { - "version": "5.1.7", - "resolved": "https://registry.npmjs.org/postcss-merge-longhand/-/postcss-merge-longhand-5.1.7.tgz", - "integrity": 
"sha512-YCI9gZB+PLNskrK0BB3/2OzPnGhPkBEwmwhfYk1ilBHYVAZB7/tkTHFBAnCrvBBOmeYyMYw3DMjT55SyxMBzjQ==", - "dev": true, - "peer": true, - "dependencies": { - "postcss-value-parser": "^4.2.0", - "stylehacks": "^5.1.1" - }, - "engines": { - "node": "^10 || ^12 || >=14.0" - }, - "peerDependencies": { - "postcss": "^8.2.15" - } - }, - "node_modules/postcss-merge-rules": { - "version": "5.1.4", - "resolved": "https://registry.npmjs.org/postcss-merge-rules/-/postcss-merge-rules-5.1.4.tgz", - "integrity": "sha512-0R2IuYpgU93y9lhVbO/OylTtKMVcHb67zjWIfCiKR9rWL3GUk1677LAqD/BcHizukdZEjT8Ru3oHRoAYoJy44g==", - "dev": true, - "peer": true, - "dependencies": { - "browserslist": "^4.21.4", - "caniuse-api": "^3.0.0", - "cssnano-utils": "^3.1.0", - "postcss-selector-parser": "^6.0.5" - }, - "engines": { - "node": "^10 || ^12 || >=14.0" - }, - "peerDependencies": { - "postcss": "^8.2.15" - } - }, - "node_modules/postcss-minify-font-values": { - "version": "5.1.0", - "resolved": "https://registry.npmjs.org/postcss-minify-font-values/-/postcss-minify-font-values-5.1.0.tgz", - "integrity": "sha512-el3mYTgx13ZAPPirSVsHqFzl+BBBDrXvbySvPGFnQcTI4iNslrPaFq4muTkLZmKlGk4gyFAYUBMH30+HurREyA==", - "dev": true, - "peer": true, - "dependencies": { - "postcss-value-parser": "^4.2.0" - }, - "engines": { - "node": "^10 || ^12 || >=14.0" - }, - "peerDependencies": { - "postcss": "^8.2.15" - } - }, - "node_modules/postcss-minify-gradients": { - "version": "5.1.1", - "resolved": "https://registry.npmjs.org/postcss-minify-gradients/-/postcss-minify-gradients-5.1.1.tgz", - "integrity": "sha512-VGvXMTpCEo4qHTNSa9A0a3D+dxGFZCYwR6Jokk+/3oB6flu2/PnPXAh2x7x52EkY5xlIHLm+Le8tJxe/7TNhzw==", - "dev": true, - "peer": true, - "dependencies": { - "colord": "^2.9.1", - "cssnano-utils": "^3.1.0", - "postcss-value-parser": "^4.2.0" - }, - "engines": { - "node": "^10 || ^12 || >=14.0" - }, - "peerDependencies": { - "postcss": "^8.2.15" - } - }, - "node_modules/postcss-minify-params": { - "version": "5.1.4", - "resolved": "https://registry.npmjs.org/postcss-minify-params/-/postcss-minify-params-5.1.4.tgz", - "integrity": "sha512-+mePA3MgdmVmv6g+30rn57USjOGSAyuxUmkfiWpzalZ8aiBkdPYjXWtHuwJGm1v5Ojy0Z0LaSYhHaLJQB0P8Jw==", - "dev": true, - "peer": true, - "dependencies": { - "browserslist": "^4.21.4", - "cssnano-utils": "^3.1.0", - "postcss-value-parser": "^4.2.0" - }, - "engines": { - "node": "^10 || ^12 || >=14.0" - }, - "peerDependencies": { - "postcss": "^8.2.15" - } - }, - "node_modules/postcss-minify-selectors": { - "version": "5.2.1", - "resolved": "https://registry.npmjs.org/postcss-minify-selectors/-/postcss-minify-selectors-5.2.1.tgz", - "integrity": "sha512-nPJu7OjZJTsVUmPdm2TcaiohIwxP+v8ha9NehQ2ye9szv4orirRU3SDdtUmKH+10nzn0bAyOXZ0UEr7OpvLehg==", - "dev": true, - "peer": true, - "dependencies": { - "postcss-selector-parser": "^6.0.5" - }, - "engines": { - "node": "^10 || ^12 || >=14.0" - }, - "peerDependencies": { - "postcss": "^8.2.15" - } - }, - "node_modules/postcss-modules-extract-imports": { - "version": "3.1.0", - "resolved": "https://registry.npmjs.org/postcss-modules-extract-imports/-/postcss-modules-extract-imports-3.1.0.tgz", - "integrity": "sha512-k3kNe0aNFQDAZGbin48pL2VNidTF0w4/eASDsxlyspobzU3wZQLOGj7L9gfRe0Jo9/4uud09DsjFNH7winGv8Q==", - "dev": true, - "peer": true, - "engines": { - "node": "^10 || ^12 || >= 14" - }, - "peerDependencies": { - "postcss": "^8.1.0" - } - }, - "node_modules/postcss-modules-local-by-default": { - "version": "4.2.0", - "resolved": 
"https://registry.npmjs.org/postcss-modules-local-by-default/-/postcss-modules-local-by-default-4.2.0.tgz", - "integrity": "sha512-5kcJm/zk+GJDSfw+V/42fJ5fhjL5YbFDl8nVdXkJPLLW+Vf9mTD5Xe0wqIaDnLuL2U6cDNpTr+UQ+v2HWIBhzw==", - "dev": true, - "peer": true, - "dependencies": { - "icss-utils": "^5.0.0", - "postcss-selector-parser": "^7.0.0", - "postcss-value-parser": "^4.1.0" - }, - "engines": { - "node": "^10 || ^12 || >= 14" - }, - "peerDependencies": { - "postcss": "^8.1.0" - } - }, - "node_modules/postcss-modules-local-by-default/node_modules/postcss-selector-parser": { - "version": "7.1.0", - "resolved": "https://registry.npmjs.org/postcss-selector-parser/-/postcss-selector-parser-7.1.0.tgz", - "integrity": "sha512-8sLjZwK0R+JlxlYcTuVnyT2v+htpdrjDOKuMcOVdYjt52Lh8hWRYpxBPoKx/Zg+bcjc3wx6fmQevMmUztS/ccA==", - "dev": true, - "peer": true, - "dependencies": { - "cssesc": "^3.0.0", - "util-deprecate": "^1.0.2" - }, - "engines": { - "node": ">=4" - } - }, - "node_modules/postcss-modules-scope": { - "version": "3.2.1", - "resolved": "https://registry.npmjs.org/postcss-modules-scope/-/postcss-modules-scope-3.2.1.tgz", - "integrity": "sha512-m9jZstCVaqGjTAuny8MdgE88scJnCiQSlSrOWcTQgM2t32UBe+MUmFSO5t7VMSfAf/FJKImAxBav8ooCHJXCJA==", - "dev": true, - "peer": true, - "dependencies": { - "postcss-selector-parser": "^7.0.0" - }, - "engines": { - "node": "^10 || ^12 || >= 14" - }, - "peerDependencies": { - "postcss": "^8.1.0" - } - }, - "node_modules/postcss-modules-scope/node_modules/postcss-selector-parser": { - "version": "7.1.0", - "resolved": "https://registry.npmjs.org/postcss-selector-parser/-/postcss-selector-parser-7.1.0.tgz", - "integrity": "sha512-8sLjZwK0R+JlxlYcTuVnyT2v+htpdrjDOKuMcOVdYjt52Lh8hWRYpxBPoKx/Zg+bcjc3wx6fmQevMmUztS/ccA==", - "dev": true, - "peer": true, - "dependencies": { - "cssesc": "^3.0.0", - "util-deprecate": "^1.0.2" - }, - "engines": { - "node": ">=4" - } - }, - "node_modules/postcss-modules-values": { - "version": "4.0.0", - "resolved": "https://registry.npmjs.org/postcss-modules-values/-/postcss-modules-values-4.0.0.tgz", - "integrity": "sha512-RDxHkAiEGI78gS2ofyvCsu7iycRv7oqw5xMWn9iMoR0N/7mf9D50ecQqUo5BZ9Zh2vH4bCUR/ktCqbB9m8vJjQ==", - "dev": true, - "peer": true, - "dependencies": { - "icss-utils": "^5.0.0" - }, - "engines": { - "node": "^10 || ^12 || >= 14" - }, - "peerDependencies": { - "postcss": "^8.1.0" - } - }, - "node_modules/postcss-normalize-charset": { - "version": "5.1.0", - "resolved": "https://registry.npmjs.org/postcss-normalize-charset/-/postcss-normalize-charset-5.1.0.tgz", - "integrity": "sha512-mSgUJ+pd/ldRGVx26p2wz9dNZ7ji6Pn8VWBajMXFf8jk7vUoSrZ2lt/wZR7DtlZYKesmZI680qjr2CeFF2fbUg==", - "dev": true, - "peer": true, - "engines": { - "node": "^10 || ^12 || >=14.0" - }, - "peerDependencies": { - "postcss": "^8.2.15" - } - }, - "node_modules/postcss-normalize-display-values": { - "version": "5.1.0", - "resolved": "https://registry.npmjs.org/postcss-normalize-display-values/-/postcss-normalize-display-values-5.1.0.tgz", - "integrity": "sha512-WP4KIM4o2dazQXWmFaqMmcvsKmhdINFblgSeRgn8BJ6vxaMyaJkwAzpPpuvSIoG/rmX3M+IrRZEz2H0glrQNEA==", - "dev": true, - "peer": true, - "dependencies": { - "postcss-value-parser": "^4.2.0" - }, - "engines": { - "node": "^10 || ^12 || >=14.0" - }, - "peerDependencies": { - "postcss": "^8.2.15" - } - }, - "node_modules/postcss-normalize-positions": { - "version": "5.1.1", - "resolved": "https://registry.npmjs.org/postcss-normalize-positions/-/postcss-normalize-positions-5.1.1.tgz", - "integrity": 
"sha512-6UpCb0G4eofTCQLFVuI3EVNZzBNPiIKcA1AKVka+31fTVySphr3VUgAIULBhxZkKgwLImhzMR2Bw1ORK+37INg==", - "dev": true, - "peer": true, - "dependencies": { - "postcss-value-parser": "^4.2.0" - }, - "engines": { - "node": "^10 || ^12 || >=14.0" - }, - "peerDependencies": { - "postcss": "^8.2.15" - } - }, - "node_modules/postcss-normalize-repeat-style": { - "version": "5.1.1", - "resolved": "https://registry.npmjs.org/postcss-normalize-repeat-style/-/postcss-normalize-repeat-style-5.1.1.tgz", - "integrity": "sha512-mFpLspGWkQtBcWIRFLmewo8aC3ImN2i/J3v8YCFUwDnPu3Xz4rLohDO26lGjwNsQxB3YF0KKRwspGzE2JEuS0g==", - "dev": true, - "peer": true, - "dependencies": { - "postcss-value-parser": "^4.2.0" - }, - "engines": { - "node": "^10 || ^12 || >=14.0" - }, - "peerDependencies": { - "postcss": "^8.2.15" - } - }, - "node_modules/postcss-normalize-string": { - "version": "5.1.0", - "resolved": "https://registry.npmjs.org/postcss-normalize-string/-/postcss-normalize-string-5.1.0.tgz", - "integrity": "sha512-oYiIJOf4T9T1N4i+abeIc7Vgm/xPCGih4bZz5Nm0/ARVJ7K6xrDlLwvwqOydvyL3RHNf8qZk6vo3aatiw/go3w==", - "dev": true, - "peer": true, - "dependencies": { - "postcss-value-parser": "^4.2.0" - }, - "engines": { - "node": "^10 || ^12 || >=14.0" - }, - "peerDependencies": { - "postcss": "^8.2.15" - } - }, - "node_modules/postcss-normalize-timing-functions": { - "version": "5.1.0", - "resolved": "https://registry.npmjs.org/postcss-normalize-timing-functions/-/postcss-normalize-timing-functions-5.1.0.tgz", - "integrity": "sha512-DOEkzJ4SAXv5xkHl0Wa9cZLF3WCBhF3o1SKVxKQAa+0pYKlueTpCgvkFAHfk+Y64ezX9+nITGrDZeVGgITJXjg==", - "dev": true, - "peer": true, - "dependencies": { - "postcss-value-parser": "^4.2.0" - }, - "engines": { - "node": "^10 || ^12 || >=14.0" - }, - "peerDependencies": { - "postcss": "^8.2.15" - } - }, - "node_modules/postcss-normalize-unicode": { - "version": "5.1.1", - "resolved": "https://registry.npmjs.org/postcss-normalize-unicode/-/postcss-normalize-unicode-5.1.1.tgz", - "integrity": "sha512-qnCL5jzkNUmKVhZoENp1mJiGNPcsJCs1aaRmURmeJGES23Z/ajaln+EPTD+rBeNkSryI+2WTdW+lwcVdOikrpA==", - "dev": true, - "peer": true, - "dependencies": { - "browserslist": "^4.21.4", - "postcss-value-parser": "^4.2.0" - }, - "engines": { - "node": "^10 || ^12 || >=14.0" - }, - "peerDependencies": { - "postcss": "^8.2.15" - } - }, - "node_modules/postcss-normalize-url": { - "version": "5.1.0", - "resolved": "https://registry.npmjs.org/postcss-normalize-url/-/postcss-normalize-url-5.1.0.tgz", - "integrity": "sha512-5upGeDO+PVthOxSmds43ZeMeZfKH+/DKgGRD7TElkkyS46JXAUhMzIKiCa7BabPeIy3AQcTkXwVVN7DbqsiCew==", - "dev": true, - "peer": true, - "dependencies": { - "normalize-url": "^6.0.1", - "postcss-value-parser": "^4.2.0" - }, - "engines": { - "node": "^10 || ^12 || >=14.0" - }, - "peerDependencies": { - "postcss": "^8.2.15" - } - }, - "node_modules/postcss-normalize-whitespace": { - "version": "5.1.1", - "resolved": "https://registry.npmjs.org/postcss-normalize-whitespace/-/postcss-normalize-whitespace-5.1.1.tgz", - "integrity": "sha512-83ZJ4t3NUDETIHTa3uEg6asWjSBYL5EdkVB0sDncx9ERzOKBVJIUeDO9RyA9Zwtig8El1d79HBp0JEi8wvGQnA==", - "dev": true, - "peer": true, - "dependencies": { - "postcss-value-parser": "^4.2.0" - }, - "engines": { - "node": "^10 || ^12 || >=14.0" - }, - "peerDependencies": { - "postcss": "^8.2.15" - } - }, - "node_modules/postcss-ordered-values": { - "version": "5.1.3", - "resolved": "https://registry.npmjs.org/postcss-ordered-values/-/postcss-ordered-values-5.1.3.tgz", - "integrity": 
"sha512-9UO79VUhPwEkzbb3RNpqqghc6lcYej1aveQteWY+4POIwlqkYE21HKWaLDF6lWNuqCobEAyTovVhtI32Rbv2RQ==", - "dev": true, - "peer": true, - "dependencies": { - "cssnano-utils": "^3.1.0", - "postcss-value-parser": "^4.2.0" - }, - "engines": { - "node": "^10 || ^12 || >=14.0" - }, - "peerDependencies": { - "postcss": "^8.2.15" - } - }, - "node_modules/postcss-reduce-initial": { - "version": "5.1.2", - "resolved": "https://registry.npmjs.org/postcss-reduce-initial/-/postcss-reduce-initial-5.1.2.tgz", - "integrity": "sha512-dE/y2XRaqAi6OvjzD22pjTUQ8eOfc6m/natGHgKFBK9DxFmIm69YmaRVQrGgFlEfc1HePIurY0TmDeROK05rIg==", - "dev": true, - "peer": true, - "dependencies": { - "browserslist": "^4.21.4", - "caniuse-api": "^3.0.0" - }, - "engines": { - "node": "^10 || ^12 || >=14.0" - }, - "peerDependencies": { - "postcss": "^8.2.15" - } - }, - "node_modules/postcss-reduce-transforms": { - "version": "5.1.0", - "resolved": "https://registry.npmjs.org/postcss-reduce-transforms/-/postcss-reduce-transforms-5.1.0.tgz", - "integrity": "sha512-2fbdbmgir5AvpW9RLtdONx1QoYG2/EtqpNQbFASDlixBbAYuTcJ0dECwlqNqH7VbaUnEnh8SrxOe2sRIn24XyQ==", - "dev": true, - "peer": true, - "dependencies": { - "postcss-value-parser": "^4.2.0" - }, - "engines": { - "node": "^10 || ^12 || >=14.0" - }, - "peerDependencies": { - "postcss": "^8.2.15" - } - }, "node_modules/postcss-selector-parser": { "version": "6.1.2", "resolved": "https://registry.npmjs.org/postcss-selector-parser/-/postcss-selector-parser-6.1.2.tgz", @@ -9483,46 +2526,6 @@ "node": ">=4" } }, - "node_modules/postcss-svgo": { - "version": "5.1.0", - "resolved": "https://registry.npmjs.org/postcss-svgo/-/postcss-svgo-5.1.0.tgz", - "integrity": "sha512-D75KsH1zm5ZrHyxPakAxJWtkyXew5qwS70v56exwvw542d9CRtTo78K0WeFxZB4G7JXKKMbEZtZayTGdIky/eA==", - "dev": true, - "peer": true, - "dependencies": { - "postcss-value-parser": "^4.2.0", - "svgo": "^2.7.0" - }, - "engines": { - "node": "^10 || ^12 || >=14.0" - }, - "peerDependencies": { - "postcss": "^8.2.15" - } - }, - "node_modules/postcss-unique-selectors": { - "version": "5.1.1", - "resolved": "https://registry.npmjs.org/postcss-unique-selectors/-/postcss-unique-selectors-5.1.1.tgz", - "integrity": "sha512-5JiODlELrz8L2HwxfPnhOWZYWDxVHWL83ufOv84NrcgipI7TaeRsatAhK4Tr2/ZiYldpK/wBvw5BD3qfaK96GA==", - "dev": true, - "peer": true, - "dependencies": { - "postcss-selector-parser": "^6.0.5" - }, - "engines": { - "node": "^10 || ^12 || >=14.0" - }, - "peerDependencies": { - "postcss": "^8.2.15" - } - }, - "node_modules/postcss-value-parser": { - "version": "4.2.0", - "resolved": "https://registry.npmjs.org/postcss-value-parser/-/postcss-value-parser-4.2.0.tgz", - "integrity": "sha512-1NNCs6uurfkVbeXG4S8JFT9t19m45ICnif8zWLd5oPSZ50QnwMfK+H3jv408d4jw/7Bttv5axS5IiHoLaVNHeQ==", - "dev": true, - "peer": true - }, "node_modules/prelude-ls": { "version": "1.2.1", "resolved": "https://registry.npmjs.org/prelude-ls/-/prelude-ls-1.2.1.tgz", @@ -9532,541 +2535,15 @@ "node": ">= 0.8.0" } }, - "node_modules/prettier": { - "version": "2.8.8", - "resolved": "https://registry.npmjs.org/prettier/-/prettier-2.8.8.tgz", - "integrity": "sha512-tdN8qQGvNjw4CHbY+XXk0JgCXn9QiF21a55rBe5LJAU+kDyC4WQn4+awm2Xfk2lQMk5fKup9XgzTZtGkjBdP9Q==", - "dev": true, - "optional": true, - "peer": true, - "bin": { - "prettier": "bin-prettier.js" - }, - "engines": { - "node": ">=10.13.0" - }, - "funding": { - "url": "https://github.com/prettier/prettier?sponsor=1" - } - }, - "node_modules/pretty-error": { - "version": "4.0.0", - "resolved": 
"https://registry.npmjs.org/pretty-error/-/pretty-error-4.0.0.tgz", - "integrity": "sha512-AoJ5YMAcXKYxKhuJGdcvse+Voc6v1RgnsR3nWcYU7q4t6z0Q6T86sv5Zq8VIRbOWWFpvdGE83LtdSMNd+6Y0xw==", - "dev": true, - "peer": true, - "dependencies": { - "lodash": "^4.17.20", - "renderkid": "^3.0.0" - } - }, - "node_modules/process-nextick-args": { - "version": "2.0.1", - "resolved": "https://registry.npmjs.org/process-nextick-args/-/process-nextick-args-2.0.1.tgz", - "integrity": "sha512-3ouUOpQhtgrbOa17J7+uxOTpITYWaGP7/AhoR3+A+/1e9skrzelGi/dXzEYyvbxubEF6Wn2ypscTKiKJFFn1ag==", - "dev": true, - "peer": true - }, - "node_modules/progress-webpack-plugin": { - "version": "1.0.16", - "resolved": "https://registry.npmjs.org/progress-webpack-plugin/-/progress-webpack-plugin-1.0.16.tgz", - "integrity": "sha512-sdiHuuKOzELcBANHfrupYo+r99iPRyOnw15qX+rNlVUqXGfjXdH4IgxriKwG1kNJwVswKQHMdj1hYZMcb9jFaA==", - "dev": true, - "peer": true, - "dependencies": { - "chalk": "^2.1.0", - "figures": "^2.0.0", - "log-update": "^2.3.0" - }, - "engines": { - "node": ">= 10.13.0" - }, - "peerDependencies": { - "webpack": "^2.0.0 || ^3.0.0 || ^4.0.0 || ^5.0.0" - } - }, - "node_modules/progress-webpack-plugin/node_modules/ansi-styles": { - "version": "3.2.1", - "resolved": "https://registry.npmjs.org/ansi-styles/-/ansi-styles-3.2.1.tgz", - "integrity": "sha512-VT0ZI6kZRdTh8YyJw3SMbYm/u+NqfsAxEpWO0Pf9sq8/e94WxxOpPKx9FR1FlyCtOVDNOQ+8ntlqFxiRc+r5qA==", - "dev": true, - "peer": true, - "dependencies": { - "color-convert": "^1.9.0" - }, - "engines": { - "node": ">=4" - } - }, - "node_modules/progress-webpack-plugin/node_modules/chalk": { - "version": "2.4.2", - "resolved": "https://registry.npmjs.org/chalk/-/chalk-2.4.2.tgz", - "integrity": "sha512-Mti+f9lpJNcwF4tWV8/OrTTtF1gZi+f8FqlyAdouralcFWFQWF2+NgCHShjkCb+IFBLq9buZwE1xckQU4peSuQ==", - "dev": true, - "peer": true, - "dependencies": { - "ansi-styles": "^3.2.1", - "escape-string-regexp": "^1.0.5", - "supports-color": "^5.3.0" - }, - "engines": { - "node": ">=4" - } - }, - "node_modules/progress-webpack-plugin/node_modules/color-convert": { - "version": "1.9.3", - "resolved": "https://registry.npmjs.org/color-convert/-/color-convert-1.9.3.tgz", - "integrity": "sha512-QfAUtd+vFdAtFQcC8CCyYt1fYWxSqAiK2cSD6zDB8N3cpsEBAvRxp9zOGg6G/SHHJYAT88/az/IuDGALsNVbGg==", - "dev": true, - "peer": true, - "dependencies": { - "color-name": "1.1.3" - } - }, - "node_modules/progress-webpack-plugin/node_modules/color-name": { - "version": "1.1.3", - "resolved": "https://registry.npmjs.org/color-name/-/color-name-1.1.3.tgz", - "integrity": "sha512-72fSenhMw2HZMTVHeCA9KCmpEIbzWiQsjN+BHcBbS9vr1mtt+vJjPdksIBNUmKAW8TFUDPJK5SUU3QhE9NEXDw==", - "dev": true, - "peer": true - }, - "node_modules/progress-webpack-plugin/node_modules/escape-string-regexp": { - "version": "1.0.5", - "resolved": "https://registry.npmjs.org/escape-string-regexp/-/escape-string-regexp-1.0.5.tgz", - "integrity": "sha512-vbRorB5FUQWvla16U8R/qgaFIya2qGzwDrNmCZuYKrbdSUMG6I1ZCGQRefkRVhuOkIGVne7BQ35DSfo1qvJqFg==", - "dev": true, - "peer": true, - "engines": { - "node": ">=0.8.0" - } - }, - "node_modules/progress-webpack-plugin/node_modules/has-flag": { - "version": "3.0.0", - "resolved": "https://registry.npmjs.org/has-flag/-/has-flag-3.0.0.tgz", - "integrity": "sha512-sKJf1+ceQBr4SMkvQnBDNDtf4TXpVhVGateu0t918bl30FnbE2m4vNLX+VWe/dpjlb+HugGYzW7uQXH98HPEYw==", - "dev": true, - "peer": true, - "engines": { - "node": ">=4" - } - }, - "node_modules/progress-webpack-plugin/node_modules/supports-color": { - "version": "5.5.0", - "resolved": 
"https://registry.npmjs.org/supports-color/-/supports-color-5.5.0.tgz", - "integrity": "sha512-QjVjwdXIt408MIiAqCX4oUKsgU2EqAGzs2Ppkm4aQYbjm+ZEWEcW4SfFNTr4uMNZma0ey4f5lgLrkB0aX0QMow==", - "dev": true, - "peer": true, - "dependencies": { - "has-flag": "^3.0.0" - }, - "engines": { - "node": ">=4" - } - }, - "node_modules/proxy-addr": { - "version": "2.0.7", - "resolved": "https://registry.npmjs.org/proxy-addr/-/proxy-addr-2.0.7.tgz", - "integrity": "sha512-llQsMLSUDUPT44jdrU/O37qlnifitDP+ZwrmmZcoSKyLKvtZxpyV0n2/bD/N4tBAAZ/gJEdZU7KMraoK1+XYAg==", - "dev": true, - "peer": true, - "dependencies": { - "forwarded": "0.2.0", - "ipaddr.js": "1.9.1" - }, - "engines": { - "node": ">= 0.10" - } - }, - "node_modules/proxy-addr/node_modules/ipaddr.js": { - "version": "1.9.1", - "resolved": "https://registry.npmjs.org/ipaddr.js/-/ipaddr.js-1.9.1.tgz", - "integrity": "sha512-0KI/607xoxSToH7GjN1FfSbLoU0+btTicjsQSWQlh/hZykN8KpmMf7uYwPW3R+akZ6R/w18ZlXSHBYXiYUPO3g==", - "dev": true, - "peer": true, - "engines": { - "node": ">= 0.10" - } - }, - "node_modules/pseudomap": { - "version": "1.0.2", - "resolved": "https://registry.npmjs.org/pseudomap/-/pseudomap-1.0.2.tgz", - "integrity": "sha512-b/YwNhb8lk1Zz2+bXXpS/LK9OisiZZ1SNsSLxN1x2OXVEhW2Ckr/7mWE5vrC1ZTiJlD9g19jWszTmJsB+oEpFQ==", - "dev": true, - "peer": true - }, - "node_modules/pump": { - "version": "3.0.2", - "resolved": "https://registry.npmjs.org/pump/-/pump-3.0.2.tgz", - "integrity": "sha512-tUPXtzlGM8FE3P0ZL6DVs/3P58k9nk8/jZeQCurTJylQA8qFYzHFfhBJkuqyE0FifOsQ0uKWekiZ5g8wtr28cw==", - "dev": true, - "dependencies": { - "end-of-stream": "^1.1.0", - "once": "^1.3.1" - } - }, "node_modules/punycode": { "version": "2.3.1", "resolved": "https://registry.npmjs.org/punycode/-/punycode-2.3.1.tgz", "integrity": "sha512-vYt7UD1U9Wg6138shLtLOvdAu+8DsC/ilFtEVHcH+wydcSpNE20AfSOduf6MkRFahL5FY7X1oU7nKVZFtfq8Fg==", - "devOptional": true, + "dev": true, "engines": { "node": ">=6" } }, - "node_modules/qs": { - "version": "6.13.0", - "resolved": "https://registry.npmjs.org/qs/-/qs-6.13.0.tgz", - "integrity": "sha512-+38qI9SOr8tfZ4QmJNplMUxqjbe7LKvvZgWdExBOmd+egZTtjLB67Gu0HRX3u/XOq7UU2Nx6nsjvS16Z9uwfpg==", - "dev": true, - "peer": true, - "dependencies": { - "side-channel": "^1.0.6" - }, - "engines": { - "node": ">=0.6" - }, - "funding": { - "url": "https://github.com/sponsors/ljharb" - } - }, - "node_modules/queue-microtask": { - "version": "1.2.3", - "resolved": "https://registry.npmjs.org/queue-microtask/-/queue-microtask-1.2.3.tgz", - "integrity": "sha512-NuaNSa6flKT5JaSYQzJok04JzTL1CA6aGhv5rfLW3PgqA+M2ChpZQnAC8h8i4ZFkBS8X5RqkDBHA7r4hej3K9A==", - "dev": true, - "funding": [ - { - "type": "github", - "url": "https://github.com/sponsors/feross" - }, - { - "type": "patreon", - "url": "https://www.patreon.com/feross" - }, - { - "type": "consulting", - "url": "https://feross.org/support" - } - ], - "peer": true - }, - "node_modules/randombytes": { - "version": "2.1.0", - "resolved": "https://registry.npmjs.org/randombytes/-/randombytes-2.1.0.tgz", - "integrity": "sha512-vYl3iOX+4CKUWuxGi9Ukhie6fsqXqS9FE2Zaic4tNFD2N2QQaXOMFbuKK4QmDHC0JO6B1Zp41J0LpT0oR68amQ==", - "devOptional": true, - "dependencies": { - "safe-buffer": "^5.1.0" - } - }, - "node_modules/range-parser": { - "version": "1.2.1", - "resolved": "https://registry.npmjs.org/range-parser/-/range-parser-1.2.1.tgz", - "integrity": "sha512-Hrgsx+orqoygnmhFbKaHE6c296J+HTAQXoxEF6gNupROmmGJRoyzfG3ccAveqCBrwr/2yxQ5BVd/GTl5agOwSg==", - "dev": true, - "peer": true, - "engines": { - "node": ">= 0.6" - } - }, - 
"node_modules/raw-body": { - "version": "2.5.2", - "resolved": "https://registry.npmjs.org/raw-body/-/raw-body-2.5.2.tgz", - "integrity": "sha512-8zGqypfENjCIqGhgXToC8aB2r7YrBX+AQAfIPs/Mlk+BtPTztOvTS01NRW/3Eh60J+a48lt8qsCzirQ6loCVfA==", - "dev": true, - "peer": true, - "dependencies": { - "bytes": "3.1.2", - "http-errors": "2.0.0", - "iconv-lite": "0.4.24", - "unpipe": "1.0.0" - }, - "engines": { - "node": ">= 0.8" - } - }, - "node_modules/read-pkg": { - "version": "5.2.0", - "resolved": "https://registry.npmjs.org/read-pkg/-/read-pkg-5.2.0.tgz", - "integrity": "sha512-Ug69mNOpfvKDAc2Q8DRpMjjzdtrnv9HcSMX+4VsZxD1aZ6ZzrIE7rlzXBtWTyhULSMKg076AW6WR5iZpD0JiOg==", - "dev": true, - "dependencies": { - "@types/normalize-package-data": "^2.4.0", - "normalize-package-data": "^2.5.0", - "parse-json": "^5.0.0", - "type-fest": "^0.6.0" - }, - "engines": { - "node": ">=8" - } - }, - "node_modules/read-pkg-up": { - "version": "7.0.1", - "resolved": "https://registry.npmjs.org/read-pkg-up/-/read-pkg-up-7.0.1.tgz", - "integrity": "sha512-zK0TB7Xd6JpCLmlLmufqykGE+/TlOePD6qKClNW7hHDKFh/J7/7gCWGR7joEQEW1bKq3a3yUZSObOoWLFQ4ohg==", - "dev": true, - "peer": true, - "dependencies": { - "find-up": "^4.1.0", - "read-pkg": "^5.2.0", - "type-fest": "^0.8.1" - }, - "engines": { - "node": ">=8" - }, - "funding": { - "url": "https://github.com/sponsors/sindresorhus" - } - }, - "node_modules/read-pkg-up/node_modules/find-up": { - "version": "4.1.0", - "resolved": "https://registry.npmjs.org/find-up/-/find-up-4.1.0.tgz", - "integrity": "sha512-PpOwAdQ/YlXQ2vj8a3h8IipDuYRi3wceVQQGYWxNINccq40Anw7BlsEXCMbt1Zt+OLA6Fq9suIpIWD0OsnISlw==", - "dev": true, - "peer": true, - "dependencies": { - "locate-path": "^5.0.0", - "path-exists": "^4.0.0" - }, - "engines": { - "node": ">=8" - } - }, - "node_modules/read-pkg-up/node_modules/locate-path": { - "version": "5.0.0", - "resolved": "https://registry.npmjs.org/locate-path/-/locate-path-5.0.0.tgz", - "integrity": "sha512-t7hw9pI+WvuwNJXwk5zVHpyhIqzg2qTlklJOf0mVxGSbe3Fp2VieZcduNYjaLDoy6p9uGpQEGWG87WpMKlNq8g==", - "dev": true, - "peer": true, - "dependencies": { - "p-locate": "^4.1.0" - }, - "engines": { - "node": ">=8" - } - }, - "node_modules/read-pkg-up/node_modules/p-limit": { - "version": "2.3.0", - "resolved": "https://registry.npmjs.org/p-limit/-/p-limit-2.3.0.tgz", - "integrity": "sha512-//88mFWSJx8lxCzwdAABTJL2MyWB12+eIY7MDL2SqLmAkeKU9qxRvWuSyTjm3FUmpBEMuFfckAIqEaVGUDxb6w==", - "dev": true, - "peer": true, - "dependencies": { - "p-try": "^2.0.0" - }, - "engines": { - "node": ">=6" - }, - "funding": { - "url": "https://github.com/sponsors/sindresorhus" - } - }, - "node_modules/read-pkg-up/node_modules/p-locate": { - "version": "4.1.0", - "resolved": "https://registry.npmjs.org/p-locate/-/p-locate-4.1.0.tgz", - "integrity": "sha512-R79ZZ/0wAxKGu3oYMlz8jy/kbhsNrS7SKZ7PxEHBgJ5+F2mtFW2fK2cOtBh1cHYkQsbzFV7I+EoRKe6Yt0oK7A==", - "dev": true, - "peer": true, - "dependencies": { - "p-limit": "^2.2.0" - }, - "engines": { - "node": ">=8" - } - }, - "node_modules/read-pkg-up/node_modules/type-fest": { - "version": "0.8.1", - "resolved": "https://registry.npmjs.org/type-fest/-/type-fest-0.8.1.tgz", - "integrity": "sha512-4dbzIzqvjtgiM5rw1k5rEHtBANKmdudhGyBEajN01fEyhaAIhsoKNy6y7+IN93IfpFtwY9iqi7kD+xwKhQsNJA==", - "dev": true, - "peer": true, - "engines": { - "node": ">=8" - } - }, - "node_modules/read-pkg/node_modules/type-fest": { - "version": "0.6.0", - "resolved": "https://registry.npmjs.org/type-fest/-/type-fest-0.6.0.tgz", - "integrity": 
"sha512-q+MB8nYR1KDLrgr4G5yemftpMC7/QLqVndBmEEdqzmNj5dcFOO4Oo8qlwZE3ULT3+Zim1F8Kq4cBnikNhlCMlg==", - "dev": true, - "engines": { - "node": ">=8" - } - }, - "node_modules/readable-stream": { - "version": "3.6.2", - "resolved": "https://registry.npmjs.org/readable-stream/-/readable-stream-3.6.2.tgz", - "integrity": "sha512-9u/sniCrY3D5WdsERHzHE4G2YCXqoG5FTHUiCC4SIbr6XcLZBY05ya9EKjYek9O5xOAwjGq+1JdGBAS7Q9ScoA==", - "dev": true, - "dependencies": { - "inherits": "^2.0.3", - "string_decoder": "^1.1.1", - "util-deprecate": "^1.0.1" - }, - "engines": { - "node": ">= 6" - } - }, - "node_modules/readdirp": { - "version": "3.6.0", - "resolved": "https://registry.npmjs.org/readdirp/-/readdirp-3.6.0.tgz", - "integrity": "sha512-hOS089on8RduqdbhvQ5Z37A0ESjsqz6qnRcffsMU3495FuTdqSm+7bhJ29JvIOsBDEEnan5DPu9t3To9VRlMzA==", - "dev": true, - "peer": true, - "dependencies": { - "picomatch": "^2.2.1" - }, - "engines": { - "node": ">=8.10.0" - } - }, - "node_modules/rechoir": { - "version": "0.6.2", - "resolved": "https://registry.npmjs.org/rechoir/-/rechoir-0.6.2.tgz", - "integrity": "sha512-HFM8rkZ+i3zrV+4LQjwQ0W+ez98pApMGM3HUrN04j3CqzPOzl9nmP15Y8YXNm8QHGv/eacOVEjqhmWpkRV0NAw==", - "dev": true, - "dependencies": { - "resolve": "^1.1.6" - }, - "engines": { - "node": ">= 0.10" - } - }, - "node_modules/regenerate": { - "version": "1.4.2", - "resolved": "https://registry.npmjs.org/regenerate/-/regenerate-1.4.2.tgz", - "integrity": "sha512-zrceR/XhGYU/d/opr2EKO7aRHUeiBI8qjtfHqADTwZd6Szfy16la6kqD0MIUs5z5hx6AaKa+PixpPrR289+I0A==", - "dev": true - }, - "node_modules/regenerate-unicode-properties": { - "version": "10.2.0", - "resolved": "https://registry.npmjs.org/regenerate-unicode-properties/-/regenerate-unicode-properties-10.2.0.tgz", - "integrity": "sha512-DqHn3DwbmmPVzeKj9woBadqmXxLvQoQIwu7nopMc72ztvxVmVk2SBhSnx67zuye5TP+lJsb/TBQsjLKhnDf3MA==", - "dev": true, - "dependencies": { - "regenerate": "^1.4.2" - }, - "engines": { - "node": ">=4" - } - }, - "node_modules/regexpu-core": { - "version": "6.2.0", - "resolved": "https://registry.npmjs.org/regexpu-core/-/regexpu-core-6.2.0.tgz", - "integrity": "sha512-H66BPQMrv+V16t8xtmq+UC0CBpiTBA60V8ibS1QVReIp8T1z8hwFxqcGzm9K6lgsN7sB5edVH8a+ze6Fqm4weA==", - "dev": true, - "dependencies": { - "regenerate": "^1.4.2", - "regenerate-unicode-properties": "^10.2.0", - "regjsgen": "^0.8.0", - "regjsparser": "^0.12.0", - "unicode-match-property-ecmascript": "^2.0.0", - "unicode-match-property-value-ecmascript": "^2.1.0" - }, - "engines": { - "node": ">=4" - } - }, - "node_modules/regjsgen": { - "version": "0.8.0", - "resolved": "https://registry.npmjs.org/regjsgen/-/regjsgen-0.8.0.tgz", - "integrity": "sha512-RvwtGe3d7LvWiDQXeQw8p5asZUmfU1G/l6WbUXeHta7Y2PEIvBTwH6E2EfmYUK8pxcxEdEmaomqyp0vZZ7C+3Q==", - "dev": true - }, - "node_modules/regjsparser": { - "version": "0.12.0", - "resolved": "https://registry.npmjs.org/regjsparser/-/regjsparser-0.12.0.tgz", - "integrity": "sha512-cnE+y8bz4NhMjISKbgeVJtqNbtf5QpjZP+Bslo+UqkIt9QPnX9q095eiRRASJG1/tz6dlNr6Z5NsBiWYokp6EQ==", - "dev": true, - "dependencies": { - "jsesc": "~3.0.2" - }, - "bin": { - "regjsparser": "bin/parser" - } - }, - "node_modules/regjsparser/node_modules/jsesc": { - "version": "3.0.2", - "resolved": "https://registry.npmjs.org/jsesc/-/jsesc-3.0.2.tgz", - "integrity": "sha512-xKqzzWXDttJuOcawBt4KnKHHIf5oQ/Cxax+0PWFG+DFDgHNAdi+TXECADI+RYiFUMmx8792xsMbbgXj4CwnP4g==", - "dev": true, - "bin": { - "jsesc": "bin/jsesc" - }, - "engines": { - "node": ">=6" - } - }, - "node_modules/relateurl": { - "version": "0.2.7", - "resolved": 
"https://registry.npmjs.org/relateurl/-/relateurl-0.2.7.tgz", - "integrity": "sha512-G08Dxvm4iDN3MLM0EsP62EDV9IuhXPR6blNz6Utcp7zyV3tr4HVNINt6MpaRWbxoOHT3Q7YN2P+jaHX8vUbgog==", - "dev": true, - "peer": true, - "engines": { - "node": ">= 0.10" - } - }, - "node_modules/renderkid": { - "version": "3.0.0", - "resolved": "https://registry.npmjs.org/renderkid/-/renderkid-3.0.0.tgz", - "integrity": "sha512-q/7VIQA8lmM1hF+jn+sFSPWGlMkSAeNYcPLmDQx2zzuiDfaLrOmumR8iaUKlenFgh0XRPIUeSPlH3A+AW3Z5pg==", - "dev": true, - "peer": true, - "dependencies": { - "css-select": "^4.1.3", - "dom-converter": "^0.2.0", - "htmlparser2": "^6.1.0", - "lodash": "^4.17.21", - "strip-ansi": "^6.0.1" - } - }, - "node_modules/require-directory": { - "version": "2.1.1", - "resolved": "https://registry.npmjs.org/require-directory/-/require-directory-2.1.1.tgz", - "integrity": "sha512-fGxEI7+wsG9xrvdjsrlmL22OMTTiHRwAMroiEeMgq8gzoLC/PQr7RsRDSTLUg/bZAZtF+TVIkHc6/4RIKrui+Q==", - "dev": true, - "peer": true, - "engines": { - "node": ">=0.10.0" - } - }, - "node_modules/require-from-string": { - "version": "2.0.2", - "resolved": "https://registry.npmjs.org/require-from-string/-/require-from-string-2.0.2.tgz", - "integrity": "sha512-Xf0nWe6RseziFMu+Ap9biiUbmplq6S9/p+7w7YXP/JBHhrUDDUhwa+vANyubuqfZWTveU//DYVGsDG7RKL/vEw==", - "devOptional": true, - "engines": { - "node": ">=0.10.0" - } - }, - "node_modules/requires-port": { - "version": "1.0.0", - "resolved": "https://registry.npmjs.org/requires-port/-/requires-port-1.0.0.tgz", - "integrity": "sha512-KigOCHcocU3XODJxsu8i/j8T9tzT4adHiecwORRQ0ZZFcp7ahwXuRU1m+yuO90C5ZUyGeGfocHDI14M3L3yDAQ==", - "dev": true, - "peer": true - }, - "node_modules/resolve": { - "version": "1.22.10", - "resolved": "https://registry.npmjs.org/resolve/-/resolve-1.22.10.tgz", - "integrity": "sha512-NPRy+/ncIMeDlTAsuqwKIiferiawhefFJtkNSW0qZJEqMEb+qBt/77B/jGeeek+F0uOeN05CDa6HXbbIgtVX4w==", - "dev": true, - "dependencies": { - "is-core-module": "^2.16.0", - "path-parse": "^1.0.7", - "supports-preserve-symlinks-flag": "^1.0.0" - }, - "bin": { - "resolve": "bin/resolve" - }, - "engines": { - "node": ">= 0.4" - }, - "funding": { - "url": "https://github.com/sponsors/ljharb" - } - }, "node_modules/resolve-from": { "version": "4.0.0", "resolved": "https://registry.npmjs.org/resolve-from/-/resolve-from-4.0.0.tgz", @@ -10076,359 +2553,60 @@ "node": ">=4" } }, - "node_modules/restore-cursor": { - "version": "3.1.0", - "resolved": "https://registry.npmjs.org/restore-cursor/-/restore-cursor-3.1.0.tgz", - "integrity": "sha512-l+sSefzHpj5qimhFSE5a8nufZYAM3sBSVMAPtYkmC+4EH2anSGaEMXSD0izRQbu9nfyQ9y5JrVmp7E8oZrUjvA==", - "dev": true, - "dependencies": { - "onetime": "^5.1.0", - "signal-exit": "^3.0.2" - }, - "engines": { - "node": ">=8" - } - }, - "node_modules/retry": { - "version": "0.13.1", - "resolved": "https://registry.npmjs.org/retry/-/retry-0.13.1.tgz", - "integrity": "sha512-XQBQ3I8W1Cge0Seh+6gjj03LbmRFWuoszgK9ooCpwYIrhhoO80pfq4cUkU5DkknwfOfFteRwlZ56PYOGYyFWdg==", - "dev": true, - "peer": true, - "engines": { - "node": ">= 4" - } - }, - "node_modules/reusify": { - "version": "1.1.0", - "resolved": "https://registry.npmjs.org/reusify/-/reusify-1.1.0.tgz", - "integrity": "sha512-g6QUff04oZpHs0eG5p83rFLhHeV00ug/Yf9nZM6fLeUrPguBTkTQOdpAWWspMh55TZfVQDPaN3NQJfbVRAxdIw==", - "dev": true, - "peer": true, - "engines": { - "iojs": ">=1.0.0", - "node": ">=0.10.0" - } - }, - "node_modules/rimraf": { - "version": "3.0.2", - "resolved": "https://registry.npmjs.org/rimraf/-/rimraf-3.0.2.tgz", - "integrity": 
"sha512-JZkJMZkAGFFPP2YqXZXPbMlMBgsxzE8ILs4lMIX/2o0L9UBw9O/Y3o6wFw/i9YLapcUJWwqbi3kdxIPdC62TIA==", - "deprecated": "Rimraf versions prior to v4 are no longer supported", - "dev": true, - "peer": true, - "dependencies": { - "glob": "^7.1.3" - }, - "bin": { - "rimraf": "bin.js" - }, - "funding": { - "url": "https://github.com/sponsors/isaacs" - } - }, "node_modules/roboto-fontface": { "version": "0.10.0", "resolved": "https://registry.npmjs.org/roboto-fontface/-/roboto-fontface-0.10.0.tgz", "integrity": "sha512-OlwfYEgA2RdboZohpldlvJ1xngOins5d7ejqnIBWr9KaMxsnBqotpptRXTyfNRLnFpqzX6sTDt+X+a+6udnU8g==" }, - "node_modules/run-parallel": { - "version": "1.2.0", - "resolved": "https://registry.npmjs.org/run-parallel/-/run-parallel-1.2.0.tgz", - "integrity": "sha512-5l4VyZR86LZ/lDxZTR6jqL8AFE2S0IFLMP26AbjsLVADxHdhB/c0GUsH+y39UfCi3dzz8OlQuPmnaJOMoDHQBA==", - "dev": true, - "funding": [ - { - "type": "github", - "url": "https://github.com/sponsors/feross" - }, - { - "type": "patreon", - "url": "https://www.patreon.com/feross" - }, - { - "type": "consulting", - "url": "https://feross.org/support" - } - ], - "peer": true, - "dependencies": { - "queue-microtask": "^1.2.2" - } - }, - "node_modules/safe-buffer": { - "version": "5.2.1", - "resolved": "https://registry.npmjs.org/safe-buffer/-/safe-buffer-5.2.1.tgz", - "integrity": "sha512-rp3So07KcdmmKbGvgaNxQSJr7bGVSVk5S9Eq1F+ppbRo70+YeaDxkw5Dd8NPN+GD6bjnYm2VuPuCXmpuYvmCXQ==", + "node_modules/rollup": { + "version": "4.46.2", + "resolved": "https://registry.npmjs.org/rollup/-/rollup-4.46.2.tgz", + "integrity": "sha512-WMmLFI+Boh6xbop+OAGo9cQ3OgX9MIg7xOQjn+pTCwOkk+FNDAeAemXkJ3HzDJrVXleLOFVa1ipuc1AmEx1Dwg==", "devOptional": true, - "funding": [ - { - "type": "github", - "url": "https://github.com/sponsors/feross" - }, - { - "type": "patreon", - "url": "https://www.patreon.com/feross" - }, - { - "type": "consulting", - "url": "https://feross.org/support" - } - ] - }, - "node_modules/safer-buffer": { - "version": "2.1.2", - "resolved": "https://registry.npmjs.org/safer-buffer/-/safer-buffer-2.1.2.tgz", - "integrity": "sha512-YZo3K82SD7Riyi0E1EQPojLz7kpepnSQI9IyPbHHg1XXXevb5dJI7tpyN2ADxGcQbHG7vcyRHk0cbwqcQriUtg==", - "dev": true, - "peer": true - }, - "node_modules/schema-utils": { - "version": "2.7.1", - "resolved": "https://registry.npmjs.org/schema-utils/-/schema-utils-2.7.1.tgz", - "integrity": "sha512-SHiNtMOUGWBQJwzISiVYKu82GiV4QYGePp3odlY1tuKO7gPtphAT5R/py0fA6xtbgLL/RvtJZnU9b8s0F1q0Xg==", - "dev": true, "dependencies": { - "@types/json-schema": "^7.0.5", - "ajv": "^6.12.4", - "ajv-keywords": "^3.5.2" + "@types/estree": "1.0.8" + }, + "bin": { + "rollup": "dist/bin/rollup" }, "engines": { - "node": ">= 8.9.0" + "node": ">=18.0.0", + "npm": ">=8.0.0" }, - "funding": { - "type": "opencollective", - "url": "https://opencollective.com/webpack" - } - }, - "node_modules/select-hose": { - "version": "2.0.0", - "resolved": "https://registry.npmjs.org/select-hose/-/select-hose-2.0.0.tgz", - "integrity": "sha512-mEugaLK+YfkijB4fx0e6kImuJdCIt2LxCRcbEYPqRGCs4F2ogyfZU5IAZRdjCP8JPq2AtdNoC/Dux63d9Kiryg==", - "dev": true, - "peer": true - }, - "node_modules/selfsigned": { - "version": "2.4.1", - "resolved": "https://registry.npmjs.org/selfsigned/-/selfsigned-2.4.1.tgz", - "integrity": "sha512-th5B4L2U+eGLq1TVh7zNRGBapioSORUeymIydxgFpwww9d2qyKvtuPU2jJuHvYAwwqi2Y596QBL3eEqcPEYL8Q==", - "dev": true, - "peer": true, - "dependencies": { - "@types/node-forge": "^1.3.0", - "node-forge": "^1" - }, - "engines": { - "node": ">=10" + "optionalDependencies": { + 
"@rollup/rollup-android-arm-eabi": "4.46.2", + "@rollup/rollup-android-arm64": "4.46.2", + "@rollup/rollup-darwin-arm64": "4.46.2", + "@rollup/rollup-darwin-x64": "4.46.2", + "@rollup/rollup-freebsd-arm64": "4.46.2", + "@rollup/rollup-freebsd-x64": "4.46.2", + "@rollup/rollup-linux-arm-gnueabihf": "4.46.2", + "@rollup/rollup-linux-arm-musleabihf": "4.46.2", + "@rollup/rollup-linux-arm64-gnu": "4.46.2", + "@rollup/rollup-linux-arm64-musl": "4.46.2", + "@rollup/rollup-linux-loongarch64-gnu": "4.46.2", + "@rollup/rollup-linux-ppc64-gnu": "4.46.2", + "@rollup/rollup-linux-riscv64-gnu": "4.46.2", + "@rollup/rollup-linux-riscv64-musl": "4.46.2", + "@rollup/rollup-linux-s390x-gnu": "4.46.2", + "@rollup/rollup-linux-x64-gnu": "4.46.2", + "@rollup/rollup-linux-x64-musl": "4.46.2", + "@rollup/rollup-win32-arm64-msvc": "4.46.2", + "@rollup/rollup-win32-ia32-msvc": "4.46.2", + "@rollup/rollup-win32-x64-msvc": "4.46.2", + "fsevents": "~2.3.2" } }, "node_modules/semver": { - "version": "6.3.1", - "resolved": "https://registry.npmjs.org/semver/-/semver-6.3.1.tgz", - "integrity": "sha512-BR7VvDCVHO+q2xBEWskxS6DJE1qRnb7DxzUrogb71CWoSficBxYsiAGd+Kl0mmq/MprG9yArRkyrQxTO6XjMzA==", + "version": "7.7.2", + "resolved": "https://registry.npmjs.org/semver/-/semver-7.7.2.tgz", + "integrity": "sha512-RF0Fw+rO5AMf9MAyaRXI4AV0Ulj5lMHqVxxdSgiVbixSCXoEmmX/jk0CuJw4+3SqroYO9VoUh+HcuJivvtJemA==", "dev": true, "bin": { "semver": "bin/semver.js" - } - }, - "node_modules/send": { - "version": "0.19.0", - "resolved": "https://registry.npmjs.org/send/-/send-0.19.0.tgz", - "integrity": "sha512-dW41u5VfLXu8SJh5bwRmyYUbAoSB3c9uQh6L8h/KtsFREPWpbX1lrljJo186Jc4nmci/sGUZ9a0a0J2zgfq2hw==", - "dev": true, - "peer": true, - "dependencies": { - "debug": "2.6.9", - "depd": "2.0.0", - "destroy": "1.2.0", - "encodeurl": "~1.0.2", - "escape-html": "~1.0.3", - "etag": "~1.8.1", - "fresh": "0.5.2", - "http-errors": "2.0.0", - "mime": "1.6.0", - "ms": "2.1.3", - "on-finished": "2.4.1", - "range-parser": "~1.2.1", - "statuses": "2.0.1" }, "engines": { - "node": ">= 0.8.0" - } - }, - "node_modules/send/node_modules/debug": { - "version": "2.6.9", - "resolved": "https://registry.npmjs.org/debug/-/debug-2.6.9.tgz", - "integrity": "sha512-bC7ElrdJaJnPbAP+1EotYvqZsb3ecl5wi6Bfi6BJTUcNowp6cvspg0jXznRTKDjm/E7AdgFBVeAPVMNcKGsHMA==", - "dev": true, - "peer": true, - "dependencies": { - "ms": "2.0.0" - } - }, - "node_modules/send/node_modules/debug/node_modules/ms": { - "version": "2.0.0", - "resolved": "https://registry.npmjs.org/ms/-/ms-2.0.0.tgz", - "integrity": "sha512-Tpp60P6IUJDTuOq/5Z8cdskzJujfwqfOTkrwIwj7IRISpnkJnT6SyJ4PCPnGMoFjC9ddhal5KVIYtAt97ix05A==", - "dev": true, - "peer": true - }, - "node_modules/send/node_modules/encodeurl": { - "version": "1.0.2", - "resolved": "https://registry.npmjs.org/encodeurl/-/encodeurl-1.0.2.tgz", - "integrity": "sha512-TPJXq8JqFaVYm2CWmPvnP2Iyo4ZSM7/QKcSmuMLDObfpH5fi7RUGmd/rTDf+rut/saiDiQEeVTNgAmJEdAOx0w==", - "dev": true, - "peer": true, - "engines": { - "node": ">= 0.8" - } - }, - "node_modules/serialize-javascript": { - "version": "6.0.2", - "resolved": "https://registry.npmjs.org/serialize-javascript/-/serialize-javascript-6.0.2.tgz", - "integrity": "sha512-Saa1xPByTTq2gdeFZYLLo+RFE35NHZkAbqZeWNd3BpzppeVisAqpDjcp8dyf6uIvEqJRd46jemmyA4iFIeVk8g==", - "devOptional": true, - "dependencies": { - "randombytes": "^2.1.0" - } - }, - "node_modules/serve-index": { - "version": "1.9.1", - "resolved": "https://registry.npmjs.org/serve-index/-/serve-index-1.9.1.tgz", - "integrity": 
"sha512-pXHfKNP4qujrtteMrSBb0rc8HJ9Ms/GrXwcUtUtD5s4ewDJI8bT3Cz2zTVRMKtri49pLx2e0Ya8ziP5Ya2pZZw==", - "dev": true, - "peer": true, - "dependencies": { - "accepts": "~1.3.4", - "batch": "0.6.1", - "debug": "2.6.9", - "escape-html": "~1.0.3", - "http-errors": "~1.6.2", - "mime-types": "~2.1.17", - "parseurl": "~1.3.2" - }, - "engines": { - "node": ">= 0.8.0" - } - }, - "node_modules/serve-index/node_modules/debug": { - "version": "2.6.9", - "resolved": "https://registry.npmjs.org/debug/-/debug-2.6.9.tgz", - "integrity": "sha512-bC7ElrdJaJnPbAP+1EotYvqZsb3ecl5wi6Bfi6BJTUcNowp6cvspg0jXznRTKDjm/E7AdgFBVeAPVMNcKGsHMA==", - "dev": true, - "peer": true, - "dependencies": { - "ms": "2.0.0" - } - }, - "node_modules/serve-index/node_modules/depd": { - "version": "1.1.2", - "resolved": "https://registry.npmjs.org/depd/-/depd-1.1.2.tgz", - "integrity": "sha512-7emPTl6Dpo6JRXOXjLRxck+FlLRX5847cLKEn00PLAgc3g2hTZZgr+e4c2v6QpSmLeFP3n5yUo7ft6avBK/5jQ==", - "dev": true, - "peer": true, - "engines": { - "node": ">= 0.6" - } - }, - "node_modules/serve-index/node_modules/http-errors": { - "version": "1.6.3", - "resolved": "https://registry.npmjs.org/http-errors/-/http-errors-1.6.3.tgz", - "integrity": "sha512-lks+lVC8dgGyh97jxvxeYTWQFvh4uw4yC12gVl63Cg30sjPX4wuGcdkICVXDAESr6OJGjqGA8Iz5mkeN6zlD7A==", - "dev": true, - "peer": true, - "dependencies": { - "depd": "~1.1.2", - "inherits": "2.0.3", - "setprototypeof": "1.1.0", - "statuses": ">= 1.4.0 < 2" - }, - "engines": { - "node": ">= 0.6" - } - }, - "node_modules/serve-index/node_modules/inherits": { - "version": "2.0.3", - "resolved": "https://registry.npmjs.org/inherits/-/inherits-2.0.3.tgz", - "integrity": "sha512-x00IRNXNy63jwGkJmzPigoySHbaqpNuzKbBOmzK+g2OdZpQ9w+sxCN+VSB3ja7IAge2OP2qpfxTjeNcyjmW1uw==", - "dev": true, - "peer": true - }, - "node_modules/serve-index/node_modules/ms": { - "version": "2.0.0", - "resolved": "https://registry.npmjs.org/ms/-/ms-2.0.0.tgz", - "integrity": "sha512-Tpp60P6IUJDTuOq/5Z8cdskzJujfwqfOTkrwIwj7IRISpnkJnT6SyJ4PCPnGMoFjC9ddhal5KVIYtAt97ix05A==", - "dev": true, - "peer": true - }, - "node_modules/serve-index/node_modules/setprototypeof": { - "version": "1.1.0", - "resolved": "https://registry.npmjs.org/setprototypeof/-/setprototypeof-1.1.0.tgz", - "integrity": "sha512-BvE/TwpZX4FXExxOxZyRGQQv651MSwmWKZGqvmPcRIjDqWub67kTKuIMx43cZZrS/cBBzwBcNDWoFxt2XEFIpQ==", - "dev": true, - "peer": true - }, - "node_modules/serve-index/node_modules/statuses": { - "version": "1.5.0", - "resolved": "https://registry.npmjs.org/statuses/-/statuses-1.5.0.tgz", - "integrity": "sha512-OpZ3zP+jT1PI7I8nemJX4AKmAX070ZkYPVWV/AaKTJl+tXCTGyVdC1a4SL8RUQYEwk/f34ZX8UTykN68FwrqAA==", - "dev": true, - "peer": true, - "engines": { - "node": ">= 0.6" - } - }, - "node_modules/serve-static": { - "version": "1.16.2", - "resolved": "https://registry.npmjs.org/serve-static/-/serve-static-1.16.2.tgz", - "integrity": "sha512-VqpjJZKadQB/PEbEwvFdO43Ax5dFBZ2UECszz8bQ7pi7wt//PWe1P6MN7eCnjsatYtBT6EuiClbjSWP2WrIoTw==", - "dev": true, - "peer": true, - "dependencies": { - "encodeurl": "~2.0.0", - "escape-html": "~1.0.3", - "parseurl": "~1.3.3", - "send": "0.19.0" - }, - "engines": { - "node": ">= 0.8.0" - } - }, - "node_modules/set-function-length": { - "version": "1.2.2", - "resolved": "https://registry.npmjs.org/set-function-length/-/set-function-length-1.2.2.tgz", - "integrity": "sha512-pgRc4hJ4/sNjWCSS9AmnS40x3bNMDTknHgL5UaMBTMyJnU90EgWh1Rz+MC9eFu4BuN/UwZjKQuY/1v3rM7HMfg==", - "dev": true, - "dependencies": { - "define-data-property": "^1.1.4", - "es-errors": "^1.3.0", - 
"function-bind": "^1.1.2", - "get-intrinsic": "^1.2.4", - "gopd": "^1.0.1", - "has-property-descriptors": "^1.0.2" - }, - "engines": { - "node": ">= 0.4" - } - }, - "node_modules/setprototypeof": { - "version": "1.2.0", - "resolved": "https://registry.npmjs.org/setprototypeof/-/setprototypeof-1.2.0.tgz", - "integrity": "sha512-E5LDX7Wrp85Kil5bhZv46j8jOeboKq5JMmYM3gVGdGH8xFpPWXUMsNrlODCrkoxMEeNi/XZIwuRvY4XNwYMJpw==", - "dev": true, - "peer": true - }, - "node_modules/shallow-clone": { - "version": "3.0.1", - "resolved": "https://registry.npmjs.org/shallow-clone/-/shallow-clone-3.0.1.tgz", - "integrity": "sha512-/6KqX+GVUdqPuPPd2LxDDxzX6CAbjJehAAOKlNpqqUpAqPM6HeL8f+o3a+JsyGjn2lv0WY8UsTgUJjU9Ok55NA==", - "dev": true, - "peer": true, - "dependencies": { - "kind-of": "^6.0.2" - }, - "engines": { - "node": ">=8" + "node": ">=10" } }, "node_modules/shebang-command": { @@ -10452,173 +2630,6 @@ "node": ">=8" } }, - "node_modules/shell-quote": { - "version": "1.8.3", - "resolved": "https://registry.npmjs.org/shell-quote/-/shell-quote-1.8.3.tgz", - "integrity": "sha512-ObmnIF4hXNg1BqhnHmgbDETF8dLPCggZWBjkQfhZpbszZnYur5DUljTcCHii5LC3J5E0yeO/1LIMyH+UvHQgyw==", - "dev": true, - "engines": { - "node": ">= 0.4" - }, - "funding": { - "url": "https://github.com/sponsors/ljharb" - } - }, - "node_modules/shelljs": { - "version": "0.8.5", - "resolved": "https://registry.npmjs.org/shelljs/-/shelljs-0.8.5.tgz", - "integrity": "sha512-TiwcRcrkhHvbrZbnRcFYMLl30Dfov3HKqzp5tO5b4pt6G/SezKcYhmDg15zXVBswHmctSAQKznqNW2LO5tTDow==", - "dev": true, - "dependencies": { - "glob": "^7.0.0", - "interpret": "^1.0.0", - "rechoir": "^0.6.2" - }, - "bin": { - "shjs": "bin/shjs" - }, - "engines": { - "node": ">=4" - } - }, - "node_modules/side-channel": { - "version": "1.1.0", - "resolved": "https://registry.npmjs.org/side-channel/-/side-channel-1.1.0.tgz", - "integrity": "sha512-ZX99e6tRweoUXqR+VBrslhda51Nh5MTQwou5tnUDgbtyM0dBgmhEDtWGP/xbKn6hqfPRHujUNwz5fy/wbbhnpw==", - "dev": true, - "peer": true, - "dependencies": { - "es-errors": "^1.3.0", - "object-inspect": "^1.13.3", - "side-channel-list": "^1.0.0", - "side-channel-map": "^1.0.1", - "side-channel-weakmap": "^1.0.2" - }, - "engines": { - "node": ">= 0.4" - }, - "funding": { - "url": "https://github.com/sponsors/ljharb" - } - }, - "node_modules/side-channel-list": { - "version": "1.0.0", - "resolved": "https://registry.npmjs.org/side-channel-list/-/side-channel-list-1.0.0.tgz", - "integrity": "sha512-FCLHtRD/gnpCiCHEiJLOwdmFP+wzCmDEkc9y7NsYxeF4u7Btsn1ZuwgwJGxImImHicJArLP4R0yX4c2KCrMrTA==", - "dev": true, - "peer": true, - "dependencies": { - "es-errors": "^1.3.0", - "object-inspect": "^1.13.3" - }, - "engines": { - "node": ">= 0.4" - }, - "funding": { - "url": "https://github.com/sponsors/ljharb" - } - }, - "node_modules/side-channel-map": { - "version": "1.0.1", - "resolved": "https://registry.npmjs.org/side-channel-map/-/side-channel-map-1.0.1.tgz", - "integrity": "sha512-VCjCNfgMsby3tTdo02nbjtM/ewra6jPHmpThenkTYh8pG9ucZ/1P8So4u4FGBek/BjpOVsDCMoLA/iuBKIFXRA==", - "dev": true, - "peer": true, - "dependencies": { - "call-bound": "^1.0.2", - "es-errors": "^1.3.0", - "get-intrinsic": "^1.2.5", - "object-inspect": "^1.13.3" - }, - "engines": { - "node": ">= 0.4" - }, - "funding": { - "url": "https://github.com/sponsors/ljharb" - } - }, - "node_modules/side-channel-weakmap": { - "version": "1.0.2", - "resolved": "https://registry.npmjs.org/side-channel-weakmap/-/side-channel-weakmap-1.0.2.tgz", - "integrity": 
"sha512-WPS/HvHQTYnHisLo9McqBHOJk2FkHO/tlpvldyrnem4aeQp4hai3gythswg6p01oSoTl58rcpiFAjF2br2Ak2A==", - "dev": true, - "peer": true, - "dependencies": { - "call-bound": "^1.0.2", - "es-errors": "^1.3.0", - "get-intrinsic": "^1.2.5", - "object-inspect": "^1.13.3", - "side-channel-map": "^1.0.1" - }, - "engines": { - "node": ">= 0.4" - }, - "funding": { - "url": "https://github.com/sponsors/ljharb" - } - }, - "node_modules/signal-exit": { - "version": "3.0.7", - "resolved": "https://registry.npmjs.org/signal-exit/-/signal-exit-3.0.7.tgz", - "integrity": "sha512-wnD2ZE+l+SPC/uoS0vXeE9L1+0wuaMqKlfz9AMUo38JsyLSBWSFcHR1Rri62LZc12vLr1gb3jl7iwQhgwpAbGQ==", - "dev": true - }, - "node_modules/sirv": { - "version": "2.0.4", - "resolved": "https://registry.npmjs.org/sirv/-/sirv-2.0.4.tgz", - "integrity": "sha512-94Bdh3cC2PKrbgSOUqTiGPWVZeSiXfKOVZNJniWoqrWrRkB1CJzBU3NEbiTsPcYy1lDsANA/THzS+9WBiy5nfQ==", - "dev": true, - "peer": true, - "dependencies": { - "@polka/url": "^1.0.0-next.24", - "mrmime": "^2.0.0", - "totalist": "^3.0.0" - }, - "engines": { - "node": ">= 10" - } - }, - "node_modules/slash": { - "version": "3.0.0", - "resolved": "https://registry.npmjs.org/slash/-/slash-3.0.0.tgz", - "integrity": "sha512-g9Q1haeby36OSStwb4ntCGGGaKsaVSjQ68fBxoQcutl5fS1vuY18H3wSt3jFyFtrkx+Kz0V1G85A4MyAdDMi2Q==", - "dev": true, - "peer": true, - "engines": { - "node": ">=8" - } - }, - "node_modules/sockjs": { - "version": "0.3.24", - "resolved": "https://registry.npmjs.org/sockjs/-/sockjs-0.3.24.tgz", - "integrity": "sha512-GJgLTZ7vYb/JtPSSZ10hsOYIvEYsjbNU+zPdIHcUaWVNUEPivzxku31865sSSud0Da0W4lEeOPlmw93zLQchuQ==", - "dev": true, - "peer": true, - "dependencies": { - "faye-websocket": "^0.11.3", - "uuid": "^8.3.2", - "websocket-driver": "^0.7.4" - } - }, - "node_modules/sockjs/node_modules/uuid": { - "version": "8.3.2", - "resolved": "https://registry.npmjs.org/uuid/-/uuid-8.3.2.tgz", - "integrity": "sha512-+NYs2QeMWy+GWFOEm9xnn6HCDp0l7QBD7ml8zLUmJ+93Q5NF0NocErnwkTkXVFNiX3/fpC6afS8Dhb/gz7R7eg==", - "dev": true, - "peer": true, - "bin": { - "uuid": "dist/bin/uuid" - } - }, - "node_modules/source-map": { - "version": "0.6.1", - "resolved": "https://registry.npmjs.org/source-map/-/source-map-0.6.1.tgz", - "integrity": "sha512-UjgapumWlbMhkBgzT7Ykc5YXUT46F0iKu8SGXq0bcwP5dz/h0Plj6enJqjz1Zbq2l5WaqYnrVbwWOWMyF3F47g==", - "devOptional": true, - "engines": { - "node": ">=0.10.0" - } - }, "node_modules/source-map-js": { "version": "1.2.1", "resolved": "https://registry.npmjs.org/source-map-js/-/source-map-js-1.2.1.tgz", @@ -10627,173 +2638,6 @@ "node": ">=0.10.0" } }, - "node_modules/source-map-support": { - "version": "0.5.21", - "resolved": "https://registry.npmjs.org/source-map-support/-/source-map-support-0.5.21.tgz", - "integrity": "sha512-uBHU3L3czsIyYXKX88fdrGovxdSCoTGDRZ6SYXtSRxLZUzHg5P/66Ht6uoUlHu9EZod+inXhKo3qQgwXUT/y1w==", - "devOptional": true, - "dependencies": { - "buffer-from": "^1.0.0", - "source-map": "^0.6.0" - } - }, - "node_modules/spdx-correct": { - "version": "3.2.0", - "resolved": "https://registry.npmjs.org/spdx-correct/-/spdx-correct-3.2.0.tgz", - "integrity": "sha512-kN9dJbvnySHULIluDHy32WHRUu3Og7B9sbY7tsFLctQkIqnMh3hErYgdMjTYuqmcXX+lK5T1lnUt3G7zNswmZA==", - "dev": true, - "dependencies": { - "spdx-expression-parse": "^3.0.0", - "spdx-license-ids": "^3.0.0" - } - }, - "node_modules/spdx-exceptions": { - "version": "2.5.0", - "resolved": "https://registry.npmjs.org/spdx-exceptions/-/spdx-exceptions-2.5.0.tgz", - "integrity": 
"sha512-PiU42r+xO4UbUS1buo3LPJkjlO7430Xn5SVAhdpzzsPHsjbYVflnnFdATgabnLude+Cqu25p6N+g2lw/PFsa4w==", - "dev": true - }, - "node_modules/spdx-expression-parse": { - "version": "3.0.1", - "resolved": "https://registry.npmjs.org/spdx-expression-parse/-/spdx-expression-parse-3.0.1.tgz", - "integrity": "sha512-cbqHunsQWnJNE6KhVSMsMeH5H/L9EpymbzqTQ3uLwNCLZ1Q481oWaofqH7nO6V07xlXwY6PhQdQ2IedWx/ZK4Q==", - "dev": true, - "dependencies": { - "spdx-exceptions": "^2.1.0", - "spdx-license-ids": "^3.0.0" - } - }, - "node_modules/spdx-license-ids": { - "version": "3.0.21", - "resolved": "https://registry.npmjs.org/spdx-license-ids/-/spdx-license-ids-3.0.21.tgz", - "integrity": "sha512-Bvg/8F5XephndSK3JffaRqdT+gyhfqIPwDHpX80tJrF8QQRYMo8sNMeaZ2Dp5+jhwKnUmIOyFFQfHRkjJm5nXg==", - "dev": true - }, - "node_modules/spdy": { - "version": "4.0.2", - "resolved": "https://registry.npmjs.org/spdy/-/spdy-4.0.2.tgz", - "integrity": "sha512-r46gZQZQV+Kl9oItvl1JZZqJKGr+oEkB08A6BzkiR7593/7IbtuncXHd2YoYeTsG4157ZssMu9KYvUHLcjcDoA==", - "dev": true, - "peer": true, - "dependencies": { - "debug": "^4.1.0", - "handle-thing": "^2.0.0", - "http-deceiver": "^1.2.7", - "select-hose": "^2.0.0", - "spdy-transport": "^3.0.0" - }, - "engines": { - "node": ">=6.0.0" - } - }, - "node_modules/spdy-transport": { - "version": "3.0.0", - "resolved": "https://registry.npmjs.org/spdy-transport/-/spdy-transport-3.0.0.tgz", - "integrity": "sha512-hsLVFE5SjA6TCisWeJXFKniGGOpBgMLmerfO2aCyCU5s7nJ/rpAepqmFifv/GCbSbueEeAJJnmSQ2rKC/g8Fcw==", - "dev": true, - "peer": true, - "dependencies": { - "debug": "^4.1.0", - "detect-node": "^2.0.4", - "hpack.js": "^2.1.6", - "obuf": "^1.1.2", - "readable-stream": "^3.0.6", - "wbuf": "^1.7.3" - } - }, - "node_modules/ssri": { - "version": "8.0.1", - "resolved": "https://registry.npmjs.org/ssri/-/ssri-8.0.1.tgz", - "integrity": "sha512-97qShzy1AiyxvPNIkLWoGua7xoQzzPjQ0HAH4B0rWKo7SZ6USuPcrUiAFrws0UH8RrbWmgq3LMTObhPIHbbBeQ==", - "dev": true, - "peer": true, - "dependencies": { - "minipass": "^3.1.1" - }, - "engines": { - "node": ">= 8" - } - }, - "node_modules/stable": { - "version": "0.1.8", - "resolved": "https://registry.npmjs.org/stable/-/stable-0.1.8.tgz", - "integrity": "sha512-ji9qxRnOVfcuLDySj9qzhGSEFVobyt1kIOSkj1qZzYLzq7Tos/oUUWvotUPQLlrsidqsK6tBH89Bc9kL5zHA6w==", - "deprecated": "Modern JS already guarantees Array#sort() is a stable sort, so this library is deprecated. 
See the compatibility table on MDN: https://developer.mozilla.org/en-US/docs/Web/JavaScript/Reference/Global_Objects/Array/sort#browser_compatibility", - "dev": true, - "peer": true - }, - "node_modules/stackframe": { - "version": "1.3.4", - "resolved": "https://registry.npmjs.org/stackframe/-/stackframe-1.3.4.tgz", - "integrity": "sha512-oeVtt7eWQS+Na6F//S4kJ2K2VbRlS9D43mAlMyVpVWovy9o+jfgH8O9agzANzaiLjclA0oYzUXEM4PurhSUChw==", - "dev": true, - "peer": true - }, - "node_modules/statuses": { - "version": "2.0.1", - "resolved": "https://registry.npmjs.org/statuses/-/statuses-2.0.1.tgz", - "integrity": "sha512-RwNA9Z/7PrK06rYLIzFMlaF+l73iwpzsqRIFgbMLbTcLD6cOao82TaWefPXQvB2fOC4AjuYSEndS7N/mTCbkdQ==", - "dev": true, - "peer": true, - "engines": { - "node": ">= 0.8" - } - }, - "node_modules/string_decoder": { - "version": "1.3.0", - "resolved": "https://registry.npmjs.org/string_decoder/-/string_decoder-1.3.0.tgz", - "integrity": "sha512-hkRX8U1WjJFd8LsDJ2yQ/wWWxaopEsABU1XfkM8A+j0+85JAGppt16cr1Whg6KIbb4okU6Mql6BOj+uup/wKeA==", - "dev": true, - "dependencies": { - "safe-buffer": "~5.2.0" - } - }, - "node_modules/string-width": { - "version": "4.2.3", - "resolved": "https://registry.npmjs.org/string-width/-/string-width-4.2.3.tgz", - "integrity": "sha512-wKyQRQpjJ0sIp62ErSZdGsjMJWsap5oRNihHhu6G7JVO/9jIB6UyevL+tXuOqrng8j/cxKTWyWUwvSTriiZz/g==", - "dev": true, - "peer": true, - "dependencies": { - "emoji-regex": "^8.0.0", - "is-fullwidth-code-point": "^3.0.0", - "strip-ansi": "^6.0.1" - }, - "engines": { - "node": ">=8" - } - }, - "node_modules/strip-ansi": { - "version": "6.0.1", - "resolved": "https://registry.npmjs.org/strip-ansi/-/strip-ansi-6.0.1.tgz", - "integrity": "sha512-Y38VPSHcqkFrCpFnQ9vuSXmquuv5oXOKpGeT6aGrr3o3Gc9AlVa6JBfUSOCnbxGGZF+/0ooI7KrPuUSztUdU5A==", - "dev": true, - "dependencies": { - "ansi-regex": "^5.0.1" - }, - "engines": { - "node": ">=8" - } - }, - "node_modules/strip-eof": { - "version": "1.0.0", - "resolved": "https://registry.npmjs.org/strip-eof/-/strip-eof-1.0.0.tgz", - "integrity": "sha512-7FCwGGmx8mD5xQd3RPUvnSpUXHM3BWuzjtpD4TXsfcZ9EL4azvVVUscFYwD9nx8Kh+uCBC00XBtAykoMHwTh8Q==", - "dev": true, - "engines": { - "node": ">=0.10.0" - } - }, - "node_modules/strip-final-newline": { - "version": "2.0.0", - "resolved": "https://registry.npmjs.org/strip-final-newline/-/strip-final-newline-2.0.0.tgz", - "integrity": "sha512-BrpvfNAE3dcvq7ll3xVumzjKjZQ5tI1sEUIKr3Uoks0XUl45St3FlatVqef9prk4jRDzhW6WZg+3bk93y6pLjA==", - "dev": true, - "peer": true, - "engines": { - "node": ">=6" - } - }, "node_modules/strip-json-comments": { "version": "3.1.1", "resolved": "https://registry.npmjs.org/strip-json-comments/-/strip-json-comments-3.1.1.tgz", @@ -10811,23 +2655,6 @@ "resolved": "https://registry.npmjs.org/style-mod/-/style-mod-4.1.2.tgz", "integrity": "sha512-wnD1HyVqpJUI2+eKZ+eo1UwghftP6yuFheBqqe+bWCotBjC2K1YnteJILRMs3SM4V/0dLEW1SC27MWP5y+mwmw==" }, - "node_modules/stylehacks": { - "version": "5.1.1", - "resolved": "https://registry.npmjs.org/stylehacks/-/stylehacks-5.1.1.tgz", - "integrity": "sha512-sBpcd5Hx7G6seo7b1LkpttvTz7ikD0LlH5RmdcBNb6fFR0Fl7LQwHDFr300q4cwUqi+IYrFGmsIHieMBfnN/Bw==", - "dev": true, - "peer": true, - "dependencies": { - "browserslist": "^4.21.4", - "postcss-selector-parser": "^6.0.4" - }, - "engines": { - "node": "^10 || ^12 || >=14.0" - }, - "peerDependencies": { - "postcss": "^8.2.15" - } - }, "node_modules/supports-color": { "version": "7.2.0", "resolved": "https://registry.npmjs.org/supports-color/-/supports-color-7.2.0.tgz", @@ -10840,307 +2667,22 @@ "node": 
">=8" } }, - "node_modules/supports-preserve-symlinks-flag": { - "version": "1.0.0", - "resolved": "https://registry.npmjs.org/supports-preserve-symlinks-flag/-/supports-preserve-symlinks-flag-1.0.0.tgz", - "integrity": "sha512-ot0WnXS9fgdkgIcePe6RHNk1WA8+muPa6cSjeR3V8K27q9BB1rTE3R1p7Hv0z1ZyAc8s6Vvv8DIyWf681MAt0w==", - "dev": true, - "engines": { - "node": ">= 0.4" - }, - "funding": { - "url": "https://github.com/sponsors/ljharb" - } - }, - "node_modules/svg-tags": { - "version": "1.0.0", - "resolved": "https://registry.npmjs.org/svg-tags/-/svg-tags-1.0.0.tgz", - "integrity": "sha512-ovssysQTa+luh7A5Weu3Rta6FJlFBBbInjOh722LIt6klpU2/HtdUbszju/G4devcvk8PGt7FCLv5wftu3THUA==", - "dev": true - }, - "node_modules/svgo": { - "version": "2.8.0", - "resolved": "https://registry.npmjs.org/svgo/-/svgo-2.8.0.tgz", - "integrity": "sha512-+N/Q9kV1+F+UeWYoSiULYo4xYSDQlTgb+ayMobAXPwMnLvop7oxKMo9OzIrX5x3eS4L4f2UHhc9axXwY8DpChg==", - "dev": true, - "peer": true, - "dependencies": { - "@trysound/sax": "0.2.0", - "commander": "^7.2.0", - "css-select": "^4.1.3", - "css-tree": "^1.1.3", - "csso": "^4.2.0", - "picocolors": "^1.0.0", - "stable": "^0.1.8" - }, - "bin": { - "svgo": "bin/svgo" - }, - "engines": { - "node": ">=10.13.0" - } - }, - "node_modules/svgo/node_modules/commander": { - "version": "7.2.0", - "resolved": "https://registry.npmjs.org/commander/-/commander-7.2.0.tgz", - "integrity": "sha512-QrWXB+ZQSVPmIWIhtEO9H+gwHaMGYiF5ChvoJ+K9ZGHG/sVsa6yiesAD1GC/x46sET00Xlwo1u49RVVVzvcSkw==", - "dev": true, - "peer": true, - "engines": { - "node": ">= 10" - } - }, - "node_modules/tapable": { - "version": "2.2.2", - "resolved": "https://registry.npmjs.org/tapable/-/tapable-2.2.2.tgz", - "integrity": "sha512-Re10+NauLTMCudc7T5WLFLAwDhQ0JWdrMK+9B2M8zR5hRExKmsRDCBA7/aV/pNJFltmBFO5BAMlQFi/vq3nKOg==", - "devOptional": true, - "engines": { - "node": ">=6" - } - }, - "node_modules/terser": { - "version": "5.41.0", - "resolved": "https://registry.npmjs.org/terser/-/terser-5.41.0.tgz", - "integrity": "sha512-H406eLPXpZbAX14+B8psIuvIr8+3c+2hkuYzpMkoE0ij+NdsVATbA78vb8neA/eqrj7rywa2pIkdmWRsXW6wmw==", + "node_modules/tinyglobby": { + "version": "0.2.14", + "resolved": "https://registry.npmjs.org/tinyglobby/-/tinyglobby-0.2.14.tgz", + "integrity": "sha512-tX5e7OM1HnYr2+a2C/4V0htOcSQcoSTH9KgJnVvNm5zm/cyEWKJ7j7YutsH9CxMdtOkkLFy2AHrMci9IM8IPZQ==", "devOptional": true, "dependencies": { - "@jridgewell/source-map": "^0.3.3", - "acorn": "^8.14.0", - "commander": "^2.20.0", - "source-map-support": "~0.5.20" - }, - "bin": { - "terser": "bin/terser" + "fdir": "^6.4.4", + "picomatch": "^4.0.2" }, "engines": { - "node": ">=10" - } - }, - "node_modules/terser-webpack-plugin": { - "version": "5.3.14", - "resolved": "https://registry.npmjs.org/terser-webpack-plugin/-/terser-webpack-plugin-5.3.14.tgz", - "integrity": "sha512-vkZjpUjb6OMS7dhV+tILUW6BhpDR7P2L/aQSAv+Uwk+m8KATX9EccViHTJR2qDtACKPIYndLGCyl3FMo+r2LMw==", - "devOptional": true, - "dependencies": { - "@jridgewell/trace-mapping": "^0.3.25", - "jest-worker": "^27.4.5", - "schema-utils": "^4.3.0", - "serialize-javascript": "^6.0.2", - "terser": "^5.31.1" - }, - "engines": { - "node": ">= 10.13.0" + "node": ">=12.0.0" }, "funding": { - "type": "opencollective", - "url": "https://opencollective.com/webpack" - }, - "peerDependencies": { - "webpack": "^5.1.0" - }, - "peerDependenciesMeta": { - "@swc/core": { - "optional": true - }, - "esbuild": { - "optional": true - }, - "uglify-js": { - "optional": true - } + "url": "https://github.com/sponsors/SuperchupuDev" } }, - 
"node_modules/terser-webpack-plugin/node_modules/ajv": { - "version": "8.17.1", - "resolved": "https://registry.npmjs.org/ajv/-/ajv-8.17.1.tgz", - "integrity": "sha512-B/gBuNg5SiMTrPkC+A2+cW0RszwxYmn6VYxB/inlBStS5nx6xHIt/ehKRhIMhqusl7a8LjQoZnjCs5vhwxOQ1g==", - "devOptional": true, - "dependencies": { - "fast-deep-equal": "^3.1.3", - "fast-uri": "^3.0.1", - "json-schema-traverse": "^1.0.0", - "require-from-string": "^2.0.2" - }, - "funding": { - "type": "github", - "url": "https://github.com/sponsors/epoberezkin" - } - }, - "node_modules/terser-webpack-plugin/node_modules/ajv-keywords": { - "version": "5.1.0", - "resolved": "https://registry.npmjs.org/ajv-keywords/-/ajv-keywords-5.1.0.tgz", - "integrity": "sha512-YCS/JNFAUyr5vAuhk1DWm1CBxRHW9LbJ2ozWeemrIqpbsqKjHVxYPyi5GC0rjZIT5JxJ3virVTS8wk4i/Z+krw==", - "devOptional": true, - "dependencies": { - "fast-deep-equal": "^3.1.3" - }, - "peerDependencies": { - "ajv": "^8.8.2" - } - }, - "node_modules/terser-webpack-plugin/node_modules/json-schema-traverse": { - "version": "1.0.0", - "resolved": "https://registry.npmjs.org/json-schema-traverse/-/json-schema-traverse-1.0.0.tgz", - "integrity": "sha512-NM8/P9n3XjXhIZn1lLhkFaACTOURQXjWhV4BA/RnOv8xvgqtqpAX9IO4mRQxSx1Rlo4tqzeqb0sOlruaOy3dug==", - "devOptional": true - }, - "node_modules/terser-webpack-plugin/node_modules/schema-utils": { - "version": "4.3.2", - "resolved": "https://registry.npmjs.org/schema-utils/-/schema-utils-4.3.2.tgz", - "integrity": "sha512-Gn/JaSk/Mt9gYubxTtSn/QCV4em9mpAPiR1rqy/Ocu19u/G9J5WWdNoUT4SiV6mFC3y6cxyFcFwdzPM3FgxGAQ==", - "devOptional": true, - "dependencies": { - "@types/json-schema": "^7.0.9", - "ajv": "^8.9.0", - "ajv-formats": "^2.1.1", - "ajv-keywords": "^5.1.0" - }, - "engines": { - "node": ">= 10.13.0" - }, - "funding": { - "type": "opencollective", - "url": "https://opencollective.com/webpack" - } - }, - "node_modules/terser/node_modules/commander": { - "version": "2.20.3", - "resolved": "https://registry.npmjs.org/commander/-/commander-2.20.3.tgz", - "integrity": "sha512-GpVkmM8vF2vQUkj2LvZmD35JxeJOLCwJ9cUkugyk2nuhbv3+mJvpLYYt+0+USMxE+oj+ey/lJEnhZw75x/OMcQ==", - "devOptional": true - }, - "node_modules/thenify": { - "version": "3.3.1", - "resolved": "https://registry.npmjs.org/thenify/-/thenify-3.3.1.tgz", - "integrity": "sha512-RVZSIV5IG10Hk3enotrhvz0T9em6cyHBLkH/YAZuKqd8hRkKhSfCGIcP2KUY0EPxndzANBmNllzWPwak+bheSw==", - "dev": true, - "peer": true, - "dependencies": { - "any-promise": "^1.0.0" - } - }, - "node_modules/thenify-all": { - "version": "1.6.0", - "resolved": "https://registry.npmjs.org/thenify-all/-/thenify-all-1.6.0.tgz", - "integrity": "sha512-RNxQH/qI8/t3thXJDwcstUO4zeqo64+Uy/+sNVRBx4Xn2OX+OZ9oP+iJnNFqplFra2ZUVeKCSa2oVWi3T4uVmA==", - "dev": true, - "peer": true, - "dependencies": { - "thenify": ">= 3.1.0 < 4" - }, - "engines": { - "node": ">=0.8" - } - }, - "node_modules/thread-loader": { - "version": "3.0.4", - "resolved": "https://registry.npmjs.org/thread-loader/-/thread-loader-3.0.4.tgz", - "integrity": "sha512-ByaL2TPb+m6yArpqQUZvP+5S1mZtXsEP7nWKKlAUTm7fCml8kB5s1uI3+eHRP2bk5mVYfRSBI7FFf+tWEyLZwA==", - "dev": true, - "dependencies": { - "json-parse-better-errors": "^1.0.2", - "loader-runner": "^4.1.0", - "loader-utils": "^2.0.0", - "neo-async": "^2.6.2", - "schema-utils": "^3.0.0" - }, - "engines": { - "node": ">= 10.13.0" - }, - "funding": { - "type": "opencollective", - "url": "https://opencollective.com/webpack" - }, - "peerDependencies": { - "webpack": "^4.27.0 || ^5.0.0" - } - }, - "node_modules/thread-loader/node_modules/loader-utils": { 
- "version": "2.0.4", - "resolved": "https://registry.npmjs.org/loader-utils/-/loader-utils-2.0.4.tgz", - "integrity": "sha512-xXqpXoINfFhgua9xiqD8fPFHgkoq1mmmpE92WlDbm9rNRd/EbRb+Gqf908T2DMfuHjjJlksiK2RbHVOdD/MqSw==", - "dev": true, - "dependencies": { - "big.js": "^5.2.2", - "emojis-list": "^3.0.0", - "json5": "^2.1.2" - }, - "engines": { - "node": ">=8.9.0" - } - }, - "node_modules/thread-loader/node_modules/schema-utils": { - "version": "3.3.0", - "resolved": "https://registry.npmjs.org/schema-utils/-/schema-utils-3.3.0.tgz", - "integrity": "sha512-pN/yOAvcC+5rQ5nERGuwrjLlYvLTbCibnZ1I7B1LaiAz9BRBlE9GMgE/eqV30P7aJQUf7Ddimy/RsbYO/GrVGg==", - "dev": true, - "dependencies": { - "@types/json-schema": "^7.0.8", - "ajv": "^6.12.5", - "ajv-keywords": "^3.5.2" - }, - "engines": { - "node": ">= 10.13.0" - }, - "funding": { - "type": "opencollective", - "url": "https://opencollective.com/webpack" - } - }, - "node_modules/thunky": { - "version": "1.1.0", - "resolved": "https://registry.npmjs.org/thunky/-/thunky-1.1.0.tgz", - "integrity": "sha512-eHY7nBftgThBqOyHGVN+l8gF0BucP09fMo0oO/Lb0w1OF80dJv+lDVpXG60WMQvkcxAkNybKsrEIE3ZtKGmPrA==", - "dev": true, - "peer": true - }, - "node_modules/to-regex-range": { - "version": "5.0.1", - "resolved": "https://registry.npmjs.org/to-regex-range/-/to-regex-range-5.0.1.tgz", - "integrity": "sha512-65P7iz6X5yEr1cwcgvQxbbIw7Uk3gOy5dIdtZ4rDveLqhrdJP+Li/Hx6tyK0NEb+2GCyneCMJiGqrADCSNk8sQ==", - "dev": true, - "peer": true, - "dependencies": { - "is-number": "^7.0.0" - }, - "engines": { - "node": ">=8.0" - } - }, - "node_modules/toidentifier": { - "version": "1.0.1", - "resolved": "https://registry.npmjs.org/toidentifier/-/toidentifier-1.0.1.tgz", - "integrity": "sha512-o5sSPKEkg/DIQNmH43V0/uerLrpzVedkUh8tGNvaeXpfpuwjKenlSox/2O/BTlZUtEe+JG7s5YhEz608PlAHRA==", - "dev": true, - "peer": true, - "engines": { - "node": ">=0.6" - } - }, - "node_modules/totalist": { - "version": "3.0.1", - "resolved": "https://registry.npmjs.org/totalist/-/totalist-3.0.1.tgz", - "integrity": "sha512-sf4i37nQ2LBx4m3wB74y+ubopq6W/dIzXg0FDGjsYnZHVa1Da8FH853wlL2gtUhg+xJXjfk3kUZS3BRoQeoQBQ==", - "dev": true, - "peer": true, - "engines": { - "node": ">=6" - } - }, - "node_modules/tr46": { - "version": "0.0.3", - "resolved": "https://registry.npmjs.org/tr46/-/tr46-0.0.3.tgz", - "integrity": "sha512-N3WMsuqV66lT30CrXNbEjx4GEwlow3v6rr4mCcv6prnfwhS01rkgyFdjPNBYd9br7LpXV1+Emh01fHnq2Gdgrw==", - "dev": true - }, - "node_modules/tslib": { - "version": "2.8.1", - "resolved": "https://registry.npmjs.org/tslib/-/tslib-2.8.1.tgz", - "integrity": "sha512-oJFu94HQb+KVduSUQL7wnpmqnfmLsOA/nAh6b6EH0wCEoK0/mPeXU6c3wKDV83MkOuHPRHtSXKKU99IBazS/2w==", - "dev": true, - "peer": true - }, "node_modules/type-check": { "version": "0.4.0", "resolved": "https://registry.npmjs.org/type-check/-/type-check-0.4.0.tgz", @@ -11164,86 +2706,6 @@ "url": "https://github.com/sponsors/sindresorhus" } }, - "node_modules/type-is": { - "version": "1.6.18", - "resolved": "https://registry.npmjs.org/type-is/-/type-is-1.6.18.tgz", - "integrity": "sha512-TkRKr9sUTxEH8MdfuCSP7VizJyzRNMjj2J2do2Jr3Kym598JVdEksuzPQCnlFPW4ky9Q+iA+ma9BGm06XQBy8g==", - "dev": true, - "peer": true, - "dependencies": { - "media-typer": "0.3.0", - "mime-types": "~2.1.24" - }, - "engines": { - "node": ">= 0.6" - } - }, - "node_modules/undici-types": { - "version": "6.21.0", - "resolved": "https://registry.npmjs.org/undici-types/-/undici-types-6.21.0.tgz", - "integrity": "sha512-iwDZqg0QAGrg9Rav5H4n0M64c3mkR59cJ6wQp+7C4nI0gsmExaedaYLNO44eT4AtBBwjbTiGPMlt2Md0T9H9JQ==", - 
"devOptional": true - }, - "node_modules/unicode-canonical-property-names-ecmascript": { - "version": "2.0.1", - "resolved": "https://registry.npmjs.org/unicode-canonical-property-names-ecmascript/-/unicode-canonical-property-names-ecmascript-2.0.1.tgz", - "integrity": "sha512-dA8WbNeb2a6oQzAQ55YlT5vQAWGV9WXOsi3SskE3bcCdM0P4SDd+24zS/OCacdRq5BkdsRj9q3Pg6YyQoxIGqg==", - "dev": true, - "engines": { - "node": ">=4" - } - }, - "node_modules/unicode-match-property-ecmascript": { - "version": "2.0.0", - "resolved": "https://registry.npmjs.org/unicode-match-property-ecmascript/-/unicode-match-property-ecmascript-2.0.0.tgz", - "integrity": "sha512-5kaZCrbp5mmbz5ulBkDkbY0SsPOjKqVS35VpL9ulMPfSl0J0Xsm+9Evphv9CoIZFwre7aJoa94AY6seMKGVN5Q==", - "dev": true, - "dependencies": { - "unicode-canonical-property-names-ecmascript": "^2.0.0", - "unicode-property-aliases-ecmascript": "^2.0.0" - }, - "engines": { - "node": ">=4" - } - }, - "node_modules/unicode-match-property-value-ecmascript": { - "version": "2.2.0", - "resolved": "https://registry.npmjs.org/unicode-match-property-value-ecmascript/-/unicode-match-property-value-ecmascript-2.2.0.tgz", - "integrity": "sha512-4IehN3V/+kkr5YeSSDDQG8QLqO26XpL2XP3GQtqwlT/QYSECAwFztxVHjlbh0+gjJ3XmNLS0zDsbgs9jWKExLg==", - "dev": true, - "engines": { - "node": ">=4" - } - }, - "node_modules/unicode-property-aliases-ecmascript": { - "version": "2.1.0", - "resolved": "https://registry.npmjs.org/unicode-property-aliases-ecmascript/-/unicode-property-aliases-ecmascript-2.1.0.tgz", - "integrity": "sha512-6t3foTQI9qne+OZoVQB/8x8rk2k1eVy1gRXhV3oFQ5T6R1dqQ1xtin3XqSlx3+ATBkliTaR/hHyJBm+LVPNM8w==", - "dev": true, - "engines": { - "node": ">=4" - } - }, - "node_modules/universalify": { - "version": "2.0.1", - "resolved": "https://registry.npmjs.org/universalify/-/universalify-2.0.1.tgz", - "integrity": "sha512-gptHNQghINnc/vTGIk0SOFGFNXw7JVrlRUtConJRlvaw6DuX0wO5Jeko9sWrMBhh+PsYAZ7oXAiOnf/UKogyiw==", - "dev": true, - "peer": true, - "engines": { - "node": ">= 10.0.0" - } - }, - "node_modules/unpipe": { - "version": "1.0.0", - "resolved": "https://registry.npmjs.org/unpipe/-/unpipe-1.0.0.tgz", - "integrity": "sha512-pjy2bYhSsufwWlKwPc+l3cN7+wuJlK6uz0YdJEOlQDbl6jo/YlPi4mb8agUkVC8BF7V8NuzeyPNqRksA3hztKQ==", - "dev": true, - "peer": true, - "engines": { - "node": ">= 0.8" - } - }, "node_modules/upath": { "version": "2.0.1", "resolved": "https://registry.npmjs.org/upath/-/upath-2.0.1.tgz", @@ -11254,41 +2716,11 @@ "yarn": "*" } }, - "node_modules/update-browserslist-db": { - "version": "1.1.3", - "resolved": "https://registry.npmjs.org/update-browserslist-db/-/update-browserslist-db-1.1.3.tgz", - "integrity": "sha512-UxhIZQ+QInVdunkDAaiazvvT/+fXL5Osr0JZlJulepYu6Jd7qJtDZjlur0emRlT71EN3ScPoE7gvsuIKKNavKw==", - "devOptional": true, - "funding": [ - { - "type": "opencollective", - "url": "https://opencollective.com/browserslist" - }, - { - "type": "tidelift", - "url": "https://tidelift.com/funding/github/npm/browserslist" - }, - { - "type": "github", - "url": "https://github.com/sponsors/ai" - } - ], - "dependencies": { - "escalade": "^3.2.0", - "picocolors": "^1.1.1" - }, - "bin": { - "update-browserslist-db": "cli.js" - }, - "peerDependencies": { - "browserslist": ">= 4.21.0" - } - }, "node_modules/uri-js": { "version": "4.4.1", "resolved": "https://registry.npmjs.org/uri-js/-/uri-js-4.4.1.tgz", "integrity": "sha512-7rKUyy33Q1yc98pQ1DAmLtwX109F7TIfWlW1Ydo8Wl1ii1SeHieeh0HHfPeL2fMXK6z0s8ecKs9frCuLJvndBg==", - "devOptional": true, + "dev": true, "dependencies": { "punycode": "^2.1.0" } @@ 
-11299,23 +2731,6 @@ "integrity": "sha512-EPD5q1uXyFxJpCrLnCc1nHnq3gOa6DZBocAIiI2TaSCA7VCJ1UJDMagCzIkXNsUYfD1daK//LTEQ8xiIbrHtcw==", "dev": true }, - "node_modules/utila": { - "version": "0.4.0", - "resolved": "https://registry.npmjs.org/utila/-/utila-0.4.0.tgz", - "integrity": "sha512-Z0DbgELS9/L/75wZbro8xAnT50pBVFQZ+hUEueGDU5FN51YSCYM+jdxsfCiHjwNP/4LCDD0i/graKpeBnOXKRA==", - "dev": true, - "peer": true - }, - "node_modules/utils-merge": { - "version": "1.0.1", - "resolved": "https://registry.npmjs.org/utils-merge/-/utils-merge-1.0.1.tgz", - "integrity": "sha512-pMZTvIkT1d+TFGvDOqodOclx0QWkkgi6Tdoa8gC8ffGAAqz9pzPTZWAybbsHHoED/ztMtkv/VoYTYyShUn81hA==", - "dev": true, - "peer": true, - "engines": { - "node": ">= 0.4.0" - } - }, "node_modules/uuid": { "version": "10.0.0", "resolved": "https://registry.npmjs.org/uuid/-/uuid-10.0.0.tgz", @@ -11328,36 +2743,109 @@ "uuid": "dist/bin/uuid" } }, - "node_modules/validate-npm-package-license": { - "version": "3.0.4", - "resolved": "https://registry.npmjs.org/validate-npm-package-license/-/validate-npm-package-license-3.0.4.tgz", - "integrity": "sha512-DpKm2Ui/xN7/HQKCtpZxoRWBhZ9Z0kqtygG8XCgNQ8ZlDnxuQmWhj566j8fN4Cu3/JmbhsDo7fcAJq4s9h27Ew==", - "dev": true, + "node_modules/vite": { + "version": "7.0.6", + "resolved": "https://registry.npmjs.org/vite/-/vite-7.0.6.tgz", + "integrity": "sha512-MHFiOENNBd+Bd9uvc8GEsIzdkn1JxMmEeYX35tI3fv0sJBUTfW5tQsoaOwuY4KhBI09A3dUJ/DXf2yxPVPUceg==", + "devOptional": true, "dependencies": { - "spdx-correct": "^3.0.0", - "spdx-expression-parse": "^3.0.0" + "esbuild": "^0.25.0", + "fdir": "^6.4.6", + "picomatch": "^4.0.3", + "postcss": "^8.5.6", + "rollup": "^4.40.0", + "tinyglobby": "^0.2.14" + }, + "bin": { + "vite": "bin/vite.js" + }, + "engines": { + "node": "^20.19.0 || >=22.12.0" + }, + "funding": { + "url": "https://github.com/vitejs/vite?sponsor=1" + }, + "optionalDependencies": { + "fsevents": "~2.3.3" + }, + "peerDependencies": { + "@types/node": "^20.19.0 || >=22.12.0", + "jiti": ">=1.21.0", + "less": "^4.0.0", + "lightningcss": "^1.21.0", + "sass": "^1.70.0", + "sass-embedded": "^1.70.0", + "stylus": ">=0.54.8", + "sugarss": "^5.0.0", + "terser": "^5.16.0", + "tsx": "^4.8.1", + "yaml": "^2.4.2" + }, + "peerDependenciesMeta": { + "@types/node": { + "optional": true + }, + "jiti": { + "optional": true + }, + "less": { + "optional": true + }, + "lightningcss": { + "optional": true + }, + "sass": { + "optional": true + }, + "sass-embedded": { + "optional": true + }, + "stylus": { + "optional": true + }, + "sugarss": { + "optional": true + }, + "terser": { + "optional": true + }, + "tsx": { + "optional": true + }, + "yaml": { + "optional": true + } } }, - "node_modules/vary": { - "version": "1.1.2", - "resolved": "https://registry.npmjs.org/vary/-/vary-1.1.2.tgz", - "integrity": "sha512-BNGbWLfd0eUPabhkXUVm0j8uuvREyTh5ovRa/dyow/BqAbZJyC+5fU+IzQOzmAKzYqYRAISoRhdQr3eIZ/PXqg==", - "dev": true, - "peer": true, + "node_modules/vite-plugin-vuetify": { + "version": "2.1.2", + "resolved": "https://registry.npmjs.org/vite-plugin-vuetify/-/vite-plugin-vuetify-2.1.2.tgz", + "integrity": "sha512-I/wd6QS+DO6lHmuGoi1UTyvvBTQ2KDzQZ9oowJQEJ6OcjWfJnscYXx2ptm6S7fJSASuZT8jGRBL3LV4oS3LpaA==", + "devOptional": true, + "dependencies": { + "@vuetify/loader-shared": "^2.1.1", + "debug": "^4.3.3", + "upath": "^2.0.1" + }, "engines": { - "node": ">= 0.8" + "node": "^18.0.0 || >=20.0.0" + }, + "peerDependencies": { + "vite": ">=5", + "vue": "^3.0.0", + "vuetify": "^3.0.0" } }, "node_modules/vue": { - "version": "3.5.16", - "resolved": 
"https://registry.npmjs.org/vue/-/vue-3.5.16.tgz", - "integrity": "sha512-rjOV2ecxMd5SiAmof2xzh2WxntRcigkX/He4YFJ6WdRvVUrbt6DxC1Iujh10XLl8xCDRDtGKMeO3D+pRQ1PP9w==", + "version": "3.5.18", + "resolved": "https://registry.npmjs.org/vue/-/vue-3.5.18.tgz", + "integrity": "sha512-7W4Y4ZbMiQ3SEo+m9lnoNpV9xG7QVMLa+/0RFwwiAVkeYoyGXqWE85jabU4pllJNUzqfLShJ5YLptewhCWUgNA==", "dependencies": { - "@vue/compiler-dom": "3.5.16", - "@vue/compiler-sfc": "3.5.16", - "@vue/runtime-dom": "3.5.16", - "@vue/server-renderer": "3.5.16", - "@vue/shared": "3.5.16" + "@vue/compiler-dom": "3.5.18", + "@vue/compiler-sfc": "3.5.18", + "@vue/runtime-dom": "3.5.18", + "@vue/server-renderer": "3.5.18", + "@vue/shared": "3.5.18" }, "peerDependencies": { "typescript": "*" @@ -11368,40 +2856,6 @@ } } }, - "node_modules/vue-cli-plugin-vuetify": { - "version": "2.5.8", - "resolved": "https://registry.npmjs.org/vue-cli-plugin-vuetify/-/vue-cli-plugin-vuetify-2.5.8.tgz", - "integrity": "sha512-uqi0/URJETJBbWlQHD1l0pnY7JN8Ytu+AL1fw50HFlGByPa8/xx+mq19GkFXA9FcwFT01IqEc/TkxMPugchomg==", - "dev": true, - "dependencies": { - "null-loader": "^4.0.1", - "semver": "^7.1.2", - "shelljs": "^0.8.3" - }, - "peerDependencies": { - "webpack": "^4.0.0 || ^5.0.0" - }, - "peerDependenciesMeta": { - "sass-loader": { - "optional": true - }, - "vuetify-loader": { - "optional": true - } - } - }, - "node_modules/vue-cli-plugin-vuetify/node_modules/semver": { - "version": "7.7.2", - "resolved": "https://registry.npmjs.org/semver/-/semver-7.7.2.tgz", - "integrity": "sha512-RF0Fw+rO5AMf9MAyaRXI4AV0Ulj5lMHqVxxdSgiVbixSCXoEmmX/jk0CuJw4+3SqroYO9VoUh+HcuJivvtJemA==", - "dev": true, - "bin": { - "semver": "bin/semver.js" - }, - "engines": { - "node": ">=10" - } - }, "node_modules/vue-codemirror": { "version": "6.1.1", "resolved": "https://registry.npmjs.org/vue-codemirror/-/vue-codemirror-6.1.1.tgz", @@ -11418,9 +2872,9 @@ } }, "node_modules/vue-eslint-parser": { - "version": "10.1.3", - "resolved": "https://registry.npmjs.org/vue-eslint-parser/-/vue-eslint-parser-10.1.3.tgz", - "integrity": "sha512-dbCBnd2e02dYWsXoqX5yKUZlOt+ExIpq7hmHKPb5ZqKcjf++Eo0hMseFTZMLKThrUk61m+Uv6A2YSBve6ZvuDQ==", + "version": "10.2.0", + "resolved": "https://registry.npmjs.org/vue-eslint-parser/-/vue-eslint-parser-10.2.0.tgz", + "integrity": "sha512-CydUvFOQKD928UzZhTp4pr2vWz1L+H99t7Pkln2QSPdvmURT0MoC4wUccfCnuEaihNsu9aYYyk+bep8rlfkUXw==", "dev": true, "peer": true, "dependencies": { @@ -11429,7 +2883,6 @@ "eslint-visitor-keys": "^4.2.0", "espree": "^10.3.0", "esquery": "^1.6.0", - "lodash": "^4.17.21", "semver": "^7.6.3" }, "engines": { @@ -11442,135 +2895,10 @@ "eslint": "^8.57.0 || ^9.0.0" } }, - "node_modules/vue-eslint-parser/node_modules/eslint-scope": { - "version": "8.3.0", - "resolved": "https://registry.npmjs.org/eslint-scope/-/eslint-scope-8.3.0.tgz", - "integrity": "sha512-pUNxi75F8MJ/GdeKtVLSbYg4ZI34J6C0C7sbL4YOp2exGwen7ZsuBqKzUhXd0qMQ362yET3z+uPwKeg/0C2XCQ==", - "dev": true, - "peer": true, - "dependencies": { - "esrecurse": "^4.3.0", - "estraverse": "^5.2.0" - }, - "engines": { - "node": "^18.18.0 || ^20.9.0 || >=21.1.0" - }, - "funding": { - "url": "https://opencollective.com/eslint" - } - }, - "node_modules/vue-eslint-parser/node_modules/eslint-visitor-keys": { - "version": "4.2.0", - "resolved": "https://registry.npmjs.org/eslint-visitor-keys/-/eslint-visitor-keys-4.2.0.tgz", - "integrity": "sha512-UyLnSehNt62FFhSwjZlHmeokpRK59rcz29j+F1/aDgbkbRTk7wIc9XzdoasMUbRNKDM0qQt/+BJ4BrpFeABemw==", - "dev": true, - "peer": true, - "engines": { - "node": "^18.18.0 || ^20.9.0 || 
>=21.1.0" - }, - "funding": { - "url": "https://opencollective.com/eslint" - } - }, - "node_modules/vue-eslint-parser/node_modules/estraverse": { - "version": "5.3.0", - "resolved": "https://registry.npmjs.org/estraverse/-/estraverse-5.3.0.tgz", - "integrity": "sha512-MMdARuVEQziNTeJD8DgMqmhwR11BRQ/cBP+pLtYdSTnf3MIO8fFeiINEbX36ZdNlfU/7A9f3gUw49B3oQsvwBA==", - "dev": true, - "peer": true, - "engines": { - "node": ">=4.0" - } - }, - "node_modules/vue-eslint-parser/node_modules/semver": { - "version": "7.7.2", - "resolved": "https://registry.npmjs.org/semver/-/semver-7.7.2.tgz", - "integrity": "sha512-RF0Fw+rO5AMf9MAyaRXI4AV0Ulj5lMHqVxxdSgiVbixSCXoEmmX/jk0CuJw4+3SqroYO9VoUh+HcuJivvtJemA==", - "dev": true, - "peer": true, - "bin": { - "semver": "bin/semver.js" - }, - "engines": { - "node": ">=10" - } - }, - "node_modules/vue-hot-reload-api": { - "version": "2.3.4", - "resolved": "https://registry.npmjs.org/vue-hot-reload-api/-/vue-hot-reload-api-2.3.4.tgz", - "integrity": "sha512-BXq3jwIagosjgNVae6tkHzzIk6a8MHFtzAdwhnV5VlvPTFxDCvIttgSiHWjdGoTJvXtmRu5HacExfdarRcFhog==", - "dev": true, - "peer": true - }, - "node_modules/vue-loader": { - "version": "17.4.2", - "resolved": "https://registry.npmjs.org/vue-loader/-/vue-loader-17.4.2.tgz", - "integrity": "sha512-yTKOA4R/VN4jqjw4y5HrynFL8AK0Z3/Jt7eOJXEitsm0GMRHDBjCfCiuTiLP7OESvsZYo2pATCWhDqxC5ZrM6w==", - "dev": true, - "peer": true, - "dependencies": { - "chalk": "^4.1.0", - "hash-sum": "^2.0.0", - "watchpack": "^2.4.0" - }, - "peerDependencies": { - "webpack": "^4.1.0 || ^5.0.0-0" - }, - "peerDependenciesMeta": { - "@vue/compiler-sfc": { - "optional": true - }, - "vue": { - "optional": true - } - } - }, - "node_modules/vue-loader/node_modules/chalk": { - "version": "4.1.2", - "resolved": "https://registry.npmjs.org/chalk/-/chalk-4.1.2.tgz", - "integrity": "sha512-oKnbhFyRIXpUuez8iBMmyEa4nbj4IOQyuhc/wy9kY7/WVPcwIO9VA668Pu8RkO7+0G76SLROeyw9CpQ061i4mA==", - "dev": true, - "peer": true, - "dependencies": { - "ansi-styles": "^4.1.0", - "supports-color": "^7.1.0" - }, - "engines": { - "node": ">=10" - }, - "funding": { - "url": "https://github.com/chalk/chalk?sponsor=1" - } - }, - "node_modules/vue-style-loader": { - "version": "4.1.3", - "resolved": "https://registry.npmjs.org/vue-style-loader/-/vue-style-loader-4.1.3.tgz", - "integrity": "sha512-sFuh0xfbtpRlKfm39ss/ikqs9AbKCoXZBpHeVZ8Tx650o0k0q/YCM7FRvigtxpACezfq6af+a7JeqVTWvncqDg==", - "dev": true, - "peer": true, - "dependencies": { - "hash-sum": "^1.0.2", - "loader-utils": "^1.0.2" - } - }, - "node_modules/vue-style-loader/node_modules/hash-sum": { - "version": "1.0.2", - "resolved": "https://registry.npmjs.org/hash-sum/-/hash-sum-1.0.2.tgz", - "integrity": "sha512-fUs4B4L+mlt8/XAtSOGMUO1TXmAelItBPtJG7CyHJfYTdDjwisntGO2JQz7oUsatOY9o68+57eziUVNw/mRHmA==", - "dev": true, - "peer": true - }, - "node_modules/vue-template-es2015-compiler": { - "version": "1.9.1", - "resolved": "https://registry.npmjs.org/vue-template-es2015-compiler/-/vue-template-es2015-compiler-1.9.1.tgz", - "integrity": "sha512-4gDntzrifFnCEvyoO8PqyJDmguXgVPxKiIxrBKjIowvL9l+N66196+72XVYR8BBf1Uv1Fgt3bGevJ+sEmxfZzw==", - "dev": true, - "peer": true - }, "node_modules/vuetify": { - "version": "3.8.8", - "resolved": "https://registry.npmjs.org/vuetify/-/vuetify-3.8.8.tgz", - "integrity": "sha512-EPFynvxh72PBgUVZnGpfYfGluz8dz/tXM1OzjszFOK7ywqS+bAm8K9jJq0MIlAG8HKE7gBFQwCJGkzIyuUDipA==", + "version": "3.9.3", + "resolved": "https://registry.npmjs.org/vuetify/-/vuetify-3.9.3.tgz", + "integrity": 
"sha512-0eruHdmRoAMBo/08RLDkTdtdu1vfkx+/PurUIDW2tz/k2GCp51e7KwgCn4uVyzH88KRgf2PKiz5UI5f93Xn05w==", "engines": { "node": "^12.20 || >=14.13" }, @@ -11601,646 +2929,11 @@ "resolved": "https://registry.npmjs.org/w3c-keyname/-/w3c-keyname-2.2.8.tgz", "integrity": "sha512-dpojBhNsCNN7T82Tm7k26A6G9ML3NkhDsnw9n/eoxSRlVBB4CEtIQ/KTCLI2Fwf3ataSXRhYFkQi3SlnFwPvPQ==" }, - "node_modules/watchpack": { - "version": "2.4.4", - "resolved": "https://registry.npmjs.org/watchpack/-/watchpack-2.4.4.tgz", - "integrity": "sha512-c5EGNOiyxxV5qmTtAB7rbiXxi1ooX1pQKMLX/MIabJjRA0SJBQOjKF+KSVfHkr9U1cADPon0mRiVe/riyaiDUA==", - "devOptional": true, - "dependencies": { - "glob-to-regexp": "^0.4.1", - "graceful-fs": "^4.1.2" - }, - "engines": { - "node": ">=10.13.0" - } - }, - "node_modules/wbuf": { - "version": "1.7.3", - "resolved": "https://registry.npmjs.org/wbuf/-/wbuf-1.7.3.tgz", - "integrity": "sha512-O84QOnr0icsbFGLS0O3bI5FswxzRr8/gHwWkDlQFskhSPryQXvrTMxjxGP4+iWYoauLoBvfDpkrOauZ+0iZpDA==", - "dev": true, - "peer": true, - "dependencies": { - "minimalistic-assert": "^1.0.0" - } - }, - "node_modules/wcwidth": { - "version": "1.0.1", - "resolved": "https://registry.npmjs.org/wcwidth/-/wcwidth-1.0.1.tgz", - "integrity": "sha512-XHPEwS0q6TaxcvG85+8EYkbiCux2XtWG2mkc47Ng2A77BQu9+DqIOJldST4HgPkuea7dvKSj5VgX3P1d4rW8Tg==", - "dev": true, - "dependencies": { - "defaults": "^1.0.3" - } - }, "node_modules/webfontloader": { "version": "1.6.28", "resolved": "https://registry.npmjs.org/webfontloader/-/webfontloader-1.6.28.tgz", "integrity": "sha512-Egb0oFEga6f+nSgasH3E0M405Pzn6y3/9tOVanv/DLfa1YBIgcv90L18YyWnvXkRbIM17v5Kv6IT2N6g1x5tvQ==" }, - "node_modules/webidl-conversions": { - "version": "3.0.1", - "resolved": "https://registry.npmjs.org/webidl-conversions/-/webidl-conversions-3.0.1.tgz", - "integrity": "sha512-2JAn3z8AR6rjK8Sm8orRC0h/bcl/DqL7tRPdGZ4I1CjdF+EaMLmYxBHyXuKL849eucPFhvBoxMsflfOb8kxaeQ==", - "dev": true - }, - "node_modules/webpack": { - "version": "5.99.9", - "resolved": "https://registry.npmjs.org/webpack/-/webpack-5.99.9.tgz", - "integrity": "sha512-brOPwM3JnmOa+7kd3NsmOUOwbDAj8FT9xDsG3IW0MgbN9yZV7Oi/s/+MNQ/EcSMqw7qfoRyXPoeEWT8zLVdVGg==", - "devOptional": true, - "dependencies": { - "@types/eslint-scope": "^3.7.7", - "@types/estree": "^1.0.6", - "@types/json-schema": "^7.0.15", - "@webassemblyjs/ast": "^1.14.1", - "@webassemblyjs/wasm-edit": "^1.14.1", - "@webassemblyjs/wasm-parser": "^1.14.1", - "acorn": "^8.14.0", - "browserslist": "^4.24.0", - "chrome-trace-event": "^1.0.2", - "enhanced-resolve": "^5.17.1", - "es-module-lexer": "^1.2.1", - "eslint-scope": "5.1.1", - "events": "^3.2.0", - "glob-to-regexp": "^0.4.1", - "graceful-fs": "^4.2.11", - "json-parse-even-better-errors": "^2.3.1", - "loader-runner": "^4.2.0", - "mime-types": "^2.1.27", - "neo-async": "^2.6.2", - "schema-utils": "^4.3.2", - "tapable": "^2.1.1", - "terser-webpack-plugin": "^5.3.11", - "watchpack": "^2.4.1", - "webpack-sources": "^3.2.3" - }, - "bin": { - "webpack": "bin/webpack.js" - }, - "engines": { - "node": ">=10.13.0" - }, - "funding": { - "type": "opencollective", - "url": "https://opencollective.com/webpack" - }, - "peerDependenciesMeta": { - "webpack-cli": { - "optional": true - } - } - }, - "node_modules/webpack-bundle-analyzer": { - "version": "4.10.2", - "resolved": "https://registry.npmjs.org/webpack-bundle-analyzer/-/webpack-bundle-analyzer-4.10.2.tgz", - "integrity": "sha512-vJptkMm9pk5si4Bv922ZbKLV8UTT4zib4FPgXMhgzUny0bfDDkLXAVQs3ly3fS4/TN9ROFtb0NFrm04UXFE/Vw==", - "dev": true, - "peer": true, - "dependencies": { - 
"@discoveryjs/json-ext": "0.5.7", - "acorn": "^8.0.4", - "acorn-walk": "^8.0.0", - "commander": "^7.2.0", - "debounce": "^1.2.1", - "escape-string-regexp": "^4.0.0", - "gzip-size": "^6.0.0", - "html-escaper": "^2.0.2", - "opener": "^1.5.2", - "picocolors": "^1.0.0", - "sirv": "^2.0.3", - "ws": "^7.3.1" - }, - "bin": { - "webpack-bundle-analyzer": "lib/bin/analyzer.js" - }, - "engines": { - "node": ">= 10.13.0" - } - }, - "node_modules/webpack-bundle-analyzer/node_modules/commander": { - "version": "7.2.0", - "resolved": "https://registry.npmjs.org/commander/-/commander-7.2.0.tgz", - "integrity": "sha512-QrWXB+ZQSVPmIWIhtEO9H+gwHaMGYiF5ChvoJ+K9ZGHG/sVsa6yiesAD1GC/x46sET00Xlwo1u49RVVVzvcSkw==", - "dev": true, - "peer": true, - "engines": { - "node": ">= 10" - } - }, - "node_modules/webpack-chain": { - "version": "6.5.1", - "resolved": "https://registry.npmjs.org/webpack-chain/-/webpack-chain-6.5.1.tgz", - "integrity": "sha512-7doO/SRtLu8q5WM0s7vPKPWX580qhi0/yBHkOxNkv50f6qB76Zy9o2wRTrrPULqYTvQlVHuvbA8v+G5ayuUDsA==", - "deprecated": "Package no longer supported. Contact Support at https://www.npmjs.com/support for more info.", - "dev": true, - "peer": true, - "dependencies": { - "deepmerge": "^1.5.2", - "javascript-stringify": "^2.0.1" - }, - "engines": { - "node": ">=8" - } - }, - "node_modules/webpack-dev-middleware": { - "version": "5.3.4", - "resolved": "https://registry.npmjs.org/webpack-dev-middleware/-/webpack-dev-middleware-5.3.4.tgz", - "integrity": "sha512-BVdTqhhs+0IfoeAf7EoH5WE+exCmqGerHfDM0IL096Px60Tq2Mn9MAbnaGUe6HiMa41KMCYF19gyzZmBcq/o4Q==", - "dev": true, - "peer": true, - "dependencies": { - "colorette": "^2.0.10", - "memfs": "^3.4.3", - "mime-types": "^2.1.31", - "range-parser": "^1.2.1", - "schema-utils": "^4.0.0" - }, - "engines": { - "node": ">= 12.13.0" - }, - "funding": { - "type": "opencollective", - "url": "https://opencollective.com/webpack" - }, - "peerDependencies": { - "webpack": "^4.0.0 || ^5.0.0" - } - }, - "node_modules/webpack-dev-middleware/node_modules/ajv": { - "version": "8.17.1", - "resolved": "https://registry.npmjs.org/ajv/-/ajv-8.17.1.tgz", - "integrity": "sha512-B/gBuNg5SiMTrPkC+A2+cW0RszwxYmn6VYxB/inlBStS5nx6xHIt/ehKRhIMhqusl7a8LjQoZnjCs5vhwxOQ1g==", - "dev": true, - "peer": true, - "dependencies": { - "fast-deep-equal": "^3.1.3", - "fast-uri": "^3.0.1", - "json-schema-traverse": "^1.0.0", - "require-from-string": "^2.0.2" - }, - "funding": { - "type": "github", - "url": "https://github.com/sponsors/epoberezkin" - } - }, - "node_modules/webpack-dev-middleware/node_modules/ajv-keywords": { - "version": "5.1.0", - "resolved": "https://registry.npmjs.org/ajv-keywords/-/ajv-keywords-5.1.0.tgz", - "integrity": "sha512-YCS/JNFAUyr5vAuhk1DWm1CBxRHW9LbJ2ozWeemrIqpbsqKjHVxYPyi5GC0rjZIT5JxJ3virVTS8wk4i/Z+krw==", - "dev": true, - "peer": true, - "dependencies": { - "fast-deep-equal": "^3.1.3" - }, - "peerDependencies": { - "ajv": "^8.8.2" - } - }, - "node_modules/webpack-dev-middleware/node_modules/json-schema-traverse": { - "version": "1.0.0", - "resolved": "https://registry.npmjs.org/json-schema-traverse/-/json-schema-traverse-1.0.0.tgz", - "integrity": "sha512-NM8/P9n3XjXhIZn1lLhkFaACTOURQXjWhV4BA/RnOv8xvgqtqpAX9IO4mRQxSx1Rlo4tqzeqb0sOlruaOy3dug==", - "dev": true, - "peer": true - }, - "node_modules/webpack-dev-middleware/node_modules/schema-utils": { - "version": "4.3.2", - "resolved": "https://registry.npmjs.org/schema-utils/-/schema-utils-4.3.2.tgz", - "integrity": 
"sha512-Gn/JaSk/Mt9gYubxTtSn/QCV4em9mpAPiR1rqy/Ocu19u/G9J5WWdNoUT4SiV6mFC3y6cxyFcFwdzPM3FgxGAQ==", - "dev": true, - "peer": true, - "dependencies": { - "@types/json-schema": "^7.0.9", - "ajv": "^8.9.0", - "ajv-formats": "^2.1.1", - "ajv-keywords": "^5.1.0" - }, - "engines": { - "node": ">= 10.13.0" - }, - "funding": { - "type": "opencollective", - "url": "https://opencollective.com/webpack" - } - }, - "node_modules/webpack-dev-server": { - "version": "4.15.2", - "resolved": "https://registry.npmjs.org/webpack-dev-server/-/webpack-dev-server-4.15.2.tgz", - "integrity": "sha512-0XavAZbNJ5sDrCbkpWL8mia0o5WPOd2YGtxrEiZkBK9FjLppIUK2TgxK6qGD2P3hUXTJNNPVibrerKcx5WkR1g==", - "dev": true, - "peer": true, - "dependencies": { - "@types/bonjour": "^3.5.9", - "@types/connect-history-api-fallback": "^1.3.5", - "@types/express": "^4.17.13", - "@types/serve-index": "^1.9.1", - "@types/serve-static": "^1.13.10", - "@types/sockjs": "^0.3.33", - "@types/ws": "^8.5.5", - "ansi-html-community": "^0.0.8", - "bonjour-service": "^1.0.11", - "chokidar": "^3.5.3", - "colorette": "^2.0.10", - "compression": "^1.7.4", - "connect-history-api-fallback": "^2.0.0", - "default-gateway": "^6.0.3", - "express": "^4.17.3", - "graceful-fs": "^4.2.6", - "html-entities": "^2.3.2", - "http-proxy-middleware": "^2.0.3", - "ipaddr.js": "^2.0.1", - "launch-editor": "^2.6.0", - "open": "^8.0.9", - "p-retry": "^4.5.0", - "rimraf": "^3.0.2", - "schema-utils": "^4.0.0", - "selfsigned": "^2.1.1", - "serve-index": "^1.9.1", - "sockjs": "^0.3.24", - "spdy": "^4.0.2", - "webpack-dev-middleware": "^5.3.4", - "ws": "^8.13.0" - }, - "bin": { - "webpack-dev-server": "bin/webpack-dev-server.js" - }, - "engines": { - "node": ">= 12.13.0" - }, - "funding": { - "type": "opencollective", - "url": "https://opencollective.com/webpack" - }, - "peerDependencies": { - "webpack": "^4.37.0 || ^5.0.0" - }, - "peerDependenciesMeta": { - "webpack": { - "optional": true - }, - "webpack-cli": { - "optional": true - } - } - }, - "node_modules/webpack-dev-server/node_modules/ajv": { - "version": "8.17.1", - "resolved": "https://registry.npmjs.org/ajv/-/ajv-8.17.1.tgz", - "integrity": "sha512-B/gBuNg5SiMTrPkC+A2+cW0RszwxYmn6VYxB/inlBStS5nx6xHIt/ehKRhIMhqusl7a8LjQoZnjCs5vhwxOQ1g==", - "dev": true, - "peer": true, - "dependencies": { - "fast-deep-equal": "^3.1.3", - "fast-uri": "^3.0.1", - "json-schema-traverse": "^1.0.0", - "require-from-string": "^2.0.2" - }, - "funding": { - "type": "github", - "url": "https://github.com/sponsors/epoberezkin" - } - }, - "node_modules/webpack-dev-server/node_modules/ajv-keywords": { - "version": "5.1.0", - "resolved": "https://registry.npmjs.org/ajv-keywords/-/ajv-keywords-5.1.0.tgz", - "integrity": "sha512-YCS/JNFAUyr5vAuhk1DWm1CBxRHW9LbJ2ozWeemrIqpbsqKjHVxYPyi5GC0rjZIT5JxJ3virVTS8wk4i/Z+krw==", - "dev": true, - "peer": true, - "dependencies": { - "fast-deep-equal": "^3.1.3" - }, - "peerDependencies": { - "ajv": "^8.8.2" - } - }, - "node_modules/webpack-dev-server/node_modules/json-schema-traverse": { - "version": "1.0.0", - "resolved": "https://registry.npmjs.org/json-schema-traverse/-/json-schema-traverse-1.0.0.tgz", - "integrity": "sha512-NM8/P9n3XjXhIZn1lLhkFaACTOURQXjWhV4BA/RnOv8xvgqtqpAX9IO4mRQxSx1Rlo4tqzeqb0sOlruaOy3dug==", - "dev": true, - "peer": true - }, - "node_modules/webpack-dev-server/node_modules/schema-utils": { - "version": "4.3.2", - "resolved": "https://registry.npmjs.org/schema-utils/-/schema-utils-4.3.2.tgz", - "integrity": 
"sha512-Gn/JaSk/Mt9gYubxTtSn/QCV4em9mpAPiR1rqy/Ocu19u/G9J5WWdNoUT4SiV6mFC3y6cxyFcFwdzPM3FgxGAQ==", - "dev": true, - "peer": true, - "dependencies": { - "@types/json-schema": "^7.0.9", - "ajv": "^8.9.0", - "ajv-formats": "^2.1.1", - "ajv-keywords": "^5.1.0" - }, - "engines": { - "node": ">= 10.13.0" - }, - "funding": { - "type": "opencollective", - "url": "https://opencollective.com/webpack" - } - }, - "node_modules/webpack-dev-server/node_modules/ws": { - "version": "8.18.2", - "resolved": "https://registry.npmjs.org/ws/-/ws-8.18.2.tgz", - "integrity": "sha512-DMricUmwGZUVr++AEAe2uiVM7UoO9MAVZMDu05UQOaUII0lp+zOzLLU4Xqh/JvTqklB1T4uELaaPBKyjE1r4fQ==", - "dev": true, - "peer": true, - "engines": { - "node": ">=10.0.0" - }, - "peerDependencies": { - "bufferutil": "^4.0.1", - "utf-8-validate": ">=5.0.2" - }, - "peerDependenciesMeta": { - "bufferutil": { - "optional": true - }, - "utf-8-validate": { - "optional": true - } - } - }, - "node_modules/webpack-merge": { - "version": "5.10.0", - "resolved": "https://registry.npmjs.org/webpack-merge/-/webpack-merge-5.10.0.tgz", - "integrity": "sha512-+4zXKdx7UnO+1jaN4l2lHVD+mFvnlZQP/6ljaJVb4SZiwIKeUnrT5l0gkT8z+n4hKpC+jpOv6O9R+gLtag7pSA==", - "dev": true, - "peer": true, - "dependencies": { - "clone-deep": "^4.0.1", - "flat": "^5.0.2", - "wildcard": "^2.0.0" - }, - "engines": { - "node": ">=10.0.0" - } - }, - "node_modules/webpack-plugin-vuetify": { - "version": "3.1.1", - "resolved": "https://registry.npmjs.org/webpack-plugin-vuetify/-/webpack-plugin-vuetify-3.1.1.tgz", - "integrity": "sha512-z3e/MtOfqrsKFUGlXe1MepgBkXvmnWDr7L1nFLpl2AHLHMKf1/sL6kjj1mMnEWQ57dKIJb7ejnQsLKK4jnKWow==", - "devOptional": true, - "dependencies": { - "@vuetify/loader-shared": "^2.1.0", - "decache": "^4.6.0", - "file-loader": "^6.2.0", - "find-cache-dir": "^5.0.0", - "loader-utils": "^2.0.0", - "mkdirp": "^1.0.4", - "null-loader": "^4.0.1", - "upath": "^2.0.1" - }, - "engines": { - "node": "^18.0.0 || >=20.0.0" - }, - "peerDependencies": { - "@vue/compiler-sfc": "^3.2.6", - "vue": "^3.2.6", - "vuetify": "^3.0.0", - "webpack": "^5.0.0" - } - }, - "node_modules/webpack-plugin-vuetify/node_modules/find-cache-dir": { - "version": "5.0.0", - "resolved": "https://registry.npmjs.org/find-cache-dir/-/find-cache-dir-5.0.0.tgz", - "integrity": "sha512-OuWNfjfP05JcpAP3JPgAKUhWefjMRfI5iAoSsvE24ANYWJaepAtlSgWECSVEuRgSXpyNEc9DJwG/TZpgcOqyig==", - "devOptional": true, - "dependencies": { - "common-path-prefix": "^3.0.0", - "pkg-dir": "^7.0.0" - }, - "engines": { - "node": ">=16" - }, - "funding": { - "url": "https://github.com/sponsors/sindresorhus" - } - }, - "node_modules/webpack-plugin-vuetify/node_modules/find-up": { - "version": "6.3.0", - "resolved": "https://registry.npmjs.org/find-up/-/find-up-6.3.0.tgz", - "integrity": "sha512-v2ZsoEuVHYy8ZIlYqwPe/39Cy+cFDzp4dXPaxNvkEuouymu+2Jbz0PxpKarJHYJTmv2HWT3O382qY8l4jMWthw==", - "devOptional": true, - "dependencies": { - "locate-path": "^7.1.0", - "path-exists": "^5.0.0" - }, - "engines": { - "node": "^12.20.0 || ^14.13.1 || >=16.0.0" - }, - "funding": { - "url": "https://github.com/sponsors/sindresorhus" - } - }, - "node_modules/webpack-plugin-vuetify/node_modules/loader-utils": { - "version": "2.0.4", - "resolved": "https://registry.npmjs.org/loader-utils/-/loader-utils-2.0.4.tgz", - "integrity": "sha512-xXqpXoINfFhgua9xiqD8fPFHgkoq1mmmpE92WlDbm9rNRd/EbRb+Gqf908T2DMfuHjjJlksiK2RbHVOdD/MqSw==", - "devOptional": true, - "dependencies": { - "big.js": "^5.2.2", - "emojis-list": "^3.0.0", - "json5": "^2.1.2" - }, - "engines": { - "node": ">=8.9.0" 
- } - }, - "node_modules/webpack-plugin-vuetify/node_modules/locate-path": { - "version": "7.2.0", - "resolved": "https://registry.npmjs.org/locate-path/-/locate-path-7.2.0.tgz", - "integrity": "sha512-gvVijfZvn7R+2qyPX8mAuKcFGDf6Nc61GdvGafQsHL0sBIxfKzA+usWn4GFC/bk+QdwPUD4kWFJLhElipq+0VA==", - "devOptional": true, - "dependencies": { - "p-locate": "^6.0.0" - }, - "engines": { - "node": "^12.20.0 || ^14.13.1 || >=16.0.0" - }, - "funding": { - "url": "https://github.com/sponsors/sindresorhus" - } - }, - "node_modules/webpack-plugin-vuetify/node_modules/p-limit": { - "version": "4.0.0", - "resolved": "https://registry.npmjs.org/p-limit/-/p-limit-4.0.0.tgz", - "integrity": "sha512-5b0R4txpzjPWVw/cXXUResoD4hb6U/x9BH08L7nw+GN1sezDzPdxeRvpc9c433fZhBan/wusjbCsqwqm4EIBIQ==", - "devOptional": true, - "dependencies": { - "yocto-queue": "^1.0.0" - }, - "engines": { - "node": "^12.20.0 || ^14.13.1 || >=16.0.0" - }, - "funding": { - "url": "https://github.com/sponsors/sindresorhus" - } - }, - "node_modules/webpack-plugin-vuetify/node_modules/p-locate": { - "version": "6.0.0", - "resolved": "https://registry.npmjs.org/p-locate/-/p-locate-6.0.0.tgz", - "integrity": "sha512-wPrq66Llhl7/4AGC6I+cqxT07LhXvWL08LNXz1fENOw0Ap4sRZZ/gZpTTJ5jpurzzzfS2W/Ge9BY3LgLjCShcw==", - "devOptional": true, - "dependencies": { - "p-limit": "^4.0.0" - }, - "engines": { - "node": "^12.20.0 || ^14.13.1 || >=16.0.0" - }, - "funding": { - "url": "https://github.com/sponsors/sindresorhus" - } - }, - "node_modules/webpack-plugin-vuetify/node_modules/path-exists": { - "version": "5.0.0", - "resolved": "https://registry.npmjs.org/path-exists/-/path-exists-5.0.0.tgz", - "integrity": "sha512-RjhtfwJOxzcFmNOi6ltcbcu4Iu+FL3zEj83dk4kAS+fVpTxXLO1b38RvJgT/0QwvV/L3aY9TAnyv0EOqW4GoMQ==", - "devOptional": true, - "engines": { - "node": "^12.20.0 || ^14.13.1 || >=16.0.0" - } - }, - "node_modules/webpack-plugin-vuetify/node_modules/pkg-dir": { - "version": "7.0.0", - "resolved": "https://registry.npmjs.org/pkg-dir/-/pkg-dir-7.0.0.tgz", - "integrity": "sha512-Ie9z/WINcxxLp27BKOCHGde4ITq9UklYKDzVo1nhk5sqGEXU3FpkwP5GM2voTGJkGd9B3Otl+Q4uwSOeSUtOBA==", - "devOptional": true, - "dependencies": { - "find-up": "^6.3.0" - }, - "engines": { - "node": ">=14.16" - }, - "funding": { - "url": "https://github.com/sponsors/sindresorhus" - } - }, - "node_modules/webpack-plugin-vuetify/node_modules/yocto-queue": { - "version": "1.2.1", - "resolved": "https://registry.npmjs.org/yocto-queue/-/yocto-queue-1.2.1.tgz", - "integrity": "sha512-AyeEbWOu/TAXdxlV9wmGcR0+yh2j3vYPGOECcIj2S7MkrLyC7ne+oye2BKTItt0ii2PHk4cDy+95+LshzbXnGg==", - "devOptional": true, - "engines": { - "node": ">=12.20" - }, - "funding": { - "url": "https://github.com/sponsors/sindresorhus" - } - }, - "node_modules/webpack-sources": { - "version": "3.3.2", - "resolved": "https://registry.npmjs.org/webpack-sources/-/webpack-sources-3.3.2.tgz", - "integrity": "sha512-ykKKus8lqlgXX/1WjudpIEjqsafjOTcOJqxnAbMLAu/KCsDCJ6GBtvscewvTkrn24HsnvFwrSCbenFrhtcCsAA==", - "devOptional": true, - "engines": { - "node": ">=10.13.0" - } - }, - "node_modules/webpack-virtual-modules": { - "version": "0.4.6", - "resolved": "https://registry.npmjs.org/webpack-virtual-modules/-/webpack-virtual-modules-0.4.6.tgz", - "integrity": "sha512-5tyDlKLqPfMqjT3Q9TAqf2YqjwmnUleZwzJi1A5qXnlBCdj2AtOJ6wAWdglTIDOPgOiOrXeBeFcsQ8+aGQ6QbA==", - "dev": true, - "peer": true - }, - "node_modules/webpack/node_modules/ajv": { - "version": "8.17.1", - "resolved": "https://registry.npmjs.org/ajv/-/ajv-8.17.1.tgz", - "integrity": 
"sha512-B/gBuNg5SiMTrPkC+A2+cW0RszwxYmn6VYxB/inlBStS5nx6xHIt/ehKRhIMhqusl7a8LjQoZnjCs5vhwxOQ1g==", - "devOptional": true, - "dependencies": { - "fast-deep-equal": "^3.1.3", - "fast-uri": "^3.0.1", - "json-schema-traverse": "^1.0.0", - "require-from-string": "^2.0.2" - }, - "funding": { - "type": "github", - "url": "https://github.com/sponsors/epoberezkin" - } - }, - "node_modules/webpack/node_modules/ajv-keywords": { - "version": "5.1.0", - "resolved": "https://registry.npmjs.org/ajv-keywords/-/ajv-keywords-5.1.0.tgz", - "integrity": "sha512-YCS/JNFAUyr5vAuhk1DWm1CBxRHW9LbJ2ozWeemrIqpbsqKjHVxYPyi5GC0rjZIT5JxJ3virVTS8wk4i/Z+krw==", - "devOptional": true, - "dependencies": { - "fast-deep-equal": "^3.1.3" - }, - "peerDependencies": { - "ajv": "^8.8.2" - } - }, - "node_modules/webpack/node_modules/json-schema-traverse": { - "version": "1.0.0", - "resolved": "https://registry.npmjs.org/json-schema-traverse/-/json-schema-traverse-1.0.0.tgz", - "integrity": "sha512-NM8/P9n3XjXhIZn1lLhkFaACTOURQXjWhV4BA/RnOv8xvgqtqpAX9IO4mRQxSx1Rlo4tqzeqb0sOlruaOy3dug==", - "devOptional": true - }, - "node_modules/webpack/node_modules/schema-utils": { - "version": "4.3.2", - "resolved": "https://registry.npmjs.org/schema-utils/-/schema-utils-4.3.2.tgz", - "integrity": "sha512-Gn/JaSk/Mt9gYubxTtSn/QCV4em9mpAPiR1rqy/Ocu19u/G9J5WWdNoUT4SiV6mFC3y6cxyFcFwdzPM3FgxGAQ==", - "devOptional": true, - "dependencies": { - "@types/json-schema": "^7.0.9", - "ajv": "^8.9.0", - "ajv-formats": "^2.1.1", - "ajv-keywords": "^5.1.0" - }, - "engines": { - "node": ">= 10.13.0" - }, - "funding": { - "type": "opencollective", - "url": "https://opencollective.com/webpack" - } - }, - "node_modules/websocket-driver": { - "version": "0.7.4", - "resolved": "https://registry.npmjs.org/websocket-driver/-/websocket-driver-0.7.4.tgz", - "integrity": "sha512-b17KeDIQVjvb0ssuSDF2cYXSg2iztliJ4B9WdsuB6J952qCPKmnVq4DyW5motImXHDC1cBT/1UezrJVsKw5zjg==", - "dev": true, - "peer": true, - "dependencies": { - "http-parser-js": ">=0.5.1", - "safe-buffer": ">=5.1.0", - "websocket-extensions": ">=0.1.1" - }, - "engines": { - "node": ">=0.8.0" - } - }, - "node_modules/websocket-extensions": { - "version": "0.1.4", - "resolved": "https://registry.npmjs.org/websocket-extensions/-/websocket-extensions-0.1.4.tgz", - "integrity": "sha512-OqedPIGOfsDlo31UNwYbCFMSaO9m9G/0faIHj5/dZFDMFqPTcx6UwqyOy3COEaEOg/9VsGIpdqn62W5KhoKSpg==", - "dev": true, - "peer": true, - "engines": { - "node": ">=0.8.0" - } - }, - "node_modules/whatwg-fetch": { - "version": "3.6.20", - "resolved": "https://registry.npmjs.org/whatwg-fetch/-/whatwg-fetch-3.6.20.tgz", - "integrity": "sha512-EqhiFU6daOA8kpjOWTL0olhVOF3i7OrFzSYiGsEMB8GcXS+RrzauAERX65xMeNWVqxA6HXH2m69Z9LaKKdisfg==", - "dev": true, - "peer": true - }, - "node_modules/whatwg-url": { - "version": "5.0.0", - "resolved": "https://registry.npmjs.org/whatwg-url/-/whatwg-url-5.0.0.tgz", - "integrity": "sha512-saE57nupxk6v3HY35+jzBwYa0rKSy0XR8JSxZPwgLr7ys0IBzhGviA1/TUGJLmSVqs8pb9AnvICXEuOHLprYTw==", - "dev": true, - "dependencies": { - "tr46": "~0.0.3", - "webidl-conversions": "^3.0.0" - } - }, "node_modules/which": { "version": "2.0.2", "resolved": "https://registry.npmjs.org/which/-/which-2.0.2.tgz", @@ -12256,13 +2949,6 @@ "node": ">= 8" } }, - "node_modules/wildcard": { - "version": "2.0.1", - "resolved": "https://registry.npmjs.org/wildcard/-/wildcard-2.0.1.tgz", - "integrity": "sha512-CC1bOL87PIWSBhDcTrdeLo6eGT7mCFtrg0uIJtqJUFyK+eJnzl8A1niH56uu7KMa5XFrtiV+AQuHO3n7DsHnLQ==", - "dev": true, - "peer": true - }, "node_modules/word-wrap": { 
"version": "1.2.5", "resolved": "https://registry.npmjs.org/word-wrap/-/word-wrap-1.2.5.tgz", @@ -12272,52 +2958,6 @@ "node": ">=0.10.0" } }, - "node_modules/wrap-ansi": { - "version": "7.0.0", - "resolved": "https://registry.npmjs.org/wrap-ansi/-/wrap-ansi-7.0.0.tgz", - "integrity": "sha512-YVGIj2kamLSTxw6NsZjoBxfSwsn0ycdesmc4p+Q21c5zPuZ1pl+NfxVdxPtdHvmNVOQ6XSYG4AUtyt/Fi7D16Q==", - "dev": true, - "peer": true, - "dependencies": { - "ansi-styles": "^4.0.0", - "string-width": "^4.1.0", - "strip-ansi": "^6.0.0" - }, - "engines": { - "node": ">=10" - }, - "funding": { - "url": "https://github.com/chalk/wrap-ansi?sponsor=1" - } - }, - "node_modules/wrappy": { - "version": "1.0.2", - "resolved": "https://registry.npmjs.org/wrappy/-/wrappy-1.0.2.tgz", - "integrity": "sha512-l4Sp/DRseor9wL6EvV2+TuQn63dMkPjZ/sp9XkghTEbV9KlPS1xUsZ3u7/IQO4wxtcFB4bgpQPRcR3QCvezPcQ==", - "dev": true - }, - "node_modules/ws": { - "version": "7.5.10", - "resolved": "https://registry.npmjs.org/ws/-/ws-7.5.10.tgz", - "integrity": "sha512-+dbF1tHwZpXcbOJdVOkzLDxZP1ailvSxM6ZweXTegylPny803bFhA+vqBYw4s31NSAk4S2Qz+AKXK9a4wkdjcQ==", - "dev": true, - "peer": true, - "engines": { - "node": ">=8.3.0" - }, - "peerDependencies": { - "bufferutil": "^4.0.1", - "utf-8-validate": "^5.0.2" - }, - "peerDependenciesMeta": { - "bufferutil": { - "optional": true - }, - "utf-8-validate": { - "optional": true - } - } - }, "node_modules/xml-name-validator": { "version": "4.0.0", "resolved": "https://registry.npmjs.org/xml-name-validator/-/xml-name-validator-4.0.0.tgz", @@ -12327,61 +2967,6 @@ "node": ">=12" } }, - "node_modules/y18n": { - "version": "5.0.8", - "resolved": "https://registry.npmjs.org/y18n/-/y18n-5.0.8.tgz", - "integrity": "sha512-0pfFzegeDWJHJIAmTLRP2DwHjdF5s7jo9tuztdQxAhINCdvS+3nGINqPd00AphqJR/0LhANUS6/+7SCb98YOfA==", - "dev": true, - "peer": true, - "engines": { - "node": ">=10" - } - }, - "node_modules/yallist": { - "version": "3.1.1", - "resolved": "https://registry.npmjs.org/yallist/-/yallist-3.1.1.tgz", - "integrity": "sha512-a4UGQaWPH59mOXUYnAG2ewncQS4i4F43Tv3JoAM+s2VDAmS9NsK8GpDMLrCHPksFT7h3K6TOoUNn2pb7RoXx4g==", - "dev": true - }, - "node_modules/yaml": { - "version": "1.10.2", - "resolved": "https://registry.npmjs.org/yaml/-/yaml-1.10.2.tgz", - "integrity": "sha512-r3vXyErRCYJ7wg28yvBY5VSoAF8ZvlcW9/BwUzEtUsjvX/DKs24dIkuwjtuprwJJHsbyUbLApepYTR1BN4uHrg==", - "dev": true, - "peer": true, - "engines": { - "node": ">= 6" - } - }, - "node_modules/yargs": { - "version": "16.2.0", - "resolved": "https://registry.npmjs.org/yargs/-/yargs-16.2.0.tgz", - "integrity": "sha512-D1mvvtDG0L5ft/jGWkLpG1+m0eQxOfaBvTNELraWj22wSVUMWxZUvYgJYcKh6jGGIkJFhH4IZPQhR4TKpc8mBw==", - "dev": true, - "peer": true, - "dependencies": { - "cliui": "^7.0.2", - "escalade": "^3.1.1", - "get-caller-file": "^2.0.5", - "require-directory": "^2.1.1", - "string-width": "^4.2.0", - "y18n": "^5.0.5", - "yargs-parser": "^20.2.2" - }, - "engines": { - "node": ">=10" - } - }, - "node_modules/yargs-parser": { - "version": "20.2.9", - "resolved": "https://registry.npmjs.org/yargs-parser/-/yargs-parser-20.2.9.tgz", - "integrity": "sha512-y11nGElTIV+CT3Zv9t7VKl+Q3hTQoT9a1Qzezhhl6Rp21gJ/IVTW7Z3y9EWXhuUBC2Shnf+DX0antecpAwSP8w==", - "dev": true, - "peer": true, - "engines": { - "node": ">=10" - } - }, "node_modules/yocto-queue": { "version": "0.1.0", "resolved": "https://registry.npmjs.org/yocto-queue/-/yocto-queue-0.1.0.tgz", diff --git a/talemate_frontend/package.json b/talemate_frontend/package.json index fd5dbbd6..372bf2b2 100644 --- a/talemate_frontend/package.json 
+++ b/talemate_frontend/package.json @@ -1,16 +1,17 @@ { "name": "talemate_frontend", - "version": "0.31.0", + "version": "0.32.0", "private": true, + "type": "module", "scripts": { - "serve": "vue-cli-service serve", - "build": "vue-cli-service build", - "lint": "vue-cli-service lint" + "serve": "vite", + "build": "vite build", + "lint": "eslint . --ext .vue,.js,.ts --fix" }, "dependencies": { "@codemirror/lang-json": "^6.0.1", - "@codemirror/lang-yaml": "^6.1.2", "@codemirror/lang-markdown": "^6.2.5", + "@codemirror/lang-yaml": "^6.1.2", "@codemirror/language-data": "^6.5.1", "@codemirror/state": "^6.5.2", "@codemirror/theme-one-dark": "^6.1.2", @@ -19,37 +20,22 @@ "core-js": "^3.37.1", "dot-prop": "^9.0.0", "litegraph.js": "^0.7.18", + "lodash": "^4.17.21", + "marked": "^15.0.12", "roboto-fontface": "*", "uuid": "^10.0.0", "vue": "^3.5", "vue-codemirror": "^6.1.1", - "vuetify": "^3.8", - "webfontloader": "^1.6.28", - "marked": "^15.0.12" + "vuetify": "^3.9", + "webfontloader": "^1.6.28" }, "devDependencies": { - "@babel/core": "^7", - "@babel/eslint-parser": "^7", - "@vue/cli-plugin-babel": "~5.0.8", + "@vitejs/plugin-vue": "^6.0.0", "eslint": "^9.0.0", "eslint-plugin-vue": "^10.0.0", "postcss": "^8.5.3", - "vue-cli-plugin-vuetify": "~2.5.8", - "webpack-plugin-vuetify": "^3.1" - }, - "eslintConfig": { - "root": true, - "env": { - "node": true - }, - "extends": [ - "plugin:vue/vue3-essential", - "eslint:recommended" - ], - "parserOptions": { - "parser": "@babel/eslint-parser" - }, - "rules": {} + "vite": "^7.0.4", + "vite-plugin-vuetify": "^2.1.1" }, "browserslist": [ "> 1%", diff --git a/talemate_frontend/src/components/AIAgent.vue b/talemate_frontend/src/components/AIAgent.vue index fec595e4..5897a08a 100644 --- a/talemate_frontend/src/components/AIAgent.vue +++ b/talemate_frontend/src/components/AIAgent.vue @@ -1,7 +1,8 @@ @@ -101,12 +124,19 @@ export default { } return types; + }, + regularMessages() { + return this.messages.filter(message => message.subtype !== 'function_call'); + }, + functionCallMessages() { + return this.messages.filter(message => message.subtype === 'function_call'); } }, data() { return { messages: [], max_messages: 20, + activeTab: 'messages', dirty: {}, intent: { intent: null, @@ -167,7 +197,7 @@ export default { } } }, - created() { + mounted() { this.registerMessageHandler(this.handleMessage); }, unmounted() { diff --git a/talemate_frontend/src/components/DirectorConsoleMessage.vue b/talemate_frontend/src/components/DirectorConsoleMessage.vue index 2595e0a1..f92c97b9 100644 --- a/talemate_frontend/src/components/DirectorConsoleMessage.vue +++ b/talemate_frontend/src/components/DirectorConsoleMessage.vue @@ -3,9 +3,27 @@
- mdi-brain - {{ message.message }} -
+ + {{ message.subtype === 'function_call' ? 'mdi-function-variant' : 'mdi-brain' }} + + + + + {{ message.message }} +
+
+ {{ key }}: {{ typeof value === 'object' ? JSON.stringify(value) : value }} +
+
+
+ + + {{ message.message }} +
{{ message.action }}
diff --git a/talemate_frontend/src/components/DirectorConsoleWidget.vue b/talemate_frontend/src/components/DirectorConsoleWidget.vue index 95e240ec..28c86403 100644 --- a/talemate_frontend/src/components/DirectorConsoleWidget.vue +++ b/talemate_frontend/src/components/DirectorConsoleWidget.vue @@ -1,8 +1,12 @@ - - diff --git a/talemate_frontend/src/components/NarratorMessage.vue b/talemate_frontend/src/components/NarratorMessage.vue index f2b7af81..34e2f5fd 100644 --- a/talemate_frontend/src/components/NarratorMessage.vue +++ b/talemate_frontend/src/components/NarratorMessage.vue @@ -56,9 +56,24 @@ mdi-source-fork Fork Scene + + + + mdi-account-voice + TTS + +
To edit the intro message open the mdi-script Scene Editor + + + mdi-account-voice + TTS + +
@@ -93,6 +108,14 @@ export default { type: Boolean, default: false, }, + ttsAvailable: { + type: Boolean, + default: false, + }, + ttsBusy: { + type: Boolean, + default: false, + }, }, inject: [ 'requestDeleteMessage', @@ -105,6 +128,7 @@ export default { 'getMessageStyle', 'openWorldStateManager', 'reviseMessage', + 'generateTTS', ], computed: { parts() { diff --git a/talemate_frontend/src/components/SceneMessages.vue b/talemate_frontend/src/components/SceneMessages.vue index 6f8b42b6..0c34f367 100644 --- a/talemate_frontend/src/components/SceneMessages.vue +++ b/talemate_frontend/src/components/SceneMessages.vue @@ -6,11 +6,11 @@ @continue="(name, params) => { forkScene(params.message_id, name) }" />
-
+
- +
@@ -50,7 +50,7 @@
- +
@@ -70,13 +70,20 @@
- +
{{ message.text }}
+ +
+ + Stop audio + mdi-volume-high + +
@@ -109,6 +116,9 @@ export default { }, agentStatus: { type: Object, + }, + audioPlayedForMessageId: { + default: undefined, } }, components: { @@ -122,6 +132,7 @@ export default { ContextInvestigationMessage, SystemMessage, }, + emits: ['cancel-audio-queue'], data() { return { messages: [], @@ -137,7 +148,13 @@ export default { computed: { editorRevisionsEnabled() { return this.agentStatus && this.agentStatus.editor && this.agentStatus.editor.actions && this.agentStatus.editor.actions["revision"] && this.agentStatus.editor.actions["revision"].enabled; - } + }, + ttsAvailable() { + return this.agentStatus.tts?.available; + }, + ttsBusy() { + return this.agentStatus.tts?.busy || this.agentStatus.tts?.busy_bg; + }, }, inject: ['getWebsocket', 'registerMessageHandler', 'setWaitingForInput'], provide() { @@ -149,6 +166,7 @@ export default { getMessageColor: this.getMessageColor, getMessageStyle: this.getMessageStyle, reviseMessage: this.reviseMessage, + generateTTS: this.generateTTS, } }, methods: { @@ -247,6 +265,14 @@ export default { this.setWaitingForInput(false); }, + messageTypeAllowsAudio(type) { + return [ + 'narrator', + 'character', + 'context_investigation', + ].includes(type); + }, + messageTypeIsSceneMessage(type) { return ![ 'request_input', @@ -293,6 +319,14 @@ export default { })); }, + generateTTS(message_id) { + this.getWebsocket().send(JSON.stringify({ + type: 'tts', + action: 'generate_for_scene_message', + message_id: message_id, + })); + }, + handleMessage(data) { var i; @@ -333,11 +367,13 @@ export default { if (data.type == "message_edited") { // find the message by id and update the text# - for (i = 0; i < this.messages.length; i++) { if (this.messages[i].id == data.id) { if (this.messages[i].type == "character") { - this.messages[i].text = data.message.split(':')[1].trim(); + const parts = data.message.split(':'); + parts.shift(); + const text = parts.join(':'); + this.messages[i].text = text.trim(); } else { this.messages[i].text = data.message; } @@ -384,7 +420,8 @@ export default { id: data.id, type: data.type, character: data.character, - text: data.message, direction_mode: data.direction_mode, + text: data.message, + direction_mode: data.direction_mode, action: data.action } ); @@ -459,6 +496,10 @@ export default { overflow-y: auto; } +.message-wrapper { + position: relative; +} + .message { white-space: pre-wrap; } @@ -510,5 +551,9 @@ export default { gap: 10px; } -.message.request_input {} +.audio-played-indicator { + position: absolute; + top: 25px; + left: 5px; +} \ No newline at end of file diff --git a/talemate_frontend/src/components/SceneTools.vue b/talemate_frontend/src/components/SceneTools.vue index a0400737..5ded8e79 100644 --- a/talemate_frontend/src/components/SceneTools.vue +++ b/talemate_frontend/src/components/SceneTools.vue @@ -246,22 +246,8 @@ - - - - Save - - {{ option.title }} - {{ option.description }} - - - + +
@@ -279,7 +265,7 @@ import SceneToolsNarrator from './SceneToolsNarrator.vue'; import SceneToolsActor from './SceneToolsActor.vue'; import SceneToolsCreative from './SceneToolsCreative.vue'; import SceneToolsVisual from './SceneToolsVisual.vue'; - +import SceneToolsSave from './SceneToolsSave.vue'; export default { name: 'SceneTools', @@ -289,6 +275,7 @@ export default { SceneToolsActor, SceneToolsCreative, SceneToolsVisual, + SceneToolsSave, }, props: { appBusy: Boolean, @@ -349,12 +336,6 @@ export default { {"value": "toggleAutoSave", "title": "Auto Save", "icon": "mdi-content-save", "description": "Automatically save after each game-loop", "status": () => { return this.canAutoSave ? this.autoSave : "Manually save scene for auto-save to be available"; }}, {"value": "toggleAutoProgress", "title": "Auto Progress", "icon": "mdi-robot", "description": "AI automatically progresses after player turn.", "status": () => { return this.autoProgress }}, ], - - saveMenu: [ - {"value": "save_as", "title": "Save As", "icon": "mdi-content-save-all", "description": "Save the current scene as a new scene"}, - {"value": "save", "title": "Save", "icon": "mdi-content-save", "description": "Save the current scene"}, - ], - advanceTimeOptions: [ {"value" : "P10Y", "title": "10 years"}, {"value" : "P5Y", "title": "5 years"}, diff --git a/talemate_frontend/src/components/SceneToolsSave.vue b/talemate_frontend/src/components/SceneToolsSave.vue new file mode 100644 index 00000000..94a9c805 --- /dev/null +++ b/talemate_frontend/src/components/SceneToolsSave.vue @@ -0,0 +1,78 @@ + + + \ No newline at end of file diff --git a/talemate_frontend/src/components/TalemateApp.vue b/talemate_frontend/src/components/TalemateApp.vue index 898e5137..ab819880 100644 --- a/talemate_frontend/src/components/TalemateApp.vue +++ b/talemate_frontend/src/components/TalemateApp.vue @@ -39,8 +39,9 @@ - + + v{{ version }} @@ -67,14 +68,28 @@ + + - mdi-bug - mdi-cog - mdi-application-cog - mdi-application-cog + + + + + + + + + + + @@ -200,6 +215,8 @@ :appearance-config="appConfig ? appConfig.appearance : {}" :ux-locked="uxLocked" :agent-status="agentStatus" + :audio-played-for-message-id="audioPlayedForMessageId" + @cancel-audio-queue="onCancelAudioQueue" />
@@ -302,6 +319,7 @@ import AudioQueue from './AudioQueue.vue'; import StatusNotification from './StatusNotification.vue'; import RateLimitAlert from './RateLimitAlert.vue'; import VisualQueue from './VisualQueue.vue'; +import VoiceLibrary from './VoiceLibrary.vue'; import WorldStateManager from './WorldStateManager.vue'; import WorldStateManagerMenu from './WorldStateManagerMenu.vue'; import IntroView from './IntroView.vue'; @@ -337,6 +355,7 @@ export default { DirectorConsoleWidget, PackageManager, PackageManagerMenu, + VoiceLibrary, }, name: 'TalemateApp', data() { @@ -436,6 +455,7 @@ export default { lastAgentUpdate: null, lastClientUpdate: null, busy: false, + audioPlayedForMessageId: undefined, } }, watch:{ @@ -618,9 +638,9 @@ export default { this.connecting = true; let currentUrl = new URL(window.location.href); - let websocketUrl = process.env.VUE_APP_TALEMATE_BACKEND_WEBSOCKET_URL || `ws://${currentUrl.hostname}:5050/ws`; + let websocketUrl = import.meta.env.VITE_TALEMATE_BACKEND_WEBSOCKET_URL || `ws://${currentUrl.hostname}:5050/ws`; - console.log("urls", { websocketUrl, currentUrl }, {env : process.env}); + console.log("urls", { websocketUrl, currentUrl }, {env : import.meta.env}); this.websocket = new WebSocket(websocketUrl); console.log("Websocket connecting ...") @@ -638,8 +658,12 @@ export default { this.sceneActive = false; this.scene = {}; this.loading = false; - if(this.reconnect) - this.connect(); + if (this.reconnect) { + // Wait for the configured reconnectInterval before trying again to reduce rapid retry loops + setTimeout(() => { + this.connect(); + }, this.reconnectInterval); + } }; this.websocket.onerror = (error) => { console.log('WebSocket error', error); @@ -1086,10 +1110,48 @@ export default { this.websocket.send(JSON.stringify({ type: 'request_app_config' })); }, saveClients(clients) { - this.websocket.send(JSON.stringify({ type: 'configure_clients', clients: clients })); + console.log("saveClients", clients) + + const saveData = {} + + for(let client of clients) { + saveData[client.name] = { + ...client, + } + } + this.websocket.send(JSON.stringify({ type: 'configure_clients', clients: saveData })); }, saveAgents(agents) { - this.websocket.send(JSON.stringify({ type: 'configure_agents', agents: agents })); + const saveData = {} + + for(let agent of agents) { + console.log("agent", agent) + const requiresLLM = agent.data?.requires_llm_client || false; + + let client; + + if(requiresLLM) { + + if(agent.client?.client) { + client = agent.client?.client?.value; + } else { + client = agent.client || null; + } + + } else { + client = null; + } + + saveData[agent.name] = { + enabled: agent.enabled, + actions: agent.actions, + client: client, + } + } + + console.log("saveAgents",{ saveData, agents }) + + this.websocket.send(JSON.stringify({ type: 'configure_agents', agents: saveData })); }, requestSceneAssets(asset_ids) { this.websocket.send(JSON.stringify({ type: 'request_scene_assets', asset_ids: asset_ids })); @@ -1311,6 +1373,17 @@ export default { this.websocket.send(JSON.stringify({ type: 'interact', text: "!setenv_scene" })); }, + onMessageAudioPlayed(messageId) { + this.audioPlayedForMessageId = messageId; + }, + + onCancelAudioQueue() { + this.audioPlayedForMessageId = undefined; + if(this.$refs.audioQueue) { + this.$refs.audioQueue.stopAndClear(); + } + }, + } } diff --git a/talemate_frontend/src/components/VoiceLibrary.vue b/talemate_frontend/src/components/VoiceLibrary.vue new file mode 100644 index 00000000..a107c7f6 --- /dev/null +++ 
b/talemate_frontend/src/components/VoiceLibrary.vue @@ -0,0 +1,881 @@ + + + + + \ No newline at end of file diff --git a/talemate_frontend/src/components/VoiceLibraryCharacterManager.vue b/talemate_frontend/src/components/VoiceLibraryCharacterManager.vue new file mode 100644 index 00000000..a96f2de1 --- /dev/null +++ b/talemate_frontend/src/components/VoiceLibraryCharacterManager.vue @@ -0,0 +1,315 @@ + + + + + \ No newline at end of file diff --git a/talemate_frontend/src/components/VoiceMixer.vue b/talemate_frontend/src/components/VoiceMixer.vue new file mode 100644 index 00000000..69270a4b --- /dev/null +++ b/talemate_frontend/src/components/VoiceMixer.vue @@ -0,0 +1,393 @@ + + + + + \ No newline at end of file diff --git a/talemate_frontend/src/components/VoiceSelect.vue b/talemate_frontend/src/components/VoiceSelect.vue new file mode 100644 index 00000000..f072aa2f --- /dev/null +++ b/talemate_frontend/src/components/VoiceSelect.vue @@ -0,0 +1,133 @@ + + + + + \ No newline at end of file diff --git a/talemate_frontend/src/components/WhatsNew.vue b/talemate_frontend/src/components/WhatsNew.vue index aa95b674..e2f277b4 100644 --- a/talemate_frontend/src/components/WhatsNew.vue +++ b/talemate_frontend/src/components/WhatsNew.vue @@ -25,13 +25,13 @@ - + {{ feature.title }} - - + + Dialogue Instructions - + Dialogue Examples + + Voice + @@ -65,19 +68,59 @@ /> - - - - -
- {{ example }} -
-
-
+ +
+ + +
+
+ + Remove this example + mdi-close-box-outline + +
+
+
+
+
+
+ + + + Test Voice + + + + Select a voice for {{ character.name }}. Only voices from ready TTS APIs are listed. +
@@ -89,12 +132,15 @@ import ContextualGenerate from './ContextualGenerate.vue'; import SpiceAppliedNotification from './SpiceAppliedNotification.vue'; +import VoiceSelect from './VoiceSelect.vue'; +import { parseSceneText } from '../utils/sceneMessageRenderer.js'; export default { name: 'WorldStateManagerCharacterActor', components: { ContextualGenerate, SpiceAppliedNotification, + VoiceSelect, }, data() { return { @@ -103,6 +149,9 @@ export default { dialogueExample: "", dialogueInstructions: null, dialogueInstructionsDirty: false, + voiceId: null, + voiceDirty: false, + testingVoice: false, updateCharacterActorTimeout: null, } }, @@ -124,9 +173,11 @@ export default { handler() { this.dialogueInstructions = this.character.actor.dialogue_instructions; this.dialogueExamples = this.character.actor.dialogue_examples; + this.voiceId = this.character.voice ? this.character.voice.id : null; }, - deep: true - } + deep: true, + immediate: true, + }, }, props: { character: Object, @@ -149,11 +200,19 @@ export default { type: "world_state_manager", action: "update_character_actor", name: this.character.name, - dialogue_instructions: this.dialogueInstructions, + dialogue_instructions: this.dialogueInstructions || "", dialogue_examples: this.dialogueExamples, })); }, + handleDialogeExampleEnter(event) { + if(!event.shiftKey) { + this.dialogueExamples.push(this.dialogueExample); + this.dialogueExample = ''; + this.updateCharacterActor(); + } + }, + setCharacterDialogueInstructions(instructions) { this.dialogueInstructions = instructions; this.dialogueInstructionsDirty = true; @@ -172,12 +231,63 @@ export default { if(data.type === 'world_state_manager') { if(data.action === 'character_actor_updated') { this.dialogueInstructionsDirty = false; + } else if (data.action === 'character_voice_updated') { + this.voiceDirty = false; } else if (data.action === 'character_dialogue_instructions_generated') { this.dialogueInstructions = data.data.instructions; this.dialogueInstructionsBusy = false; } + } else if (data.type === 'tts') { + if (data.action === 'operation_done' || data.action === 'operation_failed') { + this.testingVoice = false; + } } }, + + updateCharacterVoice() { + if(!this.voiceDirty) return; + + this.getWebsocket().send(JSON.stringify({ + type: 'world_state_manager', + action: 'update_character_voice', + name: this.character.name, + voice_id: this.voiceId, + })); + }, + + testCharacterVoice() { + if (!this.voiceId || this.testingVoice) return; + + // Extract provider and provider_id from voiceId (format: "provider:provider_id") + const [provider, ...providerIdParts] = this.voiceId.split(':'); + const provider_id = providerIdParts.join(':'); + + // Get a random dialogue example, or use default text + let testText = "This is a test of the selected voice."; + if (this.dialogueExamples && this.dialogueExamples.length > 0) { + const randomIndex = Math.floor(Math.random() * this.dialogueExamples.length); + testText = this.dialogueExamples[randomIndex]; + // Strip character name prefix if present + if (testText.startsWith(this.character.name + ':')) { + testText = testText.substring(this.character.name.length + 1).trim(); + } + } + + this.testingVoice = true; + this.getWebsocket().send(JSON.stringify({ + type: 'tts', + action: 'test', + provider: provider, + provider_id: provider_id, + provider_model: this.character.voice.provider_model, + parameters: this.character.voice.parameters, + text: testText, + })); + }, + + renderSceneText(text) { + return parseSceneText(text); + }, }, created() { 
this.registerMessageHandler(this.handleMessage); @@ -185,6 +295,7 @@ export default { mounted() { this.dialogueInstructions = this.character.actor.dialogue_instructions; this.dialogueExamples = this.character.actor.dialogue_examples; + this.voiceId = this.character.voice ? this.character.voice.id : null; }, } diff --git a/talemate_frontend/src/components/WorldStateManagerCharacterCreator.vue b/talemate_frontend/src/components/WorldStateManagerCharacterCreator.vue index 9c1a8fad..20b93dfb 100644 --- a/talemate_frontend/src/components/WorldStateManagerCharacterCreator.vue +++ b/talemate_frontend/src/components/WorldStateManagerCharacterCreator.vue @@ -152,6 +152,21 @@ export default { this.character = {}; this.$emit('cancelled'); }, + + reset() { + this.character = { + generation_context: { + enabled: true, + instructions: "", + generateAttributes: true, + }, + description: "", + name: "", + templates: [], + is_player: false, + } + }, + sendAutocompleteRequest() { this.descriptionBusy = true; this.autocompleteRequest({ @@ -220,6 +235,7 @@ export default { if(this.character.created) { this.character.created(message.data); } + this.reset(); } // Handle director responses (for AI generation) else if (message.type === 'director' && message.action === 'character_persisted') { @@ -228,6 +244,7 @@ export default { if(this.character.created) { this.character.created(message.character); } + this.reset(); } else if ((message.type === 'director' || message.type === 'world_state_manager') && message.action === 'operation_done') { this.busy = false; diff --git a/talemate_frontend/src/components/WorldStateManagerSceneExport.vue b/talemate_frontend/src/components/WorldStateManagerSceneExport.vue index fb62c4f5..d619dee3 100644 --- a/talemate_frontend/src/components/WorldStateManagerSceneExport.vue +++ b/talemate_frontend/src/components/WorldStateManagerSceneExport.vue @@ -28,6 +28,43 @@ > + + + + Include in Export + + + + + + + + + + + + + + + + + @@ -54,12 +91,17 @@ export default { data() { return { formats: [ - { value: 'talemate', title: 'Talemate Scene' }, + { value: 'talemate', title: 'Talemate Scene (JSON only)' }, + { value: 'talemate_complete', title: 'Complete Scene Package (ZIP)' }, ], - format: 'talemate', + format: 'talemate_complete', resetProgress: true, exportName: '', formIsValid: false, + includeAssets: true, + includeNodes: true, + includeInfo: true, + includeTemplates: true, rules: { required: value => !!value || 'Required.' } @@ -74,13 +116,23 @@ export default { return; } - this.getWebsocket().send(JSON.stringify({ + const exportOptions = { type: 'world_state_manager', action: 'export_scene', format: this.format, reset_progress: this.resetProgress, name: this.exportName, - })); + }; + + // Add complete export options if selected + if (this.format === 'talemate_complete') { + exportOptions.include_assets = this.includeAssets; + exportOptions.include_nodes = this.includeNodes; + exportOptions.include_info = this.includeInfo; + exportOptions.include_templates = this.includeTemplates; + } + + this.getWebsocket().send(JSON.stringify(exportOptions)); }, handleMessage(message) { if (message.type !== 'world_state_manager') { @@ -89,13 +141,24 @@ export default { if (message.action === 'scene_exported') { const scene_b64 = message.data; + const format = message.format || 'talemate'; + const fileExtension = message.file_extension || 'json'; + // prepare data url for download - const data = `data:application/octet-stream;base64,${scene_b64}`; + const mimeType = fileExtension === 'zip' ? 
'application/zip' : 'application/json'; + const data = `data:${mimeType};base64,${scene_b64}`; + // trigger download const a = document.createElement('a'); a.href = data; - a.download = `${this.exportName}.json`; + a.download = `${this.exportName}.${fileExtension}`; a.click(); + + // Show success message + this.$emit('show-message', { + message: `Scene exported successfully as ${this.exportName}.${fileExtension}`, + type: 'success' + }); } } }, diff --git a/talemate_frontend/src/components/WorldStateManagerWorldEntries.vue b/talemate_frontend/src/components/WorldStateManagerWorldEntries.vue index d72b27b4..e0319765 100644 --- a/talemate_frontend/src/components/WorldStateManagerWorldEntries.vue +++ b/talemate_frontend/src/components/WorldStateManagerWorldEntries.vue @@ -1,11 +1,13 @@