Advanced Paste: Add more error handling for Foundry Local (#43600)

<!-- Enter a brief description/summary of your PR here. What does it
fix/what does it change/how was it tested (even manually, if necessary)?
-->
## Summary of the Pull Request
The Foundry Local SDK will not run models that are no longer in its catalog, so when the catalog drops a model, previously configured models stop working. This change adds clear error hints that tell users to re-configure their model in Settings; a condensed sketch of the new validation order is shown below.
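A minimal sketch of the checks the provider now performs before creating a chat client. The `ModelInfo` record and `EnsureUsable` helper here are illustrative stand-ins, not the actual PowerToys types; the real change lives in `FoundryLocalModelProvider.GetIChatClient` (see the diff below).

```csharp
using System;
using System.Collections.Generic;
using System.Linq;

// Illustrative stand-in for a catalog/cache entry; the real code uses
// FoundryCatalogModel / FoundryCachedModel via ModelDetails.
public sealed record ModelInfo(string Name);

public static class FoundryModelValidation
{
    // Mirrors the new error-handling order: catalog check first, then cache check.
    // Each failure throws InvalidOperationException with a user-facing hint
    // instead of silently returning null.
    public static void EnsureUsable(
        string modelId,
        IEnumerable<ModelInfo>? catalogModels,
        IEnumerable<ModelInfo>? cachedModels)
    {
        if (!(catalogModels?.Any(m => m.Name == modelId) ?? false))
        {
            throw new InvalidOperationException(
                $"{modelId} is not supported in Foundry Local. Please configure supported models in Settings.");
        }

        if (!(cachedModels?.Any(m => m.Name == modelId) ?? false))
        {
            throw new InvalidOperationException(
                $"The requested model '{modelId}' is not cached. Please download it using Foundry Local.");
        }
    }
}
```

The Advanced Paste provider catches this `InvalidOperationException` and re-wraps it as a `PasteActionException` with a localized message, so the hint reaches the UI.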

<!-- Please review the items on the PR checklist before submitting-->
## PR Checklist

- [ ] Closes: #xxx
<!-- - [ ] Closes: #yyy (add separate lines for additional resolved
issues) -->
- [ ] **Communication:** I've discussed this with core contributors
already. If the work hasn't been agreed, this work might be rejected
- [ ] **Tests:** Added/updated and all pass
- [ ] **Localization:** All end-user-facing strings can be localized
- [ ] **Dev docs:** Added/updated
- [ ] **New binaries:** Added on the required places
- [ ] [JSON for
signing](https://github.com/microsoft/PowerToys/blob/main/.pipelines/ESRPSigning_core.json)
for new binaries
- [ ] [WXS for
installer](https://github.com/microsoft/PowerToys/blob/main/installer/PowerToysSetup/Product.wxs)
for new binaries and localization folder
- [ ] [YML for CI
pipeline](https://github.com/microsoft/PowerToys/blob/main/.pipelines/ci/templates/build-powertoys-steps.yml)
for new test projects
- [ ] [YML for signed
pipeline](https://github.com/microsoft/PowerToys/blob/main/.pipelines/release.yml)
- [ ] **Documentation updated:** If checked, please file a pull request
on [our docs
repo](https://github.com/MicrosoftDocs/windows-uwp/tree/docs/hub/powertoys)
and link it here: #xxx

<!-- Provide a more detailed description of the PR, other things fixed,
or any additional comments/features here -->
## Detailed Description of the Pull Request / Additional comments

<!-- Describe how you validated the behavior. Add automated tests
wherever possible, but list manual validation steps taken as well -->
## Validation Steps Performed
<img width="864" height="216" alt="image"
src="https://github.com/user-attachments/assets/654207b3-ff50-4888-a638-82136216de7b"
/>
Commit 3e14d50f65 (parent db7c9e180e) by Kai Tao, 2025-11-16 15:26:05 +08:00, committed via GitHub. 6 changed files with 72 additions and 81 deletions.


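`FoundryClient.EnsureModelLoaded` drops its catch-all handler and the duplicate cache check, so failures now propagate to the caller instead of being swallowed as `false`: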
```diff
@@ -169,41 +169,23 @@ internal sealed class FoundryClient
     public async Task<bool> EnsureModelLoaded(string modelId)
     {
-        try
+        Logger.LogInfo($"[FoundryClient] EnsureModelLoaded called with: {modelId}");
+
+        // Check if already loaded
+        if (await IsModelLoaded(modelId).ConfigureAwait(false))
         {
-            Logger.LogInfo($"[FoundryClient] EnsureModelLoaded called with: {modelId}");
-
-            // Check if already loaded
-            if (await IsModelLoaded(modelId).ConfigureAwait(false))
-            {
-                Logger.LogInfo($"[FoundryClient] Model already loaded: {modelId}");
-                return true;
-            }
-
-            // Check if model exists in cache
-            var cachedModels = await ListCachedModels().ConfigureAwait(false);
-            Logger.LogInfo($"[FoundryClient] Cached models: {string.Join(", ", cachedModels.Select(m => m.Name))}");
-            if (!cachedModels.Any(m => m.Name == modelId))
-            {
-                Logger.LogWarning($"[FoundryClient] Model not found in cache: {modelId}");
-                return false;
-            }
-
-            // Load the model
-            Logger.LogInfo($"[FoundryClient] Loading model: {modelId}");
-            await _foundryManager.LoadModelAsync(modelId).ConfigureAwait(false);
-
-            // Verify it's loaded
-            var loaded = await IsModelLoaded(modelId).ConfigureAwait(false);
-            Logger.LogInfo($"[FoundryClient] Model load result: {loaded}");
-            return loaded;
-        }
-        catch (Exception ex)
-        {
-            Logger.LogError($"[FoundryClient] EnsureModelLoaded exception: {ex.Message}");
-            return false;
+            Logger.LogInfo($"[FoundryClient] Model already loaded: {modelId}");
+            return true;
         }
+
+        // Load the model
+        Logger.LogInfo($"[FoundryClient] Loading model: {modelId}");
+        await _foundryManager.LoadModelAsync(modelId).ConfigureAwait(false);
+
+        // Verify it's loaded
+        var loaded = await IsModelLoaded(modelId).ConfigureAwait(false);
+        Logger.LogInfo($"[FoundryClient] Model load result: {loaded}");
+        return loaded;
     }

     public async Task EnsureRunning()
```


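`FoundryLocalModelProvider` now tracks catalog models, and `GetIChatClient` throws `InvalidOperationException` with user-facing messages (model not in catalog, not cached, failed to load, service unavailable) instead of returning `null`: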
```diff
@@ -13,6 +13,7 @@ namespace LanguageModelProvider;
 public sealed class FoundryLocalModelProvider : ILanguageModelProvider
 {
     private IEnumerable<ModelDetails>? _downloadedModels;
+    private IEnumerable<FoundryCatalogModel>? _catalogModels;
     private FoundryClient? _foundryClient;
     private string? _serviceUrl;

@@ -24,22 +25,8 @@ public sealed class FoundryLocalModelProvider : ILanguageModelProvider
     public IChatClient? GetIChatClient(string modelId)
     {
-        try
-        {
-            Logger.LogInfo($"[FoundryLocal] GetIChatClient called with url: {modelId}");
-            InitializeAsync().GetAwaiter().GetResult();
-        }
-        catch (Exception ex)
-        {
-            Logger.LogError($"[FoundryLocal] Failed to initialize: {ex.Message}");
-            return null;
-        }
-
-        if (string.IsNullOrWhiteSpace(_serviceUrl) || _foundryClient == null)
-        {
-            Logger.LogError("[FoundryLocal] Service URL or manager is null");
-            return null;
-        }
+        Logger.LogInfo($"[FoundryLocal] GetIChatClient called with url: {modelId}");
+        InitializeAsync().GetAwaiter().GetResult();

         if (string.IsNullOrWhiteSpace(modelId))
         {
@@ -47,35 +34,43 @@ public sealed class FoundryLocalModelProvider : ILanguageModelProvider
             return null;
         }

-        // Ensure the model is loaded before returning chat client
-        try
+        // Check if model is in catalog
+        var isInCatalog = _catalogModels?.Any(m => m.Name == modelId) ?? false;
+        if (!isInCatalog)
         {
-            var isLoaded = _foundryClient.EnsureModelLoaded(modelId).GetAwaiter().GetResult();
-            if (!isLoaded)
-            {
-                Logger.LogError($"[FoundryLocal] Failed to load model: {modelId}");
-                return null;
-            }
-
-            Logger.LogInfo($"[FoundryLocal] Model is loaded: {modelId}");
+            var errorMessage = $"{modelId} is not supported in Foundry Local. Please configure supported models in Settings.";
+            Logger.LogError($"[FoundryLocal] {errorMessage}");
+            throw new InvalidOperationException(errorMessage);
         }
-        catch (Exception ex)
+
+        // Check if model is cached
+        var isInCache = _downloadedModels?.Any(m => m.ProviderModelDetails is FoundryCachedModel cached && cached.Name == modelId) ?? false;
+        if (!isInCache)
         {
-            Logger.LogError($"[FoundryLocal] Exception ensuring model loaded: {ex.Message}");
-            return null;
+            var errorMessage = $"The requested model '{modelId}' is not cached. Please download it using Foundry Local.";
+            Logger.LogError($"[FoundryLocal] {errorMessage}");
+            throw new InvalidOperationException(errorMessage);
+        }
+
+        // Ensure the model is loaded before returning chat client
+        var isLoaded = _foundryClient!.EnsureModelLoaded(modelId).GetAwaiter().GetResult();
+        if (!isLoaded)
+        {
+            Logger.LogError($"[FoundryLocal] Failed to load model: {modelId}");
+            throw new InvalidOperationException($"Failed to load the model '{modelId}'.");
         }

         // Use ServiceUri instead of Endpoint since Endpoint already includes /v1
         var baseUri = _foundryClient.GetServiceUri();
         if (baseUri == null)
         {
-            Logger.LogError("[FoundryLocal] Service URI is null");
-            return null;
+            const string message = "Foundry Local service URL is not available. Please make sure Foundry Local is installed and running.";
+            Logger.LogError($"[FoundryLocal] {message}");
+            throw new InvalidOperationException(message);
         }

         var endpointUri = new Uri($"{baseUri.ToString().TrimEnd('/')}/v1");
         Logger.LogInfo($"[FoundryLocal] Creating OpenAI client with endpoint: {endpointUri}");
+        Logger.LogInfo($"[FoundryLocal] Model ID for chat client: {modelId}");

         return new OpenAIClient(
             new ApiKeyCredential("none"),
@@ -122,12 +117,13 @@ public sealed class FoundryLocalModelProvider : ILanguageModelProvider
     private void Reset()
     {
         _downloadedModels = null;
+        _catalogModels = null;
        _ = InitializeAsync();
     }

     private async Task InitializeAsync(CancellationToken cancelationToken = default)
     {
-        if (_foundryClient != null && _downloadedModels != null && _downloadedModels.Any())
+        if (_foundryClient != null && _downloadedModels != null && _downloadedModels.Any() && _catalogModels != null && _catalogModels.Any())
         {
             await _foundryClient.EnsureRunning().ConfigureAwait(false);
             return;
@@ -138,13 +134,18 @@ public sealed class FoundryLocalModelProvider : ILanguageModelProvider
         if (_foundryClient == null)
         {
-            Logger.LogError("[FoundryLocal] Failed to create Foundry client");
-            return;
+            const string message = "Foundry Local client could not be created. Please make sure Foundry Local is installed and running.";
+            Logger.LogError($"[FoundryLocal] {message}");
+            throw new InvalidOperationException(message);
         }

         _serviceUrl ??= await _foundryClient.GetServiceUrl();
         Logger.LogInfo($"[FoundryLocal] Service URL: {_serviceUrl}");

+        var catalogModels = await _foundryClient.ListCatalogModels();
+        Logger.LogInfo($"[FoundryLocal] Found {catalogModels.Count} catalog models");
+        _catalogModels = catalogModels;
+
         var cachedModels = await _foundryClient.ListCachedModels();
         Logger.LogInfo($"[FoundryLocal] Found {cachedModels.Count} cached models");
```


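`FoundryLocalPasteProvider` catches those `InvalidOperationException`s and re-throws them as a `PasteActionException` carrying a localized message: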
```diff
@@ -7,6 +7,7 @@ using System.Collections.Generic;
 using System.Linq;
 using System.Threading;
 using System.Threading.Tasks;
+using AdvancedPaste.Helpers;
 using AdvancedPaste.Models;
 using LanguageModelProvider;
 using Microsoft.Extensions.AI;
@@ -33,10 +34,6 @@ public sealed class FoundryLocalPasteProvider : IPasteAIProvider
         _config = config;
     }

-    public string ProviderName => AIServiceType.FoundryLocal.ToNormalizedKey();
-
-    public string DisplayName => string.IsNullOrWhiteSpace(_config?.Model) ? "Foundry Local" : _config.Model;
-
     public async Task<bool> IsAvailableAsync(CancellationToken cancellationToken)
     {
         cancellationToken.ThrowIfCancellationRequested();
@@ -76,13 +73,20 @@ public sealed class FoundryLocalPasteProvider : IPasteAIProvider
         }

         cancellationToken.ThrowIfCancellationRequested();

-        var chatClient = _modelProvider.GetIChatClient(modelReference);
-        if (chatClient is null)
+        IChatClient chatClient;
+        try
         {
+            chatClient = _modelProvider.GetIChatClient(modelReference);
+        }
+        catch (InvalidOperationException ex)
+        {
+            // GetIChatClient throws InvalidOperationException for user-facing errors
+            var errorMessage = string.Format(System.Globalization.CultureInfo.CurrentCulture, ResourceLoaderInstance.ResourceLoader.GetString("FoundryLocal_UnableToLoadModel"), modelReference);
             throw new PasteActionException(
-                $"Unable to load Foundry Local model: {modelReference}",
-                new InvalidOperationException("Chat client resolution failed"),
-                aiServiceMessage: "The model may not be downloaded or the Foundry Local service may not be running. Please check the model status in settings.");
+                errorMessage,
+                ex,
+                aiServiceMessage: ex.Message);
         }

         var userMessageContent = $"""
```


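Advanced Paste resources: a new localizable string for the model-load error: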
```diff
@@ -368,4 +368,8 @@
     <value>Local</value>
     <comment>Badge label displayed next to local AI model providers (e.g., Ollama, Foundry Local) to indicate the model runs locally</comment>
   </data>
+  <data name="FoundryLocal_UnableToLoadModel" xml:space="preserve">
+    <value>Unable to load Foundry Local model: {0}</value>
+    <comment>{0} is the model identifier. Do not translate {0}.</comment>
+  </data>
 </root>
```


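Settings UI for the Foundry Local model picker: `x:Uid` values updated to match the renamed resource keys below: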
```diff
@@ -106,7 +106,7 @@
             <ComboBox.Header>
                 <TextBlock>
                     <Run x:Uid="AdvancedPaste_FL_LocalModel" /><LineBreak /><Run
-                        x:Uid="AdvancedPaste_FL_UseCLIToDownloadModels"
+                        x:Uid="AdvancedPaste_FL_UseCliToDownloadModels"
                         FontSize="12"
                         Foreground="{ThemeResource TextFillColorSecondaryBrush}" />
                 </TextBlock>
@@ -152,7 +152,7 @@
             Spacing="8">
             <Image Width="36" Source="ms-appx:///Assets/Settings/Icons/Models/FoundryLocal.svg" />
             <TextBlock
-                x:Uid="AdvancedPaste_FL_FLNotavailableYet"
+                x:Uid="AdvancedPaste_FL_FLNotAvailableYet"
                 HorizontalAlignment="Center"
                 FontWeight="SemiBold"
                 TextAlignment="Center"
```


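Settings resources: the corresponding resource keys renamed: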
```diff
@@ -5693,14 +5693,14 @@ To record a specific window, enter the hotkey with the Alt key in the opposite m
     <value>Foundry Local model</value>
     <comment>Do not localize "Foundry Local", it's a product name</comment>
   </data>
-  <data name="AdvancedPaste_FL_UseCLIToDownloadModels.Text" xml:space="preserve">
+  <data name="AdvancedPaste_FL_UseCliToDownloadModels.Text" xml:space="preserve">
     <value>Use the Foundry Local CLI to download models that run locally on-device. They'll appear here.</value>
     <comment>Do not localize "Foundry Local", it's a product name</comment>
   </data>
   <data name="AdvancedPaste_FL_RefreshModelList.Text" xml:space="preserve">
     <value>Refresh model list</value>
   </data>
-  <data name="AdvancedPaste_FL_FLNotavailableYet.Text" xml:space="preserve">
+  <data name="AdvancedPaste_FL_FLNotAvailableYet.Text" xml:space="preserve">
     <value>Foundry Local is not available on this device yet.</value>
     <comment>Do not localize "Foundry Local", it's a product name</comment>
   </data>
```