Better error handling when Foundry is not available

vanzue
2025-11-16 09:14:49 +08:00
parent 8b885ccf8e
commit 24ef97981c
2 changed files with 21 additions and 54 deletions

View File

@@ -168,8 +168,6 @@ internal sealed class FoundryClient
    }

    public async Task<bool> EnsureModelLoaded(string modelId)
    {
-       try
-       {
            Logger.LogInfo($"[FoundryClient] EnsureModelLoaded called with: {modelId}");
@@ -189,12 +187,6 @@ internal sealed class FoundryClient
Logger.LogInfo($"[FoundryClient] Model load result: {loaded}");
return loaded;
}
catch (Exception ex)
{
Logger.LogError($"[FoundryClient] EnsureModelLoaded exception: {ex.Message}");
return false;
}
}
public async Task EnsureRunning()
{

View File

@@ -24,23 +24,9 @@ public sealed class FoundryLocalModelProvider : ILanguageModelProvider
    public string ProviderDescription => "The model will run locally via Foundry Local";

    public IChatClient? GetIChatClient(string modelId)
    {
-       try
-       {
            Logger.LogInfo($"[FoundryLocal] GetIChatClient called with url: {modelId}");
            InitializeAsync().GetAwaiter().GetResult();
-       }
-       catch (Exception ex)
-       {
-           Logger.LogError($"[FoundryLocal] Failed to initialize: {ex.Message}");
-           return null;
-       }
-       if (string.IsNullOrWhiteSpace(_serviceUrl) || _foundryClient == null)
-       {
-           Logger.LogError("[FoundryLocal] Service URL or manager is null");
-           return null;
-       }

        if (string.IsNullOrWhiteSpace(modelId))
        {
@@ -67,21 +53,11 @@ public sealed class FoundryLocalModelProvider : ILanguageModelProvider
        }

        // Ensure the model is loaded before returning chat client
-       try
-       {
-           var isLoaded = _foundryClient.EnsureModelLoaded(modelId).GetAwaiter().GetResult();
+       var isLoaded = _foundryClient!.EnsureModelLoaded(modelId).GetAwaiter().GetResult();
        if (!isLoaded)
        {
            Logger.LogError($"[FoundryLocal] Failed to load model: {modelId}");
-           return null;
-       }
-           Logger.LogInfo($"[FoundryLocal] Model is loaded: {modelId}");
-       }
-       catch (Exception ex)
-       {
-           Logger.LogError($"[FoundryLocal] Exception ensuring model loaded: {ex.Message}");
-           return null;
+           throw new InvalidOperationException($"Failed to load the model '{modelId}'.");
        }

        // Use ServiceUri instead of Endpoint since Endpoint already includes /v1
@@ -95,7 +71,6 @@ public sealed class FoundryLocalModelProvider : ILanguageModelProvider
        var endpointUri = new Uri($"{baseUri.ToString().TrimEnd('/')}/v1");
        Logger.LogInfo($"[FoundryLocal] Creating OpenAI client with endpoint: {endpointUri}");
        Logger.LogInfo($"[FoundryLocal] Model ID for chat client: {modelId}");
        return new OpenAIClient(
            new ApiKeyCredential("none"),
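
After this commit, GetIChatClient no longer swallows failures: exceptions from InitializeAsync propagate to the caller, and a model that cannot be loaded raises an InvalidOperationException instead of silently returning null. The sketch below shows how consuming code might react to that change; it assumes Microsoft.Extensions.AI's IChatClient and the repo's ILanguageModelProvider are in scope, and ChatClientFactory/TryCreate are hypothetical names, not part of this commit.

    using System;
    using Microsoft.Extensions.AI;

    public static class ChatClientFactory
    {
        // Hypothetical helper: callers can no longer rely on a null return alone to
        // detect a Foundry Local failure, so they catch the exception and report it.
        public static IChatClient? TryCreate(ILanguageModelProvider provider, string modelId)
        {
            try
            {
                // May still return null for invalid input (e.g. an empty model id).
                return provider.GetIChatClient(modelId);
            }
            catch (InvalidOperationException ex)
            {
                // Raised when the model cannot be loaded, e.g. Foundry Local is not available.
                Console.Error.WriteLine($"Foundry Local could not provide '{modelId}': {ex.Message}");
                return null;
            }
        }
    }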