|
1 |
| - |
2 |
| -using Microsoft.Windows.AI.Generative; |
3 |
| - |
4 | 1 | using AIShell.Abstraction;
|
5 | 2 | using Microsoft.Windows.AI;
|
| 3 | +using Microsoft.Windows.AI.Generative; |
6 | 4 |
|
7 |
namespace AIShell.PhiSilica.Agent;

/// <summary>
/// An <see cref="ILLMAgent"/> implementation backed by the on-device Phi Silica
/// language model, available offline on Copilot+ PCs via the Windows AI APIs.
/// </summary>
public sealed partial class PhiSilicaAgent : ILLMAgent
{
    // Tracks the background feature/model initialization kicked off in the constructor.
    private readonly Task _initTask;
    // The Phi Silica language model; assigned once initialization succeeds.
    private LanguageModel _model;

    public string Name => "PhiSilica";
    public string Description => "This is the Phi Silica agent, an offline local agent on Copilot+ PCs";
    public string SettingFile => null;

    public IEnumerable<CommandBase> GetCommands() => null;
    public bool CanAcceptFeedback(UserAction action) => false;
    public Task RefreshChatAsync(IShell shell, bool force) => Task.CompletedTask;
    public void OnUserAction(UserActionPayload actionPayload) { }
    public void Initialize(AgentConfig config) { }
    public void Dispose() { }

    /// <summary>
    /// Creates the agent and starts the AI feature/model initialization
    /// on a background thread so construction itself stays fast.
    /// </summary>
    public PhiSilicaAgent()
    {
        _initTask = Task.Run(InitFeatureAndModelAsync);
    }

    /// <summary>
    /// Ensures the Phi Silica feature is available and ready, then creates the model.
    /// </summary>
    /// <exception cref="PlatformNotSupportedException">
    /// Thrown when the feature is unsupported on this system or disabled by the user.
    /// </exception>
    /// <exception cref="InvalidOperationException">
    /// Thrown when the feature needs provisioning and <c>EnsureReadyAsync</c> fails.
    /// </exception>
    private async Task InitFeatureAndModelAsync()
    {
        AIFeatureReadyState readyState = LanguageModel.GetReadyState();

        switch (readyState)
        {
            case AIFeatureReadyState.NotSupportedOnCurrentSystem:
                throw new PlatformNotSupportedException("The Phi Silica feature is not supported on current system.");

            case AIFeatureReadyState.DisabledByUser:
                throw new PlatformNotSupportedException("The Phi Silica feature is currently disabled.");

            case AIFeatureReadyState.EnsureNeeded:
                // Initialize the WinRT runtime and provision the feature.
                AIFeatureReadyResult ensureResult = await LanguageModel.EnsureReadyAsync();
                // Do not proceed if it failed to get the feature ready.
                if (ensureResult.Status is not AIFeatureReadyResultState.Success)
                {
                    throw new InvalidOperationException(ensureResult.ErrorDisplayText, ensureResult.Error);
                }

                break;
        }

        _model = await LanguageModel.CreateAsync();
    }

    /// <summary>
    /// Sends the user's input to the local Phi Silica model and renders the reply.
    /// </summary>
    /// <param name="input">The user's chat message.</param>
    /// <param name="shell">The shell, used for host output and spinner rendering.</param>
    /// <returns>
    /// <c>true</c> when the chat round-trip completed (even if the model returned
    /// nothing); <c>false</c> when initialization had failed.
    /// </returns>
    public async Task<bool> ChatAsync(string input, IShell shell)
    {
        IHost host = shell.Host;

        try
        {
            // Wait for the init task to finish. Once it's finished, calling this again is a non-op.
            await _initTask;
        }
        catch (Exception e)
        {
            host.WriteErrorLine(e.Message);

            switch (e)
            {
                // EnsureReadyAsync failures carry the underlying error as InnerException.
                case InvalidOperationException { InnerException: not null }:
                    host.WriteErrorLine(e.InnerException.StackTrace);
                    break;

                // Unsupported/disabled feature: the message alone is enough.
                case PlatformNotSupportedException:
                    break;

                // Show stack trace for any other unexpected failure.
                default:
                    host.WriteErrorLine(e.StackTrace);
                    break;
            }

            return false;
        }

        // Generate the response while showing a spinner to the user.
        var response = await host.RunWithSpinnerAsync(
            status: "Thinking ...",
            func: async () => await _model.GenerateResponseAsync(input)
        );

        if (response is null || string.IsNullOrEmpty(response.Text))
        {
            host.WriteErrorLine("No response received from the language model.");
        }
        else
        {
            host.RenderFullResponse(response.Text);
        }

        return true;
    }
}
|
0 commit comments