diff --git a/blazor/ai/azure-openai.md b/blazor/ai/azure-openai.md index d83fadaca6..34af512093 100644 --- a/blazor/ai/azure-openai.md +++ b/blazor/ai/azure-openai.md @@ -15,11 +15,14 @@ This section explains how to configure and use the [Syncfusion.Blazor.AI](https: Before you begin integrating Azure OpenAI with your Blazor application, ensure you have: -* Installed the [Syncfusion.Blazor.AI](https://www.nuget.org/packages/Syncfusion.Blazor.AI) package via NuGet +* Installed the following NuGet packages: {% tabs %} {% highlight C# tabtitle="Package Manager" %} Install-Package Syncfusion.Blazor.AI -Version {{ site.releaseversion }} +Install-Package Microsoft.Extensions.AI +Install-Package Microsoft.Extensions.AI.OpenAI +Install-Package Azure.AI.OpenAI {% endhighlight %} {% endtabs %} @@ -38,14 +41,20 @@ Open your Blazor application's `Program.cs` file and add the following configura ```csharp // Add required namespaces using Syncfusion.Blazor.AI; +using Azure.AI.OpenAI; +using Microsoft.Extensions.AI; +using System.ClientModel; // Register Azure OpenAI credentials -builder.Services.AddSingleton(new AIServiceCredentials -{ - ApiKey = "your-azure-openai-key", // Replace with your Azure OpenAI API key - DeploymentName = "your-deployment-name", // Specify the Azure OpenAI deployment name - Endpoint = new Uri("https://your-openai.azure.com/") // Replace with your Azure OpenAI endpoint -}); +string azureOpenAiKey = "AZURE_OPENAI_KEY"; +string azureOpenAiEndpoint = "AZURE_OPENAI_ENDPOINT"; +string azureOpenAiModel = "AZURE_OPENAI_MODEL"; +AzureOpenAIClient azureOpenAIClient = new AzureOpenAIClient( + new Uri(azureOpenAiEndpoint), + new ApiKeyCredential(azureOpenAiKey) +); +IChatClient azureOpenAiChatClient = azureOpenAIClient.GetChatClient(azureOpenAiModel).AsIChatClient(); +builder.Services.AddChatClient(azureOpenAiChatClient); // Register the inference service builder.Services.AddSingleton(); @@ -64,6 +73,8 @@ Install-Package Syncfusion.Blazor.Grid -Version {{
site.releaseversion }} Install-Package Syncfusion.Blazor.Themes -Version {{ site.releaseversion }} Install-Package Syncfusion.Blazor.AI -Version {{ site.releaseversion }} Install-Package Syncfusion.Blazor.QueryBuilder -Version {{ site.releaseversion }} +Install-Package Microsoft.Extensions.AI +Install-Package Microsoft.Extensions.AI.OpenAI Install-Package Azure.AI.OpenAI {% endhighlight %} diff --git a/blazor/ai/ollama.md b/blazor/ai/ollama.md index 822cda6393..5fd3e68199 100644 --- a/blazor/ai/ollama.md +++ b/blazor/ai/ollama.md @@ -15,11 +15,13 @@ This section explains how to configure and use the [Syncfusion.Blazor.AI](https: Before you begin integrating Ollama with your Blazor application, ensure you have: -* Installed the [Syncfusion.Blazor.AI](https://www.nuget.org/packages/Syncfusion.Blazor.AI) package via NuGet +* Installed the following NuGet packages: {% tabs %} {% highlight C# tabtitle="Package Manager" %} Install-Package Syncfusion.Blazor.AI -Version {{ site.releaseversion }} +Install-Package Microsoft.Extensions.AI +Install-Package OllamaSharp {% endhighlight %} {% endtabs %} @@ -41,13 +43,12 @@ Open your Blazor application's `Program.cs` file and add the following configura ```csharp // Add required namespaces using Syncfusion.Blazor.AI; +using Microsoft.Extensions.AI; +using OllamaSharp; -builder.Services.AddSingleton(new AIServiceCredentials -{ - DeploymentName = "llama2", // Specify the Ollama model (e.g., "llama2", "mistral", "codellama") - Endpoint = new Uri("http://localhost:11434"), // Replace with your Ollama endpoint URL - SelfHosted = true // Set to true for Ollama -}); +string ModelName = "MODEL_NAME"; +IChatClient chatClient = new OllamaApiClient("http://localhost:11434", ModelName); +builder.Services.AddChatClient(chatClient); // Register the inference backend builder.Services.AddSingleton(); @@ -66,6 +67,8 @@ This example demonstrates using the **Syncfusion.Blazor.AI** package with **Olla Install-Package Syncfusion.Blazor.TreeGrid
-Version {{ site.releaseversion }} Install-Package Syncfusion.Blazor.Themes -Version {{ site.releaseversion }} Install-Package Syncfusion.Blazor.AI -Version {{ site.releaseversion }} +Install-Package Microsoft.Extensions.AI +Install-Package OllamaSharp {% endhighlight %} {% endtabs %} diff --git a/blazor/ai/openAI.md b/blazor/ai/openAI.md index 7eac88b6f9..7a2f665034 100644 --- a/blazor/ai/openAI.md +++ b/blazor/ai/openAI.md @@ -17,11 +17,13 @@ This section helps to configuring and using the [Syncfusion.Blazor.AI](https://w Before you begin integrating OpenAI with your Blazor application, ensure you have: -* Installed the [Syncfusion.Blazor.AI](https://www.nuget.org/packages/Syncfusion.Blazor.AI) package via NuGet +* Installed the following NuGet packages: {% tabs %} {% highlight C# tabtitle="Package Manager" %} Install-Package Syncfusion.Blazor.AI -Version {{ site.releaseversion }} +Install-Package Microsoft.Extensions.AI +Install-Package Microsoft.Extensions.AI.OpenAI {% endhighlight %} {% endtabs %} @@ -39,14 +41,15 @@ Open your Blazor application's `Program.cs` file and add the following configura ```csharp // Add required namespaces using Syncfusion.Blazor.AI; +using Microsoft.Extensions.AI; +using OpenAI; // Register OpenAI credentials -builder.Services.AddSingleton(new AIServiceCredentials -{ - ApiKey = "your-openai-key", // Replace with your actual OpenAI API key - DeploymentName = "gpt-4", // Specify the model (e.g., "gpt-4", "gpt-3.5-turbo") - Endpoint = null // Must be null for OpenAI (as opposed to Azure OpenAI) -}); +string openAiApiKey = "API-KEY"; +string openAiModel = "OPENAI_MODEL"; +OpenAIClient openAIClient = new OpenAIClient(openAiApiKey); +IChatClient openAiChatClient = openAIClient.GetChatClient(openAiModel).AsIChatClient(); +builder.Services.AddChatClient(openAiChatClient); // Register the inference service builder.Services.AddSingleton(); @@ -64,6 +67,8 @@ This example demonstrates using the **Syncfusion.Blazor.AI** package with OpenAI
Install-Package Syncfusion.Blazor.Grid -Version {{ site.releaseversion }} Install-Package Syncfusion.Blazor.Themes -Version {{ site.releaseversion }} Install-Package Syncfusion.Blazor.AI -Version {{ site.releaseversion }} +Install-Package Microsoft.Extensions.AI +Install-Package Microsoft.Extensions.AI.OpenAI {% endhighlight %} {% endtabs %} diff --git a/blazor/smart-paste/getting-started-webapp.md b/blazor/smart-paste/getting-started-webapp.md index a9452d547e..a6f789b366 100644 --- a/blazor/smart-paste/getting-started-webapp.md +++ b/blazor/smart-paste/getting-started-webapp.md @@ -146,25 +146,47 @@ var app = builder.Build(); ## Configure AI Service -To configure the AI service, add the following settings to the **~/Program.cs** file in your Blazor Server app. +Follow the instructions below to register an AI model in your application. + +### OpenAI + +For **OpenAI**, create an API key and place it at `openAIApiKey`. The value for `openAIModel` is the model you wish to use (e.g., `gpt-3.5-turbo`, `gpt-4`, etc.). + +* Install the following NuGet packages to your project: + +{% tabs %} + +{% highlight c# tabtitle="Package Manager" %} + +Install-Package Microsoft.Extensions.AI +Install-Package Microsoft.Extensions.AI.OpenAI + +{% endhighlight %} + +{% endtabs %} + +* To configure the AI service, add the following settings to the **~/Program.cs** file in your Blazor Server app. {% tabs %} {% highlight C# tabtitle="Blazor Server App" hl_lines="7 8 9 11 12 13" %} using Syncfusion.Blazor.SmartComponents; using Syncfusion.Blazor.AI; +using Microsoft.Extensions.AI; +using OpenAI; var builder = WebApplication.CreateBuilder(args); .... 
builder.Services.AddSyncfusionBlazor(); -string apiKey = "api-key"; -string deploymentName = "deployment-name"; -string endpoint = "end point url";// Must be null for OpenAI +string openAIApiKey = "API-KEY"; +string openAIModel = "OPENAI_MODEL"; +OpenAIClient openAIClient = new OpenAIClient(openAIApiKey); +IChatClient openAIChatClient = openAIClient.GetChatClient(openAIModel).AsIChatClient(); +builder.Services.AddChatClient(openAIChatClient); builder.Services.AddSyncfusionSmartComponents() -.ConfigureCredentials(new AIServiceCredentials(apiKey, deploymentName, endpoint)) .InjectOpenAIInference(); var app = builder.Build(); @@ -173,19 +195,64 @@ var app = builder.Build(); {% endhighlight %} {% endtabs %} -Here, +### Azure OpenAI -* **apiKey**: "OpenAI or Azure OpenAI API Key"; -* **deploymentName**: "Azure OpenAI deployment name"; -* **endpoint**: "Azure OpenAI deployment end point URL"; +For **Azure OpenAI**, first [deploy an Azure OpenAI Service resource and model](https://learn.microsoft.com/en-us/azure/ai-services/openai/how-to/create-resource), then values for `azureOpenAIKey`, `azureOpenAIEndpoint` and `azureOpenAIModel` will all be provided to you. -For **Azure OpenAI**, first [deploy an Azure OpenAI Service resource and model](https://learn.microsoft.com/en-us/azure/ai-services/openai/how-to/create-resource), then values for `apiKey`, `deploymentName` and `endpoint` will all be provided to you. +* Install the following NuGet packages to your project: -N> From version 28.2.33, the Azure.AI.OpenAI package has been removed from the SmartComponents dependency. To use Azure OpenAI, please install the [Azure.AI.OpenAI](https://www.nuget.org/packages/Azure.AI.OpenAI) package separately in your Blazor application. +{% tabs %} -If you are using **OpenAI**, [create an API key](https://help.openai.com/en/articles/4936850-where-do-i-find-my-openai-api-key) and place it at `apiKey`, leave the `endpoint` as `""`. 
The value for `deploymentName` is the [model](https://learn.microsoft.com/en-us/azure/ai-services/openai/concepts/models) you wish to use (e.g., `gpt-3.5-turbo`, `gpt-4`, etc.). +{% highlight c# tabtitle="Package Manager" %} -### Configuring Ollama for Self-Hosted AI Models +Install-Package Microsoft.Extensions.AI +Install-Package Microsoft.Extensions.AI.OpenAI +Install-Package Azure.AI.OpenAI + +{% endhighlight %} + +{% endtabs %} + +* To configure the AI service, add the following settings to the **~/Program.cs** file in your Blazor Server app. + +{% tabs %} +{% highlight C# tabtitle="Blazor Server App" hl_lines="7 8 9 11 12 13" %} + +using Syncfusion.Blazor.SmartComponents; +using Syncfusion.Blazor.AI; +using Azure.AI.OpenAI; +using Microsoft.Extensions.AI; +using System.ClientModel; + +var builder = WebApplication.CreateBuilder(args); + +.... + +builder.Services.AddSyncfusionBlazor(); + +string azureOpenAIKey = "AZURE_OPENAI_KEY"; +string azureOpenAIEndpoint = "AZURE_OPENAI_ENDPOINT"; +string azureOpenAIModel = "AZURE_OPENAI_MODEL"; +AzureOpenAIClient azureOpenAIClient = new AzureOpenAIClient( + new Uri(azureOpenAIEndpoint), + new ApiKeyCredential(azureOpenAIKey) +); +IChatClient azureOpenAIChatClient = azureOpenAIClient.GetChatClient(azureOpenAIModel).AsIChatClient(); +builder.Services.AddChatClient(azureOpenAIChatClient); + +builder.Services.AddSyncfusionSmartComponents() +.InjectOpenAIInference(); + +var app = builder.Build(); +.... + +{% endhighlight %} +{% endtabs %} + + +N> From version 28.2.33 to 30.2.6, the Azure.AI.OpenAI package has been removed from the SmartComponents dependency. To use Azure OpenAI, please install the [Azure.AI.OpenAI](https://www.nuget.org/packages/Azure.AI.OpenAI) package separately in your Blazor application. + +### Ollama To use Ollama for running self-hosted models: @@ -197,25 +264,43 @@ To use Ollama for running self-hosted models: 3. **Configure your application** - - Set `SelfHosted` to `true`. 
- Provide the `Endpoint` URL where the model is hosted (e.g., `http://localhost:11434`). - - Set `DeploymentName` to the specific model you installed (e.g., `llama2:13b`). + - Set `ModelName` to the specific model you installed (e.g., `llama2:13b`). + +* Install the following NuGet packages to your project: + +{% tabs %} + +{% highlight c# tabtitle="Package Manager" %} -Add the following settings to the **~/Program.cs** file in your Blazor Server app. +Install-Package Microsoft.Extensions.AI +Install-Package OllamaSharp + +{% endhighlight %} + +{% endtabs %} + +* Add the following settings to the **~/Program.cs** file in your Blazor Server app. {% tabs %} {% highlight C# tabtitle="Blazor Server App" hl_lines="7 8 9 11 12 13" %} using Syncfusion.Blazor.SmartComponents; using Syncfusion.Blazor.AI; +using Microsoft.Extensions.AI; +using OllamaSharp; + var builder = WebApplication.CreateBuilder(args); .... builder.Services.AddSyncfusionBlazor(); +string ModelName = "MODEL_NAME"; +IChatClient chatClient = new OllamaApiClient("http://localhost:11434", ModelName); +builder.Services.AddChatClient(chatClient); + builder.Services.AddSyncfusionSmartComponents() -.ConfigureCredentials(new AIServiceCredentials { SelfHosted=true, Endpoint= new Uri("Your self-hosted end point url") ,DeploymentName = "Your model name" }) .InjectOpenAIInference(); var app = builder.Build(); diff --git a/blazor/smart-paste/getting-started.md b/blazor/smart-paste/getting-started.md index 4a76e2c8bd..2ae43c9e0a 100644 --- a/blazor/smart-paste/getting-started.md +++ b/blazor/smart-paste/getting-started.md @@ -130,25 +130,47 @@ var app = builder.Build(); ## Configure AI Service -To configure the AI service, add the following settings to the **~/Program.cs** file in your Blazor Server app. +Follow the instructions below to register an AI model in your application. + +### OpenAI + +For **OpenAI**, create an API key and place it at `openAIApiKey`. 
The value for `openAIModel` is the model you wish to use (e.g., `gpt-3.5-turbo`, `gpt-4`, etc.). + +* Install the following NuGet packages to your project: + +{% tabs %} + +{% highlight c# tabtitle="Package Manager" %} + +Install-Package Microsoft.Extensions.AI +Install-Package Microsoft.Extensions.AI.OpenAI + +{% endhighlight %} + +{% endtabs %} + +* To configure the AI service, add the following settings to the **~/Program.cs** file in your Blazor Server app. {% tabs %} {% highlight C# tabtitle="Blazor Server App" hl_lines="7 8 9 11 12 13" %} using Syncfusion.Blazor.SmartComponents; using Syncfusion.Blazor.AI; +using Microsoft.Extensions.AI; +using OpenAI; var builder = WebApplication.CreateBuilder(args); .... builder.Services.AddSyncfusionBlazor(); -string apiKey = "api-key"; -string deploymentName = "deployment-name"; -string endpoint = "end point url";// Must be null for OpenAI +string openAIApiKey = "API-KEY"; +string openAIModel = "OPENAI_MODEL"; +OpenAIClient openAIClient = new OpenAIClient(openAIApiKey); +IChatClient openAIChatClient = openAIClient.GetChatClient(openAIModel).AsIChatClient(); +builder.Services.AddChatClient(openAIChatClient); builder.Services.AddSyncfusionSmartComponents() -.ConfigureCredentials(new AIServiceCredentials(apiKey, deploymentName, endpoint)) .InjectOpenAIInference(); var app = builder.Build(); @@ -157,19 +179,64 @@ var app = builder.Build(); {% endhighlight %} {% endtabs %} -Here, +### Azure OpenAI -* **apiKey**: "OpenAI or Azure OpenAI API Key"; -* **deploymentName**: "Azure OpenAI deployment name"; -* **endpoint**: "Azure OpenAI deployment end point URL"; +For **Azure OpenAI**, first [deploy an Azure OpenAI Service resource and model](https://learn.microsoft.com/en-us/azure/ai-services/openai/how-to/create-resource), then values for `azureOpenAIKey`, `azureOpenAIEndpoint` and `azureOpenAIModel` will all be provided to you. 
-For **Azure OpenAI**, first [deploy an Azure OpenAI Service resource and model](https://learn.microsoft.com/en-us/azure/ai-services/openai/how-to/create-resource), then values for `apiKey`, `deploymentName` and `endpoint` will all be provided to you. +* Install the following NuGet packages to your project: -N> From version 28.2.33, the Azure.AI.OpenAI package has been removed from the SmartComponents dependency. To use Azure OpenAI, please install the [Azure.AI.OpenAI](https://www.nuget.org/packages/Azure.AI.OpenAI) package separately in your Blazor application. +{% tabs %} -If you are using **OpenAI**, create an API key and place it at `apiKey`, leave the `endpoint` as `""`. The value for `deploymentName` is the model you wish to use (e.g., `gpt-3.5-turbo`, `gpt-4`, etc.). +{% highlight c# tabtitle="Package Manager" %} -### Configuring Ollama for Self-Hosted AI Models +Install-Package Microsoft.Extensions.AI +Install-Package Microsoft.Extensions.AI.OpenAI +Install-Package Azure.AI.OpenAI + +{% endhighlight %} + +{% endtabs %} + +* To configure the AI service, add the following settings to the **~/Program.cs** file in your Blazor Server app. + +{% tabs %} +{% highlight C# tabtitle="Blazor Server App" hl_lines="7 8 9 11 12 13" %} + +using Syncfusion.Blazor.SmartComponents; +using Syncfusion.Blazor.AI; +using Azure.AI.OpenAI; +using Microsoft.Extensions.AI; +using System.ClientModel; + +var builder = WebApplication.CreateBuilder(args); + +.... 
+ +builder.Services.AddSyncfusionBlazor(); + +string azureOpenAIKey = "AZURE_OPENAI_KEY"; +string azureOpenAIEndpoint = "AZURE_OPENAI_ENDPOINT"; +string azureOpenAIModel = "AZURE_OPENAI_MODEL"; +AzureOpenAIClient azureOpenAIClient = new AzureOpenAIClient( + new Uri(azureOpenAIEndpoint), + new ApiKeyCredential(azureOpenAIKey) +); +IChatClient azureOpenAIChatClient = azureOpenAIClient.GetChatClient(azureOpenAIModel).AsIChatClient(); +builder.Services.AddChatClient(azureOpenAIChatClient); + +builder.Services.AddSyncfusionSmartComponents() +.InjectOpenAIInference(); + +var app = builder.Build(); +.... + +{% endhighlight %} +{% endtabs %} + + +N> From version 28.2.33 to 30.2.6, the Azure.AI.OpenAI package has been removed from the SmartComponents dependency. To use Azure OpenAI, please install the [Azure.AI.OpenAI](https://www.nuget.org/packages/Azure.AI.OpenAI) package separately in your Blazor application. + +### Ollama To use Ollama for running self-hosted models: @@ -181,25 +248,43 @@ To use Ollama for running self-hosted models: 3. **Configure your application** - - Set `SelfHosted` to `true`. - Provide the `Endpoint` URL where the model is hosted (e.g., `http://localhost:11434`). - - Set `DeploymentName` to the specific model you installed (e.g., `llama2:13b`). + - Set `ModelName` to the specific model you installed (e.g., `llama2:13b`). + +* Install the following NuGet packages to your project: + +{% tabs %} + +{% highlight c# tabtitle="Package Manager" %} -Add the following settings to the **~/Program.cs** file in your Blazor Server app. +Install-Package Microsoft.Extensions.AI +Install-Package OllamaSharp + +{% endhighlight %} + +{% endtabs %} + +* Add the following settings to the **~/Program.cs** file in your Blazor Server app. 
{% tabs %} {% highlight C# tabtitle="Blazor Server App" hl_lines="7 8 9 11 12 13" %} using Syncfusion.Blazor.SmartComponents; using Syncfusion.Blazor.AI; +using Microsoft.Extensions.AI; +using OllamaSharp; + var builder = WebApplication.CreateBuilder(args); .... builder.Services.AddSyncfusionBlazor(); +string ModelName = "MODEL_NAME"; +IChatClient chatClient = new OllamaApiClient("http://localhost:11434", ModelName); +builder.Services.AddChatClient(chatClient); + builder.Services.AddSyncfusionSmartComponents() -.ConfigureCredentials(new AIServiceCredentials { SelfHosted=true, Endpoint= new Uri("Your self-hosted end point url") ,DeploymentName = "Your model name" }) .InjectOpenAIInference(); var app = builder.Build(); diff --git a/blazor/smart-textarea/getting-started-webapp.md b/blazor/smart-textarea/getting-started-webapp.md index 0491ee6106..9f60192453 100644 --- a/blazor/smart-textarea/getting-started-webapp.md +++ b/blazor/smart-textarea/getting-started-webapp.md @@ -147,25 +147,47 @@ var app = builder.Build(); ## Configure AI Service -To configure the AI service, add the following settings to the **~/Program.cs** file in your Blazor Server app. +Follow the instructions below to register an AI model in your application. + +### OpenAI + +For **OpenAI**, create an API key and place it at `openAIApiKey`. The value for `openAIModel` is the model you wish to use (e.g., `gpt-3.5-turbo`, `gpt-4`, etc.). + +* Install the following NuGet packages to your project: + +{% tabs %} + +{% highlight c# tabtitle="Package Manager" %} + +Install-Package Microsoft.Extensions.AI +Install-Package Microsoft.Extensions.AI.OpenAI + +{% endhighlight %} + +{% endtabs %} + +* To configure the AI service, add the following settings to the **~/Program.cs** file in your Blazor Server app. 
{% tabs %} {% highlight C# tabtitle="Blazor Server App" hl_lines="7 8 9 11 12 13" %} using Syncfusion.Blazor.SmartComponents; using Syncfusion.Blazor.AI; +using Microsoft.Extensions.AI; +using OpenAI; var builder = WebApplication.CreateBuilder(args); .... builder.Services.AddSyncfusionBlazor(); -string apiKey = "api-key"; -string deploymentName = "deployment-name"; -string endpoint = "end point url";// Must be null for OpenAI +string openAIApiKey = "API-KEY"; +string openAIModel = "OPENAI_MODEL"; +OpenAIClient openAIClient = new OpenAIClient(openAIApiKey); +IChatClient openAIChatClient = openAIClient.GetChatClient(openAIModel).AsIChatClient(); +builder.Services.AddChatClient(openAIChatClient); builder.Services.AddSyncfusionSmartComponents() -.ConfigureCredentials(new AIServiceCredentials(apiKey, deploymentName, endpoint)) .InjectOpenAIInference(); var app = builder.Build(); @@ -174,19 +196,64 @@ var app = builder.Build(); {% endhighlight %} {% endtabs %} -Here, +### Azure OpenAI -* **apiKey**: "OpenAI or Azure OpenAI API Key"; -* **deploymentName**: "Azure OpenAI deployment name"; -* **endpoint**: "Azure OpenAI deployment end point URL"; +For **Azure OpenAI**, first [deploy an Azure OpenAI Service resource and model](https://learn.microsoft.com/en-us/azure/ai-services/openai/how-to/create-resource), then values for `azureOpenAIKey`, `azureOpenAIEndpoint` and `azureOpenAIModel` will all be provided to you. -For **Azure OpenAI**, first [deploy an Azure OpenAI Service resource and model](https://learn.microsoft.com/en-us/azure/ai-services/openai/how-to/create-resource), then values for `apiKey`, `deploymentName` and `endpoint` will all be provided to you. +* Install the following NuGet packages to your project: -N> From version 28.2.33, the Azure.AI.OpenAI package has been removed from the SmartComponents dependency. To use Azure OpenAI, please install the [Azure.AI.OpenAI](https://www.nuget.org/packages/Azure.AI.OpenAI) package separately in your Blazor application. 
+{% tabs %} -If you are using **OpenAI**, [create an API key](https://help.openai.com/en/articles/4936850-where-do-i-find-my-openai-api-key) and place it at `apiKey`, leave the `endpoint` as `""`. The value for `deploymentName` is the [model](https://learn.microsoft.com/en-us/azure/ai-services/openai/concepts/models) you wish to use (e.g., `gpt-3.5-turbo`, `gpt-4`, etc.). +{% highlight c# tabtitle="Package Manager" %} + +Install-Package Microsoft.Extensions.AI +Install-Package Microsoft.Extensions.AI.OpenAI +Install-Package Azure.AI.OpenAI + +{% endhighlight %} + +{% endtabs %} + +* To configure the AI service, add the following settings to the **~/Program.cs** file in your Blazor Server app. -### Configuring Ollama for Self-Hosted AI Models +{% tabs %} +{% highlight C# tabtitle="Blazor Server App" hl_lines="7 8 9 11 12 13" %} + +using Syncfusion.Blazor.SmartComponents; +using Syncfusion.Blazor.AI; +using Azure.AI.OpenAI; +using Microsoft.Extensions.AI; +using System.ClientModel; + +var builder = WebApplication.CreateBuilder(args); + +.... + +builder.Services.AddSyncfusionBlazor(); + +string azureOpenAIKey = "AZURE_OPENAI_KEY"; +string azureOpenAIEndpoint = "AZURE_OPENAI_ENDPOINT"; +string azureOpenAIModel = "AZURE_OPENAI_MODEL"; +AzureOpenAIClient azureOpenAIClient = new AzureOpenAIClient( + new Uri(azureOpenAIEndpoint), + new ApiKeyCredential(azureOpenAIKey) +); +IChatClient azureOpenAIChatClient = azureOpenAIClient.GetChatClient(azureOpenAIModel).AsIChatClient(); +builder.Services.AddChatClient(azureOpenAIChatClient); + +builder.Services.AddSyncfusionSmartComponents() +.InjectOpenAIInference(); + +var app = builder.Build(); +.... + +{% endhighlight %} +{% endtabs %} + + +N> From version 28.2.33 to 30.2.6, the Azure.AI.OpenAI package has been removed from the SmartComponents dependency. To use Azure OpenAI, please install the [Azure.AI.OpenAI](https://www.nuget.org/packages/Azure.AI.OpenAI) package separately in your Blazor application. 
+ +### Ollama To use Ollama for running self-hosted models: @@ -198,25 +265,42 @@ To use Ollama for running self-hosted models: 3. **Configure your application** - - Set `SelfHosted` to `true`. - Provide the `Endpoint` URL where the model is hosted (e.g., `http://localhost:11434`). - - Set `DeploymentName` to the specific model you installed (e.g., `llama2:13b`). + - Set `ModelName` to the specific model you installed (e.g., `llama2:13b`). + +* Install the following NuGet packages to your project: + +{% tabs %} + +{% highlight c# tabtitle="Package Manager" %} + +Install-Package Microsoft.Extensions.AI +Install-Package OllamaSharp -Add the following settings to the **~/Program.cs** file in your Blazor Server app. +{% endhighlight %} + +{% endtabs %} + +* Add the following settings to the **~/Program.cs** file in your Blazor Server app. {% tabs %} {% highlight C# tabtitle="Blazor Server App" hl_lines="7 8 9 11 12 13" %} using Syncfusion.Blazor.SmartComponents; using Syncfusion.Blazor.AI; +using Microsoft.Extensions.AI; +using OllamaSharp; var builder = WebApplication.CreateBuilder(args); .... builder.Services.AddSyncfusionBlazor(); +string ModelName = "MODEL_NAME"; +IChatClient chatClient = new OllamaApiClient("http://localhost:11434", ModelName); +builder.Services.AddChatClient(chatClient); + builder.Services.AddSyncfusionSmartComponents() -.ConfigureCredentials(new AIServiceCredentials { SelfHosted=true, Endpoint= new Uri("Your self-hosted end point url") ,DeploymentName = "Your model name" }) .InjectOpenAIInference(); var app = builder.Build(); diff --git a/blazor/smart-textarea/getting-started.md b/blazor/smart-textarea/getting-started.md index 1378aa7a36..dabfc1769b 100644 --- a/blazor/smart-textarea/getting-started.md +++ b/blazor/smart-textarea/getting-started.md @@ -130,25 +130,47 @@ var app = builder.Build(); ## Configure AI Service -To configure the AI service, add the following settings to the **~/Program.cs** file in your Blazor Server app. 
+Follow the instructions below to register an AI model in your application. + +### OpenAI + +For **OpenAI**, create an API key and place it at `openAIApiKey`. The value for `openAIModel` is the model you wish to use (e.g., `gpt-3.5-turbo`, `gpt-4`, etc.). + +* Install the following NuGet packages to your project: + +{% tabs %} + +{% highlight c# tabtitle="Package Manager" %} + +Install-Package Microsoft.Extensions.AI +Install-Package Microsoft.Extensions.AI.OpenAI + +{% endhighlight %} + +{% endtabs %} + +* To configure the AI service, add the following settings to the **~/Program.cs** file in your Blazor Server app. {% tabs %} {% highlight C# tabtitle="Blazor Server App" hl_lines="7 8 9 11 12 13" %} using Syncfusion.Blazor.SmartComponents; using Syncfusion.Blazor.AI; +using Microsoft.Extensions.AI; +using OpenAI; var builder = WebApplication.CreateBuilder(args); .... builder.Services.AddSyncfusionBlazor(); -string apiKey = "api-key"; -string deploymentName = "deployment-name"; -string endpoint = "end point url";// Must be null for OpenAI +string openAIApiKey = "API-KEY"; +string openAIModel = "OPENAI_MODEL"; +OpenAIClient openAIClient = new OpenAIClient(openAIApiKey); +IChatClient openAIChatClient = openAIClient.GetChatClient(openAIModel).AsIChatClient(); +builder.Services.AddChatClient(openAIChatClient); builder.Services.AddSyncfusionSmartComponents() -.ConfigureCredentials(new AIServiceCredentials(apiKey, deploymentName, endpoint)) .InjectOpenAIInference(); var app = builder.Build(); @@ -157,19 +179,64 @@ var app = builder.Build(); {% endhighlight %} {% endtabs %} -Here, +### Azure OpenAI + +For **Azure OpenAI**, first [deploy an Azure OpenAI Service resource and model](https://learn.microsoft.com/en-us/azure/ai-services/openai/how-to/create-resource), then values for `azureOpenAIKey`, `azureOpenAIEndpoint` and `azureOpenAIModel` will all be provided to you. 
+ +* Install the following NuGet packages to your project: + +{% tabs %} + +{% highlight c# tabtitle="Package Manager" %} + +Install-Package Microsoft.Extensions.AI +Install-Package Microsoft.Extensions.AI.OpenAI +Install-Package Azure.AI.OpenAI + +{% endhighlight %} + +{% endtabs %} + +* To configure the AI service, add the following settings to the **~/Program.cs** file in your Blazor Server app. + +{% tabs %} +{% highlight C# tabtitle="Blazor Server App" hl_lines="7 8 9 11 12 13" %} + +using Syncfusion.Blazor.SmartComponents; +using Syncfusion.Blazor.AI; +using Azure.AI.OpenAI; +using Microsoft.Extensions.AI; +using System.ClientModel; + +var builder = WebApplication.CreateBuilder(args); + +.... + +builder.Services.AddSyncfusionBlazor(); + +string azureOpenAIKey = "AZURE_OPENAI_KEY"; +string azureOpenAIEndpoint = "AZURE_OPENAI_ENDPOINT"; +string azureOpenAIModel = "AZURE_OPENAI_MODEL"; +AzureOpenAIClient azureOpenAIClient = new AzureOpenAIClient( + new Uri(azureOpenAIEndpoint), + new ApiKeyCredential(azureOpenAIKey) +); +IChatClient azureOpenAIChatClient = azureOpenAIClient.GetChatClient(azureOpenAIModel).AsIChatClient(); +builder.Services.AddChatClient(azureOpenAIChatClient); -* **apiKey**: "OpenAI or Azure OpenAI API Key"; -* **deploymentName**: "Azure OpenAI deployment name"; -* **endpoint**: "Azure OpenAI deployment end point URL"; +builder.Services.AddSyncfusionSmartComponents() +.InjectOpenAIInference(); + +var app = builder.Build(); +.... -For **Azure OpenAI**, first [deploy an Azure OpenAI Service resource and model](https://learn.microsoft.com/en-us/azure/ai-services/openai/how-to/create-resource), then values for `apiKey`, `deploymentName` and `endpoint` will all be provided to you. +{% endhighlight %} +{% endtabs %} -N> From version 28.2.33, the Azure.AI.OpenAI package has been removed from the SmartComponents dependency. 
To use Azure OpenAI, please install the [Azure.AI.OpenAI](https://www.nuget.org/packages/Azure.AI.OpenAI) package separately in your Blazor application. -If you are using **OpenAI**, create an API key and place it at `apiKey`, leave the `endpoint` as `""`. The value for `deploymentName` is the model you wish to use (e.g., `gpt-3.5-turbo`, `gpt-4`, etc.). +N> From version 28.2.33 to 30.2.6, the Azure.AI.OpenAI package has been removed from the SmartComponents dependency. To use Azure OpenAI, please install the [Azure.AI.OpenAI](https://www.nuget.org/packages/Azure.AI.OpenAI) package separately in your Blazor application. -### Configuring Ollama for Self-Hosted AI Models +### Ollama To use Ollama for running self-hosted models: @@ -181,25 +248,42 @@ To use Ollama for running self-hosted models: 3. **Configure your application** - - Set `SelfHosted` to `true`. - Provide the `Endpoint` URL where the model is hosted (e.g., `http://localhost:11434`). - - Set `DeploymentName` to the specific model you installed (e.g., `llama2:13b`). + - Set `ModelName` to the specific model you installed (e.g., `llama2:13b`). + +* Install the following NuGet packages to your project: + +{% tabs %} + +{% highlight c# tabtitle="Package Manager" %} + +Install-Package Microsoft.Extensions.AI +Install-Package OllamaSharp -Add the following settings to the **~/Program.cs** file in your Blazor Server app. +{% endhighlight %} + +{% endtabs %} + +* Add the following settings to the **~/Program.cs** file in your Blazor Server app. {% tabs %} {% highlight C# tabtitle="Blazor Server App" hl_lines="7 8 9 11 12 13" %} using Syncfusion.Blazor.SmartComponents; using Syncfusion.Blazor.AI; +using Microsoft.Extensions.AI; +using OllamaSharp; var builder = WebApplication.CreateBuilder(args); .... 
builder.Services.AddSyncfusionBlazor(); +string ModelName = "MODEL_NAME"; +IChatClient chatClient = new OllamaApiClient("http://localhost:11434", ModelName); +builder.Services.AddChatClient(chatClient); + builder.Services.AddSyncfusionSmartComponents() -.ConfigureCredentials(new AIServiceCredentials { SelfHosted=true, Endpoint= new Uri("Your self-hosted end point url") ,DeploymentName = "Your model name" }) .InjectOpenAIInference(); var app = builder.Build();