Service for interacting with AI chat models to get completions with conversation memory.
Inheritance: Object → AIChatService
Namespace: Radzen
Assembly: Radzen.Blazor.dll
public class AIChatService : IAIChatService

Service for interacting with AI chat models to get completions with conversation memory.
public AIChatService(IServiceProvider serviceProvider, Microsoft.Extensions.Options.IOptions<AIChatServiceOptions> options)
| Type | Name | Description |
|---|---|---|
| IServiceProvider | serviceProvider | |
| Microsoft.Extensions.Options.IOptions<AIChatServiceOptions> | options | |
Gets the configuration options for the chat streaming service.
public AIChatServiceOptions Options { get; }
| Type | Description |
|---|---|
| AIChatServiceOptions | Gets the configuration options for the chat streaming service. |
public void CleanupOldSessions(int maxAgeHours)
| Type | Name | Description |
|---|---|---|
| int | maxAgeHours | |
public void ClearSession(string sessionId)
| Type | Name | Description |
|---|---|---|
| string | sessionId | |
public IEnumerable<ConversationSession> GetActiveSessions()
| Type | Description |
|---|---|
| IEnumerable<ConversationSession> | |
public IAsyncEnumerable<string> GetCompletionsAsync(string userInput, string sessionId, System.Threading.CancellationToken cancellationToken, string model, string systemPrompt, double? temperature, int? maxTokens, string endpoint, string proxy, string apiKey, string apiKeyHeader)
| Type | Name | Description |
|---|---|---|
| string | userInput | |
| string | sessionId | |
| System.Threading.CancellationToken | cancellationToken | |
| string | model | |
| string | systemPrompt | |
| double? | temperature | |
| int? | maxTokens | |
| string | endpoint | |
| string | proxy | |
| string | apiKey | |
| string | apiKeyHeader | |
| Type | Description |
|---|---|
| IAsyncEnumerable<string> | |
public ConversationSession GetOrCreateSession(string sessionId)
| Type | Name | Description |
|---|---|---|
| string | sessionId | |
| Type | Description |
|---|---|
| ConversationSession | |