diff --git a/Documentation/constitution-rag_test.pdf b/Documentation/constitution-rag_test.pdf
new file mode 100644
index 0000000..447cb0d
Binary files /dev/null and b/Documentation/constitution-rag_test.pdf differ
diff --git a/PalmHill.BlazorChat.ApiClient/BlazorChatApi.cs b/PalmHill.BlazorChat.ApiClient/BlazorChatApi.cs
new file mode 100644
index 0000000..4fb596f
--- /dev/null
+++ b/PalmHill.BlazorChat.ApiClient/BlazorChatApi.cs
@@ -0,0 +1,24 @@
+using PalmHill.BlazorChat.ApiClient.WebApiInterface;
+using Refit;
+using System;
+using System.Collections.Generic;
+using System.Linq;
+using System.Text;
+using System.Threading.Tasks;
+
+namespace PalmHill.BlazorChat.ApiClient
+{
+    public class BlazorChatApi
+    {
+        public BlazorChatApi(HttpClient httpClient)
+        {
+            HttpClient = httpClient;
+            Attachment = RestService.For<IAttachment>(httpClient);
+            Chat = RestService.For<IChat>(httpClient);
+        }
+
+        public HttpClient HttpClient { get; }
+        public IAttachment Attachment { get; }
+        public IChat Chat { get; }
+    }
+}
diff --git a/PalmHill.BlazorChat.ApiClient/PalmHill.BlazorChat.ApiClient.csproj b/PalmHill.BlazorChat.ApiClient/PalmHill.BlazorChat.ApiClient.csproj
new file mode 100644
index 0000000..2ab2228
--- /dev/null
+++ b/PalmHill.BlazorChat.ApiClient/PalmHill.BlazorChat.ApiClient.csproj
@@ -0,0 +1,17 @@
+<Project Sdk="Microsoft.NET.Sdk">
+
+  <PropertyGroup>
+    <TargetFramework>net8.0</TargetFramework>
+    <ImplicitUsings>enable</ImplicitUsings>
+    <Nullable>enable</Nullable>
+  </PropertyGroup>
+
+  <ItemGroup>
+    <PackageReference Include="Refit" Version="7.0.0" />
+  </ItemGroup>
+
+  <ItemGroup>
+    <ProjectReference Include="..\PalmHill.BlazorChat\Shared\PalmHill.BlazorChat.Shared.csproj" />
+  </ItemGroup>
+
+</Project>
diff --git a/PalmHill.BlazorChat.ApiClient/README.md b/PalmHill.BlazorChat.ApiClient/README.md
new file mode 100644
index 0000000..6eaf3b8
--- /dev/null
+++ b/PalmHill.BlazorChat.ApiClient/README.md
@@ -0,0 +1,36 @@
+# PalmHill.BlazorChat.ApiClient
+
+This project is a .NET 8.0 library that provides an API client for the PalmHill Blazor Chat application. It uses the Refit library for HTTP communication.
+
+## Dependencies
+- .NET 8.0
+- Refit 7.0.0
+
+## Project References
+- PalmHill.BlazorChat.Shared
+
+## API Interface
+The API client exposes the chat API through the `IChat` interface.
+
+### Methods
+- `Chat(InferenceRequest conversation)`: Sends a chat message to the server and returns the server's response as a string. The chat message is encapsulated in an `InferenceRequest` object.
+- `Ask(InferenceRequest chatConversation)`: Sends a chat message to the server and returns the server's response as a `ChatMessage` object. The chat message is encapsulated in an `InferenceRequest` object.
+- `CancelChat(Guid conversationId)`: Asks the server to cancel a chat conversation. The ID of the conversation to cancel is passed as a parameter.
+
+### BlazorChatApi.cs
+The `BlazorChatApi.cs` file contains the `BlazorChatApi` class, which is the main entry point for using the API client. It provides a convenient way to access the interfaces and their methods.
+
+The `BlazorChatApi` class has a constructor that takes an `HttpClient` object. This `HttpClient` is used by the Refit library to make HTTP requests.
+
+The `BlazorChatApi` class implements the interfaces by using Refit.
+
+## Usage
+To use this library, add a reference to it in your project and construct a `BlazorChatApi` with an `HttpClient`. Then call the methods defined on the `IChat` interface.
+
+```csharp
+var httpClient = new HttpClient { BaseAddress = new Uri("https://api.example.com") };
+var blazorChatApi = new BlazorChatApi(httpClient);
+var response = await blazorChatApi.Chat.Chat(new InferenceRequest { /* ... */ });
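+
+// A hedged sketch of the other two calls (assumes the Refit ApiResponse wrappers
+// declared in IChat; InferenceRequest.Id doubles as the conversation id in this PR):
+var request = new InferenceRequest { /* ... */ };
+var answer = await blazorChatApi.Chat.Ask(request);              // ApiResponse<ChatMessage>, answers from uploaded documents
+var cancelled = await blazorChatApi.Chat.CancelChat(request.Id); // ApiResponse<bool>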
+```
+
+Note: This library is not yet available on NuGet.
\ No newline at end of file
diff --git a/PalmHill.BlazorChat.ApiClient/WebApiInterface/IAttachment.cs b/PalmHill.BlazorChat.ApiClient/WebApiInterface/IAttachment.cs
new file mode 100644
index 0000000..7099f37
--- /dev/null
+++ b/PalmHill.BlazorChat.ApiClient/WebApiInterface/IAttachment.cs
@@ -0,0 +1,22 @@
+using PalmHill.BlazorChat.Shared.Models;
+using Refit;
+
+
+namespace PalmHill.BlazorChat.ApiClient.WebApiInterface
+{
+    public interface IAttachment
+    {
+        [Get("/api/Attachment/list/{conversationId}")]
+        Task<ApiResponse<List<AttachmentInfo>>> GetAttachments(string conversationId);
+
+        [Get("/api/Attachment/{attachmentId}")]
+        Task<ApiResponse<AttachmentInfo>> GetAttachmentById(string attachmentId);
+
+        [Multipart]
+        [Post("/api/Attachment/{conversationId}/{attachmentId}")]
+        Task<ApiResponse<AttachmentInfo>> AddAttachment(Guid conversationId, Guid attachmentId, [AliasAs("file")] StreamPart file);
+
+        [Delete("/api/Attachment/{attachmentId}")]
+        Task<ApiResponse<bool>> DeleteAttachment(Guid attachmentId);
+    }
+}
diff --git a/PalmHill.BlazorChat.ApiClient/WebApiInterface/IChat.cs b/PalmHill.BlazorChat.ApiClient/WebApiInterface/IChat.cs
new file mode 100644
index 0000000..91eefb2
--- /dev/null
+++ b/PalmHill.BlazorChat.ApiClient/WebApiInterface/IChat.cs
@@ -0,0 +1,19 @@
+using Refit;
+using PalmHill.BlazorChat.Shared.Models;
+
+
+namespace PalmHill.BlazorChat.ApiClient.WebApiInterface
+{
+    public interface IChat
+    {
+        [Post("/api/chat")]
+        Task<ApiResponse<string>> Chat(InferenceRequest conversation);
+
+        [Post("/api/chat/docs")]
+        Task<ApiResponse<ChatMessage>> Ask(InferenceRequest chatConversation);
+
+        [Delete("/api/chat/cancel/{conversationId}")]
+        public Task<ApiResponse<bool>> CancelChat(Guid conversationId);
+
+    }
+}
diff --git a/PalmHill.BlazorChat.sln b/PalmHill.BlazorChat.sln
index 5aeb7e3..0b047fc 100644
--- a/PalmHill.BlazorChat.sln
+++ b/PalmHill.BlazorChat.sln
@@ -16,6 +16,10 @@ Project("{9A19103F-16F7-4668-BE54-9A1E7A4F7556}") = "PalmHill.BlazorChat.Server"
 EndProject
 Project("{9A19103F-16F7-4668-BE54-9A1E7A4F7556}") = "PalmHill.BlazorChat.Shared", "PalmHill.BlazorChat\Shared\PalmHill.BlazorChat.Shared.csproj", "{F2337C3B-69E8-43F8-8D21-382C233702D0}"
 EndProject
+Project("{9A19103F-16F7-4668-BE54-9A1E7A4F7556}") = "PalmHill.LlmMemory", "PalmHill.LlmMemory\PalmHill.LlmMemory.csproj", "{A3075AF8-E520-4B62-8D47-564D9C88E52A}"
+EndProject
+Project("{9A19103F-16F7-4668-BE54-9A1E7A4F7556}") = "PalmHill.BlazorChat.ApiClient", "PalmHill.BlazorChat.ApiClient\PalmHill.BlazorChat.ApiClient.csproj", "{434FEDF0-2AD0-4276-AC06-E26126EEF237}"
+EndProject
 Global
 	GlobalSection(SolutionConfigurationPlatforms) = preSolution
 		Debug|Any CPU = Debug|Any CPU
@@ -38,6 +42,14 @@ Global
 		{F2337C3B-69E8-43F8-8D21-382C233702D0}.Debug|Any CPU.ActiveCfg = Debug|Any CPU
 		{F2337C3B-69E8-43F8-8D21-382C233702D0}.Debug|Any CPU.Build.0 = Debug|Any CPU
 		{F2337C3B-69E8-43F8-8D21-382C233702D0}.Release|Any CPU.ActiveCfg = Release|Any CPU
 		{F2337C3B-69E8-43F8-8D21-382C233702D0}.Release|Any CPU.Build.0 = Release|Any CPU
+		{A3075AF8-E520-4B62-8D47-564D9C88E52A}.Debug|Any CPU.ActiveCfg = Debug|Any CPU
+		{A3075AF8-E520-4B62-8D47-564D9C88E52A}.Debug|Any CPU.Build.0 = Debug|Any CPU
+		{A3075AF8-E520-4B62-8D47-564D9C88E52A}.Release|Any CPU.ActiveCfg = Release|Any CPU
+		{A3075AF8-E520-4B62-8D47-564D9C88E52A}.Release|Any CPU.Build.0 = Release|Any CPU
+		{434FEDF0-2AD0-4276-AC06-E26126EEF237}.Debug|Any CPU.ActiveCfg = Debug|Any CPU
+		{434FEDF0-2AD0-4276-AC06-E26126EEF237}.Debug|Any CPU.Build.0 = Debug|Any CPU
+		{434FEDF0-2AD0-4276-AC06-E26126EEF237}.Release|Any CPU.ActiveCfg = Release|Any CPU
+		{434FEDF0-2AD0-4276-AC06-E26126EEF237}.Release|Any CPU.Build.0 = Release|Any CPU
 	EndGlobalSection
GlobalSection(SolutionProperties) = preSolution HideSolutionNode = FALSE diff --git a/PalmHill.BlazorChat/Client/Components/Attachment/AttachmentItemDisplay.razor b/PalmHill.BlazorChat/Client/Components/Attachment/AttachmentItemDisplay.razor new file mode 100644 index 0000000..9af11b5 --- /dev/null +++ b/PalmHill.BlazorChat/Client/Components/Attachment/AttachmentItemDisplay.razor @@ -0,0 +1,129 @@ +@using PalmHill.BlazorChat.ApiClient +@using PalmHill.BlazorChat.Shared.Models +@inject BlazorChatApi BlazorChatApi + +@if (Attachment != null) +{ + + +
+ @Attachment.Name +
+ + +
+
+} + + +@code { + /// + /// The to display + /// + [Parameter] + public AttachmentInfo? Attachment { get; set; } + + /// + /// The to invoke when the attachment is deleted. + /// The event handler will remove the attachment from the list. + /// + [Parameter] + public EventCallback OnFileDeleted { get; set; } + + private bool _deleteInProgress = false; + + /// + /// Deletes the attachment by calling the api. + /// + /// + private async Task _deleteAttachment() + { + if(Attachment == null) + { + return; + } + + _deleteInProgress = true; + try + { + var apiResponse = await BlazorChatApi.Attachment.DeleteAttachment(Attachment.Id); + + if (!apiResponse.IsSuccessStatusCode ) + { + throw new Exception($"Failed to delete attachment. {apiResponse.StatusCode} {apiResponse.ReasonPhrase}"); + } + + if (!apiResponse.Content) + { + throw new Exception($"Failed to delete attachment. {apiResponse.StatusCode} {apiResponse.ReasonPhrase}"); + } + + if (apiResponse.IsSuccessStatusCode) + { //Only invoke when the delete was successful. + //The event handler will remove the attachment from the list. + await OnFileDeleted.InvokeAsync(Attachment); + } + + } + catch (Exception ex) + { + Console.WriteLine($"{ex}"); + } + _deleteInProgress = false; + } + + /// + /// The to display on the attachment. + /// + private PresenceStatus _presenceStatus + { + get + { + if (Attachment == null) + { + return PresenceStatus.Busy; + + } + + var presenceStatus = Attachment.Status switch + { + AttachmentStatus.Pending => PresenceStatus.Away, + AttachmentStatus.Uploaded => PresenceStatus.Available, + AttachmentStatus.Failed => PresenceStatus.Busy, + _ => PresenceStatus.Busy + }; + + return presenceStatus; + } + } + + /// + /// The tooltip text to display for the attachment. + /// + private string _presenceLabel + { + get + { + if (Attachment == null) + { + return ""; + + } + + var presenceStatus = Attachment.Status switch + { + AttachmentStatus.Pending => "Pending", + AttachmentStatus.Uploaded => "Ready", + AttachmentStatus.Failed => "Failed", + _ => "Failed" + }; + + return presenceStatus; + } + } + + +} diff --git a/PalmHill.BlazorChat/Client/Components/Attachment/AttachmentList.razor b/PalmHill.BlazorChat/Client/Components/Attachment/AttachmentList.razor new file mode 100644 index 0000000..9a4589b --- /dev/null +++ b/PalmHill.BlazorChat/Client/Components/Attachment/AttachmentList.razor @@ -0,0 +1,22 @@ +@if (Controller!.UploadedFiles.Any()) +{ +
+ @foreach (var file in Controller.UploadedFiles) + { + + } +
+} +@code { + [Parameter] + public ChatService? Controller { get; set; } + + /// + /// Removes the deleted attachment from the 's UploadedFiles list. + /// + /// The deleted + private void _fileDeletedHandler(AttachmentInfo deletedAttachment) + { + Controller!.UploadedFiles.Remove(deletedAttachment); + } +} diff --git a/PalmHill.BlazorChat/Client/Components/Attachment/AttachmentManager.razor b/PalmHill.BlazorChat/Client/Components/Attachment/AttachmentManager.razor new file mode 100644 index 0000000..effb73a --- /dev/null +++ b/PalmHill.BlazorChat/Client/Components/Attachment/AttachmentManager.razor @@ -0,0 +1,74 @@ +@using Microsoft.AspNetCore.Components.Forms +@using Refit +@inject BlazorChatApi BlazorChatApi; + + + + + + + + + + +@code { + + [Parameter] + public ChatService? Controller { get; set; } + + /// + /// Uploads files to the server when the user selects them. + /// Adds to the 's UploadedFiles list. + /// + /// The selected files. + private async Task _uploadFiles(InputFileChangeEventArgs e) + { + + var files = e.GetMultipleFiles(); + var uploadedCount = 0; + var uploadTasks = new List(); + + foreach (var file in files) + { + var attachmentInfo = new AttachmentInfo(); + attachmentInfo.ConversationId = Controller?.WebSocketChatConnection?.ConversationId; + attachmentInfo.Name = file.Name; + attachmentInfo.Size = file.Size; + attachmentInfo.ContentType = file.ContentType; + attachmentInfo.Status = AttachmentStatus.Pending; + + Controller!.UploadedFiles.Add(attachmentInfo); + + var uploadTask = new Task(async () => + { + if (attachmentInfo?.ConversationId is null) + { + attachmentInfo!.Status = AttachmentStatus.Failed; + return; + } + + var stream = file.OpenReadStream(10000000); + var streamPart = new StreamPart(stream, file.Name, file.ContentType); + var apiResponse = await BlazorChatApi.Attachment.AddAttachment(attachmentInfo.ConversationId.Value, attachmentInfo.Id, streamPart); + uploadedCount++; + + if (!apiResponse.IsSuccessStatusCode) + { + attachmentInfo.Status = AttachmentStatus.Failed; + } + }); + + uploadTasks.Add(uploadTask); + } + + foreach (var uploadTask in uploadTasks) + { + uploadTask.Start(); + await uploadTask; + } + } + +} diff --git a/PalmHill.BlazorChat/Client/Components/Attachment/FileInput.razor b/PalmHill.BlazorChat/Client/Components/Attachment/FileInput.razor new file mode 100644 index 0000000..96640a3 --- /dev/null +++ b/PalmHill.BlazorChat/Client/Components/Attachment/FileInput.razor @@ -0,0 +1,52 @@ +@using Microsoft.AspNetCore.Components.Forms +@using PalmHill.BlazorChat.ApiClient +@using PalmHill.BlazorChat.Shared.Models +@using Refit +@inject BlazorChatApi BlazorChatApi + + + + + +
+
Drag and Drop Files
+
+ + Select Files + +
+
+ + + + + +@code +{ + [Parameter] + public ChatService? Controller { get; set; } + + /// + /// Event callback to invoke when the user selects files. + /// + [Parameter] + public EventCallback OnInputFileChange { get; set; } + + + +} \ No newline at end of file diff --git a/PalmHill.BlazorChat/Client/Components/Chat.razor b/PalmHill.BlazorChat/Client/Components/Chat.razor deleted file mode 100644 index 9ca1ff0..0000000 --- a/PalmHill.BlazorChat/Client/Components/Chat.razor +++ /dev/null @@ -1,135 +0,0 @@ -@using Microsoft.AspNetCore.SignalR.Client -@inject NavigationManager Navigation -@implements IAsyncDisposable -@using Markdig -@using PalmHill.BlazorChat.Shared.Models - -@inherits FluentComponentBase - -
- @foreach (var mr in modelResponses) - { - - -
- -
- -
- @mr.Prompt -
-
- -
- - - -
- - - -
- -
- -
-
- -
- } - -
-
- - - -@code -{ - - - private HubConnection? hubConnection; - private List modelResponses = new List(); - - protected override async Task OnInitializedAsync() - { - - hubConnection = new HubConnectionBuilder() - .WithUrl(Navigation.ToAbsoluteUri("/chathub")) - .Build(); - - hubConnection.On("ReceiveModelString", (messageId, message) => - { - var currentModelResponse = modelResponses.Single(mr => mr.PromptId == messageId); - currentModelResponse.AddResponseString(message); - InvokeAsync(StateHasChanged); - }); - - hubConnection.On("MessageComplete", (messageId, status) => - { - var currentModelResponse = modelResponses.Single(mr => mr.PromptId == messageId); - currentModelResponse.CompleteResponse(); - - InvokeAsync(StateHasChanged); - }); - - await hubConnection.StartAsync(); - } - - public async Task Send(string message, UISettings inferenceSettings) - { - if (string.IsNullOrWhiteSpace(message)) - { - return; - } - - if (hubConnection is not null) - { - var newMessage = new ModelResponse() { Prompt = message }; - modelResponses.Add(newMessage); - StateHasChanged(); - - var chatConversation = GetChatConversation(modelResponses); - chatConversation.Settings = inferenceSettings; - chatConversation.SystemMessage = inferenceSettings.SystemMessage; - await hubConnection.SendAsync("SendPrompt", newMessage.PromptId, chatConversation); - } - } - - public ChatConversation GetChatConversation(List modelResponses) - { - var chatConversation = new ChatConversation(); - - foreach (var mr in modelResponses) - { - var userMessage = new ChatMessage(); - userMessage.Message = mr.Prompt; - userMessage.Role = ChatMessageRole.User; - chatConversation.ChatMessages.Add(userMessage); - - if (mr.IsComplete) - { - var modelMessage = new ChatMessage(); - modelMessage.Message = mr.Resonse; - modelMessage.Role = ChatMessageRole.Assistant; - chatConversation.ChatMessages.Add(modelMessage); - - } - - } - - return chatConversation; - } - - public bool IsConnected => - hubConnection?.State == HubConnectionState.Connected; - - public async ValueTask DisposeAsync() - { - if (hubConnection is not null) - { - await hubConnection.DisposeAsync(); - } - } - - -} diff --git a/PalmHill.BlazorChat/Client/Components/Chat/ChatInput.razor b/PalmHill.BlazorChat/Client/Components/Chat/ChatInput.razor new file mode 100644 index 0000000..00946ea --- /dev/null +++ b/PalmHill.BlazorChat/Client/Components/Chat/ChatInput.razor @@ -0,0 +1,109 @@ +@using Microsoft.AspNetCore.Components.Forms +@using System.Collections.Concurrent +@inject IJSRuntime JSRuntime + + + + + +
+ + + +
+
+ +
+
+ @if (Controller?.CanSend == true) + { + + + } + + @if (Controller?.CanStop == true) + { + + + } + +
+
+
+
+
+@code {
+    [Parameter]
+    public ChatService? Controller { get; set; }
+
+    /// <summary>
+    /// Reference to the main text area. Used to resize it.
+    /// </summary>
+    private ElementReference textAreaElement;
+
+    /// <summary>
+    /// Sends the prompt to the server.
+    /// </summary>
+    private async Task _sendPrompt()
+    {
+        await _sizeTextArea();
+        await Controller!.SendPrompt();
+    }
+
+    /// <summary>
+    /// Cancels the text generation.
+    /// </summary>
+    private async Task _cancelTextGeneration()
+    {
+        await Controller!.CancelTextGeneration();
+    }
+
+    /// <summary>
+    /// Handles key press events from the main text area.
+    /// Sends the prompt on Enter.
+    /// </summary>
+    /// <param name="e">The key pressed.</param>
+    private async Task _handleKeyPress(KeyboardEventArgs e)
+    {
+        if (e.Key == "Enter" && !e.ShiftKey)
+        {
+            await _sendPrompt();
+        }
+    }
+
+    /// <summary>
+    /// Resizes the main text area.
+    /// </summary>
+    private async Task _sizeTextArea()
+    {
+        await JSRuntime.InvokeVoidAsync("textAreaAdjust", textAreaElement);
+    }
+
+}
diff --git a/PalmHill.BlazorChat/Client/Components/Chat/ChatMessageList.razor b/PalmHill.BlazorChat/Client/Components/Chat/ChatMessageList.razor
new file mode 100644
index 0000000..1e2643a
--- /dev/null
+++ b/PalmHill.BlazorChat/Client/Components/Chat/ChatMessageList.razor
@@ -0,0 +1,53 @@
+@using Microsoft.AspNetCore.SignalR.Client
+@inject NavigationManager Navigation
+@using Markdig
+@inject ApiClient.BlazorChatApi BlazorChatApi;
+
+@inherits FluentComponentBase
+
+ @* Display each prompt in a card. Display each response in a card. *@ + @foreach (var promptWithResponse in Controller!.WebsocketChatMessages) + { + + +
+ +
+ +
+ @promptWithResponse.Prompt +
+
+ +
+ + + +
+ + + +
+ +
+ +
+
+ +
+ } + +
+
+ + + +@code +{ + + [Parameter] + public ChatService? Controller { get; set; } + +} diff --git a/PalmHill.BlazorChat/Client/Components/ChatInput.razor b/PalmHill.BlazorChat/Client/Components/ChatInput.razor deleted file mode 100644 index 78e3a5e..0000000 --- a/PalmHill.BlazorChat/Client/Components/ChatInput.razor +++ /dev/null @@ -1,107 +0,0 @@ -@using PalmHill.BlazorChat.Shared.Models -@inject IDialogService DialogService -@inject IJSRuntime JSRuntime -@inject ThemeControl ThemeControl - - - - -
- -
-
- -
-
- Send -
-
-
- -@code { - private ElementReference textAreaElement; - private string messageInput = string.Empty; - private FluentButton? sendButton; - private Chat? _chatRenderer; - public Chat? ChatRenderer { get { return _chatRenderer; } } - public UISettings UISettings { get; set; } = new UISettings(); - - public void AttachToChat(Chat chat) - { - _chatRenderer = chat; - } - - private async Task ShowSettings() - { - - DialogParameters parameters = new() - { - - Title = $"Settings", - PrimaryAction = "Save", - PrimaryActionEnabled = true, - Width = "500px", - TrapFocus = true, - Modal = true, - PreventScroll = true, - }; - var currentSettingsCopy = new UISettings - { - FrequencyPenalty = UISettings.FrequencyPenalty, - MaxLength = UISettings.MaxLength, - PresencePenalty = UISettings.PresencePenalty, - Temperature = UISettings.Temperature, - TopP = UISettings.TopP, - SystemMessage = UISettings.SystemMessage, - DarkMode = UISettings.DarkMode - }; - var dialog = await DialogService.ShowDialogAsync(currentSettingsCopy, parameters); - var dialogResult = await dialog.Result; - - if (dialogResult?.Cancelled == true) - { - //Reset the theme if cancel. - await ThemeControl.ChangeTheme(UISettings.DarkMode); - } - else - { - //Save the settings. - UISettings = (UISettings?)dialogResult?.Data ?? new UISettings(); - } - - } - - private async Task Send() - { - if (ChatRenderer == null) - { - return; - } - - await ChatRenderer.Send(messageInput, UISettings); - messageInput = string.Empty; - StateHasChanged(); - await SizeTextArea(); - - } - - private async Task HandleKeyPress(KeyboardEventArgs e) - { - if (ChatRenderer == null) - { - return; - } - - if (e.Key == "Enter" && !e.ShiftKey) - { - await Send(); - } - - await SizeTextArea(); - } - - private async Task SizeTextArea() - { - await JSRuntime.InvokeVoidAsync("textAreaAdjust", textAreaElement); - } -} diff --git a/PalmHill.BlazorChat/Client/Components/ChatSettings.razor b/PalmHill.BlazorChat/Client/Components/ChatSettings.razor deleted file mode 100644 index 6913c4a..0000000 --- a/PalmHill.BlazorChat/Client/Components/ChatSettings.razor +++ /dev/null @@ -1,51 +0,0 @@ -@inject ThemeControl ThemeControl -@implements IDialogContentComponent - -@using Microsoft.FluentUI.AspNetCore.Components.DesignTokens -@using PalmHill.BlazorChat.Shared.Models - - - - - - - - - - - - - - - - - - - - - - - - - - - - -@code { - - - [Parameter] - public UISettings Content { get; set; } = new UISettings(); - [CascadingParameter] - public FluentDialog? Dialog { get; set; } - - public async Task ChangeThemeChecked(bool darkModeChecked) - { - Content.DarkMode = darkModeChecked; - await ThemeControl.ChangeTheme(darkModeChecked); - - } - - - -} diff --git a/PalmHill.BlazorChat/Client/Components/InfoPanel.razor b/PalmHill.BlazorChat/Client/Components/InfoPanel.razor new file mode 100644 index 0000000..1245bcd --- /dev/null +++ b/PalmHill.BlazorChat/Client/Components/InfoPanel.razor @@ -0,0 +1,63 @@ +@if (IsOpen) +{ + + +
@Title
+ + + +
+ + @ChildContent +
+ + +} + + + +@code { + /// + /// Title of the panel. + /// + [Parameter] + public string Title { get; set; } = string.Empty; + [Parameter] + public RenderFragment? ChildContent { get; set; } + + /// + /// Is the panel open? + /// + [Parameter] + public bool IsOpen { get; set; } = false; + + + /// + /// Show the panel. + /// + public void Show() + { + IsOpen = true; + StateHasChanged(); + } + + /// + /// Hide the panel. + /// + public void Hide() + { + IsOpen = false; + StateHasChanged(); + } + + /// + /// Toggle the panel. + /// + public void Toggle() + { + IsOpen = !IsOpen; + StateHasChanged(); + } + +} + diff --git a/PalmHill.BlazorChat/Client/Components/MarkdownSection.razor b/PalmHill.BlazorChat/Client/Components/Markdown/MarkdownSection.razor similarity index 100% rename from PalmHill.BlazorChat/Client/Components/MarkdownSection.razor rename to PalmHill.BlazorChat/Client/Components/Markdown/MarkdownSection.razor diff --git a/PalmHill.BlazorChat/Client/Components/MarkdownSection.razor.cs b/PalmHill.BlazorChat/Client/Components/Markdown/MarkdownSection.razor.cs similarity index 78% rename from PalmHill.BlazorChat/Client/Components/MarkdownSection.razor.cs rename to PalmHill.BlazorChat/Client/Components/Markdown/MarkdownSection.razor.cs index 0049ba4..7a91e53 100644 --- a/PalmHill.BlazorChat/Client/Components/MarkdownSection.razor.cs +++ b/PalmHill.BlazorChat/Client/Components/Markdown/MarkdownSection.razor.cs @@ -2,10 +2,7 @@ using Microsoft.AspNetCore.Components; using Microsoft.FluentUI.AspNetCore.Components; -using Microsoft.FluentUI.AspNetCore.Components.Infrastructure; -using static Microsoft.FluentUI.AspNetCore.Components.Icons.Filled.Size20; - -namespace PalmHill.BlazorChat.Client.Components; +namespace PalmHill.BlazorChat.Client.Components.Markdown; public partial class MarkdownSection : FluentComponentBase @@ -13,10 +10,6 @@ public partial class MarkdownSection : FluentComponentBase private string? _content; private bool _raiseContentConverted; - //[Inject] - //private IStaticAssetService StaticAssetService { get; set; } = default!; - - /// /// Gets or sets the Markdown content /// @@ -24,11 +17,8 @@ public partial class MarkdownSection : FluentComponentBase public string? Content { get; set; } /// - /// Gets or sets asset to read the Markdown from + /// Event callback for when the Markdown content is converted to HTML. /// - //[Parameter] - //public string? FromAsset { get; set; } - [Parameter] public EventCallback OnContentConverted { get; set; } @@ -63,12 +53,6 @@ protected override void OnInitialized() protected override async Task OnAfterRenderAsync(bool firstRender) { - - //if (firstRender && !string.IsNullOrEmpty(FromAsset)) - //{ - // InternalContent = await StaticAssetService.GetAsync(FromAsset); - //} - if (_raiseContentConverted) { _raiseContentConverted = false; @@ -82,9 +66,7 @@ protected override async Task OnAfterRenderAsync(bool firstRender) public void RefreshContent() { - InternalContent = Content; - } diff --git a/PalmHill.BlazorChat/Client/Components/Markdown/ModelMarkdown.razor b/PalmHill.BlazorChat/Client/Components/Markdown/ModelMarkdown.razor new file mode 100644 index 0000000..b5df103 --- /dev/null +++ b/PalmHill.BlazorChat/Client/Components/Markdown/ModelMarkdown.razor @@ -0,0 +1,71 @@ + + +@code { + + /// + /// The markdown to render. + /// + private string _inferenceMarkdownToRender { get; set; } = string.Empty; + + /// + /// The to display. + /// + [Parameter] + public WebSocketChatMessage? 
WebSocketChatMessage { get; set; } + + /// + /// The markdown renderer reference to refresh the content. + /// + private MarkdownSection? mdSection { get; set; } + + /// + /// Event handler for when the event is fired. + /// + /// + /// + private void _handleResponseChanged(object? sender, EventArgs? e) + { + // Update the markdownToRender variable based on the new response + _inferenceMarkdownToRender = WebSocketChatMessage?.Resonse ?? string.Empty; + mdSection?.RefreshContent(); + // Invoke a StateHasChanged to refresh the UI if needed + StateHasChanged(); + } + + /// + /// Setup the response event handling. + /// + protected override void OnParametersSet() + { + // Unsubscribe from the old ModelResponse's event to avoid memory leaks + if (WebSocketChatMessage != null) + { + WebSocketChatMessage.ResponseChanged -= _handleResponseChanged; + WebSocketChatMessage.ResponseCompleted -= _handleResponseChanged; + } + + // Assign the new ModelResponse object + base.OnParametersSet(); + + // Subscribe to the new ModelResponse's event + if (WebSocketChatMessage != null) + { + WebSocketChatMessage.ResponseChanged += _handleResponseChanged; + WebSocketChatMessage.ResponseCompleted += _handleResponseChanged; + + // Update the UI with the current state of the response + _inferenceMarkdownToRender = WebSocketChatMessage.Resonse; + } + } + + // Ensure to unsubscribe when the component is disposed to avoid memory leaks + public void Dispose() + { + if (WebSocketChatMessage != null) + { + WebSocketChatMessage.ResponseChanged -= _handleResponseChanged; + WebSocketChatMessage.ResponseCompleted -= _handleResponseChanged; + } + } +} + diff --git a/PalmHill.BlazorChat/Client/Components/ModelMarkdown.razor b/PalmHill.BlazorChat/Client/Components/ModelMarkdown.razor deleted file mode 100644 index e85778d..0000000 --- a/PalmHill.BlazorChat/Client/Components/ModelMarkdown.razor +++ /dev/null @@ -1,54 +0,0 @@ - - -@code { - - private string markdownToRender { get; set; } = string.Empty; - - [Parameter] - public ModelResponse? ModelResponse { get; set; } - - private MarkdownSection? mdSection { get; set; } - - private void HandleResponseChanged(object? sender, EventArgs? e) - { - // Update the markdownToRender variable based on the new response - markdownToRender = ModelResponse?.Resonse ?? 
string.Empty; - mdSection?.RefreshContent(); - // Invoke a StateHasChanged to refresh the UI if needed - StateHasChanged(); - } - - protected override void OnParametersSet() - { - // Unsubscribe from the old ModelResponse's event to avoid memory leaks - if (ModelResponse != null) - { - ModelResponse.ResponseChanged -= HandleResponseChanged; - ModelResponse.ResponseCompleted -= HandleResponseChanged; - } - - // Assign the new ModelResponse object - base.OnParametersSet(); - - // Subscribe to the new ModelResponse's event - if (ModelResponse != null) - { - ModelResponse.ResponseChanged += HandleResponseChanged; - ModelResponse.ResponseCompleted += HandleResponseChanged; - - // Update the UI with the current state of the response - markdownToRender = ModelResponse.Resonse; - } - } - - // Ensure to unsubscribe when the component is disposed to avoid memory leaks - public void Dispose() - { - if (ModelResponse != null) - { - ModelResponse.ResponseChanged -= HandleResponseChanged; - ModelResponse.ResponseCompleted -= HandleResponseChanged; - } - } -} - diff --git a/PalmHill.BlazorChat/Client/Components/Settings/ChatSettings.razor b/PalmHill.BlazorChat/Client/Components/Settings/ChatSettings.razor new file mode 100644 index 0000000..90a17d1 --- /dev/null +++ b/PalmHill.BlazorChat/Client/Components/Settings/ChatSettings.razor @@ -0,0 +1,97 @@ +@inject IDialogService DialogService +@inject ThemeService ThemeControl +@implements IDialogContentComponent + +@using Microsoft.FluentUI.AspNetCore.Components.DesignTokens + + + + + + + + + + + + + + + + + + + + + + + +@code { + + /// + /// The to display. + /// + [Parameter] + public LocalStorageSettings Content { get; set; } = new LocalStorageSettings(); + + /// + /// The the component is displayed in. + /// + [CascadingParameter] + public FluentDialog? Dialog { get; set; } + + + /// + /// Event handler for when is changed. + /// + /// Is dark mode on + public async Task ChangeThemeChecked(bool darkModeChecked) + { + Content.DarkMode = darkModeChecked; + await ThemeControl.ChangeTheme(darkModeChecked); + + } + + /// + /// The default for the component. + /// + public static DialogParameters DefaultDialogParameters = new() + { + + Title = $"Settings", + PrimaryAction = "Save", + PrimaryActionEnabled = true, + Width = "500px", + TrapFocus = true, + Modal = true, + PreventScroll = true, + }; + + + +} diff --git a/PalmHill.BlazorChat/Client/Components/SettingContainer.razor b/PalmHill.BlazorChat/Client/Components/Settings/RangeSetting.razor similarity index 72% rename from PalmHill.BlazorChat/Client/Components/SettingContainer.razor rename to PalmHill.BlazorChat/Client/Components/Settings/RangeSetting.razor index 0e12798..755e776 100644 --- a/PalmHill.BlazorChat/Client/Components/SettingContainer.razor +++ b/PalmHill.BlazorChat/Client/Components/Settings/RangeSetting.razor @@ -6,7 +6,14 @@ @Label
- +
@@ -35,8 +42,11 @@
     [Parameter]
     public EventCallback ValueChanged { get; set; }
 
-
+    /// <summary>
+    /// Event handler for when the text input changes. Triggers the ValueChanged event.
+    /// </summary>
+    /// <param name="e"></param>
 
     private async Task OnTextChanged(ChangeEventArgs e)
     {
         if (e.Value == null)
diff --git a/PalmHill.BlazorChat/Client/Components/SideNav.razor b/PalmHill.BlazorChat/Client/Components/SideNav.razor
deleted file mode 100644
index d2cd701..0000000
--- a/PalmHill.BlazorChat/Client/Components/SideNav.razor
+++ /dev/null
@@ -1,5 +0,0 @@
-

SideNav

- -@code { - -} diff --git a/PalmHill.BlazorChat/Client/MainLayout.razor b/PalmHill.BlazorChat/Client/MainLayout.razor index 518cd90..d563c9a 100644 --- a/PalmHill.BlazorChat/Client/MainLayout.razor +++ b/PalmHill.BlazorChat/Client/MainLayout.razor @@ -1,18 +1,9 @@ @using Microsoft.FluentUI.AspNetCore.Components.DesignTokens + @inherits LayoutComponentBase -@inject ThemeControl ThemeControl +@inject ThemeService ThemeControl - - - - Blazor Chat - - - - API - - @Body @@ -21,19 +12,3 @@ -@code { - private bool darkMode = false; - public static bool Expanded { get; set; } = true; - protected override async Task OnAfterRenderAsync(bool firstRender) - { - if (firstRender) - { - - await ThemeControl.ChangeTheme(darkMode); - StateHasChanged(); - } - } - - - -} diff --git a/PalmHill.BlazorChat/Client/Models/LocalStorageSettings.cs b/PalmHill.BlazorChat/Client/Models/LocalStorageSettings.cs new file mode 100644 index 0000000..92594ea --- /dev/null +++ b/PalmHill.BlazorChat/Client/Models/LocalStorageSettings.cs @@ -0,0 +1,32 @@ +using PalmHill.BlazorChat.Shared.Models; + +namespace PalmHill.BlazorChat.Client.Models +{ + /// + /// Settings that are stored in the browser's local storage. + /// + public class LocalStorageSettings + { + /// + /// Parameters for the inference request. + /// + public InferenceSettings InferenceSettings { get; set; } = new InferenceSettings(); + + /// + /// System message for the inference request. + /// + public string SystemMessage { get; set; } = "You are a helpful assistant."; + + /// + /// Dark mode. + /// + public bool DarkMode { get; set; } = false; + + /// + /// Settings version. Used to control migration of settings when there is a new version. + /// + public int SettingsVersion { get; set; } = CurrentSettingsVersion; + + public const int CurrentSettingsVersion = 1; + } +} diff --git a/PalmHill.BlazorChat/Client/Models/ModelExtensions.cs b/PalmHill.BlazorChat/Client/Models/ModelExtensions.cs new file mode 100644 index 0000000..6976959 --- /dev/null +++ b/PalmHill.BlazorChat/Client/Models/ModelExtensions.cs @@ -0,0 +1,40 @@ +using PalmHill.BlazorChat.Shared.Models; + +namespace PalmHill.BlazorChat.Client.Models +{ + public static class ModelExtensions + { + /// + /// Creates a copy of the object. + /// + /// The object to copy. + /// A new instance of with values from . + public static LocalStorageSettings CreateCopy(this LocalStorageSettings localStorageSettings) + { + var copy = new LocalStorageSettings(); + copy.DarkMode = localStorageSettings.DarkMode; + copy.InferenceSettings = localStorageSettings.InferenceSettings.CreateCopy(); + copy.SettingsVersion = localStorageSettings.SettingsVersion; + copy.SystemMessage = localStorageSettings.SystemMessage; + return copy; + } + + /// + /// Creates a copy of the object. + /// + /// The object top copy. + /// A new instance of with values from . 
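+        /// (Called by LocalStorageSettings.CreateCopy above; copying the inference
+        /// parameters keeps dialog edits from mutating the saved settings.)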
+ public static InferenceSettings CreateCopy(this InferenceSettings inferenceSettings) + { + var copy = new InferenceSettings(); + copy.MaxLength = inferenceSettings.MaxLength; + copy.PresencePenalty = inferenceSettings.PresencePenalty; + copy.Temperature = inferenceSettings.Temperature; + copy.TopP = inferenceSettings.TopP; + copy.FrequencyPenalty = inferenceSettings.FrequencyPenalty; + return copy; + + } + + } +} diff --git a/PalmHill.BlazorChat/Client/ModelResponse.cs b/PalmHill.BlazorChat/Client/Models/WebSocketChatMessage.cs similarity index 74% rename from PalmHill.BlazorChat/Client/ModelResponse.cs rename to PalmHill.BlazorChat/Client/Models/WebSocketChatMessage.cs index d2dc3aa..36d9926 100644 --- a/PalmHill.BlazorChat/Client/ModelResponse.cs +++ b/PalmHill.BlazorChat/Client/Models/WebSocketChatMessage.cs @@ -1,17 +1,20 @@ -using System; -using System.Collections.Generic; - -namespace PalmHill.BlazorChat.Client + +namespace PalmHill.BlazorChat.Client.Models { /// - /// Represents a response from the chat model. + /// Represents prompt with a response. /// - public class ModelResponse + public class WebSocketChatMessage { /// /// Gets or sets the unique identifier for the prompt. /// - public Guid PromptId { get; set; } = Guid.NewGuid(); + public Guid Id { get; set; } = Guid.NewGuid(); + + /// + /// The unique identifier for the conversation. + /// + public Guid? ConversationId { get; set; } /// /// Gets or sets the prompt text. @@ -24,10 +27,15 @@ public class ModelResponse public List ResponseStrings { get; set; } = new List(); /// - /// Gets or sets a value indicating whether the response is complete. + /// Response is complete. /// public bool IsComplete { get; set; } = false; + /// + /// Response is successful. + /// + public bool Success { get; private set; } = false; + /// /// Occurs when the response changes. /// @@ -39,7 +47,7 @@ public class ModelResponse public event EventHandler? ResponseCompleted; /// - /// Gets the full response text. + /// Full response text. /// public string Resonse { @@ -63,9 +71,10 @@ public void AddResponseString(string responseString) /// /// Marks the response as complete and raises the event. /// - public void CompleteResponse() + public void CompleteResponse(bool success) { IsComplete = true; + Success = success; ResponseCompleted?.Invoke(this, EventArgs.Empty); } } diff --git a/PalmHill.BlazorChat/Client/Pages/Index.razor b/PalmHill.BlazorChat/Client/Pages/Index.razor index 4a475d0..bc182a5 100644 --- a/PalmHill.BlazorChat/Client/Pages/Index.razor +++ b/PalmHill.BlazorChat/Client/Pages/Index.razor @@ -1,26 +1,52 @@ @page "/" +@inject ChatService Controller; +@inject LocalStorageService LocalStorage; + + + + + Blazor Chat + + API + +
- - + + - +
+ + + + + @code { - private Chat? ChatRenderer { get; set; } - private ChatInput? ChatInput { get; set; } - protected override void OnAfterRender(bool firstRender) + + protected override async Task OnInitializedAsync() { + attachControllerEvents(); + await LocalStorage.SyncTheme(); + await Controller.StartChat(); + } + + private void attachControllerEvents() + { + Controller.OnStateChange += Controller_OnStateChange; + } - if (firstRender && ChatRenderer != null) - { - ChatInput?.AttachToChat(ChatRenderer); - } + public void Controller_OnStateChange(object? sender, bool bs) + { + StateHasChanged(); } + } diff --git a/PalmHill.BlazorChat/Client/PalmHill.BlazorChat.Client.csproj b/PalmHill.BlazorChat/Client/PalmHill.BlazorChat.Client.csproj index bf02c1c..3b21213 100644 --- a/PalmHill.BlazorChat/Client/PalmHill.BlazorChat.Client.csproj +++ b/PalmHill.BlazorChat/Client/PalmHill.BlazorChat.Client.csproj @@ -1,4 +1,4 @@ - + net8.0 @@ -7,14 +7,15 @@ + - - - + + + @@ -22,6 +23,7 @@ + diff --git a/PalmHill.BlazorChat/Client/Program.cs b/PalmHill.BlazorChat/Client/Program.cs index b595ebe..bc0386c 100644 --- a/PalmHill.BlazorChat/Client/Program.cs +++ b/PalmHill.BlazorChat/Client/Program.cs @@ -1,20 +1,33 @@ +using Blazored.LocalStorage; using Microsoft.AspNetCore.Components.Web; using Microsoft.AspNetCore.Components.WebAssembly.Hosting; using Microsoft.FluentUI.AspNetCore.Components; using Microsoft.FluentUI.AspNetCore.Components.DesignTokens; +using PalmHill.BlazorChat.ApiClient; using PalmHill.BlazorChat.Client; +using PalmHill.BlazorChat.Client.Services; var builder = WebAssemblyHostBuilder.CreateDefault(args); builder.RootComponents.Add("#app"); builder.RootComponents.Add("head::after"); -builder.Services.AddHttpClient("PlamHill.BlazorChat.ServerAPI", client => client.BaseAddress = new Uri(builder.HostEnvironment.BaseAddress)); -builder.Services.AddFluentUIComponents(); // Supply HttpClient instances that include access tokens when making requests to the server project +builder.Services.AddHttpClient("PlamHill.BlazorChat.ServerAPI", client => client.BaseAddress = new Uri(builder.HostEnvironment.BaseAddress)); builder.Services.AddScoped(sp => sp.GetRequiredService().CreateClient("PlamHill.BlazorChat.ServerAPI")); +builder.Services.AddFluentUIComponents(); +builder.Services.AddBlazoredLocalStorage(); + +//Add BlazorChatApi for operating the API. +builder.Services.AddScoped(); + +//Add ThemeControler Service +builder.Services.AddSingleton(); + +//Add LocalStorageService +builder.Services.AddScoped(); -//Add ThemeControler -builder.Services.AddSingleton(); +//Add ChatService +builder.Services.AddScoped(); await builder.Build().RunAsync(); diff --git a/PalmHill.BlazorChat/Client/README.md b/PalmHill.BlazorChat/Client/README.md index ae99571..0d2107a 100644 --- a/PalmHill.BlazorChat/Client/README.md +++ b/PalmHill.BlazorChat/Client/README.md @@ -3,38 +3,36 @@ ## Introduction The Client folder contains the Blazor WebAssembly client application for the PlamHill.BlazorChat project. This application is responsible for the user interface and client-side logic of the chat application. -# Table of Contents -1. [Introduction](#introduction) -2. [Project Structure](#project-structure) - - [Program.cs](#programcs) - - [_Imports.razor](#importsrazor) - - [wwwroot](#wwwroot) - - [Components](#components) - - [Pages](#pages) -3. [Key Components](#key-components) - - [Chat.razor](#chatrazor) - - [ChatInput.razor](#chatinputrazor) - - [MarkdownSection.razor](#markdownsectionrazorcs) +## Table of Contents +1. 
[Project Structure](#project-structure) +2. [Key Components](#key-components) +3. [Key Pages](#key-pages) +4. [Key Services](#key-services) ## Project Structure Here's an overview of the main files and folders in the Client folder: - `Program.cs`: The entry point of the Blazor application. -- `_Imports.razor`: Contains global using directives for the Blazor application. - `wwwroot`: Contains static files for the Blazor application. -- `Components`: Contains the Blazor components used in the application. - `Pages`: Contains the Blazor pages of the application. +- `Components`: Contains the Blazor components used in the application. +- `Models`: Contains the View Models used by the Components and Pages. +- `Services`: Contains the Services used to operate the UI. ## Key Components Here are some of the key components in the Components folder: -- `Chat.razor`: This is the main chat component of the application. It establishes a connection with a SignalR hub, listens for incoming messages from the server, and sends messages to the server when a user sends a chat message. The chat messages are displayed in a list, with each message being represented by a ModelResponse object. -- `ChatInput.razor`: This component handles the input area for the chat. It includes a text area for the user to type their message and a send button to send the message. It also has a button to show the chat settings. The ChatInput component is attached to the Chat component after the first render, allowing it to send messages to the chat. -- `MarkdownSection.razor`: This component is responsible for converting Markdown content to HTML for rendering in the Blazor application. It is used in the Chat component to display the chat messages in a user-friendly format. -- `ModelMarkdown.razor`This component is used to display the response from the model in markdown format. It takes a ModelResponse object as a parameter and listens for changes in the response. When the response changes, it updates the markdown content to be rendered. +- `ChatInput.razor`: This component handles the input area for the chat. It includes a text area for the user to type their message and a send button to send the message. +- `AttachmentManager.razor`: This component is responsible for uploading attachments. It includes the file input, and list of uploaded files. +- `ModelMarkdown.razor`: This component is used to display the response from the inference in markdown format. It takes a ModelResponse object as a parameter and listens for changes in the response. When the response changes, it updates the markdown content to be rendered. +- `Settings.razor`: This component handles user configurable settings (Theme, Inference, System Message). + ## Key Pages Here's a key page in the Pages folder: - -- `Index.razor`: This is the main page of the application. It includes the Chat and ChatInput components, providing the main interface for the chat application. The Chat component is responsible for displaying the chat messages, and the ChatInput component is used for entering and sending new messages. +- `Index.razor`: This is the main chat component of the application. It uses the ChatService.cs to implement its functionality. - `MainLayout.razor`: This is the main layout of the application. It includes a header with the application title and a body where the current page is rendered. The layout is defined using the FluentContainer and FluentRow components from the Fluent UI library, providing a consistent look and feel across the application. 
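+
+The snippet below is a minimal sketch, mirroring `Index.razor` in this PR, of how a page drives the UI through the services described next:
+
+```csharp
+// Inject ChatService (as Controller) and LocalStorageService (as LocalStorage),
+// both registered in Program.cs.
+protected override async Task OnInitializedAsync()
+{
+    // Re-render whenever the service reports a state change.
+    Controller.OnStateChange += (sender, state) => StateHasChanged();
+    await LocalStorage.SyncTheme(); // apply the persisted theme
+    await Controller.StartChat();   // open the WebSocket connection
+}
+```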
+
+## Key Services
+- `ChatService.cs`: The main service provided by Dependency Injection for operating the UI.
+- `WebSocketChatService.cs`: The service used by the ChatService to interact with the server via WebSockets.
diff --git a/PalmHill.BlazorChat/Client/Services/ChatService.cs b/PalmHill.BlazorChat/Client/Services/ChatService.cs
new file mode 100644
index 0000000..0bdfb5f
--- /dev/null
+++ b/PalmHill.BlazorChat/Client/Services/ChatService.cs
@@ -0,0 +1,317 @@
+using Blazored.LocalStorage;
+using Microsoft.AspNetCore.Components;
+using Microsoft.FluentUI.AspNetCore.Components;
+using PalmHill.BlazorChat.ApiClient;
+using PalmHill.BlazorChat.Client.Components.Settings;
+using PalmHill.BlazorChat.Client.Models;
+using PalmHill.BlazorChat.Shared.Models;
+using PalmHill.BlazorChat.Shared.Models.WebSocket;
+using System.Data;
+
+namespace PalmHill.BlazorChat.Client.Services
+{
+    /// <summary>
+    /// The main service that controls the chat UI.
+    /// </summary>
+    public class ChatService
+    {
+
+        /// <summary>
+        /// Main constructor. Uses dependency injection to get the required services.
+        /// </summary>
+        /// <param name="navigationManager"></param>
+        /// <param name="localStorage"></param>
+        /// <param name="dialogService"></param>
+        /// <param name="themeControl"></param>
+        /// <param name="blazorChatApi"></param>
+        public ChatService(
+            NavigationManager navigationManager,
+            LocalStorageService localStorage,
+            IDialogService dialogService,
+            ThemeService themeControl,
+            BlazorChatApi blazorChatApi
+            )
+        {
+
+            _localStorageService = localStorage;
+            _dialogService = dialogService;
+            _themeControl = themeControl;
+            _blazorChatApi = blazorChatApi;
+            _navigationManager = navigationManager;
+            setupWebSocketChatConnection();
+        }
+
+        /// <summary>
+        /// User input from the chat box.
+        /// </summary>
+        public string UserInput { get; set; } = string.Empty;
+        /// <summary>
+        /// Whether the chat is ready to send a message.
+        /// </summary>
+        public bool CanSend { get; set; } = true;
+        /// <summary>
+        /// Whether the chat is ready to stop.
+        /// </summary>
+        public bool CanStop { get; set; } = false;
+        /// <summary>
+        /// Whether the chat is in Attachment mode. Will only reference attached documents.
+        /// </summary>
+        public bool AttachmentsEnabled { get; set; } = false;
+        /// <summary>
+        /// Whether the attachment panel is shown.
+        /// </summary>
+        public bool AttachmentsVisible { get; set; } = false;
+        /// <summary>
+        /// The list of files that have been selected for Chat. This is non-functional for now.
+        /// </summary>
+        public List<AttachmentInfo> SelectedFiles = new List<AttachmentInfo>();
+
+        /// <summary>
+        /// The list of files that have been uploaded for Chat.
+        /// </summary>
+        public List<AttachmentInfo> UploadedFiles = new List<AttachmentInfo>();
+
+        /// <summary>
+        /// The local storage settings.
+        /// </summary>
+        public LocalStorageSettings LocalStorageSettings { get; private set; } = new LocalStorageSettings();
+        /// <summary>
+        /// The list of chat messages, each containing a prompt and its response.
+        /// </summary>
+        public List<WebSocketChatMessage> WebsocketChatMessages { get; private set; } = new List<WebSocketChatMessage>();
+
+        /// <summary>
+        /// The WebSocketChatService that handles the WebSocket connection.
+        /// </summary>
+        public WebSocketChatService? WebSocketChatConnection { get; private set; }
+
+        /// <summary>
+        /// Event handler for when the state changes.
+        /// </summary>
+        public event EventHandler<bool>? OnStateChange;
+
+
+
+        private readonly LocalStorageService _localStorageService;
+        private readonly IDialogService _dialogService;
+        private readonly ThemeService _themeControl;
+        private readonly BlazorChatApi _blazorChatApi;
+        private readonly NavigationManager _navigationManager;
+
+
+        /// <summary>
+        /// Starts the chat over the WebSocket connection.
+        /// </summary>
+        /// <returns></returns>
+        public async Task StartChat()
+        {
+            LocalStorageSettings = await _localStorageService.GetSettings();
+            await WebSocketChatConnection!.StartAsync();
+        }
+
+        /// <summary>
+        /// Sends the current conversation to the server for inference over the WebSocket connection.
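+        /// (Invoked from SendToWebSocketChat below; the request itself is assembled
+        /// from WebsocketChatMessages by the WebSocketChatService.)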
+ /// + /// + private async Task SendInferenceRequest() + { + await WebSocketChatConnection!.SendInferenceRequestAsync(); + } + + /// + /// Sends the prompt to the WebSocketChatConnection or the Document API if in is true. + /// + /// + public async Task SendPrompt() + { + if (AttachmentsEnabled == false) + { + await SendToWebSocketChat(); + } + + if (AttachmentsEnabled == true) + { + await AskDocumentApi(); + } + } + + /// + /// Sends the prompt to the WebSocketChatConnection. + /// + public async Task SendToWebSocketChat() + { + //Set the UI state. + CanSend = false; + CanStop = true; + + var prompt = new WebSocketChatMessage(); + prompt.Prompt = UserInput; + WebsocketChatMessages.Add(prompt); + UserInput = string.Empty; + StateHasChanged(); + await SendInferenceRequest(); + } + + /// + /// Sends the prompt to the Document API. + /// + public async Task AskDocumentApi() + { + + CanSend = false; + CanStop = true; + + var prompt = new WebSocketChatMessage(); + prompt.Prompt = UserInput; + WebsocketChatMessages.Add(prompt); + UserInput = string.Empty; + StateHasChanged(); + + var infrerenceRequest = new InferenceRequest(); + infrerenceRequest.Id = WebSocketChatConnection!.ConversationId; + var chatMessage = new ChatMessage(); + chatMessage.Id = prompt.Id; + chatMessage.Message = prompt.Prompt; + chatMessage.Role = ChatMessageRole.Question; + + infrerenceRequest.ChatMessages.Add(chatMessage); + + var apiResponse = await _blazorChatApi!.Chat.Ask(infrerenceRequest); + + if (apiResponse.IsSuccessStatusCode) + { + var chatMessageResponse = apiResponse.Content; + prompt.AddResponseString(chatMessageResponse?.Message ?? ""); + prompt.CompleteResponse(true); + } + else + { + prompt.CompleteResponse(false); + } + SetReady(); + } + + /// + /// Saves the settings to local storage. + /// + /// + public async Task SaveSettings() + { + await _localStorageService.SaveSettings(LocalStorageSettings); + } + + /// + /// Shows the dialog. + /// + public async Task ShowSettings() + { + var currentSettings = LocalStorageSettings.CreateCopy(); + var dialogParameters = ChatSettings.DefaultDialogParameters; + var dialog = await _dialogService.ShowDialogAsync(currentSettings, dialogParameters); + var dialogResult = await dialog.Result; + + if (dialogResult?.Cancelled == true) + { + //Reset the theme if cancel. + await _localStorageService.SyncTheme(); + } + else + { + //Save the settings. + LocalStorageSettings = (LocalStorageSettings?)dialogResult?.Data ?? new LocalStorageSettings(); + await SaveSettings(); + } + } + + /// + /// Toggles the property. + /// This does not toggle the property. + /// + public void ToggleAttachmentsVisible() + { + AttachmentsVisible = !AttachmentsVisible; + StateHasChanged(); + } + + /// + /// Shows the . + /// + public void ShowAttachments() + { + AttachmentsVisible = true; + StateHasChanged(); + } + + /// + /// Hides the . + /// + public void HideAttachments() + { + AttachmentsVisible = false; + StateHasChanged(); + } + + /// + /// Chat is ready to send a message. + /// Sets the property true. + /// Sets the property false. + /// + public void SetReady() + { + CanSend = true; + CanStop = false; + StateHasChanged(); + } + + /// + /// Cancel the text generation via http request. Effectively stops the chat. 
+ /// + public async Task CancelTextGeneration() + { + var canceled = await _blazorChatApi!.Chat.CancelChat(WebSocketChatConnection!.ConversationId); + + if (canceled.Content) + { + SetReady(); + } + + Console.WriteLine($"CancelTextGeneration failed ({canceled.StatusCode}): {canceled.ReasonPhrase}"); + } + + /// + /// Sets up the property. + /// Configures the event handlers for the . + /// + private void setupWebSocketChatConnection() + { + WebSocketChatConnection = new WebSocketChatService( + _navigationManager.ToAbsoluteUri("/chathub?customUserId=user1"), + WebsocketChatMessages, + _localStorageService + ); + + WebSocketChatConnection.OnInferenceStatusUpdate += (sender, inferenceStatusUpdate) => + { + if (inferenceStatusUpdate.IsComplete == true) + { + SetReady(); + } + }; + + WebSocketChatConnection.OnAttachmentStatusUpdate += (sender, attachmentInfo) => + { + var attachmentInfoToUpdate = UploadedFiles.SingleOrDefault(af => af.Id == attachmentInfo.Id); + attachmentInfoToUpdate!.Status = attachmentInfo.Status; + StateHasChanged(); + }; + } + + /// + /// Invokes the event. + /// + private void StateHasChanged() + { + OnStateChange?.Invoke(this, true); + } + } +} diff --git a/PalmHill.BlazorChat/Client/Services/LocalStorageService.cs b/PalmHill.BlazorChat/Client/Services/LocalStorageService.cs new file mode 100644 index 0000000..82fa4fe --- /dev/null +++ b/PalmHill.BlazorChat/Client/Services/LocalStorageService.cs @@ -0,0 +1,81 @@ +using Blazored.LocalStorage; +using Markdig.Syntax.Inlines; +using PalmHill.BlazorChat.Client.Models; + +namespace PalmHill.BlazorChat.Client.Services +{ + /// + /// Service to handle local storage. + /// + public class LocalStorageService + { + private ILocalStorageService _localStorage; + private ThemeService _themeControl; + + /// + /// Use dependency injection to get the local storage service and the theme service. + /// + /// + /// Used to change the theme on settings get. + public LocalStorageService(ILocalStorageService localStorage, ThemeService themeControl) + { + _localStorage = localStorage; + _themeControl = themeControl; + } + + /// + /// In memory settings. + /// + public LocalStorageSettings LocalStorageSettings { get; private set; } = new LocalStorageSettings(); + + /// + /// Get the settings from local storage. + /// + /// + public async Task GetSettings() + { + LocalStorageSettings = await _getMigratedSettings(); + return LocalStorageSettings; + } + + public async Task SyncTheme() + { + await _themeControl.ChangeTheme(LocalStorageSettings.DarkMode); + } + + /// + /// Save the settings to local storage. + /// + /// + /// + public async Task SaveSettings(LocalStorageSettings localStorageSettings) + { + await _localStorage.SetItemAsync("LocalStorageSettings", localStorageSettings); + } + + /// + /// Gets the settings from local storage and migrates them if necessary. 
+ /// + private async Task _getMigratedSettings() + { + var settingsExist = await _localStorage.ContainKeyAsync("LocalStorageSettings"); + + if (settingsExist) + { + var localStorageSettings = await _localStorage.GetItemAsync("LocalStorageSettings"); + + if (localStorageSettings.SettingsVersion == LocalStorageSettings.CurrentSettingsVersion) + { + return localStorageSettings; + } + else + { + //TODO: Migrate settings + return new LocalStorageSettings(); + } + } + + return new LocalStorageSettings(); + } + } +} diff --git a/PalmHill.BlazorChat/Client/Services/ThemeService.cs b/PalmHill.BlazorChat/Client/Services/ThemeService.cs new file mode 100644 index 0000000..693281a --- /dev/null +++ b/PalmHill.BlazorChat/Client/Services/ThemeService.cs @@ -0,0 +1,49 @@ +using Microsoft.AspNetCore.Components; +using Microsoft.FluentUI.AspNetCore.Components.DesignTokens; + +namespace PalmHill.BlazorChat.Client.Services +{ + + /// + /// Service to handle theme changes. + /// + public class ThemeService + { + /// + /// For DI. + /// + /// From FluentUI + /// From FluentUI + public ThemeService(BaseLayerLuminance baseLayerLuminance, AccentBaseColor accentBaseColor) + { + _baseLayerLuminance = baseLayerLuminance; + _accentBaseColor = accentBaseColor; + } + + private BaseLayerLuminance? _baseLayerLuminance { get; } + + private AccentBaseColor? _accentBaseColor { get; } + + /// + /// Changes the theme. + /// + /// Dark Mode + public async Task ChangeTheme(bool darkMode) + { + + if (darkMode) + { + await _baseLayerLuminance!.WithDefault(0f); + await _accentBaseColor!.WithDefault(new Swatch(255, 105, 180)); + + } + else + { + await _baseLayerLuminance!.WithDefault(1f); + await _accentBaseColor!.WithDefault(new Swatch(255, 105, 180)); + } + + } + + } +} diff --git a/PalmHill.BlazorChat/Client/Services/WebSocketChatService.cs b/PalmHill.BlazorChat/Client/Services/WebSocketChatService.cs new file mode 100644 index 0000000..350eede --- /dev/null +++ b/PalmHill.BlazorChat/Client/Services/WebSocketChatService.cs @@ -0,0 +1,159 @@ +using Microsoft.AspNetCore.SignalR.Client; +using PalmHill.BlazorChat.Client.Models; +using PalmHill.BlazorChat.Shared.Models; +using PalmHill.BlazorChat.Shared.Models.WebSocket; + +namespace PalmHill.BlazorChat.Client.Services +{ + /// + /// Controlls the WebSocket traffic. + /// + public class WebSocketChatService + { + /// + /// Setup the WebSocketChatService + /// + /// + /// The list of to be interacted with. + public WebSocketChatService( + Uri chatHubUri, + List webSocketChatMessages, + LocalStorageService localStorageService + ) + { + _localStorageService = localStorageService; + WebSocketChatMessages = webSocketChatMessages; + HubConnection = new HubConnectionBuilder() + .WithUrl(chatHubUri) + .Build(); + + setupHubConnection(); + } + /// + /// Used for DB correlation. (Later) + /// + public Guid ConversationId { get; } = Guid.NewGuid(); + + private LocalStorageService _localStorageService; + + /// + /// The list of to be interacted with. + /// + public List WebSocketChatMessages { get; } + + /// + /// The SignalR HubConnection. + /// + public HubConnection HubConnection { get; } + + /// + /// Triggered when a is received. + /// + public event EventHandler? OnReceiveInferenceString; + + /// + /// Triggered when a is received. + /// + public event EventHandler? OnInferenceStatusUpdate; + /// + /// Triggered when a is received. + /// + public event EventHandler? OnAttachmentStatusUpdate; + + /// + /// Start the WebSocket connection. 
+ /// + public async Task StartAsync() + { + await HubConnection.StartAsync(); + } + + /// + /// Stop the WebSocket connection. + /// + public async Task StopAsync() + { + await HubConnection.StopAsync(); + } + + /// + /// Send the to the server. + /// + /// + public async Task SendInferenceRequestAsync() + { + var inferenceRequest = await GetInferenceRequestFromWebsocketMessages(); + await HubConnection.SendAsync("InferenceRequest", inferenceRequest); + } + + /// + /// Wire up messages handlers. + /// + private void setupHubConnection() + { + //Whena an InferenceString message is received, add it to the last prompt. + HubConnection.On("ReceiveInferenceString", (inferenceString) => + { + var lastPrompt = WebSocketChatMessages.SingleOrDefault(cm => cm.Id == inferenceString.WebSocketChatMessageId); + lastPrompt?.AddResponseString(inferenceString.InferenceString); + OnReceiveInferenceString?.Invoke(this, inferenceString); + }); + + //When an InferenceStatusUpdate message is received, update the last prompt. + HubConnection.On("InferenceStatusUpdate", (statusUpdate) => + { + var lastPrompt = WebSocketChatMessages.Single(cm => cm.Id == statusUpdate.MessageId); + + if (statusUpdate.IsComplete) + { + lastPrompt.CompleteResponse(statusUpdate.Success ?? false); + } + + OnInferenceStatusUpdate?.Invoke(this, statusUpdate); + }); + + ///When an AttachmentStatusUpdate message is received, update the attachment status. + HubConnection.On("AttachmentStatusUpdate", (attachmentInfo) => + { + OnAttachmentStatusUpdate?.Invoke(this, attachmentInfo); + }); + } + + /// + /// Combine the into a . + /// + /// + /// A new with the combined. And and set from . + /// + private async Task GetInferenceRequestFromWebsocketMessages() + { + var chatConversation = new InferenceRequest(); + var localStorageSettings = await _localStorageService.GetSettings(); + + chatConversation.Settings = localStorageSettings.InferenceSettings; + chatConversation.Id = ConversationId; + chatConversation.SystemMessage = localStorageSettings.SystemMessage; + + foreach (var promptAndResponse in WebSocketChatMessages) + { + var userMessage = new ChatMessage(); + userMessage.Message = promptAndResponse.Prompt; + userMessage.Id = promptAndResponse.Id; + userMessage.Role = ChatMessageRole.User; + chatConversation.ChatMessages.Add(userMessage); + + if (promptAndResponse.IsComplete && promptAndResponse.Success == true) + { + var modelMessage = new ChatMessage(); + modelMessage.Message = promptAndResponse.Resonse; + modelMessage.Role = ChatMessageRole.Assistant; + chatConversation.ChatMessages.Add(modelMessage); + + } + + } + + return chatConversation; + } + } +} diff --git a/PalmHill.BlazorChat/Client/ThemeControl.cs b/PalmHill.BlazorChat/Client/ThemeControl.cs deleted file mode 100644 index 44fa82e..0000000 --- a/PalmHill.BlazorChat/Client/ThemeControl.cs +++ /dev/null @@ -1,38 +0,0 @@ -using Microsoft.AspNetCore.Components; -using Microsoft.FluentUI.AspNetCore.Components.DesignTokens; - -namespace PalmHill.BlazorChat.Client -{ - public class ThemeControl - { - - public ThemeControl(BaseLayerLuminance baseLayerLuminance, AccentBaseColor accentBaseColor) - { - BaseLayerLuminance = baseLayerLuminance; - AccentBaseColor = accentBaseColor; - } - - private BaseLayerLuminance? BaseLayerLuminance { get; set; } - - private AccentBaseColor? 
AccentBaseColor { get; set; } - - - public async Task ChangeTheme(bool darkMode) - { - - if (darkMode) - { - await BaseLayerLuminance!.WithDefault(0f); - await AccentBaseColor!.WithDefault(new Swatch(255, 105, 180)); - - } - else - { - await BaseLayerLuminance!.WithDefault(1f); - await AccentBaseColor!.WithDefault(new Swatch(255, 105, 180)); - } - - } - - } -} diff --git a/PalmHill.BlazorChat/Client/UISettings.cs b/PalmHill.BlazorChat/Client/UISettings.cs deleted file mode 100644 index 9ef8c7e..0000000 --- a/PalmHill.BlazorChat/Client/UISettings.cs +++ /dev/null @@ -1,10 +0,0 @@ -using PalmHill.BlazorChat.Shared.Models; - -namespace PalmHill.BlazorChat.Client -{ - public class UISettings : InferenceSettings - { - public string SystemMessage { get; set; } = "You are a helpful assistant."; - public bool DarkMode { get; set; } = false; - } -} diff --git a/PalmHill.BlazorChat/Client/_Imports.razor b/PalmHill.BlazorChat/Client/_Imports.razor index f12d84e..69cef70 100644 --- a/PalmHill.BlazorChat/Client/_Imports.razor +++ b/PalmHill.BlazorChat/Client/_Imports.razor @@ -6,4 +6,12 @@ @using Microsoft.JSInterop @using PalmHill.BlazorChat.Client @using Microsoft.FluentUI.AspNetCore.Components -@using PalmHill.BlazorChat.Client.Components; \ No newline at end of file +@using PalmHill.BlazorChat.Client.Components; +@using PalmHill.BlazorChat.Client.Components.Attachment; +@using PalmHill.BlazorChat.Client.Components.Chat; +@using PalmHill.BlazorChat.Client.Components.Markdown; +@using PalmHill.BlazorChat.Client.Components.Settings; +@using PalmHill.BlazorChat.Client.Models; +@using PalmHill.BlazorChat.Shared.Models; +@using PalmHill.BlazorChat.Client.Services; +@using PalmHill.BlazorChat.ApiClient \ No newline at end of file diff --git a/PalmHill.BlazorChat/Client/wwwroot/css/app.css b/PalmHill.BlazorChat/Client/wwwroot/css/app.css index a3fad9e..e633446 100644 --- a/PalmHill.BlazorChat/Client/wwwroot/css/app.css +++ b/PalmHill.BlazorChat/Client/wwwroot/css/app.css @@ -53,13 +53,22 @@ h1:focus { .main-textarea { - width:100%; - height:46px; + width: 100%; + height: 100%; + min-height: 40px; border-radius: 10px 10px; - overflow:hidden; - margin-bottom:10px; + overflow: hidden; + margin-bottom: 10px; + margin: 0px 0px; + padding: 0px 0px; + border: none; + font-size: 16px; + outline: none; + line-height: 1.5; } + + #app, html, body { height: 100%; -webkit-overflow-scrolling: touch !important; @@ -67,23 +76,26 @@ h1:focus { } .chat-box { - height:calc(100vh - 125px); - margin-top:80px; + height: calc(100vh - 125px); + margin-top: 80px; } .w-100 { - width:100%; + width: 100%; +} + +.min-h-100 { + min-height: 100%; } .chat-messages { max-width: 768px; margin-left: auto; margin-right: auto; - } .chat-messages fluent-card:not(:first-child) { - margin-top:10px; + margin-top: 10px; } .chat-messages fluent-card:last-child { @@ -103,14 +115,12 @@ h1:focus { @media (max-width: 1000px) { .chat-messages { - width: 100%; - max-width: none; - padding-left: 10px; - padding-right: 10px; + width: calc(100vw - 30px); } .chat-header { - width:calc(100vw - 30px); + width: calc(100vw - 30px); + max-width: auto; } } @@ -118,3 +128,42 @@ h1:focus { display: none !important; } +.input-area { + border: 3px solid #FF69B4; + box-shadow: 0 2px 4px rgba(0, 0, 0, 0.1); + min-height: 46px; + border-radius: 10px 10px; + padding-left: 10px; + padding-right: 10px; +} + +.hidden { + display: none; +} + +.info-panel { + position: fixed; + top: 75px; + bottom: 75px; + width: 250px; + background-color: var(--neutral-layer-3); + 
border-radius: 10px 10px; + padding: 10px 10px; +} + +@media (min-width: 1300px) { + .info-panel { + right: 50%; + transform: translateX(256%); + } +} + +@media (max-width: 1300px) { + .info-panel { + right: 15px; + } +} + + /*.file-input-area { + height: 32px; +}*/ diff --git a/PalmHill.BlazorChat/Client/wwwroot/index.html b/PalmHill.BlazorChat/Client/wwwroot/index.html index 5138ebf..250f462 100644 --- a/PalmHill.BlazorChat/Client/wwwroot/index.html +++ b/PalmHill.BlazorChat/Client/wwwroot/index.html @@ -6,9 +6,9 @@ PlamHill.BlazorChat - + diff --git a/PalmHill.BlazorChat/Server/ApiChat.cs b/PalmHill.BlazorChat/Server/ApiChat.cs deleted file mode 100644 index 49d9252..0000000 --- a/PalmHill.BlazorChat/Server/ApiChat.cs +++ /dev/null @@ -1,107 +0,0 @@ -using LLama.Common; -using LLama; -using Microsoft.AspNetCore.Mvc; -using PalmHill.BlazorChat.Shared.Models; -using PalmHill.Llama; -using System.Diagnostics; - -// For more information on enabling Web API for empty projects, visit https://go.microsoft.com/fwlink/?LinkID=397860 - -namespace PalmHill.BlazorChat.Server -{ - - /// - /// The ApiChat class is responsible for handling chat API requests. - /// - [Route("api/chat", Name = "Chat")] - [ApiController] - public class ApiChat : ControllerBase - { - /// - /// The LLamaWeights instance used for model weights. - /// - LLamaWeights LLamaWeights; - - /// - /// The ModelParams instance used for model parameters. - /// - ModelParams ModelParams; - - /// - /// Initializes a new instance of the class. - /// - /// The LLamaWeights model. - /// The model parameters. - public ApiChat(LLamaWeights model, ModelParams modelParams) - { - LLamaWeights = model; - ModelParams = modelParams; - } - - /// - /// Handles a chat API request. - /// - /// The chat conversation. - /// Returns a string response from the chat model inference. - /// Thrown when there is an error during the chat model inference. - [HttpPost(Name = "Chat")] - public async Task> Chat([FromBody] ChatConversation conversation) - { - var errorText = ""; - - await ThreadLock.InferenceLock.WaitAsync(); - try - { - var response = await DoInference(conversation); - return Ok(response); - } - catch (Exception ex) - { - errorText = ex.ToString(); - } - finally - { - ThreadLock.InferenceLock.Release(); - } - - Console.WriteLine(errorText); - - return StatusCode(500, errorText); - } - - /// - /// Performs inference for a chat conversation. - /// - /// The chat conversation for which to perform inference. - /// Returns the inference result as a string. - private async Task DoInference(ChatConversation conversation) - { - LLamaContext modelContext = LLamaWeights.CreateContext(ModelParams); - var session = modelContext.CreateChatSession(conversation); - var inferenceParams = conversation.GetInferenceParams(); - - var cancelGeneration = new CancellationTokenSource(); - var fullResponse = ""; - var totalTokens = 0; - var inferenceStopwatch = new Stopwatch(); - - inferenceStopwatch.Start(); - var asyncResponse = session.ChatAsync(session.History, - inferenceParams, - cancelGeneration.Token); - await foreach (var text in asyncResponse) - { - totalTokens++; - fullResponse += text; - } - modelContext.Dispose(); - inferenceStopwatch.Stop(); - - Console.WriteLine($"Inference took {inferenceStopwatch.ElapsedMilliseconds}ms and generated {totalTokens} tokens. 
{(totalTokens / (inferenceStopwatch.ElapsedMilliseconds / (float)1000)).ToString("F2")} tokens/second.");
- Console.WriteLine(fullResponse);
-
- return fullResponse;
- }
-
- }
-}
diff --git a/PalmHill.BlazorChat/Server/ChatCancelation.cs b/PalmHill.BlazorChat/Server/ChatCancelation.cs
new file mode 100644
index 0000000..c91477d
--- /dev/null
+++ b/PalmHill.BlazorChat/Server/ChatCancelation.cs
@@ -0,0 +1,9 @@
+using System.Collections.Concurrent;
+
+namespace PalmHill.BlazorChat.Server
+{
+ public static class ChatCancelation
+ {
+ public static ConcurrentDictionary CancelationTokens = new ConcurrentDictionary();
+ }
+}
diff --git a/PalmHill.BlazorChat/Server/PalmHill.BlazorChat.Server.csproj b/PalmHill.BlazorChat/Server/PalmHill.BlazorChat.Server.csproj
index e15ef49..5e3b8ae 100644
--- a/PalmHill.BlazorChat/Server/PalmHill.BlazorChat.Server.csproj
+++ b/PalmHill.BlazorChat/Server/PalmHill.BlazorChat.Server.csproj
@@ -13,6 +13,7 @@
+
diff --git a/PalmHill.BlazorChat/Server/Program.cs b/PalmHill.BlazorChat/Server/Program.cs
index 54c4180..bdcdbf9 100644
--- a/PalmHill.BlazorChat/Server/Program.cs
+++ b/PalmHill.BlazorChat/Server/Program.cs
@@ -1,23 +1,34 @@
-using LLama.Common;
-using LLama;
-using Microsoft.AspNetCore.ResponseCompression;
using PalmHill.Llama;
-using System.Runtime.InteropServices;
-using Microsoft.Extensions.DependencyInjection;
using Microsoft.OpenApi.Models;
using System.Text.Json.Serialization;
-using PalmHill.BlazorChat.Server;
+using PalmHill.BlazorChat.Server.SignalR;
+using Microsoft.AspNetCore.SignalR;
+using PalmHill.LlmMemory;

var builder = WebApplication.CreateBuilder(args);

-//Make Swagger use enums.
+// Initialize Llama
+builder.AddLlamaModel();
+// End Initialize Llama
+
+
+// Initialize Memory
+builder.AddLlmMemory();
+// End Initialize Memory
+
+
builder.Services.AddControllers().AddJsonOptions(options =>
- options.JsonSerializerOptions.Converters.Add(new JsonStringEnumConverter()));
+ //Make Swagger use enums.
+ options.JsonSerializerOptions.Converters.Add(new JsonStringEnumConverter())
+ );
builder.Services.AddControllersWithViews();
builder.Services.AddRazorPages();
builder.Services.AddSignalR();
+//Add SignalR custom user ID provider.
+builder.Services.AddSingleton();
+
////Compress websockets traffic.
//builder.Services.AddResponseCompression(opts =>
//{
@@ -33,32 +44,9 @@
c.UseAllOfToExtendReferenceSchemas();
});

-//get model path from appsettings.json
-string? modelPath = builder.Configuration["DefaultModelPath"]; ; // change in appsettings.json
-//check if model is present
-var modelExsists = System.IO.File.Exists(modelPath);
-if (!modelExsists)
-{
- Console.ForegroundColor = ConsoleColor.Red;
- Console.WriteLine($"Model at path {modelPath} does not exsist.");
- Console.ResetColor();
- Console.WriteLine("Press any key to exit.");
- Console.Read();
- return;
-}
-//Initlize Llama
-ModelParams parameters = new ModelParams(modelPath ?? "")
-{
- ContextSize = 4096,
- GpuLayerCount = 90,
-};
-
-LLamaWeights model = LLamaWeights.LoadFromFile(parameters);
-builder.Services.AddSingleton(model);
-builder.Services.AddSingleton(parameters);
-//End Initlize Llama
+

var app = builder.Build();
diff --git a/PalmHill.BlazorChat/Server/README.md b/PalmHill.BlazorChat/Server/README.md
index d90a80a..6ff3660 100644
--- a/PalmHill.BlazorChat/Server/README.md
+++ b/PalmHill.BlazorChat/Server/README.md
@@ -5,14 +5,16 @@ This is the server-side part of the BlazorChat application.
It is responsible for handling chat API requests, file attachments, and real-time WebSocket communication.

## Directory Structure

- **Program.cs**: The main entry point for the server application. It configures and runs the server application, sets up services like Swagger, SignalR, and response compression, and maps the WebSocket URI.
-- **ApiChat.cs**: Handles chat API requests. It uses a LLamaWeights model and ModelParams to process chat conversations and perform inference.
+- **ApiChatController.cs**: Handles chat API requests. It uses a LLamaWeights model and ModelParams to process chat conversations and perform inference. Also performs retrieval-based chatbot inference.
+- **AttachmentController.cs**: Handles file attachment API requests.
- **WebSocketChat.cs**: Handles real-time chat communication via WebSockets. It uses a LLamaWeights model and ModelParams to process chat conversations and perform inference.

## Key Files

- **Program.cs**: Configures and runs the server application. It sets up services like Swagger, SignalR, and response compression, and maps the WebSocket URI. To run the server application, use the `dotnet run` command in the Server directory.
-- **ApiChat.cs**: Defines the `ApiChat` class which handles chat API requests. It uses a LLamaWeights model and ModelParams to process chat conversations and perform inference. The server exposes a chat API endpoint at `api/chat` which accepts POST requests with a `ChatConversation` object in the request body and returns a string response from the chat model inference.
+- **ApiChatController.cs**: Defines the `ApiChatController` class which handles chat API requests. It uses a LLamaWeights model and ModelParams to process chat conversations and perform inference. The server exposes a chat API endpoint at `api/chat` which accepts POST requests with a `ChatConversation` object in the request body and returns a string response from the chat model inference.
- **WebSocketChat.cs**: Defines the `WebSocketChat` class which handles real-time chat communication via WebSockets. It uses a LLamaWeights model and ModelParams to process chat conversations and perform inference. The server exposes a WebSocket endpoint at `/chathub` for real-time chat communication.
+- **AttachmentController.cs**: Defines the `AttachmentController` class which handles file attachment API requests.

## Running the Server

@@ -25,5 +27,6 @@ The server-side application depends on several packages and projects. These depe
- Microsoft.AspNetCore.Components.WebAssembly.Server
- Swashbuckle.AspNetCore
- PalmHill.Llama
+- PalmHill.LlmMemory
- PlamHill.BlazorChat.Client
- PlamHill.BlazorChat.Shared
diff --git a/PalmHill.BlazorChat/Server/SignalR/CustomUserIdProvider.cs b/PalmHill.BlazorChat/Server/SignalR/CustomUserIdProvider.cs
new file mode 100644
index 0000000..cfaeaa1
--- /dev/null
+++ b/PalmHill.BlazorChat/Server/SignalR/CustomUserIdProvider.cs
@@ -0,0 +1,29 @@
+using Microsoft.AspNetCore.SignalR;
+
+namespace PalmHill.BlazorChat.Server.SignalR
+{
+ public class CustomUserIdProvider : IUserIdProvider
+ {
+ ///
+ /// Used to create a fake user id for now, but will be used to get the user id from the request.
+ ///
+ /// The hub connection.
+ ///
+ public string GetUserId(HubConnectionContext connection)
+ {
+ var httpContext = connection.GetHttpContext();
+
+ if (httpContext == null)
+ {
+ return "";
+ }
+
+ if (httpContext.Request.Query.ContainsKey("customUserId"))
+ {
+ return httpContext.Request.Query["customUserId"].SingleOrDefault() ??
""; + } + + return ""; + } + } +} diff --git a/PalmHill.BlazorChat/Server/WebSocketChat.cs b/PalmHill.BlazorChat/Server/SignalR/WebSocketChat.cs similarity index 58% rename from PalmHill.BlazorChat/Server/WebSocketChat.cs rename to PalmHill.BlazorChat/Server/SignalR/WebSocketChat.cs index f20f306..46400d3 100644 --- a/PalmHill.BlazorChat/Server/WebSocketChat.cs +++ b/PalmHill.BlazorChat/Server/SignalR/WebSocketChat.cs @@ -6,8 +6,12 @@ using System.Web; using PalmHill.Llama; using System.Diagnostics; +using PalmHill.Llama.Models; +using PalmHill.LlmMemory; +using PalmHill.BlazorChat.Shared.Models.WebSocket; +using System.Collections.Concurrent; -namespace PalmHill.BlazorChat.Server +namespace PalmHill.BlazorChat.Server.SignalR { /// @@ -15,16 +19,14 @@ namespace PalmHill.BlazorChat.Server /// public class WebSocketChat : Hub { - LLamaWeights LLamaWeights; - ModelParams ModelParams; - public WebSocketChat(LLamaWeights model, ModelParams modelParams) + public WebSocketChat(InjectedModel injectedModel, LlmMemory.ServerlessLlmMemory? llmMemory = null) { - LLamaWeights = model; - ModelParams = modelParams; + InjectedModel = injectedModel; + LlmMemory = llmMemory; } - - - + private InjectedModel InjectedModel { get; } + private ServerlessLlmMemory? LlmMemory { get; } + /// /// Sends a chat prompt to the client and waits for a response. The method performs inference on the chat conversation and sends the result back to the client. @@ -33,14 +35,33 @@ public WebSocketChat(LLamaWeights model, ModelParams modelParams) /// The chat conversation to send. /// A Task that represents the asynchronous operation. /// Thrown when an error occurs during the inference process. - public async Task SendPrompt(Guid messageId, ChatConversation chatConversation) + public async Task InferenceRequest(InferenceRequest chatConversation) { - await ThreadLock.InferenceLock.WaitAsync(); + var conversationId = chatConversation.Id; + var cancellationTokenSource = new CancellationTokenSource(); + ChatCancelation.CancelationTokens[conversationId] = cancellationTokenSource; try { - await DoInferenceAndRespondToClient(Clients.Caller, messageId, chatConversation); - + await ThreadLock.InferenceLock.WaitAsync(cancellationTokenSource.Token); + await DoInferenceAndRespondToClient(Clients.Caller, chatConversation, cancellationTokenSource.Token); + + var inferenceStatusUpdate = new WebSocketInferenceStatusUpdate(); + inferenceStatusUpdate.MessageId = chatConversation.ChatMessages.LastOrDefault()?.Id; + inferenceStatusUpdate.IsComplete = true; + inferenceStatusUpdate.Success = true; + await Clients.Caller.SendAsync("InferenceStatusUpdate", inferenceStatusUpdate); + } + + catch (OperationCanceledException) + { + var inferenceStatusUpdate = new WebSocketInferenceStatusUpdate(); + inferenceStatusUpdate.MessageId = chatConversation.ChatMessages.LastOrDefault()?.Id; + inferenceStatusUpdate.IsComplete = true; + inferenceStatusUpdate.Success = false; + await Clients.Caller.SendAsync("InferenceStatusUpdate", inferenceStatusUpdate); + // Handle the cancellation operation + Console.WriteLine($"Inference for {conversationId} was canceled."); } catch (Exception ex) { @@ -49,6 +70,8 @@ public async Task SendPrompt(Guid messageId, ChatConversation chatConversation) finally { ThreadLock.InferenceLock.Release(); + ChatCancelation.CancelationTokens.TryRemove(conversationId, out _); + } } @@ -60,15 +83,15 @@ public async Task SendPrompt(Guid messageId, ChatConversation chatConversation) /// The unique identifier for the message. 
/// The chat conversation to use for inference. /// A Task that represents the asynchronous operation. - private async Task DoInferenceAndRespondToClient(ISingleClientProxy respondToClient, Guid messageId, ChatConversation chatConversation) + private async Task DoInferenceAndRespondToClient(ISingleClientProxy respondToClient, InferenceRequest chatConversation, CancellationToken cancellationToken) { - // Create a context for the model and a chat session for the conversation - LLamaContext modelContext = LLamaWeights.CreateContext(ModelParams); + LLamaContext modelContext = InjectedModel.Model.CreateContext(InjectedModel.ModelParams); var session = modelContext.CreateChatSession(chatConversation); - var inferenceParams = chatConversation.GetInferenceParams(); + var inferenceParams = chatConversation.GetInferenceParams(InjectedModel.DefaultAntiPrompts); - var cancelGeneration = new CancellationTokenSource(); + var messageId = chatConversation.ChatMessages.LastOrDefault()?.Id; + var textBuffer = ""; var fullResponse = ""; var totalTokens = 0; @@ -77,10 +100,17 @@ private async Task DoInferenceAndRespondToClient(ISingleClientProxy respondToCli inferenceStopwatch.Start(); var asyncResponse = session.ChatAsync(session.History, inferenceParams, - cancelGeneration.Token); + cancellationToken); // Perform inference and send the response to the client await foreach (var text in asyncResponse) { + if (cancellationToken.IsCancellationRequested) + { + modelContext.Dispose(); + inferenceStopwatch.Stop(); + + throw new OperationCanceledException(cancellationToken); + } totalTokens++; fullResponse += text; @@ -89,22 +119,25 @@ private async Task DoInferenceAndRespondToClient(ISingleClientProxy respondToCli if (shouldSendBuffer) { - await respondToClient.SendAsync("ReceiveModelString", messageId, textBuffer); + var inferenceString = new WebSocketInferenceString(); + inferenceString.WebSocketChatMessageId = messageId ?? Guid.NewGuid(); + inferenceString.InferenceString = textBuffer; + + await respondToClient.SendAsync("ReceiveInferenceString", inferenceString); textBuffer = ""; } } - modelContext.Dispose(); + modelContext.Dispose(); inferenceStopwatch.Stop(); if (textBuffer.Length > 0) { - await respondToClient.SendAsync("ReceiveModelString", messageId, textBuffer); + await respondToClient.SendAsync("ReceiveInferenceString", chatConversation.Id, textBuffer); } - await respondToClient.SendAsync("MessageComplete", messageId, "success"); Console.WriteLine($"Inference took {inferenceStopwatch.ElapsedMilliseconds}ms and generated {totalTokens} tokens. 
{(totalTokens / (inferenceStopwatch.ElapsedMilliseconds / (float)1000)).ToString("F2")} tokens/second."); Console.WriteLine(fullResponse); } diff --git a/PalmHill.BlazorChat/Server/WebApi/ApiChatController.cs b/PalmHill.BlazorChat/Server/WebApi/ApiChatController.cs new file mode 100644 index 0000000..ba75099 --- /dev/null +++ b/PalmHill.BlazorChat/Server/WebApi/ApiChatController.cs @@ -0,0 +1,191 @@ +using LLama.Common; +using LLama; +using Microsoft.AspNetCore.Mvc; +using PalmHill.BlazorChat.Shared.Models; +using PalmHill.Llama; +using System.Diagnostics; +using PalmHill.Llama.Models; +using Microsoft.AspNetCore.SignalR; +using PalmHill.BlazorChat.Server.SignalR; +using PalmHill.BlazorChat.Shared.Models.WebSocket; +using PalmHill.LlmMemory; + +// For more information on enabling Web API for empty projects, visit https://go.microsoft.com/fwlink/?LinkID=397860 + +namespace PalmHill.BlazorChat.Server.WebApi +{ + + /// + /// The ApiChat class is responsible for handling chat API requests. + /// + [Route("api/chat", Name = "Chat")] + [ApiController] + public class ApiChatController : ControllerBase + { + /// + /// Initializes a new instance of the class. + /// + /// The LLamaWeights model. + /// The model parameters. + public ApiChatController( + InjectedModel injectedModel, + IHubContext webSocketChat, + LlmMemory.ServerlessLlmMemory? llmMemory = null + ) + { + InjectedModel = injectedModel; + WebSocketChat = webSocketChat; + LlmMemory = llmMemory; + + } + + private IHubContext WebSocketChat { get; } + public ServerlessLlmMemory? LlmMemory { get; } + private InjectedModel InjectedModel { get; } + + /// + /// Handles a chat API request. + /// + /// The chat conversation. + /// Returns a string response from the chat model inference. + /// Thrown when there is an error during the chat model inference. 
+ [HttpPost(Name = "Chat")] + public async Task> Chat([FromBody] InferenceRequest conversation) + { + var errorText = ""; + + var conversationId = conversation.Id; + var cancellationTokenSource = new CancellationTokenSource(); + ChatCancelation.CancelationTokens[conversationId] = cancellationTokenSource; + + try + { + await ThreadLock.InferenceLock.WaitAsync(cancellationTokenSource.Token); + var response = await DoInference(conversation, cancellationTokenSource.Token); + return Ok(response); + } + catch (OperationCanceledException) + { + errorText = $"Inference for {conversationId} was canceled."; + Console.WriteLine(errorText); + return StatusCode(444, errorText); + } + catch (Exception ex) + { + errorText = ex.ToString(); + } + finally + { + ThreadLock.InferenceLock.Release(); + ChatCancelation.CancelationTokens.TryRemove(conversationId, out _); + } + + Console.WriteLine(errorText); + return StatusCode(500, errorText); + } + + [HttpPost("docs")] + public async Task> Ask(InferenceRequest chatConversation) + { + if (LlmMemory == null) + { + var result = StatusCode(503, "No LlmMemory loaded."); + return result; + } + + + var conversationId = chatConversation.Id; + var cancellationTokenSource = new CancellationTokenSource(); + ChatCancelation.CancelationTokens[conversationId] = cancellationTokenSource; + + var question = chatConversation.ChatMessages.LastOrDefault()?.Message; + if (question == null) + { + return BadRequest("No question provided."); + } + + try + { + var answer = await LlmMemory.Ask(conversationId.ToString(), question, cancellationTokenSource.Token); + + var chatMessageAnswer = new ChatMessage() + { + Role = ChatMessageRole.Assistant, + Message = answer.Result, + AttachmentIds = answer.RelevantSources.Select(s => s.SourceName).ToList() + }; + + if (cancellationTokenSource.Token.IsCancellationRequested) + { + throw new OperationCanceledException(cancellationTokenSource.Token); + } + + return chatMessageAnswer; + } + catch (OperationCanceledException ex) + { + return StatusCode(444, ex.ToString()); + } + catch (Exception ex) + { + return StatusCode(500, ex.ToString()); + } + } + + [HttpDelete("cancel/{conversationId}", Name = "CancelChat")] + public async Task CancelChat(Guid conversationId) + { + var cancelToken = ChatCancelation.CancelationTokens[conversationId]; + if (cancelToken == null) + { + return false; + } + else { + await cancelToken.CancelAsync(); + return true; + } + } + + /// + /// Performs inference for a chat conversation. + /// + /// The chat conversation for which to perform inference. + /// Returns the inference result as a string. 
+ private async Task DoInference(InferenceRequest conversation, CancellationToken cancellationToken) + { + LLamaContext modelContext = InjectedModel.Model.CreateContext(InjectedModel.ModelParams); + var session = modelContext.CreateChatSession(conversation); + var inferenceParams = conversation.GetInferenceParams(InjectedModel.DefaultAntiPrompts); + + var fullResponse = ""; + var totalTokens = 0; + var inferenceStopwatch = new Stopwatch(); + + inferenceStopwatch.Start(); + var asyncResponse = session.ChatAsync(session.History, + inferenceParams, + cancellationToken); + await foreach (var text in asyncResponse) + { + if (cancellationToken.IsCancellationRequested) + { + modelContext.Dispose(); + inferenceStopwatch.Stop(); + + throw new OperationCanceledException(cancellationToken); + } + + totalTokens++; + fullResponse += text; + } + modelContext.Dispose(); + inferenceStopwatch.Stop(); + + Console.WriteLine($"Inference took {inferenceStopwatch.ElapsedMilliseconds}ms and generated {totalTokens} tokens. {(totalTokens / (inferenceStopwatch.ElapsedMilliseconds / (float)1000)).ToString("F2")} tokens/second."); + Console.WriteLine(fullResponse); + + return fullResponse; + } + + } +} diff --git a/PalmHill.BlazorChat/Server/WebApi/AttachmentController.cs b/PalmHill.BlazorChat/Server/WebApi/AttachmentController.cs new file mode 100644 index 0000000..f24fdcc --- /dev/null +++ b/PalmHill.BlazorChat/Server/WebApi/AttachmentController.cs @@ -0,0 +1,137 @@ +using DocumentFormat.OpenXml.Spreadsheet; +using Microsoft.AspNetCore.Mvc; +using Microsoft.AspNetCore.SignalR; +using Microsoft.KernelMemory.FileSystem.DevTools; +using PalmHill.BlazorChat.Server.SignalR; +using PalmHill.BlazorChat.Shared.Models; +using PalmHill.LlmMemory; + +// For more information on enabling Web API for empty projects, visit https://go.microsoft.com/fwlink/?LinkID=397860 + +namespace PalmHill.BlazorChat.Server.WebApi +{ + [Route("api/[controller]", Name = "Attachment")] + [ApiController] + public class AttachmentController : ControllerBase + { + private LlmMemory.ServerlessLlmMemory LlmMemory { get; } + private IHubContext WebSocketChat { get; } + + public AttachmentController( + LlmMemory.ServerlessLlmMemory llmMemory, + IHubContext webSocketChat + ) + { + LlmMemory = llmMemory; + WebSocketChat = webSocketChat; + } + + + + [HttpGet("list/{conversationId}")] + public IEnumerable GetAttachments(Guid conversationId) + { + var conversationAttachments = LlmMemory + .AttachmentInfos + .Where(a => a.Value.ConversationId == conversationId) + .Select(a => a.Value); + + return conversationAttachments; + } + + [HttpGet("{attachmentId}")] + public ActionResult GetAttachmentById(Guid attachmentId) + { + var attchmentFound = LlmMemory.AttachmentInfos.TryGetValue(attachmentId, out var attachmentInfo); + + if (!attchmentFound) + { + return NotFound(); + } + + return Ok(attachmentInfo); + } + + + + public class FileUpload + { + public IFormFile? 
File { get; set; }
+ }
+
+ // POST api/Attachment/{conversationId}/{attachmentId}
+ [HttpPost("{conversationId}/{attachmentId}")]
+ public ActionResult AddAttachment([FromForm] FileUpload fileUpload, Guid conversationId, Guid attachmentId)
+ {
+ var file = fileUpload.File;
+
+ if (file == null)
+ {
+ return BadRequest("No file provided.");
+ }
+
+ var attachmentInfo = new AttachmentInfo()
+ {
+ Id = attachmentId,
+ Name = file.FileName,
+ ContentType = file.ContentType,
+ Size = file.Length,
+ Status = AttachmentStatus.Pending,
+ ConversationId = conversationId,
+ FileBytes = file.OpenReadStream().ReadAllBytes()
+ };
+ var userId = "user1";
+ _ = DoImportAsync(userId, attachmentInfo);
+
+ return attachmentInfo;
+ }
+
+ private async Task DoImportAsync(string? userId, AttachmentInfo attachmentInfo)
+ {
+ try
+ {
+ await LlmMemory.ImportDocumentAsync(attachmentInfo, null);
+ await WebSocketChat.Clients.User(userId!).SendCoreAsync("AttachmentStatusUpdate", [attachmentInfo]);
+
+ }
+ catch (Exception ex)
+ {
+ Console.WriteLine(ex);
+ attachmentInfo.Status = AttachmentStatus.Failed;
+ await WebSocketChat.Clients.User(userId!).SendCoreAsync("AttachmentStatusUpdate", [attachmentInfo]);
+ }
+ }
+
+ // DELETE api/Attachment/{attachmentId}
+ [HttpDelete("{attachmentId}")]
+ public async Task DeleteAttachment(Guid attachmentId)
+ {
+ var exists = LlmMemory.AttachmentInfos.TryGetValue(attachmentId, out var attachmentInfo);
+ if (!exists || attachmentInfo?.ConversationId is null)
+ {
+ return false;
+ }
+
+ await LlmMemory.DeleteDocument(attachmentInfo.ConversationId.Value, attachmentId);
+
+ return true;
+ }
+
+ [HttpGet("{attachmentId}/file")]
+ public ActionResult GetAttachmentFile(Guid attachmentId)
+ {
+ //TryGetValue avoids a KeyNotFoundException when the attachment is unknown.
+ var attachmentFound = LlmMemory.AttachmentInfos.TryGetValue(attachmentId, out var attachmentInfo);
+ if (!attachmentFound || attachmentInfo == null)
+ {
+ return NotFound();
+ }
+
+ if (attachmentInfo.FileBytes == null || attachmentInfo.Status != AttachmentStatus.Uploaded)
+ {
+ return BadRequest("File not ready.");
+ }
+
+ return File(attachmentInfo.FileBytes, attachmentInfo.ContentType, attachmentInfo.Name);
+
+ }
+ }
+}
diff --git a/PalmHill.BlazorChat/Server/appsettings.json b/PalmHill.BlazorChat/Server/appsettings.json
index b3ae3ff..40d77cc 100644
--- a/PalmHill.BlazorChat/Server/appsettings.json
+++ b/PalmHill.BlazorChat/Server/appsettings.json
@@ -6,5 +6,19 @@
}
},
"AllowedHosts": "*",
- "DefaultModelPath": "C:\\models\\orca-2-13b.Q6_K.gguf"
+ "InferenceModelConfig": {
+ "ModelPath": "C:\\models\\orca-2-13b.Q6_K.gguf",
+ "GpuLayerCount": 100,
+ "ContextSize": 4096,
+ "Gpu": 0,
+ "AntiPrompts": [ "User:" ]
+ },
+ "EmbeddingModelConfig": {
+ "ModelPath": "C:\\models\\mistral-7b-openorca.Q4_K_M.gguf",
+ "GpuLayerCount": 100,
+ "ContextSize": 2048,
+ "Gpu": 0,
+ "AntiPrompts": [ "Question:" ]
+
+ }
}
diff --git a/PalmHill.BlazorChat/Shared/ChatExtensions.cs b/PalmHill.BlazorChat/Shared/ChatExtensions.cs
index 7b47919..e75e744 100644
--- a/PalmHill.BlazorChat/Shared/ChatExtensions.cs
+++ b/PalmHill.BlazorChat/Shared/ChatExtensions.cs
@@ -25,7 +25,7 @@ public static class ChatExtensions
/// The ChatConversation object to be converted.
/// A string representation of the ChatConversation object in a format suitable for Orca.
/// Thrown when the role of a ChatMessage is neither User nor Assistant.
- public static string ToOrcaPromptString(this ChatConversation chatConversation) + public static string ToOrcaPromptString(this InferenceRequest chatConversation) { var promptString = new StringBuilder(); diff --git a/PalmHill.BlazorChat/Shared/Models/AttachmentInfo.cs b/PalmHill.BlazorChat/Shared/Models/AttachmentInfo.cs new file mode 100644 index 0000000..720a488 --- /dev/null +++ b/PalmHill.BlazorChat/Shared/Models/AttachmentInfo.cs @@ -0,0 +1,66 @@ +using System; +using System.Collections.Generic; +using System.Linq; +using System.Text; +using System.Text.Json.Serialization; +using System.Threading.Tasks; + +namespace PalmHill.BlazorChat.Shared.Models +{ + /// + /// Information about an attachment. + /// + public class AttachmentInfo + { + /// + /// The Id of the attachment. + /// + public Guid Id { get; set; } = Guid.NewGuid(); + /// + /// The file name of the attachment. + /// + public string Name { get; set; } = ""; + + /// + /// The file bytes of the attachment. (Not serialized). Used for import/memorization. + /// + [JsonIgnore] + public byte[]? FileBytes { get; set; } + + /// + /// The content type of the attachment. + /// + public string ContentType { get; set; } = ""; + + /// + /// The size of the attachment. + /// + public long Size { get; set; } = 0; + + /// + /// The status of the attachment. + /// + public AttachmentStatus Status { get; set; } = AttachmentStatus.Pending; + + /// + /// The Id of the conversation the attachment belongs to. Used for DB (later). + /// + public Guid? ConversationId { get; set; } + } + + public enum AttachmentStatus + { + /// + /// The attachment is pending. Processing is not complete. + /// + Pending, + /// + /// The attachment is uploaded. Processing is complete. + /// + Uploaded, + /// + /// The attachment failed to upload or processing is failed. + /// + Failed + } +} diff --git a/PalmHill.BlazorChat/Shared/Models/ChatMessage.cs b/PalmHill.BlazorChat/Shared/Models/ChatMessage.cs index 9bb0eb9..9fd0791 100644 --- a/PalmHill.BlazorChat/Shared/Models/ChatMessage.cs +++ b/PalmHill.BlazorChat/Shared/Models/ChatMessage.cs @@ -5,6 +5,8 @@ /// public class ChatMessage { + public Guid Id { get; set; } = Guid.NewGuid(); + /// /// Gets or sets the role of the entity that generated the message. /// @@ -22,6 +24,11 @@ public class ChatMessage /// [DefaultValue("What are cats?")] public string? Message { get; set; } + + /// + /// The user can add attachments to the message, the attachment will becom part of the context. + /// + public List AttachmentIds { get; set; } = new List(); } /// @@ -40,4 +47,7 @@ public enum ChatMessageRole /// [Description("User Message")] User = 1, + + [Description("User Message")] + Question = 2, } \ No newline at end of file diff --git a/PalmHill.BlazorChat/Shared/Models/ChatConversation.cs b/PalmHill.BlazorChat/Shared/Models/InferenceRequest.cs similarity index 80% rename from PalmHill.BlazorChat/Shared/Models/ChatConversation.cs rename to PalmHill.BlazorChat/Shared/Models/InferenceRequest.cs index aa5215a..a0eea70 100644 --- a/PalmHill.BlazorChat/Shared/Models/ChatConversation.cs +++ b/PalmHill.BlazorChat/Shared/Models/InferenceRequest.cs @@ -11,8 +11,10 @@ namespace PalmHill.BlazorChat.Shared.Models /// /// Represents a chat conversation. /// - public class ChatConversation + public class InferenceRequest { + public Guid Id { get; set; } = Guid.NewGuid(); + /// /// Gets or sets the system message for the chat conversation. 
/// @@ -37,5 +39,11 @@ public class ChatConversation /// The inference settings for the chat conversation. /// public InferenceSettings Settings { get; set; } = new InferenceSettings(); + + + /// + /// List of attachments that are part of the conversation. + /// + public List Attachments { get; set; } = new List(); } } \ No newline at end of file diff --git a/PalmHill.BlazorChat/Shared/Models/WebSocket/WebSocketInferenceStatusUpdate.cs b/PalmHill.BlazorChat/Shared/Models/WebSocket/WebSocketInferenceStatusUpdate.cs new file mode 100644 index 0000000..e786fe4 --- /dev/null +++ b/PalmHill.BlazorChat/Shared/Models/WebSocket/WebSocketInferenceStatusUpdate.cs @@ -0,0 +1,12 @@ +namespace PalmHill.BlazorChat.Shared.Models.WebSocket +{ + /// + /// A status update sent over a WebSocket. + /// + public class WebSocketInferenceStatusUpdate + { + public Guid? MessageId { get; set; } + public bool IsComplete { get; set; } = false; + public bool? Success { get; set; } + } +} diff --git a/PalmHill.BlazorChat/Shared/Models/WebSocket/WebSocketInferenceString.cs b/PalmHill.BlazorChat/Shared/Models/WebSocket/WebSocketInferenceString.cs new file mode 100644 index 0000000..818819e --- /dev/null +++ b/PalmHill.BlazorChat/Shared/Models/WebSocket/WebSocketInferenceString.cs @@ -0,0 +1,12 @@ +namespace PalmHill.BlazorChat.Shared.Models.WebSocket +{ + /// + /// An inference string sent over a WebSocket. + /// + public class WebSocketInferenceString + { + + public Guid WebSocketChatMessageId { get; set; } = Guid.NewGuid(); + public string InferenceString { get; set; } = string.Empty; + } +} diff --git a/PalmHill.BlazorChat/Shared/README.md b/PalmHill.BlazorChat/Shared/README.md index df31796..a08268d 100644 --- a/PalmHill.BlazorChat/Shared/README.md +++ b/PalmHill.BlazorChat/Shared/README.md @@ -1,43 +1,127 @@ -# PlamHill.BlazorChat.Shared - -This project is part of the PlamHill.BlazorChat solution. It contains shared models and extensions used across the solution. - -## Models - -### ChatMessage - -This class represents a chat message in a conversation. It has two properties: - -- **Role**: The role of the entity that generated the message. It can be either `User` or `Assistant`. The `User` role represents a message generated by a user, while the `Assistant` role represents a message generated by an assistant. -- **Message**: The content of the message. - -### ChatConversation - -This class represents a chat conversation. It has three properties: - -- **SystemMessage**: The system message for the chat conversation. This is a message that sets the context for the conversation. -- **ChatMessages**: The chat messages for the chat conversation. This is a list of `ChatMessage` objects that represent the messages in the conversation. -- **Settings**: The inference settings for the chat conversation. This is an `InferenceSettings` object that controls the behavior of the inference. - -### InferenceSettings - -This class represents the settings for inference with a large language model. It has several properties: - -- **Temperature**: Controls the randomness of the output. Higher values make the output more random, while lower values make the output more deterministic. -- **MaxLength**: The maximum number of tokens that the model will generate. -- **TopP**: The nucleus sampling parameter that controls the size of the token set considered for generation at each step. Lower values make the output more focused, while higher values make it more diverse. -- **FrequencyPenalty**: Discourages the model from using frequent tokens. 
Higher values increase the penalty, while lower values decrease it. -- **PresencePenalty**: Discourages the model from using new tokens. Higher values increase the penalty, while lower values decrease it. - -## Extensions - -### ChatExtensions - -This static class provides an extension method for the `ChatConversation` class: - -- **ToOrcaPromptString**: This method converts a `ChatConversation` object into a string format suitable for Orca. Here's an example of how to use it: - -```csharp -ChatConversation conversation = new ChatConversation(); -// Add messages to the conversation... -string prompt = conversation.ToOrcaPromptString(); \ No newline at end of file +#PalmHill.BlazorChat.Shared + +Contains models shared between the server and client projects. + +## Table of Contents +1. [WebSocketInferenceStatusUpdate.cs](#websocketinferencestatusupdatecs) +2. [WebSocketInferenceString.cs](#websocketinferencestringcs) +3. [AttachmentInfo.cs](#attachmentinfocs) +4. [ChatMessage.cs](#chatmessagecs) +5. [InferenceRequest.cs](#inferencerequestcs) +6. [InferenceSettings.cs](#inferencesettingscs) + +--- + +## WebSocketInferenceStatusUpdate.cs +### Namespace +- `PalmHill.BlazorChat.Shared.Models.WebSocket` + +### Class: `WebSocketInferenceStatusUpdate` +#### Summary +- A status update sent over a WebSocket. + +#### Properties +- **`MessageId`** (`Guid?`) + - **Description**: Not documented. +- **`IsComplete`** (`bool`) + - **Description**: Indicates whether the status update is complete. Default value is `false`. +- **`Success`** (`bool?`) + - **Description**: Indicates the success status of the update. Not documented. + +--- + +## WebSocketInferenceString.cs +### Namespace +- `PalmHill.BlazorChat.Shared.Models.WebSocket` + +### Class: `WebSocketInferenceString` +#### Summary +- An inference string sent over a WebSocket. + +#### Properties +- **`WebSocketChatMessageId`** (`Guid`) + - **Description**: Not explicitly documented. Default value is a new `Guid`. +- **`InferenceString`** (`string`) + - **Description**: The inference string content. Default value is an empty string. + +--- + +## AttachmentInfo.cs +### Namespace +- `PalmHill.BlazorChat.Shared.Models` + +### Class: `AttachmentInfo` +#### Summary +- Information about an attachment. + +#### Properties +- **`Id`** (`Guid`) + - **Summary**: The Id of the attachment. + - **Description**: Default value is a new `Guid`. +- **`Name`** (`string`) + - **Summary**: The file name of the attachment. + - **Description**: Default value is an empty string. +- **`FileBytes`** (`byte[]?`) + - **Summary**: The file bytes of the attachment. (Not serialized). Used for import/memorization. + - **Description**: This property is not serialized and is used for internal processing. +- **`ContentType`** (`string`) + - **Summary**: The content type of the attachment. + - **Description**: Default value is an empty string. + +--- + +## ChatMessage.cs +### Namespace +- Not explicitly defined in the provided content. + +### Class: `ChatMessage` +#### Summary +- Represents a chat message in a conversation. + +#### Properties +- **`Id`** (`Guid`) + - **Description**: Represents the unique identifier of the chat message. Default value is a new `Guid`. +- **`Role`** (`ChatMessageRole?`) + - **Summary**: Gets or sets the role of the entity that generated the message. + - **Description**: The role of the entity. Default value is `ChatMessageRole.User`. +- **`Message`** (`string?`) + - **Summary**: Gets or sets the content of the message. 
+ - **Description**: The actual message content. Default value is "What are cats?". +- **`AttachmentIds`** (`List`) + - **Summary**: The user can add attachments to the message, the attachment will become part of the context. + - **Description**: A list of attachment identifiers. Default value is an empty list. + +--- + +## InferenceRequest.cs +### Namespace +- `PalmHill.BlazorChat.Shared.Models` + +### Class: `InferenceRequest` +#### Summary +- Represents a chat conversation. + +#### Properties +- **`Id`** (`Guid`) + - **Description**: Represents the unique identifier of the inference request. Default value is a new `Guid`. +- **`SystemMessage`** (`string`) + - **Summary**: Gets or sets the system message for the chat conversation. + - **Description**: The system message content. Default value is "You are a helpful assistant." +- **`ChatMessages`** (`List`) + - **Summary**: Gets or sets the chat messages for the chat conversation. + - **Description**: A collection of `ChatMessage` objects. Default value is a new list. + +--- + +## InferenceSettings.cs +### Namespace +- `PalmHill.BlazorChat.Shared.Models` + +### Class: `InferenceSettings` +#### Summary +- Represents the settings for inference with a large language model. + +#### Properties +- **`Temperature`** (`float`) + - **Summary**: Gets or sets the temperature for the inference. Higher values (closer to 1) make the output more random, while lower values make the output more deterministic. + - **Description**: The temperature setting for inference. Default diff --git a/PalmHill.Llama/LlamaExtensions.cs b/PalmHill.Llama/LlamaExtensions.cs index 2a9b4de..bc237c2 100644 --- a/PalmHill.Llama/LlamaExtensions.cs +++ b/PalmHill.Llama/LlamaExtensions.cs @@ -3,6 +3,10 @@ using PalmHill.BlazorChat.Shared.Models; using static LLama.Common.ChatHistory; using PalmHill.BlazorChat.Shared; +using Microsoft.Extensions.DependencyInjection; +using PalmHill.Llama.Models; +using Microsoft.Extensions.Hosting; +using Microsoft.Extensions.Configuration; namespace PalmHill.Llama { @@ -15,12 +19,12 @@ public static class LlamaExtensions /// Loads the chat history into a . ///
/// The to load the history into. - /// The containing the chat history. - public static void LoadChatHistory(this ChatSession chatSession, ChatConversation chatConversation) - { + /// The containing the chat history. + public static void LoadChatHistory(this ChatSession chatSession, InferenceRequest chatConversation) + { if (!string.IsNullOrWhiteSpace(chatConversation.SystemMessage)) - { + { chatSession.History.AddMessage(AuthorRole.System, chatConversation.SystemMessage); } @@ -35,7 +39,7 @@ public static void LoadChatHistory(this ChatSession chatSession, ChatConversatio continue; } - + switch (chatMessage.Role) { case ChatMessageRole.User: @@ -53,17 +57,17 @@ public static void LoadChatHistory(this ChatSession chatSession, ChatConversatio } /// - /// Creates a new from a . + /// Creates a new from a . /// /// The to use for the session. - /// The to create the session from. + /// The to create the session from. /// A new . - public static ChatSession CreateChatSession(this LLamaContext lLamaContext, ChatConversation chatConversation) + public static ChatSession CreateChatSession(this LLamaContext lLamaContext, InferenceRequest chatConversation) { var ex = new InteractiveExecutor(lLamaContext); ChatSession session = new ChatSession(ex); - var specialTokensToIgnore = new string[] { "Assistant:", "User:"}; + var specialTokensToIgnore = new string[] { "Assistant:", "User:" }; session = session.WithOutputTransform(new LLamaTransforms.KeywordTextOutputStreamTransform(specialTokensToIgnore, redundancyLength: 8)); session.LoadChatHistory(chatConversation); var promptMessage = chatConversation.ChatMessages.Last(); @@ -82,20 +86,81 @@ public static ChatSession CreateChatSession(this LLamaContext lLamaContext, Chat } /// - /// Gets the inference parameters from a . + /// Gets the inference parameters from a . /// - /// The to get the parameters from. + /// The to get the parameters from. /// The for the conversation. - public static InferenceParams GetInferenceParams(this ChatConversation chatConversation) + public static InferenceParams GetInferenceParams(this InferenceRequest chatConversation, List? defaultAntiPrompts = null) { - var inferenceParams = new InferenceParams() { + var inferenceParams = new InferenceParams() + { Temperature = chatConversation.Settings.Temperature, MaxTokens = chatConversation.Settings.MaxLength, TopP = chatConversation.Settings.TopP, FrequencyPenalty = chatConversation.Settings.FrequencyPenalty, PresencePenalty = chatConversation.Settings.PresencePenalty, - AntiPrompts = ["User:"] }; + AntiPrompts = defaultAntiPrompts ?? [] + }; return inferenceParams; } + + + /// + /// Add Llama to the service collection. + /// + /// + /// + /// Occurs when the model file is missing. + /// Occurs when the is null." + public static InjectedModel AddLlamaModel(this IHostApplicationBuilder builder, ModelConfig? 
modelConfig = null)
+ {
+ var defaultModelConfigSection = "InferenceModelConfig";
+
+ //Attempt to get the model config from app configuration when none was supplied.
+ modelConfig ??= builder?.GetModelConfigFromConfigSection(defaultModelConfigSection);
+
+ if (modelConfig == null)
+ {
+ throw new ArgumentNullException(nameof(modelConfig), $"The argument {nameof(modelConfig)} must be supplied if there is no {defaultModelConfigSection} section in the app configuration.");
+ }
+
+ //check if model is present
+ var modelExists = System.IO.File.Exists(modelConfig.ModelPath);
+ if (!modelExists)
+ {
+ throw new FileNotFoundException("Model file does not exist.", modelConfig.ModelPath);
+ }
+
+ //Initialize Llama
+ ModelParams parameters = new ModelParams(modelConfig.ModelPath ?? "")
+ {
+ ContextSize = modelConfig.ContextSize,
+ GpuLayerCount = modelConfig.GpuLayerCount,
+ MainGpu = modelConfig.Gpu
+ };
+
+ LLamaWeights model = LLamaWeights.LoadFromFile(parameters);
+ //End Initialize Llama
+
+ var injectedModel = new InjectedModel(model, parameters, modelConfig.AntiPrompts);
+
+ //Add to services
+ builder?.Services.AddSingleton(injectedModel);
+
+ return injectedModel;
+ }
+
+
+
+
+
+ public static ModelConfig? GetModelConfigFromConfigSection(this IHostApplicationBuilder builder, string configSection)
+ {
+ var appConfig = builder?.Configuration.GetSection(configSection);
+ var appSettingsConfig = appConfig?.Get();
+
+ //Guard against a missing section instead of null-forgiving the result.
+ if (appSettingsConfig != null)
+ {
+ appSettingsConfig.AntiPrompts = appConfig?.GetSection("AntiPrompts").Get>() ?? [];
+ }
+
+ return appSettingsConfig;
+ }
}
}
diff --git a/PalmHill.Llama/Models/InjectedModel.cs b/PalmHill.Llama/Models/InjectedModel.cs
new file mode 100644
index 0000000..a0d9e6e
--- /dev/null
+++ b/PalmHill.Llama/Models/InjectedModel.cs
@@ -0,0 +1,24 @@
+using LLama;
+using LLama.Common;
+using System;
+using System.Collections.Generic;
+using System.Linq;
+using System.Text;
+using System.Threading.Tasks;
+
+namespace PalmHill.Llama.Models
+{
+ public class InjectedModel
+ {
+ public LLamaWeights Model { get; }
+ public ModelParams ModelParams { get; }
+ public List DefaultAntiPrompts { get; set; }
+
+ public InjectedModel(LLamaWeights model, ModelParams modelParams, List defaultAntiPrompts)
+ {
+ Model = model;
+ ModelParams = modelParams;
+ DefaultAntiPrompts = defaultAntiPrompts;
+ }
+ }
+}
diff --git a/PalmHill.Llama/Models/ModelConfig.cs b/PalmHill.Llama/Models/ModelConfig.cs
new file mode 100644
index 0000000..cdb2068
--- /dev/null
+++ b/PalmHill.Llama/Models/ModelConfig.cs
@@ -0,0 +1,17 @@
+using System;
+using System.Collections.Generic;
+using System.Linq;
+using System.Text;
+using System.Threading.Tasks;
+
+namespace PalmHill.Llama.Models
+{
+ public class ModelConfig
+ {
+ public string ModelPath { get; set; } = "";
+ public int GpuLayerCount { get; set; } = 20;
+ public uint ContextSize { get; set; } = 2048;
+ public int Gpu { get; set; } = 0;
+ public List AntiPrompts = [];
+ }
+}
diff --git a/PalmHill.Llama/PalmHill.Llama.csproj b/PalmHill.Llama/PalmHill.Llama.csproj
index 2883413..15e054a 100644
--- a/PalmHill.Llama/PalmHill.Llama.csproj
+++ b/PalmHill.Llama/PalmHill.Llama.csproj
@@ -7,8 +7,10 @@
- - + + + +
diff --git a/PalmHill.Llama/README.md b/PalmHill.Llama/README.md
index 82a377c..d376ec2 100644
--- a/PalmHill.Llama/README.md
+++ b/PalmHill.Llama/README.md
@@ -1,7 +1,42 @@
-The [`LlamaExtensions.cs`](PalmHill.Llama/LlamaExtensions.cs) file is a part of the `PalmHill.Llama` namespace and contains extension methods for the `ChatSession` and `ChatConversation` classes.
+# PalmHill.Llama + +PalmHill.Llama is a .NET library designed to integrate with the LLama machine learning framework. It provides essential utilities and models to enhance the capabilities of LLama-based applications, particularly in the realm of AI and data processing. + +## Features + +- **Llama Extensions**: Enhancements to the LLama context for improved functionality and usability. +- **Thread Locking**: Utilizes `SemaphoreSlim` for effective thread management in concurrent environments. +- **Model Injection**: Facilitates the injection of pre-trained LLama models with configurable parameters. +- **Configuration Management**: Offers robust options to configure the LLama models and their operational parameters. + +## Dependencies + +- .NET 8.0 +- LLamaSharp 0.8.1 +- LLamaSharp.Backend.Cuda12 0.8.1 +- Microsoft.Extensions.* various libraries + +## Installation + +Include the PalmHill.Llama library in your .NET project using the provided `.csproj` file. NuGet coming soon. + +## Usage + +### Llama Extensions + +Utilize the `LlamaExtensions` class to add enhanced capabilities to your LLama context. + +### Thread Management + +Use the `ThreadLock` class to manage concurrency in your applications, ensuring safe and efficient processing. + +### Model Injection + +Create instances of `InjectedModel` to work with pre-trained LLama models. Configure using `ModelConfig`. + +### Configuration + +Configure your models and operational parameters with the `ModelConfig` class, allowing for flexible and robust application setups. -The [`LoadChatHistory`](PalmHill.Llama/LlamaExtensions.cs#L11-L45) method is an extension method for the `ChatSession` class. It takes a `ChatConversation` object as an argument and loads the chat history into the `ChatSession` object. It iterates over the `ChatMessages` in the `ChatConversation` and adds them to the `ChatSession`'s history. If a message is from the user, it is added as a `User` message, and if it is from the assistant, it is added as an `Assistant` message. -The [`CreateChatSession`](PalmHill.Llama/LlamaExtensions.cs#L48-L69) method is an extension method for the `LLamaContext` class. It takes a `ChatConversation` object as an argument and creates a new `ChatSession` object. It also sets up an output transform to ignore certain special tokens and loads the chat history into the session. It then checks if the last message in the conversation is a user message and if it is not empty. -The [`GetInferenceParams`](PalmHill.Llama/LlamaExtensions.cs#L72-L82) method is an extension method for the `ChatConversation` class. It extracts the inference parameters from the `ChatConversation`'s settings and returns them as an `InferenceParams` object. 
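As a minimal registration sketch (assuming an ASP.NET Core host; `AddLlamaModel` falls back to the `InferenceModelConfig` section of appsettings.json when no `ModelConfig` is passed, and `AddLlmMemory` does the same with `EmbeddingModelConfig`):

```csharp
var builder = WebApplication.CreateBuilder(args);

// Reads the InferenceModelConfig section from appsettings.json and
// registers an InjectedModel singleton for controllers and hubs to consume.
builder.AddLlamaModel();

// Or supply an explicit ModelConfig (illustrative values, not defaults):
// builder.AddLlamaModel(new ModelConfig
// {
//     ModelPath = "C:\\models\\orca-2-13b.Q6_K.gguf",
//     GpuLayerCount = 100,
//     ContextSize = 4096,
//     AntiPrompts = ["User:"]
// });

// Registers ServerlessLlmMemory for retrieval augmented generation.
builder.AddLlmMemory();
```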
\ No newline at end of file
diff --git a/PalmHill.BlazorChat/Server/ThreadLock.cs b/PalmHill.Llama/ThreadLock.cs
similarity index 76%
rename from PalmHill.BlazorChat/Server/ThreadLock.cs
rename to PalmHill.Llama/ThreadLock.cs
index 15e22b9..a6e6783 100644
--- a/PalmHill.BlazorChat/Server/ThreadLock.cs
+++ b/PalmHill.Llama/ThreadLock.cs
@@ -1,7 +1,8 @@
-namespace PalmHill.BlazorChat.Server
+namespace PalmHill.Llama
{
public static class ThreadLock
{
public static SemaphoreSlim InferenceLock = new SemaphoreSlim(1, 1);
+
}
}
diff --git a/PalmHill.LlmMemory/ConversationMemoryExtensions.cs b/PalmHill.LlmMemory/ConversationMemoryExtensions.cs
new file mode 100644
index 0000000..33b03d5
--- /dev/null
+++ b/PalmHill.LlmMemory/ConversationMemoryExtensions.cs
@@ -0,0 +1,62 @@
+using LLama.Common;
+using LLama;
+using Microsoft.Extensions.Configuration;
+using Microsoft.Extensions.DependencyInjection;
+using Microsoft.Extensions.Hosting;
+using PalmHill.Llama.Models;
+using PalmHill.Llama;
+using System;
+using System.Collections.Generic;
+using System.Linq;
+using System.Text;
+using System.Threading.Tasks;
+using LLamaSharp.KernelMemory;
+using Microsoft.KernelMemory;
+
+namespace PalmHill.LlmMemory
+{
+ public static class ConversationMemoryExtensions
+ {
+
+ public static ServerlessLlmMemory AddLlmMemory(this IHostApplicationBuilder builder, ModelConfig? modelConfig = null)
+ {
+ var defaultModelConfigSection = "EmbeddingModelConfig";
+
+ if (modelConfig == null)
+ {
+ //Attempt to get the model config from app configuration.
+ modelConfig = builder.GetModelConfigFromConfigSection(defaultModelConfigSection);
+ }
+
+ if (modelConfig == null)
+ {
+ throw new ArgumentNullException(nameof(modelConfig), $"The argument {nameof(modelConfig)} must be supplied if there is no {defaultModelConfigSection} section in the app configuration.");
+ }
+
+ //check if model is present
+ var modelExists = System.IO.File.Exists(modelConfig.ModelPath);
+ if (!modelExists)
+ {
+ throw new FileNotFoundException("Model file does not exist.", modelConfig.ModelPath);
+ }
+
+ var memoryModelConfig = new LLamaSharpConfig(modelConfig.ModelPath);
+ memoryModelConfig.DefaultInferenceParams = new LLama.Common.InferenceParams();
+ memoryModelConfig.DefaultInferenceParams.AntiPrompts = modelConfig.AntiPrompts;
+ memoryModelConfig.ContextSize = modelConfig.ContextSize;
+ memoryModelConfig.GpuLayerCount = modelConfig.GpuLayerCount;
+
+ var memory = new KernelMemoryBuilder()
+ .WithLLamaSharpDefaults(memoryModelConfig)
+ .Build();
+
+
+ var llmMemory = new ServerlessLlmMemory(memory);
+ builder.Services.AddSingleton(llmMemory);
+
+ return llmMemory;
+ }
+
+
+ }
+}
diff --git a/PalmHill.LlmMemory/PalmHill.LlmMemory.csproj b/PalmHill.LlmMemory/PalmHill.LlmMemory.csproj
new file mode 100644
index 0000000..c024486
--- /dev/null
+++ b/PalmHill.LlmMemory/PalmHill.LlmMemory.csproj
@@ -0,0 +1,20 @@
+
+
+
+ net8.0
+ enable
+ enable
+
+
+
+
+
+
+
+
+
+
+
+
+
diff --git a/PalmHill.LlmMemory/README.md b/PalmHill.LlmMemory/README.md
new file mode 100644
index 0000000..6b78671
--- /dev/null
+++ b/PalmHill.LlmMemory/README.md
@@ -0,0 +1,10 @@
+# PalmHill.LlmMemory Project
+
+## Introduction
+
+The PalmHill.LlmMemory project is an innovative C# application designed to enhance Large Language Models (LLMs) through Retrieval Augmented Generation (RAG). This approach integrates advanced memory management and data retrieval techniques to support serverless architectures and kernel memory operations.
The project consists of: + +- **ServerlessLlmMemory.cs**: This pivotal class manages the memory and retrieval processes. + +The PalmHill.LlmMemory project aims to provide robust, scalable solutions for enhancing the capabilities of LLMs, making them more efficient in retrieving and generating contextually rich content. + diff --git a/PalmHill.LlmMemory/ServerlessLlmMemory.cs b/PalmHill.LlmMemory/ServerlessLlmMemory.cs new file mode 100644 index 0000000..b8893f2 --- /dev/null +++ b/PalmHill.LlmMemory/ServerlessLlmMemory.cs @@ -0,0 +1,148 @@ +using Azure.Core; +using Microsoft.KernelMemory; +using PalmHill.BlazorChat.Shared.Models; +using PalmHill.Llama; +using System.Collections.Concurrent; +using System.Threading; + +namespace PalmHill.LlmMemory +{ + public class ServerlessLlmMemory + { + public ServerlessLlmMemory(IKernelMemory kernelMemory) + { + KernelMemory = kernelMemory; + } + + public IKernelMemory KernelMemory { get; } + + + public ConcurrentDictionary AttachmentInfos { get; } = new ConcurrentDictionary(); + + public async Task ImportDocumentAsync( + AttachmentInfo attachmentInfo, + TagCollection? tagCollection = null + ) + { + if (attachmentInfo.FileBytes == null) + { + throw new InvalidOperationException("FileBytes is null"); + } + + if (!AttachmentInfos.TryAdd(attachmentInfo.Id, attachmentInfo)) + { + throw new Exception("Failed to add attachment to memory"); + }; + + attachmentInfo.Size = attachmentInfo.FileBytes.LongLength; + + await ThreadLock.InferenceLock.WaitAsync(); + + var stream = new MemoryStream(attachmentInfo.FileBytes); + var documentId = string.Empty; + try + { + documentId = await KernelMemory.ImportDocumentAsync(stream, + attachmentInfo.Name, + attachmentInfo.Id.ToString(), + tagCollection, + attachmentInfo.ConversationId.ToString()); + } + catch (Exception ex) + { + attachmentInfo.Status = AttachmentStatus.Failed; + Console.WriteLine(ex); + } + finally + { + ThreadLock.InferenceLock.Release(); + } + + + + if (documentId == null) + { + attachmentInfo.Status = AttachmentStatus.Failed; + } + + + while (attachmentInfo.Status == AttachmentStatus.Pending) + { + await UpdateAttachmentStatus(attachmentInfo); + + if ( + attachmentInfo.Status == AttachmentStatus.Uploaded + || + attachmentInfo.Status == AttachmentStatus.Failed + ) + { + break; + } + + System.Threading.Thread.Sleep(100); + } + + return attachmentInfo; + } + + public async Task UpdateAttachmentStatus(AttachmentInfo attachmentInfo) + { + var isDocReady = await KernelMemory.IsDocumentReadyAsync(attachmentInfo.Id.ToString(), attachmentInfo.ConversationId.ToString()); + + if (attachmentInfo != null && attachmentInfo?.Status != AttachmentStatus.Failed) + { + attachmentInfo!.Status = isDocReady ? AttachmentStatus.Uploaded : AttachmentStatus.Pending; + } + } + + public async Task DeleteDocument(Guid conversationId, Guid attachmentId) + { + await KernelMemory.DeleteDocumentAsync(attachmentId.ToString(), conversationId.ToString()); + var removed = AttachmentInfos.Remove(attachmentId, out _); + return removed; + } + + public async Task SearchAsync(string conversationId, string query) + { + var results = await KernelMemory.SearchAsync(query, conversationId); + + return results; + } + + public async Task Ask(string conversationId, string query, CancellationToken cancellationToken) + { + var processedQuery = processQuery(query); + Exception? 
diff --git a/README.md b/README.md
index 52944ca..8343bb8 100644
--- a/README.md
+++ b/README.md
@@ -46,6 +48,8 @@ PalmHill.BlazorChat offers a range of features to provide a seamless and interac
 
 - **Real-Time Chat**: Engage in real-time conversations with the help of SignalR, which ensures instant message delivery.
 
+- **Retrieval Augmented Generation for Uploaded Docs**: Chat about the content of uploaded documents. Using retrieval augmented generation, the chatbot can reference and incorporate specific information from those documents in its responses, providing a more personalized and context-aware interaction. This feature is an early version.
+
 - **Markdown Support**: The [`ModelMarkdown.razor`](PalmHill.BlazorChat/Client/Components/ModelMarkdown.razor#L1-L53) component allows for markdown formatting in chat messages, enhancing readability and user experience.
 
 - **Chat Settings**: Customize your chat experience with adjustable settings such as temperature, max length, top P, frequency penalty, and presence penalty, all managed by the [`ChatSettings.razor`](PalmHill.BlazorChat/Client/Components/ChatSettings.razor#L1-L37) component.
@@ -77,7 +79,8 @@ Before you begin, ensure you have the following installed on your system:
 
 ### Step 1: Download and Place the Language Model
 1. **Download the Language Model:** First, you'll need to download the appropriate Llama 2 language model for the application. Any GGUF/Llama 2 model should work and can be downloaded from Hugging Face. We recommend selecting a model that fits your VRAM and RAM from [this list](https://huggingface.co/TheBloke/Orca-2-13B-GGUF).
-   For testing [TheBloke/Orca-2-13B-GGUF](https://huggingface.co/TheBloke/Orca-2-13B-GGUF/blob/main/orca-2-13b.Q6_K.gguf) was used and requires at least 13gb VRAM.
+   For testing chat, [TheBloke/Orca-2-13B-GGUF](https://huggingface.co/TheBloke/Orca-2-13B-GGUF/blob/main/orca-2-13b.Q6_K.gguf) was used; it requires at least 13 GB of VRAM.
+   For testing embeddings, [TheBloke/Mistral-7B-OpenOrca-GGUF](https://huggingface.co/TheBloke/Mistral-7B-OpenOrca-GGUF/blob/main/mistral-7b-openorca.Q4_K_M.gguf) was used; it requires at least 7 GB of VRAM.
 2. **Place the Model:** Once downloaded, place the model file in a designated directory on your system. Remember the path of this directory, as you'll need it for configuring the application.
@@ -99,6 +102,35 @@ Before you begin, ensure you have the following installed on your system:
    "ModelPath": "C:\\path\\to\\your\\model\\model-file-name"
    ```
+   Full example:
+   ```json
+   {
+     "Logging": {
+       "LogLevel": {
+         "Default": "Information",
+         "Microsoft.AspNetCore": "Warning"
+       }
+     },
+     "AllowedHosts": "*",
+     "InferenceModelConfig": {
+       "ModelPath": "C:\\models\\orca-2-13b.Q6_K.gguf",
+       "GpuLayerCount": 100,
+       "ContextSize": 4096,
+       "Gpu": 0,
+       "AntiPrompts": [ "User:" ]
+     },
+     "EmbeddingModelConfig": {
+       "ModelPath": "C:\\models\\mistral-7b-openorca.Q4_K_M.gguf",
+       "GpuLayerCount": 100,
+       "ContextSize": 2048,
+       "Gpu": 0,
+       "AntiPrompts": [ "Question:" ]
+     }
+   }
+   ```
+
 ### Step 5: Run the Application
 1. **Build the Solution:** Build the solution in Visual Studio to ensure all dependencies are correctly resolved.
 2. **Run the Application:** Click 'Play' in Visual Studio to start the application. This will launch the application in your default web browser.
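For context, a minimal sketch of how the `EmbeddingModelConfig` section above gets consumed at startup (a hypothetical `Program.cs`, not the project's actual one; the only assumed API beyond this diff is ASP.NET Core's `WebApplication`):

```csharp
using PalmHill.LlmMemory;

var builder = WebApplication.CreateBuilder(args);

// AddLlmMemory (ConversationMemoryExtensions.cs in this diff) reads the
// "EmbeddingModelConfig" section when no ModelConfig is passed explicitly,
// verifies the model file exists, and registers ServerlessLlmMemory as a singleton.
builder.AddLlmMemory();

var app = builder.Build();
app.Run();
```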