Logging improvements (use ILogger not Console). (#13)
Remove unnecessary project.

Co-authored-by: Edgett Hilimire <edgett@palmhillai.com>
edgett authored Jun 18, 2024
1 parent debe514 commit 270b7c1
Showing 9 changed files with 51 additions and 47 deletions.
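
Every file below follows the same pattern: ILogger<T> is constructor-injected by ASP.NET Core's DI container and replaces direct Console.WriteLine calls. A minimal sketch of that pattern (the class, method, and message names here are hypothetical, not taken from the commit):

```csharp
using System;
using Microsoft.Extensions.Logging;

// Hypothetical service showing the commit's pattern: take ILogger<T> in the
// constructor and log with a level (Warning/Error/Information) instead of
// writing to the console.
public class ExampleService
{
    private readonly ILogger<ExampleService> _logger;

    public ExampleService(ILogger<ExampleService> logger)
    {
        _logger = logger;
    }

    public void Run(Guid conversationId)
    {
        try
        {
            // ... do work ...
        }
        catch (OperationCanceledException)
        {
            // Before: Console.WriteLine($"Inference for {conversationId} was canceled.");
            _logger.LogWarning("Inference for {ConversationId} was canceled.", conversationId);
        }
        catch (Exception ex)
        {
            // Before: Console.WriteLine(ex);
            _logger.LogError(ex, "Inference failed for {ConversationId}.", conversationId);
        }
    }
}
```

The commit itself passes interpolated strings to the log methods; the message-template form in the sketch is the conventional alternative that keeps log data structured, and both compile against the same ILogger API.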
6 changes: 0 additions & 6 deletions PalmHill.BlazorChat.sln
@@ -18,8 +18,6 @@ Project("{9A19103F-16F7-4668-BE54-9A1E7A4F7556}") = "PalmHill.BlazorChat.Shared"
EndProject
Project("{9A19103F-16F7-4668-BE54-9A1E7A4F7556}") = "PalmHill.BlazorChat.ApiClient", "PalmHill.BlazorChat.ApiClient\PalmHill.BlazorChat.ApiClient.csproj", "{434FEDF0-2AD0-4276-AC06-E26126EEF237}"
EndProject
Project("{9A19103F-16F7-4668-BE54-9A1E7A4F7556}") = "PalmHill.Llama.Kernel", "PalmHill.LlamaKernel\PalmHill.Llama.Kernel.csproj", "{D92943B2-5585-432B-BFEC-ECC13B5A82D1}"
EndProject
Global
GlobalSection(SolutionConfigurationPlatforms) = preSolution
Debug|Any CPU = Debug|Any CPU
@@ -46,10 +44,6 @@ Global
{434FEDF0-2AD0-4276-AC06-E26126EEF237}.Debug|Any CPU.Build.0 = Debug|Any CPU
{434FEDF0-2AD0-4276-AC06-E26126EEF237}.Release|Any CPU.ActiveCfg = Release|Any CPU
{434FEDF0-2AD0-4276-AC06-E26126EEF237}.Release|Any CPU.Build.0 = Release|Any CPU
- {D92943B2-5585-432B-BFEC-ECC13B5A82D1}.Debug|Any CPU.ActiveCfg = Debug|Any CPU
- {D92943B2-5585-432B-BFEC-ECC13B5A82D1}.Debug|Any CPU.Build.0 = Debug|Any CPU
- {D92943B2-5585-432B-BFEC-ECC13B5A82D1}.Release|Any CPU.ActiveCfg = Release|Any CPU
- {D92943B2-5585-432B-BFEC-ECC13B5A82D1}.Release|Any CPU.Build.0 = Release|Any CPU
EndGlobalSection
GlobalSection(SolutionProperties) = preSolution
HideSolutionNode = FALSE
8 changes: 6 additions & 2 deletions PalmHill.BlazorChat/Client/Services/ChatService.cs
@@ -4,6 +4,7 @@
using PalmHill.BlazorChat.Client.Components.Settings;
using PalmHill.BlazorChat.Client.Models;
using PalmHill.BlazorChat.Shared.Models;
+ using System.Reflection.Emit;

namespace PalmHill.BlazorChat.Client.Services
{
@@ -25,14 +26,16 @@ public ChatService(
NavigationManager navigationManager,
LocalStorageService localStorage,
IDialogService dialogService,
- BlazorChatApi blazorChatApi
+ BlazorChatApi blazorChatApi,
+ ILogger<ChatService> logger
)
{

_localStorageService = localStorage;
_dialogService = dialogService;
_blazorChatApi = blazorChatApi;
_navigationManager = navigationManager;
+ _logger = logger;
setupAttachmentService();
setupWebSocketChatConnection();
}
@@ -84,6 +87,7 @@ BlazorChatApi blazorChatApi
private readonly IDialogService _dialogService;
private readonly BlazorChatApi _blazorChatApi;
private readonly NavigationManager _navigationManager;
+ private readonly ILogger<ChatService> _logger;


/// <summary>
@@ -242,7 +246,7 @@ public async Task CancelTextGeneration()
SetReady();
}

Console.WriteLine($"CancelTextGeneration failed ({canceled.StatusCode}): {canceled.ReasonPhrase}");
_logger.LogWarning($"Text generation for ConversationId {ConversationId} canceled via API: ({canceled.StatusCode}): {canceled.ReasonPhrase}");
}

/// <summary>
2 changes: 1 addition & 1 deletion PalmHill.BlazorChat/Client/Services/LocalStorageService.cs
@@ -63,7 +63,7 @@ private async Task<LocalStorageSettings> _getMigratedSettings()
{
var localStorageSettings = await _localStorage.GetItemAsync<LocalStorageSettings>("LocalStorageSettings");

- if (localStorageSettings.SettingsVersion == LocalStorageSettings.CurrentSettingsVersion)
+ if (localStorageSettings?.SettingsVersion == LocalStorageSettings.CurrentSettingsVersion)
{
return localStorageSettings;
}
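
The one-character change above deserves a note: on a first visit nothing has been stored yet, so the deserialized settings object can be null and the old code dereferenced it directly. A hypothetical reduction of the method, assuming GetItemAsync<T> returns null for an absent key:

```csharp
// Hypothetical reduction of _getMigratedSettings(); assumes GetItemAsync<T>
// returns null when "LocalStorageSettings" has never been written.
private async Task<LocalStorageSettings> _getMigratedSettings()
{
    var settings = await _localStorage.GetItemAsync<LocalStorageSettings>("LocalStorageSettings");

    // Null-conditional access: a null result makes the comparison false, so
    // execution falls through instead of throwing a NullReferenceException.
    if (settings?.SettingsVersion == LocalStorageSettings.CurrentSettingsVersion)
    {
        return settings;
    }

    // Fall back to defaults (the real method also migrates older versions).
    return new LocalStorageSettings();
}
```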
1 change: 0 additions & 1 deletion PalmHill.BlazorChat/Server/PalmHill.BlazorChat.Server.csproj
@@ -12,7 +12,6 @@
</ItemGroup>

<ItemGroup>
<ProjectReference Include="..\..\PalmHill.LlamaKernel\PalmHill.Llama.Kernel.csproj" />
<ProjectReference Include="..\..\PalmHill.Llama\PalmHill.Llama.csproj" />
<ProjectReference Include="..\Client\PalmHill.BlazorChat.Client.csproj" />
<ProjectReference Include="..\Shared\PalmHill.BlazorChat.Shared.csproj" />
25 changes: 18 additions & 7 deletions PalmHill.BlazorChat/Server/SignalR/WebSocketChat.cs
@@ -2,6 +2,7 @@
using LLama;
using Microsoft.AspNetCore.SignalR;
using Microsoft.SemanticKernel.ChatCompletion;
+ using PalmHill.BlazorChat.Server.WebApi;
using PalmHill.BlazorChat.Shared.Models;
using PalmHill.BlazorChat.Shared.Models.WebSocket;
using PalmHill.Llama;
@@ -16,15 +17,16 @@ namespace PalmHill.BlazorChat.Server.SignalR
/// </summary>
public class WebSocketChat : Hub
{
- public WebSocketChat(LlamaKernel llamaKernel)
+ public WebSocketChat(LlamaKernel llamaKernel, ILogger<WebSocketChat> logger)
{
LlamaKernel = llamaKernel;
ChatCompletion = llamaKernel.Kernel.Services.GetService<IChatCompletionService>();
+ _logger = logger;
}

public LlamaKernel LlamaKernel { get; }
public IChatCompletionService? ChatCompletion { get; }

+ private ILogger<WebSocketChat> _logger { get; }

/// <summary>
/// Sends a chat prompt to the client and waits for a response. The method performs inference on the chat conversation and sends the result back to the client.
@@ -59,11 +61,11 @@ public async Task InferenceRequest(InferenceRequest chatConversation)
inferenceStatusUpdate.Success = false;
await Clients.Caller.SendAsync("InferenceStatusUpdate", inferenceStatusUpdate);
// Handle the cancellation operation
Console.WriteLine($"Inference for {conversationId} was canceled.");
_logger.LogWarning($"Text generation for {conversationId} was canceled via WebSockets.");
}
catch (Exception ex)
{
- Console.WriteLine(ex);
+ _logger.LogError(ex, $"WebSocket text generation failed for ConversationId: {conversationId}");
}
finally
{
@@ -96,7 +98,16 @@ private async Task DoInferenceAndRespondToClient(ISingleClientProxy respondToCli


inferenceStopwatch.Start();
- var asyncResponse = ChatCompletion.GetStreamingChatMessageContentsAsync(chatHistory, inferenceParams, cancellationToken: cancellationToken);
+ var asyncResponse = ChatCompletion?.GetStreamingChatMessageContentsAsync(chatHistory, inferenceParams, cancellationToken: cancellationToken);

+ if (asyncResponse == null)
+ {
+     _logger.LogError($"{nameof(IChatCompletionService)} not implemented.");
+     await respondToClient.SendAsync("ReceiveInferenceString", $"Error: {nameof(IChatCompletionService)} not implemented.");
+     return;
+ }


// Perform inference and send the response to the client
await foreach (var text in asyncResponse)
{
@@ -131,8 +142,8 @@ private async Task DoInferenceAndRespondToClient(ISingleClientProxy respondToCli
await respondToClient.SendAsync("ReceiveInferenceString", chatConversation.Id, textBuffer);
}

Console.WriteLine($"Inference took {inferenceStopwatch.ElapsedMilliseconds}ms and generated {totalTokens} tokens. {(totalTokens / (inferenceStopwatch.ElapsedMilliseconds / (float)1000)).ToString("F2")} tokens/second.");
Console.WriteLine(fullResponse);
_logger.LogInformation($"Inference took {inferenceStopwatch.ElapsedMilliseconds}ms and generated {totalTokens} tokens. {(totalTokens / (inferenceStopwatch.ElapsedMilliseconds / (float)1000)).ToString("F2")} tokens/second.");
_logger.LogInformation(fullResponse);
}

/// <summary>
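
Both the hub above and the ApiChatController below resolve IChatCompletionService through Kernel.Services.GetService<T>(), which returns null rather than throwing when the service was never registered, so the property is nullable and the stream must be guarded before enumeration. Condensed from the diff above (variable scoping abridged):

```csharp
// Condensed from WebSocketChat.DoInferenceAndRespondToClient above.
// GetService<T>() yields null for an unregistered service; the ?. call then
// produces a null stream, so the guard must run before `await foreach`.
var chatCompletion = llamaKernel.Kernel.Services.GetService<IChatCompletionService>();
var asyncResponse = chatCompletion?.GetStreamingChatMessageContentsAsync(
    chatHistory, inferenceParams, cancellationToken: cancellationToken);

if (asyncResponse == null)
{
    // Report and bail out; ApiChatController.DoInference throws an
    // InvalidOperationException at the equivalent point instead.
    _logger.LogError($"{nameof(IChatCompletionService)} not implemented.");
    return;
}

await foreach (var text in asyncResponse)
{
    // stream each generated chunk back to the client
}
```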
20 changes: 14 additions & 6 deletions PalmHill.BlazorChat/Server/WebApi/ApiChatController.cs
@@ -29,19 +29,22 @@ public class ApiChatController : ControllerBase
/// <param name="modelParams">The model parameters.</param>
public ApiChatController(
IHubContext<WebSocketChat> webSocketChat,
- LlamaKernel llamaKernel
+ LlamaKernel llamaKernel,
+ ILogger<ApiChatController> logger
)
{
WebSocketChat = webSocketChat;
LlamaKernel = llamaKernel;
LlmMemory = llamaKernel.Kernel.Services.GetService<ServerlessLlmMemory>();
ChatCompletion = llamaKernel.Kernel.Services.GetService<IChatCompletionService>();
+ _logger = logger;
}

private IHubContext<WebSocketChat> WebSocketChat { get; }
public LlamaKernel LlamaKernel { get; }
public ServerlessLlmMemory? LlmMemory { get; }
public IChatCompletionService? ChatCompletion { get; }
+ private ILogger<ApiChatController> _logger { get; }

/// <summary>
/// Handles a chat API request.
@@ -67,7 +70,7 @@ public async Task<ActionResult<string>> Chat([FromBody] InferenceRequest convers
catch (OperationCanceledException)
{
errorText = $"Inference for {conversationId} was canceled.";
- Console.WriteLine(errorText);
+ _logger.LogWarning(errorText);
return StatusCode(444, errorText);
}
catch (Exception ex)
@@ -80,7 +83,7 @@ public async Task<ActionResult<string>> Chat([FromBody] InferenceRequest convers
ChatCancelation.CancelationTokens.TryRemove(conversationId, out _);
}

- Console.WriteLine(errorText);
+ _logger.LogError(errorText);
return StatusCode(500, errorText);
}

@@ -169,10 +172,15 @@ private async Task<string> DoInference(InferenceRequest conversation, Cancellati
var chatExecutionSettings = conversation.GetPromptExecutionSettings();

inferenceStopwatch.Start();
- var asyncResponse = ChatCompletion.GetStreamingChatMessageContentsAsync(chatSession,
+ var asyncResponse = ChatCompletion?.GetStreamingChatMessageContentsAsync(chatSession,
chatExecutionSettings,
cancellationToken: cancellationToken);

+ if (asyncResponse == null)
+ {
+     _logger.LogError($"{nameof(IChatCompletionService)} not implemented.");
+     throw new InvalidOperationException($"{nameof(IChatCompletionService)} not implemented.");
+ }

await foreach (var text in asyncResponse)
{
Expand All @@ -181,8 +189,8 @@ private async Task<string> DoInference(InferenceRequest conversation, Cancellati
}
inferenceStopwatch.Stop();
var fullResponseString = fullResponse.ToString();
Console.WriteLine($"Inference took {inferenceStopwatch.ElapsedMilliseconds}ms and generated {totalTokens} tokens. {(totalTokens / (inferenceStopwatch.ElapsedMilliseconds / (float)1000)).ToString("F2")} tokens/second.");
Console.WriteLine(fullResponseString);
_logger.LogInformation($"Inference took {inferenceStopwatch.ElapsedMilliseconds}ms and generated {totalTokens} tokens. {(totalTokens / (inferenceStopwatch.ElapsedMilliseconds / (float)1000)).ToString("F2")} tokens/second.");
_logger.LogInformation(fullResponseString);

return fullResponseString;
}
7 changes: 6 additions & 1 deletion PalmHill.BlazorChat/Server/WebApi/AttachmentController.cs
@@ -15,16 +15,20 @@ public class AttachmentController : ControllerBase
{
private ServerlessLlmMemory LlmMemory { get; }
private IHubContext<WebSocketChat> WebSocketChat { get; }
+ private ILogger<AttachmentController> _logger { get; }


public AttachmentController(
LlamaKernel llamaKernel,
- IHubContext<WebSocketChat> webSocketChat
+ IHubContext<WebSocketChat> webSocketChat,
+ ILogger<AttachmentController> logger
)
{
LlmMemory = llamaKernel.Kernel.Services
.GetService<ServerlessLlmMemory>()
?? throw new InvalidOperationException($"{nameof(ServerlessLlmMemory)} not loaded.");
WebSocketChat = webSocketChat;
+ _logger = logger;
}


@@ -104,6 +108,7 @@ private async Task DoImportAsync(string? userId, AttachmentInfo attachmentInfo)
}
catch (Exception ex)
{
+ _logger.LogError(ex, "Error importing attachment.");
attachmentInfo.Status = AttachmentStatus.Failed;
await WebSocketChat.Clients.User(userId!).SendCoreAsync("AttachmentStatusUpdate", [attachmentInfo]);
}
9 changes: 6 additions & 3 deletions PalmHill.Llama/ServerlessLlmMemory.cs
@@ -1,4 +1,5 @@
- using Microsoft.KernelMemory;
+ using Microsoft.Extensions.Logging;
+ using Microsoft.KernelMemory;
using PalmHill.BlazorChat.Shared.Models;
using System;
using System.Collections.Concurrent;
@@ -11,13 +12,15 @@ namespace PalmHill.Llama
{
public class ServerlessLlmMemory
{
- public ServerlessLlmMemory(IKernelMemory kernelMemory)
+ public ServerlessLlmMemory(IKernelMemory kernelMemory, ILogger<ServerlessLlmMemory> logger)
{
KernelMemory = kernelMemory;
+ _logger = logger;
}

public IKernelMemory KernelMemory { get; }

+ private readonly ILogger<ServerlessLlmMemory> _logger;

public ConcurrentDictionary<Guid, AttachmentInfo> AttachmentInfos { get; } = new ConcurrentDictionary<Guid, AttachmentInfo>();

@@ -53,7 +56,7 @@ public async Task<AttachmentInfo> ImportDocumentAsync(
catch (Exception ex)
{
attachmentInfo.Status = AttachmentStatus.Failed;
- Console.WriteLine(ex);
+ _logger.LogError(ex, "Error importing attachment.");
}
finally
{
20 changes: 0 additions & 20 deletions PalmHill.LlamaKernel/PalmHill.Llama.Kernel.csproj

This file was deleted.
