diff --git a/LLama.SemanticKernel/LLamaSharp.SemanticKernel.csproj b/LLama.SemanticKernel/LLamaSharp.SemanticKernel.csproj
index e82e8513..8de65692 100644
--- a/LLama.SemanticKernel/LLamaSharp.SemanticKernel.csproj
+++ b/LLama.SemanticKernel/LLamaSharp.SemanticKernel.csproj
@@ -36,6 +36,10 @@
+
+
+
+
diff --git a/LLama/Common/Logger.cs b/LLama/Common/Logger.cs
deleted file mode 100644
index edff64f9..00000000
--- a/LLama/Common/Logger.cs
+++ /dev/null
@@ -1,270 +0,0 @@
-using LLama.Native;
-using System;
-using System.Diagnostics;
-using System.IO;
-using static LLama.Common.ILLamaLogger;
-
-namespace LLama.Common;
-
-/// <summary>
-/// receives log messages from LLamaSharp
-/// </summary>
-public interface ILLamaLogger
-{
- /// <summary>
- /// Severity level of a log message
- /// </summary>
- public enum LogLevel
- {
- /// <summary>
- /// Logs that are used for interactive investigation during development.
- /// </summary>
- Debug = 1,
-
- /// <summary>
- /// Logs that highlight when the current flow of execution is stopped due to a failure.
- /// </summary>
- Error = 2,
-
- /// <summary>
- /// Logs that highlight an abnormal or unexpected event in the application flow, but do not otherwise cause the application execution to stop.
- /// </summary>
- Warning = 3,
-
- /// <summary>
- /// Logs that track the general flow of the application.
- /// </summary>
- Info = 4
- }
-
- /// <summary>
- /// Write the log in a customized way
- /// </summary>
- /// <param name="source">The source of the log. It may be a method name or class name.</param>
- /// <param name="message">The message.</param>
- /// <param name="level">The log level.</param>
- void Log(string source, string message, LogLevel level);
-}
-
-/// <summary>
-/// The default logger of LLamaSharp. By default it writes to the console. Use the methods of `LLamaDefaultLogger.Default` to change the behavior.
-/// It's recommended to implement `ILLamaLogger` to customize the behavior.
-/// </summary>
-public sealed class LLamaDefaultLogger
- : ILLamaLogger
-{
- private static readonly Lazy<LLamaDefaultLogger> _instance = new Lazy<LLamaDefaultLogger>(() => new LLamaDefaultLogger());
-
- private bool _toConsole = true;
- private bool _toFile;
-
- private FileStream? _fileStream;
- private StreamWriter? _fileWriter;
-
- /// <summary>
- /// Get the default logger instance
- /// </summary>
- public static LLamaDefaultLogger Default => _instance.Value;
-
- private LLamaDefaultLogger()
- {
-
- }
-
- /// <summary>
- /// Enable logging output from llama.cpp
- /// </summary>
- /// <returns></returns>
- public LLamaDefaultLogger EnableNative()
- {
- EnableNativeLogCallback();
- return this;
- }
-
- /// <summary>
- /// Enable writing log messages to console
- /// </summary>
- /// <returns></returns>
- public LLamaDefaultLogger EnableConsole()
- {
- _toConsole = true;
- return this;
- }
-
- /// <summary>
- /// Disable writing messages to console
- /// </summary>
- /// <returns></returns>
- public LLamaDefaultLogger DisableConsole()
- {
- _toConsole = false;
- return this;
- }
-
- /// <summary>
- /// Enable writing log messages to file
- /// </summary>
- /// <param name="filename"></param>
- /// <param name="mode"></param>
- /// <returns></returns>
- public LLamaDefaultLogger EnableFile(string filename, FileMode mode = FileMode.Append)
- {
- _fileStream = new FileStream(filename, mode, FileAccess.Write);
- _fileWriter = new StreamWriter(_fileStream);
- _toFile = true;
- return this;
- }
-
- /// <summary>
- /// Disable writing log messages to file
- /// </summary>
- /// <param name="filename">unused!</param>
- /// <returns></returns>
- [Obsolete("Use DisableFile method without 'filename' parameter")]
- public LLamaDefaultLogger DisableFile(string filename)
- {
- return DisableFile();
- }
-
- /// <summary>
- /// Disable writing log messages to file
- /// </summary>
- /// <returns></returns>
- public LLamaDefaultLogger DisableFile()
- {
- if (_fileWriter is not null)
- {
- _fileWriter.Close();
- _fileWriter = null;
- }
- if (_fileStream is not null)
- {
- _fileStream.Close();
- _fileStream = null;
- }
- _toFile = false;
- return this;
- }
-
- /// <summary>
- /// Log a message
- /// </summary>
- /// <param name="source">The source of this message (e.g. class name)</param>
- /// <param name="message">The message to log</param>
- /// <param name="level">Severity level of this message</param>
- public void Log(string source, string message, LogLevel level)
- {
- if (level == LogLevel.Info)
- {
- Info(message);
- }
- else if (level == LogLevel.Debug)
- {
-
- }
- else if (level == LogLevel.Warning)
- {
- Warn(message);
- }
- else if (level == LogLevel.Error)
- {
- Error(message);
- }
- }
-
- /// <summary>
- /// Write a log message with "Info" severity
- /// </summary>
- /// <param name="message"></param>
- public void Info(string message)
- {
- message = MessageFormat("info", message);
- if (_toConsole)
- {
- Console.ForegroundColor = ConsoleColor.White;
- Console.WriteLine(message);
- Console.ResetColor();
- }
- if (_toFile)
- {
- Debug.Assert(_fileStream is not null);
- Debug.Assert(_fileWriter is not null);
- _fileWriter.WriteLine(message);
- }
- }
-
- /// <summary>
- /// Write a log message with "Warn" severity
- /// </summary>
- /// <param name="message"></param>
- public void Warn(string message)
- {
- message = MessageFormat("warn", message);
- if (_toConsole)
- {
- Console.ForegroundColor = ConsoleColor.Yellow;
- Console.WriteLine(message);
- Console.ResetColor();
- }
- if (_toFile)
- {
- Debug.Assert(_fileStream is not null);
- Debug.Assert(_fileWriter is not null);
- _fileWriter.WriteLine(message);
- }
- }
-
- /// <summary>
- /// Write a log message with "Error" severity
- /// </summary>
- /// <param name="message"></param>
- public void Error(string message)
- {
- message = MessageFormat("error", message);
- if (_toConsole)
- {
- Console.ForegroundColor = ConsoleColor.Red;
- Console.WriteLine(message);
- Console.ResetColor();
- }
- if (_toFile)
- {
- Debug.Assert(_fileStream is not null);
- Debug.Assert(_fileWriter is not null);
- _fileWriter.WriteLine(message);
- }
- }
-
- private static string MessageFormat(string level, string message)
- {
- var now = DateTime.Now;
- return $"[{now:yyyy.MM.dd HH:mm:ss}][{level}]: {message}";
- }
-
- /// <summary>
- /// Register native logging callback
- /// </summary>
- private void EnableNativeLogCallback()
- {
- // TODO: Move to a more appropriate place once we have an initialize method
- NativeApi.llama_log_set(NativeLogCallback);
- }
-
- /// <summary>
- /// Callback for native logging function
- /// </summary>
- /// <param name="level">The log level</param>
- /// <param name="message">The log message</param>
- private void NativeLogCallback(LogLevel level, string message)
- {
- if (string.IsNullOrEmpty(message))
- return;
-
- // Note that text includes the new line character at the end for most events.
- // If your logging mechanism cannot handle that, check if the last character is '\n' and strip it
- // if it exists.
- // It might not exist for progress report where '.' is output repeatedly.
- Log(default!, message.TrimEnd('\n'), level);
- }
-
-}
\ No newline at end of file
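
With LLamaDefaultLogger removed, callers that previously used LLamaDefaultLogger.Default.EnableConsole() now supply a standard Microsoft.Extensions.Logging.ILogger to LLamaSharp types. A minimal sketch of building one, assuming the Microsoft.Extensions.Logging.Console provider package is referenced (AddConsole comes from there); the "LLamaSharp" category name is arbitrary:

    using Microsoft.Extensions.Logging;

    // Build a console-backed ILogger to hand to LLamaContext / executors.
    // AddConsole() comes from the Microsoft.Extensions.Logging.Console package.
    using var loggerFactory = LoggerFactory.Create(builder =>
        builder.AddConsole().SetMinimumLevel(LogLevel.Information));

    ILogger logger = loggerFactory.CreateLogger("LLamaSharp");
    logger.LogInformation("Logging is now routed through Microsoft.Extensions.Logging.");

Any ILogger implementation (a Serilog or NLog adapter, a test logger) can be substituted here instead of the console provider.
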
diff --git a/LLama/LLamaContext.cs b/LLama/LLamaContext.cs
index 0cb77f60..1a1845b0 100644
--- a/LLama/LLamaContext.cs
+++ b/LLama/LLamaContext.cs
@@ -10,6 +10,7 @@ using LLama.Common;
using System.Runtime.InteropServices;
using LLama.Extensions;
using LLama.Abstractions;
+using Microsoft.Extensions.Logging;
namespace LLama
{
@@ -21,7 +22,7 @@ namespace LLama
public sealed class LLamaContext
: IDisposable
{
- private readonly ILLamaLogger? _logger;
+ private readonly ILogger? _logger;
private readonly Encoding _encoding;
private readonly SafeLLamaContextHandle _ctx;
@@ -62,18 +63,18 @@ namespace LLama
/// <param name="params">Model params.</param>
/// <param name="logger">The logger.</param>
[Obsolete("Use the LLamaWeights.CreateContext instead")]
- public LLamaContext(IModelParams @params, ILLamaLogger? logger = null)
+ public LLamaContext(IModelParams @params, ILogger? logger = null)
{
Params = @params;
_logger = logger;
_encoding = @params.Encoding;
- _logger?.Log(nameof(LLamaContext), $"Initializing LLama model with params: {this.Params}", ILLamaLogger.LogLevel.Info);
+ _logger?.LogInformation($"[LLamaContext] Initializing LLama model with params: {this.Params}");
_ctx = Utils.InitLLamaContextFromModelParams(Params);
}
- internal LLamaContext(SafeLLamaContextHandle nativeContext, IModelParams @params, ILLamaLogger? logger = null)
+ internal LLamaContext(SafeLLamaContextHandle nativeContext, IModelParams @params, ILogger? logger = null)
{
Params = @params;
@@ -89,7 +90,7 @@ namespace LLama
/// <param name="model"></param>
/// <param name="params"></param>
/// <param name="logger"></param>
- public LLamaContext(LLamaWeights model, IModelParams @params, ILLamaLogger? logger = null)
+ public LLamaContext(LLamaWeights model, IModelParams @params, ILogger? logger = null)
{
if (model.NativeHandle.IsClosed)
throw new ObjectDisposedException("Cannot create context, model weights have been disposed");
@@ -471,7 +472,7 @@ namespace LLama
if (!_ctx.Eval(tokens.Slice(i, n_eval), pastTokensCount, Params.Threads))
{
- _logger?.Log(nameof(LLamaContext), "Failed to eval.", ILLamaLogger.LogLevel.Error);
+ _logger?.LogError($"[LLamaContext] Failed to eval.");
throw new RuntimeError("Failed to eval.");
}
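
For reference, constructing a context against the new signature looks like the sketch below. ModelParams, LLamaWeights.LoadFromFile and the model path are assumptions not shown in this hunk, and LLamaWeights.CreateContext (named in the Obsolete message above) remains the preferred route; AddConsole again assumes the console logging provider package.

    using LLama;
    using LLama.Common;
    using Microsoft.Extensions.Logging;

    // Assumed setup: a ModelParams pointing at a local model file.
    var parameters = new ModelParams("path/to/model.bin");

    using var loggerFactory = LoggerFactory.Create(b => b.AddConsole());
    var logger = loggerFactory.CreateLogger<LLamaContext>();

    // The constructor changed in this hunk takes an optional ILogger.
    using var weights = LLamaWeights.LoadFromFile(parameters);
    using var context = new LLamaContext(weights, parameters, logger);
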
diff --git a/LLama/LLamaExecutorBase.cs b/LLama/LLamaExecutorBase.cs
index 73dd439c..0eefa98e 100644
--- a/LLama/LLamaExecutorBase.cs
+++ b/LLama/LLamaExecutorBase.cs
@@ -2,6 +2,7 @@
using LLama.Common;
using LLama.Exceptions;
using LLama.Native;
+using Microsoft.Extensions.Logging;
using System;
using System.Collections.Generic;
using System.IO;
@@ -21,7 +22,7 @@ namespace LLama
/// <summary>
/// The logger used by this executor.
/// </summary>
- protected ILLamaLogger? _logger;
+ protected ILogger? _logger;
/// <summary>
/// The tokens that were already processed by the model.
/// </summary>
@@ -73,7 +74,7 @@ namespace LLama
/// </summary>
/// <param name="context"></param>
/// <param name="logger"></param>
- protected StatefulExecutorBase(LLamaContext context, ILLamaLogger? logger = null)
+ protected StatefulExecutorBase(LLamaContext context, ILogger? logger = null)
{
Context = context;
_logger = logger;
@@ -99,20 +100,20 @@ namespace LLama
}
if (File.Exists(filename))
{
- _logger?.Log("LLamaExecutor", $"Attempting to load saved session from {filename}", ILLamaLogger.LogLevel.Info);
+ _logger?.LogInformation($"[LLamaExecutor] Attempting to load saved session from {filename}");
llama_token[] session_tokens = new llama_token[Context.ContextSize];
ulong n_token_count_out = 0;
if (!NativeApi.llama_load_session_file(Context.NativeHandle, _pathSession, session_tokens, (ulong)Context.ContextSize, &n_token_count_out))
{
- _logger?.Log("LLamaExecutor", $"Failed to load session file {filename}", ILLamaLogger.LogLevel.Error);
+ _logger?.LogError($"[LLamaExecutor] Failed to load session file {filename}");
throw new RuntimeError($"Failed to load session file {_pathSession}");
}
_session_tokens = session_tokens.Take((int)n_token_count_out).ToList();
- _logger?.Log("LLamaExecutor", $"Loaded a session with prompt size of {session_tokens.Length} tokens", ILLamaLogger.LogLevel.Info);
+ _logger?.LogInformation($"[LLamaExecutor] Loaded a session with prompt size of {session_tokens.Length} tokens");
}
else
{
- _logger?.Log("LLamaExecutor", $"Session file does not exist, will create", ILLamaLogger.LogLevel.Warning);
+ _logger?.LogWarning($"[LLamaExecutor] Session file does not exist, will create");
}
_n_matching_session_tokens = 0;
@@ -128,17 +129,15 @@ namespace LLama
}
if (_n_matching_session_tokens >= _embed_inps.Count)
{
- _logger?.Log("LLamaExecutor", $"Session file has exact match for prompt!", ILLamaLogger.LogLevel.Info);
+ _logger?.LogInformation("[LLamaExecutor] Session file has exact match for prompt!");
}
else if (_n_matching_session_tokens < _embed_inps.Count / 2)
{
- _logger?.Log("LLamaExecutor", $"session file has low similarity to prompt ({_n_matching_session_tokens}" +
- $" / {_embed_inps.Count} tokens); will mostly be reevaluated", ILLamaLogger.LogLevel.Warning);
+ _logger?.LogWarning($"[LLamaExecutor] Session file has low similarity to prompt ({_n_matching_session_tokens} / {_embed_inps.Count} tokens); will mostly be reevaluated");
}
else
{
- _logger?.Log("LLamaExecutor", $"Session file matches {_n_matching_session_tokens} / " +
- $"{_embed_inps.Count} tokens of prompt", ILLamaLogger.LogLevel.Info);
+ _logger?.LogInformation($"[LLamaExecutor] Session file matches {_n_matching_session_tokens} / {_embed_inps.Count} tokens of prompt");
}
}
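
A usage note on the converted calls: ILogger also accepts message templates, which defer formatting and give structured providers named properties. An illustrative helper (not part of this diff) mirroring the session messages above:

    using Microsoft.Extensions.Logging;

    static void LogSessionMatch(ILogger logger, string filename, int matching, int total)
    {
        // Message templates: {Filename}, {Matching} and {Total} become structured
        // properties instead of being baked into the string eagerly.
        logger.LogInformation("[LLamaExecutor] Attempting to load saved session from {Filename}", filename);

        if (matching < total / 2)
            logger.LogWarning("[LLamaExecutor] Session file has low similarity to prompt ({Matching} / {Total} tokens); will mostly be reevaluated", matching, total);
        else
            logger.LogInformation("[LLamaExecutor] Session file matches {Matching} / {Total} tokens of prompt", matching, total);
    }
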
diff --git a/LLama/LLamaSharp.csproj b/LLama/LLamaSharp.csproj
index 2120e319..d525202f 100644
--- a/LLama/LLamaSharp.csproj
+++ b/LLama/LLamaSharp.csproj
@@ -41,9 +41,13 @@
-
+
-
+
+
+
+
+
diff --git a/LLama/Native/LLamaLogLevel.cs b/LLama/Native/LLamaLogLevel.cs
new file mode 100644
index 00000000..0083efe1
--- /dev/null
+++ b/LLama/Native/LLamaLogLevel.cs
@@ -0,0 +1,28 @@
+namespace LLama.Native
+{
+ /// <summary>
+ /// Severity level of a log message
+ /// </summary>
+ public enum LLamaLogLevel
+ {
+ /// <summary>
+ /// Logs that are used for interactive investigation during development.
+ /// </summary>
+ Debug = 1,
+
+ /// <summary>
+ /// Logs that highlight when the current flow of execution is stopped due to a failure.
+ /// </summary>
+ Error = 2,
+
+ /// <summary>
+ /// Logs that highlight an abnormal or unexpected event in the application flow, but do not otherwise cause the application execution to stop.
+ /// </summary>
+ Warning = 3,
+
+ /// <summary>
+ /// Logs that track the general flow of the application.
+ /// </summary>
+ Info = 4
+ }
+}
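
Because native callbacks report LLamaLogLevel while the managed side now logs through Microsoft.Extensions.Logging (the NativeApi hunk below switches LLamaLogCallback to this enum), a small bridge can forward llama.cpp output to an ILogger. The sketch below is illustrative, not part of this diff: the ToLogLevel mapping is hypothetical, and it assumes the LLamaLogCallback delegate and NativeApi.llama_log_set (used by the deleted LLamaDefaultLogger above) are accessible at the call site.

    using LLama.Native;
    using Microsoft.Extensions.Logging;

    internal static class NativeLogBridge
    {
        // Keep a reference so the delegate is not garbage collected while
        // native code may still invoke it.
        private static LLamaLogCallback? _callback;

        // Hypothetical mapping from the native severity to the
        // Microsoft.Extensions.Logging scale.
        private static LogLevel ToLogLevel(LLamaLogLevel level) => level switch
        {
            LLamaLogLevel.Debug => LogLevel.Debug,
            LLamaLogLevel.Info => LogLevel.Information,
            LLamaLogLevel.Warning => LogLevel.Warning,
            LLamaLogLevel.Error => LogLevel.Error,
            _ => LogLevel.None,
        };

        public static void Enable(ILogger logger)
        {
            _callback = (level, message) =>
            {
                if (string.IsNullOrEmpty(message))
                    return;

                // Most native messages end with '\n'; progress dots do not.
                logger.Log(ToLogLevel(level), "{Message}", message.TrimEnd('\n'));
            };

            NativeApi.llama_log_set(_callback);
        }
    }
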
diff --git a/LLama/Native/NativeApi.cs b/LLama/Native/NativeApi.cs
index e9666ea8..996ecce3 100644
--- a/LLama/Native/NativeApi.cs
+++ b/LLama/Native/NativeApi.cs
@@ -16,7 +16,7 @@ namespace LLama.Native
/// </summary>
/// <param name="level"></param>
/// <param name="message"></param>
- public delegate void LLamaLogCallback(ILLamaLogger.LogLevel level, string message);
+ public delegate void LLamaLogCallback(LLamaLogLevel level, string message);
/// <summary>
/// Direct translation of the llama.cpp API