
Merge pull request #158 from saddam213/ILogger

ILogger implementation
tags/v0.6.0
Martin Evans committed 2 years ago
parent commit: e074cd3273
7 changed files with 56 additions and 290 deletions
  1. LLama.SemanticKernel/LLamaSharp.SemanticKernel.csproj (+4, -0)
  2. LLama/Common/Logger.cs (+0, -270)
  3. LLama/LLamaContext.cs (+7, -6)
  4. LLama/LLamaExecutorBase.cs (+10, -11)
  5. LLama/LLamaSharp.csproj (+6, -2)
  6. LLama/Native/LLamaLogLevel.cs (+28, -0)
  7. LLama/Native/NativeApi.cs (+1, -1)

LLama.SemanticKernel/LLamaSharp.SemanticKernel.csproj (+4, -0)

@@ -36,6 +36,10 @@
<PackageReference Include="Microsoft.SemanticKernel.Abstractions" Version="0.21.230828.2-preview" />
</ItemGroup>

<ItemGroup Condition="'$(TargetFramework)' == 'netstandard2.0'">
<PackageReference Include="System.Memory" Version="4.5.5" PrivateAssets="all" />
</ItemGroup>
<ItemGroup>
<ProjectReference Include="..\LLama\LLamaSharp.csproj" />
</ItemGroup>


LLama/Common/Logger.cs (+0, -270)

@@ -1,270 +0,0 @@
using LLama.Native;
using System;
using System.Diagnostics;
using System.IO;
using static LLama.Common.ILLamaLogger;

namespace LLama.Common;

/// <summary>
/// receives log messages from LLamaSharp
/// </summary>
public interface ILLamaLogger
{
/// <summary>
/// Severity level of a log message
/// </summary>
public enum LogLevel
{
/// <summary>
/// Logs that are used for interactive investigation during development.
/// </summary>
Debug = 1,

/// <summary>
/// Logs that highlight when the current flow of execution is stopped due to a failure.
/// </summary>
Error = 2,

/// <summary>
/// Logs that highlight an abnormal or unexpected event in the application flow, but do not otherwise cause the application execution to stop.
/// </summary>
Warning = 3,

/// <summary>
/// Logs that track the general flow of the application.
/// </summary>
Info = 4
}

/// <summary>
/// Write the log in customized way
/// </summary>
/// <param name="source">The source of the log. It may be a method name or class name.</param>
/// <param name="message">The message.</param>
/// <param name="level">The log level.</param>
void Log(string source, string message, LogLevel level);
}

/// <summary>
/// The default logger of LLamaSharp. On default it write to console. Use methods of `LLamaLogger.Default` to change the behavior.
/// It's recommended to inherit `ILLamaLogger` to customize the behavior.
/// </summary>
public sealed class LLamaDefaultLogger
: ILLamaLogger
{
private static readonly Lazy<LLamaDefaultLogger> _instance = new Lazy<LLamaDefaultLogger>(() => new LLamaDefaultLogger());

private bool _toConsole = true;
private bool _toFile;

private FileStream? _fileStream;
private StreamWriter? _fileWriter;

/// <summary>
/// Get the default logger instance
/// </summary>
public static LLamaDefaultLogger Default => _instance.Value;

private LLamaDefaultLogger()
{

}

/// <summary>
/// Enable logging output from llama.cpp
/// </summary>
/// <returns></returns>
public LLamaDefaultLogger EnableNative()
{
EnableNativeLogCallback();
return this;
}

/// <summary>
/// Enable writing log messages to console
/// </summary>
/// <returns></returns>
public LLamaDefaultLogger EnableConsole()
{
_toConsole = true;
return this;
}

/// <summary>
/// Disable writing messages to console
/// </summary>
/// <returns></returns>
public LLamaDefaultLogger DisableConsole()
{
_toConsole = false;
return this;
}

/// <summary>
/// Enable writing log messages to file
/// </summary>
/// <param name="filename"></param>
/// <param name="mode"></param>
/// <returns></returns>
public LLamaDefaultLogger EnableFile(string filename, FileMode mode = FileMode.Append)
{
_fileStream = new FileStream(filename, mode, FileAccess.Write);
_fileWriter = new StreamWriter(_fileStream);
_toFile = true;
return this;
}

/// <summary>
/// Disable writing log messages to file
/// </summary>
/// <param name="filename">unused!</param>
/// <returns></returns>
[Obsolete("Use DisableFile method without 'filename' parameter")]
public LLamaDefaultLogger DisableFile(string filename)
{
return DisableFile();
}

/// <summary>
/// Disable writing log messages to file
/// </summary>
/// <returns></returns>
public LLamaDefaultLogger DisableFile()
{
if (_fileWriter is not null)
{
_fileWriter.Close();
_fileWriter = null;
}
if (_fileStream is not null)
{
_fileStream.Close();
_fileStream = null;
}
_toFile = false;
return this;
}

/// <summary>
/// Log a message
/// </summary>
/// <param name="source">The source of this message (e.g. class name)</param>
/// <param name="message">The message to log</param>
/// <param name="level">Severity level of this message</param>
public void Log(string source, string message, LogLevel level)
{
if (level == LogLevel.Info)
{
Info(message);
}
else if (level == LogLevel.Debug)
{

}
else if (level == LogLevel.Warning)
{
Warn(message);
}
else if (level == LogLevel.Error)
{
Error(message);
}
}

/// <summary>
/// Write a log message with "Info" severity
/// </summary>
/// <param name="message"></param>
public void Info(string message)
{
message = MessageFormat("info", message);
if (_toConsole)
{
Console.ForegroundColor = ConsoleColor.White;
Console.WriteLine(message);
Console.ResetColor();
}
if (_toFile)
{
Debug.Assert(_fileStream is not null);
Debug.Assert(_fileWriter is not null);
_fileWriter.WriteLine(message);
}
}

/// <summary>
/// Write a log message with "Warn" severity
/// </summary>
/// <param name="message"></param>
public void Warn(string message)
{
message = MessageFormat("warn", message);
if (_toConsole)
{
Console.ForegroundColor = ConsoleColor.Yellow;
Console.WriteLine(message);
Console.ResetColor();
}
if (_toFile)
{
Debug.Assert(_fileStream is not null);
Debug.Assert(_fileWriter is not null);
_fileWriter.WriteLine(message);
}
}

/// <summary>
/// Write a log message with "Error" severity
/// </summary>
/// <param name="message"></param>
public void Error(string message)
{
message = MessageFormat("error", message);
if (_toConsole)
{
Console.ForegroundColor = ConsoleColor.Red;
Console.WriteLine(message);
Console.ResetColor();
}
if (_toFile)
{
Debug.Assert(_fileStream is not null);
Debug.Assert(_fileWriter is not null);
_fileWriter.WriteLine(message);
}
}

private static string MessageFormat(string level, string message)
{
var now = DateTime.Now;
return $"[{now:yyyy.MM.dd HH:mm:ss}][{level}]: {message}";
}

/// <summary>
/// Register native logging callback
/// </summary>
private void EnableNativeLogCallback()
{
// TODO: Move to a more appropriate place once we have a intitialize method
NativeApi.llama_log_set(NativeLogCallback);
}

/// <summary>
/// Callback for native logging function
/// </summary>
/// <param name="level">The log level</param>
/// <param name="message">The log message</param>
private void NativeLogCallback(LogLevel level, string message)
{
if (string.IsNullOrEmpty(message))
return;

// Note that text includes the new line character at the end for most events.
// If your logging mechanism cannot handle that, check if the last character is '\n' and strip it
// if it exists.
// It might not exist for progress report where '.' is output repeatedly.
Log(default!, message.TrimEnd('\n'), level);
}

}

LLama/LLamaContext.cs (+7, -6)

@@ -10,6 +10,7 @@ using LLama.Common;
 using System.Runtime.InteropServices;
 using LLama.Extensions;
 using LLama.Abstractions;
+using Microsoft.Extensions.Logging;

 namespace LLama
 {
@@ -21,7 +22,7 @@ namespace LLama
 public sealed class LLamaContext
     : IDisposable
 {
-    private readonly ILLamaLogger? _logger;
+    private readonly ILogger? _logger;
     private readonly Encoding _encoding;
     private readonly SafeLLamaContextHandle _ctx;

@@ -62,18 +63,18 @@ namespace LLama
/// <param name="params">Model params.</param>
/// <param name="logger">The logger.</param>
[Obsolete("Use the LLamaWeights.CreateContext instead")]
public LLamaContext(IModelParams @params, ILLamaLogger? logger = null)
public LLamaContext(IModelParams @params, ILogger? logger = null)
{
Params = @params;

_logger = logger;
_encoding = @params.Encoding;

_logger?.Log(nameof(LLamaContext), $"Initializing LLama model with params: {this.Params}", ILLamaLogger.LogLevel.Info);
_logger?.LogInformation($"[LLamaContext] Initializing LLama model with params: {this.Params}");
_ctx = Utils.InitLLamaContextFromModelParams(Params);
}

internal LLamaContext(SafeLLamaContextHandle nativeContext, IModelParams @params, ILLamaLogger? logger = null)
internal LLamaContext(SafeLLamaContextHandle nativeContext, IModelParams @params, ILogger? logger = null)
{
Params = @params;

@@ -89,7 +90,7 @@ namespace LLama
 /// <param name="params"></param>
 /// <param name="logger"></param>
 /// <exception cref="ObjectDisposedException"></exception>
-public LLamaContext(LLamaWeights model, IModelParams @params, ILLamaLogger? logger = null)
+public LLamaContext(LLamaWeights model, IModelParams @params, ILogger? logger = null)
 {
     if (model.NativeHandle.IsClosed)
         throw new ObjectDisposedException("Cannot create context, model weights have been disposed");
@@ -471,7 +472,7 @@ namespace LLama

 if (!_ctx.Eval(tokens.Slice(i, n_eval), pastTokensCount, Params.Threads))
 {
-    _logger?.Log(nameof(LLamaContext), "Failed to eval.", ILLamaLogger.LogLevel.Error);
+    _logger?.LogError($"[LLamaContext] Failed to eval.");
     throw new RuntimeError("Failed to eval.");
 }
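
With these changes any Microsoft.Extensions.Logging.ILogger can be handed to LLamaContext. A minimal usage sketch (hypothetical consumer code, not part of this diff; assumes the Microsoft.Extensions.Logging.Console package and a placeholder model path):

// Hypothetical consumer code: route LLamaContext log output through a console ILogger.
using LLama;
using LLama.Common;
using Microsoft.Extensions.Logging;

using var factory = LoggerFactory.Create(builder => builder
    .AddConsole()
    .SetMinimumLevel(LogLevel.Information));
var logger = factory.CreateLogger<LLamaContext>();

var @params = new ModelParams("path/to/model.bin"); // placeholder path
using var weights = LLamaWeights.LoadFromFile(@params);
using var context = new LLamaContext(weights, @params, logger);
// Messages such as "[LLamaContext] Initializing LLama model with params: ..."
// now flow through the standard logging pipeline instead of the removed custom logger.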



LLama/LLamaExecutorBase.cs (+10, -11)

@@ -2,6 +2,7 @@
 using LLama.Common;
 using LLama.Exceptions;
 using LLama.Native;
+using Microsoft.Extensions.Logging;
 using System;
 using System.Collections.Generic;
 using System.IO;
@@ -21,7 +22,7 @@ namespace LLama
 /// <summary>
 /// The logger used by this executor.
 /// </summary>
-protected ILLamaLogger? _logger;
+protected ILogger? _logger;
 /// <summary>
 /// The tokens that were already processed by the model.
 /// </summary>
@@ -73,7 +74,7 @@ namespace LLama
 /// </summary>
 /// <param name="context"></param>
 /// <param name="logger"></param>
-protected StatefulExecutorBase(LLamaContext context, ILLamaLogger? logger = null)
+protected StatefulExecutorBase(LLamaContext context, ILogger? logger = null)
 {
     Context = context;
     _logger = logger;
@@ -99,20 +100,20 @@ namespace LLama
 }
 if (File.Exists(filename))
 {
-    _logger?.Log("LLamaExecutor", $"Attempting to load saved session from {filename}", ILLamaLogger.LogLevel.Info);
+    _logger?.LogInformation($"[LLamaExecutor] Attempting to load saved session from {filename}");
     llama_token[] session_tokens = new llama_token[Context.ContextSize];
     ulong n_token_count_out = 0;
     if (!NativeApi.llama_load_session_file(Context.NativeHandle, _pathSession, session_tokens, (ulong)Context.ContextSize, &n_token_count_out))
     {
-        _logger?.Log("LLamaExecutor", $"Failed to load session file {filename}", ILLamaLogger.LogLevel.Error);
+        _logger?.LogError($"[LLamaExecutor] Failed to load session file {filename}");
         throw new RuntimeError($"Failed to load session file {_pathSession}");
     }
     _session_tokens = session_tokens.Take((int)n_token_count_out).ToList();
-    _logger?.Log("LLamaExecutor", $"Loaded a session with prompt size of {session_tokens.Length} tokens", ILLamaLogger.LogLevel.Info);
+    _logger?.LogInformation($"[LLamaExecutor] Loaded a session with prompt size of {session_tokens.Length} tokens");
 }
 else
 {
-    _logger?.Log("LLamaExecutor", $"Session file does not exist, will create", ILLamaLogger.LogLevel.Warning);
+    _logger?.LogWarning($"[LLamaExecutor] Session file does not exist, will create");
 }

 _n_matching_session_tokens = 0;
@@ -128,17 +129,15 @@ namespace LLama
 }
 if (_n_matching_session_tokens >= _embed_inps.Count)
 {
-    _logger?.Log("LLamaExecutor", $"Session file has exact match for prompt!", ILLamaLogger.LogLevel.Info);
+    _logger?.LogInformation("[LLamaExecutor] Session file has exact match for prompt!");
 }
 else if (_n_matching_session_tokens < _embed_inps.Count / 2)
 {
-    _logger?.Log("LLamaExecutor", $"session file has low similarity to prompt ({_n_matching_session_tokens}" +
-        $" / {_embed_inps.Count} tokens); will mostly be reevaluated", ILLamaLogger.LogLevel.Warning);
+    _logger?.LogWarning($"[LLamaExecutor] Session file has low similarity to prompt ({_n_matching_session_tokens} / {_embed_inps.Count} tokens) will mostly be reevaluated");
 }
 else
 {
-    _logger?.Log("LLamaExecutor", $"Session file matches {_n_matching_session_tokens} / " +
-        $"{_embed_inps.Count} tokens of prompt", ILLamaLogger.LogLevel.Info);
+    _logger?.LogInformation($"[LLamaExecutor] Session file matches {_n_matching_session_tokens} / {_embed_inps.Count} tokens of prompt");
 }
}



LLama/LLamaSharp.csproj (+6, -2)

@@ -41,9 +41,13 @@

 <ItemGroup Condition="'$(TargetFramework)' == 'netstandard2.0'">
   <PackageReference Include="IsExternalInit" Version="1.0.3" PrivateAssets="all" />
-  <PackageReference Include="System.Memory" Version="4.5.4" PrivateAssets="all" />
+  <PackageReference Include="System.Memory" Version="4.5.5" PrivateAssets="all" />
   <PackageReference Include="System.Linq.Async" Version="6.0.1" />
-  <PackageReference Include="System.Text.Json" Version="6.0.0" />
+  <PackageReference Include="System.Text.Json" Version="7.0.3" />
 </ItemGroup>
+
+<ItemGroup>
+  <PackageReference Include="Microsoft.Extensions.Logging.Abstractions" Version="7.0.1" />
+</ItemGroup>

 </Project>

LLama/Native/LLamaLogLevel.cs (+28, -0)

@@ -0,0 +1,28 @@
namespace LLama.Native
{
/// <summary>
/// Severity level of a log message
/// </summary>
public enum LLamaLogLevel
{
/// <summary>
/// Logs that are used for interactive investigation during development.
/// </summary>
Debug = 1,

/// <summary>
/// Logs that highlight when the current flow of execution is stopped due to a failure.
/// </summary>
Error = 2,

/// <summary>
/// Logs that highlight an abnormal or unexpected event in the application flow, but do not otherwise cause the application execution to stop.
/// </summary>
Warning = 3,

/// <summary>
/// Logs that track the general flow of the application.
/// </summary>
Info = 4
}
}
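
LLamaLogLevel keeps the same numeric values as the LogLevel enum removed from ILLamaLogger above, so existing native callbacks see identical levels. An illustrative helper (an assumption, not part of this PR) for translating these values to Microsoft.Extensions.Logging severities:

// Assumed helper, not in this PR: map native log levels to
// Microsoft.Extensions.Logging.LogLevel for forwarding.
using LLama.Native;
using Microsoft.Extensions.Logging;

internal static class LLamaLogLevelExtensions
{
    public static LogLevel ToLogLevel(this LLamaLogLevel level) => level switch
    {
        LLamaLogLevel.Debug   => LogLevel.Debug,
        LLamaLogLevel.Info    => LogLevel.Information,
        LLamaLogLevel.Warning => LogLevel.Warning,
        LLamaLogLevel.Error   => LogLevel.Error,
        _                     => LogLevel.Trace, // unknown native levels fall back to Trace
    };
}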

LLama/Native/NativeApi.cs (+1, -1)

@@ -16,7 +16,7 @@ namespace LLama.Native
 /// </summary>
 /// <param name="level"></param>
 /// <param name="message"></param>
-public delegate void LLamaLogCallback(ILLamaLogger.LogLevel level, string message);
+public delegate void LLamaLogCallback(LLamaLogLevel level, string message);

 /// <summary>
 /// Direct translation of the llama.cpp API
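
Since the delegate now takes LLamaLogLevel, native llama.cpp output can be forwarded to an ILogger as well. A hedged sketch (consumer code, not part of this diff; NativeApi.llama_log_set is the registration call used by the deleted Logger.cs above, and ToLogLevel is the helper sketched earlier):

// Hedged sketch: forward native llama.cpp log messages to an ILogger.
using LLama.Native;
using Microsoft.Extensions.Logging;

internal static class NativeLogging
{
    // Hold a reference so the delegate is not garbage collected while
    // native code still owns the callback pointer.
    private static LLamaLogCallback? _callback;

    public static void RegisterWith(ILogger logger)
    {
        _callback = (level, message) =>
        {
            // Native messages usually end with '\n'; trim before forwarding.
            logger.Log(level.ToLogLevel(), "{Message}", message.TrimEnd('\n'));
        };
        NativeApi.llama_log_set(_callback);
    }
}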

