diff --git a/LLama/LLamaContext.cs b/LLama/LLamaContext.cs
index b64befd8..3a3e51af 100644
--- a/LLama/LLamaContext.cs
+++ b/LLama/LLamaContext.cs
@@ -340,6 +340,7 @@ namespace LLama
         /// <param name="pastTokensCount"></param>
         /// <returns>The updated `pastTokensCount`.</returns>
         /// <exception cref="RuntimeError"></exception>
+        [Obsolete("use llama_decode() instead")]
         public int Eval(llama_token[] tokens, int pastTokensCount)
         {
             return Eval(tokens.AsSpan(), pastTokensCount);
@@ -352,6 +353,7 @@ namespace LLama
         /// <param name="pastTokensCount"></param>
         /// <returns>The updated `pastTokensCount`.</returns>
         /// <exception cref="RuntimeError"></exception>
+        [Obsolete("use llama_decode() instead")]
         public int Eval(List<llama_token> tokens, int pastTokensCount)
         {
 #if NET5_0_OR_GREATER
@@ -382,6 +384,7 @@ namespace LLama
         /// <param name="pastTokensCount"></param>
         /// <returns>The updated `pastTokensCount`.</returns>
         /// <exception cref="RuntimeError"></exception>
+        [Obsolete("use llama_decode() instead")]
         public int Eval(ReadOnlyMemory<llama_token> tokens, int pastTokensCount)
         {
             return Eval(tokens.Span, pastTokensCount);
@@ -394,6 +397,7 @@ namespace LLama
         /// <param name="pastTokensCount"></param>
         /// <returns>The updated `pastTokensCount`.</returns>
         /// <exception cref="RuntimeError"></exception>
+        [Obsolete("use llama_decode() instead")]
         public int Eval(ReadOnlySpan<llama_token> tokens, int pastTokensCount)
         {
             var total = tokens.Length;
@@ -415,7 +419,7 @@ namespace LLama
             }
             return pastTokensCount;
         }
-#endregion
+        #endregion
 
         /// <inheritdoc />
         public void Dispose()
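
For callers hit by these deprecations, the sketch below shows roughly what moving off `Eval` might look like. It is a minimal sketch, assuming the batch-based API that accompanies llama_decode; the `LLamaBatch`, `Add`, `Decode`, `DecodeResult` and `LLamaSeqId` names are taken from later LLamaSharp versions and are not part of this diff.

```csharp
using LLama;
using LLama.Exceptions;
using LLama.Native;

// Hypothetical migration sketch, not code from this PR: LLamaBatch, Add, Decode,
// DecodeResult and LLamaSeqId.Zero are assumed from later LLamaSharp versions.
internal static class EvalMigrationSketch
{
    // Replaces the deprecated pattern:
    //     pastTokensCount = context.Eval(tokens, pastTokensCount);
    public static int DecodeTokens(LLamaContext context, int[] tokens, int pastTokensCount)
    {
        var batch = new LLamaBatch();
        for (var i = 0; i < tokens.Length; i++)
        {
            // Queue each token at its absolute position; only the last token needs logits.
            // Token/position/sequence conversions from plain ints are assumed here.
            batch.Add(tokens[i], pastTokensCount + i, LLamaSeqId.Zero, logits: i == tokens.Length - 1);
        }

        var result = context.Decode(batch);
        if (result != DecodeResult.Ok)
            throw new RuntimeError($"llama_decode failed with status {result}");

        return pastTokensCount + tokens.Length;
    }
}
```
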
diff --git a/LLama/Native/SafeLLamaContextHandle.cs b/LLama/Native/SafeLLamaContextHandle.cs
index 59a5bfd9..bb49cc0f 100644
--- a/LLama/Native/SafeLLamaContextHandle.cs
+++ b/LLama/Native/SafeLLamaContextHandle.cs
@@ -193,6 +193,7 @@ namespace LLama.Native
         /// <param name="tokens">The provided batch of new tokens to process</param>
         /// <param name="n_past">the number of tokens to use from previous eval calls</param>
         /// <returns>Returns true on success</returns>
+        [Obsolete("use llama_decode() instead")]
         public bool Eval(ReadOnlySpan<llama_token> tokens, int n_past)
         {
             unsafe
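
At the native layer, the Obsolete messages point at llama.cpp's llama_decode entry point. The declaration below is a rough orientation only; the library name, the LLamaNativeBatch struct and the placement of the binding are assumptions for illustration, not the binding added by this PR.

```csharp
using System.Runtime.InteropServices;
using LLama.Native;

// Sketch only: llama.cpp exposes
//     int32_t llama_decode(struct llama_context * ctx, struct llama_batch batch);
// where a return value of 0 means success. The managed declaration below is an
// assumed illustration of the binding the deprecated Eval path is replaced by.
internal static class NativeDecodeSketch
{
    [DllImport("llama", CallingConvention = CallingConvention.Cdecl)]
    public static extern int llama_decode(SafeLLamaContextHandle ctx, LLamaNativeBatch batch);
}
```
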