Browse Source

KernelMemory bug fix - cleanup nullable refs

pull/726/head
Zoli Somogyi 2 years ago
parent
commit
7b309d7bf6
2 changed files with 16 additions and 16 deletions
  1. +6
    -6
      LLama.KernelMemory/BuilderExtensions.cs
  2. +10
    -10
      LLama.KernelMemory/LLamaSharpTextEmbeddingGenerator.cs

+ 6
- 6
LLama.KernelMemory/BuilderExtensions.cs View File

@@ -81,15 +81,15 @@ namespace LLamaSharp.KernelMemory
{
var parameters = new ModelParams(config.ModelPath)
{
-                ContextSize = config?.ContextSize ?? 2048,
-                Seed = config?.Seed ?? 0,
-                GpuLayerCount = config?.GpuLayerCount ?? 20,
+                ContextSize = config.ContextSize ?? 2048,
+                Seed = config.Seed ?? 0,
+                GpuLayerCount = config.GpuLayerCount ?? 20,
                 Embeddings = true,
-                MainGpu = config?.MainGpu ?? 0,
-                SplitMode = config?.SplitMode ?? GPUSplitMode.None,
+                MainGpu = config.MainGpu,
+                SplitMode = config.SplitMode
};

-            if (weights == null)
+            if (weights == null || context == null)
{
weights = LLamaWeights.LoadFromFile(parameters);
context = weights.CreateContext(parameters);


+ 10
- 10
LLama.KernelMemory/LLamaSharpTextEmbeddingGenerator.cs View File

@@ -30,12 +30,12 @@ namespace LLamaSharp.KernelMemory
this._config = config;
var @params = new ModelParams(_config.ModelPath)
{
-                ContextSize = config?.ContextSize ?? 2048,
-                Seed = config?.Seed ?? 0,
-                GpuLayerCount = config?.GpuLayerCount ?? 20,
+                ContextSize = config.ContextSize ?? 2048,
+                Seed = config.Seed ?? 0,
+                GpuLayerCount = config.GpuLayerCount ?? 20,
                 Embeddings = true,
-                MainGpu = _config?.MainGpu ?? 0,
-                SplitMode = _config?.SplitMode ?? GPUSplitMode.None
+                MainGpu = _config.MainGpu,
+                SplitMode = _config.SplitMode
};
_weights = LLamaWeights.LoadFromFile(@params);
_embedder = new LLamaEmbedder(_weights, @params);
@@ -53,12 +53,12 @@ namespace LLamaSharp.KernelMemory
this._config = config;
var @params = new ModelParams(_config.ModelPath)
{
-                ContextSize = config?.ContextSize ?? 2048,
-                Seed = config?.Seed ?? 0,
-                GpuLayerCount = config?.GpuLayerCount ?? 20,
+                ContextSize = config.ContextSize ?? 2048,
+                Seed = config.Seed ?? 0,
+                GpuLayerCount = config.GpuLayerCount ?? 20,
                 Embeddings = true,
-                MainGpu = _config?.MainGpu ?? 0,
-                SplitMode = _config?.SplitMode ?? GPUSplitMode.None
+                MainGpu = _config.MainGpu,
+                SplitMode = _config.SplitMode
};
_weights = weights;
_embedder = new LLamaEmbedder(_weights, @params);


Loading…
Cancel
Save