Skip to content

Commit 79b5bb9

Browse files
committed
Fix some warnings.
1 parent 6f1dd3a commit 79b5bb9

File tree

7 files changed

+29
-13
lines changed

7 files changed

+29
-13
lines changed

LLama.Examples/Examples/QuantizeModel.cs

Lines changed: 2 additions & 0 deletions
Original file line numberDiff line numberDiff line change
@@ -20,6 +20,8 @@ public static async Task Run()
2020
{
2121
Console.WriteLine("Quantization failed!");
2222
}
23+
24+
await Task.CompletedTask;
2325
}
2426
}
2527
}

LLama/Batched/Conversation.cs

Lines changed: 1 addition & 1 deletion
Original file line numberDiff line numberDiff line change
@@ -410,7 +410,7 @@ public void Remove(LLamaPos start, LLamaPos end)
410410
}
411411

412412
/// <summary>
413-
/// Removes <see cref="count"/> tokens starting from <see cref="start"/>
413+
/// Removes <paramref name="count"/> tokens starting from <paramref name="start"/>
414414
/// </summary>
415415
/// <param name="start">Start position (inclusive)</param>
416416
/// <param name="count">Number of tokens</param>

LLama/Common/FixedSizeQueue.cs

Lines changed: 0 additions & 1 deletion
Original file line numberDiff line numberDiff line change
@@ -14,7 +14,6 @@ public class FixedSizeQueue<T>
1414
private readonly T[] _buffer;
1515
private int _start;
1616
private int _count;
17-
private T[]? _window;
1817

1918
// Minimum capacity for the temporary buffer used to expose a contiguous view.
2019
private const int MinimumWindowSize = 4;

LLama/LLamaExecutorBase.cs

Lines changed: 2 additions & 2 deletions
Original file line numberDiff line numberDiff line change
@@ -262,7 +262,7 @@ protected virtual void TryReuseMatchingPrefix()
262262
/// <param name="inferenceParams"></param>
263263
/// <param name="args"></param>
264264
/// <returns></returns>
265-
protected abstract Task<(bool, IReadOnlyList<string>)> PostProcess(IInferenceParams inferenceParams, InferStateArgs args);
265+
protected abstract (bool, IReadOnlyList<string>) PostProcess(IInferenceParams inferenceParams, InferStateArgs args);
266266

267267
/// <summary>
268268
/// The core inference logic.
@@ -338,7 +338,7 @@ public virtual async IAsyncEnumerable<string> InferAsync(string? text, IInferenc
338338
yield return decoded;
339339
}
340340

341-
var (breakGeneration, extraOutputs) = await PostProcess(inferenceParams, args);
341+
var (breakGeneration, extraOutputs) = PostProcess(inferenceParams, args);
342342
if (extraOutputs is { Count: > 0 })
343343
{
344344
foreach (var item in extraOutputs)

LLama/LLamaInstructExecutor.cs

Lines changed: 5 additions & 1 deletion
Original file line numberDiff line numberDiff line change
@@ -99,6 +99,7 @@ public override async Task SaveState(string filename)
9999
await JsonSerializer.SerializeAsync(fs, state);
100100
}
101101
}
102+
102103
/// <inheritdoc />
103104
public override async Task LoadState(string filename)
104105
{
@@ -154,7 +155,7 @@ protected override Task PreprocessInputs(string? text, InferStateArgs args)
154155
}
155156

156157
/// <inheritdoc />
157-
protected override async Task<(bool, IReadOnlyList<string>)> PostProcess(IInferenceParams inferenceParams, InferStateArgs args)
158+
protected override (bool, IReadOnlyList<string>) PostProcess(IInferenceParams inferenceParams, InferStateArgs args)
158159
{
159160
if (_embed_inps.Count <= _consumedTokensCount)
160161
{
@@ -205,7 +206,9 @@ protected override async Task InferInternal(IInferenceParams inferenceParams, In
205206
_pastTokensCount = pastTokensCount;
206207

207208
if (result != DecodeResult.Ok)
209+
{
208210
throw new LLamaDecodeError(result);
211+
}
209212

210213
if (_embeds.Count > 0 && !string.IsNullOrEmpty(_pathSession))
211214
{
@@ -250,6 +253,7 @@ protected override async Task InferInternal(IInferenceParams inferenceParams, In
250253

251254
return;
252255
}
256+
253257
/// <summary>
254258
/// The descriptor of the state of the instruct executor.
255259
/// </summary>

LLama/LLamaInteractExecutor.cs

Lines changed: 17 additions & 8 deletions
Original file line numberDiff line numberDiff line change
@@ -67,6 +67,7 @@ public override ExecutorBaseState GetStateData()
6767
};
6868
return state;
6969
}
70+
7071
/// <inheritdoc />
7172
public override Task LoadState(ExecutorBaseState data)
7273
{
@@ -88,23 +89,23 @@ public override Task LoadState(ExecutorBaseState data)
8889

8990
return Task.CompletedTask;
9091
}
92+
9193
/// <inheritdoc />
9294
public override async Task SaveState(string filename)
9395
{
9496
var state = (InteractiveExecutorState)GetStateData();
95-
using(var fs = new FileStream(filename, FileMode.Create, FileAccess.Write))
97+
using (var fs = new FileStream(filename, FileMode.Create, FileAccess.Write))
9698
{
9799
await JsonSerializer.SerializeAsync(fs, state);
98100
}
99101
}
102+
100103
/// <inheritdoc />
101104
public override async Task LoadState(string filename)
102105
{
103-
using (var fs = new FileStream(filename, FileMode.Open, FileAccess.Read))
104-
{
105-
var state = await JsonSerializer.DeserializeAsync<InteractiveExecutorState>(fs);
106-
await LoadState(state!);
107-
}
106+
using var fs = new FileStream(filename, FileMode.Open, FileAccess.Read);
107+
var state = await JsonSerializer.DeserializeAsync<InteractiveExecutorState>(fs);
108+
await LoadState(state!);
108109
}
109110

110111
/// <summary>
@@ -122,7 +123,11 @@ protected override Task PreprocessInputs(string? text, InferStateArgs args)
122123
if (_is_prompt_run)
123124
{
124125
// When running the first input (prompt) in interactive mode, we should specially process it.
125-
if (text == null) throw new ArgumentException("Prompt cannot be null to trigger continuation if a prompt has not been provided previously.");
126+
if (text == null)
127+
{
128+
throw new ArgumentException("Prompt cannot be null to trigger continuation if a prompt has not been provided previously.");
129+
}
130+
126131
if (!IsMultiModal)
127132
{
128133
_embed_inps = Context.Tokenize(text, true, true).ToList();
@@ -203,15 +208,19 @@ private Task PreprocessLlava(string text, InferStateArgs args, bool addBos = tru
203208
/// <param name="inferenceParams"></param>
204209
/// <param name="args"></param>
205210
/// <returns></returns>
206-
protected override async Task<(bool, IReadOnlyList<string>)> PostProcess(IInferenceParams inferenceParams, InferStateArgs args)
211+
protected override (bool, IReadOnlyList<string>) PostProcess(IInferenceParams inferenceParams, InferStateArgs args)
207212
{
208213
if (_embed_inps.Count <= _consumedTokensCount)
209214
{
210215
if (!string.IsNullOrEmpty(args.LastOutput) && AntipromptProcessor.Add(args.LastOutput))
216+
{
211217
args.WaitForInput = true;
218+
}
212219

213220
if (_pastTokensCount > 0 && args.WaitForInput)
221+
{
214222
return (true, Array.Empty<string>());
223+
}
215224
}
216225

217226
if (_embeds.Count > 0 && _embeds.Last().IsEndOfGeneration(Context.Vocab))

LLama/Native/SafeLlamaModelHandle.cs

Lines changed: 2 additions & 0 deletions
Original file line numberDiff line numberDiff line change
@@ -436,6 +436,7 @@ private static int llama_model_meta_val_str(SafeLlamaModelHandle model, string k
436436
/// </summary>
437437
/// <param name="model"></param>
438438
/// <returns></returns>
439+
[DllImport(NativeApi.libraryName, CallingConvention = CallingConvention.Cdecl)]
439440
private static extern uint llama_model_n_cls_out(SafeLlamaModelHandle model);
440441

441442
/// <summary>
@@ -444,6 +445,7 @@ private static int llama_model_meta_val_str(SafeLlamaModelHandle model, string k
444445
/// <param name="model"></param>
445446
/// <param name="i"></param>
446447
/// <returns></returns>
448+
[DllImport(NativeApi.libraryName, CallingConvention = CallingConvention.Cdecl)]
447449
private static extern string? llama_model_cls_label(SafeLlamaModelHandle model, uint i);
448450
#endregion
449451

0 commit comments

Comments (0)