Class LLM<TMessage>

Namespace: Prefrontal.Modules.Chat
Assembly: Prefrontal.Modules.dll

A base class for large language models (LLMs) that can be used to generate text and embeddings.

public abstract class LLM<TMessage>

Type Parameters

TMessage

The type of the messages the LLM uses. Typically this is Message or Microsoft.SemanticKernel.ChatMessageContent.
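
Examples

The following is a minimal sketch of how a concrete implementation might be configured. OpenAILLM is a hypothetical subclass used purely for illustration (this reference does not define it), and the model identifier is an assumption; the point is that model selection and sampling behaviour are plain public fields.

using Microsoft.SemanticKernel;
using Prefrontal.Modules.Chat;

// "OpenAILLM" is a hypothetical LLM<ChatMessageContent> subclass; substitute
// whatever concrete implementation your application provides.
LLM<ChatMessageContent> llm = new OpenAILLM();

// Model selection and sampling parameters are plain public fields.
llm.Model = "gpt-4o-mini";   // assumed model identifier
llm.Temperature = 0.7;
llm.TopP = 0.9;
llm.MaxTokens = 512;
llm.Seed = 42;               // fixed seed for more repeatable output, where supported

The member examples further below build on this llm instance.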


Fields

EmptyCompletionMessageFactory

[JsonIgnore]
public Func<TMessage>? EmptyCompletionMessageFactory

Field Value

Func<TMessage>

FrequencyPenalty

public double FrequencyPenalty

Field Value

double

MaxTokens

public int MaxTokens

Field Value

int

Messages

The messages in the conversation. This is the context that the LLM uses to generate completions. Do not edit this while the LLM is generating completions.

public List<TMessage> Messages

Field Value

List<TMessage>
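
Examples

A minimal sketch of seeding the conversation, assuming the hypothetical llm instance from the class-level example above and ChatMessageContent as TMessage. Messages are appended before requesting a completion, never while one is being generated.

using Microsoft.SemanticKernel;
using Microsoft.SemanticKernel.ChatCompletion;

// Seed the conversation context before asking for a completion.
llm.Messages.Add(new ChatMessageContent(AuthorRole.System, "You are a concise assistant."));
llm.Messages.Add(new ChatMessageContent(AuthorRole.User, "Summarize the plot of Hamlet in one sentence."));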

Model

public string Model

Field Value

string

PresencePenalty

public double PresencePenalty

Field Value

double

ReasoningEffort

[JsonIgnore(Condition = JsonIgnoreCondition.WhenWritingDefault)]
public LLM<TMessage>.Effort? ReasoningEffort

Field Value

LLM<TMessage>.Effort?

RepeatPenalty

public double RepeatPenalty

Field Value

double

Seed

public int Seed

Field Value

int

Temperature

public double Temperature

Field Value

double

TopK

public double TopK

Field Value

double

TopP

public double TopP

Field Value

double

Methods

CompleteAsync(int, CancellationToken)

public abstract Task<LLM<TMessage>.CompletionChunk> CompleteAsync(int n, CancellationToken cancellationToken = default)

Parameters

n int
cancellationToken CancellationToken

Returns

Task<LLM<TMessage>.CompletionChunk>

CompleteAsync(CancellationToken)

public virtual Task<TMessage> CompleteAsync(CancellationToken cancellationToken = default)

Parameters

cancellationToken CancellationToken

Returns

Task<TMessage>
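
Examples

A minimal sketch, assuming the llm instance and seeded Messages from the examples above. The returned TMessage is the generated reply; whether the implementation also appends it to Messages is not specified here.

using System;
using System.Threading;

// Cancel automatically if the completion takes longer than 30 seconds.
using var cts = new CancellationTokenSource(TimeSpan.FromSeconds(30));
ChatMessageContent reply = await llm.CompleteAsync(cts.Token);
Console.WriteLine(reply.Content);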

ContinueAsync(int, CancellationToken)

public abstract IAsyncEnumerable<LLM<TMessage>.CompletionChunk> ContinueAsync(int n, CancellationToken cancellationToken = default)

Parameters

n int
cancellationToken CancellationToken

Returns

IAsyncEnumerable<LLM<TMessage>.CompletionChunk>

ContinueAsync(CancellationToken)

public virtual IAsyncEnumerable<TMessage> ContinueAsync(CancellationToken cancellationToken = default)

Parameters

cancellationToken CancellationToken

Returns

IAsyncEnumerable<TMessage>
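
Examples

A sketch of consuming the asynchronous stream, again assuming the llm instance from the class-level example. Each yielded message can be handled as soon as it arrives; pass a CancellationToken to stop the stream early.

using System;

await foreach (ChatMessageContent message in llm.ContinueAsync())
{
    // Handle each message as it is produced.
    Console.WriteLine(message.Content);
}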

CreateEmptyCompletionChunk(int)

protected LLM<TMessage>.CompletionChunk CreateEmptyCompletionChunk(int n)

Parameters

n int

Returns

LLM<TMessage>.CompletionChunk

GetAvailableModelsAsync(CancellationToken)

public abstract Task<List<LLM<TMessage>.ModelInfo>> GetAvailableModelsAsync(CancellationToken cancellationToken = default)

Parameters

cancellationToken CancellationToken

Returns

Task<List<LLM<TMessage>.ModelInfo>>
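
Examples

A small sketch, assuming the llm instance from above. The shape of ModelInfo is documented separately, so this example only counts the results.

using System;

// Ask the backend which models it currently exposes.
var models = await llm.GetAvailableModelsAsync();
Console.WriteLine($"Backend reports {models.Count} available models.");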

GetEmbeddingsAsync(List<string>, int?, string?, CancellationToken)

public abstract Task<List<LLM<TMessage>.EmbeddingObject>> GetEmbeddingsAsync(List<string> input, int? dimensions = null, string? user = null, CancellationToken cancellationToken = default)

Parameters

input List<string>
dimensions int?
user string
cancellationToken CancellationToken

Returns

Task<List<LLM<TMessage>.EmbeddingObject>>

GetEmbeddingsAsync(string, int?, string?, CancellationToken)

public virtual Task<LLM<TMessage>.EmbeddingObject> GetEmbeddingsAsync(string input, int? dimensions = null, string? user = null, CancellationToken cancellationToken = default)

Parameters

input string
dimensions int?
user string
cancellationToken CancellationToken

Returns

Task<LLM<TMessage>.EmbeddingObject>
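
Examples

A minimal sketch covering both overloads, again assuming the llm instance from the class-level example. The structure of EmbeddingObject is documented separately, so the results are only counted here; the dimensions argument is optional and backend-dependent.

using System;
using System.Collections.Generic;

// Single input: one EmbeddingObject back.
var single = await llm.GetEmbeddingsAsync("The quick brown fox.");

// Batch input: one EmbeddingObject per string, optionally at a reduced dimensionality.
var batch = await llm.GetEmbeddingsAsync(
    new List<string> { "first document", "second document" },
    dimensions: 256);
Console.WriteLine($"Received {batch.Count} embeddings.");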