Type Alias: LLamaChatPromptCompletionEngineOptions
type LLamaChatPromptCompletionEngineOptions = {
    /**
     * Max tokens to allow for preloading a prompt and generating a completion for it.
     *
     * Defaults to `256` or half of the context size, whichever is smaller.
     */
    maxPreloadTokens?: number;

    /**
     * Called with the prompt and the completion generated for it.
     *
     * Fixed: was declared as `onGeneration?: void`, which makes the property
     * uncallable; the Methods section documents it as a two-argument callback.
     */
    onGeneration?: (prompt: string, completion: string) => void;

    /**
     * Max number of completions to cache.
     *
     * Defaults to `100`.
     */
    maxCachedCompletions?: number;

    temperature?: LLamaChatCompletePromptOptions["temperature"];
    minP?: LLamaChatCompletePromptOptions["minP"];
    topK?: LLamaChatCompletePromptOptions["topK"];
    topP?: LLamaChatCompletePromptOptions["topP"];
    seed?: LLamaChatCompletePromptOptions["seed"];
    trimWhitespaceSuffix?: LLamaChatCompletePromptOptions["trimWhitespaceSuffix"];
    evaluationPriority?: LLamaChatCompletePromptOptions["evaluationPriority"];
    repeatPenalty?: LLamaChatCompletePromptOptions["repeatPenalty"];
    tokenBias?: LLamaChatCompletePromptOptions["tokenBias"];
    customStopTriggers?: LLamaChatCompletePromptOptions["customStopTriggers"];
    grammar?: LLamaChatCompletePromptOptions["grammar"];
    functions?: LLamaChatCompletePromptOptions["functions"];
    documentFunctionParams?: LLamaChatCompletePromptOptions["documentFunctionParams"];
};
Defined in: evaluator/LlamaChatSession/utils/LlamaChatSessionPromptCompletionEngine.ts:7
Properties
maxPreloadTokens?
optional maxPreloadTokens: number;
Defined in: evaluator/LlamaChatSession/utils/LlamaChatSessionPromptCompletionEngine.ts:13
Max tokens to allow for preloading a prompt and generating a completion for it.
Defaults to `256` or half of the context size, whichever is smaller.
maxCachedCompletions?
optional maxCachedCompletions: number;
Defined in: evaluator/LlamaChatSession/utils/LlamaChatSessionPromptCompletionEngine.ts:21
Max number of completions to cache.
Defaults to `100`.
temperature?
optional temperature: LLamaChatCompletePromptOptions["temperature"];
Defined in: evaluator/LlamaChatSession/utils/LlamaChatSessionPromptCompletionEngine.ts:23
minP?
optional minP: LLamaChatCompletePromptOptions["minP"];
Defined in: evaluator/LlamaChatSession/utils/LlamaChatSessionPromptCompletionEngine.ts:24
topK?
optional topK: LLamaChatCompletePromptOptions["topK"];
Defined in: evaluator/LlamaChatSession/utils/LlamaChatSessionPromptCompletionEngine.ts:25
topP?
optional topP: LLamaChatCompletePromptOptions["topP"];
Defined in: evaluator/LlamaChatSession/utils/LlamaChatSessionPromptCompletionEngine.ts:26
seed?
optional seed: LLamaChatCompletePromptOptions["seed"];
Defined in: evaluator/LlamaChatSession/utils/LlamaChatSessionPromptCompletionEngine.ts:27
trimWhitespaceSuffix?
optional trimWhitespaceSuffix: LLamaChatCompletePromptOptions["trimWhitespaceSuffix"];
Defined in: evaluator/LlamaChatSession/utils/LlamaChatSessionPromptCompletionEngine.ts:28
evaluationPriority?
optional evaluationPriority: LLamaChatCompletePromptOptions["evaluationPriority"];
Defined in: evaluator/LlamaChatSession/utils/LlamaChatSessionPromptCompletionEngine.ts:29
repeatPenalty?
optional repeatPenalty: LLamaChatCompletePromptOptions["repeatPenalty"];
Defined in: evaluator/LlamaChatSession/utils/LlamaChatSessionPromptCompletionEngine.ts:30
tokenBias?
optional tokenBias: LLamaChatCompletePromptOptions["tokenBias"];
Defined in: evaluator/LlamaChatSession/utils/LlamaChatSessionPromptCompletionEngine.ts:31
customStopTriggers?
optional customStopTriggers: LLamaChatCompletePromptOptions["customStopTriggers"];
Defined in: evaluator/LlamaChatSession/utils/LlamaChatSessionPromptCompletionEngine.ts:32
grammar?
optional grammar: LLamaChatCompletePromptOptions["grammar"];
Defined in: evaluator/LlamaChatSession/utils/LlamaChatSessionPromptCompletionEngine.ts:33
functions?
optional functions: LLamaChatCompletePromptOptions["functions"];
Defined in: evaluator/LlamaChatSession/utils/LlamaChatSessionPromptCompletionEngine.ts:34
documentFunctionParams?
optional documentFunctionParams: LLamaChatCompletePromptOptions["documentFunctionParams"];
Defined in: evaluator/LlamaChatSession/utils/LlamaChatSessionPromptCompletionEngine.ts:35
Methods
onGeneration()?
optional onGeneration(prompt: string, completion: string): void;
Defined in: evaluator/LlamaChatSession/utils/LlamaChatSessionPromptCompletionEngine.ts:14
Parameters
| Parameter | Type |
| --- | --- |
| `prompt` | `string` |
| `completion` | `string` |
Returns
void