# Type Alias: LlamaChatResponse&lt;Functions&gt;

```ts
type LlamaChatResponse<Functions> = {
    response: string;
    fullResponse: (string | LlamaChatResponseSegment)[];
    functionCalls?: Functions extends ChatModelFunctions
        ? LlamaChatResponseFunctionCall<Functions>[]
        : never;
    lastEvaluation: {
        cleanHistory: ChatHistoryItem[];
        contextWindow: ChatHistoryItem[];
        contextShiftMetadata: any;
    };
    metadata: {
        remainingGenerationAfterStop: string | Token[];
        stopReason: "eogToken" | "stopGenerationTrigger" | "functionCalls" | "maxTokens" | "abort";
    } | {
        remainingGenerationAfterStop: string | Token[];
        stopReason: "customStopTrigger";
        customStopTrigger: (string | Token)[];
    };
};
```

Defined in: evaluator/LlamaChat/LlamaChat.ts:849
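
This is the shape of a finished generation, typically obtained by awaiting a generation call on a `LlamaChat` instance. Below is a minimal, hedged sketch of consuming such a response; the helper name is hypothetical, and it assumes `LlamaChatResponse` is exported from the package root, as this reference implies.

```ts
import type {LlamaChatResponse} from "node-llama-cpp";

// Hypothetical helper: log the main fields of a finished generation
function summarizeResponse(res: LlamaChatResponse) {
    console.log("Response text:", res.response);
    console.log("Stop reason:", res.metadata.stopReason);
    console.log("Items in the evaluated context window:", res.lastEvaluation.contextWindow.length);
}
```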
## Type Parameters

| Type Parameter | Default type |
| --- | --- |
| `Functions` *extends* `ChatModelFunctions` \| `undefined` | `undefined` |
## Properties

### response

```ts
response: string;
```

Defined in: evaluator/LlamaChat/LlamaChat.ts:853

The response text only, without any text segments (like thoughts).
### fullResponse

```ts
fullResponse: (string | LlamaChatResponseSegment)[];
```

Defined in: evaluator/LlamaChat/LlamaChat.ts:858

The full response, including all text and text segments (like thoughts).
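
Since `fullResponse` mixes plain strings with `LlamaChatResponseSegment` items, the text-only parts can be recovered by filtering on the item type. A minimal sketch; the helper is hypothetical and assumes `LlamaChatResponse` is exported from the package root.

```ts
import type {LlamaChatResponse} from "node-llama-cpp";

// Hypothetical helper: keep only the plain text items of the full response,
// dropping segments such as thoughts
function textOnly(res: LlamaChatResponse): string {
    return res.fullResponse
        .filter((item): item is string => typeof item === "string")
        .join("");
}
```

Per the descriptions above, this text is what the `response` property already contains; the sketch only illustrates how the two properties relate.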
### functionCalls?

```ts
optional functionCalls: Functions extends ChatModelFunctions ? LlamaChatResponseFunctionCall<Functions>[] : never;
```

Defined in: evaluator/LlamaChat/LlamaChat.ts:859
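
Judging from the conditional type, `functionCalls` only carries values when the `Functions` type parameter is a concrete `ChatModelFunctions` shape. A hedged sketch of reading it; the helper is hypothetical and assumes `ChatModelFunctions` and `LlamaChatResponse` are exported from the package root.

```ts
import type {ChatModelFunctions, LlamaChatResponse} from "node-llama-cpp";

// Hypothetical helper: with a concrete `Functions` type, `functionCalls`
// resolves to an array (or `undefined` when no calls were made)
function logFunctionCalls(res: LlamaChatResponse<ChatModelFunctions>) {
    for (const call of res.functionCalls ?? [])
        console.log("Function call requested:", call);
}
```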
### lastEvaluation

```ts
lastEvaluation: {
    cleanHistory: ChatHistoryItem[];
    contextWindow: ChatHistoryItem[];
    contextShiftMetadata: any;
};
```

Defined in: evaluator/LlamaChat/LlamaChat.ts:862
#### cleanHistory

```ts
cleanHistory: ChatHistoryItem[];
```

#### contextWindow

```ts
contextWindow: ChatHistoryItem[];
```

#### contextShiftMetadata

```ts
contextShiftMetadata: any;
```
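
One likely use of `lastEvaluation.cleanHistory` is to carry the chat history into the next turn. A minimal sketch, assuming user messages follow the `{type: "user", text}` shape of `ChatHistoryItem`; the helper is hypothetical.

```ts
import type {ChatHistoryItem, LlamaChatResponse} from "node-llama-cpp";

// Hypothetical helper: build the next turn's chat history from the last
// evaluation's clean history plus a new user message
function nextTurnHistory(res: LlamaChatResponse, userText: string): ChatHistoryItem[] {
    return [
        ...res.lastEvaluation.cleanHistory,
        {type: "user", text: userText}
    ];
}
```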
### metadata

```ts
metadata: {
    remainingGenerationAfterStop: string | Token[];
    stopReason: "eogToken" | "stopGenerationTrigger" | "functionCalls" | "maxTokens" | "abort";
} | {
    remainingGenerationAfterStop: string | Token[];
    stopReason: "customStopTrigger";
    customStopTrigger: (string | Token)[];
};
```

Defined in: evaluator/LlamaChat/LlamaChat.ts:867
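
`metadata` is a union discriminated by `stopReason`: `customStopTrigger` exists only on the `"customStopTrigger"` branch. A minimal sketch of narrowing it; the helper is hypothetical.

```ts
import type {LlamaChatResponse} from "node-llama-cpp";

// Hypothetical helper: narrowing on `stopReason` exposes `customStopTrigger`
// only on the branch where it is defined
function describeStop(res: LlamaChatResponse): string {
    if (res.metadata.stopReason === "customStopTrigger")
        return `Stopped by custom trigger: ${JSON.stringify(res.metadata.customStopTrigger)}`;

    return `Stopped due to: ${res.metadata.stopReason}`;
}
```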