Type Alias: ChatWrapperSettings
type ChatWrapperSettings = {
    supportsSystemMessages: boolean;
    functions: {
        call: {
            optionalPrefixSpace: boolean;
            prefix: string | LlamaText;
            paramsPrefix: string | LlamaText;
            suffix: string | LlamaText;
            emptyCallParamsPlaceholder?: object | string | number | boolean | null;
        };
        result: {
            prefix: string | LlamaText;
            suffix: string | LlamaText;
        };
        parallelism?: {
            call: {
                sectionPrefix: string | LlamaText;
                betweenCalls?: string | LlamaText;
                sectionSuffix?: string | LlamaText;
            };
            result?: {
                sectionPrefix?: string | LlamaText;
                betweenResults?: string | LlamaText;
                sectionSuffix?: string | LlamaText;
            };
        };
    };
    segments?: {
        closeAllSegments?: string | LlamaText;
        reiterateStackAfterFunctionCalls?: boolean;
        thought?: ChatWrapperSettingsSegment & {
            reopenAfterFunctionCalls?: boolean;
        };
        comment?: ChatWrapperSettingsSegment;
    };
};
Defined in: types.ts:22
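For orientation, below is a minimal sketch of a value of this type. The marker strings are purely illustrative and do not correspond to any particular model's chat template; it assumes LlamaText, SpecialTokensText, and this type are imported from the package root.

import {LlamaText, SpecialTokensText, type ChatWrapperSettings} from "node-llama-cpp";

// Illustrative only: the markers below do not match any real model's template
const exampleSettings: ChatWrapperSettings = {
    supportsSystemMessages: true,
    functions: {
        call: {
            optionalPrefixSpace: true,
            prefix: LlamaText(new SpecialTokensText("[[call: ")),
            paramsPrefix: "(",
            suffix: ")"
        },
        result: {
            prefix: "Result of {{functionName}}: ", // template parameter in a plain string
            suffix: "\n"
        }
    }
};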
Properties
supportsSystemMessages
readonly supportsSystemMessages: boolean;
Defined in: types.ts:23
functions
readonly functions: {
    call: {
        optionalPrefixSpace: boolean;
        prefix: string | LlamaText;
        paramsPrefix: string | LlamaText;
        suffix: string | LlamaText;
        emptyCallParamsPlaceholder?: object | string | number | boolean | null;
    };
    result: {
        prefix: string | LlamaText;
        suffix: string | LlamaText;
    };
    parallelism?: {
        call: {
            sectionPrefix: string | LlamaText;
            betweenCalls?: string | LlamaText;
            sectionSuffix?: string | LlamaText;
        };
        result?: {
            sectionPrefix?: string | LlamaText;
            betweenResults?: string | LlamaText;
            sectionSuffix?: string | LlamaText;
        };
    };
};
Defined in: types.ts:24
call
readonly call: {
    optionalPrefixSpace: boolean;
    prefix: string | LlamaText;
    paramsPrefix: string | LlamaText;
    suffix: string | LlamaText;
    emptyCallParamsPlaceholder?: object | string | number | boolean | null;
};
call.optionalPrefixSpace
readonly optionalPrefixSpace: boolean;
call.prefix
readonly prefix: string | LlamaText;
call.paramsPrefix
readonly paramsPrefix: string | LlamaText;
call.suffix
readonly suffix: string | LlamaText;
call.emptyCallParamsPlaceholder?
readonly optional emptyCallParamsPlaceholder: object | string | number | boolean | null;
The value to use when the function has no arguments.
Will be stringified using jsonDumps.
Defaults to "".
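As a sketch (assuming the type is imported from the package root), setting the placeholder to an empty object makes parameter-less calls render with "{}" (the jsonDumps output of {}) in the params position instead of an empty string:

import type {ChatWrapperSettings} from "node-llama-cpp";

// Illustrative markers; only emptyCallParamsPlaceholder is the point here
const callSettings: ChatWrapperSettings["functions"]["call"] = {
    optionalPrefixSpace: true,
    prefix: "[[call: ",
    paramsPrefix: "(",
    suffix: ")]]",
    emptyCallParamsPlaceholder: {} // stringified with jsonDumps to "{}"
};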
result
readonly result: {
    prefix: string | LlamaText;
    suffix: string | LlamaText;
};
result.prefix
readonly prefix: string | LlamaText;
Supported template parameters:
{{functionName}}
{{functionParams}}
Template parameters can only appear in a string or a string in a LlamaText.
Template parameters inside a SpecialTokensText inside a LlamaText won't be replaced.
Example of supported values:
"text{{functionName}}text"
LlamaText(["text{{functionName}}text"])
Example of unsupported values:
LlamaText([new SpecialTokensText("text{{functionName}}text")])
result.suffix
readonly suffix: string | LlamaText;
Supported template parameters:
{{functionName}}
{{functionParams}}
Template parameters can only appear in a string or a string in a LlamaText.
Template parameters inside a SpecialTokensText inside a LlamaText won't be replaced.
Example of supported values:
"text{{functionName}}text"
LlamaText(["text{{functionName}}text"])
Example of unsupported values:
LlamaText([new SpecialTokensText("text{{functionName}}text")])
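For example, a sketch of a result configuration that embeds {{functionName}} where it will be replaced; the marker strings and special tokens are illustrative only:

import {LlamaText, SpecialTokensText, type ChatWrapperSettings} from "node-llama-cpp";

const resultSettings: ChatWrapperSettings["functions"]["result"] = {
    prefix: LlamaText([
        new SpecialTokensText("[[result]]"), // a template parameter placed here would not be replaced
        "{{functionName}} returned: " // plain string inside the LlamaText, so the parameter is replaced
    ]),
    suffix: LlamaText(new SpecialTokensText("[[/result]]"))
};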
parallelism?
readonly optional parallelism: {
    call: {
        sectionPrefix: string | LlamaText;
        betweenCalls?: string | LlamaText;
        sectionSuffix?: string | LlamaText;
    };
    result?: {
        sectionPrefix?: string | LlamaText;
        betweenResults?: string | LlamaText;
        sectionSuffix?: string | LlamaText;
    };
};
If this field is present, parallel function calling is supported.
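A sketch of a parallelism configuration with illustrative marker strings; presumably multiple calls are wrapped by the call section markers and separated by betweenCalls, with results grouped the same way:

import type {ChatWrapperSettings} from "node-llama-cpp";

// Illustrative markers only
const parallelism: NonNullable<ChatWrapperSettings["functions"]["parallelism"]> = {
    call: {
        sectionPrefix: "[[calls]]",
        betweenCalls: "\n",
        sectionSuffix: "[[/calls]]"
    },
    result: {
        sectionPrefix: "[[results]]",
        betweenResults: "\n",
        sectionSuffix: "[[/results]]"
    }
};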
parallelism.call
readonly call: {
    sectionPrefix: string | LlamaText;
    betweenCalls?: string | LlamaText;
    sectionSuffix?: string | LlamaText;
};
parallelism.call.sectionPrefix
readonly sectionPrefix: string | LlamaText;
parallelism.call.betweenCalls?
readonly optional betweenCalls: string | LlamaText;
parallelism.call.sectionSuffix?
readonly optional sectionSuffix: string | LlamaText;
parallelism.result?
readonly optional result: {
    sectionPrefix?: string | LlamaText;
    betweenResults?: string | LlamaText;
    sectionSuffix?: string | LlamaText;
};
parallelism.result.sectionPrefix?
readonly optional sectionPrefix: string | LlamaText;
parallelism.result.betweenResults?
readonly optional betweenResults: string | LlamaText;
parallelism.result.sectionSuffix?
readonly optional sectionSuffix: string | LlamaText;
segments?
readonly optional segments: {
    closeAllSegments?: string | LlamaText;
    reiterateStackAfterFunctionCalls?: boolean;
    thought?: ChatWrapperSettingsSegment & {
        reopenAfterFunctionCalls?: boolean;
    };
    comment?: ChatWrapperSettingsSegment;
};
Defined in: types.ts:94
closeAllSegments?
readonly optional closeAllSegments: string | LlamaText;
Consider all active segments to be closed when this text is detected.
reiterateStackAfterFunctionCalls?
readonly optional reiterateStackAfterFunctionCalls: boolean;
After function calls, reiterate the stack of the active segments to remind the model of the context.
Defaults to false.
thought?
readonly optional thought: ChatWrapperSettingsSegment & {
    reopenAfterFunctionCalls?: boolean;
};
Chain of Thought text segment
Type declaration
reopenAfterFunctionCalls?
optional reopenAfterFunctionCalls: boolean;
comment?
readonly optional comment: ChatWrapperSettingsSegment;
Comment segment.
Used by models such as gpt-oss.
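Putting these together, a sketch of a segments configuration for a hypothetical model that wraps its chain of thought and comments in explicit markers; it assumes ChatWrapperSettingsSegment describes a segment's prefix and suffix text, and the markers themselves are illustrative:

import {LlamaText, SpecialTokensText, type ChatWrapperSettings} from "node-llama-cpp";

// Sketch only: the prefix/suffix shape of ChatWrapperSettingsSegment is an assumption here
const segments: ChatWrapperSettings["segments"] = {
    closeAllSegments: LlamaText(new SpecialTokensText("[[end]]")), // all active segments are considered closed when this is detected
    reiterateStackAfterFunctionCalls: true, // repeat the active segment stack after function calls
    thought: {
        prefix: LlamaText(new SpecialTokensText("[[think]]")),
        suffix: LlamaText(new SpecialTokensText("[[/think]]")),
        reopenAfterFunctionCalls: true
    },
    comment: {
        prefix: LlamaText(new SpecialTokensText("[[comment]]")),
        suffix: LlamaText(new SpecialTokensText("[[/comment]]"))
    }
};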