ChatConfig
Auto-generated API reference for ChatConfig.
Interface: ChatConfig
Defined in: packages/core/src/types/chat.ts:11
# Properties

# adapter

adapter: AdapterFactory
Defined in: packages/core/src/types/chat.ts:12
# initialMessages?

optional initialMessages?: Message[]
Defined in: packages/core/src/types/chat.ts:20
# maxTokens?

optional maxTokens?: number
Defined in: packages/core/src/types/chat.ts:15
# maxToolIterations?

optional maxToolIterations?: number
Defined in: packages/core/src/types/chat.ts:27
Maximum number of LLM ↔ tool feedback turns per send().
After a tool call, the controller feeds the result back to the model
so it can continue reasoning. This caps that loop to prevent runaway
cost if a model keeps requesting tools. Default: 5. Set to 1 to disable.
# memory?

optional memory?: ChatMemory
Defined in: packages/core/src/types/chat.ts:18
# observers?

optional observers?: Observer[]
Defined in: packages/core/src/types/chat.ts:31
# onError?

optional onError?: (error) => void
Defined in: packages/core/src/types/chat.ts:29
# Parameters
error
Error
# Returns
void
# onMessage?

optional onMessage?: (message) => void
Defined in: packages/core/src/types/chat.ts:28
# Parameters
message
# Returns
void
# onToolCall?

optional onToolCall?: (toolCall, context) => MaybePromise<void>
Defined in: packages/core/src/types/chat.ts:30
# Parameters
toolCall
context
# Returns
MaybePromise<void>
# retriever?

optional retriever?: Retriever
Defined in: packages/core/src/types/chat.ts:19
# skills?

optional skills?: SkillDefinition[]
Defined in: packages/core/src/types/chat.ts:17
# systemPrompt?

optional systemPrompt?: string
Defined in: packages/core/src/types/chat.ts:13
# temperature?

optional temperature?: number
Defined in: packages/core/src/types/chat.ts:14
# tools?

optional tools?: ToolDefinition<Record<string, unknown>>[]
Defined in: packages/core/src/types/chat.ts:16