ChatConfig
Auto-generated API reference for ChatConfig.
Interface: ChatConfig
Defined in: core/dist/index.d.ts:158
#Properties
#adapter
adapter: AdapterFactory
Defined in: core/dist/index.d.ts:159
#initialMessages?
optional initialMessages?: MessageType[]
Defined in: core/dist/index.d.ts:167
#maxTokens?
optional maxTokens?: number
Defined in: core/dist/index.d.ts:162
#maxToolIterations?
optional maxToolIterations?: number
Defined in: core/dist/index.d.ts:174
Maximum number of LLM ↔ tool feedback turns per send().
After a tool call, the controller feeds the result back to the model
so it can continue reasoning. This caps that loop to prevent runaway
cost if a model keeps requesting tools. Default: 5. Set to 1 to disable.
#memory?
optional memory?: ChatMemory
Defined in: core/dist/index.d.ts:165
#observers?
optional observers?: Observer[]
Defined in: core/dist/index.d.ts:178
#onError?
optional onError?: (error) => void
Defined in: core/dist/index.d.ts:176
#Parameters
error
Error
#Returns
void
#onMessage?
optional onMessage?: (message) => void
Defined in: core/dist/index.d.ts:175
#Parameters
message
#Returns
void
#onToolCall?
optional onToolCall?: (toolCall, context) => MaybePromise<void>
Defined in: core/dist/index.d.ts:177
#Parameters
toolCall
context
#Returns
MaybePromise<void>
#retriever?
optional retriever?: Retriever
Defined in: core/dist/index.d.ts:166
#skills?
optional skills?: SkillDefinition[]
Defined in: core/dist/index.d.ts:164
#systemPrompt?
optional systemPrompt?: string
Defined in: core/dist/index.d.ts:160
#temperature?
optional temperature?: number
Defined in: core/dist/index.d.ts:161
#tools?
optional tools?: ToolDefinition<Record<string, unknown>>[]
Defined in: core/dist/index.d.ts:163