/**
 * Per-call options accepted when invoking a ChatAnthropic model.
 *
 * Extends `BaseChatModelCallOptions` and `Pick<AnthropicInput, "streamUsage">`
 * (see the Hierarchy section of this page).
 */
interface ChatAnthropicCallOptions {
    /** Callbacks for this call and any sub-calls (e.g. a Chain calling an LLM). Tags are passed to all callbacks; metadata is passed to handle*Start callbacks. */
    callbacks?: Callbacks;
    /** Runtime values for attributes previously made configurable on this Runnable, or sub-Runnables. */
    configurable?: Record<string, any>;
    /** Custom headers to pass to the Anthropic API when making a request. */
    headers?: Record<string, string>;
    /** Maximum number of parallel calls to make. */
    maxConcurrency?: number;
    /** Metadata for this call and any sub-calls. Keys should be strings; values should be JSON-serializable. */
    metadata?: Record<string, unknown>;
    /** Maximum number of times a call can recurse. Defaults to 25 if not provided. */
    recursionLimit?: number;
    /** Unique identifier for the tracer run for this call. A new UUID is generated if not provided. */
    runId?: string;
    /** Name for the tracer run for this call. Defaults to the name of the class. */
    runName?: string;
    /** Abort signal for this call; the call is aborted when the signal is aborted. */
    signal?: AbortSignal;
    /** Stop tokens for this call. Defaults to the model's default stop tokens if not provided. */
    stop?: string[];
    /** Whether or not to include token usage data in streamed chunks. Default: true. */
    streamUsage?: boolean;
    /** Tags for this call and any sub-calls; can be used to filter calls. */
    tags?: string[];
    /** Timeout for this call in milliseconds. */
    timeout?: number;
    /** Specifies which tool, if any, the model should use. Default: "auto". */
    tool_choice?: AnthropicToolChoice;
    /** Tool definitions for this call — presumably the tools the model may invoke; verify against `ChatAnthropicToolType`. */
    tools?: ChatAnthropicToolType[];
}

Hierarchy

  • BaseChatModelCallOptions
  • Pick<AnthropicInput, "streamUsage">
    • ChatAnthropicCallOptions

Properties

callbacks?: Callbacks

Callbacks for this call and any sub-calls (e.g. a Chain calling an LLM). Tags are passed to all callbacks; metadata is passed to handle*Start callbacks.

configurable?: Record<string, any>

Runtime values for attributes previously made configurable on this Runnable, or sub-Runnables.

headers?: Record<string, string>

Custom headers to pass to the Anthropic API when making a request.

maxConcurrency?: number

Maximum number of parallel calls to make.

metadata?: Record<string, unknown>

Metadata for this call and any sub-calls (e.g. a Chain calling an LLM). Keys should be strings; values should be JSON-serializable.

recursionLimit?: number

Maximum number of times a call can recurse. If not provided, defaults to 25.

runId?: string

Unique identifier for the tracer run for this call. If not provided, a new UUID will be generated.

runName?: string

Name for the tracer run for this call. Defaults to the name of the class.

signal?: AbortSignal

Abort signal for this call. If provided, the call will be aborted when the signal is aborted.

stop?: string[]

Stop tokens to use for this call. If not provided, the default stop tokens for the model will be used.

streamUsage?: boolean

Whether or not to include token usage data in streamed chunks.

Default: true
tags?: string[]

Tags for this call and any sub-calls (e.g. a Chain calling an LLM). You can use these to filter calls.

timeout?: number

Timeout for this call in milliseconds.

tool_choice?: AnthropicToolChoice

Specifies which tool, if any, the model should use when responding.

"auto"
tools?: ChatAnthropicToolType[]