ChatAnthropicToolsInput: {
    anthropicApiKey?: undefined | string;
    anthropicApiUrl?: undefined | string;
    apiKey?: undefined | string;
    cache: undefined | boolean | BaseCache<Generation[]>;
    callbackManager: undefined | CallbackManager;
    callbacks: undefined | Callbacks;
    clientOptions?: undefined | ClientOptions;
    createClient?: undefined | ((options: ClientOptions) => any);
    invocationKwargs?: undefined | Kwargs;
    llm?: undefined | BaseChatModel<BaseChatModelCallOptions, AIMessageChunk>;
    maxConcurrency: undefined | number;
    maxRetries: undefined | number;
    maxTokens?: undefined | number;
    maxTokensToSample?: undefined | number;
    metadata: undefined | Record<string, unknown>;
    model?: undefined | string;
    modelName?: undefined | string;
    onFailedAttempt: undefined | FailedAttemptHandler;
    stopSequences?: undefined | string[];
    streamUsage?: undefined | boolean;
    streaming?: undefined | boolean;
    systemPromptTemplate?: undefined | BasePromptTemplate<any, BasePromptValueInterface, any>;
    tags: undefined | string[];
    temperature?: undefined | number;
    topK?: undefined | number;
    topP?: undefined | number;
    verbose: undefined | boolean;
}

Type declaration

  • Optional anthropicApiKey?: undefined | string

    Anthropic API key

  • Optional anthropicApiUrl?: undefined | string

    Anthropic API URL

  • Optional apiKey?: undefined | string

    Anthropic API key

  • cache: undefined | boolean | BaseCache<Generation[]>
  • callbackManager: undefined | CallbackManager
  • callbacks: undefined | Callbacks
  • Optional clientOptions?: undefined | ClientOptions

    Overridable Anthropic ClientOptions

  • Optional createClient?: undefined | ((options: ClientOptions) => any)

    Optional method that returns an initialized underlying Anthropic client. Useful for accessing Anthropic models hosted on other cloud services such as Google Vertex.

  • Optional invocationKwargs?: undefined | Kwargs

    Holds any additional parameters that are valid to pass to anthropic.messages that are not explicitly specified on this class.

  • Optional llm?: undefined | BaseChatModel<BaseChatModelCallOptions, AIMessageChunk>
  • maxConcurrency: undefined | number
  • maxRetries: undefined | number
  • Optional maxTokens?: undefined | number

    A maximum number of tokens to generate before stopping.

  • Optional maxTokensToSample?: undefined | number

    A maximum number of tokens to generate before stopping.

    Deprecated: Use "maxTokens" instead.

  • metadata: undefined | Record<string, unknown>
  • Optional model?: undefined | string

    Model name to use

  • Optional modelName?: undefined | string

    Deprecated: Use "model" instead.

  • onFailedAttempt: undefined | FailedAttemptHandler
  • Optional stopSequences?: undefined | string[]

    A list of strings upon which to stop generating. You probably want ["\n\nHuman:"], as that's the cue for the next turn in the dialog agent.

  • Optional streamUsage?: undefined | boolean

    Whether or not to include token usage data in streamed chunks.

    Default: true
    
  • Optional streaming?: undefined | boolean

    Whether to stream the results or not

  • Optional systemPromptTemplate?: undefined | BasePromptTemplate<any, BasePromptValueInterface, any>
  • tags: undefined | string[]
  • Optional temperature?: undefined | number

    Amount of randomness injected into the response. Ranges from 0 to 1. Use temp closer to 0 for analytical / multiple choice, and temp closer to 1 for creative and generative tasks.

  • Optional topK?: undefined | number

    Only sample from the top K options for each subsequent token. Used to remove "long tail" low probability responses. Defaults to -1, which disables it.

  • Optional topP?: undefined | number

    Does nucleus sampling, in which we compute the cumulative distribution over all the options for each subsequent token in decreasing probability order and cut it off once it reaches a particular probability specified by top_p. Defaults to -1, which disables it. Note that you should either alter temperature or top_p, but not both.

  • verbose: undefined | boolean