ChatFireworksCallOptions: {
    audio: undefined | ChatCompletionAudioParam;
    callbacks?: undefined | Callbacks;
    configurable: undefined | Record<string, any>;
    maxConcurrency?: undefined | number;
    metadata?: undefined | Record<string, unknown>;
    modalities: undefined | ChatCompletionModality[];
    options: undefined | OpenAICoreRequestOptions<Record<string, unknown>>;
    parallel_tool_calls: undefined | boolean;
    prediction: undefined | ChatCompletionPredictionContent;
    promptIndex: undefined | number;
    recursionLimit?: undefined | number;
    response_format: undefined | ChatOpenAIResponseFormat;
    runId?: undefined | string;
    runName?: undefined | string;
    seed: undefined | number;
    signal?: undefined | AbortSignal;
    stop?: undefined | string[];
    stream_options: undefined | {
        include_usage: boolean;
    };
    strict: undefined | boolean;
    tags?: undefined | string[];
    timeout?: undefined | number;
    tool_choice: undefined | OpenAIToolChoice;
    tools: undefined | ChatOpenAIToolType[];
}
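
These options are supplied per request, typically as the second argument to invoke, stream, or batch on a ChatFireworks instance. Below is a minimal usage sketch: the model id, prompt, and metadata values are illustrative, and it assumes FIREWORKS_API_KEY is set in the environment.

import { ChatFireworks } from "@langchain/community/chat_models/fireworks";

const model = new ChatFireworks({
  // Illustrative model id; substitute any Fireworks chat model you have access to.
  model: "accounts/fireworks/models/llama-v3p1-70b-instruct",
  temperature: 0,
});

// Per-call options: stop sequences, a 10-second timeout, and tracing metadata.
const response = await model.invoke("Write a haiku about the ocean.", {
  stop: ["\n\n"],
  timeout: 10_000,
  tags: ["docs-example"],
  metadata: { requestId: "demo-123" },
  runName: "haiku-call",
});

console.log(response.content);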

Type declaration

  • audio: undefined | ChatCompletionAudioParam
  • Optional callbacks?: undefined | Callbacks

    Callbacks for this call and any sub-calls (e.g. a Chain calling an LLM). Tags are passed to all callbacks; metadata is passed to handle*Start callbacks.

  • configurable: undefined | Record<string, any>
  • Optional maxConcurrency?: undefined | number

    Maximum number of parallel calls to make.

  • Optional metadata?: undefined | Record<string, unknown>

    Metadata for this call and any sub-calls (e.g. a Chain calling an LLM). Keys should be strings, values should be JSON-serializable.

  • modalities: undefined | ChatCompletionModality[]
  • options: undefined | OpenAICoreRequestOptions<Record<string, unknown>>
  • parallel_tool_calls: undefined | boolean
  • prediction: undefined | ChatCompletionPredictionContent
  • promptIndex: undefined | number
  • Optional recursionLimit?: undefined | number

    Maximum number of times a call can recurse. If not provided, defaults to 25.

  • response_format: undefined | ChatOpenAIResponseFormat
  • Optional runId?: undefined | string

    Unique identifier for the tracer run for this call. If not provided, a new UUID will be generated.

  • Optional runName?: undefined | string

    Name for the tracer run for this call. Defaults to the name of the class.

  • seed: undefined | number
  • Optional signal?: undefined | AbortSignal

    Abort signal for this call. If provided, the call will be aborted when the signal is aborted.

  • Optional stop?: undefined | string[]

    Stop tokens to use for this call. If not provided, the default stop tokens for the model will be used.

  • stream_options: undefined | {
        include_usage: boolean;
    }
  • strict: undefined | boolean
  • Optional tags?: undefined | string[]

    Tags for this call and any sub-calls (e.g. a Chain calling an LLM). You can use these to filter calls.

  • Optional timeout?: undefined | number

    Timeout for this call in milliseconds.

  • tool_choice: undefined | OpenAIToolChoice
  • tools: undefined | ChatOpenAIToolType[]
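
The tool-related options (tools, tool_choice, parallel_tool_calls, strict) mirror the OpenAI chat-completions parameters and can also be bound ahead of time with bind. Below is a minimal sketch assuming a Fireworks model with function-calling support; the get_weather tool and its schema are hypothetical.

import { ChatFireworks } from "@langchain/community/chat_models/fireworks";

const model = new ChatFireworks({
  model: "accounts/fireworks/models/llama-v3p1-70b-instruct",
});

// Bind OpenAI-style tool definitions as call options. `tool_choice` forces the
// named tool to be called, and `parallel_tool_calls: false` disables parallel
// tool calls for this request.
const withTools = model.bind({
  tools: [
    {
      type: "function",
      function: {
        name: "get_weather", // hypothetical tool for illustration
        description: "Look up the current weather for a city.",
        parameters: {
          type: "object",
          properties: { city: { type: "string" } },
          required: ["city"],
        },
      },
    },
  ],
  tool_choice: { type: "function", function: { name: "get_weather" } },
  parallel_tool_calls: false,
});

const aiMessage = await withTools.invoke("What's the weather in Paris?");
console.log(aiMessage.tool_calls);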