/**
 * Input options for constructing a Groq chat model.
 * Extends `BaseChatModelParams` (per the documented hierarchy).
 */
interface ChatGroqInput {
    /** The Groq API key to use for requests. Defaults to `process.env.GROQ_API_KEY`. */
    apiKey?: string;
    // Cache toggle or cache instance — NOTE(review): not described in the source docs; semantics inherited from BaseChatModelParams.
    cache?: boolean | BaseCache<Generation[]>;
    /** @deprecated Use {@link callbacks} instead. */
    callbackManager?: CallbackManager;
    // Callback handlers — NOTE(review): not described in the source docs.
    callbacks?: Callbacks;
    /**
     * The maximum number of concurrent calls that can be made.
     * Defaults to `Infinity`, which means no limit.
     */
    maxConcurrency?: number;
    /**
     * The maximum number of retries that can be made for a single call,
     * with an exponential backoff between each attempt. Defaults to 6.
     */
    maxRetries?: number;
    /**
     * The maximum number of tokens that the model can process in a single
     * response. This limit ensures computational efficiency and resource
     * management.
     */
    maxTokens?: number;
    // Arbitrary metadata — NOTE(review): not described in the source docs.
    metadata?: Record<string, unknown>;
    /** The name of the model to use. Default: `"mixtral-8x7b-32768"`. */
    model?: string;
    /** The name of the model to use. Alias for {@link model}. Default: `"mixtral-8x7b-32768"`. */
    modelName?: string;
    /**
     * Custom handler to handle failed attempts. Takes the originally thrown
     * error object as input, and should itself throw an error if the input
     * error is not retryable.
     */
    onFailedAttempt?: FailedAttemptHandler;
    /**
     * Up to 4 sequences where the API will stop generating further tokens.
     * The returned text will not contain the stop sequence.
     * Alias for {@link stopSequences}.
     */
    stop?: null | string | string[];
    /**
     * Up to 4 sequences where the API will stop generating further tokens.
     * The returned text will not contain the stop sequence.
     */
    stopSequences?: string[];
    /** Whether or not to stream responses. */
    streaming?: boolean;
    // Tags attached to runs — NOTE(review): not described in the source docs.
    tags?: string[];
    /** The temperature to use for sampling. Default: `0.7`. */
    temperature?: number;
    // Verbose logging toggle — NOTE(review): not described in the source docs.
    verbose?: boolean;
}

Hierarchy

  • BaseChatModelParams
    • ChatGroqInput

Properties

apiKey?: string

The Groq API key to use for requests.

Default: `process.env.GROQ_API_KEY`
cache?: boolean | BaseCache<Generation[]>
callbackManager?: CallbackManager

Deprecated — use `callbacks` instead.

callbacks?: Callbacks
maxConcurrency?: number

The maximum number of concurrent calls that can be made. Defaults to Infinity, which means no limit.

maxRetries?: number

The maximum number of retries that can be made for a single call, with an exponential backoff between each attempt. Defaults to 6.

maxTokens?: number

The maximum number of tokens that the model can process in a single response. This limit ensures computational efficiency and resource management.

metadata?: Record<string, unknown>
model?: string

The name of the model to use.

Default: `"mixtral-8x7b-32768"`
modelName?: string

The name of the model to use. Alias for `model`.

Default: `"mixtral-8x7b-32768"`
onFailedAttempt?: FailedAttemptHandler

Custom handler to handle failed attempts. Takes the originally thrown error object as input, and should itself throw an error if the input error is not retryable.

stop?: null | string | string[]

Up to 4 sequences where the API will stop generating further tokens. The returned text will not contain the stop sequence. Alias for stopSequences

stopSequences?: string[]

Up to 4 sequences where the API will stop generating further tokens. The returned text will not contain the stop sequence.

streaming?: boolean

Whether or not to stream responses.

tags?: string[]
temperature?: number

The temperature to use for sampling.

Default: `0.7`
verbose?: boolean