Initialize a ChatModel from a model name and provider. The integration package corresponding to the model provider must be installed.
initChatModel<
  RunInput extends BaseLanguageModelInput = BaseLanguageModelInput,
  CallOptions extends ConfigurableChatModelCallOptions = ConfigurableChatModelCallOptions
>(
  model: string,
  fields: Partial<Record<string, any>>
): Promise<ConfigurableModel<RunInput, CallOptions>>

import { initChatModel } from "langchain/chat_models/universal";
// Example: one-call initialization using "<provider>:<model>" strings.
// The provider prefix ("openai:", "anthropic:") selects the integration package.
const gpt4 = await initChatModel("openai:gpt-4", { temperature: 0.25 });
const gpt4Result = await gpt4.invoke("what's your name");

const claude = await initChatModel("anthropic:claude-3-opus-20240229", {
  temperature: 0.25,
});
const claudeResult = await claude.invoke("what's your name");

// Alternatively, pass the bare model name and name the provider explicitly.
const gemini = await initChatModel("gemini-1.5-pro", {
  modelProvider: "google-vertexai",
  temperature: 0.25,
});
const geminiResult = await gemini.invoke("what's your name");
import { initChatModel } from "langchain/chat_models/universal";
// Example: a model whose `model` and `apiKey` are chosen at invocation time
// (no default model — the first argument is `undefined`).
// Locals are renamed here to avoid redeclaring the top-level consts
// (`gpt4Result`, `claudeResult`, `configurableModel`) used by the other
// examples in this file — `const` redeclaration is a compile error.
const runtimeConfigurableModel = await initChatModel(undefined, {
  temperature: 0,
  configurableFields: ["model", "apiKey"],
});
const configurableGpt4Result = await runtimeConfigurableModel.invoke(
  "what's your name",
  {
    configurable: {
      model: "gpt-4",
    },
  }
);
const configurableClaudeResult = await runtimeConfigurableModel.invoke(
  "what's your name",
  {
    configurable: {
      model: "claude-sonnet-4-5-20250929",
    },
  }
);
import { initChatModel } from "langchain/chat_models/universal";
// Example: default model ("gpt-4" on openai) with every field runtime-configurable.
// `configurableFields: "any"` allows overriding arbitrary fields — including
// apiKey — so only use it with trusted configuration sources.
// `configPrefix: "foo"` namespaces the runtime config keys as `foo_<field>`.
const configurableModelWithDefault = await initChatModel("gpt-4", {
  modelProvider: "openai",
  configurableFields: "any",
  configPrefix: "foo",
  temperature: 0,
});
const openaiResult = await configurableModelWithDefault.invoke(
  "what's your name",
  {
    configurable: {
      foo_apiKey: process.env.OPENAI_API_KEY,
    },
  }
);
// Renamed from `claudeResult` to avoid redeclaring the const of the same name
// declared in an earlier example in this file (const redeclaration is an error).
const claudeOverrideResult = await configurableModelWithDefault.invoke(
  "what's your name",
  {
    configurable: {
      foo_model: "claude-sonnet-4-5-20250929",
      foo_modelProvider: "anthropic",
      foo_temperature: 0.6,
      foo_apiKey: process.env.ANTHROPIC_API_KEY,
    },
  }
);
import { initChatModel } from "langchain/chat_models/universal";
import { z } from "zod/v3";
import { tool } from "@langchain/core/tools";
// Dummy weather tool: input is validated against the zod schema, then the
// handler simply echoes it back as a JSON string.
const getWeatherTool = tool(
  (input) => JSON.stringify(input),
  {
    name: "GetWeather",
    description: "Get the current weather in a given location",
    schema: z
      .object({
        location: z
          .string()
          .describe("The city and state, e.g. San Francisco, CA"),
      })
      .describe("Get the current weather in a given location"),
  }
);
// Dummy population tool: mirrors GetWeather — validates input via the zod
// schema and echoes it back as a JSON string.
const getPopulationTool = tool(
  (input) => JSON.stringify(input),
  {
    name: "GetPopulation",
    description: "Get the current population in a given location",
    schema: z
      .object({
        location: z
          .string()
          .describe("The city and state, e.g. San Francisco, CA"),
      })
      .describe("Get the current population in a given location"),
  }
);
// Example: runtime-configurable model with tools bound once up front.
// Renamed from `configurableModel` to avoid redeclaring the const of the same
// name used by an earlier example in this file (const redeclaration is an error).
const toolCallingModel = await initChatModel("gpt-4", {
  configurableFields: ["model", "modelProvider", "apiKey"],
  temperature: 0,
});
// The tool bindings carry over when the underlying model is swapped at call time.
const configurableModelWithTools = toolCallingModel.bindTools([
  getWeatherTool,
  getPopulationTool,
]);
// Uses the default model (gpt-4); only the API key is supplied per call.
const configurableToolResult = await configurableModelWithTools.invoke(
  "Which city is hotter today and which is bigger: LA or NY?",
  {
    configurable: {
      apiKey: process.env.OPENAI_API_KEY,
    },
  }
);
// Same bound tools, but routed to Anthropic by overriding `model` at call time.
const configurableToolResult2 = await configurableModelWithTools.invoke(
  "Which city is hotter today and which is bigger: LA or NY?",
  {
    configurable: {
      model: "claude-sonnet-4-5-20250929",
      apiKey: process.env.ANTHROPIC_API_KEY,
    },
  }
);
import { initChatModel } from "langchain/chat_models/universal";
// Example: override the model's advertised capability profile —
// here capping the declared input-token budget.
const model = await initChatModel("gpt-4o-mini", {
  profile: { maxInputTokens: 100000 },
});
@description
This function initializes a ChatModel based on the provided model name and provider.
It supports various model providers and allows for runtime configuration of model parameters.
Security Note: Setting `configurableFields` to "any" means fields like apiKey, baseUrl, etc.
can be altered at runtime, potentially redirecting model requests to a different service/user.
Make sure that if you're accepting untrusted configurations, you enumerate the
`configurableFields` explicitly.
The function will attempt to infer the model provider from the model name if not specified.
Certain model name prefixes are associated with specific providers:
- gpt-3... or gpt-4... -> openai
- claude... -> anthropic
- amazon.... -> bedrock
- gemini... -> google-vertexai
- command... -> cohere
- accounts/fireworks... -> fireworks
@since 0.2.11
@version 0.2.11