Skip to content

Commit 468dad1

Browse files
authored
chore: use model IDs, latest anthropic models (#33747)
- Standardize on using model IDs, no more aliases — makes future maintenance easier
- Use latest models in docstrings to highlight support
- Remove remaining Sonnet 3.7 usage due to deprecation

Depends on #33751
1 parent 32d294b commit 468dad1

File tree

19 files changed

+44
-38
lines changed

19 files changed

+44
-38
lines changed

libs/core/langchain_core/callbacks/usage.py

Lines changed: 2 additions & 2 deletions
Original file line numberDiff line numberDiff line change
@@ -24,7 +24,7 @@ class UsageMetadataCallbackHandler(BaseCallbackHandler):
2424
from langchain_core.callbacks import UsageMetadataCallbackHandler
2525
2626
llm_1 = init_chat_model(model="openai:gpt-4o-mini")
27-
llm_2 = init_chat_model(model="anthropic:claude-3-5-haiku-latest")
27+
llm_2 = init_chat_model(model="anthropic:claude-3-5-haiku-20241022")
2828
2929
callback = UsageMetadataCallbackHandler()
3030
result_1 = llm_1.invoke("Hello", config={"callbacks": [callback]})
@@ -109,7 +109,7 @@ def get_usage_metadata_callback(
109109
from langchain_core.callbacks import get_usage_metadata_callback
110110
111111
llm_1 = init_chat_model(model="openai:gpt-4o-mini")
112-
llm_2 = init_chat_model(model="anthropic:claude-3-5-haiku-latest")
112+
llm_2 = init_chat_model(model="anthropic:claude-3-5-haiku-20241022")
113113
114114
with get_usage_metadata_callback() as cb:
115115
llm_1.invoke("Hello")

libs/core/langchain_core/rate_limiters.py

Lines changed: 3 additions & 1 deletion
Original file line numberDiff line numberDiff line change
@@ -105,7 +105,9 @@ class InMemoryRateLimiter(BaseRateLimiter):
105105
106106
from langchain_anthropic import ChatAnthropic
107107
108-
model = ChatAnthropic(model_name="claude-sonnet-4-5", rate_limiter=rate_limiter)
108+
model = ChatAnthropic(
109+
model_name="claude-sonnet-4-5-20250929", rate_limiter=rate_limiter
110+
)
109111
110112
for _ in range(5):
111113
tic = time.time()

libs/core/langchain_core/runnables/base.py

Lines changed: 1 addition & 1 deletion
Original file line numberDiff line numberDiff line change
@@ -2637,7 +2637,7 @@ def configurable_alternatives(
26372637
from langchain_openai import ChatOpenAI
26382638
26392639
model = ChatAnthropic(
2640-
model_name="claude-3-7-sonnet-20250219"
2640+
model_name="claude-sonnet-4-5-20250929"
26412641
).configurable_alternatives(
26422642
ConfigurableField(id="llm"),
26432643
default_key="anthropic",

libs/core/langchain_core/runnables/fallbacks.py

Lines changed: 1 addition & 1 deletion
Original file line numberDiff line numberDiff line change
@@ -604,7 +604,7 @@ def __getattr__(self, name: str) -> Any:
604604
from langchain_anthropic import ChatAnthropic
605605
606606
gpt_4o = ChatOpenAI(model="gpt-4o")
607-
claude_3_sonnet = ChatAnthropic(model="claude-3-7-sonnet-20250219")
607+
claude_3_sonnet = ChatAnthropic(model="claude-sonnet-4-5-20250929")
608608
model = gpt_4o.with_fallbacks([claude_3_sonnet])
609609
610610
model.model_name

libs/langchain/langchain_classic/agents/tool_calling_agent/base.py

Lines changed: 1 addition & 1 deletion
Original file line numberDiff line numberDiff line change
@@ -55,7 +55,7 @@ def create_tool_calling_agent(
5555
("placeholder", "{agent_scratchpad}"),
5656
]
5757
)
58-
model = ChatAnthropic(model="claude-3-opus-20240229")
58+
model = ChatAnthropic(model="claude-opus-4-1-20250805")
5959
6060
@tool
6161
def magic_function(input: int) -> int:

libs/langchain/langchain_classic/chains/openai_functions/extraction.py

Lines changed: 2 additions & 2 deletions
Original file line numberDiff line numberDiff line change
@@ -66,7 +66,7 @@ class Joke(BaseModel):
6666
# Please reference to the documentation of structured_output
6767
# to see an up to date list of which models support
6868
# with_structured_output.
69-
model = ChatAnthropic(model="claude-3-opus-20240229", temperature=0)
69+
model = ChatAnthropic(model="claude-opus-4-1-20250805", temperature=0)
7070
structured_model = model.with_structured_output(Joke)
7171
structured_model.invoke("Tell me a joke about cats.
7272
Make sure to call the Joke function.")
@@ -135,7 +135,7 @@ class Joke(BaseModel):
135135
# Please reference to the documentation of structured_output
136136
# to see an up to date list of which models support
137137
# with_structured_output.
138-
model = ChatAnthropic(model="claude-3-opus-20240229", temperature=0)
138+
model = ChatAnthropic(model="claude-opus-4-1-20250805", temperature=0)
139139
structured_model = model.with_structured_output(Joke)
140140
structured_model.invoke("Tell me a joke about cats.
141141
Make sure to call the Joke function.")

libs/langchain/langchain_classic/chains/openai_functions/tagging.py

Lines changed: 1 addition & 1 deletion
Original file line numberDiff line numberDiff line change
@@ -152,7 +152,7 @@ class Joke(BaseModel):
152152
# Please reference to the documentation of structured_output
153153
# to see an up to date list of which models support
154154
# with_structured_output.
155-
model = ChatAnthropic(model="claude-3-opus-20240229", temperature=0)
155+
model = ChatAnthropic(model="claude-opus-4-1-20250805", temperature=0)
156156
structured_model = model.with_structured_output(Joke)
157157
structured_model.invoke(
158158
"Why did the cat cross the road? To get to the other "

libs/langchain/langchain_classic/chains/openai_tools/extraction.py

Lines changed: 1 addition & 1 deletion
Original file line numberDiff line numberDiff line change
@@ -43,7 +43,7 @@ class Joke(BaseModel):
4343
# Please reference to the documentation of structured_output
4444
# to see an up to date list of which models support
4545
# with_structured_output.
46-
model = ChatAnthropic(model="claude-3-opus-20240229", temperature=0)
46+
model = ChatAnthropic(model="claude-opus-4-1-20250805", temperature=0)
4747
structured_model = model.with_structured_output(Joke)
4848
structured_model.invoke("Tell me a joke about cats.
4949
Make sure to call the Joke function.")

libs/langchain/langchain_classic/chains/structured_output/base.py

Lines changed: 2 additions & 2 deletions
Original file line numberDiff line numberDiff line change
@@ -56,7 +56,7 @@ class Joke(BaseModel):
5656
# Please reference to the documentation of structured_output
5757
# to see an up to date list of which models support
5858
# with_structured_output.
59-
model = ChatAnthropic(model="claude-3-opus-20240229", temperature=0)
59+
model = ChatAnthropic(model="claude-opus-4-1-20250805", temperature=0)
6060
structured_model = model.with_structured_output(Joke)
6161
structured_model.invoke("Tell me a joke about cats.
6262
Make sure to call the Joke function.")
@@ -175,7 +175,7 @@ class Joke(BaseModel):
175175
# Please reference to the documentation of structured_output
176176
# to see an up to date list of which models support
177177
# with_structured_output.
178-
model = ChatAnthropic(model="claude-3-opus-20240229", temperature=0)
178+
model = ChatAnthropic(model="claude-opus-4-1-20250805", temperature=0)
179179
structured_model = model.with_structured_output(Joke)
180180
structured_model.invoke("Tell me a joke about cats.
181181
Make sure to call the Joke function.")

libs/langchain/langchain_classic/chat_models/base.py

Lines changed: 5 additions & 5 deletions
Original file line numberDiff line numberDiff line change
@@ -96,7 +96,7 @@ def init_chat_model(
9696
for supported model parameters to use as `**kwargs`.
9797
9898
Args:
99-
model: The name of the model, e.g. `'o3-mini'`, `'claude-sonnet-4-5'`.
99+
model: The name or ID of the model, e.g. `'o3-mini'`, `'claude-sonnet-4-5-20250929'`.
100100
101101
You can also specify model and model provider in a single argument using
102102
`'{model_provider}:{model}'` format, e.g. `'openai:o1'`.
@@ -206,7 +206,7 @@ def init_chat_model(
206206
from langchain_classic.chat_models import init_chat_model
207207
208208
o3_mini = init_chat_model("openai:o3-mini", temperature=0)
209-
claude_sonnet = init_chat_model("anthropic:claude-sonnet-4-5", temperature=0)
209+
claude_sonnet = init_chat_model("anthropic:claude-sonnet-4-5-20250929", temperature=0)
210210
gemini_2-5_flash = init_chat_model(
211211
"google_vertexai:gemini-2.5-flash", temperature=0
212212
)
@@ -233,7 +233,7 @@ def init_chat_model(
233233
234234
configurable_model.invoke(
235235
"what's your name",
236-
config={"configurable": {"model": "claude-sonnet-4-5"}},
236+
config={"configurable": {"model": "claude-sonnet-4-5-20250929"}},
237237
)
238238
```
239239
@@ -258,7 +258,7 @@ def init_chat_model(
258258
"what's your name",
259259
config={
260260
"configurable": {
261-
"foo_model": "anthropic:claude-sonnet-4-5",
261+
"foo_model": "anthropic:claude-sonnet-4-5-20250929",
262262
"foo_temperature": 0.6,
263263
}
264264
},
@@ -311,7 +311,7 @@ class GetPopulation(BaseModel):
311311
312312
configurable_model_with_tools.invoke(
313313
"Which city is hotter today and which is bigger: LA or NY?",
314-
config={"configurable": {"model": "claude-sonnet-4-5"}},
314+
config={"configurable": {"model": "claude-sonnet-4-5-20250929"}},
315315
)
316316
# Use Sonnet 4.5
317317
```

0 commit comments

Comments (0)