Skip to content

Commit 4cc56e5

Browse files
committed
pass container id back
1 parent 3b6dc5e commit 4cc56e5

File tree

2 files changed

+159
-3
lines changed

2 files changed

+159
-3
lines changed

pydantic_ai_slim/pydantic_ai/models/anthropic.py

Lines changed: 32 additions & 1 deletion
Original file line numberDiff line numberDiff line change
@@ -74,6 +74,7 @@
7474
BetaCodeExecutionToolResultBlockContent,
7575
BetaCodeExecutionToolResultBlockParam,
7676
BetaCodeExecutionToolResultBlockParamContentParam,
77+
BetaContainerParams,
7778
BetaContentBlock,
7879
BetaContentBlockParam,
7980
BetaImageBlockParam,
@@ -200,6 +201,16 @@ class AnthropicModelSettings(ModelSettings, total=False):
200201
See https://docs.anthropic.com/en/docs/build-with-claude/prompt-caching for more information.
201202
"""
202203

204+
anthropic_container: BetaContainerParams | Literal[False]
205+
"""Container configuration for multi-turn conversations.
206+
207+
By default, if previous messages contain a container_id (from a prior response),
208+
it will be reused automatically.
209+
210+
Set to `False` to force a fresh container (ignore any container_id from history).
211+
Set to a dict (e.g. `{'id': 'container_xxx'}`) to explicitly specify a container.
212+
"""
213+
203214

204215
@dataclass(init=False)
205216
class AnthropicModel(Model):
@@ -385,6 +396,7 @@ async def _messages_create(
385396
output_format = self._native_output_format(model_request_parameters)
386397
betas, extra_headers = self._get_betas_and_extra_headers(tools, model_request_parameters, model_settings)
387398
betas.update(builtin_tool_betas)
399+
container = self._get_container(messages, model_settings)
388400
try:
389401
return await self.client.beta.messages.create(
390402
max_tokens=model_settings.get('max_tokens', 4096),
@@ -403,6 +415,7 @@ async def _messages_create(
403415
top_p=model_settings.get('top_p', OMIT),
404416
timeout=model_settings.get('timeout', NOT_GIVEN),
405417
metadata=model_settings.get('anthropic_metadata', OMIT),
418+
container=container or OMIT,
406419
extra_headers=extra_headers,
407420
extra_body=model_settings.get('extra_body'),
408421
)
@@ -439,6 +452,18 @@ def _get_betas_and_extra_headers(
439452

440453
return betas, extra_headers
441454

455+
def _get_container(
    self, messages: list[ModelMessage], model_settings: AnthropicModelSettings
) -> BetaContainerParams | None:
    """Resolve the container config to send with the API request.

    An explicit `anthropic_container` setting always wins: `False` means
    "no container" (the API will create a fresh one), while any other value
    is forwarded as-is. When the setting is absent, the most recent
    `container_id` recorded in prior model responses is reused so that
    multi-turn code-execution state carries over.
    """
    explicit = model_settings.get('anthropic_container')
    if explicit is not None:
        return explicit if explicit is not False else None
    # No explicit setting: walk history newest-first and reuse the latest container id, if any.
    for message in reversed(messages):
        if not isinstance(message, ModelResponse):
            continue
        details = message.provider_details
        if details and (container_id := details.get('container_id')):
            return BetaContainerParams(id=container_id)
    return None
466+
442467
async def _messages_count_tokens(
443468
self,
444469
messages: list[ModelMessage],
@@ -526,6 +551,9 @@ def _process_response(self, response: BetaMessage) -> ModelResponse:
526551
if raw_finish_reason := response.stop_reason: # pragma: no branch
527552
provider_details = {'finish_reason': raw_finish_reason}
528553
finish_reason = _FINISH_REASON_MAP.get(raw_finish_reason)
554+
if response.container:
555+
provider_details = provider_details or {}
556+
provider_details['container_id'] = response.container.id
529557

530558
return ModelResponse(
531559
parts=items,
@@ -1125,6 +1153,9 @@ async def _get_event_iterator(self) -> AsyncIterator[ModelResponseStreamEvent]:
11251153
if isinstance(event, BetaRawMessageStartEvent):
11261154
self._usage = _map_usage(event, self._provider_name, self._provider_url, self._model_name)
11271155
self.provider_response_id = event.message.id
1156+
if event.message.container:
1157+
self.provider_details = self.provider_details or {}
1158+
self.provider_details['container_id'] = event.message.container.id
11281159

11291160
elif isinstance(event, BetaRawContentBlockStartEvent):
11301161
current_block = event.content_block
@@ -1239,7 +1270,7 @@ async def _get_event_iterator(self) -> AsyncIterator[ModelResponseStreamEvent]:
12391270
elif isinstance(event, BetaRawMessageDeltaEvent):
12401271
self._usage = _map_usage(event, self._provider_name, self._provider_url, self._model_name, self._usage)
12411272
if raw_finish_reason := event.delta.stop_reason: # pragma: no branch
1242-
self.provider_details = {'finish_reason': raw_finish_reason}
1273+
self.provider_details = {**(self.provider_details or {}), 'finish_reason': raw_finish_reason}
12431274
self.finish_reason = _FINISH_REASON_MAP.get(raw_finish_reason)
12441275

12451276
elif isinstance(event, BetaRawContentBlockStopEvent): # pragma: no branch

tests/models/test_anthropic.py

Lines changed: 127 additions & 2 deletions
Original file line numberDiff line numberDiff line change
@@ -89,11 +89,14 @@
8989
BetaRawMessageStreamEvent,
9090
BetaServerToolUseBlock,
9191
BetaTextBlock,
92+
BetaTextDelta,
9293
BetaToolUseBlock,
9394
BetaUsage,
9495
BetaWebSearchResultBlock,
9596
BetaWebSearchToolResultBlock,
9697
)
98+
from anthropic.types.beta.beta_container import BetaContainer
99+
from anthropic.types.beta.beta_container_params import BetaContainerParams
97100
from anthropic.types.beta.beta_raw_message_delta_event import Delta
98101

99102
from pydantic_ai.models.anthropic import (
@@ -5512,7 +5515,7 @@ async def test_anthropic_code_execution_tool(allow_model_requests: None, anthrop
55125515
model_name='claude-sonnet-4-20250514',
55135516
timestamp=IsDatetime(),
55145517
provider_name='anthropic',
5515-
provider_details={'finish_reason': 'end_turn'},
5518+
provider_details={'finish_reason': 'end_turn', 'container_id': 'container_011CTCwceSoRxi8Pf16Fb7Tn'},
55165519
provider_response_id='msg_018bVTPr9khzuds31rFDuqW4',
55175520
finish_reason='stop',
55185521
run_id=IsStr(),
@@ -5579,7 +5582,7 @@ async def test_anthropic_code_execution_tool(allow_model_requests: None, anthrop
55795582
model_name='claude-sonnet-4-20250514',
55805583
timestamp=IsDatetime(),
55815584
provider_name='anthropic',
5582-
provider_details={'finish_reason': 'end_turn'},
5585+
provider_details={'finish_reason': 'end_turn', 'container_id': 'container_011CTCwdXe48NC7LaX3rxQ4d'},
55835586
provider_response_id='msg_01VngRFBcNddwrYQoKUmdePY',
55845587
finish_reason='stop',
55855588
run_id=IsStr(),
@@ -7858,3 +7861,125 @@ async def test_anthropic_cache_messages_real_api(allow_model_requests: None, ant
78587861
assert usage2.cache_read_tokens > 0
78597862
assert usage2.cache_write_tokens > 0
78607863
assert usage2.output_tokens > 0
7864+
7865+
7866+
async def test_anthropic_container_setting_explicit(allow_model_requests: None):
    """An explicit `anthropic_container` setting is forwarded verbatim to the API call."""
    response = completion_message(
        [BetaTextBlock(text='world', type='text')],
        BetaUsage(input_tokens=5, output_tokens=10),
    )
    mock_client = MockAnthropic.create_mock(response)
    model = AnthropicModel('claude-haiku-4-5', provider=AnthropicProvider(anthropic_client=mock_client))
    agent = Agent(model)

    # Run with an explicitly-configured container id.
    settings = AnthropicModelSettings(anthropic_container={'id': 'container_abc123'})
    await agent.run('hello', model_settings=settings)

    request_kwargs = get_mock_chat_completion_kwargs(mock_client)[0]
    assert request_kwargs['container'] == BetaContainerParams(id='container_abc123')
7878+
7879+
7880+
async def test_anthropic_container_from_message_history(allow_model_requests: None):
    """A container_id stored in a prior response's provider_details is reused on the next request."""
    response = completion_message(
        [BetaTextBlock(text='world', type='text')],
        BetaUsage(input_tokens=5, output_tokens=10),
    )
    mock_client = MockAnthropic.create_mock([response, response])
    model = AnthropicModel('claude-haiku-4-5', provider=AnthropicProvider(anthropic_client=mock_client))
    agent = Agent(model)

    # History whose last model response carries a container_id in provider_details.
    history: list[ModelMessage] = [
        ModelRequest(parts=[UserPromptPart(content='hello')]),
        ModelResponse(
            parts=[TextPart(content='world')],
            provider_details={'container_id': 'container_from_history'},
        ),
    ]

    await agent.run('follow up', message_history=history)

    request_kwargs = get_mock_chat_completion_kwargs(mock_client)[0]
    assert request_kwargs['container'] == BetaContainerParams(id='container_from_history')
7901+
7902+
7903+
async def test_anthropic_container_setting_false_ignores_history(allow_model_requests: None):
    """`anthropic_container=False` forces a fresh container even when history carries a container_id."""
    response = completion_message(
        [BetaTextBlock(text='world', type='text')],
        BetaUsage(input_tokens=5, output_tokens=10),
    )
    mock_client = MockAnthropic.create_mock(response)
    model = AnthropicModel('claude-haiku-4-5', provider=AnthropicProvider(anthropic_client=mock_client))
    agent = Agent(model)

    # History containing a container_id that the setting should override.
    history: list[ModelMessage] = [
        ModelRequest(parts=[UserPromptPart(content='hello')]),
        ModelResponse(
            parts=[TextPart(content='world')],
            provider_details={'container_id': 'container_should_be_ignored'},
        ),
    ]

    await agent.run(
        'follow up', message_history=history, model_settings=AnthropicModelSettings(anthropic_container=False)
    )

    request_kwargs = get_mock_chat_completion_kwargs(mock_client)[0]
    # With anthropic_container=False the model passes OMIT, so the API never sees a container.
    from anthropic import omit as OMIT

    assert request_kwargs.get('container') is OMIT
7929+
7930+
7931+
async def test_anthropic_container_id_from_stream_response(allow_model_requests: None):
    """The container id on a streamed message_start event ends up in provider_details."""
    from datetime import datetime

    events: list[BetaRawMessageStreamEvent] = [
        BetaRawMessageStartEvent(
            type='message_start',
            message=BetaMessage(
                id='msg_123',
                content=[],
                model='claude-3-5-haiku-123',
                role='assistant',
                stop_reason=None,
                type='message',
                usage=BetaUsage(input_tokens=5, output_tokens=0),
                # The container attached to the start event is what should be surfaced.
                container=BetaContainer(
                    id='container_from_stream',
                    expires_at=datetime(2025, 1, 1, 0, 0, 0),
                ),
            ),
        ),
        BetaRawContentBlockStartEvent(
            type='content_block_start',
            index=0,
            content_block=BetaTextBlock(text='', type='text'),
        ),
        BetaRawContentBlockDeltaEvent(
            type='content_block_delta',
            index=0,
            delta=BetaTextDelta(type='text_delta', text='hello'),
        ),
        BetaRawContentBlockStopEvent(type='content_block_stop', index=0),
        BetaRawMessageDeltaEvent(
            type='message_delta',
            delta=Delta(stop_reason='end_turn', stop_sequence=None),
            usage=BetaMessageDeltaUsage(output_tokens=5),
        ),
        BetaRawMessageStopEvent(type='message_stop'),
    ]

    mock_client = MockAnthropic.create_stream_mock(events)
    model = AnthropicModel('claude-haiku-4-5', provider=AnthropicProvider(anthropic_client=mock_client))
    agent = Agent(model)

    async with agent.run_stream('hello') as result:
        output = await result.get_output()
        assert output == 'hello'

    # The final model response must carry both the container id and the finish reason.
    final_response = result.all_messages()[-1]
    assert isinstance(final_response, ModelResponse)
    details = final_response.provider_details
    assert details is not None
    assert details.get('container_id') == 'container_from_stream'
    assert details.get('finish_reason') == 'end_turn'

0 commit comments

Comments
 (0)