89 | 89 | BetaRawMessageStreamEvent, |
90 | 90 | BetaServerToolUseBlock, |
91 | 91 | BetaTextBlock, |
| 92 | + BetaTextDelta, |
92 | 93 | BetaToolUseBlock, |
93 | 94 | BetaUsage, |
94 | 95 | BetaWebSearchResultBlock, |
95 | 96 | BetaWebSearchToolResultBlock, |
96 | 97 | ) |
| 98 | + from anthropic.types.beta.beta_container import BetaContainer |
| 99 | + from anthropic.types.beta.beta_container_params import BetaContainerParams |
97 | 100 | from anthropic.types.beta.beta_raw_message_delta_event import Delta |
98 | 101 |
99 | 102 | from pydantic_ai.models.anthropic import ( |
@@ -169,9 +172,7 @@ async def messages_create( |
169 | 172 | if isinstance(self.stream[0], Sequence): |
170 | 173 | response = MockAsyncStream(iter(cast(list[MockRawMessageStreamEvent], self.stream[self.index]))) |
171 | 174 | else: |
172 | | - response = MockAsyncStream( # pragma: no cover |
173 | | - iter(cast(list[MockRawMessageStreamEvent], self.stream)) |
174 | | - ) |
| 175 | + response = MockAsyncStream(iter(cast(list[MockRawMessageStreamEvent], self.stream))) |
175 | 176 | else: |
176 | 177 | assert self.messages_ is not None, '`messages` must be provided' |
177 | 178 | if isinstance(self.messages_, Sequence): |
@@ -5512,7 +5513,7 @@ async def test_anthropic_code_execution_tool(allow_model_requests: None, anthrop |
5512 | 5513 | model_name='claude-sonnet-4-20250514', |
5513 | 5514 | timestamp=IsDatetime(), |
5514 | 5515 | provider_name='anthropic', |
5515 | | - provider_details={'finish_reason': 'end_turn'}, |
| 5516 | + provider_details={'finish_reason': 'end_turn', 'container_id': 'container_011CTCwceSoRxi8Pf16Fb7Tn'}, |
5516 | 5517 | provider_response_id='msg_018bVTPr9khzuds31rFDuqW4', |
5517 | 5518 | finish_reason='stop', |
5518 | 5519 | run_id=IsStr(), |
@@ -5579,7 +5580,7 @@ async def test_anthropic_code_execution_tool(allow_model_requests: None, anthrop |
5579 | 5580 | model_name='claude-sonnet-4-20250514', |
5580 | 5581 | timestamp=IsDatetime(), |
5581 | 5582 | provider_name='anthropic', |
5582 | | - provider_details={'finish_reason': 'end_turn'}, |
| 5583 | + provider_details={'finish_reason': 'end_turn', 'container_id': 'container_011CTCwdXe48NC7LaX3rxQ4d'}, |
5583 | 5584 | provider_response_id='msg_01VngRFBcNddwrYQoKUmdePY', |
5584 | 5585 | finish_reason='stop', |
5585 | 5586 | run_id=IsStr(), |
@@ -7858,3 +7859,125 @@ async def test_anthropic_cache_messages_real_api(allow_model_requests: None, ant |
7858 | 7859 | assert usage2.cache_read_tokens > 0 |
7859 | 7860 | assert usage2.cache_write_tokens > 0 |
7860 | 7861 | assert usage2.output_tokens > 0 |
| 7862 | + |
| 7863 | + |
| 7864 | +async def test_anthropic_container_setting_explicit(allow_model_requests: None): |
| 7865 | + """Test that the anthropic_container setting passes an explicit container config to the API.""" |
| 7866 | + c = completion_message([BetaTextBlock(text='world', type='text')], BetaUsage(input_tokens=5, output_tokens=10)) |
| 7867 | + mock_client = MockAnthropic.create_mock(c) |
| 7868 | + m = AnthropicModel('claude-haiku-4-5', provider=AnthropicProvider(anthropic_client=mock_client)) |
| 7869 | + agent = Agent(m) |
| 7870 | + |
| 7871 | + # Test with explicit container config |
| 7872 | + await agent.run('hello', model_settings=AnthropicModelSettings(anthropic_container={'id': 'container_abc123'})) |
| 7873 | + |
| 7874 | + completion_kwargs = get_mock_chat_completion_kwargs(mock_client)[0] |
| 7875 | + assert completion_kwargs['container'] == BetaContainerParams(id='container_abc123') |
| 7876 | + |
| 7877 | + |
| 7878 | +async def test_anthropic_container_from_message_history(allow_model_requests: None): |
| 7879 | + """Test that container_id from message history is passed to subsequent requests.""" |
| 7880 | + c = completion_message([BetaTextBlock(text='world', type='text')], BetaUsage(input_tokens=5, output_tokens=10)) |
| 7881 | + mock_client = MockAnthropic.create_mock([c, c]) |
| 7882 | + m = AnthropicModel('claude-haiku-4-5', provider=AnthropicProvider(anthropic_client=mock_client)) |
| 7883 | + agent = Agent(m) |
| 7884 | + |
| 7885 | + # Create a message history with a container_id in provider_details |
| 7886 | + history: list[ModelMessage] = [ |
| 7887 | + ModelRequest(parts=[UserPromptPart(content='hello')]), |
| 7888 | + ModelResponse( |
| 7889 | + parts=[TextPart(content='world')], |
| 7890 | + provider_details={'container_id': 'container_from_history'}, |
| 7891 | + ), |
| 7892 | + ] |
| 7893 | + |
| 7894 | + # Run with the message history |
| 7895 | + await agent.run('follow up', message_history=history) |
| 7896 | + |
| 7897 | + completion_kwargs = get_mock_chat_completion_kwargs(mock_client)[0] |
| 7898 | + assert completion_kwargs['container'] == BetaContainerParams(id='container_from_history') |
| 7899 | + |
| 7900 | + |
| 7901 | +async def test_anthropic_container_setting_false_ignores_history(allow_model_requests: None): |
| 7902 | + """Test that anthropic_container=False ignores the container_id from message history.""" |
| 7903 | + c = completion_message([BetaTextBlock(text='world', type='text')], BetaUsage(input_tokens=5, output_tokens=10)) |
| 7904 | + mock_client = MockAnthropic.create_mock(c) |
| 7905 | + m = AnthropicModel('claude-haiku-4-5', provider=AnthropicProvider(anthropic_client=mock_client)) |
| 7906 | + agent = Agent(m) |
| 7907 | + |
| 7908 | + # Create a message history with a container_id |
| 7909 | + history: list[ModelMessage] = [ |
| 7910 | + ModelRequest(parts=[UserPromptPart(content='hello')]), |
| 7911 | + ModelResponse( |
| 7912 | + parts=[TextPart(content='world')], |
| 7913 | + provider_details={'container_id': 'container_should_be_ignored'}, |
| 7914 | + ), |
| 7915 | + ] |
| 7916 | + |
| 7917 | + # Run with anthropic_container=False to force fresh container |
| 7918 | + await agent.run( |
| 7919 | + 'follow up', message_history=history, model_settings=AnthropicModelSettings(anthropic_container=False) |
| 7920 | + ) |
| 7921 | + |
| 7922 | + completion_kwargs = get_mock_chat_completion_kwargs(mock_client)[0] |
| 7923 | + # When anthropic_container=False, container should be OMIT (filtered out before sending to the API) |
| 7924 | + from anthropic import omit as OMIT |
| 7925 | + |
| 7926 | + assert completion_kwargs.get('container') is OMIT |
| 7927 | + |
| 7928 | + |
| 7929 | +async def test_anthropic_container_id_from_stream_response(allow_model_requests: None): |
| 7930 | + """Test that container_id is extracted from a streamed response and stored in provider_details.""" |
| 7931 | + from datetime import datetime |
| 7932 | + |
| 7933 | + stream_events: list[BetaRawMessageStreamEvent] = [ # message_start (with container), one text block, then stop |
| 7934 | + BetaRawMessageStartEvent( |
| 7935 | + type='message_start', |
| 7936 | + message=BetaMessage( |
| 7937 | + id='msg_123', |
| 7938 | + content=[], |
| 7939 | + model='claude-3-5-haiku-123', |
| 7940 | + role='assistant', |
| 7941 | + stop_reason=None, |
| 7942 | + type='message', |
| 7943 | + usage=BetaUsage(input_tokens=5, output_tokens=0), |
| 7944 | + container=BetaContainer( # should surface as provider_details['container_id'] |
| 7945 | + id='container_from_stream', |
| 7946 | + expires_at=datetime(2025, 1, 1, 0, 0, 0), |
| 7947 | + ), |
| 7948 | + ), |
| 7949 | + ), |
| 7950 | + BetaRawContentBlockStartEvent( |
| 7951 | + type='content_block_start', |
| 7952 | + index=0, |
| 7953 | + content_block=BetaTextBlock(text='', type='text'), |
| 7954 | + ), |
| 7955 | + BetaRawContentBlockDeltaEvent( |
| 7956 | + type='content_block_delta', |
| 7957 | + index=0, |
| 7958 | + delta=BetaTextDelta(type='text_delta', text='hello'), |
| 7959 | + ), |
| 7960 | + BetaRawContentBlockStopEvent(type='content_block_stop', index=0), |
| 7961 | + BetaRawMessageDeltaEvent( |
| 7962 | + type='message_delta', |
| 7963 | + delta=Delta(stop_reason='end_turn', stop_sequence=None), |
| 7964 | + usage=BetaMessageDeltaUsage(output_tokens=5), |
| 7965 | + ), |
| 7966 | + BetaRawMessageStopEvent(type='message_stop'), |
| 7967 | + ] |
| 7968 | + |
| 7969 | + mock_client = MockAnthropic.create_stream_mock(stream_events) |
| 7970 | + m = AnthropicModel('claude-haiku-4-5', provider=AnthropicProvider(anthropic_client=mock_client)) |
| 7971 | + agent = Agent(m) |
| 7972 | + |
| 7973 | + async with agent.run_stream('hello') as result: |
| 7974 | + response = await result.get_output() |
| 7975 | + assert response == 'hello' |
| 7976 | + |
| 7977 | + # Check that container_id was captured in the response |
| 7978 | + messages = result.all_messages() |
| 7979 | + model_response = messages[-1] |
| 7980 | + assert isinstance(model_response, ModelResponse) |
| 7981 | + assert model_response.provider_details is not None |
| 7982 | + assert model_response.provider_details.get('container_id') == 'container_from_stream' |
| 7983 | + assert model_response.provider_details.get('finish_reason') == 'end_turn' |
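
For reference, a minimal usage sketch of the setting these tests pin down. This is a sketch, not part of the diff: the prompts and container id are illustrative, `run_sync` is assumed as the synchronous counterpart of the `agent.run(...)` calls used in the tests, and a real run would need `ANTHROPIC_API_KEY` set.

```python
from pydantic_ai import Agent
from pydantic_ai.models.anthropic import AnthropicModel, AnthropicModelSettings

agent = Agent(AnthropicModel('claude-haiku-4-5'))

# Pin the request to a specific, pre-existing code-execution container
# (the id here is hypothetical).
result = agent.run_sync(
    'hello',
    model_settings=AnthropicModelSettings(anthropic_container={'id': 'container_abc123'}),
)

# A container_id stored in provider_details of a prior response is reused on
# follow-up requests by default; anthropic_container=False opts out and
# forces a fresh container.
agent.run_sync(
    'follow up',
    message_history=result.all_messages(),
    model_settings=AnthropicModelSettings(anthropic_container=False),
)
```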