Skip to content
Open
Show file tree
Hide file tree
Changes from all commits
Commits
Show all changes
26 commits
Select commit Hold shift + click to select a range
8d52d65
Clarify usage of agent factories
dsfaccini Nov 28, 2025
8c69124
add timestamps and update test snapshots
dsfaccini Nov 30, 2025
de6d989
fix: add missing timestamp field to ModelRequest test snapshots
dsfaccini Nov 30, 2025
97cad05
fix tests
dsfaccini Nov 30, 2025
e041398
coverage
dsfaccini Nov 30, 2025
ed199ca
improve code
dsfaccini Nov 30, 2025
47af45e
fix groq test
dsfaccini Nov 30, 2025
f9efa8a
coverage
dsfaccini Nov 30, 2025
7722ee9
coverage
dsfaccini Nov 30, 2025
bf1c640
add note
dsfaccini Nov 30, 2025
76aad15
- set default timestamp on StreamResponseModels
dsfaccini Dec 2, 2025
0e4500a
ModelRequest.timestamp=None by default for backwards compat
dsfaccini Dec 2, 2025
29e84ae
Merge branch 'main' into request-reponse-timestamps
dsfaccini Dec 4, 2025
5d0eff6
timestamp's set only in one place
dsfaccini Dec 4, 2025
b895578
Merge branch 'main' into request-reponse-timestamps
dsfaccini Dec 4, 2025
54bcb92
make sure last request always has timestamp
dsfaccini Dec 4, 2025
bad15b5
Merge branch 'main' into request-reponse-timestamps
dsfaccini Dec 5, 2025
9f9bf06
fix new tests
dsfaccini Dec 5, 2025
4176971
fix timestamps
dsfaccini Dec 5, 2025
8652eb1
fix test openai responses test and remove empty provider dicts
dsfaccini Dec 5, 2025
e811f4e
add signatures
dsfaccini Dec 5, 2025
a501928
re-add reset to signature and pd
dsfaccini Dec 5, 2025
8e1670e
fix snapshots
dsfaccini Dec 6, 2025
702b216
coverage
dsfaccini Dec 6, 2025
754c782
coverage
dsfaccini Dec 7, 2025
5327aea
coverage
dsfaccini Dec 8, 2025
File filter

Filter by extension

Filter by extension

Conversations
Failed to load comments.
Loading
Jump to
Jump to file
Failed to load files.
Loading
Diff view
Diff view
4 changes: 4 additions & 0 deletions docs/agents.md
Original file line number Diff line number Diff line change
Expand Up @@ -321,6 +321,7 @@ async def main():
timestamp=datetime.datetime(...),
)
],
timestamp=datetime.datetime(...),
run_id='...',
)
),
Expand Down Expand Up @@ -385,6 +386,7 @@ async def main():
timestamp=datetime.datetime(...),
)
],
timestamp=datetime.datetime(...),
run_id='...',
)
),
Expand Down Expand Up @@ -1049,6 +1051,7 @@ with capture_run_messages() as messages: # (2)!
timestamp=datetime.datetime(...),
)
],
timestamp=datetime.datetime(...),
run_id='...',
),
ModelResponse(
Expand All @@ -1073,6 +1076,7 @@ with capture_run_messages() as messages: # (2)!
timestamp=datetime.datetime(...),
)
],
timestamp=datetime.datetime(...),
run_id='...',
),
ModelResponse(
Expand Down
1 change: 1 addition & 0 deletions docs/api/models/function.md
Original file line number Diff line number Diff line change
Expand Up @@ -30,6 +30,7 @@ async def model_function(
timestamp=datetime.datetime(...),
)
],
timestamp=datetime.datetime(...),
run_id='...',
)
]
Expand Down
6 changes: 6 additions & 0 deletions docs/deferred-tools.md
Original file line number Diff line number Diff line change
Expand Up @@ -118,6 +118,7 @@ print(result.all_messages())
timestamp=datetime.datetime(...),
)
],
timestamp=datetime.datetime(...),
run_id='...',
),
ModelResponse(
Expand Down Expand Up @@ -152,6 +153,7 @@ print(result.all_messages())
timestamp=datetime.datetime(...),
)
],
timestamp=datetime.datetime(...),
run_id='...',
),
ModelRequest(
Expand All @@ -173,6 +175,7 @@ print(result.all_messages())
timestamp=datetime.datetime(...),
),
],
timestamp=datetime.datetime(...),
run_id='...',
),
ModelResponse(
Expand All @@ -197,6 +200,7 @@ print(result.all_messages())
timestamp=datetime.datetime(...),
)
],
timestamp=datetime.datetime(...),
run_id='...',
),
ModelResponse(
Expand Down Expand Up @@ -324,6 +328,7 @@ async def main():
timestamp=datetime.datetime(...),
)
],
timestamp=datetime.datetime(...),
run_id='...',
),
ModelResponse(
Expand All @@ -350,6 +355,7 @@ async def main():
timestamp=datetime.datetime(...),
)
],
timestamp=datetime.datetime(...),
run_id='...',
),
ModelResponse(
Expand Down
7 changes: 7 additions & 0 deletions docs/message-history.md
Original file line number Diff line number Diff line change
Expand Up @@ -51,6 +51,7 @@ print(result.all_messages())
timestamp=datetime.datetime(...),
),
],
timestamp=datetime.datetime(...),
run_id='...',
),
ModelResponse(
Expand Down Expand Up @@ -95,6 +96,7 @@ async def main():
timestamp=datetime.datetime(...),
),
],
timestamp=datetime.datetime(...),
run_id='...',
)
]
Expand Down Expand Up @@ -122,6 +124,7 @@ async def main():
timestamp=datetime.datetime(...),
),
],
timestamp=datetime.datetime(...),
run_id='...',
),
ModelResponse(
Expand Down Expand Up @@ -178,6 +181,7 @@ print(result2.all_messages())
timestamp=datetime.datetime(...),
),
],
timestamp=datetime.datetime(...),
run_id='...',
),
ModelResponse(
Expand All @@ -198,6 +202,7 @@ print(result2.all_messages())
timestamp=datetime.datetime(...),
)
],
timestamp=datetime.datetime(...),
run_id='...',
),
ModelResponse(
Expand Down Expand Up @@ -303,6 +308,7 @@ print(result2.all_messages())
timestamp=datetime.datetime(...),
),
],
timestamp=datetime.datetime(...),
run_id='...',
),
ModelResponse(
Expand All @@ -323,6 +329,7 @@ print(result2.all_messages())
timestamp=datetime.datetime(...),
)
],
timestamp=datetime.datetime(...),
run_id='...',
),
ModelResponse(
Expand Down
2 changes: 2 additions & 0 deletions docs/testing.md
Original file line number Diff line number Diff line change
Expand Up @@ -128,6 +128,7 @@ async def test_forecast():
timestamp=IsNow(tz=timezone.utc), # (7)!
),
],
timestamp=IsNow(tz=timezone.utc),
run_id=IsStr(),
),
ModelResponse(
Expand Down Expand Up @@ -158,6 +159,7 @@ async def test_forecast():
timestamp=IsNow(tz=timezone.utc),
),
],
timestamp=IsNow(tz=timezone.utc),
run_id=IsStr(),
),
ModelResponse(
Expand Down
3 changes: 3 additions & 0 deletions docs/tools.md
Original file line number Diff line number Diff line change
Expand Up @@ -88,6 +88,7 @@ print(dice_result.all_messages())
timestamp=datetime.datetime(...),
),
],
timestamp=datetime.datetime(...),
run_id='...',
),
ModelResponse(
Expand All @@ -110,6 +111,7 @@ print(dice_result.all_messages())
timestamp=datetime.datetime(...),
)
],
timestamp=datetime.datetime(...),
run_id='...',
),
ModelResponse(
Expand All @@ -132,6 +134,7 @@ print(dice_result.all_messages())
timestamp=datetime.datetime(...),
)
],
timestamp=datetime.datetime(...),
run_id='...',
),
ModelResponse(
Expand Down
5 changes: 4 additions & 1 deletion pydantic_ai_slim/pydantic_ai/_a2a.py
Original file line number Diff line number Diff line change
Expand Up @@ -25,6 +25,7 @@
ToolCallPart,
UserPromptPart,
VideoUrl,
_utils,
)

from .agent import AbstractAgent, AgentDepsT, OutputDataT
Expand Down Expand Up @@ -200,7 +201,9 @@ def build_message_history(self, history: list[Message]) -> list[ModelMessage]:
model_messages: list[ModelMessage] = []
for message in history:
if message['role'] == 'user':
model_messages.append(ModelRequest(parts=self._request_parts_from_a2a(message['parts'])))
model_messages.append(
ModelRequest(parts=self._request_parts_from_a2a(message['parts']), timestamp=_utils.now_utc())
)
else:
model_messages.append(ModelResponse(parts=self._response_parts_from_a2a(message['parts'])))
return model_messages
Expand Down
11 changes: 9 additions & 2 deletions pydantic_ai_slim/pydantic_ai/_agent_graph.py
Original file line number Diff line number Diff line change
Expand Up @@ -19,7 +19,7 @@
from pydantic_ai._function_schema import _takes_ctx as is_takes_ctx # type: ignore
from pydantic_ai._instrumentation import DEFAULT_INSTRUMENTATION_VERSION
from pydantic_ai._tool_manager import ToolManager
from pydantic_ai._utils import dataclasses_no_defaults_repr, get_union_args, is_async_callable, run_in_executor
from pydantic_ai._utils import dataclasses_no_defaults_repr, get_union_args, is_async_callable, now_utc, run_in_executor
from pydantic_ai.builtin_tools import AbstractBuiltinTool
from pydantic_graph import BaseNode, GraphRunContext
from pydantic_graph.beta import Graph, GraphBuilder
Expand Down Expand Up @@ -492,6 +492,7 @@ async def _make_request(
async def _prepare_request(
self, ctx: GraphRunContext[GraphAgentState, GraphAgentDeps[DepsT, NodeRunEndT]]
) -> tuple[ModelSettings | None, models.ModelRequestParameters, list[_messages.ModelMessage], RunContext[DepsT]]:
self.request.timestamp = now_utc()
self.request.run_id = self.request.run_id or ctx.state.run_id
ctx.state.message_history.append(self.request)

Expand All @@ -509,6 +510,11 @@ async def _prepare_request(
# Update the new message index to ensure `result.new_messages()` returns the correct messages
ctx.deps.new_message_index -= len(original_history) - len(message_history)

# Ensure the last request has a timestamp (history processors may create new ModelRequest objects without one)
last_request = message_history[-1]
if isinstance(last_request, _messages.ModelRequest) and last_request.timestamp is None:
last_request.timestamp = self.request.timestamp

# Merge possible consecutive trailing `ModelRequest`s into one, with tool call parts before user parts,
# but don't store it in the message history on state. This is just for the benefit of model classes that want clear user/assistant boundaries.
# See `tests/test_tools.py::test_parallel_tool_return_with_deferred` for an example where this is necessary
Expand Down Expand Up @@ -785,7 +791,7 @@ def _handle_final_result(

# For backwards compatibility, append a new ModelRequest using the tool returns and retries
if tool_responses:
messages.append(_messages.ModelRequest(parts=tool_responses, run_id=ctx.state.run_id))
messages.append(_messages.ModelRequest(parts=tool_responses, run_id=ctx.state.run_id, timestamp=now_utc()))

return End(final_result)

Expand Down Expand Up @@ -1340,6 +1346,7 @@ def _clean_message_history(messages: list[_messages.ModelMessage]) -> list[_mess
merged_message = _messages.ModelRequest(
parts=parts,
instructions=last_message.instructions or message.instructions,
timestamp=message.timestamp or last_message.timestamp,
)
clean_messages[-1] = merged_message
else:
Expand Down
6 changes: 3 additions & 3 deletions pydantic_ai_slim/pydantic_ai/_mcp.py
Original file line number Diff line number Diff line change
Expand Up @@ -4,7 +4,7 @@
from collections.abc import Sequence
from typing import Literal

from . import exceptions, messages
from . import _utils, exceptions, messages

try:
from mcp import types as mcp_types
Expand Down Expand Up @@ -44,15 +44,15 @@ def map_from_mcp_params(params: mcp_types.CreateMessageRequestParams) -> list[me
# role is assistant
# if there are any request parts, add a request message wrapping them
if request_parts:
pai_messages.append(messages.ModelRequest(parts=request_parts))
pai_messages.append(messages.ModelRequest(parts=request_parts, timestamp=_utils.now_utc()))
request_parts = []

response_parts.append(map_from_sampling_content(content))

if response_parts:
pai_messages.append(messages.ModelResponse(parts=response_parts))
if request_parts:
pai_messages.append(messages.ModelRequest(parts=request_parts))
pai_messages.append(messages.ModelRequest(parts=request_parts, timestamp=_utils.now_utc()))
return pai_messages


Expand Down
1 change: 1 addition & 0 deletions pydantic_ai_slim/pydantic_ai/agent/__init__.py
Original file line number Diff line number Diff line change
Expand Up @@ -509,6 +509,7 @@ async def main():
timestamp=datetime.datetime(...),
)
],
timestamp=datetime.datetime(...),
run_id='...',
)
),
Expand Down
7 changes: 6 additions & 1 deletion pydantic_ai_slim/pydantic_ai/agent/abstract.py
Original file line number Diff line number Diff line change
Expand Up @@ -560,7 +560,11 @@ async def on_complete() -> None:

# For backwards compatibility, append a new ModelRequest using the tool returns and retries
if parts:
messages.append(_messages.ModelRequest(parts, run_id=graph_ctx.state.run_id))
messages.append(
_messages.ModelRequest(
parts, run_id=graph_ctx.state.run_id, timestamp=_utils.now_utc()
)
)

await agent_run.next(_agent_graph.SetFinalResult(final_result))

Expand Down Expand Up @@ -1005,6 +1009,7 @@ async def main():
timestamp=datetime.datetime(...),
)
],
timestamp=datetime.datetime(...),
run_id='...',
)
),
Expand Down
1 change: 1 addition & 0 deletions pydantic_ai_slim/pydantic_ai/agent/wrapper.py
Original file line number Diff line number Diff line change
Expand Up @@ -165,6 +165,7 @@ async def main():
timestamp=datetime.datetime(...),
)
],
timestamp=datetime.datetime(...),
run_id='...',
)
),
Expand Down
1 change: 1 addition & 0 deletions pydantic_ai_slim/pydantic_ai/durable_exec/dbos/_agent.py
Original file line number Diff line number Diff line change
Expand Up @@ -824,6 +824,7 @@ async def main():
timestamp=datetime.datetime(...),
)
],
timestamp=datetime.datetime(...),
run_id='...',
)
),
Expand Down
Original file line number Diff line number Diff line change
Expand Up @@ -769,6 +769,7 @@ async def main():
timestamp=datetime.datetime(...),
)
],
timestamp=datetime.datetime(...),
run_id='...',
)
),
Expand Down
Original file line number Diff line number Diff line change
Expand Up @@ -843,6 +843,7 @@ async def main():
timestamp=datetime.datetime(...),
)
],
timestamp=datetime.datetime(...),
run_id='...',
)
),
Expand Down
10 changes: 7 additions & 3 deletions pydantic_ai_slim/pydantic_ai/messages.py
Original file line number Diff line number Diff line change
Expand Up @@ -997,6 +997,9 @@ class ModelRequest:

_: KW_ONLY

timestamp: datetime | None = None
"""The timestamp when the request was sent to the model."""

Comment on lines +1000 to +1002
Copy link
Contributor Author

Choose a reason for hiding this comment

The reason will be displayed to describe this comment to others. Learn more.

ModelRequest.timestamp needs to be None by default for backwards compat

instructions: str | None = None
"""The instructions for the model."""

Expand All @@ -1012,7 +1015,7 @@ class ModelRequest:
@classmethod
def user_text_prompt(cls, user_prompt: str, *, instructions: str | None = None) -> ModelRequest:
"""Create a `ModelRequest` with a single user prompt as text."""
return cls(parts=[UserPromptPart(user_prompt)], instructions=instructions)
return cls(parts=[UserPromptPart(user_prompt)], instructions=instructions, timestamp=_now_utc())

__repr__ = _utils.dataclasses_no_defaults_repr

Expand Down Expand Up @@ -1238,9 +1241,10 @@ class ModelResponse:
"""The name of the model that generated the response."""

timestamp: datetime = field(default_factory=_now_utc)
"""The timestamp of the response.
"""The timestamp when the response was received locally.

If the model provides a timestamp in the response (as OpenAI does) that will be used.
This is always a high-precision local datetime. Provider-specific timestamps
(if available) are stored in `provider_details['timestamp']`.
"""

kind: Literal['response'] = 'response'
Expand Down
4 changes: 2 additions & 2 deletions pydantic_ai_slim/pydantic_ai/models/anthropic.py
Original file line number Diff line number Diff line change
Expand Up @@ -531,6 +531,7 @@ def _process_response(self, response: BetaMessage) -> ModelResponse:
parts=items,
usage=_map_usage(response, self._provider.name, self._provider.base_url, self._model_name),
model_name=response.model,
timestamp=_utils.now_utc(),
provider_response_id=response.id,
provider_name=self._provider.name,
finish_reason=finish_reason,
Expand All @@ -551,7 +552,6 @@ async def _process_streamed_response(
model_request_parameters=model_request_parameters,
_model_name=first_chunk.message.model,
_response=peekable_response,
_timestamp=_utils.now_utc(),
_provider_name=self._provider.name,
_provider_url=self._provider.base_url,
)
Expand Down Expand Up @@ -1113,9 +1113,9 @@ class AnthropicStreamedResponse(StreamedResponse):

_model_name: AnthropicModelName
_response: AsyncIterable[BetaRawMessageStreamEvent]
_timestamp: datetime
_provider_name: str
_provider_url: str
_timestamp: datetime = field(default_factory=_utils.now_utc)

async def _get_event_iterator(self) -> AsyncIterator[ModelResponseStreamEvent]: # noqa: C901
current_block: BetaContentBlock | None = None
Expand Down
Loading