Skip to content
Open
Show file tree
Hide file tree
Changes from 12 commits
Commits
Show all changes
26 commits
Select commit Hold shift + click to select a range
8d52d65
Clarify usage of agent factories
dsfaccini Nov 28, 2025
8c69124
add timestamps and update test snapshots
dsfaccini Nov 30, 2025
de6d989
fix: add missing timestamp field to ModelRequest test snapshots
dsfaccini Nov 30, 2025
97cad05
fix tests
dsfaccini Nov 30, 2025
e041398
coverage
dsfaccini Nov 30, 2025
ed199ca
improve code
dsfaccini Nov 30, 2025
47af45e
fix groq test
dsfaccini Nov 30, 2025
f9efa8a
coverage
dsfaccini Nov 30, 2025
7722ee9
coverage
dsfaccini Nov 30, 2025
bf1c640
add note
dsfaccini Nov 30, 2025
76aad15
- set default timestamp on StreamResponseModels
dsfaccini Dec 2, 2025
0e4500a
ModelRequest.timestamp=None by default for backwards compat
dsfaccini Dec 2, 2025
29e84ae
Merge branch 'main' into request-reponse-timestamps
dsfaccini Dec 4, 2025
5d0eff6
timestamp's set only in one place
dsfaccini Dec 4, 2025
b895578
Merge branch 'main' into request-reponse-timestamps
dsfaccini Dec 4, 2025
54bcb92
make sure last request always has timestamp
dsfaccini Dec 4, 2025
bad15b5
Merge branch 'main' into request-reponse-timestamps
dsfaccini Dec 5, 2025
9f9bf06
fix new tests
dsfaccini Dec 5, 2025
4176971
fix timestamps
dsfaccini Dec 5, 2025
8652eb1
fix test openai responses test and remove empty provider dicts
dsfaccini Dec 5, 2025
e811f4e
add signatures
dsfaccini Dec 5, 2025
a501928
re-add reset to signature and pd
dsfaccini Dec 5, 2025
8e1670e
fix snapshots
dsfaccini Dec 6, 2025
702b216
coverage
dsfaccini Dec 6, 2025
754c782
coverage
dsfaccini Dec 7, 2025
5327aea
coverage
dsfaccini Dec 8, 2025
File filter

Filter by extension

Filter by extension

Conversations
Failed to load comments.
Loading
Jump to
Jump to file
Failed to load files.
Loading
Diff view
Diff view
6 changes: 5 additions & 1 deletion docs/agents.md
Original file line number Diff line number Diff line change
Expand Up @@ -57,7 +57,7 @@ print(result.output)
4. `result.output` will be a boolean indicating if the square is a winner. Pydantic performs the output validation, and it'll be typed as a `bool` since its type is derived from the `output_type` generic parameter of the agent.

!!! tip "Agents are designed for reuse, like FastAPI Apps"
Agents are intended to be instantiated once (frequently as module globals) and reused throughout your application, similar to a small [FastAPI][fastapi.FastAPI] app or an [APIRouter][fastapi.APIRouter].
Agents can be instantiated once as a module global and reused throughout your application, similar to a small [FastAPI][fastapi.FastAPI] app or an [APIRouter][fastapi.APIRouter], or be created dynamically by a factory function like `get_agent('agent-type')`, whichever you prefer.
Copy link
Collaborator

Choose a reason for hiding this comment

The reason will be displayed to describe this comment to others. Learn more.

Remove this from this PR please ;)

Copy link
Contributor Author

Choose a reason for hiding this comment

The reason will be displayed to describe this comment to others. Learn more.

this will go away when I update the branch


## Running Agents

Expand Down Expand Up @@ -321,6 +321,7 @@ async def main():
timestamp=datetime.datetime(...),
)
],
timestamp=datetime.datetime(...),
run_id='...',
)
),
Expand Down Expand Up @@ -385,6 +386,7 @@ async def main():
timestamp=datetime.datetime(...),
)
],
timestamp=datetime.datetime(...),
run_id='...',
)
),
Expand Down Expand Up @@ -1049,6 +1051,7 @@ with capture_run_messages() as messages: # (2)!
timestamp=datetime.datetime(...),
)
],
timestamp=datetime.datetime(...),
run_id='...',
),
ModelResponse(
Expand All @@ -1073,6 +1076,7 @@ with capture_run_messages() as messages: # (2)!
timestamp=datetime.datetime(...),
)
],
timestamp=datetime.datetime(...),
run_id='...',
),
ModelResponse(
Expand Down
1 change: 1 addition & 0 deletions docs/api/models/function.md
Original file line number Diff line number Diff line change
Expand Up @@ -30,6 +30,7 @@ async def model_function(
timestamp=datetime.datetime(...),
)
],
timestamp=datetime.datetime(...),
run_id='...',
)
]
Expand Down
6 changes: 6 additions & 0 deletions docs/deferred-tools.md
Original file line number Diff line number Diff line change
Expand Up @@ -118,6 +118,7 @@ print(result.all_messages())
timestamp=datetime.datetime(...),
)
],
timestamp=datetime.datetime(...),
run_id='...',
),
ModelResponse(
Expand Down Expand Up @@ -152,6 +153,7 @@ print(result.all_messages())
timestamp=datetime.datetime(...),
)
],
timestamp=datetime.datetime(...),
run_id='...',
),
ModelRequest(
Expand All @@ -173,6 +175,7 @@ print(result.all_messages())
timestamp=datetime.datetime(...),
),
],
timestamp=datetime.datetime(...),
run_id='...',
),
ModelResponse(
Expand All @@ -197,6 +200,7 @@ print(result.all_messages())
timestamp=datetime.datetime(...),
)
],
timestamp=datetime.datetime(...),
run_id='...',
),
ModelResponse(
Expand Down Expand Up @@ -324,6 +328,7 @@ async def main():
timestamp=datetime.datetime(...),
)
],
timestamp=datetime.datetime(...),
run_id='...',
),
ModelResponse(
Expand All @@ -350,6 +355,7 @@ async def main():
timestamp=datetime.datetime(...),
)
],
timestamp=datetime.datetime(...),
run_id='...',
),
ModelResponse(
Expand Down
7 changes: 7 additions & 0 deletions docs/message-history.md
Original file line number Diff line number Diff line change
Expand Up @@ -51,6 +51,7 @@ print(result.all_messages())
timestamp=datetime.datetime(...),
),
],
timestamp=datetime.datetime(...),
run_id='...',
),
ModelResponse(
Expand Down Expand Up @@ -95,6 +96,7 @@ async def main():
timestamp=datetime.datetime(...),
),
],
timestamp=datetime.datetime(...),
run_id='...',
)
]
Expand Down Expand Up @@ -122,6 +124,7 @@ async def main():
timestamp=datetime.datetime(...),
),
],
timestamp=datetime.datetime(...),
run_id='...',
),
ModelResponse(
Expand Down Expand Up @@ -178,6 +181,7 @@ print(result2.all_messages())
timestamp=datetime.datetime(...),
),
],
timestamp=datetime.datetime(...),
run_id='...',
),
ModelResponse(
Expand All @@ -198,6 +202,7 @@ print(result2.all_messages())
timestamp=datetime.datetime(...),
)
],
timestamp=datetime.datetime(...),
run_id='...',
),
ModelResponse(
Expand Down Expand Up @@ -303,6 +308,7 @@ print(result2.all_messages())
timestamp=datetime.datetime(...),
),
],
timestamp=datetime.datetime(...),
run_id='...',
),
ModelResponse(
Expand All @@ -323,6 +329,7 @@ print(result2.all_messages())
timestamp=datetime.datetime(...),
)
],
timestamp=datetime.datetime(...),
run_id='...',
),
ModelResponse(
Expand Down
2 changes: 2 additions & 0 deletions docs/testing.md
Original file line number Diff line number Diff line change
Expand Up @@ -128,6 +128,7 @@ async def test_forecast():
timestamp=IsNow(tz=timezone.utc), # (7)!
),
],
timestamp=IsNow(tz=timezone.utc),
run_id=IsStr(),
),
ModelResponse(
Expand Down Expand Up @@ -158,6 +159,7 @@ async def test_forecast():
timestamp=IsNow(tz=timezone.utc),
),
],
timestamp=IsNow(tz=timezone.utc),
run_id=IsStr(),
),
ModelResponse(
Expand Down
3 changes: 3 additions & 0 deletions docs/tools.md
Original file line number Diff line number Diff line change
Expand Up @@ -88,6 +88,7 @@ print(dice_result.all_messages())
timestamp=datetime.datetime(...),
),
],
timestamp=datetime.datetime(...),
run_id='...',
),
ModelResponse(
Expand All @@ -110,6 +111,7 @@ print(dice_result.all_messages())
timestamp=datetime.datetime(...),
)
],
timestamp=datetime.datetime(...),
run_id='...',
),
ModelResponse(
Expand All @@ -132,6 +134,7 @@ print(dice_result.all_messages())
timestamp=datetime.datetime(...),
)
],
timestamp=datetime.datetime(...),
run_id='...',
),
ModelResponse(
Expand Down
5 changes: 4 additions & 1 deletion pydantic_ai_slim/pydantic_ai/_a2a.py
Original file line number Diff line number Diff line change
Expand Up @@ -25,6 +25,7 @@
ToolCallPart,
UserPromptPart,
VideoUrl,
_utils,
)

from .agent import AbstractAgent, AgentDepsT, OutputDataT
Expand Down Expand Up @@ -200,7 +201,9 @@ def build_message_history(self, history: list[Message]) -> list[ModelMessage]:
model_messages: list[ModelMessage] = []
for message in history:
if message['role'] == 'user':
model_messages.append(ModelRequest(parts=self._request_parts_from_a2a(message['parts'])))
model_messages.append(
ModelRequest(parts=self._request_parts_from_a2a(message['parts']), timestamp=_utils.now_utc())
)
else:
model_messages.append(ModelResponse(parts=self._response_parts_from_a2a(message['parts'])))
return model_messages
Expand Down
19 changes: 10 additions & 9 deletions pydantic_ai_slim/pydantic_ai/_agent_graph.py
Original file line number Diff line number Diff line change
Expand Up @@ -19,7 +19,7 @@
from pydantic_ai._function_schema import _takes_ctx as is_takes_ctx # type: ignore
from pydantic_ai._instrumentation import DEFAULT_INSTRUMENTATION_VERSION
from pydantic_ai._tool_manager import ToolManager
from pydantic_ai._utils import dataclasses_no_defaults_repr, get_union_args, is_async_callable, run_in_executor
from pydantic_ai._utils import dataclasses_no_defaults_repr, get_union_args, is_async_callable, now_utc, run_in_executor
from pydantic_ai.builtin_tools import AbstractBuiltinTool
from pydantic_graph import BaseNode, GraphRunContext
from pydantic_graph.beta import Graph, GraphBuilder
Expand Down Expand Up @@ -229,7 +229,7 @@ async def run( # noqa: C901
if isinstance(last_message, _messages.ModelRequest) and self.user_prompt is None:
# Drop last message from history and reuse its parts
messages.pop()
next_message = _messages.ModelRequest(parts=last_message.parts)
next_message = _messages.ModelRequest(parts=last_message.parts, timestamp=now_utc())

# Extract `UserPromptPart` content from the popped message and add to `ctx.deps.prompt`
user_prompt_parts = [part for part in last_message.parts if isinstance(part, _messages.UserPromptPart)]
Expand Down Expand Up @@ -273,7 +273,7 @@ async def run( # noqa: C901
if self.user_prompt is not None:
parts.append(_messages.UserPromptPart(self.user_prompt))

next_message = _messages.ModelRequest(parts=parts)
next_message = _messages.ModelRequest(parts=parts, timestamp=now_utc())
Copy link
Collaborator

Choose a reason for hiding this comment

The reason will be displayed to describe this comment to others. Learn more.

Let's set it just once on next_message below like we do with instructions, instead of adding it to 2 constructors


next_message.instructions = instructions

Expand Down Expand Up @@ -437,6 +437,7 @@ async def stream(
assert not self._did_stream, 'stream() should only be called once per node'

model_settings, model_request_parameters, message_history, run_context = await self._prepare_request(ctx)
self.request.timestamp = now_utc()
Copy link
Collaborator

Choose a reason for hiding this comment

The reason will be displayed to describe this comment to others. Learn more.

Actually if we have it here we don't need to set it above right?

Copy link
Contributor Author

Choose a reason for hiding this comment

The reason will be displayed to describe this comment to others. Learn more.

yeah that makes sense, I wasn't totally sure about this line bc I ran into an issue with the temporal tests, but that issue was unrelated so this can stay. I'll remove the unnecessary assignments

async with ctx.deps.model.request_stream(
message_history, model_settings, model_request_parameters, run_context
) as streamed_response:
Expand Down Expand Up @@ -469,6 +470,7 @@ async def _make_request(
return self._result # pragma: no cover

model_settings, model_request_parameters, message_history, _ = await self._prepare_request(ctx)
self.request.timestamp = now_utc()
model_response = await ctx.deps.model.request(message_history, model_settings, model_request_parameters)
ctx.state.usage.requests += 1

Expand Down Expand Up @@ -625,7 +627,7 @@ async def _run_stream() -> AsyncIterator[_messages.HandleResponseEvent]: # noqa
run_context = build_run_context(ctx)
instructions = await ctx.deps.get_instructions(run_context)
self._next_node = ModelRequestNode[DepsT, NodeRunEndT](
_messages.ModelRequest(parts=[], instructions=instructions)
_messages.ModelRequest(parts=[], instructions=instructions, timestamp=now_utc())
Copy link
Collaborator

Choose a reason for hiding this comment

The reason will be displayed to describe this comment to others. Learn more.

Same as above; if we set the timestamp already before sending model.request, we don't need it here right?

)
return

Expand Down Expand Up @@ -693,7 +695,7 @@ async def _run_stream() -> AsyncIterator[_messages.HandleResponseEvent]: # noqa
run_context = build_run_context(ctx)
instructions = await ctx.deps.get_instructions(run_context)
self._next_node = ModelRequestNode[DepsT, NodeRunEndT](
_messages.ModelRequest(parts=[e.tool_retry], instructions=instructions)
_messages.ModelRequest(parts=[e.tool_retry], instructions=instructions, timestamp=now_utc())
)

self._events_iterator = _run_stream()
Expand Down Expand Up @@ -735,7 +737,7 @@ async def _handle_tool_calls(

instructions = await ctx.deps.get_instructions(run_context)
self._next_node = ModelRequestNode[DepsT, NodeRunEndT](
_messages.ModelRequest(parts=output_parts, instructions=instructions)
_messages.ModelRequest(parts=output_parts, instructions=instructions, timestamp=now_utc())
)

async def _handle_text_response(
Expand Down Expand Up @@ -770,7 +772,7 @@ def _handle_final_result(

# For backwards compatibility, append a new ModelRequest using the tool returns and retries
if tool_responses:
messages.append(_messages.ModelRequest(parts=tool_responses, run_id=ctx.state.run_id))
messages.append(_messages.ModelRequest(parts=tool_responses, run_id=ctx.state.run_id, timestamp=now_utc()))

return End(final_result)

Expand Down Expand Up @@ -1323,8 +1325,7 @@ def _clean_message_history(messages: list[_messages.ModelMessage]) -> list[_mess
key=lambda x: 0 if isinstance(x, _messages.ToolReturnPart | _messages.RetryPromptPart) else 1
)
merged_message = _messages.ModelRequest(
parts=parts,
instructions=last_message.instructions or message.instructions,
parts=parts, instructions=last_message.instructions or message.instructions, timestamp=now_utc()
Copy link
Collaborator

Choose a reason for hiding this comment

The reason will be displayed to describe this comment to others. Learn more.

We should definitely not create a new timestamp here, but use one of the existing ones

)
clean_messages[-1] = merged_message
else:
Expand Down
6 changes: 3 additions & 3 deletions pydantic_ai_slim/pydantic_ai/_mcp.py
Original file line number Diff line number Diff line change
Expand Up @@ -4,7 +4,7 @@
from collections.abc import Sequence
from typing import Literal

from . import exceptions, messages
from . import _utils, exceptions, messages

try:
from mcp import types as mcp_types
Expand Down Expand Up @@ -44,15 +44,15 @@ def map_from_mcp_params(params: mcp_types.CreateMessageRequestParams) -> list[me
# role is assistant
# if there are any request parts, add a request message wrapping them
if request_parts:
pai_messages.append(messages.ModelRequest(parts=request_parts))
pai_messages.append(messages.ModelRequest(parts=request_parts, timestamp=_utils.now_utc()))
request_parts = []

response_parts.append(map_from_sampling_content(content))

if response_parts:
pai_messages.append(messages.ModelResponse(parts=response_parts))
if request_parts:
pai_messages.append(messages.ModelRequest(parts=request_parts))
pai_messages.append(messages.ModelRequest(parts=request_parts, timestamp=_utils.now_utc()))
return pai_messages


Expand Down
1 change: 1 addition & 0 deletions pydantic_ai_slim/pydantic_ai/agent/__init__.py
Original file line number Diff line number Diff line change
Expand Up @@ -508,6 +508,7 @@ async def main():
timestamp=datetime.datetime(...),
)
],
timestamp=datetime.datetime(...),
run_id='...',
)
),
Expand Down
7 changes: 6 additions & 1 deletion pydantic_ai_slim/pydantic_ai/agent/abstract.py
Original file line number Diff line number Diff line change
Expand Up @@ -559,7 +559,11 @@ async def on_complete() -> None:

# For backwards compatibility, append a new ModelRequest using the tool returns and retries
if parts:
messages.append(_messages.ModelRequest(parts, run_id=graph_ctx.state.run_id))
messages.append(
_messages.ModelRequest(
parts, run_id=graph_ctx.state.run_id, timestamp=_utils.now_utc()
)
)

await agent_run.next(_agent_graph.SetFinalResult(final_result))

Expand Down Expand Up @@ -1004,6 +1008,7 @@ async def main():
timestamp=datetime.datetime(...),
)
],
timestamp=datetime.datetime(...),
run_id='...',
)
),
Expand Down
1 change: 1 addition & 0 deletions pydantic_ai_slim/pydantic_ai/agent/wrapper.py
Original file line number Diff line number Diff line change
Expand Up @@ -164,6 +164,7 @@ async def main():
timestamp=datetime.datetime(...),
)
],
timestamp=datetime.datetime(...),
run_id='...',
)
),
Expand Down
1 change: 1 addition & 0 deletions pydantic_ai_slim/pydantic_ai/durable_exec/dbos/_agent.py
Original file line number Diff line number Diff line change
Expand Up @@ -823,6 +823,7 @@ async def main():
timestamp=datetime.datetime(...),
)
],
timestamp=datetime.datetime(...),
run_id='...',
)
),
Expand Down
Original file line number Diff line number Diff line change
Expand Up @@ -768,6 +768,7 @@ async def main():
timestamp=datetime.datetime(...),
)
],
timestamp=datetime.datetime(...),
run_id='...',
)
),
Expand Down
Original file line number Diff line number Diff line change
Expand Up @@ -842,6 +842,7 @@ async def main():
timestamp=datetime.datetime(...),
)
],
timestamp=datetime.datetime(...),
run_id='...',
)
),
Expand Down
Loading