Skip to content
Open
Show file tree
Hide file tree
Changes from 10 commits
Commits
Show all changes
26 commits
Select commit Hold shift + click to select a range
8d52d65
Clarify usage of agent factories
dsfaccini Nov 28, 2025
8c69124
add timestamps and update test snapshots
dsfaccini Nov 30, 2025
de6d989
fix: add missing timestamp field to ModelRequest test snapshots
dsfaccini Nov 30, 2025
97cad05
fix tests
dsfaccini Nov 30, 2025
e041398
coverage
dsfaccini Nov 30, 2025
ed199ca
improve code
dsfaccini Nov 30, 2025
47af45e
fix groq test
dsfaccini Nov 30, 2025
f9efa8a
coverage
dsfaccini Nov 30, 2025
7722ee9
coverage
dsfaccini Nov 30, 2025
bf1c640
add note
dsfaccini Nov 30, 2025
76aad15
- set default timestamp on StreamResponseModels
dsfaccini Dec 2, 2025
0e4500a
ModelRequest.timestamp=None by default for backwards compat
dsfaccini Dec 2, 2025
29e84ae
Merge branch 'main' into request-reponse-timestamps
dsfaccini Dec 4, 2025
5d0eff6
timestamps set only in one place
dsfaccini Dec 4, 2025
b895578
Merge branch 'main' into request-reponse-timestamps
dsfaccini Dec 4, 2025
54bcb92
make sure last request always has timestamp
dsfaccini Dec 4, 2025
bad15b5
Merge branch 'main' into request-reponse-timestamps
dsfaccini Dec 5, 2025
9f9bf06
fix new tests
dsfaccini Dec 5, 2025
4176971
fix timestamps
dsfaccini Dec 5, 2025
8652eb1
fix test openai responses test and remove empty provider dicts
dsfaccini Dec 5, 2025
e811f4e
add signatures
dsfaccini Dec 5, 2025
a501928
re-add reset to signature and pd
dsfaccini Dec 5, 2025
8e1670e
fix snapshots
dsfaccini Dec 6, 2025
702b216
coverage
dsfaccini Dec 6, 2025
754c782
coverage
dsfaccini Dec 7, 2025
5327aea
coverage
dsfaccini Dec 8, 2025
File filter

Filter by extension

Filter by extension

Conversations
Failed to load comments.
Loading
Jump to
Jump to file
Failed to load files.
Loading
Diff view
Diff view
6 changes: 5 additions & 1 deletion docs/agents.md
Original file line number Diff line number Diff line change
Expand Up @@ -57,7 +57,7 @@ print(result.output)
4. `result.output` will be a boolean indicating if the square is a winner. Pydantic performs the output validation, and it'll be typed as a `bool` since its type is derived from the `output_type` generic parameter of the agent.

!!! tip "Agents are designed for reuse, like FastAPI Apps"
Agents are intended to be instantiated once (frequently as module globals) and reused throughout your application, similar to a small [FastAPI][fastapi.FastAPI] app or an [APIRouter][fastapi.APIRouter].
Agents can be instantiated once as a module global and reused throughout your application, similar to a small [FastAPI][fastapi.FastAPI] app or an [APIRouter][fastapi.APIRouter], or be created dynamically by a factory function like `get_agent('agent-type')`, whichever you prefer.
Copy link
Collaborator

Choose a reason for hiding this comment

The reason will be displayed to describe this comment to others. Learn more.

Remove this from this PR please ;)

Copy link
Contributor Author

Choose a reason for hiding this comment

The reason will be displayed to describe this comment to others. Learn more.

this will go away when I update the branch


## Running Agents

Expand Down Expand Up @@ -321,6 +321,7 @@ async def main():
timestamp=datetime.datetime(...),
)
],
timestamp=datetime.datetime(...),
run_id='...',
)
),
Expand Down Expand Up @@ -385,6 +386,7 @@ async def main():
timestamp=datetime.datetime(...),
)
],
timestamp=datetime.datetime(...),
run_id='...',
)
),
Expand Down Expand Up @@ -1049,6 +1051,7 @@ with capture_run_messages() as messages: # (2)!
timestamp=datetime.datetime(...),
)
],
timestamp=datetime.datetime(...),
run_id='...',
),
ModelResponse(
Expand All @@ -1073,6 +1076,7 @@ with capture_run_messages() as messages: # (2)!
timestamp=datetime.datetime(...),
)
],
timestamp=datetime.datetime(...),
run_id='...',
),
ModelResponse(
Expand Down
1 change: 1 addition & 0 deletions docs/api/models/function.md
Original file line number Diff line number Diff line change
Expand Up @@ -30,6 +30,7 @@ async def model_function(
timestamp=datetime.datetime(...),
)
],
timestamp=datetime.datetime(...),
run_id='...',
)
]
Expand Down
6 changes: 6 additions & 0 deletions docs/deferred-tools.md
Original file line number Diff line number Diff line change
Expand Up @@ -118,6 +118,7 @@ print(result.all_messages())
timestamp=datetime.datetime(...),
)
],
timestamp=datetime.datetime(...),
run_id='...',
),
ModelResponse(
Expand Down Expand Up @@ -152,6 +153,7 @@ print(result.all_messages())
timestamp=datetime.datetime(...),
)
],
timestamp=datetime.datetime(...),
run_id='...',
),
ModelRequest(
Expand All @@ -173,6 +175,7 @@ print(result.all_messages())
timestamp=datetime.datetime(...),
),
],
timestamp=datetime.datetime(...),
run_id='...',
),
ModelResponse(
Expand All @@ -197,6 +200,7 @@ print(result.all_messages())
timestamp=datetime.datetime(...),
)
],
timestamp=datetime.datetime(...),
run_id='...',
),
ModelResponse(
Expand Down Expand Up @@ -324,6 +328,7 @@ async def main():
timestamp=datetime.datetime(...),
)
],
timestamp=datetime.datetime(...),
run_id='...',
),
ModelResponse(
Expand All @@ -350,6 +355,7 @@ async def main():
timestamp=datetime.datetime(...),
)
],
timestamp=datetime.datetime(...),
run_id='...',
),
ModelResponse(
Expand Down
7 changes: 7 additions & 0 deletions docs/message-history.md
Original file line number Diff line number Diff line change
Expand Up @@ -51,6 +51,7 @@ print(result.all_messages())
timestamp=datetime.datetime(...),
),
],
timestamp=datetime.datetime(...),
run_id='...',
),
ModelResponse(
Expand Down Expand Up @@ -95,6 +96,7 @@ async def main():
timestamp=datetime.datetime(...),
),
],
timestamp=datetime.datetime(...),
run_id='...',
)
]
Expand Down Expand Up @@ -122,6 +124,7 @@ async def main():
timestamp=datetime.datetime(...),
),
],
timestamp=datetime.datetime(...),
run_id='...',
),
ModelResponse(
Expand Down Expand Up @@ -178,6 +181,7 @@ print(result2.all_messages())
timestamp=datetime.datetime(...),
),
],
timestamp=datetime.datetime(...),
run_id='...',
),
ModelResponse(
Expand All @@ -198,6 +202,7 @@ print(result2.all_messages())
timestamp=datetime.datetime(...),
)
],
timestamp=datetime.datetime(...),
run_id='...',
),
ModelResponse(
Expand Down Expand Up @@ -303,6 +308,7 @@ print(result2.all_messages())
timestamp=datetime.datetime(...),
),
],
timestamp=datetime.datetime(...),
run_id='...',
),
ModelResponse(
Expand All @@ -323,6 +329,7 @@ print(result2.all_messages())
timestamp=datetime.datetime(...),
)
],
timestamp=datetime.datetime(...),
run_id='...',
),
ModelResponse(
Expand Down
2 changes: 2 additions & 0 deletions docs/testing.md
Original file line number Diff line number Diff line change
Expand Up @@ -128,6 +128,7 @@ async def test_forecast():
timestamp=IsNow(tz=timezone.utc), # (7)!
),
],
timestamp=IsNow(tz=timezone.utc),
run_id=IsStr(),
),
ModelResponse(
Expand Down Expand Up @@ -158,6 +159,7 @@ async def test_forecast():
timestamp=IsNow(tz=timezone.utc),
),
],
timestamp=IsNow(tz=timezone.utc),
run_id=IsStr(),
),
ModelResponse(
Expand Down
3 changes: 3 additions & 0 deletions docs/tools.md
Original file line number Diff line number Diff line change
Expand Up @@ -88,6 +88,7 @@ print(dice_result.all_messages())
timestamp=datetime.datetime(...),
),
],
timestamp=datetime.datetime(...),
run_id='...',
),
ModelResponse(
Expand All @@ -110,6 +111,7 @@ print(dice_result.all_messages())
timestamp=datetime.datetime(...),
)
],
timestamp=datetime.datetime(...),
run_id='...',
),
ModelResponse(
Expand All @@ -132,6 +134,7 @@ print(dice_result.all_messages())
timestamp=datetime.datetime(...),
)
],
timestamp=datetime.datetime(...),
run_id='...',
),
ModelResponse(
Expand Down
4 changes: 3 additions & 1 deletion pydantic_ai_slim/pydantic_ai/_agent_graph.py
Original file line number Diff line number Diff line change
Expand Up @@ -19,7 +19,7 @@
from pydantic_ai._function_schema import _takes_ctx as is_takes_ctx # type: ignore
from pydantic_ai._instrumentation import DEFAULT_INSTRUMENTATION_VERSION
from pydantic_ai._tool_manager import ToolManager
from pydantic_ai._utils import dataclasses_no_defaults_repr, get_union_args, is_async_callable, run_in_executor
from pydantic_ai._utils import dataclasses_no_defaults_repr, get_union_args, is_async_callable, now_utc, run_in_executor
from pydantic_ai.builtin_tools import AbstractBuiltinTool
from pydantic_graph import BaseNode, GraphRunContext
from pydantic_graph.beta import Graph, GraphBuilder
Expand Down Expand Up @@ -437,6 +437,7 @@ async def stream(
assert not self._did_stream, 'stream() should only be called once per node'

model_settings, model_request_parameters, message_history, run_context = await self._prepare_request(ctx)
self.request.timestamp = now_utc()
Copy link
Collaborator

Choose a reason for hiding this comment

The reason will be displayed to describe this comment to others. Learn more.

Actually if we have it here we don't need to set it above right?

Copy link
Contributor Author

Choose a reason for hiding this comment

The reason will be displayed to describe this comment to others. Learn more.

yeah that makes sense, I wasn't totally sure about this line bc I ran into an issue with the temporal tests, but that issue was unrelated so this can stay. I'll remove the unnecessary assignments

async with ctx.deps.model.request_stream(
message_history, model_settings, model_request_parameters, run_context
) as streamed_response:
Expand Down Expand Up @@ -469,6 +470,7 @@ async def _make_request(
return self._result # pragma: no cover

model_settings, model_request_parameters, message_history, _ = await self._prepare_request(ctx)
self.request.timestamp = now_utc()
model_response = await ctx.deps.model.request(message_history, model_settings, model_request_parameters)
ctx.state.usage.requests += 1

Expand Down
1 change: 1 addition & 0 deletions pydantic_ai_slim/pydantic_ai/agent/__init__.py
Original file line number Diff line number Diff line change
Expand Up @@ -508,6 +508,7 @@ async def main():
timestamp=datetime.datetime(...),
)
],
timestamp=datetime.datetime(...),
run_id='...',
)
),
Expand Down
1 change: 1 addition & 0 deletions pydantic_ai_slim/pydantic_ai/agent/abstract.py
Original file line number Diff line number Diff line change
Expand Up @@ -1004,6 +1004,7 @@ async def main():
timestamp=datetime.datetime(...),
)
],
timestamp=datetime.datetime(...),
run_id='...',
)
),
Expand Down
1 change: 1 addition & 0 deletions pydantic_ai_slim/pydantic_ai/agent/wrapper.py
Original file line number Diff line number Diff line change
Expand Up @@ -164,6 +164,7 @@ async def main():
timestamp=datetime.datetime(...),
)
],
timestamp=datetime.datetime(...),
run_id='...',
)
),
Expand Down
1 change: 1 addition & 0 deletions pydantic_ai_slim/pydantic_ai/durable_exec/dbos/_agent.py
Original file line number Diff line number Diff line change
Expand Up @@ -823,6 +823,7 @@ async def main():
timestamp=datetime.datetime(...),
)
],
timestamp=datetime.datetime(...),
run_id='...',
)
),
Expand Down
Original file line number Diff line number Diff line change
Expand Up @@ -768,6 +768,7 @@ async def main():
timestamp=datetime.datetime(...),
)
],
timestamp=datetime.datetime(...),
run_id='...',
)
),
Expand Down
Original file line number Diff line number Diff line change
Expand Up @@ -842,6 +842,7 @@ async def main():
timestamp=datetime.datetime(...),
)
],
timestamp=datetime.datetime(...),
run_id='...',
)
),
Expand Down
8 changes: 6 additions & 2 deletions pydantic_ai_slim/pydantic_ai/messages.py
Original file line number Diff line number Diff line change
Expand Up @@ -994,6 +994,9 @@ class ModelRequest:

_: KW_ONLY

timestamp: datetime = field(default_factory=_now_utc)
Copy link
Collaborator

Choose a reason for hiding this comment

The reason will be displayed to describe this comment to others. Learn more.

I don't think this should have a default, as it would be filled in for old requests without this field when they are deserialized and give the false impression that this date is accurate.

Copy link
Contributor Author

Choose a reason for hiding this comment

The reason will be displayed to describe this comment to others. Learn more.

that makes sense

"""The timestamp when the request was sent to the model."""

instructions: str | None = None
"""The instructions for the model."""

Expand Down Expand Up @@ -1235,9 +1238,10 @@ class ModelResponse:
"""The name of the model that generated the response."""

timestamp: datetime = field(default_factory=_now_utc)
"""The timestamp of the response.
"""The timestamp when the response was received locally.

If the model provides a timestamp in the response (as OpenAI does) that will be used.
This is always a high-precision local datetime. Provider-specific timestamps
(if available) are stored in `provider_details['timestamp']`.
"""

kind: Literal['response'] = 'response'
Expand Down
14 changes: 12 additions & 2 deletions pydantic_ai_slim/pydantic_ai/models/google.py
Original file line number Diff line number Diff line change
Expand Up @@ -491,6 +491,8 @@ def _process_response(self, response: GenerateContentResponse) -> ModelResponse:
raw_finish_reason = candidate.finish_reason
if raw_finish_reason: # pragma: no branch
vendor_details = {'finish_reason': raw_finish_reason.value}
if response.create_time is not None:
Copy link
Collaborator

Choose a reason for hiding this comment

The reason will be displayed to describe this comment to others. Learn more.

This should be done separately from whether this a finish reason

vendor_details['timestamp'] = response.create_time
finish_reason = _FINISH_REASON_MAP.get(raw_finish_reason)

if candidate.content is None or candidate.content.parts is None:
Expand Down Expand Up @@ -528,9 +530,10 @@ async def _process_streamed_response(
model_request_parameters=model_request_parameters,
_model_name=first_chunk.model_version or self._model_name,
_response=peekable_response,
_timestamp=first_chunk.create_time or _utils.now_utc(),
_timestamp=_utils.now_utc(),
Copy link
Collaborator

Choose a reason for hiding this comment

The reason will be displayed to describe this comment to others. Learn more.

Could this be a default or something, so we don't need to repeat this?

_provider_name=self._provider.name,
_provider_url=self._provider.base_url,
_provider_timestamp=first_chunk.create_time,
)

async def _map_messages(
Expand Down Expand Up @@ -655,6 +658,7 @@ class GeminiStreamedResponse(StreamedResponse):
_timestamp: datetime
_provider_name: str
_provider_url: str
_provider_timestamp: datetime | None = None

async def _get_event_iterator(self) -> AsyncIterator[ModelResponseStreamEvent]: # noqa: C901
code_execution_tool_call_id: str | None = None
Expand All @@ -670,9 +674,15 @@ async def _get_event_iterator(self) -> AsyncIterator[ModelResponseStreamEvent]:
self.provider_response_id = chunk.response_id

raw_finish_reason = candidate.finish_reason
provider_details_dict: dict[str, Any] = {}
if raw_finish_reason:
self.provider_details = {'finish_reason': raw_finish_reason.value}
provider_details_dict['finish_reason'] = raw_finish_reason.value
self.finish_reason = _FINISH_REASON_MAP.get(raw_finish_reason)
if self._provider_timestamp is not None:
# _provider_timestamp is always None in Google streaming cassettes
provider_details_dict['timestamp'] = self._provider_timestamp # pragma: no cover
if provider_details_dict:
self.provider_details = provider_details_dict

# Google streams the grounding metadata (including the web search queries and results)
# _after_ the text that was generated using it, so it would show up out of order in the stream,
Expand Down
17 changes: 13 additions & 4 deletions pydantic_ai_slim/pydantic_ai/models/groq.py
Original file line number Diff line number Diff line change
Expand Up @@ -320,7 +320,7 @@ async def _completions_create(

def _process_response(self, response: chat.ChatCompletion) -> ModelResponse:
"""Process a non-streamed response, and prepare a message to return."""
timestamp = number_to_datetime(response.created)
timestamp = _utils.now_utc()
Copy link
Collaborator

Choose a reason for hiding this comment

The reason will be displayed to describe this comment to others. Learn more.

We can drop this right, because the field already has a default?

choice = response.choices[0]
items: list[ModelResponsePart] = []
if choice.message.reasoning is not None:
Expand All @@ -340,7 +340,9 @@ def _process_response(self, response: chat.ChatCompletion) -> ModelResponse:
items.append(ToolCallPart(tool_name=c.function.name, args=c.function.arguments, tool_call_id=c.id))

raw_finish_reason = choice.finish_reason
provider_details = {'finish_reason': raw_finish_reason}
provider_details: dict[str, Any] = {'finish_reason': raw_finish_reason}
if response.created: # pragma: no branch
provider_details['timestamp'] = number_to_datetime(response.created)
finish_reason = _FINISH_REASON_MAP.get(raw_finish_reason)
return ModelResponse(
parts=items,
Expand Down Expand Up @@ -369,8 +371,9 @@ async def _process_streamed_response(
_response=peekable_response,
_model_name=first_chunk.model,
_model_profile=self.profile,
_timestamp=number_to_datetime(first_chunk.created),
_timestamp=_utils.now_utc(),
_provider_name=self._provider.name,
_provider_timestamp=first_chunk.created,
)

def _get_tools(self, model_request_parameters: ModelRequestParameters) -> list[chat.ChatCompletionToolParam]:
Expand Down Expand Up @@ -524,6 +527,7 @@ class GroqStreamedResponse(StreamedResponse):
_response: AsyncIterable[chat.ChatCompletionChunk]
_timestamp: datetime
_provider_name: str
_provider_timestamp: int | None = None

async def _get_event_iterator(self) -> AsyncIterator[ModelResponseStreamEvent]: # noqa: C901
try:
Expand All @@ -541,9 +545,14 @@ async def _get_event_iterator(self) -> AsyncIterator[ModelResponseStreamEvent]:
except IndexError:
continue

provider_details_dict: dict[str, Any] = {}
if raw_finish_reason := choice.finish_reason:
self.provider_details = {'finish_reason': raw_finish_reason}
provider_details_dict['finish_reason'] = raw_finish_reason
self.finish_reason = _FINISH_REASON_MAP.get(raw_finish_reason)
if self._provider_timestamp is not None: # pragma: no branch
provider_details_dict['timestamp'] = number_to_datetime(self._provider_timestamp)
if provider_details_dict: # pragma: no branch
self.provider_details = provider_details_dict

if choice.delta.reasoning is not None:
if not reasoning:
Expand Down
Loading