Add local timestamps to request and response models - include provider timestamp in provider_details
#3598
base: main
Changes from 12 commits
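
All of the hunks below land in what appears to be the agent graph module (judging by the `ModelRequestNode` and `_agent_graph`-style imports): every `_messages.ModelRequest(...)` construction gains a `timestamp=now_utc()` argument, and the two request paths (`stream` and `_make_request`) re-stamp the request just before the model call. For orientation, here is a minimal sketch of what `now_utc` from `pydantic_ai._utils` presumably does — an assumption based on its name, not code copied from the library:

```python
# Sketch only: assumes now_utc() returns a timezone-aware UTC datetime.
from datetime import datetime, timezone


def now_utc() -> datetime:
    return datetime.now(tz=timezone.utc)
```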
```diff
@@ -19,7 +19,7 @@
 from pydantic_ai._function_schema import _takes_ctx as is_takes_ctx  # type: ignore
 from pydantic_ai._instrumentation import DEFAULT_INSTRUMENTATION_VERSION
 from pydantic_ai._tool_manager import ToolManager
-from pydantic_ai._utils import dataclasses_no_defaults_repr, get_union_args, is_async_callable, run_in_executor
+from pydantic_ai._utils import dataclasses_no_defaults_repr, get_union_args, is_async_callable, now_utc, run_in_executor
 from pydantic_ai.builtin_tools import AbstractBuiltinTool
 from pydantic_graph import BaseNode, GraphRunContext
 from pydantic_graph.beta import Graph, GraphBuilder
```
```diff
@@ -229,7 +229,7 @@ async def run(  # noqa: C901
         if isinstance(last_message, _messages.ModelRequest) and self.user_prompt is None:
             # Drop last message from history and reuse its parts
             messages.pop()
-            next_message = _messages.ModelRequest(parts=last_message.parts)
+            next_message = _messages.ModelRequest(parts=last_message.parts, timestamp=now_utc())

             # Extract `UserPromptPart` content from the popped message and add to `ctx.deps.prompt`
             user_prompt_parts = [part for part in last_message.parts if isinstance(part, _messages.UserPromptPart)]
```
```diff
@@ -273,7 +273,7 @@ async def run(  # noqa: C901
         if self.user_prompt is not None:
             parts.append(_messages.UserPromptPart(self.user_prompt))

-        next_message = _messages.ModelRequest(parts=parts)
+        next_message = _messages.ModelRequest(parts=parts, timestamp=now_utc())

         next_message.instructions = instructions
```
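Both hunks above stamp the request at construction time. A hedged sketch of the resulting call shape — `ModelRequest` accepting a `timestamp` keyword is what this PR adds; the prompt content here is illustrative:

```python
# Illustrative only; mirrors the construction pattern in the hunks above.
from pydantic_ai import messages as _messages
from pydantic_ai._utils import now_utc

next_message = _messages.ModelRequest(
    parts=[_messages.UserPromptPart('hello')],
    timestamp=now_utc(),  # local wall-clock time at which the request was built
)
```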
```diff
@@ -437,6 +437,7 @@ async def stream(
         assert not self._did_stream, 'stream() should only be called once per node'

         model_settings, model_request_parameters, message_history, run_context = await self._prepare_request(ctx)
+        self.request.timestamp = now_utc()
         async with ctx.deps.model.request_stream(
             message_history, model_settings, model_request_parameters, run_context
         ) as streamed_response:
```
```diff
@@ -469,6 +470,7 @@ async def _make_request(
             return self._result  # pragma: no cover

         model_settings, model_request_parameters, message_history, _ = await self._prepare_request(ctx)
+        self.request.timestamp = now_utc()
         model_response = await ctx.deps.model.request(message_history, model_settings, model_request_parameters)
         ctx.state.usage.requests += 1
```
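These two hunks make a deliberate choice: a request node can exist for some time before the provider is actually called, so the timestamp is refreshed immediately before dispatch. A self-contained sketch of that pattern (all names here are hypothetical, not pydantic-ai internals):

```python
# Hypothetical "stamp at dispatch" illustration; not library code.
import asyncio
from dataclasses import dataclass, field
from datetime import datetime, timezone


def now_utc() -> datetime:
    return datetime.now(tz=timezone.utc)


@dataclass
class Request:
    payload: str
    timestamp: datetime = field(default_factory=now_utc)


async def send(request: Request) -> str:
    # Refresh just before the round trip so the timestamp measures when the
    # provider call started, not when the request object was constructed.
    request.timestamp = now_utc()
    await asyncio.sleep(0.1)  # stand-in for the network call
    return f'dispatched at {request.timestamp.isoformat()}'


if __name__ == '__main__':
    print(asyncio.run(send(Request(payload='hello'))))
```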
```diff
@@ -625,7 +627,7 @@ async def _run_stream() -> AsyncIterator[_messages.HandleResponseEvent]:  # noqa
                 run_context = build_run_context(ctx)
                 instructions = await ctx.deps.get_instructions(run_context)
                 self._next_node = ModelRequestNode[DepsT, NodeRunEndT](
-                    _messages.ModelRequest(parts=[], instructions=instructions)
+                    _messages.ModelRequest(parts=[], instructions=instructions, timestamp=now_utc())
                 )
                 return
```
```diff
@@ -693,7 +695,7 @@ async def _run_stream() -> AsyncIterator[_messages.HandleResponseEvent]:  # noqa
                 run_context = build_run_context(ctx)
                 instructions = await ctx.deps.get_instructions(run_context)
                 self._next_node = ModelRequestNode[DepsT, NodeRunEndT](
-                    _messages.ModelRequest(parts=[e.tool_retry], instructions=instructions)
+                    _messages.ModelRequest(parts=[e.tool_retry], instructions=instructions, timestamp=now_utc())
                 )

         self._events_iterator = _run_stream()
```
```diff
@@ -735,7 +737,7 @@ async def _handle_tool_calls(
             instructions = await ctx.deps.get_instructions(run_context)
             self._next_node = ModelRequestNode[DepsT, NodeRunEndT](
-                _messages.ModelRequest(parts=output_parts, instructions=instructions)
+                _messages.ModelRequest(parts=output_parts, instructions=instructions, timestamp=now_utc())
             )

     async def _handle_text_response(
```
```diff
@@ -770,7 +772,7 @@ def _handle_final_result(
     # For backwards compatibility, append a new ModelRequest using the tool returns and retries
     if tool_responses:
-        messages.append(_messages.ModelRequest(parts=tool_responses, run_id=ctx.state.run_id))
+        messages.append(_messages.ModelRequest(parts=tool_responses, run_id=ctx.state.run_id, timestamp=now_utc()))

     return End(final_result)
```
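With every request construction stamped, the message history now records when each side of the exchange happened. A sketch of how that could be observed from the public API (assumes a configured provider; the model string is illustrative, and `ModelRequest.timestamp` is the field this PR introduces):

```python
# Sketch; requires valid provider credentials to actually run.
from pydantic_ai import Agent

agent = Agent('openai:gpt-4o')
result = agent.run_sync('What is 2 + 2?')
for message in result.all_messages():
    # ModelResponse already carried a timestamp; this PR adds one to ModelRequest.
    print(type(message).__name__, message.timestamp)
```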
```diff
@@ -1323,8 +1325,7 @@ def _clean_message_history(messages: list[_messages.ModelMessage]) -> list[_mess
                 key=lambda x: 0 if isinstance(x, _messages.ToolReturnPart | _messages.RetryPromptPart) else 1
             )
             merged_message = _messages.ModelRequest(
-                parts=parts,
-                instructions=last_message.instructions or message.instructions,
+                parts=parts, instructions=last_message.instructions or message.instructions, timestamp=now_utc()
             )
             clean_messages[-1] = merged_message
         else:
```
Review comment:
> Remove this from this PR please ;)
Reply:
> this will go away when I update the branch