Skip to content

Commit b700f88

Browse files
committed
Improve chat token display
1 parent d11d50d commit b700f88

File tree

3 files changed

+70
-57
lines changed

3 files changed

+70
-57
lines changed

crates/code_assistant/src/agent/runner.rs

Lines changed: 9 additions & 12 deletions
Original file line numberDiff line numberDiff line change
@@ -109,24 +109,21 @@ impl Agent {
109109
fn build_current_metadata(&self) -> Option<ChatMetadata> {
110110
// Only build metadata if we have a session ID
111111
self.session_id.as_ref().map(|session_id| {
112-
// Calculate current context size from most recent assistant message
113-
let current_context_size = self
114-
.message_history
115-
.iter()
116-
.rev()
117-
.find(|msg| matches!(msg.role, MessageRole::Assistant))
118-
.and_then(|msg| msg.usage.as_ref())
119-
.map(|usage| usage.input_tokens + usage.cache_read_input_tokens)
120-
.unwrap_or(0);
121-
122-
// Calculate total usage across all messages
112+
// Calculate total usage and find last usage across all messages
123113
let mut total_usage = llm::Usage::zero();
114+
let mut last_usage = llm::Usage::zero();
115+
124116
for message in &self.message_history {
125117
if let Some(usage) = &message.usage {
126118
total_usage.input_tokens += usage.input_tokens;
127119
total_usage.output_tokens += usage.output_tokens;
128120
total_usage.cache_creation_input_tokens += usage.cache_creation_input_tokens;
129121
total_usage.cache_read_input_tokens += usage.cache_read_input_tokens;
122+
123+
// For assistant messages, update last usage (most recent wins)
124+
if matches!(message.role, MessageRole::Assistant) {
125+
last_usage = usage.clone();
126+
}
130127
}
131128
}
132129

@@ -139,8 +136,8 @@ impl Agent {
139136
created_at: SystemTime::now(), // Will be overridden by persistence
140137
updated_at: SystemTime::now(),
141138
message_count: self.message_history.len(),
142-
current_context_size,
143139
total_usage,
140+
last_usage,
144141
tokens_limit,
145142
}
146143
})

crates/code_assistant/src/persistence.rs

Lines changed: 10 additions & 10 deletions
Original file line numberDiff line numberDiff line change
@@ -55,12 +55,12 @@ pub struct ChatMetadata {
5555
pub created_at: SystemTime,
5656
pub updated_at: SystemTime,
5757
pub message_count: usize,
58-
/// Current context size (input + cache reads from most recent assistant message)
59-
#[serde(default)]
60-
pub current_context_size: u32,
6158
/// Total usage across the entire session
6259
#[serde(default)]
6360
pub total_usage: llm::Usage,
61+
/// Usage from the last assistant message
62+
#[serde(default)]
63+
pub last_usage: llm::Usage,
6464
/// Token limit from rate limiting headers (if available)
6565
#[serde(default)]
6666
pub tokens_limit: Option<u32>,
@@ -114,7 +114,7 @@ impl FileSessionPersistence {
114114
};
115115

116116
// Calculate usage information
117-
let (current_context_size, total_usage, tokens_limit) = calculate_session_usage(&session);
117+
let (total_usage, last_usage, tokens_limit) = calculate_session_usage(&session);
118118

119119
// Update or add metadata for this session
120120
let new_metadata = ChatMetadata {
@@ -123,8 +123,8 @@ impl FileSessionPersistence {
123123
created_at: session.created_at,
124124
updated_at: session.updated_at,
125125
message_count: session.messages.len(),
126-
current_context_size,
127126
total_usage,
127+
last_usage,
128128
tokens_limit,
129129
};
130130

@@ -225,9 +225,9 @@ pub fn generate_session_id() -> String {
225225
}
226226

227227
/// Calculate usage information from session messages
228-
fn calculate_session_usage(session: &ChatSession) -> (u32, llm::Usage, Option<u32>) {
228+
fn calculate_session_usage(session: &ChatSession) -> (llm::Usage, llm::Usage, Option<u32>) {
229229
let mut total_usage = llm::Usage::zero();
230-
let mut current_context_size = 0;
230+
let mut last_usage = llm::Usage::zero();
231231
let tokens_limit = None;
232232

233233
// Calculate total usage and find most recent assistant message usage
@@ -239,15 +239,15 @@ fn calculate_session_usage(session: &ChatSession) -> (u32, llm::Usage, Option<u3
239239
total_usage.cache_creation_input_tokens += usage.cache_creation_input_tokens;
240240
total_usage.cache_read_input_tokens += usage.cache_read_input_tokens;
241241

242-
// For assistant messages, update current context size (most recent wins)
242+
// For assistant messages, update last usage (most recent wins)
243243
if matches!(message.role, llm::MessageRole::Assistant) {
244-
current_context_size = usage.input_tokens + usage.cache_read_input_tokens;
244+
last_usage = usage.clone();
245245
}
246246
}
247247
}
248248

249249
// Note: We don't have access to rate_limit_info in persisted messages currently
250250
// This could be added later if needed, but tokens_limit is usually constant per provider
251251

252-
(current_context_size, total_usage, tokens_limit)
252+
(total_usage, last_usage, tokens_limit)
253253
}

crates/code_assistant/src/ui/gpui/chat_sidebar.rs

Lines changed: 51 additions & 35 deletions
Original file line numberDiff line numberDiff line change
@@ -6,7 +6,7 @@ use gpui::{
66
div, prelude::*, px, AppContext, Context, Entity, FocusHandle, Focusable, MouseButton,
77
MouseUpEvent, SharedString, Styled, Window,
88
};
9-
use gpui_component::{ActiveTheme, StyledExt};
9+
use gpui_component::{ActiveTheme, Icon, StyledExt};
1010
use std::time::SystemTime;
1111
use tracing::{debug, trace, warn};
1212

@@ -194,45 +194,61 @@ impl Render for ChatListItem {
194194
)
195195
.child(
196196
div()
197+
.flex()
198+
.items_center()
199+
.justify_between()
197200
.text_xs()
198201
.text_color(cx.theme().muted_foreground)
199-
.child(SharedString::from(formatted_date)),
200-
)
201-
.when(
202-
self.metadata.total_usage.input_tokens > 0
203-
|| self.metadata.total_usage.output_tokens > 0,
204-
|s| {
205-
let mut token_elements = Vec::new();
202+
.child(SharedString::from(formatted_date))
203+
.when(
204+
self.metadata.last_usage.input_tokens > 0
205+
|| self.metadata.last_usage.cache_read_input_tokens > 0,
206+
|d| {
207+
let mut token_elements = Vec::new();
206208

207-
// Input tokens (blue)
208-
token_elements.push(div().text_color(cx.theme().info).child(
209-
SharedString::from(format!("{}", self.metadata.total_usage.input_tokens)),
210-
));
211-
212-
// Cache reads (cyan) - only if > 0
213-
if self.metadata.total_usage.cache_read_input_tokens > 0 {
214-
token_elements.push(div().text_color(cx.theme().info.opacity(0.7)).child(
215-
SharedString::from(format!(
216-
"{}",
217-
self.metadata.total_usage.cache_read_input_tokens
218-
)),
219-
));
220-
}
221-
222-
// Output tokens (green)
223-
token_elements.push(div().text_color(cx.theme().success).child(
224-
SharedString::from(format!("{}", self.metadata.total_usage.output_tokens)),
225-
));
209+
// Input tokens from last request with arrow_up icon
210+
if self.metadata.last_usage.input_tokens > 0 {
211+
token_elements.push(
212+
div()
213+
.flex()
214+
.items_center()
215+
.gap_1()
216+
.text_color(cx.theme().muted_foreground)
217+
.child(
218+
Icon::default()
219+
.path(SharedString::from("icons/arrow_up.svg"))
220+
.text_color(cx.theme().muted_foreground),
221+
)
222+
.child(SharedString::from(format!(
223+
"{}",
224+
self.metadata.last_usage.input_tokens
225+
))),
226+
);
227+
}
226228

227-
// Context size (yellow) - only if > 0
228-
if self.metadata.current_context_size > 0 {
229-
token_elements.push(div().text_color(cx.theme().warning).child(
230-
SharedString::from(format!("/{}", self.metadata.current_context_size)),
231-
));
232-
}
229+
// Cache read tokens from last request with arrow_circle icon
230+
if self.metadata.last_usage.cache_read_input_tokens > 0 {
231+
token_elements.push(
232+
div()
233+
.flex()
234+
.items_center()
235+
.gap_1()
236+
.text_color(cx.theme().muted_foreground)
237+
.child(
238+
Icon::default()
239+
.path(SharedString::from("icons/arrow_circle.svg"))
240+
.text_color(cx.theme().muted_foreground),
241+
)
242+
.child(SharedString::from(format!(
243+
"{}",
244+
self.metadata.last_usage.cache_read_input_tokens
245+
))),
246+
);
247+
}
233248

234-
s.child(div().flex().gap_1().text_xs().children(token_elements))
235-
},
249+
d.child(div().flex().gap_2().children(token_elements))
250+
},
251+
),
236252
)
237253
}
238254
}

0 commit comments

Comments (0)