From 652371adff77504ed39a5c4803f52e4bc750614f Mon Sep 17 00:00:00 2001 From: Ahmed Ibrahim Date: Sun, 14 Sep 2025 23:02:50 -0400 Subject: [PATCH 1/2] Revert "refactor transcript view to handle HistoryCells (#3538)" This reverts commit 4891ee29c5f80d5bdb2018ac521a7fa23adf09b4. --- codex-rs/core/src/conversation_manager.rs | 32 +- .../core/tests/suite/fork_conversation.rs | 14 +- codex-rs/tui/src/app.rs | 21 +- codex-rs/tui/src/app_backtrack.rs | 160 +++++----- codex-rs/tui/src/backtrack_helpers.rs | 153 +++++++++ codex-rs/tui/src/history_cell.rs | 11 +- codex-rs/tui/src/lib.rs | 1 + codex-rs/tui/src/pager_overlay.rs | 302 +++++++----------- ...ts__transcript_overlay_snapshot_basic.snap | 4 +- 9 files changed, 389 insertions(+), 309 deletions(-) create mode 100644 codex-rs/tui/src/backtrack_helpers.rs diff --git a/codex-rs/core/src/conversation_manager.rs b/codex-rs/core/src/conversation_manager.rs index 3ca361a38a..7147eca233 100644 --- a/codex-rs/core/src/conversation_manager.rs +++ b/codex-rs/core/src/conversation_manager.rs @@ -134,19 +134,19 @@ impl ConversationManager { self.conversations.write().await.remove(conversation_id) } - /// Fork an existing conversation by taking messages up to the given position - /// (not including the message at the given position) and starting a new + /// Fork an existing conversation by dropping the last `drop_last_messages` + /// user/assistant messages from its transcript and starting a new /// conversation with identical configuration (unless overridden by the /// caller's `config`). The new conversation will have a fresh id. pub async fn fork_conversation( &self, - nth_user_message: usize, + num_messages_to_drop: usize, config: Config, path: PathBuf, ) -> CodexResult { // Compute the prefix up to the cut point. let history = RolloutRecorder::get_rollout_history(&path).await?; - let history = truncate_after_nth_user_message(history, nth_user_message); + let history = truncate_after_dropping_last_messages(history, num_messages_to_drop); // Spawn a new conversation with the computed initial history. let auth_manager = self.auth_manager.clone(); @@ -159,10 +159,14 @@ impl ConversationManager { } } -/// Return a prefix of `items` obtained by cutting strictly before the nth user message -/// (0-based) and all items that follow it. -fn truncate_after_nth_user_message(history: InitialHistory, n: usize) -> InitialHistory { - // Work directly on rollout items, and cut the vector at the nth user message input. +/// Return a prefix of `items` obtained by dropping the last `n` user messages +/// and all items that follow them. +fn truncate_after_dropping_last_messages(history: InitialHistory, n: usize) -> InitialHistory { + if n == 0 { + return InitialHistory::Forked(history.get_rollout_items()); + } + + // Work directly on rollout items, and cut the vector at the nth-from-last user message input. let items: Vec = history.get_rollout_items(); // Find indices of user message inputs in rollout order. @@ -175,13 +179,13 @@ fn truncate_after_nth_user_message(history: InitialHistory, n: usize) -> Initial } } - // If fewer than or equal to n user messages exist, treat as empty (out of range). - if user_positions.len() <= n { + // If fewer than n user messages exist, treat as empty. + if user_positions.len() < n { return InitialHistory::New; } - // Cut strictly before the nth user message (do not keep the nth itself). - let cut_idx = user_positions[n]; + // Cut strictly before the nth-from-last user message (do not keep the nth itself). 
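For reference, the cut-point arithmetic restored in this hunk can be exercised in isolation. A minimal sketch, assuming a plain slice of role strings instead of the real RolloutItem/ResponseItem types:

/// Index to cut at so that the last `n` user messages, and everything after
/// the nth-from-last one, are dropped. `None` stands in for the special cases
/// above: n == 0 keeps the full history, fewer than n users yields an empty one.
fn cut_index(roles: &[&str], n: usize) -> Option<usize> {
    if n == 0 {
        return None;
    }
    let user_positions: Vec<usize> = roles
        .iter()
        .enumerate()
        .filter(|(_, role)| **role == "user")
        .map(|(i, _)| i)
        .collect();
    if user_positions.len() < n {
        return None;
    }
    Some(user_positions[user_positions.len() - n])
}

// cut_index(&["user", "assistant", "user", "assistant"], 1) == Some(2)
// cut_index(&["user", "assistant", "user", "assistant"], 2) == Some(0)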
+ let cut_idx = user_positions[user_positions.len() - n]; let rolled: Vec = items.into_iter().take(cut_idx).collect(); if rolled.is_empty() { @@ -248,7 +252,7 @@ mod tests { .cloned() .map(RolloutItem::ResponseItem) .collect(); - let truncated = truncate_after_nth_user_message(InitialHistory::Forked(initial), 1); + let truncated = truncate_after_dropping_last_messages(InitialHistory::Forked(initial), 1); let got_items = truncated.get_rollout_items(); let expected_items = vec![ RolloutItem::ResponseItem(items[0].clone()), @@ -265,7 +269,7 @@ mod tests { .cloned() .map(RolloutItem::ResponseItem) .collect(); - let truncated2 = truncate_after_nth_user_message(InitialHistory::Forked(initial2), 2); + let truncated2 = truncate_after_dropping_last_messages(InitialHistory::Forked(initial2), 2); assert!(matches!(truncated2, InitialHistory::New)); } } diff --git a/codex-rs/core/tests/suite/fork_conversation.rs b/codex-rs/core/tests/suite/fork_conversation.rs index c9f66ea183..08e3f29d28 100644 --- a/codex-rs/core/tests/suite/fork_conversation.rs +++ b/codex-rs/core/tests/suite/fork_conversation.rs @@ -104,8 +104,7 @@ async fn fork_conversation_twice_drops_to_first_message() { items }; - // Compute expected prefixes after each fork by truncating base rollout - // strictly before the nth user input (0-based). + // Compute expected prefixes after each fork by truncating base rollout at nth-from-last user input. let base_items = read_items(&base_path); let find_user_input_positions = |items: &[RolloutItem]| -> Vec { let mut pos = Vec::new(); @@ -127,8 +126,11 @@ async fn fork_conversation_twice_drops_to_first_message() { }; let user_inputs = find_user_input_positions(&base_items); - // After cutting at nth user input (n=1 → second user message), cut strictly before that input. - let cut1 = user_inputs.get(1).copied().unwrap_or(0); + // After dropping last user input (n=1), cut strictly before that input if present, else empty. + let cut1 = user_inputs + .get(user_inputs.len().saturating_sub(1)) + .copied() + .unwrap_or(0); let expected_after_first: Vec = base_items[..cut1].to_vec(); // After dropping again (n=1 on fork1), compute expected relative to fork1's rollout. @@ -159,12 +161,12 @@ async fn fork_conversation_twice_drops_to_first_message() { serde_json::to_value(&expected_after_first).unwrap() ); - // Fork again with n=0 → drops the (new) last user message, leaving only the first. + // Fork again with n=1 → drops the (new) last user message, leaving only the first. let NewConversation { conversation: codex_fork2, .. 
} = conversation_manager - .fork_conversation(0, config_for_fork.clone(), fork1_path.clone()) + .fork_conversation(1, config_for_fork.clone(), fork1_path.clone()) .await .expect("fork 2"); diff --git a/codex-rs/tui/src/app.rs b/codex-rs/tui/src/app.rs index 40e3bd1c49..bfa9c0caf2 100644 --- a/codex-rs/tui/src/app.rs +++ b/codex-rs/tui/src/app.rs @@ -3,7 +3,6 @@ use crate::app_event::AppEvent; use crate::app_event_sender::AppEventSender; use crate::chatwidget::ChatWidget; use crate::file_search::FileSearchManager; -use crate::history_cell::HistoryCell; use crate::pager_overlay::Overlay; use crate::resume_picker::ResumeSelection; use crate::tui; @@ -46,7 +45,7 @@ pub(crate) struct App { pub(crate) file_search: FileSearchManager, - pub(crate) transcript_cells: Vec>, + pub(crate) transcript_lines: Vec>, // Pager overlay state (Transcript or Static like Diff) pub(crate) overlay: Option, @@ -132,7 +131,7 @@ impl App { active_profile, file_search, enhanced_keys_supported, - transcript_cells: Vec::new(), + transcript_lines: Vec::new(), overlay: None, deferred_history_lines: Vec::new(), has_emitted_history_lines: false, @@ -215,12 +214,15 @@ impl App { tui.frame_requester().schedule_frame(); } AppEvent::InsertHistoryCell(cell) => { - let cell: Arc = cell.into(); + let mut cell_transcript = cell.transcript_lines(); + if !cell.is_stream_continuation() && !self.transcript_lines.is_empty() { + cell_transcript.insert(0, Line::from("")); + } if let Some(Overlay::Transcript(t)) = &mut self.overlay { - t.insert_cell(cell.clone()); + t.insert_lines(cell_transcript.clone()); tui.frame_requester().schedule_frame(); } - self.transcript_cells.push(cell.clone()); + self.transcript_lines.extend(cell_transcript.clone()); let mut display = cell.display_lines(tui.terminal.last_known_screen_size.width); if !display.is_empty() { // Only insert a separating blank line for new cells that are not @@ -367,7 +369,7 @@ impl App { } => { // Enter alternate screen and set viewport to full size. let _ = tui.enter_alt_screen(); - self.overlay = Some(Overlay::new_transcript(self.transcript_cells.clone())); + self.overlay = Some(Overlay::new_transcript(self.transcript_lines.clone())); tui.frame_requester().schedule_frame(); } // Esc primes/advances backtracking only in normal (not working) mode @@ -392,7 +394,7 @@ impl App { kind: KeyEventKind::Press, .. } if self.backtrack.primed - && self.backtrack.nth_user_message != usize::MAX + && self.backtrack.count > 0 && self.chat_widget.composer_is_empty() => { // Delegate to helper for clarity; preserves behavior. @@ -426,6 +428,7 @@ mod tests { use codex_core::AuthManager; use codex_core::CodexAuth; use codex_core::ConversationManager; + use ratatui::text::Line; use std::sync::Arc; use std::sync::atomic::AtomicBool; @@ -448,7 +451,7 @@ mod tests { config, active_profile: None, file_search, - transcript_cells: Vec::new(), + transcript_lines: Vec::>::new(), overlay: None, deferred_history_lines: Vec::new(), has_emitted_history_lines: false, diff --git a/codex-rs/tui/src/app_backtrack.rs b/codex-rs/tui/src/app_backtrack.rs index 0da2760ff8..4e716c90ec 100644 --- a/codex-rs/tui/src/app_backtrack.rs +++ b/codex-rs/tui/src/app_backtrack.rs @@ -1,7 +1,7 @@ use std::path::PathBuf; use crate::app::App; -use crate::history_cell::UserHistoryCell; +use crate::backtrack_helpers; use crate::pager_overlay::Overlay; use crate::tui; use crate::tui::TuiEvent; @@ -19,11 +19,11 @@ pub(crate) struct BacktrackState { pub(crate) primed: bool, /// Session id of the base conversation to fork from. 
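A note on the InsertHistoryCell arm restored above: the transcript is kept as a flat Vec of lines again, with one blank separator line between cells unless the new cell continues a stream. A minimal sketch of that append rule over plain strings:

fn append_cell_transcript(
    transcript: &mut Vec<String>,
    cell_lines: Vec<String>,
    is_stream_continuation: bool,
) {
    // Distinct cells are separated by exactly one blank line; streamed
    // continuations attach directly to the previous cell's output.
    if !is_stream_continuation && !transcript.is_empty() {
        transcript.push(String::new());
    }
    transcript.extend(cell_lines);
}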
pub(crate) base_id: Option, - /// Index in the transcript of the last user message. - pub(crate) nth_user_message: usize, + /// Current step count (Nth last user message). + pub(crate) count: usize, /// True when the transcript overlay is showing a backtrack preview. pub(crate) overlay_preview_active: bool, - /// Pending fork request: (base_id, nth_user_message, prefill). + /// Pending fork request: (base_id, drop_count, prefill). pub(crate) pending: Option<(ConversationId, usize, String)>, } @@ -96,9 +96,9 @@ impl App { &mut self, prefill: String, base_id: ConversationId, - nth_user_message: usize, + drop_last_messages: usize, ) { - self.backtrack.pending = Some((base_id, nth_user_message, prefill)); + self.backtrack.pending = Some((base_id, drop_last_messages, prefill)); self.app_event_tx.send(crate::app_event::AppEvent::CodexOp( codex_core::protocol::Op::GetPath, )); @@ -107,7 +107,7 @@ impl App { /// Open transcript overlay (enters alternate screen and shows full transcript). pub(crate) fn open_transcript_overlay(&mut self, tui: &mut tui::Tui) { let _ = tui.enter_alt_screen(); - self.overlay = Some(Overlay::new_transcript(self.transcript_cells.clone())); + self.overlay = Some(Overlay::new_transcript(self.transcript_lines.clone())); tui.frame_requester().schedule_frame(); } @@ -130,17 +130,15 @@ impl App { /// Re-render the full transcript into the terminal scrollback in one call. /// Useful when switching sessions to ensure prior history remains visible. pub(crate) fn render_transcript_once(&mut self, tui: &mut tui::Tui) { - if !self.transcript_cells.is_empty() { - for cell in &self.transcript_cells { - tui.insert_history_lines(cell.transcript_lines()); - } + if !self.transcript_lines.is_empty() { + tui.insert_history_lines(self.transcript_lines.clone()); } } /// Initialize backtrack state and show composer hint. fn prime_backtrack(&mut self) { self.backtrack.primed = true; - self.backtrack.nth_user_message = usize::MAX; + self.backtrack.count = 0; self.backtrack.base_id = self.chat_widget.conversation_id(); self.chat_widget.show_esc_backtrack_hint(); } @@ -159,44 +157,51 @@ impl App { self.backtrack.primed = true; self.backtrack.base_id = self.chat_widget.conversation_id(); self.backtrack.overlay_preview_active = true; - let last_user_cell_position = self - .transcript_cells - .iter() - .filter_map(|c| c.as_any().downcast_ref::()) - .count() as i64 - - 1; - if last_user_cell_position >= 0 { - self.apply_backtrack_selection(last_user_cell_position as usize); - } + let sel = self.compute_backtrack_selection(tui, 1); + self.apply_backtrack_selection(sel); tui.frame_requester().schedule_frame(); } /// Step selection to the next older user message and update overlay. fn step_backtrack_and_highlight(&mut self, tui: &mut tui::Tui) { - let last_user_cell_position = self - .transcript_cells - .iter() - .filter(|c| c.as_any().is::()) - .take(self.backtrack.nth_user_message) - .count() - .saturating_sub(1); - self.apply_backtrack_selection(last_user_cell_position); + let next = self.backtrack.count.saturating_add(1); + let sel = self.compute_backtrack_selection(tui, next); + self.apply_backtrack_selection(sel); tui.frame_requester().schedule_frame(); } + /// Compute normalized target, scroll offset, and highlight for requested step. 
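The Esc-stepping above requests count + 1 and lets normalize_backtrack_n clamp the result, so walking past the oldest user message wraps back to the newest. A toy model of that stepping, assuming only a count of available user messages:

/// Next backtrack step: wrap to 1 (the most recent user message) once the
/// request exceeds the number of user messages available; 0 means "nothing
/// to select".
fn next_step(current: usize, user_message_count: usize) -> usize {
    if user_message_count == 0 {
        return 0;
    }
    let requested = current.saturating_add(1);
    if requested > user_message_count { 1 } else { requested }
}

// next_step(0, 3) == 1, next_step(2, 3) == 3, next_step(3, 3) == 1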
+ fn compute_backtrack_selection( + &self, + tui: &tui::Tui, + requested_n: usize, + ) -> (usize, Option, Option<(usize, usize)>) { + let nth = backtrack_helpers::normalize_backtrack_n(&self.transcript_lines, requested_n); + let header_idx = + backtrack_helpers::find_nth_last_user_header_index(&self.transcript_lines, nth); + let offset = header_idx.map(|idx| { + backtrack_helpers::wrapped_offset_before( + &self.transcript_lines, + idx, + tui.terminal.viewport_area.width, + ) + }); + let hl = backtrack_helpers::highlight_range_for_nth_last_user(&self.transcript_lines, nth); + (nth, offset, hl) + } + /// Apply a computed backtrack selection to the overlay and internal counter. - fn apply_backtrack_selection(&mut self, nth_user_message: usize) { - self.backtrack.nth_user_message = nth_user_message; + fn apply_backtrack_selection( + &mut self, + selection: (usize, Option, Option<(usize, usize)>), + ) { + let (nth, offset, hl) = selection; + self.backtrack.count = nth; if let Some(Overlay::Transcript(t)) = &mut self.overlay { - let cell = self - .transcript_cells - .iter() - .enumerate() - .filter(|(_, c)| c.as_any().is::()) - .nth(nth_user_message); - if let Some((idx, _)) = cell { - t.set_highlight_cell(Some(idx)); + if let Some(off) = offset { + t.set_scroll_offset(off); } + t.set_highlight_range(hl); } } @@ -214,19 +219,13 @@ impl App { /// Handle Enter in overlay backtrack preview: confirm selection and reset state. fn overlay_confirm_backtrack(&mut self, tui: &mut tui::Tui) { - let nth_user_message = self.backtrack.nth_user_message; if let Some(base_id) = self.backtrack.base_id { - let user_cells = self - .transcript_cells - .iter() - .filter_map(|c| c.as_any().downcast_ref::()) - .collect::>(); - let prefill = user_cells - .get(nth_user_message) - .map(|c| c.message.clone()) - .unwrap_or_default(); + let drop_last_messages = self.backtrack.count; + let prefill = + backtrack_helpers::nth_last_user_text(&self.transcript_lines, drop_last_messages) + .unwrap_or_default(); self.close_transcript_overlay(tui); - self.request_backtrack(prefill, base_id, nth_user_message); + self.request_backtrack(prefill, base_id, drop_last_messages); } self.reset_backtrack_state(); } @@ -245,15 +244,11 @@ impl App { /// Computes the prefill from the selected user message and requests history. pub(crate) fn confirm_backtrack_from_main(&mut self) { if let Some(base_id) = self.backtrack.base_id { - let prefill = self - .transcript_cells - .iter() - .filter(|c| c.as_any().is::()) - .nth(self.backtrack.nth_user_message) - .and_then(|c| c.as_any().downcast_ref::()) - .map(|c| c.message.clone()) - .unwrap_or_default(); - self.request_backtrack(prefill, base_id, self.backtrack.nth_user_message); + let drop_last_messages = self.backtrack.count; + let prefill = + backtrack_helpers::nth_last_user_text(&self.transcript_lines, drop_last_messages) + .unwrap_or_default(); + self.request_backtrack(prefill, base_id, drop_last_messages); } self.reset_backtrack_state(); } @@ -262,7 +257,7 @@ impl App { pub(crate) fn reset_backtrack_state(&mut self) { self.backtrack.primed = false; self.backtrack.base_id = None; - self.backtrack.nth_user_message = usize::MAX; + self.backtrack.count = 0; // In case a hint is somehow still visible (e.g., race with overlay open/close). 
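The selection computed above travels as a (nth, scroll_offset, highlight_range) tuple; applying it updates the counter and, when the transcript overlay is open, its scroll position and highlight. A stripped-down sketch of that application step, using a stand-in overlay type:

struct OverlayState {
    scroll_offset: usize,
    highlight: Option<(usize, usize)>,
}

fn apply_selection(
    count: &mut usize,
    overlay: Option<&mut OverlayState>,
    selection: (usize, Option<usize>, Option<(usize, usize)>),
) {
    let (nth, offset, highlight) = selection;
    *count = nth;
    if let Some(overlay) = overlay {
        if let Some(offset) = offset {
            overlay.scroll_offset = offset;
        }
        overlay.highlight = highlight;
    }
}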
self.chat_widget.clear_esc_backtrack_hint(); } @@ -276,9 +271,9 @@ impl App { ) -> Result<()> { if let Some((base_id, _, _)) = self.backtrack.pending.as_ref() && ev.conversation_id == *base_id - && let Some((_, nth_user_message, prefill)) = self.backtrack.pending.take() + && let Some((_, drop_count, prefill)) = self.backtrack.pending.take() { - self.fork_and_switch_to_new_conversation(tui, ev, nth_user_message, prefill) + self.fork_and_switch_to_new_conversation(tui, ev, drop_count, prefill) .await; } Ok(()) @@ -289,17 +284,17 @@ impl App { &mut self, tui: &mut tui::Tui, ev: ConversationPathResponseEvent, - nth_user_message: usize, + drop_count: usize, prefill: String, ) { let cfg = self.chat_widget.config_ref().clone(); // Perform the fork via a thin wrapper for clarity/testability. let result = self - .perform_fork(ev.path.clone(), nth_user_message, cfg.clone()) + .perform_fork(ev.path.clone(), drop_count, cfg.clone()) .await; match result { Ok(new_conv) => { - self.install_forked_conversation(tui, cfg, new_conv, nth_user_message, &prefill) + self.install_forked_conversation(tui, cfg, new_conv, drop_count, &prefill) } Err(e) => tracing::error!("error forking conversation: {e:#}"), } @@ -309,12 +304,10 @@ impl App { async fn perform_fork( &self, path: PathBuf, - nth_user_message: usize, + drop_count: usize, cfg: codex_core::config::Config, ) -> codex_core::error::Result { - self.server - .fork_conversation(nth_user_message, cfg, path) - .await + self.server.fork_conversation(drop_count, cfg, path).await } /// Install a forked conversation into the ChatWidget and update UI to reflect selection. @@ -323,7 +316,7 @@ impl App { tui: &mut tui::Tui, cfg: codex_core::config::Config, new_conv: codex_core::NewConversation, - nth_user_message: usize, + drop_count: usize, prefill: &str, ) { let conv = new_conv.conversation; @@ -340,7 +333,7 @@ impl App { self.chat_widget = crate::chatwidget::ChatWidget::new_from_existing(init, conv, session_configured); // Trim transcript up to the selected user message and re-render it. - self.trim_transcript_for_backtrack(nth_user_message); + self.trim_transcript_for_backtrack(drop_count); self.render_transcript_once(tui); if !prefill.is_empty() { self.chat_widget.set_composer_text(prefill.to_string()); @@ -348,21 +341,14 @@ impl App { tui.frame_requester().schedule_frame(); } - /// Trim transcript_cells to preserve only content up to the selected user message. - fn trim_transcript_for_backtrack(&mut self, nth_user_message: usize) { - let cut_idx = self - .transcript_cells - .iter() - .enumerate() - .filter_map(|(idx, cell)| { - if cell.as_any().is::() { - Some(idx) - } else { - None - } - }) - .nth(nth_user_message - 1) - .unwrap_or(self.transcript_cells.len()); - self.transcript_cells.truncate(cut_idx); + /// Trim transcript_lines to preserve only content up to the selected user message. + fn trim_transcript_for_backtrack(&mut self, drop_count: usize) { + if let Some(cut_idx) = + backtrack_helpers::find_nth_last_user_header_index(&self.transcript_lines, drop_count) + { + self.transcript_lines.truncate(cut_idx); + } else { + self.transcript_lines.clear(); + } } } diff --git a/codex-rs/tui/src/backtrack_helpers.rs b/codex-rs/tui/src/backtrack_helpers.rs new file mode 100644 index 0000000000..e2306b80b1 --- /dev/null +++ b/codex-rs/tui/src/backtrack_helpers.rs @@ -0,0 +1,153 @@ +use ratatui::text::Line; + +/// Convenience: compute the highlight range for the Nth last user message. 
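trim_transcript_for_backtrack above is the UI-side mirror of the fork: the local lines are cut at the header of the selected user message so the scrollback matches the truncated history. A sketch over plain strings, with the "user" header convention assumed from backtrack_helpers below:

fn trim_for_backtrack(transcript: &mut Vec<String>, drop_count: usize) {
    // Keep everything strictly before the nth-last "user" header; if there is
    // no such header (or drop_count is 0), clear the local transcript.
    let cut = (drop_count > 0)
        .then(|| {
            transcript
                .iter()
                .enumerate()
                .rev()
                .filter(|(_, line)| line.trim() == "user")
                .nth(drop_count - 1)
                .map(|(idx, _)| idx)
        })
        .flatten();
    match cut {
        Some(idx) => transcript.truncate(idx),
        None => transcript.clear(),
    }
}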
+pub(crate) fn highlight_range_for_nth_last_user( + lines: &[Line<'_>], + n: usize, +) -> Option<(usize, usize)> { + let header = find_nth_last_user_header_index(lines, n)?; + Some(highlight_range_from_header(lines, header)) +} + +/// Compute the wrapped display-line offset before `header_idx`, for a given width. +pub(crate) fn wrapped_offset_before(lines: &[Line<'_>], header_idx: usize, width: u16) -> usize { + let before = &lines[0..header_idx]; + crate::wrapping::word_wrap_lines(before, width as usize).len() +} + +/// Find the header index for the Nth last user message in the transcript. +/// Returns `None` if `n == 0` or there are fewer than `n` user messages. +pub(crate) fn find_nth_last_user_header_index(lines: &[Line<'_>], n: usize) -> Option { + if n == 0 { + return None; + } + let mut found = 0usize; + for (idx, line) in lines.iter().enumerate().rev() { + let content: String = line + .spans + .iter() + .map(|s| s.content.as_ref()) + .collect::>() + .join(""); + if content.trim() == "user" { + found += 1; + if found == n { + return Some(idx); + } + } + } + None +} + +/// Normalize a requested backtrack step `n` against the available user messages. +/// - Returns `0` if there are no user messages. +/// - Returns `n` if the Nth last user message exists. +/// - Otherwise wraps to `1` (the most recent user message). +pub(crate) fn normalize_backtrack_n(lines: &[Line<'_>], n: usize) -> usize { + if n == 0 { + return 0; + } + if find_nth_last_user_header_index(lines, n).is_some() { + return n; + } + if find_nth_last_user_header_index(lines, 1).is_some() { + 1 + } else { + 0 + } +} + +/// Extract the text content of the Nth last user message. +/// The message body is considered to be the lines following the "user" header +/// until the first blank line. +pub(crate) fn nth_last_user_text(lines: &[Line<'_>], n: usize) -> Option { + let header_idx = find_nth_last_user_header_index(lines, n)?; + extract_message_text_after_header(lines, header_idx) +} + +/// Extract message text starting after `header_idx` until the first blank line. +fn extract_message_text_after_header(lines: &[Line<'_>], header_idx: usize) -> Option { + let start = header_idx + 1; + let mut out: Vec = Vec::new(); + for line in lines.iter().skip(start) { + let is_blank = line + .spans + .iter() + .all(|s| s.content.as_ref().trim().is_empty()); + if is_blank { + break; + } + let text: String = line + .spans + .iter() + .map(|s| s.content.as_ref()) + .collect::>() + .join(""); + out.push(text); + } + if out.is_empty() { + None + } else { + Some(out.join("\n")) + } +} + +/// Given a header index, return the inclusive range for the message block +/// [header_idx, end) where end is the first blank line after the header or the +/// end of the transcript. +fn highlight_range_from_header(lines: &[Line<'_>], header_idx: usize) -> (usize, usize) { + let mut end = header_idx + 1; + while end < lines.len() { + let is_blank = lines[end] + .spans + .iter() + .all(|s| s.content.as_ref().trim().is_empty()); + if is_blank { + break; + } + end += 1; + } + (header_idx, end) +} + +#[cfg(test)] +mod tests { + use super::*; + + fn line(s: &str) -> Line<'static> { + s.to_string().into() + } + + fn transcript_with_users(count: usize) -> Vec> { + // Build a transcript with `count` user messages, each followed by one body line and a blank line. 
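As a usage sketch (not part of the patch), the helpers above compose like this on the same transcript shape the tests below construct — a "user" header line, one body line, then a blank line per message:

#[test]
fn helpers_agree_on_a_two_message_transcript() {
    let lines: Vec<Line<'static>> = vec![
        "user".into(),
        "message 0".into(),
        "".into(),
        "user".into(),
        "message 1".into(),
        "".into(),
    ];
    assert_eq!(find_nth_last_user_header_index(&lines, 1), Some(3));
    assert_eq!(nth_last_user_text(&lines, 1), Some("message 1".to_string()));
    assert_eq!(highlight_range_for_nth_last_user(&lines, 2), Some((0, 2)));
}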
+ let mut v = Vec::new(); + for i in 0..count { + v.push(line("user")); + v.push(line(&format!("message {i}"))); + v.push(line("")); + } + v + } + + #[test] + fn normalize_wraps_to_one_when_past_oldest() { + let lines = transcript_with_users(2); + assert_eq!(normalize_backtrack_n(&lines, 1), 1); + assert_eq!(normalize_backtrack_n(&lines, 2), 2); + // Requesting 3rd when only 2 exist wraps to 1 + assert_eq!(normalize_backtrack_n(&lines, 3), 1); + } + + #[test] + fn normalize_returns_zero_when_no_user_messages() { + let lines = transcript_with_users(0); + assert_eq!(normalize_backtrack_n(&lines, 1), 0); + assert_eq!(normalize_backtrack_n(&lines, 5), 0); + } + + #[test] + fn normalize_keeps_valid_n() { + let lines = transcript_with_users(3); + assert_eq!(normalize_backtrack_n(&lines, 2), 2); + } +} diff --git a/codex-rs/tui/src/history_cell.rs b/codex-rs/tui/src/history_cell.rs index fee9da952c..c80cbcdb1a 100644 --- a/codex-rs/tui/src/history_cell.rs +++ b/codex-rs/tui/src/history_cell.rs @@ -43,7 +43,6 @@ use ratatui::style::Stylize; use ratatui::widgets::Paragraph; use ratatui::widgets::WidgetRef; use ratatui::widgets::Wrap; -use std::any::Any; use std::collections::HashMap; use std::io::Cursor; use std::path::Path; @@ -70,7 +69,7 @@ pub(crate) enum PatchEventType { /// Represents an event to display in the conversation history. Returns its /// `Vec>` representation to make it easier to display in a /// scrollable list. -pub(crate) trait HistoryCell: std::fmt::Debug + Send + Sync + Any { +pub(crate) trait HistoryCell: std::fmt::Debug + Send + Sync { fn display_lines(&self, width: u16) -> Vec>; fn transcript_lines(&self) -> Vec> { @@ -90,15 +89,9 @@ pub(crate) trait HistoryCell: std::fmt::Debug + Send + Sync + Any { } } -impl dyn HistoryCell { - pub(crate) fn as_any(&self) -> &dyn Any { - self - } -} - #[derive(Debug)] pub(crate) struct UserHistoryCell { - pub message: String, + message: String, } impl HistoryCell for UserHistoryCell { diff --git a/codex-rs/tui/src/lib.rs b/codex-rs/tui/src/lib.rs index 7f97b3333f..0f51cb22ef 100644 --- a/codex-rs/tui/src/lib.rs +++ b/codex-rs/tui/src/lib.rs @@ -32,6 +32,7 @@ mod app; mod app_backtrack; mod app_event; mod app_event_sender; +mod backtrack_helpers; mod bottom_pane; mod chatwidget; mod citation_regex; diff --git a/codex-rs/tui/src/pager_overlay.rs b/codex-rs/tui/src/pager_overlay.rs index e5524b252b..0a7e54f06f 100644 --- a/codex-rs/tui/src/pager_overlay.rs +++ b/codex-rs/tui/src/pager_overlay.rs @@ -1,8 +1,6 @@ use std::io::Result; -use std::sync::Arc; use std::time::Duration; -use crate::history_cell::HistoryCell; use crate::render::line_utils::push_owned_lines; use crate::tui; use crate::tui::TuiEvent; @@ -17,7 +15,6 @@ use ratatui::style::Styled; use ratatui::style::Stylize; use ratatui::text::Line; use ratatui::text::Span; -use ratatui::text::Text; use ratatui::widgets::Paragraph; use ratatui::widgets::WidgetRef; @@ -27,8 +24,8 @@ pub(crate) enum Overlay { } impl Overlay { - pub(crate) fn new_transcript(cells: Vec>) -> Self { - Self::Transcript(TranscriptOverlay::new(cells)) + pub(crate) fn new_transcript(lines: Vec>) -> Self { + Self::Transcript(TranscriptOverlay::new(lines)) } pub(crate) fn new_static_with_title(lines: Vec>, title: String) -> Self { @@ -76,24 +73,21 @@ fn render_key_hints(area: Rect, buf: &mut Buffer, pairs: &[(&str, &str)]) { /// Generic widget for rendering a pager view. 
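For context on the history_cell.rs hunk above: the removed as_any shim is the usual pattern for downcasting trait objects, relying on Any as a supertrait. A self-contained sketch of the idea (assumes a toolchain with dyn-trait upcasting, as the original shim did):

use std::any::Any;

trait Cell: Any {
    fn text(&self) -> String;
}

impl dyn Cell {
    fn as_any(&self) -> &dyn Any {
        self // upcast &dyn Cell -> &dyn Any
    }
}

struct UserCell {
    message: String,
}

impl Cell for UserCell {
    fn text(&self) -> String {
        self.message.clone()
    }
}

fn main() {
    let cell: Box<dyn Cell> = Box::new(UserCell { message: "hi".into() });
    assert_eq!(cell.text(), "hi");
    // The shim lets callers recover the concrete type behind the trait object:
    assert!(cell.as_any().downcast_ref::<UserCell>().is_some());
}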
struct PagerView { - texts: Vec>, + lines: Vec>, scroll_offset: usize, title: String, wrap_cache: Option, last_content_height: Option, - /// If set, on next render ensure this chunk is visible. - pending_scroll_chunk: Option, } impl PagerView { - fn new(texts: Vec>, title: String, scroll_offset: usize) -> Self { + fn new(lines: Vec>, title: String, scroll_offset: usize) -> Self { Self { - texts, + lines, scroll_offset, title, wrap_cache: None, last_content_height: None, - pending_scroll_chunk: None, } } @@ -102,14 +96,6 @@ impl PagerView { let content_area = self.scroll_area(area); self.update_last_content_height(content_area.height); self.ensure_wrapped(content_area.width); - // If there is a pending request to scroll a specific chunk into view, - // satisfy it now that wrapping is up to date for this width. - if let (Some(idx), Some(cache)) = - (self.pending_scroll_chunk.take(), self.wrap_cache.as_ref()) - && let Some(range) = cache.chunk_ranges.get(idx).cloned() - { - self.ensure_range_visible(range, content_area.height as usize, cache.wrapped.len()); - } // Compute page bounds without holding an immutable borrow on cache while mutating self let wrapped_len = self .wrap_cache @@ -122,12 +108,40 @@ impl PagerView { let start = self.scroll_offset; let end = (start + content_area.height as usize).min(wrapped_len); - let wrapped = self.cached(); + let (wrapped, _src_idx) = self.cached(); let page = &wrapped[start..end]; self.render_content_page_prepared(content_area, buf, page); self.render_bottom_bar(area, content_area, buf, wrapped); } + fn render_with_highlight( + &mut self, + area: Rect, + buf: &mut Buffer, + highlight: Option<(usize, usize)>, + ) { + self.render_header(area, buf); + let content_area = self.scroll_area(area); + self.update_last_content_height(content_area.height); + self.ensure_wrapped(content_area.width); + // Compute page bounds first to avoid borrow conflicts + let wrapped_len = self + .wrap_cache + .as_ref() + .map(|c| c.wrapped.len()) + .unwrap_or(0); + self.scroll_offset = self + .scroll_offset + .min(wrapped_len.saturating_sub(content_area.height as usize)); + let start = self.scroll_offset; + let end = (start + content_area.height as usize).min(wrapped_len); + + let (wrapped, src_idx) = self.cached(); + let page = self.page_with_optional_highlight(wrapped, src_idx, start, end, highlight); + self.render_content_page_prepared(content_area, buf, &page); + self.render_bottom_bar(area, content_area, buf, wrapped); + } + fn render_header(&self, area: Rect, buf: &mut Buffer) { Span::from("/ ".repeat(area.width as usize / 2)) .dim() @@ -256,8 +270,7 @@ impl PagerView { struct WrapCache { width: u16, wrapped: Vec>, - /// For each input Text chunk, the inclusive-excluded range of wrapped lines produced. 
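Both render paths below clamp the scroll offset against the wrapped length before slicing out a page, which is also what lets usize::MAX act as "stay pinned to the bottom". The bounds computation in isolation (a sketch, not the widget code):

/// Clamp a requested scroll offset and return the [start, end) range of
/// wrapped lines that fits in a viewport of `height` rows.
fn page_bounds(scroll_offset: usize, wrapped_len: usize, height: usize) -> (usize, usize) {
    let start = scroll_offset.min(wrapped_len.saturating_sub(height));
    let end = (start + height).min(wrapped_len);
    (start, end)
}

// page_bounds(usize::MAX, 100, 10) == (90, 100)   // follow bottom
// page_bounds(0, 100, 10)          == (0, 10)
// page_bounds(5, 3, 10)            == (0, 3)      // short content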
- chunk_ranges: Vec>, + src_idx: Vec, base_len: usize, } @@ -265,37 +278,72 @@ impl PagerView { fn ensure_wrapped(&mut self, width: u16) { let width = width.max(1); let needs = match self.wrap_cache { - Some(ref c) => c.width != width || c.base_len != self.texts.len(), + Some(ref c) => c.width != width || c.base_len != self.lines.len(), None => true, }; if !needs { return; } let mut wrapped: Vec> = Vec::new(); - let mut chunk_ranges: Vec> = Vec::with_capacity(self.texts.len()); - for text in &self.texts { - let start = wrapped.len(); - for line in &text.lines { - let ws = crate::wrapping::word_wrap_line(line, width as usize); - push_owned_lines(&ws, &mut wrapped); - } - let end = wrapped.len(); - chunk_ranges.push(start..end); + let mut src_idx: Vec = Vec::new(); + for (i, line) in self.lines.iter().enumerate() { + let ws = crate::wrapping::word_wrap_line(line, width as usize); + src_idx.extend(std::iter::repeat_n(i, ws.len())); + push_owned_lines(&ws, &mut wrapped); } self.wrap_cache = Some(WrapCache { width, wrapped, - chunk_ranges, - base_len: self.texts.len(), + src_idx, + base_len: self.lines.len(), }); } - fn cached(&self) -> &[Line<'static>] { + fn cached(&self) -> (&[Line<'static>], &[usize]) { if let Some(cache) = self.wrap_cache.as_ref() { - &cache.wrapped + (&cache.wrapped, &cache.src_idx) } else { - &[] + (&[], &[]) + } + } + + fn page_with_optional_highlight<'a>( + &self, + wrapped: &'a [Line<'static>], + src_idx: &[usize], + start: usize, + end: usize, + highlight: Option<(usize, usize)>, + ) -> std::borrow::Cow<'a, [Line<'static>]> { + use ratatui::style::Modifier; + let (hi_start, hi_end) = match highlight { + Some(r) => r, + None => return std::borrow::Cow::Borrowed(&wrapped[start..end]), + }; + let mut out: Vec> = Vec::with_capacity(end - start); + let mut bold_done = false; + for (row, src_line) in wrapped + .iter() + .enumerate() + .skip(start) + .take(end.saturating_sub(start)) + { + let mut line = src_line.clone(); + if let Some(src) = src_idx.get(row).copied() + && src >= hi_start + && src < hi_end + { + for (i, s) in line.spans.iter_mut().enumerate() { + s.style.add_modifier |= Modifier::REVERSED; + if !bold_done && i == 0 { + s.style.add_modifier |= Modifier::BOLD; + bold_done = true; + } + } + } + out.push(line); } + std::borrow::Cow::Owned(out) } fn is_scrolled_to_bottom(&self) -> bool { @@ -315,108 +363,38 @@ impl PagerView { let max_scroll = cache.wrapped.len().saturating_sub(visible); self.scroll_offset >= max_scroll } - - /// Request that the given text chunk index be scrolled into view on next render. 
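The rebuilt cache keeps a parallel src_idx vector so every wrapped row can be traced back to the source line it came from, which is what the highlight pass keys on. A minimal sketch of that bookkeeping, using a naive fixed-width splitter in place of word_wrap_line:

/// Wrap `lines` at `width` characters, recording for each output row the
/// index of the source line that produced it.
fn wrap_with_sources(lines: &[String], width: usize) -> (Vec<String>, Vec<usize>) {
    let mut wrapped = Vec::new();
    let mut src_idx = Vec::new();
    for (i, line) in lines.iter().enumerate() {
        let chars: Vec<char> = line.chars().collect();
        let mut produced = 0;
        for chunk in chars.chunks(width.max(1)) {
            wrapped.push(chunk.iter().collect::<String>());
            src_idx.push(i);
            produced += 1;
        }
        if produced == 0 {
            // An empty source line still occupies one (empty) wrapped row.
            wrapped.push(String::new());
            src_idx.push(i);
        }
    }
    (wrapped, src_idx)
}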
- fn scroll_chunk_into_view(&mut self, chunk_index: usize) { - self.pending_scroll_chunk = Some(chunk_index); - } - - fn ensure_range_visible( - &mut self, - range: std::ops::Range, - viewport_height: usize, - total_wrapped: usize, - ) { - if viewport_height == 0 || total_wrapped == 0 { - return; - } - let first = range.start.min(total_wrapped.saturating_sub(1)); - let last = range - .end - .saturating_sub(1) - .min(total_wrapped.saturating_sub(1)); - let current_top = self.scroll_offset.min(total_wrapped.saturating_sub(1)); - let current_bottom = current_top.saturating_add(viewport_height.saturating_sub(1)); - - if first < current_top { - self.scroll_offset = first; - } else if last > current_bottom { - // Scroll just enough so that 'last' is visible at the bottom - self.scroll_offset = last.saturating_sub(viewport_height.saturating_sub(1)); - } - } } pub(crate) struct TranscriptOverlay { view: PagerView, - cells: Vec>, - highlight_cell: Option, + highlight_range: Option<(usize, usize)>, is_done: bool, } impl TranscriptOverlay { - pub(crate) fn new(transcript_cells: Vec>) -> Self { + pub(crate) fn new(transcript_lines: Vec>) -> Self { Self { view: PagerView::new( - Self::render_cells_to_texts(&transcript_cells, None), + transcript_lines, "T R A N S C R I P T".to_string(), usize::MAX, ), - cells: transcript_cells, - highlight_cell: None, + highlight_range: None, is_done: false, } } - fn render_cells_to_texts( - cells: &[Arc], - highlight_cell: Option, - ) -> Vec> { - let mut texts: Vec> = Vec::new(); - let mut first = true; - for (idx, cell) in cells.iter().enumerate() { - let mut lines: Vec> = Vec::new(); - if !cell.is_stream_continuation() && !first { - lines.push(Line::from("")); - } - let cell_lines = if Some(idx) == highlight_cell { - cell.transcript_lines() - .into_iter() - .map(|l| l.reversed()) - .collect() - } else { - cell.transcript_lines() - }; - lines.extend(cell_lines); - texts.push(Text::from(lines)); - first = false; - } - texts - } - - pub(crate) fn insert_cell(&mut self, cell: Arc) { + pub(crate) fn insert_lines(&mut self, lines: Vec>) { let follow_bottom = self.view.is_scrolled_to_bottom(); - // Append as a new Text chunk (with a separating blank if needed) - let mut lines: Vec> = Vec::new(); - if !cell.is_stream_continuation() && !self.cells.is_empty() { - lines.push(Line::from("")); - } - lines.extend(cell.transcript_lines()); - self.view.texts.push(Text::from(lines)); - self.cells.push(cell); + self.view.lines.extend(lines); self.view.wrap_cache = None; if follow_bottom { self.view.scroll_offset = usize::MAX; } } - pub(crate) fn set_highlight_cell(&mut self, cell: Option) { - self.highlight_cell = cell; - self.view.wrap_cache = None; - self.view.texts = Self::render_cells_to_texts(&self.cells, self.highlight_cell); - if let Some(idx) = self.highlight_cell { - self.view.scroll_chunk_into_view(idx); - } + pub(crate) fn set_highlight_range(&mut self, range: Option<(usize, usize)>) { + self.highlight_range = range; } fn render_hints(&self, area: Rect, buf: &mut Buffer) { @@ -424,7 +402,9 @@ impl TranscriptOverlay { let line2 = Rect::new(area.x, area.y.saturating_add(1), area.width, 1); render_key_hints(line1, buf, PAGER_KEY_HINTS); let mut pairs: Vec<(&str, &str)> = vec![("q", "quit"), ("Esc", "edit prev")]; - if self.highlight_cell.is_some() { + if let Some((start, end)) = self.highlight_range + && end > start + { pairs.push(("⏎", "edit message")); } render_key_hints(line2, buf, &pairs); @@ -434,7 +414,8 @@ impl TranscriptOverlay { let top_h = 
area.height.saturating_sub(3); let top = Rect::new(area.x, area.y, area.width, top_h); let bottom = Rect::new(area.x, area.y + top_h, area.width, 3); - self.view.render(top, buf); + self.view + .render_with_highlight(top, buf, self.highlight_range); self.render_hints(bottom, buf); } } @@ -477,6 +458,9 @@ impl TranscriptOverlay { pub(crate) fn is_done(&self) -> bool { self.is_done } + pub(crate) fn set_scroll_offset(&mut self, offset: usize) { + self.view.scroll_offset = offset; + } } pub(crate) struct StaticOverlay { @@ -487,7 +471,7 @@ pub(crate) struct StaticOverlay { impl StaticOverlay { pub(crate) fn with_title(lines: Vec>, title: String) -> Self { Self { - view: PagerView::new(vec![Text::from(lines)], title, 0), + view: PagerView::new(lines, title, 0), is_done: false, } } @@ -550,26 +534,9 @@ mod tests { use ratatui::Terminal; use ratatui::backend::TestBackend; - #[derive(Debug)] - struct TestCell { - lines: Vec>, - } - - impl crate::history_cell::HistoryCell for TestCell { - fn display_lines(&self, _width: u16) -> Vec> { - self.lines.clone() - } - - fn transcript_lines(&self) -> Vec> { - self.lines.clone() - } - } - #[test] fn edit_prev_hint_is_visible() { - let mut overlay = TranscriptOverlay::new(vec![Arc::new(TestCell { - lines: vec![Line::from("hello")], - })]); + let mut overlay = TranscriptOverlay::new(vec![Line::from("hello")]); // Render into a small buffer and assert the backtrack hint is present let area = Rect::new(0, 0, 40, 10); @@ -594,15 +561,9 @@ mod tests { fn transcript_overlay_snapshot_basic() { // Prepare a transcript overlay with a few lines let mut overlay = TranscriptOverlay::new(vec![ - Arc::new(TestCell { - lines: vec![Line::from("alpha")], - }), - Arc::new(TestCell { - lines: vec![Line::from("beta")], - }), - Arc::new(TestCell { - lines: vec![Line::from("gamma")], - }), + Line::from("alpha"), + Line::from("beta"), + Line::from("gamma"), ]); let mut term = Terminal::new(TestBackend::new(40, 10)).expect("term"); term.draw(|f| overlay.render(f.area(), f.buffer_mut())) @@ -612,15 +573,8 @@ mod tests { #[test] fn transcript_overlay_keeps_scroll_pinned_at_bottom() { - let mut overlay = TranscriptOverlay::new( - (0..20) - .map(|i| { - Arc::new(TestCell { - lines: vec![Line::from(format!("line{i}"))], - }) as Arc - }) - .collect(), - ); + let mut overlay = + TranscriptOverlay::new((0..20).map(|i| Line::from(format!("line{i}"))).collect()); let mut term = Terminal::new(TestBackend::new(40, 12)).expect("term"); term.draw(|f| overlay.render(f.area(), f.buffer_mut())) .expect("draw"); @@ -630,33 +584,22 @@ mod tests { "expected initial render to leave view at bottom" ); - overlay.insert_cell(Arc::new(TestCell { - lines: vec!["tail".into()], - })); + overlay.insert_lines(vec!["tail".into()]); assert_eq!(overlay.view.scroll_offset, usize::MAX); } #[test] fn transcript_overlay_preserves_manual_scroll_position() { - let mut overlay = TranscriptOverlay::new( - (0..20) - .map(|i| { - Arc::new(TestCell { - lines: vec![Line::from(format!("line{i}"))], - }) as Arc - }) - .collect(), - ); + let mut overlay = + TranscriptOverlay::new((0..20).map(|i| Line::from(format!("line{i}"))).collect()); let mut term = Terminal::new(TestBackend::new(40, 12)).expect("term"); term.draw(|f| overlay.render(f.area(), f.buffer_mut())) .expect("draw"); overlay.view.scroll_offset = 0; - overlay.insert_cell(Arc::new(TestCell { - lines: vec!["tail".into()], - })); + overlay.insert_lines(vec!["tail".into()]); assert_eq!(overlay.view.scroll_offset, 0); } @@ -677,21 +620,17 @@ mod tests { #[test] fn 
pager_wrap_cache_reuses_for_same_width_and_rebuilds_on_change() { let long = "This is a long line that should wrap multiple times to ensure non-empty wrapped output."; - let mut pv = PagerView::new( - vec![Text::from(vec![long.into()]), Text::from(vec![long.into()])], - "T".to_string(), - 0, - ); + let mut pv = PagerView::new(vec![long.into(), long.into()], "T".to_string(), 0); // Build cache at width 24 pv.ensure_wrapped(24); - let w1 = pv.cached(); + let (w1, _) = pv.cached(); assert!(!w1.is_empty(), "expected wrapped output to be non-empty"); let ptr1 = w1.as_ptr(); // Re-run with same width: cache should be reused (pointer stability heuristic) pv.ensure_wrapped(24); - let w2 = pv.cached(); + let (w2, _) = pv.cached(); let ptr2 = w2.as_ptr(); assert_eq!(ptr1, ptr2, "cache should not rebuild for unchanged width"); @@ -699,7 +638,7 @@ mod tests { // Drop immutable borrow before mutating let prev_len = w2.len(); pv.ensure_wrapped(36); - let w3 = pv.cached(); + let (w3, _) = pv.cached(); assert_ne!( prev_len, w3.len(), @@ -710,16 +649,15 @@ mod tests { #[test] fn pager_wrap_cache_invalidates_on_append() { let long = "Another long line for wrapping behavior verification."; - let mut pv = PagerView::new(vec![Text::from(vec![long.into()])], "T".to_string(), 0); + let mut pv = PagerView::new(vec![long.into()], "T".to_string(), 0); pv.ensure_wrapped(28); - let w1 = pv.cached(); + let (w1, _) = pv.cached(); let len1 = w1.len(); // Append new lines should cause ensure_wrapped to rebuild due to len change - pv.texts.push(Text::from(vec![long.into()])); - pv.texts.push(Text::from(vec![long.into()])); + pv.lines.extend([long.into(), long.into()]); pv.ensure_wrapped(28); - let w2 = pv.cached(); + let (w2, _) = pv.cached(); assert!( w2.len() >= len1, "wrapped length should grow or stay same after append" diff --git a/codex-rs/tui/src/snapshots/codex_tui__pager_overlay__tests__transcript_overlay_snapshot_basic.snap b/codex-rs/tui/src/snapshots/codex_tui__pager_overlay__tests__transcript_overlay_snapshot_basic.snap index 0c03edef47..b9105e8021 100644 --- a/codex-rs/tui/src/snapshots/codex_tui__pager_overlay__tests__transcript_overlay_snapshot_basic.snap +++ b/codex-rs/tui/src/snapshots/codex_tui__pager_overlay__tests__transcript_overlay_snapshot_basic.snap @@ -4,10 +4,10 @@ expression: term.backend() --- "/ T R A N S C R I P T / / / / / / / / / " "alpha " -" " "beta " -" " "gamma " +"~ " +"~ " "───────────────────────────────── 100% ─" " ↑/↓ scroll PgUp/PgDn page Home/End " " q quit Esc edit prev " From 1dab80b777f55a0c95fefd099a353cdfc4a703a5 Mon Sep 17 00:00:00 2001 From: Ahmed Ibrahim Date: Sun, 14 Sep 2025 23:31:57 -0400 Subject: [PATCH 2/2] revert-3538-nornagon/transcript-cells --- codex-rs/core/tests/suite/compact_resume_fork.rs | 4 ++-- 1 file changed, 2 insertions(+), 2 deletions(-) diff --git a/codex-rs/core/tests/suite/compact_resume_fork.rs b/codex-rs/core/tests/suite/compact_resume_fork.rs index dd14ad89b5..34b43ece91 100644 --- a/codex-rs/core/tests/suite/compact_resume_fork.rs +++ b/codex-rs/core/tests/suite/compact_resume_fork.rs @@ -74,7 +74,7 @@ async fn compact_resume_and_fork_preserve_model_history_view() { "compact+resume test expects resumed path {resumed_path:?} to exist", ); - let forked = fork_conversation(&manager, &config, resumed_path, 4).await; + let forked = fork_conversation(&manager, &config, resumed_path, 1).await; user_turn(&forked, "AFTER_FORK").await; // 3. Capture the requests to the model and validate the history slices. 
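The count arguments in this file change meaning with the revert: fork_conversation now takes how many trailing user messages to drop rather than a 0-based index to cut at. A small worked conversion (illustrative only; the actual turn counts live in the tests themselves):

/// Drop count equivalent to an old index-based cut point, given the total
/// number of user messages in the history.
fn index_cut_to_drop_count(total_user_messages: usize, cut_before_index: usize) -> usize {
    total_user_messages - cut_before_index
}

// e.g. index_cut_to_drop_count(5, 4) == 1, consistent with the 4 -> 1 change
// above if that history contains five user turns.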
@@ -535,7 +535,7 @@ async fn compact_resume_after_second_compaction_preserves_history() { "second compact test expects resumed path {resumed_path:?} to exist", ); - let forked = fork_conversation(&manager, &config, resumed_path, 1).await; + let forked = fork_conversation(&manager, &config, resumed_path, 3).await; user_turn(&forked, "AFTER_FORK").await; compact_conversation(&forked).await;