32 changes: 14 additions & 18 deletions codex-rs/core/src/conversation_manager.rs
@@ -144,19 +144,19 @@ impl ConversationManager {
self.conversations.write().await.remove(conversation_id)
}

/// Fork an existing conversation by dropping the last `drop_last_messages`
/// user/assistant messages from its transcript and starting a new
/// Fork an existing conversation by taking messages up to the given position
/// (not including the message at the given position) and starting a new
/// conversation with identical configuration (unless overridden by the
/// caller's `config`). The new conversation will have a fresh id.
pub async fn fork_conversation(
&self,
num_messages_to_drop: usize,
nth_user_message: usize,
config: Config,
path: PathBuf,
) -> CodexResult<NewConversation> {
// Compute the prefix up to the cut point.
let history = RolloutRecorder::get_rollout_history(&path).await?;
let history = truncate_after_dropping_last_messages(history, num_messages_to_drop);
let history = truncate_after_nth_user_message(history, nth_user_message);

// Spawn a new conversation with the computed initial history.
let auth_manager = self.auth_manager.clone();
@@ -169,14 +169,10 @@ impl ConversationManager {
}
}

/// Return a prefix of `items` obtained by dropping the last `n` user messages
/// and all items that follow them.
fn truncate_after_dropping_last_messages(history: InitialHistory, n: usize) -> InitialHistory {
if n == 0 {
return InitialHistory::Forked(history.get_rollout_items());
}

// Work directly on rollout items, and cut the vector at the nth-from-last user message input.
/// Return a prefix of `items` obtained by cutting strictly before the nth user message
/// (0-based) and all items that follow it.
fn truncate_after_nth_user_message(history: InitialHistory, n: usize) -> InitialHistory {
// Work directly on rollout items, and cut the vector at the nth user message input.
let items: Vec<RolloutItem> = history.get_rollout_items();

// Find indices of user message inputs in rollout order.
@@ -189,13 +185,13 @@ fn truncate_after_dropping_last_messages(history: InitialHistory, n: usize) -> I
}
}

// If fewer than n user messages exist, treat as empty.
if user_positions.len() < n {
// If fewer than or equal to n user messages exist, treat as empty (out of range).
if user_positions.len() <= n {
return InitialHistory::New;
}

// Cut strictly before the nth-from-last user message (do not keep the nth itself).
let cut_idx = user_positions[user_positions.len() - n];
// Cut strictly before the nth user message (do not keep the nth itself).
let cut_idx = user_positions[n];
let rolled: Vec<RolloutItem> = items.into_iter().take(cut_idx).collect();

if rolled.is_empty() {
@@ -262,7 +258,7 @@ mod tests {
.cloned()
.map(RolloutItem::ResponseItem)
.collect();
let truncated = truncate_after_dropping_last_messages(InitialHistory::Forked(initial), 1);
let truncated = truncate_after_nth_user_message(InitialHistory::Forked(initial), 1);
let got_items = truncated.get_rollout_items();
let expected_items = vec![
RolloutItem::ResponseItem(items[0].clone()),
@@ -279,7 +275,7 @@ mod tests {
.cloned()
.map(RolloutItem::ResponseItem)
.collect();
let truncated2 = truncate_after_dropping_last_messages(InitialHistory::Forked(initial2), 2);
let truncated2 = truncate_after_nth_user_message(InitialHistory::Forked(initial2), 2);
assert!(matches!(truncated2, InitialHistory::New));
}
}
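
The core change in this file: `fork_conversation` now takes a 0-based `nth_user_message` index instead of a drop-count, and `truncate_after_nth_user_message` keeps only the rollout items strictly before that user message, falling back to `InitialHistory::New` when the index is out of range. A minimal standalone sketch of that cut logic, using a stand-in `Item` enum and an `Option` return in place of the crate's `RolloutItem` and `InitialHistory` types:

    // Stand-ins for the crate's rollout item types, for illustration only.
    #[derive(Clone, Debug, PartialEq)]
    enum Item {
        UserMessage(String),
        Other(String),
    }

    /// Keep only the items strictly before the nth (0-based) user message.
    /// Returns `None` when fewer than `n + 1` user messages exist, mirroring
    /// the `InitialHistory::New` fallback in the real function.
    fn prefix_before_nth_user_message(items: Vec<Item>, n: usize) -> Option<Vec<Item>> {
        // Indices of user messages in rollout order.
        let user_positions: Vec<usize> = items
            .iter()
            .enumerate()
            .filter_map(|(i, it)| matches!(it, Item::UserMessage(_)).then_some(i))
            .collect();

        // Out of range: treat as an empty (new) history.
        if user_positions.len() <= n {
            return None;
        }

        // Cut strictly before the nth user message; the nth itself is dropped.
        let cut_idx = user_positions[n];
        Some(items.into_iter().take(cut_idx).collect())
    }

    fn main() {
        let items = vec![
            Item::UserMessage("u0".into()),
            Item::Other("a0".into()),
            Item::UserMessage("u1".into()),
            Item::Other("a1".into()),
        ];
        // n = 1 keeps everything before the second user message.
        assert_eq!(
            prefix_before_nth_user_message(items.clone(), 1),
            Some(vec![Item::UserMessage("u0".into()), Item::Other("a0".into())])
        );
        // n = 2 is out of range when only two user messages exist.
        assert_eq!(prefix_before_nth_user_message(items, 2), None);
    }
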
14 changes: 6 additions & 8 deletions codex-rs/core/tests/suite/fork_conversation.rs
@@ -104,7 +104,8 @@ async fn fork_conversation_twice_drops_to_first_message() {
items
};

// Compute expected prefixes after each fork by truncating base rollout at nth-from-last user input.
// Compute expected prefixes after each fork by truncating base rollout
// strictly before the nth user input (0-based).
let base_items = read_items(&base_path);
let find_user_input_positions = |items: &[RolloutItem]| -> Vec<usize> {
let mut pos = Vec::new();
@@ -126,11 +127,8 @@ async fn fork_conversation_twice_drops_to_first_message() {
};
let user_inputs = find_user_input_positions(&base_items);

// After dropping last user input (n=1), cut strictly before that input if present, else empty.
let cut1 = user_inputs
.get(user_inputs.len().saturating_sub(1))
.copied()
.unwrap_or(0);
// After cutting at nth user input (n=1 → second user message), cut strictly before that input.
let cut1 = user_inputs.get(1).copied().unwrap_or(0);
let expected_after_first: Vec<RolloutItem> = base_items[..cut1].to_vec();

// After dropping again (n=1 on fork1), compute expected relative to fork1's rollout.
@@ -161,12 +159,12 @@ async fn fork_conversation_twice_drops_to_first_message() {
serde_json::to_value(&expected_after_first).unwrap()
);

// Fork again with n=1 → drops the (new) last user message, leaving only the first.
// Fork again with n=0 → drops the (new) last user message, leaving only the first.
let NewConversation {
conversation: codex_fork2,
..
} = conversation_manager
.fork_conversation(1, config_for_fork.clone(), fork1_path.clone())
.fork_conversation(0, config_for_fork.clone(), fork1_path.clone())
.await
.expect("fork 2");

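
For reference, the index arithmetic the updated test relies on, shown in isolation (a sketch with string stand-ins for rollout items; `user_inputs` plays the role of the output of the test's `find_user_input_positions` closure):

    fn main() {
        // Stand-in rollout: index 0 is session meta, indices 1 and 3 are user inputs.
        let base_items = vec!["meta", "user-0", "reply-0", "user-1", "reply-1"];
        let user_inputs = vec![1usize, 3];

        // n = 1 → cut strictly before the second user input.
        let cut1 = user_inputs.get(1).copied().unwrap_or(0);
        let expected_after_first = base_items[..cut1].to_vec();
        assert_eq!(expected_after_first, vec!["meta", "user-0", "reply-0"]);
    }
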
21 changes: 9 additions & 12 deletions codex-rs/tui/src/app.rs
@@ -3,6 +3,7 @@ use crate::app_event::AppEvent;
use crate::app_event_sender::AppEventSender;
use crate::chatwidget::ChatWidget;
use crate::file_search::FileSearchManager;
use crate::history_cell::HistoryCell;
use crate::pager_overlay::Overlay;
use crate::resume_picker::ResumeSelection;
use crate::tui;
@@ -46,7 +47,7 @@ pub(crate) struct App {

pub(crate) file_search: FileSearchManager,

pub(crate) transcript_lines: Vec<Line<'static>>,
pub(crate) transcript_cells: Vec<Arc<dyn HistoryCell>>,

// Pager overlay state (Transcript or Static like Diff)
pub(crate) overlay: Option<Overlay>,
@@ -131,7 +132,7 @@ impl App {
model_saved_to_global: false,
file_search,
enhanced_keys_supported,
transcript_lines: Vec::new(),
transcript_cells: Vec::new(),
overlay: None,
deferred_history_lines: Vec::new(),
has_emitted_history_lines: false,
@@ -213,15 +214,12 @@ impl App {
tui.frame_requester().schedule_frame();
}
AppEvent::InsertHistoryCell(cell) => {
let mut cell_transcript = cell.transcript_lines();
if !cell.is_stream_continuation() && !self.transcript_lines.is_empty() {
cell_transcript.insert(0, Line::from(""));
}
let cell: Arc<dyn HistoryCell> = cell.into();
if let Some(Overlay::Transcript(t)) = &mut self.overlay {
t.insert_lines(cell_transcript.clone());
t.insert_cell(cell.clone());
tui.frame_requester().schedule_frame();
}
self.transcript_lines.extend(cell_transcript.clone());
self.transcript_cells.push(cell.clone());
let mut display = cell.display_lines(tui.terminal.last_known_screen_size.width);
if !display.is_empty() {
// Only insert a separating blank line for new cells that are not
@@ -437,7 +435,7 @@ impl App {
} => {
// Enter alternate screen and set viewport to full size.
let _ = tui.enter_alt_screen();
self.overlay = Some(Overlay::new_transcript(self.transcript_lines.clone()));
self.overlay = Some(Overlay::new_transcript(self.transcript_cells.clone()));
tui.frame_requester().schedule_frame();
}
KeyEvent {
Expand Down Expand Up @@ -470,7 +468,7 @@ impl App {
kind: KeyEventKind::Press,
..
} if self.backtrack.primed
&& self.backtrack.count > 0
&& self.backtrack.nth_user_message != usize::MAX
&& self.chat_widget.composer_is_empty() =>
{
// Delegate to helper for clarity; preserves behavior.
@@ -503,7 +501,6 @@ mod tests {
use crate::file_search::FileSearchManager;
use codex_core::CodexAuth;
use codex_core::ConversationManager;
use ratatui::text::Line;
use std::sync::Arc;
use std::sync::atomic::AtomicBool;

@@ -525,7 +522,7 @@
model_saved_to_profile: false,
model_saved_to_global: false,
file_search,
transcript_lines: Vec::<Line<'static>>::new(),
transcript_cells: Vec::new(),
overlay: None,
deferred_history_lines: Vec::new(),
has_emitted_history_lines: false,
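
The TUI side stops storing pre-rendered transcript lines and keeps the history cells themselves, so the transcript overlay can be re-rendered at the current terminal width via `display_lines(width)`. A simplified sketch of that shape, with a hypothetical minimal `HistoryCell` trait and plain `String`s standing in for ratatui's `Line` type (the real trait lives in `crate::history_cell` and has more methods):

    use std::sync::Arc;

    // Hypothetical stand-in for the real trait, which returns ratatui `Line`s.
    trait HistoryCell {
        fn display_lines(&self, width: u16) -> Vec<String>;
        fn is_stream_continuation(&self) -> bool {
            false
        }
    }

    struct TextCell {
        text: String,
    }

    impl HistoryCell for TextCell {
        fn display_lines(&self, width: u16) -> Vec<String> {
            // Naive byte-based wrapping, purely for illustration.
            self.text
                .as_bytes()
                .chunks(width.max(1) as usize)
                .map(|chunk| String::from_utf8_lossy(chunk).into_owned())
                .collect()
        }
    }

    struct App {
        // The cells themselves are stored, not pre-rendered lines, so the
        // transcript can adapt to whatever width the terminal has now.
        transcript_cells: Vec<Arc<dyn HistoryCell>>,
    }

    impl App {
        fn insert_history_cell(&mut self, cell: Arc<dyn HistoryCell>, width: u16) -> Vec<String> {
            let mut display = cell.display_lines(width);
            // Separate a new cell from the previous one with a blank line,
            // unless it continues a streamed message (mirrors the check above).
            if !display.is_empty()
                && !cell.is_stream_continuation()
                && !self.transcript_cells.is_empty()
            {
                display.insert(0, String::new());
            }
            self.transcript_cells.push(cell);
            display
        }
    }

    fn main() {
        let mut app = App { transcript_cells: Vec::new() };
        let first = app.insert_history_cell(Arc::new(TextCell { text: "hello world".into() }), 8);
        let second = app.insert_history_cell(Arc::new(TextCell { text: "second cell".into() }), 8);
        println!("{first:?}\n{second:?}");
    }
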