From 89a31023fffa9789bb8dfb29448047a007a5064c Mon Sep 17 00:00:00 2001
From: jimmyfraiture
Date: Fri, 12 Sep 2025 13:52:04 -0700
Subject: [PATCH] Fix NUX UI

---
 codex-rs/tui/src/app.rs             | 32 +++++++++++++++++---------------
 codex-rs/tui/src/chatwidget.rs      |  4 ++--
 codex-rs/tui/src/history_cell.rs    | 10 +++++++---
 codex-rs/tui/src/new_model_popup.rs | 10 ++++++----
 4 files changed, 32 insertions(+), 24 deletions(-)

diff --git a/codex-rs/tui/src/app.rs b/codex-rs/tui/src/app.rs
index 2b22396f43..2cb49b1849 100644
--- a/codex-rs/tui/src/app.rs
+++ b/codex-rs/tui/src/app.rs
@@ -327,13 +327,15 @@ impl App {
     fn show_model_save_hint(&mut self) {
         let model = self.config.model.clone();
         if self.active_profile.is_some() {
-            self.chat_widget.add_info_message(format!(
-                "Model switched to {model}. Press Ctrl+S to save it for this profile, then press Ctrl+S again to set it as your global default."
-            ));
+            self.chat_widget.add_info_message(
+                format!("Model changed to {model} for the current session"),
+                Some("(ctrl+s to set as profile default)".to_string()),
+            );
         } else {
-            self.chat_widget.add_info_message(format!(
-                "Model switched to {model}. Press Ctrl+S to save it as your global default."
-            ));
+            self.chat_widget.add_info_message(
+                format!("Model changed to {model} for the current session"),
+                Some("(ctrl+s to set as default)".to_string()),
+            );
         }
     }
 
@@ -372,9 +374,6 @@ impl App {
 
         let model = self.config.model.clone();
         let effort = self.config.model_reasoning_effort;
-        let effort_label = effort
-            .map(|effort| effort.to_string())
-            .unwrap_or_else(|| "none".to_string());
         let codex_home = self.config.codex_home.clone();
 
         match scope {
@@ -382,9 +381,10 @@ impl App {
                 match persist_model_selection(&codex_home, Some(profile), &model, effort).await {
                     Ok(()) => {
                         self.model_saved_to_profile = true;
-                        self.chat_widget.add_info_message(format!(
-                            "Saved model {model} ({effort_label}) for profile `{profile}`. Press Ctrl+S again to make this your global default."
-                        ));
+                        self.chat_widget.add_info_message(
+                            format!("Profile model changed to {model} for all sessions"),
+                            Some("(view global config in config.toml)".to_string()),
+                        );
                     }
                     Err(err) => {
                         tracing::error!(
@@ -401,9 +401,10 @@ impl App {
                 match persist_model_selection(&codex_home, None, &model, effort).await {
                     Ok(()) => {
                         self.model_saved_to_global = true;
-                        self.chat_widget.add_info_message(format!(
-                            "Saved model {model} ({effort_label}) as your global default."
-                        ));
+                        self.chat_widget.add_info_message(
+                            format!("Default model changed to {model} for all sessions"),
+                            Some("(view global config in config.toml)".to_string()),
+                        )
                     }
                     Err(err) => {
                         tracing::error!(
@@ -420,6 +421,7 @@ impl App {
                 self.chat_widget.add_info_message(
                     "Model preference already saved globally; no further action needed."
                         .to_string(),
+                    None,
                 );
             }
         }
diff --git a/codex-rs/tui/src/chatwidget.rs b/codex-rs/tui/src/chatwidget.rs
index f47467e4c1..b6cbdde2e9 100644
--- a/codex-rs/tui/src/chatwidget.rs
+++ b/codex-rs/tui/src/chatwidget.rs
@@ -1277,8 +1277,8 @@ impl ChatWidget {
         self.config.model = model;
     }
 
-    pub(crate) fn add_info_message(&mut self, message: String) {
-        self.add_to_history(history_cell::new_info_event(message));
+    pub(crate) fn add_info_message(&mut self, message: String, hint: Option<String>) {
+        self.add_to_history(history_cell::new_info_event(message, hint));
         self.request_redraw();
     }
 
diff --git a/codex-rs/tui/src/history_cell.rs b/codex-rs/tui/src/history_cell.rs
index 42a79d60c5..f2bdc08a1f 100644
--- a/codex-rs/tui/src/history_cell.rs
+++ b/codex-rs/tui/src/history_cell.rs
@@ -1053,9 +1053,13 @@ pub(crate) fn new_mcp_tools_output(
     PlainHistoryCell { lines }
 }
 
-pub(crate) fn new_info_event(message: String) -> PlainHistoryCell {
-    let lines: Vec<Line<'static>> =
-        vec![vec![padded_emoji("💾").green(), " ".into(), message.into()].into()];
+pub(crate) fn new_info_event(message: String, hint: Option<String>) -> PlainHistoryCell {
+    let mut line = vec!["> ".into(), message.into()];
+    if let Some(hint) = hint {
+        line.push(" ".into());
+        line.push(hint.dark_gray());
+    }
+    let lines: Vec<Line<'static>> = vec![line.into()];
     PlainHistoryCell { lines }
 }
 
diff --git a/codex-rs/tui/src/new_model_popup.rs b/codex-rs/tui/src/new_model_popup.rs
index e89fb67ef5..ff09573e9f 100644
--- a/codex-rs/tui/src/new_model_popup.rs
+++ b/codex-rs/tui/src/new_model_popup.rs
@@ -82,12 +82,14 @@ impl WidgetRef for &ModelUpgradePopup {
         Clear.render(area, buf);
 
         let mut lines: Vec<Line> = vec![
+            String::new().into(),
+            format!(" Codex is now powered by {GPT5_HIGH_MODEL}, a new model that is").into(),
             Line::from(vec![
-                "> ".into(),
-                format!("Try {GPT5_HIGH_MODEL} as your default model").bold(),
+                " ".into(),
+                "faster, a better collaborator, ".bold(),
+                "and ".into(),
+                "more steerable.".bold(),
             ]),
-            format!(" {GPT5_HIGH_MODEL} is our latest model tuned for coding workflows.").into(),
-            " Switch now or keep your current default – you can change models any time.".into(),
             "".into(),
         ];
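
Usage note (not part of the diff): a minimal sketch of the call pattern this patch
introduces, for anyone updating other call sites of add_info_message. The surrounding
variables (self.chat_widget, model) are assumed from the app.rs context above; the
message strings are illustrative only.

    // add_info_message now takes the message plus an optional dimmed hint,
    // rendered by history_cell::new_info_event as "> message (hint)".
    self.chat_widget.add_info_message(
        format!("Model changed to {model} for the current session"),
        Some("(ctrl+s to set as default)".to_string()),
    );

    // Pass None when there is no hint to append after the message.
    self.chat_widget.add_info_message(
        "Model preference already saved globally; no further action needed.".to_string(),
        None,
    );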