From 021e39b3031de7661245eff61ed9bbe11ffca531 Mon Sep 17 00:00:00 2001 From: Charley Cunningham Date: Fri, 20 Feb 2026 15:22:10 -0800 Subject: [PATCH] Show model/reasoning hint when switching modes (#12307) MIME-Version: 1.0 Content-Type: text/plain; charset=UTF-8 Content-Transfer-Encoding: 8bit ## Summary - show an info message when switching collaboration modes changes the effective model or reasoning - include the target mode in the message (for example `... for Plan mode.`) - add TUI tests for model-change and reasoning-only change notifications on mode switch Screenshot 2026-02-20 at 2 01 40 PM --- codex-rs/tui/src/chatwidget.rs | 27 +++++++++++ codex-rs/tui/src/chatwidget/tests.rs | 67 +++++++++++++++++++++++++++- 2 files changed, 93 insertions(+), 1 deletion(-) diff --git a/codex-rs/tui/src/chatwidget.rs b/codex-rs/tui/src/chatwidget.rs index 7dcab8010..ca5aa7c62 100644 --- a/codex-rs/tui/src/chatwidget.rs +++ b/codex-rs/tui/src/chatwidget.rs @@ -6343,9 +6343,36 @@ impl ChatWidget { if !self.collaboration_modes_enabled() { return; } + let previous_mode = self.active_mode_kind(); + let previous_model = self.current_model().to_string(); + let previous_effort = self.effective_reasoning_effort(); self.active_collaboration_mask = Some(mask); self.update_collaboration_mode_indicator(); self.refresh_model_display(); + let next_mode = self.active_mode_kind(); + let next_model = self.current_model(); + let next_effort = self.effective_reasoning_effort(); + if previous_mode != next_mode + && (previous_model != next_model || previous_effort != next_effort) + { + let mut message = format!("Model changed to {next_model}"); + if !next_model.starts_with("codex-auto-") { + let reasoning_label = match next_effort { + Some(ReasoningEffortConfig::Minimal) => "minimal", + Some(ReasoningEffortConfig::Low) => "low", + Some(ReasoningEffortConfig::Medium) => "medium", + Some(ReasoningEffortConfig::High) => "high", + Some(ReasoningEffortConfig::XHigh) => "xhigh", + None | 
Some(ReasoningEffortConfig::None) => "default", + }; + message.push(' '); + message.push_str(reasoning_label); + } + message.push_str(" for "); + message.push_str(next_mode.display_name()); + message.push_str(" mode."); + self.add_info_message(message, None); + } self.request_redraw(); } diff --git a/codex-rs/tui/src/chatwidget/tests.rs b/codex-rs/tui/src/chatwidget/tests.rs index d6d2ab1ad..edf0e0168 100644 --- a/codex-rs/tui/src/chatwidget/tests.rs +++ b/codex-rs/tui/src/chatwidget/tests.rs @@ -3584,6 +3584,66 @@ async fn collab_mode_shift_tab_cycles_only_when_enabled_and_idle() { assert_eq!(chat.active_collaboration_mode_kind(), before); } +#[tokio::test] +async fn mode_switch_surfaces_model_change_notification_when_effective_model_changes() { + let (mut chat, mut rx, _op_rx) = make_chatwidget_manual(Some("gpt-5")).await; + chat.set_feature_enabled(Feature::CollaborationModes, true); + let default_model = chat.current_model().to_string(); + + let mut plan_mask = + collaboration_modes::mask_for_kind(chat.models_manager.as_ref(), ModeKind::Plan) + .expect("expected plan collaboration mode"); + plan_mask.model = Some("gpt-5.1-codex-mini".to_string()); + chat.set_collaboration_mask(plan_mask); + + let plan_messages = drain_insert_history(&mut rx) + .iter() + .map(|lines| lines_to_single_string(lines)) + .collect::<Vec<_>>() + .join("\n"); + assert!( + plan_messages.contains("Model changed to gpt-5.1-codex-mini medium for Plan mode."), + "expected Plan-mode model switch notice, got: {plan_messages:?}" + ); + + let default_mask = collaboration_modes::default_mask(chat.models_manager.as_ref()) + .expect("expected default collaboration mode"); + chat.set_collaboration_mask(default_mask); + + let default_messages = drain_insert_history(&mut rx) + .iter() + .map(|lines| lines_to_single_string(lines)) + .collect::<Vec<_>>() + .join("\n"); + let expected_default_message = + format!("Model changed to {default_model} default for Default mode."); + assert!( + 
default_messages.contains(&expected_default_message), + "expected Default-mode model switch notice, got: {default_messages:?}" + ); +} + +#[tokio::test] +async fn mode_switch_surfaces_reasoning_change_notification_when_model_stays_same() { + let (mut chat, mut rx, _op_rx) = make_chatwidget_manual(Some("gpt-5.3-codex")).await; + chat.set_feature_enabled(Feature::CollaborationModes, true); + chat.set_reasoning_effort(Some(ReasoningEffortConfig::High)); + + let plan_mask = collaboration_modes::plan_mask(chat.models_manager.as_ref()) + .expect("expected plan collaboration mode"); + chat.set_collaboration_mask(plan_mask); + + let plan_messages = drain_insert_history(&mut rx) + .iter() + .map(|lines| lines_to_single_string(lines)) + .collect::<Vec<_>>() + .join("\n"); + assert!( + plan_messages.contains("Model changed to gpt-5.3-codex medium for Plan mode."), + "expected reasoning-change notice in Plan mode, got: {plan_messages:?}" + ); +} + #[tokio::test] async fn collab_slash_command_opens_picker_and_updates_mode() { let (mut chat, mut rx, mut op_rx) = make_chatwidget_manual(None).await; @@ -3649,7 +3709,12 @@ async fn plan_slash_command_switches_to_plan_mode() { chat.dispatch_command(SlashCommand::Plan); - assert!(rx.try_recv().is_err(), "plan should not emit an app event"); + while let Ok(event) = rx.try_recv() { + assert!( + matches!(event, AppEvent::InsertHistoryCell(_)), + "plan should not emit a non-history app event: {event:?}" + ); + } assert_eq!(chat.active_collaboration_mode_kind(), ModeKind::Plan); assert_eq!(chat.current_collaboration_mode(), &initial); }