Show model/reasoning hint when switching modes (#12307)

## Summary
- show an info message when switching collaboration modes changes the
effective model or reasoning
- include the target mode in the message (for example `... for Plan
mode.`)
- add TUI tests for model-change and reasoning-only change notifications
on mode switch

<img width="715" height="184" alt="Screenshot 2026-02-20 at 2 01 40 PM"
src="https://github.com/user-attachments/assets/18d1beb3-ab87-4e1c-9ada-a10218520420"
/>
This commit is contained in:
Charley Cunningham 2026-02-20 15:22:10 -08:00 committed by GitHub
parent 65b9fe8f30
commit 021e39b303
No known key found for this signature in database
GPG key ID: B5690EEEBB952194
2 changed files with 93 additions and 1 deletion

View file

@@ -6343,9 +6343,36 @@ impl ChatWidget {
// NOTE(review): fragment of a ChatWidget method — the fn signature sits above
// this hunk. It applies a new collaboration mask and, when the switch changes
// the effective model or reasoning effort, surfaces an info message.
if !self.collaboration_modes_enabled() {
return;
}
// Snapshot pre-switch state so we can detect what the mask change altered.
let previous_mode = self.active_mode_kind();
let previous_model = self.current_model().to_string();
let previous_effort = self.effective_reasoning_effort();
// Apply the mask, then refresh the UI elements derived from it.
self.active_collaboration_mask = Some(mask);
self.update_collaboration_mode_indicator();
self.refresh_model_display();
let next_mode = self.active_mode_kind();
let next_model = self.current_model();
let next_effort = self.effective_reasoning_effort();
// Notify only when the mode actually changed AND that change altered the
// effective model or reasoning effort; re-applying the same mode stays silent.
if previous_mode != next_mode
&& (previous_model != next_model || previous_effort != next_effort)
{
let mut message = format!("Model changed to {next_model}");
// "codex-auto-" models get no reasoning label — presumably the effort is
// chosen automatically for them; TODO confirm against model config.
if !next_model.starts_with("codex-auto-") {
let reasoning_label = match next_effort {
Some(ReasoningEffortConfig::Minimal) => "minimal",
Some(ReasoningEffortConfig::Low) => "low",
Some(ReasoningEffortConfig::Medium) => "medium",
Some(ReasoningEffortConfig::High) => "high",
Some(ReasoningEffortConfig::XHigh) => "xhigh",
// Both "no override" and an explicit None effort read as "default".
None | Some(ReasoningEffortConfig::None) => "default",
};
message.push(' ');
message.push_str(reasoning_label);
}
// Final shape: "Model changed to <model> [<effort>] for <Mode> mode."
message.push_str(" for ");
message.push_str(next_mode.display_name());
message.push_str(" mode.");
self.add_info_message(message, None);
}
self.request_redraw();
}

View file

@@ -3584,6 +3584,66 @@ async fn collab_mode_shift_tab_cycles_only_when_enabled_and_idle() {
assert_eq!(chat.active_collaboration_mode_kind(), before);
}
#[tokio::test]
async fn mode_switch_surfaces_model_change_notification_when_effective_model_changes() {
    // A mode switch whose mask overrides the model must surface an info message
    // naming the new model, its reasoning level, and the target mode — in both
    // directions (into Plan, then back to Default).
    let (mut chat, mut rx, _op_rx) = make_chatwidget_manual(Some("gpt-5")).await;
    chat.set_feature_enabled(Feature::CollaborationModes, true);
    let default_model = chat.current_model().to_string();

    // Enter Plan mode with an explicit model override on the mask.
    let mut mask_for_plan =
        collaboration_modes::mask_for_kind(chat.models_manager.as_ref(), ModeKind::Plan)
            .expect("expected plan collaboration mode");
    mask_for_plan.model = Some("gpt-5.1-codex-mini".to_string());
    chat.set_collaboration_mask(mask_for_plan);

    // Flatten every inserted history cell into one newline-joined transcript.
    let mut plan_messages = String::new();
    for (idx, cell_lines) in drain_insert_history(&mut rx).iter().enumerate() {
        if idx > 0 {
            plan_messages.push('\n');
        }
        plan_messages.push_str(&lines_to_single_string(cell_lines));
    }
    assert!(
        plan_messages.contains("Model changed to gpt-5.1-codex-mini medium for Plan mode."),
        "expected Plan-mode model switch notice, got: {plan_messages:?}"
    );

    // Switching back to the default mask restores the original model and must
    // announce that change as well.
    let mask_for_default = collaboration_modes::default_mask(chat.models_manager.as_ref())
        .expect("expected default collaboration mode");
    chat.set_collaboration_mask(mask_for_default);
    let mut default_messages = String::new();
    for (idx, cell_lines) in drain_insert_history(&mut rx).iter().enumerate() {
        if idx > 0 {
            default_messages.push('\n');
        }
        default_messages.push_str(&lines_to_single_string(cell_lines));
    }
    let expected_notice =
        format!("Model changed to {default_model} default for Default mode.");
    assert!(
        default_messages.contains(&expected_notice),
        "expected Default-mode model switch notice, got: {default_messages:?}"
    );
}
#[tokio::test]
async fn mode_switch_surfaces_reasoning_change_notification_when_model_stays_same() {
    // Even when the model itself does not change, a mode switch that alters the
    // effective reasoning effort must still emit the notice (the expected
    // message shows "medium" after we start from an explicit High).
    let (mut chat, mut rx, _op_rx) = make_chatwidget_manual(Some("gpt-5.3-codex")).await;
    chat.set_feature_enabled(Feature::CollaborationModes, true);
    chat.set_reasoning_effort(Some(ReasoningEffortConfig::High));

    let mask = collaboration_modes::plan_mask(chat.models_manager.as_ref())
        .expect("expected plan collaboration mode");
    chat.set_collaboration_mask(mask);

    // Flatten the inserted history cells into one newline-joined transcript.
    let mut plan_messages = String::new();
    for (idx, cell_lines) in drain_insert_history(&mut rx).iter().enumerate() {
        if idx > 0 {
            plan_messages.push('\n');
        }
        plan_messages.push_str(&lines_to_single_string(cell_lines));
    }
    assert!(
        plan_messages.contains("Model changed to gpt-5.3-codex medium for Plan mode."),
        "expected reasoning-change notice in Plan mode, got: {plan_messages:?}"
    );
}
#[tokio::test]
async fn collab_slash_command_opens_picker_and_updates_mode() {
let (mut chat, mut rx, mut op_rx) = make_chatwidget_manual(None).await;
@@ -3649,7 +3709,12 @@ async fn plan_slash_command_switches_to_plan_mode() {
chat.dispatch_command(SlashCommand::Plan);
assert!(rx.try_recv().is_err(), "plan should not emit an app event");
while let Ok(event) = rx.try_recv() {
assert!(
matches!(event, AppEvent::InsertHistoryCell(_)),
"plan should not emit a non-history app event: {event:?}"
);
}
assert_eq!(chat.active_collaboration_mode_kind(), ModeKind::Plan);
assert_eq!(chat.current_collaboration_mode(), &initial);
}