diff --git a/codex-rs/app-server/src/codex_message_processor.rs b/codex-rs/app-server/src/codex_message_processor.rs index 2f3660239..44a74a067 100644 --- a/codex-rs/app-server/src/codex_message_processor.rs +++ b/codex-rs/app-server/src/codex_message_processor.rs @@ -6493,6 +6493,14 @@ fn collect_resume_override_mismatches( config_snapshot.model_provider_id )); } + if let Some(requested_service_tier) = request.service_tier.as_ref() + && requested_service_tier != &config_snapshot.service_tier + { + mismatch_details.push(format!( + "service_tier requested={requested_service_tier:?} active={:?}", + config_snapshot.service_tier + )); + } if let Some(requested_cwd) = request.cwd.as_deref() { let requested_cwd_path = std::path::PathBuf::from(requested_cwd); if requested_cwd_path != config_snapshot.cwd { @@ -7259,6 +7267,43 @@ mod tests { validate_dynamic_tools(&tools).expect("valid schema"); } + #[test] + fn collect_resume_override_mismatches_includes_service_tier() { + let request = ThreadResumeParams { + thread_id: "thread-1".to_string(), + history: None, + path: None, + model: None, + model_provider: None, + service_tier: Some(Some(codex_protocol::config_types::ServiceTier::Fast)), + cwd: None, + approval_policy: None, + sandbox: None, + config: None, + base_instructions: None, + developer_instructions: None, + personality: None, + persist_extended_history: false, + }; + let config_snapshot = ThreadConfigSnapshot { + model: "gpt-5".to_string(), + model_provider_id: "openai".to_string(), + service_tier: Some(codex_protocol::config_types::ServiceTier::Flex), + approval_policy: codex_protocol::protocol::AskForApproval::OnRequest, + sandbox_policy: codex_protocol::protocol::SandboxPolicy::DangerFullAccess, + cwd: PathBuf::from("/tmp"), + ephemeral: false, + reasoning_effort: None, + personality: None, + session_source: SessionSource::Cli, + }; + + assert_eq!( + collect_resume_override_mismatches(&request, &config_snapshot), + vec!["service_tier requested=Some(Fast) 
active=Some(Flex)".to_string()] + ); + } + #[test] fn extract_conversation_summary_prefers_plain_user_messages() -> Result<()> { let conversation_id = ThreadId::from_string("3f941c35-29b3-493b-b0a4-e25800d9aeb0")?; diff --git a/codex-rs/tui/src/app.rs b/codex-rs/tui/src/app.rs index fe8aca54f..5722c5020 100644 --- a/codex-rs/tui/src/app.rs +++ b/codex-rs/tui/src/app.rs @@ -806,6 +806,8 @@ impl App { history_cell::SessionHeaderHistoryCell::new( self.chat_widget.current_model().to_string(), self.chat_widget.current_reasoning_effort(), + self.chat_widget + .should_show_fast_status(self.chat_widget.current_service_tier()), self.config.cwd.clone(), version, ) @@ -5178,6 +5180,7 @@ mod tests { is_first, None, None, + false, )) as Arc<dyn HistoryCell> }; @@ -5827,6 +5830,7 @@ mod tests { is_first, None, None, + false, )) as Arc<dyn HistoryCell> }; diff --git a/codex-rs/tui/src/chatwidget.rs b/codex-rs/tui/src/chatwidget.rs index 7a71d4064..5d0a9dbec 100644 --- a/codex-rs/tui/src/chatwidget.rs +++ b/codex-rs/tui/src/chatwidget.rs @@ -1224,6 +1224,7 @@ impl ChatWidget { self.sync_fast_command_enabled(); self.sync_personality_command_enabled(); let startup_tooltip_override = self.startup_tooltip_override.take(); + let show_fast_status = self.should_show_fast_status(event.service_tier); let session_info_cell = history_cell::new_session_info( &self.config, &model_for_header, @@ -1233,6 +1234,7 @@ impl ChatWidget { self.auth_manager .auth_cached() .and_then(|auth| auth.account_plan_type()), + show_fast_status, ); self.apply_session_info_cell(session_info_cell); @@ -7221,6 +7223,15 @@ impl ChatWidget { self.config.service_tier } + pub(crate) fn should_show_fast_status(&self, service_tier: Option<ServiceTier>) -> bool { + matches!(service_tier, Some(ServiceTier::Fast)) + && self + .auth_manager + .auth_cached() + .as_ref() + .is_some_and(CodexAuth::is_chatgpt_auth) + } + fn fast_mode_enabled(&self) -> bool { self.config.features.enabled(Feature::FastMode) } @@ -7555,6 +7566,7 @@ impl ChatWidget { 
DEFAULT_MODEL_DISPLAY_NAME.to_string(), placeholder_style, None, + false, config.cwd.clone(), CODEX_CLI_VERSION, )) diff --git a/codex-rs/tui/src/chatwidget/tests.rs b/codex-rs/tui/src/chatwidget/tests.rs index 72fa61c13..6d8ecc78f 100644 --- a/codex-rs/tui/src/chatwidget/tests.rs +++ b/codex-rs/tui/src/chatwidget/tests.rs @@ -7458,6 +7458,26 @@ async fn user_turn_carries_service_tier_after_fast_toggle() { } } +#[tokio::test] +async fn fast_status_indicator_requires_chatgpt_auth() { + let (mut chat, _rx, _op_rx) = make_chatwidget_manual(Some("gpt-5.3-codex")).await; + chat.set_service_tier(Some(ServiceTier::Fast)); + + assert!(!chat.should_show_fast_status(chat.current_service_tier())); + + set_chatgpt_auth(&mut chat); + + assert!(chat.should_show_fast_status(chat.current_service_tier())); +} + +#[tokio::test] +async fn fast_status_indicator_is_hidden_when_fast_mode_is_off() { + let (mut chat, _rx, _op_rx) = make_chatwidget_manual(Some("gpt-5.3-codex")).await; + set_chatgpt_auth(&mut chat); + + assert!(!chat.should_show_fast_status(chat.current_service_tier())); +} + #[tokio::test] async fn approvals_popup_shows_disabled_presets() { let (mut chat, _rx, _op_rx) = make_chatwidget_manual(None).await; diff --git a/codex-rs/tui/src/history_cell.rs b/codex-rs/tui/src/history_cell.rs index b5e839c32..3a568a0f8 100644 --- a/codex-rs/tui/src/history_cell.rs +++ b/codex-rs/tui/src/history_cell.rs @@ -1047,6 +1047,7 @@ pub(crate) fn new_session_info( is_first_event: bool, tooltip_override: Option<String>, auth_plan: Option<PlanType>, + show_fast_status: bool, ) -> SessionInfoCell { let SessionConfiguredEvent { model, @@ -1057,6 +1058,7 @@ pub(crate) fn new_session_info( let header = SessionHeaderHistoryCell::new( model.clone(), reasoning_effort, + show_fast_status, config.cwd.clone(), CODEX_CLI_VERSION, ); @@ -1143,6 +1145,7 @@ pub(crate) struct SessionHeaderHistoryCell { model: String, model_style: Style, reasoning_effort: Option<ReasoningEffortConfig>, + show_fast_status: bool, directory: PathBuf, } @@ -1150,6 
+1153,7 @@ impl SessionHeaderHistoryCell { pub(crate) fn new( model: String, reasoning_effort: Option<ReasoningEffortConfig>, + show_fast_status: bool, directory: PathBuf, version: &'static str, ) -> Self { @@ -1157,6 +1161,7 @@ impl SessionHeaderHistoryCell { model, Style::default(), reasoning_effort, + show_fast_status, directory, version, ) } @@ -1166,6 +1171,7 @@ impl SessionHeaderHistoryCell { model: String, model_style: Style, reasoning_effort: Option<ReasoningEffortConfig>, + show_fast_status: bool, directory: PathBuf, version: &'static str, ) -> Self { @@ -1174,6 +1180,7 @@ impl SessionHeaderHistoryCell { model, model_style, reasoning_effort, + show_fast_status, directory, } } @@ -1253,6 +1260,10 @@ impl HistoryCell for SessionHeaderHistoryCell { spans.push(Span::from(" ")); spans.push(Span::from(reasoning)); } + if self.show_fast_status { + spans.push(" ".into()); + spans.push(Span::styled("fast", self.model_style.magenta())); + } spans.push(" ".dim()); spans.push(CHANGE_MODEL_HINT_COMMAND.cyan()); spans.push(CHANGE_MODEL_HINT_EXPLANATION.dim()); @@ -2612,6 +2623,7 @@ mod tests { false, Some("Model just became available".to_string()), Some(PlanType::Free), + false, ); let rendered = render_transcript(&cell).join("\n"); @@ -2629,6 +2641,7 @@ mod tests { false, Some("Model just became available".to_string()), Some(PlanType::Free), + false, ); let rendered = render_transcript(&cell).join("\n"); @@ -2645,6 +2658,7 @@ mod tests { true, Some("Model just became available".to_string()), Some(PlanType::Free), + false, ); let rendered = render_transcript(&cell).join("\n"); @@ -2663,6 +2677,7 @@ mod tests { false, Some("Model just became available".to_string()), Some(PlanType::Free), + false, ); let rendered = render_transcript(&cell).join("\n"); @@ -3295,18 +3310,39 @@ mod tests { let cell = SessionHeaderHistoryCell::new( "gpt-4o".to_string(), Some(ReasoningEffortConfig::High), + true, std::env::temp_dir(), "test", ); let lines = render_lines(&cell.display_lines(80)); let model_line = lines - .into_iter() + 
.iter() + .find(|line| line.contains("model:")) + .expect("model line"); + + assert!(model_line.contains("gpt-4o high fast")); + assert!(model_line.contains("/model to change")); + } + + #[test] + fn session_header_hides_fast_status_when_disabled() { + let cell = SessionHeaderHistoryCell::new( + "gpt-4o".to_string(), + Some(ReasoningEffortConfig::High), + false, + std::env::temp_dir(), + "test", + ); + + let lines = render_lines(&cell.display_lines(80)); + let model_line = lines + .iter() .find(|line| line.contains("model:")) .expect("model line"); assert!(model_line.contains("gpt-4o high")); - assert!(model_line.contains("/model to change")); + assert!(!model_line.contains("fast")); } #[test] diff --git a/codex-rs/tui/src/snapshots/codex_tui__app__tests__clear_ui_after_long_transcript_fresh_header_only.snap b/codex-rs/tui/src/snapshots/codex_tui__app__tests__clear_ui_after_long_transcript_fresh_header_only.snap index abb423ff1..98d601506 100644 --- a/codex-rs/tui/src/snapshots/codex_tui__app__tests__clear_ui_after_long_transcript_fresh_header_only.snap +++ b/codex-rs/tui/src/snapshots/codex_tui__app__tests__clear_ui_after_long_transcript_fresh_header_only.snap @@ -1,6 +1,5 @@ --- source: tui/src/app.rs -assertion_line: 3452 expression: rendered --- ╭─────────────────────────────────────────────╮ diff --git a/codex-rs/tui/src/snapshots/codex_tui__history_cell__tests__session_info_availability_nux_tooltip_snapshot.snap b/codex-rs/tui/src/snapshots/codex_tui__history_cell__tests__session_info_availability_nux_tooltip_snapshot.snap index 82869c017..ad0fc4be1 100644 --- a/codex-rs/tui/src/snapshots/codex_tui__history_cell__tests__session_info_availability_nux_tooltip_snapshot.snap +++ b/codex-rs/tui/src/snapshots/codex_tui__history_cell__tests__session_info_availability_nux_tooltip_snapshot.snap @@ -1,6 +1,5 @@ --- source: tui/src/history_cell.rs -assertion_line: 2608 expression: rendered --- ╭─────────────────────────────────────╮