Enhance model picker (#7709)

# External (non-OpenAI) Pull Request Requirements

Before opening this Pull Request, please read the dedicated
"Contributing" markdown file or your PR may be closed:
https://github.com/openai/codex/blob/main/docs/contributing.md

If your PR conforms to our contribution guidelines, replace this text
with a detailed and high quality description of your changes.

Include a link to a bug report or enhancement request.
This commit is contained in:
Ahmed Ibrahim 2025-12-08 14:22:51 -08:00 committed by GitHub
parent 0a32acaa2d
commit 71c75e648c
No known key found for this signature in database
GPG key ID: B5690EEEBB952194
4 changed files with 154 additions and 45 deletions

View file

@@ -700,6 +700,9 @@ impl App {
AppEvent::OpenReasoningPopup { model } => {
self.chat_widget.open_reasoning_popup(model);
}
AppEvent::OpenAllModelsPopup { models } => {
self.chat_widget.open_all_models_popup(models);
}
AppEvent::OpenFullAccessConfirmation { preset } => {
self.chat_widget.open_full_access_confirmation(preset);
}
@@ -799,20 +802,17 @@ impl App {
.await
{
Ok(()) => {
let reasoning_label = Self::reasoning_label(effort);
if let Some(profile) = profile {
self.chat_widget.add_info_message(
format!(
"Model changed to {model} {reasoning_label} for {profile} profile"
),
None,
);
} else {
self.chat_widget.add_info_message(
format!("Model changed to {model} {reasoning_label}"),
None,
);
let mut message = format!("Model changed to {model}");
if let Some(label) = Self::reasoning_label_for(&model, effort) {
message.push(' ');
message.push_str(label);
}
if let Some(profile) = profile {
message.push_str(" for ");
message.push_str(profile);
message.push_str(" profile");
}
self.chat_widget.add_info_message(message, None);
}
Err(err) => {
tracing::error!(
@@ -1012,6 +1012,13 @@ impl App {
}
}
/// Returns the reasoning-effort label to show for `model`, or `None` for
/// "codex-auto-" models, which manage reasoning effort themselves and so
/// display no label.
fn reasoning_label_for(
    model: &str,
    reasoning_effort: Option<ReasoningEffortConfig>,
) -> Option<&'static str> {
    if model.starts_with("codex-auto-") {
        None
    } else {
        Some(Self::reasoning_label(reasoning_effort))
    }
}
pub(crate) fn token_usage(&self) -> codex_core::protocol::TokenUsage {
self.chat_widget.token_usage()
}

View file

@@ -74,6 +74,11 @@ pub(crate) enum AppEvent {
model: ModelPreset,
},
/// Open the full model picker (non-auto models).
OpenAllModelsPopup {
models: Vec<ModelPreset>,
},
/// Open the confirmation prompt before enabling full access mode.
OpenFullAccessConfirmation {
preset: ApprovalPreset,

View file

@@ -2156,8 +2156,8 @@ impl ChatWidget {
});
}
/// Open a popup to choose the model (stage 1). After selecting a model,
/// a second popup is shown to choose the reasoning effort.
/// Open a popup to choose a quick auto model. Selecting "All models"
/// opens the full picker with every available preset.
pub(crate) fn open_model_popup(&mut self) {
let current_model = self.config.model.clone();
let presets: Vec<ModelPreset> =
@@ -2174,13 +2174,103 @@
}
};
let current_label = presets
.iter()
.find(|preset| preset.model == current_model)
.map(|preset| preset.display_name.to_string())
.unwrap_or_else(|| current_model.clone());
let (mut auto_presets, other_presets): (Vec<ModelPreset>, Vec<ModelPreset>) = presets
.into_iter()
.partition(|preset| Self::is_auto_model(&preset.model));
if auto_presets.is_empty() {
self.open_all_models_popup(other_presets);
return;
}
auto_presets.sort_by_key(|preset| Self::auto_model_order(&preset.model));
let mut items: Vec<SelectionItem> = auto_presets
.into_iter()
.map(|preset| {
let description =
(!preset.description.is_empty()).then_some(preset.description.clone());
let model = preset.model.clone();
let actions = Self::model_selection_actions(
model.clone(),
Some(preset.default_reasoning_effort),
);
SelectionItem {
name: preset.display_name,
description,
is_current: model == current_model,
actions,
dismiss_on_select: true,
..Default::default()
}
})
.collect();
if !other_presets.is_empty() {
let all_models = other_presets;
let actions: Vec<SelectionAction> = vec![Box::new(move |tx| {
tx.send(AppEvent::OpenAllModelsPopup {
models: all_models.clone(),
});
})];
let is_current = !items.iter().any(|item| item.is_current);
let description = Some(format!(
"Choose a specific model and reasoning level (current: {current_label})"
));
items.push(SelectionItem {
name: "All models".to_string(),
description,
is_current,
actions,
dismiss_on_select: true,
..Default::default()
});
}
self.bottom_pane.show_selection_view(SelectionViewParams {
title: Some("Select Model".to_string()),
subtitle: Some("Pick a quick auto mode or browse all models.".to_string()),
footer_hint: Some(standard_popup_hint_line()),
items,
..Default::default()
});
}
/// True when `model` is one of the quick "codex-auto-*" presets
/// (identified purely by its slug prefix).
fn is_auto_model(model: &str) -> bool {
    model.strip_prefix("codex-auto-").is_some()
}
/// Sort key for the quick-picker: fast < balanced < thorough, with any
/// unrecognized slug sorting after all known auto models.
fn auto_model_order(model: &str) -> usize {
    // Display order of the known auto presets; index == sort rank.
    const RANKED: [&str; 3] = [
        "codex-auto-fast",
        "codex-auto-balanced",
        "codex-auto-thorough",
    ];
    RANKED
        .iter()
        .position(|&known| known == model)
        .unwrap_or(RANKED.len())
}
pub(crate) fn open_all_models_popup(&mut self, presets: Vec<ModelPreset>) {
if presets.is_empty() {
self.add_info_message(
"No additional models are available right now.".to_string(),
None,
);
return;
}
let current_model = self.config.model.clone();
let mut items: Vec<SelectionItem> = Vec::new();
for preset in presets.into_iter() {
let description = if preset.description.is_empty() {
None
} else {
Some(preset.description.to_string())
};
let description =
(!preset.description.is_empty()).then_some(preset.description.to_string());
let is_current = preset.model == current_model;
let single_supported_effort = preset.supported_reasoning_efforts.len() == 1;
let preset_for_action = preset.clone();
@@ -2212,6 +2302,36 @@ impl ChatWidget {
});
}
/// Builds the single [`SelectionAction`] that applies a model/effort choice:
/// it overrides the current turn context, updates the in-memory model and
/// reasoning effort, persists the selection, and logs what was picked.
///
/// `effort_for_action` is `None` when the model should run at its default
/// reasoning effort.
fn model_selection_actions(
    model_for_action: String,
    effort_for_action: Option<ReasoningEffortConfig>,
) -> Vec<SelectionAction> {
    vec![Box::new(move |tx| {
        // Apply the choice to the active turn first, then mirror it into
        // the UI state and persist it for future sessions.
        tx.send(AppEvent::CodexOp(Op::OverrideTurnContext {
            cwd: None,
            approval_policy: None,
            sandbox_policy: None,
            model: Some(model_for_action.clone()),
            // Some(None) means "explicitly use the default effort".
            effort: Some(effort_for_action),
            summary: None,
        }));
        tx.send(AppEvent::UpdateModel(model_for_action.clone()));
        tx.send(AppEvent::UpdateReasoningEffort(effort_for_action));
        tx.send(AppEvent::PersistModelSelection {
            model: model_for_action.clone(),
            effort: effort_for_action,
        });
        let effort_label = match effort_for_action {
            Some(effort) => effort.to_string(),
            None => "default".to_string(),
        };
        tracing::info!(
            "Selected model: {}, Selected effort: {}",
            model_for_action,
            effort_label
        );
    })]
}
/// Open a popup to choose the reasoning effort (stage 2) for the given model.
pub(crate) fn open_reasoning_popup(&mut self, preset: ModelPreset) {
let default_effort: ReasoningEffortConfig = preset.default_reasoning_effort;
@@ -2320,30 +2440,7 @@ impl ChatWidget {
};
let model_for_action = model_slug.clone();
let effort_for_action = choice.stored;
let actions: Vec<SelectionAction> = vec![Box::new(move |tx| {
tx.send(AppEvent::CodexOp(Op::OverrideTurnContext {
cwd: None,
approval_policy: None,
sandbox_policy: None,
model: Some(model_for_action.clone()),
effort: Some(effort_for_action),
summary: None,
}));
tx.send(AppEvent::UpdateModel(model_for_action.clone()));
tx.send(AppEvent::UpdateReasoningEffort(effort_for_action));
tx.send(AppEvent::PersistModelSelection {
model: model_for_action.clone(),
effort: effort_for_action,
});
tracing::info!(
"Selected model: {}, Selected effort: {}",
model_for_action,
effort_for_action
.map(|e| e.to_string())
.unwrap_or_else(|| "default".to_string())
);
})];
let actions = Self::model_selection_actions(model_for_action, choice.stored);
items.push(SelectionItem {
name: effort_label,

View file

@@ -1961,7 +1961,7 @@ fn reasoning_popup_escape_returns_to_model_popup() {
chat.handle_key_event(KeyEvent::new(KeyCode::Esc, KeyModifiers::NONE));
let after_escape = render_bottom_popup(&chat, 80);
assert!(after_escape.contains("Select Model and Effort"));
assert!(after_escape.contains("Select Model"));
assert!(!after_escape.contains("Select Reasoning Level"));
}