copy for model migration nudge (#6585)

This commit is contained in:
Ahmed Ibrahim 2025-11-12 21:56:30 -08:00 committed by GitHub
parent e3aaee00c8
commit 305fe73d83
No known key found for this signature in database
GPG key ID: B5690EEEBB952194
7 changed files with 15 additions and 24 deletions

View file

@ -84,7 +84,7 @@ async fn handle_model_migration_prompt_if_needed(
return None;
}
match run_model_migration_prompt(tui, &target_model).await {
match run_model_migration_prompt(tui).await {
ModelMigrationOutcome::Accepted => {
app_event_tx.send(AppEvent::PersistModelMigrationPromptAcknowledged {
migration_config: "hide_gpt5_1_migration_prompt".to_string(),

View file

@ -1856,7 +1856,7 @@ impl ChatWidget {
self.bottom_pane.show_selection_view(SelectionViewParams {
title: Some("Select Model and Effort".to_string()),
subtitle: Some(
"Access legacy models by running codex -m <model_name> or in your config"
"Access legacy models by running codex -m <model_name> or in your config.toml"
.to_string(),
),
footer_hint: Some("Press enter to select reasoning effort, or esc to dismiss.".into()),

View file

@ -3,7 +3,7 @@ source: tui/src/chatwidget/tests.rs
expression: popup
---
Select Model and Effort
Access legacy models by running codex -m <model_name> or in your config
Access legacy models by running codex -m <model_name> or in your config.toml
1. gpt-5.1-codex Optimized for codex.
2. gpt-5.1-codex-mini Optimized for codex. Cheaper, faster, but less

View file

@ -24,10 +24,7 @@ pub(crate) enum ModelMigrationOutcome {
Exit,
}
pub(crate) async fn run_model_migration_prompt(
tui: &mut Tui,
target_model: &str,
) -> ModelMigrationOutcome {
pub(crate) async fn run_model_migration_prompt(tui: &mut Tui) -> ModelMigrationOutcome {
// Render the prompt on the terminal's alternate screen so exiting or cancelling
// does not leave a large blank region in the normal scrollback. This does not
// change the prompt's appearance — only where it is drawn.
@ -48,7 +45,7 @@ pub(crate) async fn run_model_migration_prompt(
let alt = AltScreenGuard::enter(tui);
let mut screen = ModelMigrationScreen::new(alt.tui.frame_requester(), target_model);
let mut screen = ModelMigrationScreen::new(alt.tui.frame_requester());
let _ = alt.tui.draw(u16::MAX, |frame| {
frame.render_widget_ref(&screen, frame.area());
@ -79,16 +76,14 @@ pub(crate) async fn run_model_migration_prompt(
struct ModelMigrationScreen {
request_frame: FrameRequester,
target_model: String,
done: bool,
should_exit: bool,
}
impl ModelMigrationScreen {
fn new(request_frame: FrameRequester, target_model: &str) -> Self {
fn new(request_frame: FrameRequester) -> Self {
Self {
request_frame,
target_model: target_model.to_string(),
done: false,
should_exit: false,
}
@ -140,10 +135,7 @@ impl WidgetRef for &ModelMigrationScreen {
column.push("");
column.push(Line::from(vec![
"> ".into(),
"Introducing ".bold(),
"our ".bold(),
self.target_model.clone().bold(),
" models".bold(),
"Introducing our gpt-5.1 models".bold(),
]));
column.push(Line::from(""));
@ -199,7 +191,7 @@ mod tests {
let mut terminal = Terminal::with_options(backend).expect("terminal");
terminal.set_viewport_area(Rect::new(0, 0, width, height));
let screen = ModelMigrationScreen::new(FrameRequester::test_dummy(), "gpt-5.1-codex");
let screen = ModelMigrationScreen::new(FrameRequester::test_dummy());
{
let mut frame = terminal.get_frame();
@ -216,7 +208,7 @@ mod tests {
let mut terminal = Terminal::with_options(backend).expect("terminal");
terminal.set_viewport_area(Rect::new(0, 0, 65, 12));
let screen = ModelMigrationScreen::new(FrameRequester::test_dummy(), "gpt-5.1");
let screen = ModelMigrationScreen::new(FrameRequester::test_dummy());
{
let mut frame = terminal.get_frame();
frame.render_widget_ref(&screen, frame.area());
@ -231,7 +223,7 @@ mod tests {
let mut terminal = Terminal::with_options(backend).expect("terminal");
terminal.set_viewport_area(Rect::new(0, 0, 60, 12));
let screen = ModelMigrationScreen::new(FrameRequester::test_dummy(), "gpt-5.1-codex");
let screen = ModelMigrationScreen::new(FrameRequester::test_dummy());
{
let mut frame = terminal.get_frame();
frame.render_widget_ref(&screen, frame.area());
@ -246,7 +238,7 @@ mod tests {
let mut terminal = Terminal::with_options(backend).expect("terminal");
terminal.set_viewport_area(Rect::new(0, 0, 60, 12));
let screen = ModelMigrationScreen::new(FrameRequester::test_dummy(), "gpt-5.1-codex-mini");
let screen = ModelMigrationScreen::new(FrameRequester::test_dummy());
{
let mut frame = terminal.get_frame();
frame.render_widget_ref(&screen, frame.area());
@ -257,8 +249,7 @@ mod tests {
#[test]
fn escape_key_accepts_prompt() {
let screen_target = "gpt-5.1-codex";
let mut screen = ModelMigrationScreen::new(FrameRequester::test_dummy(), screen_target);
let mut screen = ModelMigrationScreen::new(FrameRequester::test_dummy());
// Simulate pressing Escape
screen.handle_key(KeyEvent::new(

View file

@ -2,7 +2,7 @@
source: tui/src/model_migration.rs
expression: terminal.backend()
---
> Introducing our gpt-5.1-codex models
> Introducing our gpt-5.1 models
We've upgraded our family of models supported in Codex to
gpt-5.1, gpt-5.1-codex and gpt-5.1-codex-mini.

View file

@ -2,7 +2,7 @@
source: tui/src/model_migration.rs
expression: terminal.backend()
---
> Introducing our gpt-5.1-codex models
> Introducing our gpt-5.1 models
We've upgraded our family of models supported in Codex to
gpt-5.1, gpt-5.1-codex and gpt-5.1-codex-mini.

View file

@ -2,7 +2,7 @@
source: tui/src/model_migration.rs
expression: terminal.backend()
---
> Introducing our gpt-5.1-codex-mini models
> Introducing our gpt-5.1 models
We've upgraded our family of models supported in Codex to
gpt-5.1, gpt-5.1-codex and gpt-5.1-codex-mini.