Use markdown for migration screen (#8952)

Next steps will be routing this to model info
This commit is contained in:
Ahmed Ibrahim 2026-01-12 23:41:42 -08:00 committed by GitHub
parent 18b737910c
commit 325ce985f1
No known key found for this signature in database
GPG key ID: B5690EEEBB952194
12 changed files with 147 additions and 13 deletions

1
codex-rs/Cargo.lock generated
View file

@ -1308,6 +1308,7 @@ dependencies = [
"image",
"include_dir",
"indexmap 2.12.0",
"indoc",
"keyring",
"landlock",
"libc",

View file

@ -142,6 +142,7 @@ icu_decimal = "2.1"
icu_locale_core = "2.1"
icu_provider = { version = "2.1", features = ["sync"] }
ignore = "0.4.23"
indoc = "2.0"
image = { version = "^0.25.9", default-features = false }
include_dir = "0.7.4"
indexmap = "2.12.0"

View file

@ -45,6 +45,7 @@ eventsource-stream = { workspace = true }
futures = { workspace = true }
http = { workspace = true }
include_dir = { workspace = true }
indoc = { workspace = true }
indexmap = { workspace = true }
keyring = { workspace = true, features = ["crypto-rust"] }
libc = { workspace = true }

View file

@ -3,6 +3,7 @@ use codex_protocol::openai_models::ModelPreset;
use codex_protocol::openai_models::ModelUpgrade;
use codex_protocol::openai_models::ReasoningEffort;
use codex_protocol::openai_models::ReasoningEffortPreset;
use indoc::indoc;
use once_cell::sync::Lazy;
pub const HIDE_GPT5_1_MIGRATION_PROMPT_CONFIG: &str = "hide_gpt5_1_migration_prompt";
@ -318,6 +319,16 @@ fn gpt_52_codex_upgrade() -> ModelUpgrade {
"Codex is now powered by gpt-5.2-codex, our latest frontier agentic coding model. It is smarter and faster than its predecessors and capable of long-running project-scale work."
.to_string(),
),
migration_markdown: Some(
indoc! {r#"
**Codex just got an upgrade. Introducing {model_to}.**
Codex is now powered by gpt-5.2-codex, our latest frontier agentic coding model. It is smarter and faster than its predecessors and capable of long-running project-scale work. Learn more about {model_to} at https://openai.com/index/introducing-gpt-5-2-codex
You can continue using {model_from} if you prefer.
"#}
.to_string(),
),
}
}

View file

@ -6,6 +6,7 @@ use codex_protocol::openai_models::ModelPreset;
use codex_protocol::openai_models::ReasoningEffort;
use codex_protocol::openai_models::ReasoningEffortPreset;
use core_test_support::load_default_config_for_test;
use indoc::indoc;
use pretty_assertions::assert_eq;
use tempfile::tempdir;
@ -410,6 +411,16 @@ fn gpt52_codex_upgrade() -> codex_protocol::openai_models::ModelUpgrade {
"Codex is now powered by gpt-5.2-codex, our latest frontier agentic coding model. It is smarter and faster than its predecessors and capable of long-running project-scale work."
.to_string(),
),
migration_markdown: Some(
indoc! {r#"
**Codex just got an upgrade. Introducing {model_to}.**
Codex is now powered by gpt-5.2-codex, our latest frontier agentic coding model. It is smarter and faster than its predecessors and capable of long-running project-scale work. Learn more about {model_to} at https://openai.com/index/introducing-gpt-5-2-codex
You can continue using {model_from} if you prefer.
"#}
.to_string(),
),
}
}

View file

@ -54,6 +54,7 @@ pub struct ModelUpgrade {
pub migration_config_key: String,
pub model_link: Option<String>,
pub upgrade_copy: Option<String>,
pub migration_markdown: Option<String>,
}
/// Metadata describing a Codex-supported model.
@ -234,6 +235,7 @@ impl From<ModelInfo> for ModelPreset {
// todo(aibrahim): add the model link here.
model_link: None,
upgrade_copy: None,
migration_markdown: None,
}),
show_in_picker: info.visibility == ModelVisibility::List,
supported_in_api: info.supported_in_api,

View file

@ -201,6 +201,7 @@ async fn handle_model_migration_prompt_if_needed(
migration_config_key,
model_link,
upgrade_copy,
migration_markdown,
}) = upgrade
{
if migration_prompt_hidden(config, migration_config_key.as_str()) {
@ -234,6 +235,7 @@ async fn handle_model_migration_prompt_if_needed(
&target_model,
model_link.clone(),
upgrade_copy.clone(),
migration_markdown.clone(),
heading_label,
target_description,
can_opt_out,
@ -1598,6 +1600,9 @@ mod tests {
fn model_migration_copy_to_plain_text(
copy: &crate::model_migration::ModelMigrationCopy,
) -> String {
if let Some(markdown) = copy.markdown.as_ref() {
return markdown.clone();
}
let mut s = String::new();
for span in &copy.heading {
s.push_str(&span.content);
@ -1680,6 +1685,7 @@ mod tests {
migration_config_key: HIDE_GPT5_1_MIGRATION_PROMPT_CONFIG.to_string(),
model_link: None,
upgrade_copy: None,
migration_markdown: None,
});
available.retain(|preset| preset.model != "gpt-5-codex");
available.push(current.clone());
@ -1735,6 +1741,7 @@ mod tests {
&upgrade.id,
upgrade.model_link.clone(),
upgrade.upgrade_copy.clone(),
upgrade.migration_markdown.clone(),
target.display_name.clone(),
target_description,
can_opt_out,

View file

@ -1,4 +1,5 @@
use crate::key_hint;
use crate::markdown_render::render_markdown_text_with_width;
use crate::render::Insets;
use crate::render::renderable::ColumnRenderable;
use crate::render::renderable::Renderable;
@ -34,6 +35,7 @@ pub(crate) struct ModelMigrationCopy {
pub heading: Vec<Span<'static>>,
pub content: Vec<Line<'static>>,
pub can_opt_out: bool,
pub markdown: Option<String>,
}
#[derive(Clone, Copy, Debug, PartialEq, Eq)]
@ -55,15 +57,30 @@ impl MigrationMenuOption {
}
}
#[allow(clippy::too_many_arguments)]
pub(crate) fn migration_copy_for_models(
current_model: &str,
target_model: &str,
model_link: Option<String>,
migration_copy: Option<String>,
migration_markdown: Option<String>,
target_display_name: String,
target_description: Option<String>,
can_opt_out: bool,
) -> ModelMigrationCopy {
if let Some(migration_markdown) = migration_markdown {
return ModelMigrationCopy {
heading: Vec::new(),
content: Vec::new(),
can_opt_out,
markdown: Some(fill_migration_markdown(
&migration_markdown,
current_model,
target_model,
)),
};
}
let heading_text = Span::from(format!(
"Codex just got an upgrade. Introducing {target_display_name}."
))
@ -113,6 +130,7 @@ pub(crate) fn migration_copy_for_models(
heading: vec![heading_text],
content,
can_opt_out,
markdown: None,
}
}
@ -237,9 +255,13 @@ impl WidgetRef for &ModelMigrationScreen {
let mut column = ColumnRenderable::new();
column.push("");
column.push(self.heading_line());
column.push(Line::from(""));
self.render_content(&mut column);
if let Some(markdown) = self.copy.markdown.as_ref() {
self.render_markdown_content(markdown, area.width, &mut column);
} else {
column.push(self.heading_line());
column.push(Line::from(""));
self.render_content(&mut column);
}
if self.copy.can_opt_out {
self.render_menu(&mut column);
}
@ -290,6 +312,21 @@ impl ModelMigrationScreen {
}
}
/// Renders the migration markdown body into `column`, wrapped to the
/// width remaining after a two-cell left inset.
fn render_markdown_content(
    &self,
    markdown: &str,
    area_width: u16,
    column: &mut ColumnRenderable,
) {
    const HORIZONTAL_INSET: u16 = 2;
    let usable_width = area_width.saturating_sub(HORIZONTAL_INSET);
    // A zero width means "no wrapping"; otherwise wrap to the usable width.
    let wrap_width = if usable_width > 0 {
        Some(usable_width as usize)
    } else {
        None
    };
    let rendered = render_markdown_text_with_width(markdown, wrap_width);
    for rendered_line in rendered.lines {
        column.push(rendered_line.inset(Insets::tlbr(0, HORIZONTAL_INSET, 0, 0)));
    }
}
fn render_menu(&self, column: &mut ColumnRenderable) {
column.push(Line::from(""));
column.push(
@ -348,6 +385,12 @@ fn is_ctrl_exit_combo(key_event: KeyEvent) -> bool {
&& matches!(key_event.code, KeyCode::Char('c') | KeyCode::Char('d'))
}
/// Substitutes the `{model_from}` and `{model_to}` placeholders in a
/// migration-copy template with the concrete model identifiers.
fn fill_migration_markdown(template: &str, current_model: &str, target_model: &str) -> String {
    let with_source = template.replace("{model_from}", current_model);
    with_source.replace("{model_to}", target_model)
}
#[cfg(test)]
mod tests {
use super::ModelMigrationScreen;
@ -378,6 +421,7 @@ mod tests {
"Upgrade to gpt-5.2-codex for the latest and greatest agentic coding model."
.to_string(),
),
None,
"gpt-5.1-codex-max".to_string(),
Some("Codex-optimized flagship for deep and fast reasoning.".to_string()),
true,
@ -406,6 +450,7 @@ mod tests {
"gpt-5.1",
Some("https://www.codex.com/models/gpt-5.1".to_string()),
None,
None,
"gpt-5.1".to_string(),
Some("Broad world knowledge with strong general reasoning.".to_string()),
false,
@ -432,6 +477,7 @@ mod tests {
"gpt-5.1-codex-max",
Some("https://www.codex.com/models/gpt-5.1-codex-max".to_string()),
None,
None,
"gpt-5.1-codex-max".to_string(),
Some("Codex-optimized flagship for deep and fast reasoning.".to_string()),
false,
@ -458,6 +504,7 @@ mod tests {
"gpt-5.1-codex-mini",
Some("https://www.codex.com/models/gpt-5.1-codex-mini".to_string()),
None,
None,
"gpt-5.1-codex-mini".to_string(),
Some("Optimized for codex. Cheaper, faster, but less capable.".to_string()),
false,
@ -480,6 +527,7 @@ mod tests {
"gpt-new",
Some("https://www.codex.com/models/gpt-new".to_string()),
None,
None,
"gpt-new".to_string(),
Some("Latest recommended model for better performance.".to_string()),
true,
@ -508,6 +556,7 @@ mod tests {
"gpt-new",
Some("https://www.codex.com/models/gpt-new".to_string()),
None,
None,
"gpt-new".to_string(),
Some("Latest recommended model for better performance.".to_string()),
true,

View file

@ -1,9 +1,8 @@
---
source: tui/src/app.rs
assertion_line: 1579
expression: model_migration_copy_to_plain_text(&copy)
---
Codex just got an upgrade. Introducing gpt-5.2-codex.
**Codex just got an upgrade. Introducing gpt-5.2-codex.**
Codex is now powered by gpt-5.2-codex, our latest frontier agentic coding model. It is smarter and faster than its predecessors and capable of long-running project-scale work. Learn more about gpt-5.2-codex at https://openai.com/index/introducing-gpt-5-2-codex

View file

@ -231,6 +231,7 @@ async fn handle_model_migration_prompt_if_needed(
id: target_model,
reasoning_effort_mapping,
migration_config_key,
migration_markdown,
..
}) = upgrade
{
@ -273,6 +274,7 @@ async fn handle_model_migration_prompt_if_needed(
&target_model,
heading_label,
target_description,
migration_markdown.clone(),
can_opt_out,
);
match run_model_migration_prompt(tui, prompt_copy).await {
@ -2384,6 +2386,9 @@ mod tests {
fn model_migration_copy_to_plain_text(
copy: &crate::model_migration::ModelMigrationCopy,
) -> String {
if let Some(markdown) = copy.markdown.as_ref() {
return markdown.clone();
}
let mut s = String::new();
for span in &copy.heading {
s.push_str(&span.content);
@ -2478,6 +2483,7 @@ mod tests {
&upgrade.id,
target.display_name,
target_description,
upgrade.migration_markdown.clone(),
can_opt_out,
);

View file

@ -1,4 +1,5 @@
use crate::key_hint;
use crate::markdown_render::render_markdown_text_with_width;
use crate::render::Insets;
use crate::render::renderable::ColumnRenderable;
use crate::render::renderable::Renderable;
@ -34,6 +35,7 @@ pub(crate) struct ModelMigrationCopy {
pub heading: Vec<Span<'static>>,
pub content: Vec<Line<'static>>,
pub can_opt_out: bool,
pub markdown: Option<String>,
}
#[derive(Clone, Copy, Debug, PartialEq, Eq)]
@ -55,13 +57,28 @@ impl MigrationMenuOption {
}
}
#[allow(clippy::too_many_arguments)]
pub(crate) fn migration_copy_for_models(
current_model: &str,
target_model: &str,
target_display_name: String,
target_description: Option<String>,
migration_markdown: Option<String>,
can_opt_out: bool,
) -> ModelMigrationCopy {
if let Some(migration_markdown) = migration_markdown {
return ModelMigrationCopy {
heading: Vec::new(),
content: Vec::new(),
can_opt_out,
markdown: Some(fill_migration_markdown(
&migration_markdown,
current_model,
target_model,
)),
};
}
let heading_text = Span::from(format!("Try {target_display_name}")).bold();
let description_line = target_description
.filter(|desc| !desc.is_empty())
@ -93,6 +110,7 @@ pub(crate) fn migration_copy_for_models(
heading: vec![heading_text],
content,
can_opt_out,
markdown: None,
}
}
@ -218,9 +236,13 @@ impl WidgetRef for &ModelMigrationScreen {
let mut column = ColumnRenderable::new();
column.push("");
column.push(self.heading_line());
column.push(Line::from(""));
self.render_content(&mut column);
if let Some(markdown) = self.copy.markdown.as_ref() {
self.render_markdown_content(markdown, area.width, &mut column);
} else {
column.push(self.heading_line());
column.push(Line::from(""));
self.render_content(&mut column);
}
if self.copy.can_opt_out {
self.render_menu(&mut column);
}
@ -271,6 +293,21 @@ impl ModelMigrationScreen {
}
}
/// Lays out the markdown migration copy inside `column`, wrapping lines to
/// the space left over after a two-cell left inset.
fn render_markdown_content(
    &self,
    markdown: &str,
    area_width: u16,
    column: &mut ColumnRenderable,
) {
    let inset: u16 = 2;
    let available = area_width.saturating_sub(inset);
    // No usable columns => render unwrapped rather than wrapping to zero.
    let wrap_width = match available {
        0 => None,
        w => Some(w as usize),
    };
    for line in render_markdown_text_with_width(markdown, wrap_width).lines {
        column.push(line.inset(Insets::tlbr(0, inset, 0, 0)));
    }
}
fn render_menu(&self, column: &mut ColumnRenderable) {
column.push(Line::from(""));
column.push(
@ -329,6 +366,12 @@ fn is_ctrl_exit_combo(key_event: KeyEvent) -> bool {
&& matches!(key_event.code, KeyCode::Char('c') | KeyCode::Char('d'))
}
/// Replaces the `{model_from}`/`{model_to}` placeholders in `template`
/// with the given model identifiers.
fn fill_migration_markdown(template: &str, current_model: &str, target_model: &str) -> String {
    [("{model_from}", current_model), ("{model_to}", target_model)]
        .iter()
        .fold(template.to_string(), |text, (placeholder, value)| {
            text.replace(placeholder, value)
        })
}
#[cfg(test)]
mod tests {
use super::ModelMigrationScreen;
@ -356,6 +399,7 @@ mod tests {
"gpt-5.1-codex-max",
"gpt-5.1-codex-max".to_string(),
Some("Codex-optimized flagship for deep and fast reasoning.".to_string()),
None,
true,
),
);
@ -382,6 +426,7 @@ mod tests {
"gpt-5.1",
"gpt-5.1".to_string(),
Some("Broad world knowledge with strong general reasoning.".to_string()),
None,
false,
),
);
@ -406,6 +451,7 @@ mod tests {
"gpt-5.1-codex-max",
"gpt-5.1-codex-max".to_string(),
Some("Codex-optimized flagship for deep and fast reasoning.".to_string()),
None,
false,
),
);
@ -430,6 +476,7 @@ mod tests {
"gpt-5.1-codex-mini",
"gpt-5.1-codex-mini".to_string(),
Some("Optimized for codex. Cheaper, faster, but less capable.".to_string()),
None,
false,
),
);
@ -450,6 +497,7 @@ mod tests {
"gpt-new",
"gpt-new".to_string(),
Some("Latest recommended model for better performance.".to_string()),
None,
true,
),
);
@ -476,6 +524,7 @@ mod tests {
"gpt-new",
"gpt-new".to_string(),
Some("Latest recommended model for better performance.".to_string()),
None,
true,
),
);

View file

@ -1,12 +1,9 @@
---
source: tui2/src/app.rs
assertion_line: 2314
expression: model_migration_copy_to_plain_text(&copy)
---
Try gpt-5.2-codex
**Codex just got an upgrade. Introducing gpt-5.2-codex.**
We recommend switching from gpt-5.1-codex to gpt-5.2-codex.
Latest frontier agentic coding model.
Codex is now powered by gpt-5.2-codex, our latest frontier agentic coding model. It is smarter and faster than its predecessors and capable of long-running project-scale work. Learn more about gpt-5.2-codex at https://openai.com/index/introducing-gpt-5-2-codex
You can continue using gpt-5.1-codex if you prefer.