diff --git a/codex-rs/app-server/tests/suite/v2/turn_start.rs b/codex-rs/app-server/tests/suite/v2/turn_start.rs
index d375a2db2..3abe73e2e 100644
--- a/codex-rs/app-server/tests/suite/v2/turn_start.rs
+++ b/codex-rs/app-server/tests/suite/v2/turn_start.rs
@@ -433,7 +433,7 @@ async fn turn_start_accepts_personality_override_v2() -> Result<()> {
 
     let thread_req = mcp
         .send_thread_start_request(ThreadStartParams {
-            model: Some("gpt-5.2-codex".to_string()),
+            model: Some("exp-codex-personality".to_string()),
             ..Default::default()
         })
         .await?;
diff --git a/codex-rs/core/src/models_manager/model_info.rs b/codex-rs/core/src/models_manager/model_info.rs
index 8fb77e29e..c07c144c1 100644
--- a/codex-rs/core/src/models_manager/model_info.rs
+++ b/codex-rs/core/src/models_manager/model_info.rs
@@ -169,6 +169,16 @@ pub(crate) fn find_model_info_for_slug(slug: &str) -> ModelInfo {
         model_info!(
             slug,
             base_instructions: GPT_5_2_CODEX_INSTRUCTIONS.to_string(),
+            model_instructions_template: Some(ModelInstructionsTemplate {
+                template: GPT_5_2_CODEX_INSTRUCTIONS_TEMPLATE.to_string(),
+                personality_messages: Some(PersonalityMessages(BTreeMap::from([(
+                    Personality::Friendly,
+                    PERSONALITY_FRIENDLY.to_string(),
+                ), (
+                    Personality::Pragmatic,
+                    PERSONALITY_PRAGMATIC.to_string(),
+                )]))),
+            }),
             apply_patch_tool_type: Some(ApplyPatchToolType::Freeform),
             shell_type: ConfigShellToolType::ShellCommand,
             supports_parallel_tool_calls: true,
@@ -203,16 +213,6 @@ pub(crate) fn find_model_info_for_slug(slug: &str) -> ModelInfo {
             truncation_policy: TruncationPolicyConfig::tokens(10_000),
             context_window: Some(CONTEXT_WINDOW_272K),
             supported_reasoning_levels: supported_reasoning_level_low_medium_high_xhigh(),
-            model_instructions_template: Some(ModelInstructionsTemplate {
-                template: GPT_5_2_CODEX_INSTRUCTIONS_TEMPLATE.to_string(),
-                personality_messages: Some(PersonalityMessages(BTreeMap::from([(
-                    Personality::Friendly,
-                    PERSONALITY_FRIENDLY.to_string(),
-                ), (
-                    Personality::Pragmatic,
-                    PERSONALITY_PRAGMATIC.to_string(),
-                )]))),
-            }),
         )
     } else if slug.starts_with("gpt-5.1-codex-max") {
         model_info!(
diff --git a/codex-rs/core/tests/suite/personality.rs b/codex-rs/core/tests/suite/personality.rs
index c17994cbc..410ca08e4 100644
--- a/codex-rs/core/tests/suite/personality.rs
+++ b/codex-rs/core/tests/suite/personality.rs
@@ -122,7 +122,7 @@ async fn config_personality_some_sets_instructions_template() -> anyhow::Result<
     let server = start_mock_server().await;
     let resp_mock = mount_sse_once(&server, sse_completed("resp-1")).await;
     let mut builder = test_codex()
-        .with_model("gpt-5.2-codex")
+        .with_model("exp-codex-personality")
        .with_config(|config| {
             config.model_personality = Some(Personality::Friendly);
             config.features.disable(Feature::RemoteModels);
@@ -179,7 +179,7 @@ async fn user_turn_personality_some_adds_update_message() -> anyhow::Result<()> 
     )
     .await;
     let mut builder = test_codex()
-        .with_model("gpt-5.2-codex")
+        .with_model("exp-codex-personality")
         .with_config(|config| {
             config.features.disable(Feature::RemoteModels);
         });