chore(config): Rename config setting to personality (#10314)

## Summary
Let's make the setting name consistent with the SlashCommand!

## Testing
- [x] Updated tests
This commit is contained in:
Dylan Hurd 2026-01-31 20:38:06 -07:00 committed by GitHub
parent 101d359cd7
commit a33fa4bfe5
No known key found for this signature in database
GPG key ID: B5690EEEBB952194
13 changed files with 64 additions and 68 deletions

View file

@ -1770,7 +1770,7 @@ impl CodexMessageProcessor {
codex_linux_sandbox_exe: self.codex_linux_sandbox_exe.clone(),
base_instructions,
developer_instructions,
model_personality: personality,
personality,
..Default::default()
}
}

View file

@ -261,9 +261,6 @@
],
"description": "Optional path to a file containing model instructions."
},
"model_personality": {
"$ref": "#/definitions/Personality"
},
"model_provider": {
"description": "The key in the `model_providers` map identifying the [`ModelProviderInfo`] to use.",
"type": "string"
@ -280,6 +277,9 @@
"oss_provider": {
"type": "string"
},
"personality": {
"$ref": "#/definitions/Personality"
},
"sandbox_mode": {
"$ref": "#/definitions/SandboxMode"
},
@ -1376,14 +1376,6 @@
],
"description": "Optional path to a file containing model instructions that will override the built-in instructions for the selected model. Users are STRONGLY DISCOURAGED from using this field, as deviating from the instructions sanctioned by Codex will likely degrade model performance."
},
"model_personality": {
"allOf": [
{
"$ref": "#/definitions/Personality"
}
],
"description": "EXPERIMENTAL Optionally specify a personality for the model"
},
"model_provider": {
"description": "Provider to use from the model_providers map.",
"type": "string"
@ -1442,6 +1434,14 @@
],
"description": "OTEL configuration."
},
"personality": {
"allOf": [
{
"$ref": "#/definitions/Personality"
}
],
"description": "Optionally specify a personality for the model"
},
"profile": {
"description": "Profile to use from the `profiles` map.",
"type": "string"
@ -1569,4 +1569,4 @@
},
"title": "ConfigToml",
"type": "object"
}
}

View file

@ -332,7 +332,7 @@ impl Codex {
.base_instructions
.clone()
.or_else(|| conversation_history.get_base_instructions().map(|s| s.text))
.unwrap_or_else(|| model_info.get_model_instructions(config.model_personality));
.unwrap_or_else(|| model_info.get_model_instructions(config.personality));
// Respect explicit thread-start tools; fall back to persisted tools when resuming a thread.
let dynamic_tools = if dynamic_tools.is_empty() {
conversation_history.get_dynamic_tools().unwrap_or_default()
@ -356,7 +356,7 @@ impl Codex {
model_reasoning_summary: config.model_reasoning_summary,
developer_instructions: config.developer_instructions.clone(),
user_instructions,
personality: config.model_personality,
personality: config.personality,
base_instructions,
compact_prompt: config.compact_prompt.clone(),
approval_policy: config.approval_policy.clone(),
@ -634,7 +634,7 @@ impl Session {
per_turn_config.model_reasoning_effort =
session_configuration.collaboration_mode.reasoning_effort();
per_turn_config.model_reasoning_summary = session_configuration.model_reasoning_summary;
per_turn_config.model_personality = session_configuration.personality;
per_turn_config.personality = session_configuration.personality;
per_turn_config.web_search_mode = Some(resolve_web_search_mode_for_turn(
per_turn_config.web_search_mode,
session_configuration.provider.is_azure_responses_endpoint(),
@ -4975,11 +4975,11 @@ mod tests {
model_reasoning_summary: config.model_reasoning_summary,
developer_instructions: config.developer_instructions.clone(),
user_instructions: config.user_instructions.clone(),
personality: config.model_personality,
personality: config.personality,
base_instructions: config
.base_instructions
.clone()
.unwrap_or_else(|| model_info.get_model_instructions(config.model_personality)),
.unwrap_or_else(|| model_info.get_model_instructions(config.personality)),
compact_prompt: config.compact_prompt.clone(),
approval_policy: config.approval_policy.clone(),
sandbox_policy: config.sandbox_policy.clone(),
@ -5058,11 +5058,11 @@ mod tests {
model_reasoning_summary: config.model_reasoning_summary,
developer_instructions: config.developer_instructions.clone(),
user_instructions: config.user_instructions.clone(),
personality: config.model_personality,
personality: config.personality,
base_instructions: config
.base_instructions
.clone()
.unwrap_or_else(|| model_info.get_model_instructions(config.model_personality)),
.unwrap_or_else(|| model_info.get_model_instructions(config.personality)),
compact_prompt: config.compact_prompt.clone(),
approval_policy: config.approval_policy.clone(),
sandbox_policy: config.sandbox_policy.clone(),
@ -5325,11 +5325,11 @@ mod tests {
model_reasoning_summary: config.model_reasoning_summary,
developer_instructions: config.developer_instructions.clone(),
user_instructions: config.user_instructions.clone(),
personality: config.model_personality,
personality: config.personality,
base_instructions: config
.base_instructions
.clone()
.unwrap_or_else(|| model_info.get_model_instructions(config.model_personality)),
.unwrap_or_else(|| model_info.get_model_instructions(config.personality)),
compact_prompt: config.compact_prompt.clone(),
approval_policy: config.approval_policy.clone(),
sandbox_policy: config.sandbox_policy.clone(),
@ -5445,11 +5445,11 @@ mod tests {
model_reasoning_summary: config.model_reasoning_summary,
developer_instructions: config.developer_instructions.clone(),
user_instructions: config.user_instructions.clone(),
personality: config.model_personality,
personality: config.personality,
base_instructions: config
.base_instructions
.clone()
.unwrap_or_else(|| model_info.get_model_instructions(config.model_personality)),
.unwrap_or_else(|| model_info.get_model_instructions(config.personality)),
compact_prompt: config.compact_prompt.clone(),
approval_policy: config.approval_policy.clone(),
sandbox_policy: config.sandbox_policy.clone(),

View file

@ -278,7 +278,7 @@ impl ConfigDocument {
mutated
}),
ConfigEdit::SetModelPersonality { personality } => Ok(self.write_profile_value(
&["model_personality"],
&["personality"],
personality.map(|personality| value(personality.to_string())),
)),
ConfigEdit::SetNoticeHideFullAccessWarning(acknowledged) => Ok(self.write_value(
@ -724,7 +724,7 @@ impl ConfigEditsBuilder {
self
}
pub fn set_model_personality(mut self, personality: Option<Personality>) -> Self {
pub fn set_personality(mut self, personality: Option<Personality>) -> Self {
self.edits
.push(ConfigEdit::SetModelPersonality { personality });
self

View file

@ -134,7 +134,7 @@ pub struct Config {
pub model_provider: ModelProviderInfo,
/// Optionally specify the personality of the model
pub model_personality: Option<Personality>,
pub personality: Option<Personality>,
/// Approval policy for executing commands.
pub approval_policy: Constrained<AskForApproval>,
@ -912,9 +912,8 @@ pub struct ConfigToml {
/// Override to force-enable reasoning summaries for the configured model.
pub model_supports_reasoning_summaries: Option<bool>,
/// EXPERIMENTAL
/// Optionally specify a personality for the model
pub model_personality: Option<Personality>,
pub personality: Option<Personality>,
/// Base URL for requests to ChatGPT (as opposed to the OpenAI API).
pub chatgpt_base_url: Option<String>,
@ -1193,7 +1192,7 @@ pub struct ConfigOverrides {
pub codex_linux_sandbox_exe: Option<PathBuf>,
pub base_instructions: Option<String>,
pub developer_instructions: Option<String>,
pub model_personality: Option<Personality>,
pub personality: Option<Personality>,
pub compact_prompt: Option<String>,
pub include_apply_patch_tool: Option<bool>,
pub show_raw_agent_reasoning: Option<bool>,
@ -1300,7 +1299,7 @@ impl Config {
codex_linux_sandbox_exe,
base_instructions,
developer_instructions,
model_personality,
personality,
compact_prompt,
include_apply_patch_tool: include_apply_patch_tool_override,
show_raw_agent_reasoning,
@ -1497,9 +1496,9 @@ impl Config {
Self::try_read_non_empty_file(model_instructions_path, "model instructions file")?;
let base_instructions = base_instructions.or(file_base_instructions);
let developer_instructions = developer_instructions.or(cfg.developer_instructions);
let model_personality = model_personality
.or(config_profile.model_personality)
.or(cfg.model_personality)
let personality = personality
.or(config_profile.personality)
.or(cfg.personality)
.or_else(|| {
features
.enabled(Feature::Personality)
@ -1557,7 +1556,7 @@ impl Config {
notify: cfg.notify,
user_instructions,
base_instructions,
model_personality,
personality,
developer_instructions,
compact_prompt,
// The config.toml omits "_mode" because it's a config file. However, "_mode"
@ -3812,7 +3811,7 @@ model_verbosity = "high"
model_reasoning_summary: ReasoningSummary::Detailed,
model_supports_reasoning_summaries: None,
model_verbosity: None,
model_personality: None,
personality: None,
chatgpt_base_url: "https://chatgpt.com/backend-api/".to_string(),
base_instructions: None,
developer_instructions: None,
@ -3897,7 +3896,7 @@ model_verbosity = "high"
model_reasoning_summary: ReasoningSummary::default(),
model_supports_reasoning_summaries: None,
model_verbosity: None,
model_personality: None,
personality: None,
chatgpt_base_url: "https://chatgpt.com/backend-api/".to_string(),
base_instructions: None,
developer_instructions: None,
@ -3997,7 +3996,7 @@ model_verbosity = "high"
model_reasoning_summary: ReasoningSummary::default(),
model_supports_reasoning_summaries: None,
model_verbosity: None,
model_personality: None,
personality: None,
chatgpt_base_url: "https://chatgpt.com/backend-api/".to_string(),
base_instructions: None,
developer_instructions: None,
@ -4083,7 +4082,7 @@ model_verbosity = "high"
model_reasoning_summary: ReasoningSummary::Detailed,
model_supports_reasoning_summaries: None,
model_verbosity: Some(Verbosity::High),
model_personality: None,
personality: None,
chatgpt_base_url: "https://chatgpt.com/backend-api/".to_string(),
base_instructions: None,
developer_instructions: None,

View file

@ -25,7 +25,7 @@ pub struct ConfigProfile {
pub model_reasoning_effort: Option<ReasoningEffort>,
pub model_reasoning_summary: Option<ReasoningSummary>,
pub model_verbosity: Option<Verbosity>,
pub model_personality: Option<Personality>,
pub personality: Option<Personality>,
pub chatgpt_base_url: Option<String>,
/// Optional path to a file containing model instructions.
pub model_instructions_file: Option<AbsolutePathBuf>,

View file

@ -88,7 +88,7 @@ impl ContextManager {
let model_info = turn_context.client.get_model_info();
let personality = turn_context
.personality
.or(turn_context.client.config().model_personality);
.or(turn_context.client.config().personality);
let base_instructions = model_info.get_model_instructions(personality);
let base_tokens = i64::try_from(approx_token_count(&base_instructions)).unwrap_or(i64::MAX);

View file

@ -36,7 +36,7 @@ pub async fn maybe_migrate_personality(
let config_profile = config_toml
.get_config_profile(None)
.map_err(|err| io::Error::new(io::ErrorKind::InvalidData, err))?;
if config_toml.model_personality.is_some() || config_profile.model_personality.is_some() {
if config_toml.personality.is_some() || config_profile.personality.is_some() {
create_marker(&marker_path).await?;
return Ok(PersonalityMigrationStatus::SkippedExplicitPersonality);
}
@ -52,7 +52,7 @@ pub async fn maybe_migrate_personality(
}
ConfigEditsBuilder::new(codex_home)
.set_model_personality(Some(Personality::Pragmatic))
.set_personality(Some(Personality::Pragmatic))
.apply()
.await
.map_err(|err| {
@ -211,7 +211,7 @@ mod tests {
assert!(temp.path().join(PERSONALITY_MIGRATION_FILENAME).exists());
let persisted = read_config_toml(temp.path()).await?;
assert_eq!(persisted.model_personality, Some(Personality::Pragmatic));
assert_eq!(persisted.personality, Some(Personality::Pragmatic));
Ok(())
}
@ -232,7 +232,7 @@ mod tests {
async fn skips_when_personality_explicit() -> io::Result<()> {
let temp = TempDir::new()?;
ConfigEditsBuilder::new(temp.path())
.set_model_personality(Some(Personality::Friendly))
.set_personality(Some(Personality::Friendly))
.apply()
.await
.map_err(|err| io::Error::other(format!("failed to write config: {err}")))?;
@ -247,7 +247,7 @@ mod tests {
assert!(temp.path().join(PERSONALITY_MIGRATION_FILENAME).exists());
let persisted = read_config_toml(temp.path()).await?;
assert_eq!(persisted.model_personality, Some(Personality::Friendly));
assert_eq!(persisted.personality, Some(Personality::Friendly));
Ok(())
}

View file

@ -46,15 +46,15 @@ fn sse_completed(id: &str) -> String {
}
#[tokio::test(flavor = "multi_thread", worker_threads = 2)]
async fn model_personality_does_not_mutate_base_instructions_without_template() {
async fn personality_does_not_mutate_base_instructions_without_template() {
let codex_home = TempDir::new().expect("create temp dir");
let mut config = load_default_config_for_test(&codex_home).await;
config.features.enable(Feature::Personality);
config.model_personality = Some(Personality::Friendly);
config.personality = Some(Personality::Friendly);
let model_info = ModelsManager::construct_model_info_offline("gpt-5.1", &config);
assert_eq!(
model_info.get_model_instructions(config.model_personality),
model_info.get_model_instructions(config.personality),
model_info.base_instructions
);
}
@ -64,14 +64,14 @@ async fn base_instructions_override_disables_personality_template() {
let codex_home = TempDir::new().expect("create temp dir");
let mut config = load_default_config_for_test(&codex_home).await;
config.features.enable(Feature::Personality);
config.model_personality = Some(Personality::Friendly);
config.personality = Some(Personality::Friendly);
config.base_instructions = Some("override instructions".to_string());
let model_info = ModelsManager::construct_model_info_offline("gpt-5.2-codex", &config);
assert_eq!(model_info.base_instructions, "override instructions");
assert_eq!(
model_info.get_model_instructions(config.model_personality),
model_info.get_model_instructions(config.personality),
"override instructions"
);
}
@ -133,7 +133,7 @@ async fn config_personality_some_sets_instructions_template() -> anyhow::Result<
.with_config(|config| {
config.features.disable(Feature::RemoteModels);
config.features.enable(Feature::Personality);
config.model_personality = Some(Personality::Friendly);
config.personality = Some(Personality::Friendly);
});
let test = builder.build(&server).await?;
@ -277,11 +277,11 @@ async fn instructions_uses_base_if_feature_disabled() -> anyhow::Result<()> {
let codex_home = TempDir::new().expect("create temp dir");
let mut config = load_default_config_for_test(&codex_home).await;
config.features.disable(Feature::Personality);
config.model_personality = Some(Personality::Friendly);
config.personality = Some(Personality::Friendly);
let model_info = ModelsManager::construct_model_info_offline("gpt-5.2-codex", &config);
assert_eq!(
model_info.get_model_instructions(config.model_personality),
model_info.get_model_instructions(config.personality),
model_info.base_instructions
);
@ -378,7 +378,7 @@ async fn user_turn_personality_skips_if_feature_disabled() -> anyhow::Result<()>
}
#[tokio::test(flavor = "multi_thread", worker_threads = 2)]
async fn ignores_remote_model_personality_if_remote_models_disabled() -> anyhow::Result<()> {
async fn ignores_remote_personality_if_remote_models_disabled() -> anyhow::Result<()> {
skip_if_no_network!(Ok(()));
let server = MockServer::builder()
@ -439,7 +439,7 @@ async fn ignores_remote_model_personality_if_remote_models_disabled() -> anyhow:
config.features.disable(Feature::RemoteModels);
config.features.enable(Feature::Personality);
config.model = Some(remote_slug.to_string());
config.model_personality = Some(Personality::Friendly);
config.personality = Some(Personality::Friendly);
});
let test = builder.build(&server).await?;
@ -554,7 +554,7 @@ async fn remote_model_friendly_personality_instructions_with_feature() -> anyhow
config.features.enable(Feature::RemoteModels);
config.features.enable(Feature::Personality);
config.model = Some(remote_slug.to_string());
config.model_personality = Some(Personality::Friendly);
config.personality = Some(Personality::Friendly);
});
let test = builder.build(&server).await?;

View file

@ -131,7 +131,7 @@ async fn no_marker_sessions_sets_personality() -> io::Result<()> {
);
let persisted = read_config_toml(temp.path()).await?;
assert_eq!(persisted.model_personality, Some(Personality::Pragmatic));
assert_eq!(persisted.personality, Some(Personality::Pragmatic));
Ok(())
}
@ -149,6 +149,6 @@ async fn no_marker_archived_sessions_sets_personality() -> io::Result<()> {
);
let persisted = read_config_toml(temp.path()).await?;
assert_eq!(persisted.model_personality, Some(Personality::Pragmatic));
assert_eq!(persisted.personality, Some(Personality::Pragmatic));
Ok(())
}

View file

@ -251,7 +251,7 @@ pub async fn run_main(cli: Cli, codex_linux_sandbox_exe: Option<PathBuf>) -> any
codex_linux_sandbox_exe,
base_instructions: None,
developer_instructions: None,
model_personality: None,
personality: None,
compact_prompt: None,
include_apply_patch_tool: None,
show_raw_agent_reasoning: oss.then_some(true),

View file

@ -1875,7 +1875,7 @@ impl App {
let profile = self.active_profile.as_deref();
match ConfigEditsBuilder::new(&self.config.codex_home)
.with_profile(profile)
.set_model_personality(Some(personality))
.set_personality(Some(personality))
.apply()
.await
{
@ -2325,7 +2325,7 @@ impl App {
}
fn on_update_personality(&mut self, personality: Personality) {
self.config.model_personality = Some(personality);
self.config.personality = Some(personality);
self.chat_widget.set_personality(personality);
}

View file

@ -3244,7 +3244,7 @@ impl ChatWidget {
};
let personality = self
.config
.model_personality
.personality
.filter(|_| self.config.features.enabled(Feature::Personality))
.filter(|_| self.current_model_supports_personality());
let op = Op::UserTurn {
@ -3880,10 +3880,7 @@ impl ChatWidget {
}
fn open_personality_popup_for_current_model(&mut self) {
let current_personality = self
.config
.model_personality
.unwrap_or(Personality::Friendly);
let current_personality = self.config.personality.unwrap_or(Personality::Friendly);
let personalities = [Personality::Friendly, Personality::Pragmatic];
let supports_personality = self.current_model_supports_personality();
@ -5184,7 +5181,7 @@ impl ChatWidget {
/// Set the personality in the widget's config copy.
pub(crate) fn set_personality(&mut self, personality: Personality) {
self.config.model_personality = Some(personality);
self.config.personality = Some(personality);
}
/// Set the model in the widget's config copy and stored collaboration mode.