chore(config): Default Personality Pragmatic (#10705)

## Summary
Switch back to Pragmatic personality

## Testing
- [x] Updated unit tests
This commit is contained in:
Dylan Hurd 2026-02-04 21:22:47 -08:00 committed by GitHub
parent 1dc06b6ffc
commit a05aadfa1b
No known key found for this signature in database
GPG key ID: B5690EEEBB952194
5 changed files with 16 additions and 16 deletions

View file

@ -403,7 +403,7 @@ async fn thread_resume_accepts_personality_override() -> Result<()> {
.send_thread_resume_request(ThreadResumeParams {
thread_id: thread.id.clone(),
model: Some("gpt-5.2-codex".to_string()),
personality: Some(Personality::Pragmatic),
personality: Some(Personality::Friendly),
..Default::default()
})
.await?;

View file

@ -455,7 +455,7 @@ async fn turn_start_accepts_personality_override_v2() -> Result<()> {
text: "Hello".to_string(),
text_elements: Vec::new(),
}],
personality: Some(Personality::Pragmatic),
personality: Some(Personality::Friendly),
..Default::default()
})
.await?;
@ -560,7 +560,7 @@ async fn turn_start_change_personality_mid_thread_v2() -> Result<()> {
text: "Hello again".to_string(),
text_elements: Vec::new(),
}],
personality: Some(Personality::Pragmatic),
personality: Some(Personality::Friendly),
..Default::default()
})
.await?;

View file

@ -1550,7 +1550,7 @@ impl Config {
.or_else(|| {
features
.enabled(Feature::Personality)
.then_some(Personality::Friendly)
.then_some(Personality::Pragmatic)
});
let experimental_compact_prompt_path = config_profile
@ -3870,7 +3870,7 @@ model_verbosity = "high"
model_reasoning_summary: ReasoningSummary::Detailed,
model_supports_reasoning_summaries: None,
model_verbosity: None,
personality: Some(Personality::Friendly),
personality: Some(Personality::Pragmatic),
chatgpt_base_url: "https://chatgpt.com/backend-api/".to_string(),
base_instructions: None,
developer_instructions: None,
@ -3956,7 +3956,7 @@ model_verbosity = "high"
model_reasoning_summary: ReasoningSummary::default(),
model_supports_reasoning_summaries: None,
model_verbosity: None,
personality: Some(Personality::Friendly),
personality: Some(Personality::Pragmatic),
chatgpt_base_url: "https://chatgpt.com/backend-api/".to_string(),
base_instructions: None,
developer_instructions: None,
@ -4057,7 +4057,7 @@ model_verbosity = "high"
model_reasoning_summary: ReasoningSummary::default(),
model_supports_reasoning_summaries: None,
model_verbosity: None,
personality: Some(Personality::Friendly),
personality: Some(Personality::Pragmatic),
chatgpt_base_url: "https://chatgpt.com/backend-api/".to_string(),
base_instructions: None,
developer_instructions: None,
@ -4144,7 +4144,7 @@ model_verbosity = "high"
model_reasoning_summary: ReasoningSummary::Detailed,
model_supports_reasoning_summaries: None,
model_verbosity: Some(Verbosity::High),
personality: Some(Personality::Friendly),
personality: Some(Personality::Pragmatic),
chatgpt_base_url: "https://chatgpt.com/backend-api/".to_string(),
base_instructions: None,
developer_instructions: None,

View file

@ -233,7 +233,7 @@ async fn config_personality_none_sends_no_personality() -> anyhow::Result<()> {
}
#[tokio::test(flavor = "multi_thread", worker_threads = 2)]
async fn default_personality_is_friendly_without_config_toml() -> anyhow::Result<()> {
async fn default_personality_is_pragmatic_without_config_toml() -> anyhow::Result<()> {
skip_if_no_network!(Ok(()));
let server = start_mock_server().await;
@ -269,7 +269,7 @@ async fn default_personality_is_friendly_without_config_toml() -> anyhow::Result
let request = resp_mock.single_request();
let instructions_text = request.instructions_text();
assert!(
instructions_text.contains(LOCAL_FRIENDLY_TEMPLATE),
instructions_text.contains(LOCAL_PRAGMATIC_TEMPLATE),
"expected default pragmatic template, got: {instructions_text:?}"
);
@ -324,7 +324,7 @@ async fn user_turn_personality_some_adds_update_message() -> anyhow::Result<()>
effort: None,
summary: None,
collaboration_mode: None,
personality: Some(Personality::Pragmatic),
personality: Some(Personality::Friendly),
})
.await?;
@ -365,7 +365,7 @@ async fn user_turn_personality_some_adds_update_message() -> anyhow::Result<()>
"expected personality update preamble, got {personality_text:?}"
);
assert!(
personality_text.contains(LOCAL_PRAGMATIC_TEMPLATE),
personality_text.contains(LOCAL_FRIENDLY_TEMPLATE),
"expected personality update to include the local friendly template, got: {personality_text:?}"
);
@ -902,7 +902,7 @@ async fn user_turn_personality_remote_model_template_includes_update_message() -
effort: None,
summary: None,
collaboration_mode: None,
personality: Some(Personality::Pragmatic),
personality: Some(Personality::Friendly),
})
.await?;
@ -942,7 +942,7 @@ async fn user_turn_personality_remote_model_template_includes_update_message() -
"expected personality update preamble, got {personality_text:?}"
);
assert!(
personality_text.contains(remote_pragmatic_message),
personality_text.contains(remote_friendly_message),
"expected personality update to include remote template, got: {personality_text:?}"
);

View file

@ -5,7 +5,7 @@ expression: popup
Select Personality
Choose a communication style for Codex. Disable in /experimental.
1. Friendly (current) Warm, collaborative, and helpful.
2. Pragmatic Concise, task-focused, and direct.
1. Friendly Warm, collaborative, and helpful.
2. Pragmatic (current) Concise, task-focused, and direct.
Press enter to confirm or esc to go back