Add collaboration_mode override to turns (#9408)

This commit is contained in:
Ahmed Ibrahim 2026-01-16 21:51:25 -08:00 committed by GitHub
parent ad8bf59cbf
commit 146d54cede
No known key found for this signature in database
GPG key ID: B5690EEEBB952194
33 changed files with 281 additions and 13 deletions

View file

@ -4,6 +4,7 @@ use std::path::PathBuf;
use crate::protocol::common::AuthMode;
use codex_protocol::account::PlanType;
use codex_protocol::approvals::ExecPolicyAmendment as CoreExecPolicyAmendment;
use codex_protocol::config_types::CollaborationMode;
use codex_protocol::config_types::ForcedLoginMethod;
use codex_protocol::config_types::ReasoningSummary;
use codex_protocol::config_types::SandboxMode as CoreSandboxMode;
@ -1538,6 +1539,10 @@ pub struct TurnStartParams {
pub summary: Option<ReasoningSummary>,
/// Optional JSON Schema used to constrain the final assistant message for this turn.
pub output_schema: Option<JsonValue>,
/// EXPERIMENTAL - set a pre-set collaboration mode.
/// Takes precedence over `model`, `effort`, and developer instructions if set.
pub collaboration_mode: Option<CollaborationMode>,
}
#[derive(Serialize, Deserialize, Debug, Clone, PartialEq, JsonSchema, TS)]

View file

@ -3231,6 +3231,7 @@ impl CodexMessageProcessor {
effort,
summary,
final_output_json_schema: output_schema,
collaboration_mode: None,
})
.await;
@ -3344,7 +3345,8 @@ impl CodexMessageProcessor {
|| params.sandbox_policy.is_some()
|| params.model.is_some()
|| params.effort.is_some()
|| params.summary.is_some();
|| params.summary.is_some()
|| params.collaboration_mode.is_some();
// If any overrides are provided, update the session turn context first.
if has_any_overrides {
@ -3356,6 +3358,7 @@ impl CodexMessageProcessor {
model: params.model,
effort: params.effort.map(Some),
summary: params.summary,
collaboration_mode: params.collaboration_mode,
})
.await;
}

View file

@ -35,7 +35,10 @@ use codex_app_server_protocol::TurnStartedNotification;
use codex_app_server_protocol::TurnStatus;
use codex_app_server_protocol::UserInput as V2UserInput;
use codex_core::protocol_config_types::ReasoningSummary;
use codex_protocol::config_types::CollaborationMode;
use codex_protocol::config_types::Settings;
use codex_protocol::openai_models::ReasoningEffort;
use core_test_support::responses;
use core_test_support::skip_if_no_network;
use pretty_assertions::assert_eq;
use std::path::Path;
@ -305,6 +308,77 @@ async fn turn_start_emits_notifications_and_accepts_model_override() -> Result<(
Ok(())
}
#[tokio::test]
async fn turn_start_accepts_collaboration_mode_override_v2() -> Result<()> {
skip_if_no_network!(Ok(()));
let server = responses::start_mock_server().await;
let body = responses::sse(vec![
responses::ev_response_created("resp-1"),
responses::ev_assistant_message("msg-1", "Done"),
responses::ev_completed("resp-1"),
]);
let response_mock = responses::mount_sse_once(&server, body).await;
let codex_home = TempDir::new()?;
create_config_toml(codex_home.path(), &server.uri(), "never")?;
let mut mcp = McpProcess::new(codex_home.path()).await?;
timeout(DEFAULT_READ_TIMEOUT, mcp.initialize()).await??;
let thread_req = mcp
.send_thread_start_request(ThreadStartParams {
..Default::default()
})
.await?;
let thread_resp: JSONRPCResponse = timeout(
DEFAULT_READ_TIMEOUT,
mcp.read_stream_until_response_message(RequestId::Integer(thread_req)),
)
.await??;
let ThreadStartResponse { thread, .. } = to_response::<ThreadStartResponse>(thread_resp)?;
let collaboration_mode = CollaborationMode::Custom(Settings {
model: "mock-model-collab".to_string(),
reasoning_effort: Some(ReasoningEffort::High),
developer_instructions: None,
});
let turn_req = mcp
.send_turn_start_request(TurnStartParams {
thread_id: thread.id.clone(),
input: vec![V2UserInput::Text {
text: "Hello".to_string(),
text_elements: Vec::new(),
}],
model: Some("mock-model-override".to_string()),
effort: Some(ReasoningEffort::Low),
summary: Some(ReasoningSummary::Auto),
output_schema: None,
collaboration_mode: Some(collaboration_mode),
..Default::default()
})
.await?;
let turn_resp: JSONRPCResponse = timeout(
DEFAULT_READ_TIMEOUT,
mcp.read_stream_until_response_message(RequestId::Integer(turn_req)),
)
.await??;
let _turn: TurnStartResponse = to_response::<TurnStartResponse>(turn_resp)?;
timeout(
DEFAULT_READ_TIMEOUT,
mcp.read_stream_until_notification_message("turn/completed"),
)
.await??;
let request = response_mock.single_request();
let payload = request.body_json();
assert_eq!(payload["model"].as_str(), Some("mock-model-collab"));
Ok(())
}
#[tokio::test]
async fn turn_start_accepts_local_image_input() -> Result<()> {
// Two Codex turns hit the mock model (session start + turn/start).
@ -703,6 +777,7 @@ async fn turn_start_updates_sandbox_and_cwd_between_turns_v2() -> Result<()> {
effort: Some(ReasoningEffort::Medium),
summary: Some(ReasoningSummary::Auto),
output_schema: None,
collaboration_mode: None,
})
.await?;
timeout(
@ -732,6 +807,7 @@ async fn turn_start_updates_sandbox_and_cwd_between_turns_v2() -> Result<()> {
effort: Some(ReasoningEffort::Medium),
summary: Some(ReasoningSummary::Auto),
output_schema: None,
collaboration_mode: None,
})
.await?;
timeout(

View file

@ -1842,13 +1842,17 @@ async fn submission_loop(sess: Arc<Session>, config: Arc<Config>, rx_sub: Receiv
model,
effort,
summary,
collaboration_mode,
} => {
let collaboration_mode = {
let collaboration_mode = if let Some(collab_mode) = collaboration_mode {
collab_mode
} else {
let state = sess.state.lock().await;
state
.session_configuration
.collaboration_mode
.with_updates(model, effort, None)
state.session_configuration.collaboration_mode.with_updates(
model.clone(),
effort,
None,
)
};
handlers::override_turn_context(
&sess,
@ -2013,12 +2017,15 @@ mod handlers {
summary,
final_output_json_schema,
items,
collaboration_mode,
} => {
let collaboration_mode = Some(CollaborationMode::Custom(Settings {
model,
reasoning_effort: effort,
developer_instructions: None,
}));
let collaboration_mode = collaboration_mode.or_else(|| {
Some(CollaborationMode::Custom(Settings {
model: model.clone(),
reasoning_effort: effort,
developer_instructions: None,
}))
});
(
items,
SessionSettingsUpdate {

View file

@ -279,6 +279,7 @@ impl TestCodex {
model: session_model,
effort: None,
summary: ReasoningSummary::Auto,
collaboration_mode: None,
})
.await?;

View file

@ -311,6 +311,7 @@ async fn apply_patch_cli_move_without_content_change_has_no_turn_diff(
model,
effort: None,
summary: ReasoningSummary::Auto,
collaboration_mode: None,
})
.await?;
@ -898,6 +899,7 @@ async fn apply_patch_shell_command_heredoc_with_cd_emits_turn_diff() -> Result<(
model,
effort: None,
summary: ReasoningSummary::Auto,
collaboration_mode: None,
})
.await?;
@ -976,6 +978,7 @@ async fn apply_patch_shell_command_failure_propagates_error_and_skips_diff() ->
model,
effort: None,
summary: ReasoningSummary::Auto,
collaboration_mode: None,
})
.await?;
@ -1124,6 +1127,7 @@ async fn apply_patch_emits_turn_diff_event_with_unified_diff(
model,
effort: None,
summary: ReasoningSummary::Auto,
collaboration_mode: None,
})
.await?;
@ -1185,6 +1189,7 @@ async fn apply_patch_turn_diff_for_rename_with_content_change(
model,
effort: None,
summary: ReasoningSummary::Auto,
collaboration_mode: None,
})
.await?;
@ -1254,6 +1259,7 @@ async fn apply_patch_aggregates_diff_across_multiple_tool_calls() -> Result<()>
model,
effort: None,
summary: ReasoningSummary::Auto,
collaboration_mode: None,
})
.await?;
@ -1323,6 +1329,7 @@ async fn apply_patch_aggregates_diff_preserves_success_after_failure() -> Result
model,
effort: None,
summary: ReasoningSummary::Auto,
collaboration_mode: None,
})
.await?;

View file

@ -501,6 +501,7 @@ async fn submit_turn(
model: session_model,
effort: None,
summary: ReasoningSummary::Auto,
collaboration_mode: None,
})
.await?;

View file

@ -22,7 +22,9 @@ use codex_core::protocol::Op;
use codex_core::protocol::SessionSource;
use codex_otel::OtelManager;
use codex_protocol::ThreadId;
use codex_protocol::config_types::CollaborationMode;
use codex_protocol::config_types::ReasoningSummary;
use codex_protocol::config_types::Settings;
use codex_protocol::config_types::Verbosity;
use codex_protocol::models::FunctionCallOutputPayload;
use codex_protocol::models::ReasoningItemContent;
@ -44,6 +46,7 @@ use core_test_support::test_codex::test_codex;
use core_test_support::wait_for_event;
use dunce::canonicalize as normalize_path;
use futures::StreamExt;
use pretty_assertions::assert_eq;
use serde_json::json;
use std::io::Write;
use std::sync::Arc;
@ -865,6 +868,60 @@ async fn includes_default_reasoning_effort_in_request_when_defined_by_model_info
Ok(())
}
#[tokio::test(flavor = "multi_thread", worker_threads = 2)]
async fn user_turn_collaboration_mode_overrides_model_and_effort() -> anyhow::Result<()> {
    skip_if_no_network!(Ok(()));

    // Single mocked completion; the test only inspects the outbound request.
    let server = MockServer::start().await;
    let resp_mock = mount_sse_once(&server, sse_completed("resp1")).await;

    let TestCodex {
        codex,
        config,
        session_configured,
        ..
    } = test_codex()
        .with_model("gpt-5.1-codex")
        .build(&server)
        .await?;

    // The collaboration mode carries its own model + effort, which must win
    // over the per-turn `model`/`effort` fields supplied on the Op below.
    let collab_mode = CollaborationMode::Custom(Settings {
        model: "gpt-5.1".to_string(),
        reasoning_effort: Some(ReasoningEffort::High),
        developer_instructions: None,
    });

    codex
        .submit(Op::UserTurn {
            items: vec![UserInput::Text {
                text: "hello".into(),
                text_elements: Vec::new(),
            }],
            cwd: config.cwd.clone(),
            approval_policy: config.approval_policy.value(),
            sandbox_policy: config.sandbox_policy.get().clone(),
            model: session_configured.model.clone(),
            effort: Some(ReasoningEffort::Low),
            summary: config.model_reasoning_summary,
            collaboration_mode: Some(collab_mode),
            final_output_json_schema: None,
        })
        .await?;
    wait_for_event(&codex, |ev| matches!(ev, EventMsg::TurnComplete(_))).await;

    // The wire request must reflect the collaboration mode's settings.
    let request_body = resp_mock.single_request().body_json();
    assert_eq!(request_body["model"].as_str(), Some("gpt-5.1"));
    let effort = request_body
        .get("reasoning")
        .and_then(|reasoning| reasoning.get("effort"))
        .and_then(|value| value.as_str());
    assert_eq!(effort, Some("high"));
    Ok(())
}
#[tokio::test(flavor = "multi_thread", worker_threads = 2)]
async fn configured_reasoning_summary_is_sent() -> anyhow::Result<()> {
skip_if_no_network!(Ok(()));

View file

@ -1288,6 +1288,7 @@ async fn auto_compact_runs_after_resume_when_token_usage_is_over_limit() {
model: resumed.session_configured.model.clone(),
effort: None,
summary: ReasoningSummary::Auto,
collaboration_mode: None,
})
.await
.unwrap();

View file

@ -81,6 +81,7 @@ async fn execpolicy_blocks_shell_invocation() -> Result<()> {
model: session_model,
effort: None,
summary: ReasoningSummary::Auto,
collaboration_mode: None,
})
.await?;

View file

@ -127,6 +127,7 @@ async fn copy_paste_local_image_persists_rollout_request_shape() -> anyhow::Resu
model: session_model,
effort: None,
summary: ReasoningSummary::Auto,
collaboration_mode: None,
})
.await?;
@ -205,6 +206,7 @@ async fn drag_drop_image_persists_rollout_request_shape() -> anyhow::Result<()>
model: session_model,
effort: None,
summary: ReasoningSummary::Auto,
collaboration_mode: None,
})
.await?;

View file

@ -85,6 +85,7 @@ async fn codex_returns_json_result(model: String) -> anyhow::Result<()> {
model,
effort: None,
summary: ReasoningSummary::Auto,
collaboration_mode: None,
})
.await?;

View file

@ -40,6 +40,7 @@ async fn override_turn_context_does_not_persist_when_config_exists() {
model: Some("o3".to_string()),
effort: Some(Some(ReasoningEffort::High)),
summary: None,
collaboration_mode: None,
})
.await
.expect("submit override");
@ -82,6 +83,7 @@ async fn override_turn_context_does_not_create_config_file() {
model: Some("o3".to_string()),
effort: Some(Some(ReasoningEffort::Medium)),
summary: None,
collaboration_mode: None,
})
.await
.expect("submit override");

View file

@ -98,6 +98,7 @@ async fn renews_cache_ttl_on_matching_models_etag() -> Result<()> {
model: test.session_configured.model.clone(),
effort: None,
summary: ReasoningSummary::Auto,
collaboration_mode: None,
})
.await?;

View file

@ -109,6 +109,7 @@ async fn refresh_models_on_models_etag_mismatch_and_avoid_duplicate_models_fetch
model: session_model,
effort: None,
summary: ReasoningSummary::Auto,
collaboration_mode: None,
})
.await?;

View file

@ -109,6 +109,7 @@ async fn permissions_message_added_on_override_change() -> Result<()> {
model: None,
effort: None,
summary: None,
collaboration_mode: None,
})
.await?;
@ -224,6 +225,7 @@ async fn resume_replays_permissions_messages() -> Result<()> {
model: None,
effort: None,
summary: None,
collaboration_mode: None,
})
.await?;
@ -300,6 +302,7 @@ async fn resume_and_fork_append_permissions_messages() -> Result<()> {
model: None,
effort: None,
summary: None,
collaboration_mode: None,
})
.await?;

View file

@ -11,6 +11,8 @@ use codex_core::protocol::SandboxPolicy;
use codex_core::protocol_config_types::ReasoningSummary;
use codex_core::shell::Shell;
use codex_core::shell::default_user_shell;
use codex_protocol::config_types::CollaborationMode;
use codex_protocol::config_types::Settings;
use codex_protocol::config_types::WebSearchMode;
use codex_protocol::openai_models::ReasoningEffort;
use codex_protocol::user_input::UserInput;
@ -22,6 +24,7 @@ use core_test_support::skip_if_no_network;
use core_test_support::test_codex::TestCodex;
use core_test_support::test_codex::test_codex;
use core_test_support::wait_for_event;
use pretty_assertions::assert_eq;
use tempfile::TempDir;
fn text_user_input(text: String) -> serde_json::Value {
@ -344,6 +347,7 @@ async fn overrides_turn_context_but_keeps_cached_prefix_and_key_constant() -> an
model: Some("o3".to_string()),
effort: Some(Some(ReasoningEffort::High)),
summary: Some(ReasoningSummary::Detailed),
collaboration_mode: None,
})
.await?;
@ -399,14 +403,21 @@ async fn override_before_first_turn_emits_environment_context() -> anyhow::Resul
let TestCodex { codex, .. } = test_codex().build(&server).await?;
let collaboration_mode = CollaborationMode::Custom(Settings {
model: "gpt-5.1".to_string(),
reasoning_effort: Some(ReasoningEffort::High),
developer_instructions: None,
});
codex
.submit(Op::OverrideTurnContext {
cwd: None,
approval_policy: Some(AskForApproval::Never),
sandbox_policy: None,
model: None,
effort: None,
model: Some("gpt-5.1-codex".to_string()),
effort: Some(Some(ReasoningEffort::Low)),
summary: None,
collaboration_mode: Some(collaboration_mode),
})
.await?;
@ -423,6 +434,13 @@ async fn override_before_first_turn_emits_environment_context() -> anyhow::Resul
wait_for_event(&codex, |ev| matches!(ev, EventMsg::TurnComplete(_))).await;
let body = req.single_request().body_json();
assert_eq!(body["model"].as_str(), Some("gpt-5.1"));
assert_eq!(
body.get("reasoning")
.and_then(|reasoning| reasoning.get("effort"))
.and_then(|value| value.as_str()),
Some("high")
);
let input = body["input"]
.as_array()
.expect("input array must be present");
@ -554,6 +572,7 @@ async fn per_turn_overrides_keep_cached_prefix_and_key_constant() -> anyhow::Res
model: "o3".to_string(),
effort: Some(ReasoningEffort::High),
summary: ReasoningSummary::Detailed,
collaboration_mode: None,
final_output_json_schema: None,
})
.await?;
@ -646,6 +665,7 @@ async fn send_user_turn_with_no_changes_does_not_send_environment_context() -> a
model: default_model.clone(),
effort: default_effort,
summary: default_summary,
collaboration_mode: None,
final_output_json_schema: None,
})
.await?;
@ -663,6 +683,7 @@ async fn send_user_turn_with_no_changes_does_not_send_environment_context() -> a
model: default_model.clone(),
effort: default_effort,
summary: default_summary,
collaboration_mode: None,
final_output_json_schema: None,
})
.await?;
@ -741,6 +762,7 @@ async fn send_user_turn_with_changes_sends_environment_context() -> anyhow::Resu
model: default_model,
effort: default_effort,
summary: default_summary,
collaboration_mode: None,
final_output_json_schema: None,
})
.await?;
@ -758,6 +780,7 @@ async fn send_user_turn_with_changes_sends_environment_context() -> anyhow::Resu
model: "o3".to_string(),
effort: Some(ReasoningEffort::High),
summary: ReasoningSummary::Detailed,
collaboration_mode: None,
final_output_json_schema: None,
})
.await?;

View file

@ -139,6 +139,7 @@ async fn remote_models_remote_model_uses_unified_exec() -> Result<()> {
model: Some(REMOTE_MODEL_SLUG.to_string()),
effort: None,
summary: None,
collaboration_mode: None,
})
.await?;
@ -174,6 +175,7 @@ async fn remote_models_remote_model_uses_unified_exec() -> Result<()> {
model: REMOTE_MODEL_SLUG.to_string(),
effort: None,
summary: ReasoningSummary::Auto,
collaboration_mode: None,
})
.await?;
@ -363,6 +365,7 @@ async fn remote_models_apply_remote_base_instructions() -> Result<()> {
model: Some(model.to_string()),
effort: None,
summary: None,
collaboration_mode: None,
})
.await?;
@ -379,6 +382,7 @@ async fn remote_models_apply_remote_base_instructions() -> Result<()> {
model: model.to_string(),
effort: None,
summary: ReasoningSummary::Auto,
collaboration_mode: None,
})
.await?;

View file

@ -820,6 +820,7 @@ async fn review_uses_overridden_cwd_for_base_branch_merge_base() {
model: None,
effort: None,
summary: None,
collaboration_mode: None,
})
.await
.unwrap();

View file

@ -118,6 +118,7 @@ async fn stdio_server_round_trip() -> anyhow::Result<()> {
model: session_model,
effort: None,
summary: ReasoningSummary::Auto,
collaboration_mode: None,
})
.await?;
@ -256,6 +257,7 @@ async fn stdio_image_responses_round_trip() -> anyhow::Result<()> {
model: session_model,
effort: None,
summary: ReasoningSummary::Auto,
collaboration_mode: None,
})
.await?;
@ -452,6 +454,7 @@ async fn stdio_image_completions_round_trip() -> anyhow::Result<()> {
model: session_model,
effort: None,
summary: ReasoningSummary::Auto,
collaboration_mode: None,
})
.await?;
@ -596,6 +599,7 @@ async fn stdio_server_propagates_whitelisted_env_vars() -> anyhow::Result<()> {
model: session_model,
effort: None,
summary: ReasoningSummary::Auto,
collaboration_mode: None,
})
.await?;
@ -751,6 +755,7 @@ async fn streamable_http_tool_call_round_trip() -> anyhow::Result<()> {
model: session_model,
effort: None,
summary: ReasoningSummary::Auto,
collaboration_mode: None,
})
.await?;
@ -938,6 +943,7 @@ async fn streamable_http_with_oauth_round_trip() -> anyhow::Result<()> {
model: session_model,
effort: None,
summary: ReasoningSummary::Auto,
collaboration_mode: None,
})
.await?;

View file

@ -80,6 +80,7 @@ async fn run_snapshot_command(command: &str) -> Result<SnapshotRun> {
model: session_model,
effort: None,
summary: ReasoningSummary::Auto,
collaboration_mode: None,
})
.await?;
@ -157,6 +158,7 @@ async fn run_shell_command_snapshot(command: &str) -> Result<SnapshotRun> {
model: session_model,
effort: None,
summary: ReasoningSummary::Auto,
collaboration_mode: None,
})
.await?;
@ -295,6 +297,7 @@ async fn shell_command_snapshot_still_intercepts_apply_patch() -> Result<()> {
model,
effort: None,
summary: ReasoningSummary::Auto,
collaboration_mode: None,
})
.await?;

View file

@ -78,6 +78,7 @@ async fn user_turn_includes_skill_instructions() -> Result<()> {
model: session_model,
effort: None,
summary: codex_protocol::config_types::ReasoningSummary::Auto,
collaboration_mode: None,
})
.await?;

View file

@ -91,6 +91,7 @@ async fn shell_tool_executes_command_and_streams_output() -> anyhow::Result<()>
model: session_model,
effort: None,
summary: ReasoningSummary::Auto,
collaboration_mode: None,
})
.await?;
@ -158,6 +159,7 @@ async fn update_plan_tool_emits_plan_update_event() -> anyhow::Result<()> {
model: session_model,
effort: None,
summary: ReasoningSummary::Auto,
collaboration_mode: None,
})
.await?;
@ -235,6 +237,7 @@ async fn update_plan_tool_rejects_malformed_payload() -> anyhow::Result<()> {
model: session_model,
effort: None,
summary: ReasoningSummary::Auto,
collaboration_mode: None,
})
.await?;
@ -324,6 +327,7 @@ async fn apply_patch_tool_executes_and_emits_patch_events() -> anyhow::Result<()
model: session_model,
effort: None,
summary: ReasoningSummary::Auto,
collaboration_mode: None,
})
.await?;
@ -421,6 +425,7 @@ async fn apply_patch_reports_parse_diagnostics() -> anyhow::Result<()> {
model: session_model,
effort: None,
summary: ReasoningSummary::Auto,
collaboration_mode: None,
})
.await?;

View file

@ -47,6 +47,7 @@ async fn run_turn(test: &TestCodex, prompt: &str) -> anyhow::Result<()> {
model: session_model,
effort: None,
summary: ReasoningSummary::Auto,
collaboration_mode: None,
})
.await?;
@ -363,6 +364,7 @@ async fn shell_tools_start_before_response_completed_when_stream_delayed() -> an
model: session_model,
effort: None,
summary: ReasoningSummary::Auto,
collaboration_mode: None,
})
.await?;

View file

@ -547,6 +547,7 @@ async fn mcp_image_output_preserves_image_and_no_text_summary() -> Result<()> {
model: session_model,
effort: None,
summary: ReasoningSummary::Auto,
collaboration_mode: None,
})
.await?;

View file

@ -209,6 +209,7 @@ async fn unified_exec_intercepts_apply_patch_exec_command() -> Result<()> {
model: session_model,
effort: None,
summary: ReasoningSummary::Auto,
collaboration_mode: None,
})
.await?;
@ -336,6 +337,7 @@ async fn unified_exec_emits_exec_command_begin_event() -> Result<()> {
model: session_model,
effort: None,
summary: ReasoningSummary::Auto,
collaboration_mode: None,
})
.await?;
@ -412,6 +414,7 @@ async fn unified_exec_resolves_relative_workdir() -> Result<()> {
model: session_model,
effort: None,
summary: ReasoningSummary::Auto,
collaboration_mode: None,
})
.await?;
@ -491,6 +494,7 @@ async fn unified_exec_respects_workdir_override() -> Result<()> {
model: session_model,
effort: None,
summary: ReasoningSummary::Auto,
collaboration_mode: None,
})
.await?;
@ -582,6 +586,7 @@ async fn unified_exec_emits_exec_command_end_event() -> Result<()> {
model: session_model,
effort: None,
summary: ReasoningSummary::Auto,
collaboration_mode: None,
})
.await?;
@ -655,6 +660,7 @@ async fn unified_exec_emits_output_delta_for_exec_command() -> Result<()> {
model: session_model,
effort: None,
summary: ReasoningSummary::Auto,
collaboration_mode: None,
})
.await?;
@ -729,6 +735,7 @@ async fn unified_exec_full_lifecycle_with_background_end_event() -> Result<()> {
model: session_model,
effort: None,
summary: ReasoningSummary::Auto,
collaboration_mode: None,
})
.await?;
@ -857,6 +864,7 @@ async fn unified_exec_emits_terminal_interaction_for_write_stdin() -> Result<()>
model: session_model,
effort: None,
summary: ReasoningSummary::Auto,
collaboration_mode: None,
})
.await?;
@ -992,6 +1000,7 @@ async fn unified_exec_terminal_interaction_captures_delayed_output() -> Result<(
model: session_model,
effort: None,
summary: ReasoningSummary::Auto,
collaboration_mode: None,
})
.await?;
@ -1150,6 +1159,7 @@ async fn unified_exec_emits_one_begin_and_one_end_event() -> Result<()> {
model: session_model,
effort: None,
summary: ReasoningSummary::Auto,
collaboration_mode: None,
})
.await?;
@ -1246,6 +1256,7 @@ async fn exec_command_reports_chunk_and_exit_metadata() -> Result<()> {
model: session_model,
effort: None,
summary: ReasoningSummary::Auto,
collaboration_mode: None,
})
.await?;
@ -1362,6 +1373,7 @@ async fn unified_exec_defaults_to_pipe() -> Result<()> {
model: session_model,
effort: None,
summary: ReasoningSummary::Auto,
collaboration_mode: None,
})
.await?;
@ -1450,6 +1462,7 @@ async fn unified_exec_can_enable_tty() -> Result<()> {
model: session_model,
effort: None,
summary: ReasoningSummary::Auto,
collaboration_mode: None,
})
.await?;
@ -1529,6 +1542,7 @@ async fn unified_exec_respects_early_exit_notifications() -> Result<()> {
model: session_model,
effort: None,
summary: ReasoningSummary::Auto,
collaboration_mode: None,
})
.await?;
@ -1658,6 +1672,7 @@ async fn write_stdin_returns_exit_metadata_and_clears_session() -> Result<()> {
model: session_model,
effort: None,
summary: ReasoningSummary::Auto,
collaboration_mode: None,
})
.await?;
@ -1824,6 +1839,7 @@ async fn unified_exec_emits_end_event_when_session_dies_via_stdin() -> Result<()
model: session_model,
effort: None,
summary: ReasoningSummary::Auto,
collaboration_mode: None,
})
.await?;
@ -1899,6 +1915,7 @@ async fn unified_exec_closes_long_running_session_at_turn_end() -> Result<()> {
model: session_model,
effort: None,
summary: ReasoningSummary::Auto,
collaboration_mode: None,
})
.await?;
@ -2019,6 +2036,7 @@ async fn unified_exec_reuses_session_via_stdin() -> Result<()> {
model: session_model,
effort: None,
summary: ReasoningSummary::Auto,
collaboration_mode: None,
})
.await?;
@ -2152,6 +2170,7 @@ PY
model: session_model,
effort: None,
summary: ReasoningSummary::Auto,
collaboration_mode: None,
})
.await?;
// This is a worst case scenario for the truncate logic.
@ -2264,6 +2283,7 @@ async fn unified_exec_timeout_and_followup_poll() -> Result<()> {
model: session_model,
effort: None,
summary: ReasoningSummary::Auto,
collaboration_mode: None,
})
.await?;
@ -2358,6 +2378,7 @@ PY
model: session_model,
effort: None,
summary: ReasoningSummary::Auto,
collaboration_mode: None,
})
.await?;
@ -2438,6 +2459,7 @@ async fn unified_exec_runs_under_sandbox() -> Result<()> {
model: session_model,
effort: None,
summary: ReasoningSummary::Auto,
collaboration_mode: None,
})
.await?;
@ -2540,6 +2562,7 @@ async fn unified_exec_python_prompt_under_seatbelt() -> Result<()> {
model: session_model,
effort: None,
summary: ReasoningSummary::Auto,
collaboration_mode: None,
})
.await?;
@ -2633,6 +2656,7 @@ async fn unified_exec_runs_on_all_platforms() -> Result<()> {
model: session_model,
effort: None,
summary: ReasoningSummary::Auto,
collaboration_mode: None,
})
.await?;
@ -2766,6 +2790,7 @@ async fn unified_exec_prunes_exited_sessions_first() -> Result<()> {
model: session_model,
effort: None,
summary: ReasoningSummary::Auto,
collaboration_mode: None,
})
.await?;

View file

@ -88,6 +88,7 @@ async fn user_turn_with_local_image_attaches_image() -> anyhow::Result<()> {
model: session_model,
effort: None,
summary: ReasoningSummary::Auto,
collaboration_mode: None,
})
.await?;
@ -180,6 +181,7 @@ async fn view_image_tool_attaches_local_image() -> anyhow::Result<()> {
model: session_model,
effort: None,
summary: ReasoningSummary::Auto,
collaboration_mode: None,
})
.await?;
@ -311,6 +313,7 @@ async fn view_image_tool_errors_when_path_is_directory() -> anyhow::Result<()> {
model: session_model,
effort: None,
summary: ReasoningSummary::Auto,
collaboration_mode: None,
})
.await?;
@ -384,6 +387,7 @@ async fn view_image_tool_placeholder_for_non_image_files() -> anyhow::Result<()>
model: session_model,
effort: None,
summary: ReasoningSummary::Auto,
collaboration_mode: None,
})
.await?;
@ -476,6 +480,7 @@ async fn view_image_tool_errors_when_file_missing() -> anyhow::Result<()> {
model: session_model,
effort: None,
summary: ReasoningSummary::Auto,
collaboration_mode: None,
})
.await?;
@ -557,6 +562,7 @@ async fn replaces_invalid_local_image_after_bad_request() -> anyhow::Result<()>
model: session_model,
effort: None,
summary: ReasoningSummary::Auto,
collaboration_mode: None,
})
.await?;

View file

@ -452,6 +452,7 @@ pub async fn run_main(cli: Cli, codex_linux_sandbox_exe: Option<PathBuf>) -> any
effort: default_effort,
summary: default_summary,
final_output_json_schema: output_schema,
collaboration_mode: None,
})
.await?;
info!("Sent prompt with event ID: {task_id}");

View file

@ -13,6 +13,7 @@ use std::time::Duration;
use crate::ThreadId;
use crate::approvals::ElicitationRequestEvent;
use crate::config_types::CollaborationMode;
use crate::config_types::ReasoningSummary as ReasoningSummaryConfig;
use crate::custom_prompts::CustomPrompt;
use crate::items::TurnItem;
@ -115,6 +116,11 @@ pub enum Op {
summary: ReasoningSummaryConfig,
// The JSON schema to use for the final assistant message
final_output_json_schema: Option<Value>,
/// EXPERIMENTAL - set a pre-set collaboration mode.
/// Takes precedence over model, effort, and developer instructions if set.
#[serde(skip_serializing_if = "Option::is_none")]
collaboration_mode: Option<CollaborationMode>,
},
/// Override parts of the persistent turn context for subsequent turns.
@ -150,6 +156,11 @@ pub enum Op {
/// Updated reasoning summary preference (honored only for reasoning-capable models).
#[serde(skip_serializing_if = "Option::is_none")]
summary: Option<ReasoningSummaryConfig>,
/// EXPERIMENTAL - set a pre-set collaboration mode.
/// Takes precedence over model, effort, and developer instructions if set.
#[serde(skip_serializing_if = "Option::is_none")]
collaboration_mode: Option<CollaborationMode>,
},
/// Approve a command execution

View file

@ -1110,6 +1110,7 @@ impl App {
model: None,
effort: None,
summary: None,
collaboration_mode: None,
},
));
self.app_event_tx

View file

@ -2737,6 +2737,7 @@ impl ChatWidget {
model: Some(switch_model.clone()),
effort: Some(Some(default_effort)),
summary: None,
collaboration_mode: None,
}));
tx.send(AppEvent::UpdateModel(switch_model.clone()));
tx.send(AppEvent::UpdateReasoningEffort(Some(default_effort)));
@ -3011,6 +3012,7 @@ impl ChatWidget {
model: Some(model_for_action.clone()),
effort: Some(effort_for_action),
summary: None,
collaboration_mode: None,
}));
tx.send(AppEvent::UpdateModel(model_for_action.clone()));
tx.send(AppEvent::UpdateReasoningEffort(effort_for_action));
@ -3182,6 +3184,7 @@ impl ChatWidget {
model: Some(model.clone()),
effort: Some(effort),
summary: None,
collaboration_mode: None,
}));
self.app_event_tx.send(AppEvent::UpdateModel(model.clone()));
self.app_event_tx
@ -3350,6 +3353,7 @@ impl ChatWidget {
model: None,
effort: None,
summary: None,
collaboration_mode: None,
}));
tx.send(AppEvent::UpdateAskForApprovalPolicy(approval));
tx.send(AppEvent::UpdateSandboxPolicy(sandbox_clone));

View file

@ -1820,6 +1820,7 @@ impl App {
model: None,
effort: None,
summary: None,
collaboration_mode: None,
},
));
self.app_event_tx

View file

@ -2492,6 +2492,7 @@ impl ChatWidget {
model: Some(switch_model.clone()),
effort: Some(Some(default_effort)),
summary: None,
collaboration_mode: None,
}));
tx.send(AppEvent::UpdateModel(switch_model.clone()));
tx.send(AppEvent::UpdateReasoningEffort(Some(default_effort)));
@ -2724,6 +2725,7 @@ impl ChatWidget {
model: Some(model_for_action.clone()),
effort: Some(effort_for_action),
summary: None,
collaboration_mode: None,
}));
tx.send(AppEvent::UpdateModel(model_for_action.clone()));
tx.send(AppEvent::UpdateReasoningEffort(effort_for_action));
@ -2895,6 +2897,7 @@ impl ChatWidget {
model: Some(model.clone()),
effort: Some(effort),
summary: None,
collaboration_mode: None,
}));
self.app_event_tx.send(AppEvent::UpdateModel(model.clone()));
self.app_event_tx
@ -3040,6 +3043,7 @@ impl ChatWidget {
model: None,
effort: None,
summary: None,
collaboration_mode: None,
}));
tx.send(AppEvent::UpdateAskForApprovalPolicy(approval));
tx.send(AppEvent::UpdateSandboxPolicy(sandbox_clone));