Introduce collaboration modes (#9340)

- Merge `model` and `reasoning_effort` under collaboration modes.
- Add additional instructions for the custom collaboration mode.
- Default to `Custom` so existing behavior is unchanged.
This commit is contained in:
Ahmed Ibrahim 2026-01-16 16:28:22 -08:00 committed by GitHub
parent c26fe64539
commit 246f506551
No known key found for this signature in database
GPG key ID: B5690EEEBB952194
2 changed files with 156 additions and 48 deletions

View file

@ -160,12 +160,13 @@ use crate::user_notification::UserNotification;
use crate::util::backoff;
use codex_async_utils::OrCancelExt;
use codex_otel::OtelManager;
use codex_protocol::config_types::CollaborationMode;
use codex_protocol::config_types::ReasoningSummary as ReasoningSummaryConfig;
use codex_protocol::config_types::Settings;
use codex_protocol::models::ContentItem;
use codex_protocol::models::DeveloperInstructions;
use codex_protocol::models::ResponseInputItem;
use codex_protocol::models::ResponseItem;
use codex_protocol::openai_models::ReasoningEffort as ReasoningEffortConfig;
use codex_protocol::protocol::CodexErrorInfo;
use codex_protocol::protocol::InitialHistory;
use codex_protocol::user_input::UserInput;
@ -270,10 +271,16 @@ impl Codex {
crate::models_manager::manager::RefreshStrategy::OnlineIfUncached,
)
.await;
// TODO (aibrahim): Consolidate config.model and config.model_reasoning_effort into config.collaboration_mode
// to avoid extracting these fields separately and constructing CollaborationMode here.
let collaboration_mode = CollaborationMode::Custom(Settings {
model: model.clone(),
reasoning_effort: config.model_reasoning_effort,
developer_instructions: None,
});
let session_configuration = SessionConfiguration {
provider: config.model_provider.clone(),
model: model.clone(),
model_reasoning_effort: config.model_reasoning_effort,
collaboration_mode,
model_reasoning_summary: config.model_reasoning_summary,
developer_instructions: config.developer_instructions.clone(),
user_instructions,
@ -422,10 +429,7 @@ pub(crate) struct SessionConfiguration {
/// Provider identifier ("openai", "openrouter", ...).
provider: ModelProviderInfo,
/// If not specified, server will use its default model.
model: String,
model_reasoning_effort: Option<ReasoningEffortConfig>,
collaboration_mode: CollaborationMode,
model_reasoning_summary: ReasoningSummaryConfig,
/// Developer instructions that supplement the base instructions.
@ -463,11 +467,8 @@ pub(crate) struct SessionConfiguration {
impl SessionConfiguration {
pub(crate) fn apply(&self, updates: &SessionSettingsUpdate) -> ConstraintResult<Self> {
let mut next_configuration = self.clone();
if let Some(model) = updates.model.clone() {
next_configuration.model = model;
}
if let Some(effort) = updates.reasoning_effort {
next_configuration.model_reasoning_effort = effort;
if let Some(collaboration_mode) = updates.collaboration_mode.clone() {
next_configuration.collaboration_mode = collaboration_mode;
}
if let Some(summary) = updates.reasoning_summary {
next_configuration.model_reasoning_summary = summary;
@ -490,8 +491,7 @@ pub(crate) struct SessionSettingsUpdate {
pub(crate) cwd: Option<PathBuf>,
pub(crate) approval_policy: Option<AskForApproval>,
pub(crate) sandbox_policy: Option<SandboxPolicy>,
pub(crate) model: Option<String>,
pub(crate) reasoning_effort: Option<Option<ReasoningEffortConfig>>,
pub(crate) collaboration_mode: Option<CollaborationMode>,
pub(crate) reasoning_summary: Option<ReasoningSummaryConfig>,
pub(crate) final_output_json_schema: Option<Option<Value>>,
}
@ -502,7 +502,8 @@ impl Session {
// todo(aibrahim): store this state somewhere else so we don't need to mut config
let config = session_configuration.original_config_do_not_use.clone();
let mut per_turn_config = (*config).clone();
per_turn_config.model_reasoning_effort = session_configuration.model_reasoning_effort;
per_turn_config.model_reasoning_effort =
session_configuration.collaboration_mode.reasoning_effort();
per_turn_config.model_reasoning_summary = session_configuration.model_reasoning_summary;
per_turn_config.features = config.features.clone();
per_turn_config
@ -520,7 +521,7 @@ impl Session {
sub_id: String,
) -> TurnContext {
let otel_manager = otel_manager.clone().with_model(
session_configuration.model.as_str(),
session_configuration.collaboration_mode.model(),
model_info.slug.as_str(),
);
@ -531,7 +532,7 @@ impl Session {
model_info.clone(),
otel_manager,
provider,
session_configuration.model_reasoning_effort,
session_configuration.collaboration_mode.reasoning_effort(),
session_configuration.model_reasoning_summary,
conversation_id,
session_configuration.session_source.clone(),
@ -579,7 +580,8 @@ impl Session {
) -> anyhow::Result<Arc<Self>> {
debug!(
"Configuring session: model={}; provider={:?}",
session_configuration.model, session_configuration.provider
session_configuration.collaboration_mode.model(),
session_configuration.provider
);
if !session_configuration.cwd.is_absolute() {
return Err(anyhow::anyhow!(
@ -655,8 +657,8 @@ impl Session {
let auth = auth.as_ref();
let otel_manager = OtelManager::new(
conversation_id,
session_configuration.model.as_str(),
session_configuration.model.as_str(),
session_configuration.collaboration_mode.model(),
session_configuration.collaboration_mode.model(),
auth.and_then(CodexAuth::get_account_id),
auth.and_then(CodexAuth::get_account_email),
auth.map(|a| a.mode),
@ -680,7 +682,7 @@ impl Session {
otel_manager.conversation_starts(
config.model_provider.name.as_str(),
config.model_reasoning_effort,
session_configuration.collaboration_mode.reasoning_effort(),
config.model_reasoning_summary,
config.model_context_window,
config.model_auto_compact_token_limit,
@ -737,12 +739,12 @@ impl Session {
msg: EventMsg::SessionConfigured(SessionConfiguredEvent {
session_id: conversation_id,
forked_from_id,
model: session_configuration.model.clone(),
model: session_configuration.collaboration_mode.model().to_string(),
model_provider_id: config.model_provider_id.clone(),
approval_policy: session_configuration.approval_policy.value(),
sandbox_policy: session_configuration.sandbox_policy.get().clone(),
cwd: session_configuration.cwd.clone(),
reasoning_effort: session_configuration.model_reasoning_effort,
reasoning_effort: session_configuration.collaboration_mode.reasoning_effort(),
history_log_id,
history_entry_count,
initial_messages,
@ -978,7 +980,10 @@ impl Session {
let model_info = self
.services
.models_manager
.get_model_info(session_configuration.model.as_str(), &per_turn_config)
.get_model_info(
session_configuration.collaboration_mode.model(),
&per_turn_config,
)
.await;
let mut turn_context: TurnContext = Self::make_turn_context(
Some(Arc::clone(&self.services.auth_manager)),
@ -1837,6 +1842,13 @@ async fn submission_loop(sess: Arc<Session>, config: Arc<Config>, rx_sub: Receiv
effort,
summary,
} => {
let collaboration_mode = {
let state = sess.state.lock().await;
state
.session_configuration
.collaboration_mode
.with_updates(model, effort, None)
};
handlers::override_turn_context(
&sess,
sub.id.clone(),
@ -1844,8 +1856,7 @@ async fn submission_loop(sess: Arc<Session>, config: Arc<Config>, rx_sub: Receiv
cwd,
approval_policy,
sandbox_policy,
model,
reasoning_effort: effort,
collaboration_mode: Some(collaboration_mode),
reasoning_summary: summary,
..Default::default()
},
@ -1953,6 +1964,8 @@ mod handlers {
use codex_protocol::protocol::WarningEvent;
use crate::context_manager::is_user_turn_boundary;
use codex_protocol::config_types::CollaborationMode;
use codex_protocol::config_types::Settings;
use codex_protocol::user_input::UserInput;
use codex_rmcp_client::ElicitationAction;
use codex_rmcp_client::ElicitationResponse;
@ -1999,18 +2012,24 @@ mod handlers {
summary,
final_output_json_schema,
items,
} => (
items,
SessionSettingsUpdate {
cwd: Some(cwd),
approval_policy: Some(approval_policy),
sandbox_policy: Some(sandbox_policy),
model: Some(model),
reasoning_effort: Some(effort),
reasoning_summary: Some(summary),
final_output_json_schema: Some(final_output_json_schema),
},
),
} => {
let collaboration_mode = Some(CollaborationMode::Custom(Settings {
model,
reasoning_effort: effort,
developer_instructions: None,
}));
(
items,
SessionSettingsUpdate {
cwd: Some(cwd),
approval_policy: Some(approval_policy),
sandbox_policy: Some(sandbox_policy),
collaboration_mode,
reasoning_summary: Some(summary),
final_output_json_schema: Some(final_output_json_schema),
},
)
}
Op::UserInput {
items,
final_output_json_schema,
@ -3402,10 +3421,15 @@ mod tests {
let config = build_test_config(codex_home.path()).await;
let config = Arc::new(config);
let model = ModelsManager::get_model_offline(config.model.as_deref());
let reasoning_effort = config.model_reasoning_effort;
let collaboration_mode = CollaborationMode::Custom(Settings {
model,
reasoning_effort,
developer_instructions: None,
});
let session_configuration = SessionConfiguration {
provider: config.model_provider.clone(),
model,
model_reasoning_effort: config.model_reasoning_effort,
collaboration_mode,
model_reasoning_summary: config.model_reasoning_summary,
developer_instructions: config.developer_instructions.clone(),
user_instructions: config.user_instructions.clone(),
@ -3468,10 +3492,15 @@ mod tests {
let config = build_test_config(codex_home.path()).await;
let config = Arc::new(config);
let model = ModelsManager::get_model_offline(config.model.as_deref());
let reasoning_effort = config.model_reasoning_effort;
let collaboration_mode = CollaborationMode::Custom(Settings {
model,
reasoning_effort,
developer_instructions: None,
});
let session_configuration = SessionConfiguration {
provider: config.model_provider.clone(),
model,
model_reasoning_effort: config.model_reasoning_effort,
collaboration_mode,
model_reasoning_summary: config.model_reasoning_summary,
developer_instructions: config.developer_instructions.clone(),
user_instructions: config.user_instructions.clone(),
@ -3718,10 +3747,15 @@ mod tests {
let exec_policy = ExecPolicyManager::default();
let (agent_status_tx, _agent_status_rx) = watch::channel(AgentStatus::PendingInit);
let model = ModelsManager::get_model_offline(config.model.as_deref());
let reasoning_effort = config.model_reasoning_effort;
let collaboration_mode = CollaborationMode::Custom(Settings {
model,
reasoning_effort,
developer_instructions: None,
});
let session_configuration = SessionConfiguration {
provider: config.model_provider.clone(),
model,
model_reasoning_effort: config.model_reasoning_effort,
collaboration_mode,
model_reasoning_summary: config.model_reasoning_summary,
developer_instructions: config.developer_instructions.clone(),
user_instructions: config.user_instructions.clone(),
@ -3735,7 +3769,7 @@ mod tests {
};
let per_turn_config = Session::build_per_turn_config(&session_configuration);
let model_info = ModelsManager::construct_model_info_offline(
session_configuration.model.as_str(),
session_configuration.collaboration_mode.model(),
&per_turn_config,
);
let otel_manager = otel_manager(
@ -3813,10 +3847,15 @@ mod tests {
let exec_policy = ExecPolicyManager::default();
let (agent_status_tx, _agent_status_rx) = watch::channel(AgentStatus::PendingInit);
let model = ModelsManager::get_model_offline(config.model.as_deref());
let reasoning_effort = config.model_reasoning_effort;
let collaboration_mode = CollaborationMode::Custom(Settings {
model,
reasoning_effort,
developer_instructions: None,
});
let session_configuration = SessionConfiguration {
provider: config.model_provider.clone(),
model,
model_reasoning_effort: config.model_reasoning_effort,
collaboration_mode,
model_reasoning_summary: config.model_reasoning_summary,
developer_instructions: config.developer_instructions.clone(),
user_instructions: config.user_instructions.clone(),
@ -3830,7 +3869,7 @@ mod tests {
};
let per_turn_config = Session::build_per_turn_config(&session_configuration);
let model_info = ModelsManager::construct_model_info_offline(
session_configuration.model.as_str(),
session_configuration.collaboration_mode.model(),
&per_turn_config,
);
let otel_manager = otel_manager(

View file

@ -4,6 +4,8 @@ use serde::Serialize;
use strum_macros::Display;
use ts_rs::TS;
use crate::openai_models::ReasoningEffort;
/// A summary of the reasoning performed by the model. This can be useful for
/// debugging and understanding the model's reasoning process.
/// See https://platform.openai.com/docs/guides/reasoning?api-mode=responses#reasoning-summaries
@ -127,3 +129,70 @@ pub enum AltScreenMode {
/// Never use alternate screen (inline mode only).
Never,
}
/// Collaboration mode for a Codex session.
///
/// Every variant carries the same [`Settings`] payload (model, reasoning
/// effort, optional developer instructions); the variant itself selects which
/// collaboration behavior applies (interpreted by the session layer, not
/// here).
///
/// Serialized internally tagged: the `"mode"` field holds the lowercase
/// variant name and the `Settings` fields are flattened alongside it,
/// e.g. `{"mode": "custom", "model": "...", ...}`.
#[derive(Clone, PartialEq, Eq, Hash, Debug, Serialize, Deserialize, JsonSchema, TS)]
#[serde(tag = "mode", rename_all = "lowercase")]
pub enum CollaborationMode {
    Plan(Settings),
    Collaborate(Settings),
    Execute(Settings),
    /// User-provided settings; used as the default in this change so prior
    /// behavior (model + effort taken directly from config) is preserved.
    Custom(Settings),
}
impl CollaborationMode {
    /// Borrows the inner [`Settings`]; every variant wraps the same payload
    /// type, so this never fails.
    fn settings(&self) -> &Settings {
        match self {
            Self::Plan(s) => s,
            Self::Collaborate(s) => s,
            Self::Execute(s) => s,
            Self::Custom(s) => s,
        }
    }

    /// The model slug configured for this mode.
    pub fn model(&self) -> &str {
        &self.settings().model
    }

    /// The reasoning effort configured for this mode, if any.
    pub fn reasoning_effort(&self) -> Option<ReasoningEffort> {
        self.settings().reasoning_effort
    }

    /// Returns a copy of this mode with selectively overridden settings,
    /// keeping the variant unchanged.
    ///
    /// - `model`: `Some(m)` replaces the model; `None` keeps the current one.
    /// - `effort`: `Some(Some(e))` sets the effort, `Some(None)` clears it,
    ///   `None` leaves it untouched.
    /// - `developer_instructions`: `Some(s)` replaces the instructions;
    ///   `None` keeps whatever is already set. Note there is no way to
    ///   *clear* instructions through this method.
    pub fn with_updates(
        &self,
        model: Option<String>,
        effort: Option<Option<ReasoningEffort>>,
        developer_instructions: Option<String>,
    ) -> Self {
        let current = self.settings();
        let next = Settings {
            model: match model {
                Some(m) => m,
                None => current.model.clone(),
            },
            reasoning_effort: match effort {
                Some(e) => e,
                None => current.reasoning_effort,
            },
            developer_instructions: match developer_instructions {
                Some(d) => Some(d),
                None => current.developer_instructions.clone(),
            },
        };
        // Re-wrap under the same variant as `self`.
        match self {
            Self::Plan(_) => Self::Plan(next),
            Self::Collaborate(_) => Self::Collaborate(next),
            Self::Execute(_) => Self::Execute(next),
            Self::Custom(_) => Self::Custom(next),
        }
    }
}
/// Settings for a collaboration mode.
#[derive(Clone, PartialEq, Eq, Hash, Debug, Serialize, Deserialize, JsonSchema, TS)]
pub struct Settings {
    /// Model slug/identifier used for the session.
    pub model: String,
    /// Reasoning effort; `None` means no explicit effort is configured.
    pub reasoning_effort: Option<ReasoningEffort>,
    /// Extra developer instructions, if any. NOTE(review): every constructor
    /// in this change passes `None` — presumably populated later for custom
    /// modes; confirm against callers.
    pub developer_instructions: Option<String>,
}