Enable request_user_input in Default mode (#12735)

## Summary
- allow `request_user_input` in Default collaboration mode as well as
Plan
- update the Default-mode instructions to prefer assumptions first and
use `request_user_input` only when a question is unavoidable
- update request_user_input and app-server tests to match the new
Default-mode behavior
- refactor collaboration-mode availability plumbing into
`CollaborationModesConfig` for future mode-related flags

## Codex author
`codex resume 019c9124-ed28-7c13-96c6-b916b1c97d49`
This commit is contained in:
Charley Cunningham 2026-02-25 15:20:46 -08:00 committed by GitHub
parent 2bd87d1a75
commit 2f4d6ded1d
No known key found for this signature in database
GPG key ID: B5690EEEBB952194
29 changed files with 426 additions and 147 deletions

View file

@ -8150,14 +8150,8 @@
"type": "object"
},
"CollaborationModeMask": {
"description": "A mask for collaboration mode settings, allowing partial updates. All fields except `name` are optional, enabling selective updates.",
"description": "EXPERIMENTAL - collaboration mode preset metadata for clients.",
"properties": {
"developer_instructions": {
"type": [
"string",
"null"
]
},
"mode": {
"anyOf": [
{

View file

@ -1,11 +0,0 @@
// GENERATED CODE! DO NOT MODIFY BY HAND!
// This file was generated by [ts-rs](https://github.com/Aleph-Alpha/ts-rs). Do not edit this file manually.
import type { ModeKind } from "./ModeKind";
import type { ReasoningEffort } from "./ReasoningEffort";
/**
* A mask for collaboration mode settings, allowing partial updates.
* All fields except `name` are optional, enabling selective updates.
*/
export type CollaborationModeMask = { name: string, mode: ModeKind | null, model: string | null, reasoning_effort: ReasoningEffort | null | null, developer_instructions: string | null | null, };

View file

@ -44,7 +44,6 @@ export type { CollabResumeEndEvent } from "./CollabResumeEndEvent";
export type { CollabWaitingBeginEvent } from "./CollabWaitingBeginEvent";
export type { CollabWaitingEndEvent } from "./CollabWaitingEndEvent";
export type { CollaborationMode } from "./CollaborationMode";
export type { CollaborationModeMask } from "./CollaborationModeMask";
export type { ContentItem } from "./ContentItem";
export type { ContextCompactedEvent } from "./ContextCompactedEvent";
export type { ContextCompactionItem } from "./ContextCompactionItem";

View file

@ -0,0 +1,10 @@
// GENERATED CODE! DO NOT MODIFY BY HAND!
// This file was generated by [ts-rs](https://github.com/Aleph-Alpha/ts-rs). Do not edit this file manually.
import type { ModeKind } from "../ModeKind";
import type { ReasoningEffort } from "../ReasoningEffort";
/**
* EXPERIMENTAL - collaboration mode preset metadata for clients.
*/
export type CollaborationModeMask = { name: string, mode: ModeKind | null, model: string | null, reasoning_effort: ReasoningEffort | null | null, };

View file

@ -34,6 +34,7 @@ export type { CollabAgentState } from "./CollabAgentState";
export type { CollabAgentStatus } from "./CollabAgentStatus";
export type { CollabAgentTool } from "./CollabAgentTool";
export type { CollabAgentToolCallStatus } from "./CollabAgentToolCallStatus";
export type { CollaborationModeMask } from "./CollaborationModeMask";
export type { CommandAction } from "./CommandAction";
export type { CommandExecParams } from "./CommandExecParams";
export type { CommandExecResponse } from "./CommandExecResponse";

View file

@ -10,8 +10,9 @@ use codex_protocol::approvals::NetworkApprovalProtocol as CoreNetworkApprovalPro
use codex_protocol::approvals::NetworkPolicyAmendment as CoreNetworkPolicyAmendment;
use codex_protocol::approvals::NetworkPolicyRuleAction as CoreNetworkPolicyRuleAction;
use codex_protocol::config_types::CollaborationMode;
use codex_protocol::config_types::CollaborationModeMask;
use codex_protocol::config_types::CollaborationModeMask as CoreCollaborationModeMask;
use codex_protocol::config_types::ForcedLoginMethod;
use codex_protocol::config_types::ModeKind;
use codex_protocol::config_types::Personality;
use codex_protocol::config_types::ReasoningSummary;
use codex_protocol::config_types::SandboxMode as CoreSandboxMode;
@ -1409,6 +1410,30 @@ pub struct ModelListResponse {
#[ts(export_to = "v2/")]
pub struct CollaborationModeListParams {}
/// EXPERIMENTAL - collaboration mode preset metadata for clients.
///
/// App-server wire mirror of the core `CollaborationModeMask`. Unlike the
/// core type, it intentionally carries no `developer_instructions`: the
/// `collaborationMode/list` response omits built-in instructions, and clients
/// either pass `developer_instructions: null` when selecting a mode (to use
/// Codex's built-in instructions) or supply their own explicitly.
#[derive(Serialize, Deserialize, Debug, Clone, PartialEq, JsonSchema, TS)]
#[serde(rename_all = "camelCase")]
#[ts(export_to = "v2/")]
pub struct CollaborationModeMask {
    pub name: String,
    /// `None` means the preset does not pin a collaboration mode.
    pub mode: Option<ModeKind>,
    /// `None` means the preset does not pin a model.
    pub model: Option<String>,
    // Deliberately kept snake_case on the wire (exception to
    // `rename_all = "camelCase"`) so the serialized name matches the ts-rs
    // export consumed by existing clients.
    #[serde(rename = "reasoning_effort")]
    #[ts(rename = "reasoning_effort")]
    /// Outer `None` = the preset leaves reasoning effort untouched; inner
    /// `None` = the preset explicitly clears it.
    pub reasoning_effort: Option<Option<ReasoningEffort>>,
}
impl From<CoreCollaborationModeMask> for CollaborationModeMask {
    /// Converts from the core mask, dropping `developer_instructions`
    /// (omitted from this wire type by design — see the type-level docs).
    fn from(value: CoreCollaborationModeMask) -> Self {
        Self {
            name: value.name,
            mode: value.mode,
            model: value.model,
            reasoning_effort: value.reasoning_effort,
        }
    }
}
/// EXPERIMENTAL - collaboration mode presets response.
#[derive(Serialize, Deserialize, Debug, Clone, PartialEq, JsonSchema, TS)]
#[serde(rename_all = "camelCase")]

View file

@ -144,7 +144,7 @@ Example with notification opt-out:
- `command/exec` — run a single command under the server sandbox without starting a thread/turn (handy for utilities and validation).
- `model/list` — list available models (set `includeHidden: true` to include entries with `hidden: true`), with reasoning effort options and optional `upgrade` model ids.
- `experimentalFeature/list` — list feature flags with stage metadata (`beta`, `underDevelopment`, `stable`, etc.), enabled/default-enabled state, and cursor pagination. For non-beta flags, `displayName`/`description`/`announcement` are `null`.
- `collaborationMode/list` — list available collaboration mode presets (experimental, no pagination).
- `collaborationMode/list` — list available collaboration mode presets (experimental, no pagination). This response omits built-in developer instructions; clients should either pass `settings.developer_instructions: null` when setting a mode to use Codex's built-in instructions, or provide their own instructions explicitly.
- `skills/list` — list skills for one or more `cwd` values (optional `forceReload`).
- `skills/remote/list` — list public remote skills (**under development; do not call from production clients yet**).
- `skills/remote/export` — download a remote skill by `hazelnutId` into `skills` under `codex_home` (**under development; do not call from production clients yet**).

View file

@ -222,6 +222,7 @@ use codex_core::find_thread_path_by_id_str;
use codex_core::git_info::git_diff_to_remote;
use codex_core::mcp::collect_mcp_snapshot;
use codex_core::mcp::group_tools_by_server;
use codex_core::models_manager::collaboration_mode_presets::CollaborationModesConfig;
use codex_core::parse_cursor;
use codex_core::read_head_for_summary;
use codex_core::read_session_meta_line;
@ -479,11 +480,13 @@ impl CodexMessageProcessor {
fn normalize_turn_start_collaboration_mode(
&self,
mut collaboration_mode: CollaborationMode,
collaboration_modes_config: CollaborationModesConfig,
) -> CollaborationMode {
if collaboration_mode.settings.developer_instructions.is_none()
&& let Some(instructions) = self
.thread_manager
.list_collaboration_modes()
.get_models_manager()
.list_collaboration_modes_for_config(collaboration_modes_config)
.into_iter()
.find(|preset| preset.mode == Some(collaboration_mode.mode))
.and_then(|preset| preset.developer_instructions.flatten())
@ -3909,7 +3912,11 @@ impl CodexMessageProcessor {
params: CollaborationModeListParams,
) {
let CollaborationModeListParams {} = params;
let items = thread_manager.list_collaboration_modes();
let items = thread_manager
.list_collaboration_modes()
.into_iter()
.map(Into::into)
.collect();
let response = CollaborationModeListResponse { data: items };
outgoing.send_response(request_id, response).await;
}
@ -5568,9 +5575,12 @@ impl CodexMessageProcessor {
}
};
let collaboration_mode = params
.collaboration_mode
.map(|mode| self.normalize_turn_start_collaboration_mode(mode));
let collaboration_modes_config = CollaborationModesConfig {
default_mode_request_user_input: thread.enabled(Feature::DefaultModeRequestUserInput),
};
let collaboration_mode = params.collaboration_mode.map(|mode| {
self.normalize_turn_start_collaboration_mode(mode, collaboration_modes_config)
});
// Map v2 input items to core input items.
let mapped_items: Vec<CoreInputItem> = params

View file

@ -49,6 +49,7 @@ use codex_core::default_client::USER_AGENT_SUFFIX;
use codex_core::default_client::get_codex_user_agent;
use codex_core::default_client::set_default_client_residency_requirement;
use codex_core::default_client::set_default_originator;
use codex_core::models_manager::collaboration_mode_presets::CollaborationModesConfig;
use codex_feedback::CodexFeedback;
use codex_protocol::ThreadId;
use codex_protocol::protocol::SessionSource;
@ -182,6 +183,11 @@ impl MessageProcessor {
auth_manager.clone(),
SessionSource::VSCode,
config.model_catalog.clone(),
CollaborationModesConfig {
default_mode_request_user_input: config
.features
.enabled(codex_core::features::Feature::DefaultModeRequestUserInput),
},
));
let cloud_requirements = Arc::new(RwLock::new(cloud_requirements));
let codex_message_processor = CodexMessageProcessor::new(CodexMessageProcessorArgs {

View file

@ -13,11 +13,10 @@ use app_test_support::McpProcess;
use app_test_support::to_response;
use codex_app_server_protocol::CollaborationModeListParams;
use codex_app_server_protocol::CollaborationModeListResponse;
use codex_app_server_protocol::CollaborationModeMask;
use codex_app_server_protocol::JSONRPCResponse;
use codex_app_server_protocol::RequestId;
use codex_core::test_support::builtin_collaboration_mode_presets;
use codex_protocol::config_types::CollaborationModeMask;
use codex_protocol::config_types::ModeKind;
use pretty_assertions::assert_eq;
use tempfile::TempDir;
use tokio::time::timeout;
@ -33,7 +32,7 @@ async fn list_collaboration_modes_returns_presets() -> Result<()> {
timeout(DEFAULT_TIMEOUT, mcp.initialize()).await??;
let request_id = mcp
.send_list_collaboration_modes_request(CollaborationModeListParams {})
.send_list_collaboration_modes_request(CollaborationModeListParams::default())
.await?;
let response: JSONRPCResponse = timeout(
@ -45,28 +44,15 @@ async fn list_collaboration_modes_returns_presets() -> Result<()> {
let CollaborationModeListResponse { data: items } =
to_response::<CollaborationModeListResponse>(response)?;
let expected = vec![plan_preset(), default_preset()];
let expected: Vec<CollaborationModeMask> = builtin_collaboration_mode_presets()
.into_iter()
.map(|preset| CollaborationModeMask {
name: preset.name,
mode: preset.mode,
model: preset.model,
reasoning_effort: preset.reasoning_effort,
})
.collect();
assert_eq!(expected, items);
Ok(())
}
/// Builds the plan preset that the list response is expected to return.
///
/// If the defaults change in the app server, this helper should be updated alongside the
/// contract, or the test will fail in ways that imply a regression in the API.
fn plan_preset() -> CollaborationModeMask {
let presets = builtin_collaboration_mode_presets();
presets
.into_iter()
.find(|p| p.mode == Some(ModeKind::Plan))
.unwrap()
}
/// Builds the default preset that the list response is expected to return.
fn default_preset() -> CollaborationModeMask {
let presets = builtin_collaboration_mode_presets();
presets
.into_iter()
.find(|p| p.mode == Some(ModeKind::Default))
.unwrap()
}

View file

@ -123,9 +123,6 @@ sandbox_mode = "read-only"
model_provider = "mock_provider"
[features]
collaboration_modes = true
[model_providers.mock_provider]
name = "Mock provider for test"
base_url = "{server_uri}/v1"

View file

@ -48,7 +48,9 @@ use codex_protocol::openai_models::ReasoningEffort;
use core_test_support::responses;
use core_test_support::skip_if_no_network;
use pretty_assertions::assert_eq;
use serde_json::json;
use std::collections::BTreeMap;
use std::collections::HashMap;
use std::path::Path;
use tempfile::TempDir;
use tokio::time::timeout;
@ -352,7 +354,7 @@ async fn turn_start_accepts_collaboration_mode_override_v2() -> Result<()> {
codex_home.path(),
&server.uri(),
"never",
&BTreeMap::default(),
&BTreeMap::from([(Feature::DefaultModeRequestUserInput, true)]),
)?;
let mut mcp = McpProcess::new(codex_home.path()).await?;
@ -412,7 +414,92 @@ async fn turn_start_accepts_collaboration_mode_override_v2() -> Result<()> {
let payload = request.body_json();
assert_eq!(payload["model"].as_str(), Some("mock-model-collab"));
let payload_text = payload.to_string();
assert!(payload_text.contains("The `request_user_input` tool is unavailable in Default mode."));
assert!(payload_text.contains("The `request_user_input` tool is available in Default mode."));
Ok(())
}
#[tokio::test]
async fn turn_start_uses_thread_feature_overrides_for_collaboration_mode_instructions_v2()
-> Result<()> {
skip_if_no_network!(Ok(()));
let server = responses::start_mock_server().await;
let body = responses::sse(vec![
responses::ev_response_created("resp-1"),
responses::ev_assistant_message("msg-1", "Done"),
responses::ev_completed("resp-1"),
]);
let response_mock = responses::mount_sse_once(&server, body).await;
let codex_home = TempDir::new()?;
create_config_toml(
codex_home.path(),
&server.uri(),
"never",
&BTreeMap::default(),
)?;
let mut mcp = McpProcess::new(codex_home.path()).await?;
timeout(DEFAULT_READ_TIMEOUT, mcp.initialize()).await??;
let thread_req = mcp
.send_thread_start_request(ThreadStartParams {
model: Some("gpt-5.2-codex".to_string()),
config: Some(HashMap::from([(
"features.default_mode_request_user_input".to_string(),
json!(true),
)])),
..Default::default()
})
.await?;
let thread_resp: JSONRPCResponse = timeout(
DEFAULT_READ_TIMEOUT,
mcp.read_stream_until_response_message(RequestId::Integer(thread_req)),
)
.await??;
let ThreadStartResponse { thread, .. } = to_response::<ThreadStartResponse>(thread_resp)?;
let collaboration_mode = CollaborationMode {
mode: ModeKind::Default,
settings: Settings {
model: "mock-model-collab".to_string(),
reasoning_effort: Some(ReasoningEffort::High),
developer_instructions: None,
},
};
let turn_req = mcp
.send_turn_start_request(TurnStartParams {
thread_id: thread.id.clone(),
input: vec![V2UserInput::Text {
text: "Hello".to_string(),
text_elements: Vec::new(),
}],
model: Some("mock-model-override".to_string()),
effort: Some(ReasoningEffort::Low),
summary: Some(ReasoningSummary::Auto),
output_schema: None,
collaboration_mode: Some(collaboration_mode),
..Default::default()
})
.await?;
let turn_resp: JSONRPCResponse = timeout(
DEFAULT_READ_TIMEOUT,
mcp.read_stream_until_response_message(RequestId::Integer(turn_req)),
)
.await??;
let _turn: TurnStartResponse = to_response::<TurnStartResponse>(turn_resp)?;
timeout(
DEFAULT_READ_TIMEOUT,
mcp.read_stream_until_notification_message("turn/completed"),
)
.await??;
let request = response_mock.single_request();
let payload_text = request.body_json().to_string();
assert!(payload_text.contains("The `request_user_input` tool is available in Default mode."));
Ok(())
}

View file

@ -328,6 +328,9 @@
"connectors": {
"type": "boolean"
},
"default_mode_request_user_input": {
"type": "boolean"
},
"elevated_windows_sandbox": {
"type": "boolean"
},
@ -1621,6 +1624,9 @@
"connectors": {
"type": "boolean"
},
"default_mode_request_user_input": {
"type": "boolean"
},
"elevated_windows_sandbox": {
"type": "boolean"
},

View file

@ -29,6 +29,8 @@ use crate::features::FEATURES;
use crate::features::Feature;
use crate::features::Features;
use crate::features::maybe_push_unstable_features_warning;
#[cfg(test)]
use crate::models_manager::collaboration_mode_presets::CollaborationModesConfig;
use crate::models_manager::manager::ModelsManager;
use crate::parse_command::parse_command;
use crate::parse_turn_item;
@ -8293,6 +8295,7 @@ mod tests {
config.codex_home.clone(),
auth_manager.clone(),
None,
CollaborationModesConfig::default(),
));
let model = ModelsManager::get_model_offline_for_tests(config.model.as_deref());
let model_info =
@ -8369,6 +8372,7 @@ mod tests {
config.codex_home.clone(),
auth_manager.clone(),
None,
CollaborationModesConfig::default(),
));
let agent_control = AgentControl::default();
let exec_policy = ExecPolicyManager::default();
@ -8527,6 +8531,7 @@ mod tests {
config.codex_home.clone(),
auth_manager.clone(),
None,
CollaborationModesConfig::default(),
));
let agent_control = AgentControl::default();
let exec_policy = ExecPolicyManager::default();

View file

@ -137,6 +137,8 @@ pub enum Feature {
SkillApproval,
/// Steer feature flag - when enabled, Enter submits immediately instead of queuing.
Steer,
/// Allow request_user_input in Default collaboration mode.
DefaultModeRequestUserInput,
/// Enable collaboration modes (Plan, Default).
/// Kept for config backward compatibility; behavior is always collaboration-modes-enabled.
CollaborationModes,
@ -639,6 +641,12 @@ pub const FEATURES: &[FeatureSpec] = &[
stage: Stage::Stable,
default_enabled: true,
},
FeatureSpec {
id: Feature::DefaultModeRequestUserInput,
key: "default_mode_request_user_input",
stage: Stage::UnderDevelopment,
default_enabled: false,
},
FeatureSpec {
id: Feature::CollaborationModes,
key: "collaboration_modes",

View file

@ -8,9 +8,23 @@ const COLLABORATION_MODE_DEFAULT: &str =
include_str!("../../templates/collaboration_mode/default.md");
const KNOWN_MODE_NAMES_PLACEHOLDER: &str = "{{KNOWN_MODE_NAMES}}";
const REQUEST_USER_INPUT_AVAILABILITY_PLACEHOLDER: &str = "{{REQUEST_USER_INPUT_AVAILABILITY}}";
const ASKING_QUESTIONS_GUIDANCE_PLACEHOLDER: &str = "{{ASKING_QUESTIONS_GUIDANCE}}";
pub(crate) fn builtin_collaboration_mode_presets() -> Vec<CollaborationModeMask> {
vec![plan_preset(), default_preset()]
/// Feature flags that shape collaboration-mode behavior.
///
/// Mode-related toggles are grouped in this one struct so that future
/// collaboration-mode capabilities can be added without threading new
/// parameters through every constructor and call-site signature.
#[derive(Clone, Copy, Debug, Default, Eq, PartialEq)]
pub struct CollaborationModesConfig {
    /// When `true`, the `request_user_input` tool is offered in Default mode.
    pub default_mode_request_user_input: bool,
}
/// Returns the built-in collaboration-mode presets — Plan first, then
/// Default — with the Default preset's instructions tailored to the flags in
/// `collaboration_modes_config`.
pub(crate) fn builtin_collaboration_mode_presets(
    collaboration_modes_config: CollaborationModesConfig,
) -> Vec<CollaborationModeMask> {
    Vec::from([plan_preset(), default_preset(collaboration_modes_config)])
}
fn plan_preset() -> CollaborationModeMask {
@ -23,26 +37,35 @@ fn plan_preset() -> CollaborationModeMask {
}
}
fn default_preset() -> CollaborationModeMask {
fn default_preset(collaboration_modes_config: CollaborationModesConfig) -> CollaborationModeMask {
CollaborationModeMask {
name: ModeKind::Default.display_name().to_string(),
mode: Some(ModeKind::Default),
model: None,
reasoning_effort: None,
developer_instructions: Some(Some(default_mode_instructions())),
developer_instructions: Some(Some(default_mode_instructions(collaboration_modes_config))),
}
}
fn default_mode_instructions() -> String {
fn default_mode_instructions(collaboration_modes_config: CollaborationModesConfig) -> String {
let known_mode_names = format_mode_names(&TUI_VISIBLE_COLLABORATION_MODES);
let request_user_input_availability =
request_user_input_availability_message(ModeKind::Default);
let request_user_input_availability = request_user_input_availability_message(
ModeKind::Default,
collaboration_modes_config.default_mode_request_user_input,
);
let asking_questions_guidance = asking_questions_guidance_message(
collaboration_modes_config.default_mode_request_user_input,
);
COLLABORATION_MODE_DEFAULT
.replace(KNOWN_MODE_NAMES_PLACEHOLDER, &known_mode_names)
.replace(
REQUEST_USER_INPUT_AVAILABILITY_PLACEHOLDER,
&request_user_input_availability,
)
.replace(
ASKING_QUESTIONS_GUIDANCE_PLACEHOLDER,
&asking_questions_guidance,
)
}
fn format_mode_names(modes: &[ModeKind]) -> String {
@ -55,9 +78,14 @@ fn format_mode_names(modes: &[ModeKind]) -> String {
}
}
fn request_user_input_availability_message(mode: ModeKind) -> String {
fn request_user_input_availability_message(
mode: ModeKind,
default_mode_request_user_input: bool,
) -> String {
let mode_name = mode.display_name();
if mode.allows_request_user_input() {
if mode.allows_request_user_input()
|| (default_mode_request_user_input && mode == ModeKind::Default)
{
format!("The `request_user_input` tool is available in {mode_name} mode.")
} else {
format!(
@ -66,6 +94,14 @@ fn request_user_input_availability_message(mode: ModeKind) -> String {
}
}
/// Builds the Default-mode guidance paragraph about asking questions.
///
/// Both variants share the same "assume first" preamble and closing rule;
/// only the recommended channel for an unavoidable question differs: the
/// `request_user_input` tool when the feature flag is on, a plain-text
/// question otherwise.
fn asking_questions_guidance_message(default_mode_request_user_input: bool) -> String {
    const PREAMBLE: &str = "In Default mode, strongly prefer making reasonable assumptions and executing the user's request rather than stopping to ask questions. If you absolutely must ask a question because the answer cannot be discovered from local context and a reasonable assumption would be risky, ";
    const CLOSING: &str = " Never write a multiple choice question as a textual assistant message.";
    let channel = if default_mode_request_user_input {
        "prefer using the `request_user_input` tool rather than writing a multiple choice question as a textual assistant message."
    } else {
        "ask the user directly with a concise plain-text question."
    };
    format!("{PREAMBLE}{channel}{CLOSING}")
}
#[cfg(test)]
mod tests {
use super::*;
@ -74,7 +110,10 @@ mod tests {
#[test]
fn preset_names_use_mode_display_names() {
assert_eq!(plan_preset().name, ModeKind::Plan.display_name());
assert_eq!(default_preset().name, ModeKind::Default.display_name());
assert_eq!(
default_preset(CollaborationModesConfig::default()).name,
ModeKind::Default.display_name()
);
assert_eq!(
plan_preset().reasoning_effort,
Some(Some(ReasoningEffort::Medium))
@ -83,20 +122,38 @@ mod tests {
#[test]
fn default_mode_instructions_replace_mode_names_placeholder() {
let default_instructions = default_preset()
.developer_instructions
.expect("default preset should include instructions")
.expect("default instructions should be set");
let default_instructions = default_preset(CollaborationModesConfig {
default_mode_request_user_input: true,
})
.developer_instructions
.expect("default preset should include instructions")
.expect("default instructions should be set");
assert!(!default_instructions.contains(KNOWN_MODE_NAMES_PLACEHOLDER));
assert!(!default_instructions.contains(REQUEST_USER_INPUT_AVAILABILITY_PLACEHOLDER));
assert!(!default_instructions.contains(ASKING_QUESTIONS_GUIDANCE_PLACEHOLDER));
let known_mode_names = format_mode_names(&TUI_VISIBLE_COLLABORATION_MODES);
let expected_snippet = format!("Known mode names are {known_mode_names}.");
assert!(default_instructions.contains(&expected_snippet));
let expected_availability_message =
request_user_input_availability_message(ModeKind::Default);
request_user_input_availability_message(ModeKind::Default, true);
assert!(default_instructions.contains(&expected_availability_message));
assert!(default_instructions.contains("prefer using the `request_user_input` tool"));
}
#[test]
fn default_mode_instructions_use_plain_text_questions_when_feature_disabled() {
let default_instructions = default_preset(CollaborationModesConfig::default())
.developer_instructions
.expect("default preset should include instructions")
.expect("default instructions should be set");
assert!(!default_instructions.contains("prefer using the `request_user_input` tool"));
assert!(
default_instructions
.contains("ask the user directly with a concise plain-text question")
);
}
}

View file

@ -8,6 +8,7 @@ use crate::default_client::build_reqwest_client;
use crate::error::CodexErr;
use crate::error::Result as CoreResult;
use crate::model_provider_info::ModelProviderInfo;
use crate::models_manager::collaboration_mode_presets::CollaborationModesConfig;
use crate::models_manager::collaboration_mode_presets::builtin_collaboration_mode_presets;
use crate::models_manager::model_info;
use codex_api::ModelsClient;
@ -55,6 +56,7 @@ enum CatalogMode {
pub struct ModelsManager {
remote_models: RwLock<Vec<ModelInfo>>,
catalog_mode: CatalogMode,
collaboration_modes_config: CollaborationModesConfig,
auth_manager: Arc<AuthManager>,
etag: RwLock<Option<String>>,
cache_manager: ModelsCacheManager,
@ -71,6 +73,7 @@ impl ModelsManager {
codex_home: PathBuf,
auth_manager: Arc<AuthManager>,
model_catalog: Option<ModelsResponse>,
collaboration_modes_config: CollaborationModesConfig,
) -> Self {
let cache_path = codex_home.join(MODEL_CACHE_FILE);
let cache_manager = ModelsCacheManager::new(cache_path, DEFAULT_MODEL_CACHE_TTL);
@ -88,6 +91,7 @@ impl ModelsManager {
Self {
remote_models: RwLock::new(remote_models),
catalog_mode,
collaboration_modes_config,
auth_manager,
etag: RwLock::new(None),
cache_manager,
@ -110,7 +114,14 @@ impl ModelsManager {
///
/// Returns a static set of presets seeded with the configured model.
pub fn list_collaboration_modes(&self) -> Vec<CollaborationModeMask> {
builtin_collaboration_mode_presets()
self.list_collaboration_modes_for_config(self.collaboration_modes_config)
}
pub fn list_collaboration_modes_for_config(
&self,
collaboration_modes_config: CollaborationModesConfig,
) -> Vec<CollaborationModeMask> {
builtin_collaboration_mode_presets(collaboration_modes_config)
}
/// Attempt to list models without blocking, using the current cached state.
@ -378,6 +389,7 @@ impl ModelsManager {
.unwrap_or_else(|err| panic!("failed to load bundled models.json: {err}")),
),
catalog_mode: CatalogMode::Default,
collaboration_modes_config: CollaborationModesConfig::default(),
auth_manager,
etag: RwLock::new(None),
cache_manager,
@ -504,7 +516,12 @@ mod tests {
.expect("load default test config");
let auth_manager =
AuthManager::from_auth_for_testing(CodexAuth::from_api_key("Test API Key"));
let manager = ModelsManager::new(codex_home.path().to_path_buf(), auth_manager, None);
let manager = ModelsManager::new(
codex_home.path().to_path_buf(),
auth_manager,
None,
CollaborationModesConfig::default(),
);
let known_slug = manager
.get_remote_models()
.await
@ -541,6 +558,7 @@ mod tests {
Some(ModelsResponse {
models: vec![remote_model("gpt-overlay", "Overlay", 0)],
}),
CollaborationModesConfig::default(),
);
let model_info = manager
@ -564,7 +582,12 @@ mod tests {
.expect("load default test config");
let auth_manager =
AuthManager::from_auth_for_testing(CodexAuth::from_api_key("Test API Key"));
let manager = ModelsManager::new(codex_home.path().to_path_buf(), auth_manager, None);
let manager = ModelsManager::new(
codex_home.path().to_path_buf(),
auth_manager,
None,
CollaborationModesConfig::default(),
);
let known_slug = manager
.get_remote_models()
.await
@ -590,7 +613,12 @@ mod tests {
.expect("load default test config");
let auth_manager =
AuthManager::from_auth_for_testing(CodexAuth::from_api_key("Test API Key"));
let manager = ModelsManager::new(codex_home.path().to_path_buf(), auth_manager, None);
let manager = ModelsManager::new(
codex_home.path().to_path_buf(),
auth_manager,
None,
CollaborationModesConfig::default(),
);
let known_slug = manager
.get_remote_models()
.await

View file

@ -85,5 +85,7 @@ pub fn all_model_presets() -> &'static Vec<ModelPreset> {
}
pub fn builtin_collaboration_mode_presets() -> Vec<CollaborationModeMask> {
collaboration_mode_presets::builtin_collaboration_mode_presets()
collaboration_mode_presets::builtin_collaboration_mode_presets(
collaboration_mode_presets::CollaborationModesConfig::default(),
)
}

View file

@ -11,6 +11,7 @@ use crate::error::CodexErr;
use crate::error::Result as CodexResult;
use crate::file_watcher::FileWatcher;
use crate::file_watcher::FileWatcherEvent;
use crate::models_manager::collaboration_mode_presets::CollaborationModesConfig;
use crate::models_manager::manager::ModelsManager;
use crate::protocol::Event;
use crate::protocol::EventMsg;
@ -143,6 +144,7 @@ impl ThreadManager {
auth_manager: Arc<AuthManager>,
session_source: SessionSource,
model_catalog: Option<ModelsResponse>,
collaboration_modes_config: CollaborationModesConfig,
) -> Self {
let (thread_created_tx, _) = broadcast::channel(THREAD_CREATED_CHANNEL_CAPACITY);
let skills_manager = Arc::new(SkillsManager::new(codex_home.clone()));
@ -155,6 +157,7 @@ impl ThreadManager {
codex_home,
auth_manager.clone(),
model_catalog,
collaboration_modes_config,
)),
skills_manager,
file_watcher,

View file

@ -12,10 +12,15 @@ use codex_protocol::config_types::ModeKind;
use codex_protocol::config_types::TUI_VISIBLE_COLLABORATION_MODES;
use codex_protocol::request_user_input::RequestUserInputArgs;
fn format_allowed_modes() -> String {
/// Reports whether the `request_user_input` tool may be used in `mode`.
///
/// Default mode is additionally unlocked by the
/// `default_mode_request_user_input` feature flag; every other mode falls
/// back on its intrinsic capability.
fn request_user_input_is_available(mode: ModeKind, default_mode_request_user_input: bool) -> bool {
    if default_mode_request_user_input && mode == ModeKind::Default {
        return true;
    }
    mode.allows_request_user_input()
}
fn format_allowed_modes(default_mode_request_user_input: bool) -> String {
let mode_names: Vec<&str> = TUI_VISIBLE_COLLABORATION_MODES
.into_iter()
.filter(|mode| mode.allows_request_user_input())
.filter(|mode| request_user_input_is_available(*mode, default_mode_request_user_input))
.map(ModeKind::display_name)
.collect();
@ -27,8 +32,11 @@ fn format_allowed_modes() -> String {
}
}
pub(crate) fn request_user_input_unavailable_message(mode: ModeKind) -> Option<String> {
if mode.allows_request_user_input() {
pub(crate) fn request_user_input_unavailable_message(
mode: ModeKind,
default_mode_request_user_input: bool,
) -> Option<String> {
if request_user_input_is_available(mode, default_mode_request_user_input) {
None
} else {
let mode_name = mode.display_name();
@ -38,14 +46,16 @@ pub(crate) fn request_user_input_unavailable_message(mode: ModeKind) -> Option<S
}
}
pub(crate) fn request_user_input_tool_description() -> String {
let allowed_modes = format_allowed_modes();
/// Builds the tool description shown to the model, naming the modes in which
/// `request_user_input` is currently available (the Default entry appears
/// only when the feature flag enables it).
pub(crate) fn request_user_input_tool_description(default_mode_request_user_input: bool) -> String {
    format!(
        "Request user input for one to three short questions and wait for the response. This tool is only available in {}.",
        format_allowed_modes(default_mode_request_user_input)
    )
}
pub struct RequestUserInputHandler;
/// Tool handler for `request_user_input`.
pub struct RequestUserInputHandler {
    /// Feature flag that unlocks the tool in Default collaboration mode.
    pub default_mode_request_user_input: bool,
}
#[async_trait]
impl ToolHandler for RequestUserInputHandler {
@ -72,7 +82,9 @@ impl ToolHandler for RequestUserInputHandler {
};
let mode = session.collaboration_mode().await.mode;
if let Some(message) = request_user_input_unavailable_message(mode) {
if let Some(message) =
request_user_input_unavailable_message(mode, self.default_mode_request_user_input)
{
return Err(FunctionCallError::RespondToModel(message));
}
@ -117,7 +129,7 @@ mod tests {
use pretty_assertions::assert_eq;
#[test]
fn request_user_input_mode_availability_is_plan_only() {
fn request_user_input_mode_availability_defaults_to_plan_only() {
assert!(ModeKind::Plan.allows_request_user_input());
assert!(!ModeKind::Default.allows_request_user_input());
assert!(!ModeKind::Execute.allows_request_user_input());
@ -125,27 +137,38 @@ mod tests {
}
#[test]
fn request_user_input_unavailable_messages_use_default_name_for_default_modes() {
assert_eq!(request_user_input_unavailable_message(ModeKind::Plan), None);
fn request_user_input_unavailable_messages_respect_default_mode_feature_flag() {
assert_eq!(
request_user_input_unavailable_message(ModeKind::Default),
request_user_input_unavailable_message(ModeKind::Plan, false),
None
);
assert_eq!(
request_user_input_unavailable_message(ModeKind::Default, false),
Some("request_user_input is unavailable in Default mode".to_string())
);
assert_eq!(
request_user_input_unavailable_message(ModeKind::Execute),
request_user_input_unavailable_message(ModeKind::Default, true),
None
);
assert_eq!(
request_user_input_unavailable_message(ModeKind::Execute, false),
Some("request_user_input is unavailable in Execute mode".to_string())
);
assert_eq!(
request_user_input_unavailable_message(ModeKind::PairProgramming),
request_user_input_unavailable_message(ModeKind::PairProgramming, false),
Some("request_user_input is unavailable in Pair Programming mode".to_string())
);
}
#[test]
fn request_user_input_tool_description_mentions_plan_only() {
fn request_user_input_tool_description_mentions_available_modes() {
assert_eq!(
request_user_input_tool_description(),
request_user_input_tool_description(false),
"Request user input for one to three short questions and wait for the response. This tool is only available in Plan mode.".to_string()
);
assert_eq!(
request_user_input_tool_description(true),
"Request user input for one to three short questions and wait for the response. This tool is only available in Default or Plan mode.".to_string()
);
}
}

View file

@ -6,6 +6,7 @@ use crate::config::AgentRoleConfig;
use crate::features::Feature;
use crate::features::Features;
use crate::mcp_connection_manager::ToolInfo;
use crate::models_manager::collaboration_mode_presets::CollaborationModesConfig;
use crate::tools::handlers::PLAN_TOOL;
use crate::tools::handlers::SEARCH_TOOL_BM25_DEFAULT_LIMIT;
use crate::tools::handlers::SEARCH_TOOL_BM25_TOOL_NAME;
@ -54,7 +55,7 @@ pub(crate) struct ToolsConfig {
pub js_repl_enabled: bool,
pub js_repl_tools_only: bool,
pub collab_tools: bool,
pub collaboration_modes_tools: bool,
pub default_mode_request_user_input: bool,
pub experimental_supported_tools: Vec<String>,
pub agent_jobs_tools: bool,
pub agent_jobs_worker_tools: bool,
@ -80,7 +81,8 @@ impl ToolsConfig {
let include_js_repl_tools_only =
include_js_repl && features.enabled(Feature::JsReplToolsOnly);
let include_collab_tools = features.enabled(Feature::Collab);
let include_collaboration_modes_tools = features.enabled(Feature::CollaborationModes);
let include_default_mode_request_user_input =
features.enabled(Feature::DefaultModeRequestUserInput);
let include_search_tool = features.enabled(Feature::Apps);
let include_agent_jobs = include_collab_tools && features.enabled(Feature::Sqlite);
let request_permission_enabled = features.enabled(Feature::RequestPermissions);
@ -137,7 +139,7 @@ impl ToolsConfig {
js_repl_enabled: include_js_repl,
js_repl_tools_only: include_js_repl_tools_only,
collab_tools: include_collab_tools,
collaboration_modes_tools: include_collaboration_modes_tools,
default_mode_request_user_input: include_default_mode_request_user_input,
experimental_supported_tools: model_info.experimental_supported_tools.clone(),
agent_jobs_tools: include_agent_jobs,
agent_jobs_worker_tools,
@ -872,7 +874,9 @@ fn create_wait_tool() -> ToolSpec {
})
}
fn create_request_user_input_tool() -> ToolSpec {
fn create_request_user_input_tool(
collaboration_modes_config: CollaborationModesConfig,
) -> ToolSpec {
let mut option_props = BTreeMap::new();
option_props.insert(
"label".to_string(),
@ -943,7 +947,9 @@ fn create_request_user_input_tool() -> ToolSpec {
ToolSpec::Function(ResponsesApiTool {
name: "request_user_input".to_string(),
description: request_user_input_tool_description(),
description: request_user_input_tool_description(
collaboration_modes_config.default_mode_request_user_input,
),
strict: false,
parameters: JsonSchema::Object {
properties,
@ -1664,7 +1670,9 @@ pub(crate) fn build_specs(
let mcp_handler = Arc::new(McpHandler);
let mcp_resource_handler = Arc::new(McpResourceHandler);
let shell_command_handler = Arc::new(ShellCommandHandler::from(config.shell_command_backend));
let request_user_input_handler = Arc::new(RequestUserInputHandler);
let request_user_input_handler = Arc::new(RequestUserInputHandler {
default_mode_request_user_input: config.default_mode_request_user_input,
});
let search_tool_handler = Arc::new(SearchToolBm25Handler);
let js_repl_handler = Arc::new(JsReplHandler);
let js_repl_reset_handler = Arc::new(JsReplResetHandler);
@ -1727,10 +1735,10 @@ pub(crate) fn build_specs(
builder.register_handler("js_repl_reset", js_repl_reset_handler);
}
if config.collaboration_modes_tools {
builder.push_spec(create_request_user_input_tool());
builder.register_handler("request_user_input", request_user_input_handler);
}
builder.push_spec(create_request_user_input_tool(CollaborationModesConfig {
default_mode_request_user_input: config.default_mode_request_user_input,
}));
builder.register_handler("request_user_input", request_user_input_handler);
if config.search_tool
&& let Some(app_tools) = app_tools
@ -2024,7 +2032,6 @@ mod tests {
let model_info = model_info_from_models_json("gpt-5-codex");
let mut features = Features::with_defaults();
features.enable(Feature::UnifiedExec);
features.enable(Feature::CollaborationModes);
let config = ToolsConfig::new(&ToolsConfigParams {
model_info: &model_info,
features: &features,
@ -2055,7 +2062,7 @@ mod tests {
create_exec_command_tool(true, false),
create_write_stdin_tool(),
PLAN_TOOL.clone(),
create_request_user_input_tool(),
create_request_user_input_tool(CollaborationModesConfig::default()),
create_apply_patch_freeform_tool(),
ToolSpec::WebSearch {
external_web_access: Some(true),
@ -2087,8 +2094,6 @@ mod tests {
ModelsManager::construct_model_info_offline_for_tests("gpt-5-codex", &config);
let mut features = Features::with_defaults();
features.enable(Feature::Collab);
features.enable(Feature::CollaborationModes);
features.enable(Feature::Sqlite);
let tools_config = ToolsConfig::new(&ToolsConfigParams {
model_info: &model_info,
features: &features,
@ -2115,7 +2120,6 @@ mod tests {
ModelsManager::construct_model_info_offline_for_tests("gpt-5-codex", &config);
let mut features = Features::with_defaults();
features.enable(Feature::Collab);
features.enable(Feature::CollaborationModes);
features.enable(Feature::Sqlite);
let tools_config = ToolsConfig::new(&ToolsConfigParams {
model_info: &model_info,
@ -2141,12 +2145,11 @@ mod tests {
}
#[test]
fn request_user_input_requires_collaboration_modes_feature() {
fn request_user_input_description_reflects_default_mode_feature_flag() {
let config = test_config();
let model_info =
ModelsManager::construct_model_info_offline_for_tests("gpt-5-codex", &config);
let mut features = Features::with_defaults();
features.disable(Feature::CollaborationModes);
let tools_config = ToolsConfig::new(&ToolsConfigParams {
model_info: &model_info,
features: &features,
@ -2154,12 +2157,13 @@ mod tests {
session_source: SessionSource::Cli,
});
let (tools, _) = build_specs(&tools_config, None, None, &[]).build();
assert!(
!tools.iter().any(|t| t.spec.name() == "request_user_input"),
"request_user_input should be disabled when collaboration_modes feature is off"
let request_user_input_tool = find_tool(&tools, "request_user_input");
assert_eq!(
request_user_input_tool.spec,
create_request_user_input_tool(CollaborationModesConfig::default())
);
features.enable(Feature::CollaborationModes);
features.enable(Feature::DefaultModeRequestUserInput);
let tools_config = ToolsConfig::new(&ToolsConfigParams {
model_info: &model_info,
features: &features,
@ -2167,7 +2171,13 @@ mod tests {
session_source: SessionSource::Cli,
});
let (tools, _) = build_specs(&tools_config, None, None, &[]).build();
assert_contains_tool_names(&tools, &["request_user_input"]);
let request_user_input_tool = find_tool(&tools, "request_user_input");
assert_eq!(
request_user_input_tool.spec,
create_request_user_input_tool(CollaborationModesConfig {
default_mode_request_user_input: true,
})
);
}
#[test]
@ -2335,8 +2345,7 @@ mod tests {
let config = test_config();
let model_info =
ModelsManager::construct_model_info_offline_for_tests("gpt-5-codex", &config);
let mut features = Features::with_defaults();
features.enable(Feature::CollaborationModes);
let features = Features::with_defaults();
let tools_config = ToolsConfig::new(&ToolsConfigParams {
model_info: &model_info,
features: &features,
@ -2359,8 +2368,7 @@ mod tests {
let config = test_config();
let model_info =
ModelsManager::construct_model_info_offline_for_tests("gpt-5-codex", &config);
let mut features = Features::with_defaults();
features.enable(Feature::CollaborationModes);
let features = Features::with_defaults();
let tools_config = ToolsConfig::new(&ToolsConfigParams {
model_info: &model_info,
features: &features,
@ -2381,8 +2389,7 @@ mod tests {
#[test]
fn test_build_specs_gpt5_codex_default() {
let mut features = Features::with_defaults();
features.enable(Feature::CollaborationModes);
let features = Features::with_defaults();
assert_default_model_tools(
"gpt-5-codex",
&features,
@ -2400,8 +2407,7 @@ mod tests {
#[test]
fn test_build_specs_gpt51_codex_default() {
let mut features = Features::with_defaults();
features.enable(Feature::CollaborationModes);
let features = Features::with_defaults();
assert_default_model_tools(
"gpt-5.1-codex",
&features,
@ -2421,7 +2427,6 @@ mod tests {
fn test_build_specs_gpt5_codex_unified_exec_web_search() {
let mut features = Features::with_defaults();
features.enable(Feature::UnifiedExec);
features.enable(Feature::CollaborationModes);
assert_model_tools(
"gpt-5-codex",
&features,
@ -2442,7 +2447,6 @@ mod tests {
fn test_build_specs_gpt51_codex_unified_exec_web_search() {
let mut features = Features::with_defaults();
features.enable(Feature::UnifiedExec);
features.enable(Feature::CollaborationModes);
assert_model_tools(
"gpt-5.1-codex",
&features,
@ -2461,8 +2465,7 @@ mod tests {
#[test]
fn test_gpt_5_1_codex_max_defaults() {
let mut features = Features::with_defaults();
features.enable(Feature::CollaborationModes);
let features = Features::with_defaults();
assert_default_model_tools(
"gpt-5.1-codex-max",
&features,
@ -2480,8 +2483,7 @@ mod tests {
#[test]
fn test_codex_5_1_mini_defaults() {
let mut features = Features::with_defaults();
features.enable(Feature::CollaborationModes);
let features = Features::with_defaults();
assert_default_model_tools(
"gpt-5.1-codex-mini",
&features,
@ -2499,8 +2501,7 @@ mod tests {
#[test]
fn test_gpt_5_defaults() {
let mut features = Features::with_defaults();
features.enable(Feature::CollaborationModes);
let features = Features::with_defaults();
assert_default_model_tools(
"gpt-5",
&features,
@ -2517,8 +2518,7 @@ mod tests {
#[test]
fn test_gpt_5_1_defaults() {
let mut features = Features::with_defaults();
features.enable(Feature::CollaborationModes);
let features = Features::with_defaults();
assert_default_model_tools(
"gpt-5.1",
&features,
@ -2538,7 +2538,6 @@ mod tests {
fn test_gpt_5_1_codex_max_unified_exec_web_search() {
let mut features = Features::with_defaults();
features.enable(Feature::UnifiedExec);
features.enable(Feature::CollaborationModes);
assert_model_tools(
"gpt-5.1-codex-max",
&features,

View file

@ -8,4 +8,4 @@ Your active mode changes only when new developer instructions with a different `
{{REQUEST_USER_INPUT_AVAILABILITY}}
If a decision is necessary and cannot be discovered from local context, ask the user directly. However, in Default mode you should strongly prefer executing the user's request rather than stopping to ask questions.
{{ASKING_QUESTIONS_GUIDANCE}}

View file

@ -12,6 +12,7 @@ use codex_core::built_in_model_providers;
use codex_core::default_client::originator;
use codex_core::error::CodexErr;
use codex_core::features::Feature;
use codex_core::models_manager::collaboration_mode_presets::CollaborationModesConfig;
use codex_otel::OtelManager;
use codex_otel::TelemetryAuthMode;
use codex_protocol::ThreadId;
@ -584,6 +585,11 @@ async fn prefers_apikey_when_config_prefers_apikey_even_with_chatgpt_tokens() {
auth_manager,
SessionSource::Exec,
config.model_catalog.clone(),
CollaborationModesConfig {
default_mode_request_user_input: config
.features
.enabled(Feature::DefaultModeRequestUserInput),
},
);
let NewThread { thread: codex, .. } = thread_manager
.start_thread(config)

View file

@ -1,4 +1,5 @@
use codex_core::CodexAuth;
use codex_core::models_manager::collaboration_mode_presets::CollaborationModesConfig;
use codex_core::models_manager::manager::ModelsManager;
use codex_protocol::openai_models::TruncationPolicyConfig;
use core_test_support::load_default_config_for_test;
@ -12,7 +13,12 @@ async fn offline_model_info_without_tool_output_override() {
let auth_manager = codex_core::test_support::auth_manager_from_auth(
CodexAuth::create_dummy_chatgpt_auth_for_testing(),
);
let manager = ModelsManager::new(config.codex_home.clone(), auth_manager, None);
let manager = ModelsManager::new(
config.codex_home.clone(),
auth_manager,
None,
CollaborationModesConfig::default(),
);
let model_info = manager.get_model_info("gpt-5.1", &config).await;
@ -30,7 +36,12 @@ async fn offline_model_info_with_tool_output_override() {
let auth_manager = codex_core::test_support::auth_manager_from_auth(
CodexAuth::create_dummy_chatgpt_auth_for_testing(),
);
let manager = ModelsManager::new(config.codex_home.clone(), auth_manager, None);
let manager = ModelsManager::new(
config.codex_home.clone(),
auth_manager,
None,
CollaborationModesConfig::default(),
);
let model_info = manager.get_model_info("gpt-5.1-codex", &config).await;

View file

@ -86,8 +86,10 @@ async fn request_user_input_round_trip_for_mode(mode: ModeKind) -> anyhow::Resul
session_configured,
..
} = builder
.with_config(|config| {
config.features.enable(Feature::CollaborationModes);
.with_config(move |config| {
if mode == ModeKind::Default {
config.features.enable(Feature::DefaultModeRequestUserInput);
}
})
.build(&server)
.await?;
@ -198,18 +200,13 @@ where
let server = start_mock_server().await;
let builder = test_codex();
let mut builder = test_codex();
let TestCodex {
codex,
cwd,
session_configured,
..
} = builder
.with_config(|config| {
config.features.enable(Feature::CollaborationModes);
})
.build(&server)
.await?;
} = builder.build(&server).await?;
let mode_slug = mode_name.to_lowercase().replace(' ', "-");
let call_id = format!("user-input-{mode_slug}-call");
@ -290,7 +287,7 @@ async fn request_user_input_rejected_in_execute_mode_alias() -> anyhow::Result<(
}
#[tokio::test(flavor = "multi_thread", worker_threads = 2)]
async fn request_user_input_rejected_in_default_mode() -> anyhow::Result<()> {
async fn request_user_input_rejected_in_default_mode_by_default() -> anyhow::Result<()> {
assert_request_user_input_rejected("Default", |model| CollaborationMode {
mode: ModeKind::Default,
settings: Settings {
@ -302,6 +299,11 @@ async fn request_user_input_rejected_in_default_mode() -> anyhow::Result<()> {
.await
}
#[tokio::test(flavor = "multi_thread", worker_threads = 2)]
async fn request_user_input_round_trip_in_default_mode_with_feature() -> anyhow::Result<()> {
request_user_input_round_trip_for_mode(ModeKind::Default).await
}
#[tokio::test(flavor = "multi_thread", worker_threads = 2)]
async fn request_user_input_rejected_in_pair_mode_alias() -> anyhow::Result<()> {
assert_request_user_input_rejected("Pair Programming", |model| CollaborationMode {

View file

@ -32,6 +32,7 @@ use codex_core::config_loader::ConfigLoadError;
use codex_core::config_loader::format_config_error_with_source;
use codex_core::format_exec_policy_error_with_source;
use codex_core::git_info::get_git_repo_root;
use codex_core::models_manager::collaboration_mode_presets::CollaborationModesConfig;
use codex_core::models_manager::manager::RefreshStrategy;
use codex_protocol::approvals::ElicitationAction;
use codex_protocol::config_types::SandboxMode;
@ -398,6 +399,11 @@ pub async fn run_main(cli: Cli, arg0_paths: Arg0DispatchPaths) -> anyhow::Result
auth_manager.clone(),
SessionSource::Exec,
config.model_catalog.clone(),
CollaborationModesConfig {
default_mode_request_user_input: config
.features
.enabled(codex_core::features::Feature::DefaultModeRequestUserInput),
},
));
let default_model = thread_manager
.get_models_manager()

View file

@ -6,6 +6,7 @@ use codex_core::ThreadManager;
use codex_core::config::Config;
use codex_core::default_client::USER_AGENT_SUFFIX;
use codex_core::default_client::get_codex_user_agent;
use codex_core::models_manager::collaboration_mode_presets::CollaborationModesConfig;
use codex_protocol::ThreadId;
use codex_protocol::protocol::SessionSource;
use codex_protocol::protocol::Submission;
@ -62,6 +63,11 @@ impl MessageProcessor {
auth_manager,
SessionSource::Mcp,
config.model_catalog.clone(),
CollaborationModesConfig {
default_mode_request_user_input: config
.features
.enabled(codex_core::features::Feature::DefaultModeRequestUserInput),
},
));
Self {
outgoing,

View file

@ -47,6 +47,7 @@ use codex_core::config::edit::ConfigEdit;
use codex_core::config::edit::ConfigEditsBuilder;
use codex_core::config_loader::ConfigLayerStackOrdering;
use codex_core::features::Feature;
use codex_core::models_manager::collaboration_mode_presets::CollaborationModesConfig;
use codex_core::models_manager::manager::RefreshStrategy;
use codex_core::models_manager::model_presets::HIDE_GPT_5_1_CODEX_MAX_MIGRATION_PROMPT_CONFIG;
use codex_core::models_manager::model_presets::HIDE_GPT5_1_MIGRATION_PROMPT_CONFIG;
@ -1319,6 +1320,11 @@ impl App {
auth_manager.clone(),
SessionSource::Cli,
config.model_catalog.clone(),
CollaborationModesConfig {
default_mode_request_user_input: config
.features
.enabled(codex_core::features::Feature::DefaultModeRequestUserInput),
},
));
let mut model = thread_manager
.get_models_manager()

View file

@ -25,6 +25,7 @@ use codex_core::config::types::WindowsSandboxModeToml;
use codex_core::config_loader::RequirementSource;
use codex_core::features::FEATURES;
use codex_core::features::Feature;
use codex_core::models_manager::collaboration_mode_presets::CollaborationModesConfig;
use codex_core::models_manager::manager::ModelsManager;
use codex_core::skills::model::SkillMetadata;
use codex_core::terminal::TerminalName;
@ -1636,7 +1637,12 @@ async fn make_chatwidget_manual(
let auth_manager =
codex_core::test_support::auth_manager_from_auth(CodexAuth::from_api_key("test"));
let codex_home = cfg.codex_home.clone();
let models_manager = Arc::new(ModelsManager::new(codex_home, auth_manager.clone(), None));
let models_manager = Arc::new(ModelsManager::new(
codex_home,
auth_manager.clone(),
None,
CollaborationModesConfig::default(),
));
let reasoning_effort = None;
let base_mode = CollaborationMode {
mode: ModeKind::Default,
@ -1762,6 +1768,7 @@ fn set_chatgpt_auth(chat: &mut ChatWidget) {
chat.config.codex_home.clone(),
chat.auth_manager.clone(),
None,
CollaborationModesConfig::default(),
));
}