Remove test-support feature from codex-core and replace it with explicit test toggles (#11405)

## Why

`codex-core` was being built in multiple feature-resolved permutations
because test-only behavior was modeled as crate features. For a large
crate, those permutations increase compile cost and reduce cache reuse.

## Net Change

- Removed the `test-support` crate feature and related feature wiring so
`codex-core` no longer needs separate feature shapes for test consumers.
- Standardized cross-crate test-only access behind
`codex_core::test_support`.
- External test code now imports helpers from
`codex_core::test_support`.
- Underlying implementation hooks are kept internal (`pub(crate)`)
instead of broadly public.

## Outcome

- Fewer `codex-core` build permutations.
- Better incremental cache reuse across test targets.
- No intended production behavior change.
This commit is contained in:
Michael Bolin 2026-02-10 22:44:02 -08:00 committed by GitHub
parent f6dd9e37e7
commit 476c1a7160
No known key found for this signature in database
GPG key ID: B5690EEEBB952194
36 changed files with 393 additions and 266 deletions

View file

@ -12,7 +12,7 @@ anyhow = { workspace = true }
base64 = { workspace = true }
chrono = { workspace = true }
codex-app-server-protocol = { workspace = true }
codex-core = { workspace = true, features = ["test-support"] }
codex-core = { workspace = true }
codex-protocol = { workspace = true }
codex-utils-cargo-bin = { workspace = true }
serde = { workspace = true }

View file

@ -1,6 +1,6 @@
use chrono::DateTime;
use chrono::Utc;
use codex_core::models_manager::model_presets::all_model_presets;
use codex_core::test_support::all_model_presets;
use codex_protocol::openai_models::ConfigShellToolType;
use codex_protocol::openai_models::ModelInfo;
use codex_protocol::openai_models::ModelPreset;

View file

@ -15,7 +15,7 @@ use codex_app_server_protocol::CollaborationModeListParams;
use codex_app_server_protocol::CollaborationModeListResponse;
use codex_app_server_protocol::JSONRPCResponse;
use codex_app_server_protocol::RequestId;
use codex_core::models_manager::test_builtin_collaboration_mode_presets;
use codex_core::test_support::builtin_collaboration_mode_presets;
use codex_protocol::config_types::CollaborationModeMask;
use codex_protocol::config_types::ModeKind;
use pretty_assertions::assert_eq;
@ -55,7 +55,7 @@ async fn list_collaboration_modes_returns_presets() -> Result<()> {
/// If the defaults change in the app server, this helper should be updated alongside the
/// contract, or the test will fail in ways that imply a regression in the API.
fn plan_preset() -> CollaborationModeMask {
let presets = test_builtin_collaboration_mode_presets();
let presets = builtin_collaboration_mode_presets();
presets
.into_iter()
.find(|p| p.mode == Some(ModeKind::Plan))
@ -64,7 +64,7 @@ fn plan_preset() -> CollaborationModeMask {
/// Builds the default preset that the list response is expected to return.
fn default_preset() -> CollaborationModeMask {
let presets = test_builtin_collaboration_mode_presets();
let presets = builtin_collaboration_mode_presets();
presets
.into_iter()
.find(|p| p.mode == Some(ModeKind::Default))

View file

@ -3,10 +3,6 @@ load("//:defs.bzl", "codex_rust_crate")
codex_rust_crate(
name = "core",
crate_name = "codex_core",
# TODO(mbolin): Eliminate the use of features in the version of the
# rust_library() that is used by rust_binary() rules for release artifacts
# such as the Codex CLI.
crate_features = ["test-support"],
compile_data = glob(
include = ["**"],
exclude = [

View file

@ -113,10 +113,6 @@ which = { workspace = true }
wildmatch = { workspace = true }
zip = { workspace = true }
[features]
test-support = []
[target.'cfg(target_os = "linux")'.dependencies]
keyring = { workspace = true, features = ["linux-native-async-persistent"] }
landlock = { workspace = true }

View file

@ -215,7 +215,7 @@ mod tests {
impl AgentControlHarness {
async fn new() -> Self {
let (home, config) = test_config().await;
let manager = ThreadManager::with_models_provider_and_home(
let manager = ThreadManager::with_models_provider_and_home_for_tests(
CodexAuth::from_api_key("dummy"),
config.model_provider.clone(),
config.codex_home.clone(),
@ -484,7 +484,7 @@ mod tests {
TomlValue::Integer(max_threads as i64),
)])
.await;
let manager = ThreadManager::with_models_provider_and_home(
let manager = ThreadManager::with_models_provider_and_home_for_tests(
CodexAuth::from_api_key("dummy"),
config.model_provider.clone(),
config.codex_home.clone(),
@ -527,7 +527,7 @@ mod tests {
TomlValue::Integer(max_threads as i64),
)])
.await;
let manager = ThreadManager::with_models_provider_and_home(
let manager = ThreadManager::with_models_provider_and_home_for_tests(
CodexAuth::from_api_key("dummy"),
config.model_provider.clone(),
config.codex_home.clone(),
@ -561,7 +561,7 @@ mod tests {
TomlValue::Integer(max_threads as i64),
)])
.await;
let manager = ThreadManager::with_models_provider_and_home(
let manager = ThreadManager::with_models_provider_and_home_for_tests(
CodexAuth::from_api_key("dummy"),
config.model_provider.clone(),
config.codex_home.clone(),
@ -597,7 +597,7 @@ mod tests {
TomlValue::Integer(max_threads as i64),
)])
.await;
let manager = ThreadManager::with_models_provider_and_home(
let manager = ThreadManager::with_models_provider_and_home_for_tests(
CodexAuth::from_api_key("dummy"),
config.model_provider.clone(),
config.codex_home.clone(),
@ -650,7 +650,7 @@ mod tests {
TomlValue::Integer(max_threads as i64),
)])
.await;
let manager = ThreadManager::with_models_provider_and_home(
let manager = ThreadManager::with_models_provider_and_home_for_tests(
CodexAuth::from_api_key("dummy"),
config.model_provider.clone(),
config.codex_home.clone(),

View file

@ -981,9 +981,8 @@ impl AuthManager {
}
}
#[cfg(any(test, feature = "test-support"))]
/// Create an AuthManager with a specific CodexAuth, for testing only.
pub fn from_auth_for_testing(auth: CodexAuth) -> Arc<Self> {
pub(crate) fn from_auth_for_testing(auth: CodexAuth) -> Arc<Self> {
let cached = CachedAuth {
auth: Some(auth),
external_refresher: None,
@ -998,9 +997,11 @@ impl AuthManager {
})
}
#[cfg(any(test, feature = "test-support"))]
/// Create an AuthManager with a specific CodexAuth and codex home, for testing only.
pub fn from_auth_for_testing_with_home(auth: CodexAuth, codex_home: PathBuf) -> Arc<Self> {
pub(crate) fn from_auth_for_testing_with_home(
auth: CodexAuth,
codex_home: PathBuf,
) -> Arc<Self> {
let cached = CachedAuth {
auth: Some(auth),
external_refresher: None,

View file

@ -5787,8 +5787,9 @@ mod tests {
let codex_home = tempfile::tempdir().expect("create temp dir");
let config = build_test_config(codex_home.path()).await;
let config = Arc::new(config);
let model = ModelsManager::get_model_offline(config.model.as_deref());
let model_info = ModelsManager::construct_model_info_offline(model.as_str(), &config);
let model = ModelsManager::get_model_offline_for_tests(config.model.as_deref());
let model_info =
ModelsManager::construct_model_info_offline_for_tests(model.as_str(), &config);
let reasoning_effort = config.model_reasoning_effort;
let collaboration_mode = CollaborationMode {
mode: ModeKind::Default,
@ -5876,8 +5877,9 @@ mod tests {
let codex_home = tempfile::tempdir().expect("create temp dir");
let config = build_test_config(codex_home.path()).await;
let config = Arc::new(config);
let model = ModelsManager::get_model_offline(config.model.as_deref());
let model_info = ModelsManager::construct_model_info_offline(model.as_str(), &config);
let model = ModelsManager::get_model_offline_for_tests(config.model.as_deref());
let model_info =
ModelsManager::construct_model_info_offline_for_tests(model.as_str(), &config);
let reasoning_effort = config.model_reasoning_effort;
let collaboration_mode = CollaborationMode {
mode: ModeKind::Default,
@ -6129,7 +6131,7 @@ mod tests {
) -> OtelManager {
OtelManager::new(
conversation_id,
ModelsManager::get_model_offline(config.model.as_deref()).as_str(),
ModelsManager::get_model_offline_for_tests(config.model.as_deref()).as_str(),
model_info.slug.as_str(),
None,
Some("test@test.com".to_string()),
@ -6145,8 +6147,9 @@ mod tests {
let codex_home = tempfile::tempdir().expect("create temp dir");
let config = build_test_config(codex_home.path()).await;
let config = Arc::new(config);
let model = ModelsManager::get_model_offline(config.model.as_deref());
let model_info = ModelsManager::construct_model_info_offline(model.as_str(), &config);
let model = ModelsManager::get_model_offline_for_tests(config.model.as_deref());
let model_info =
ModelsManager::construct_model_info_offline_for_tests(model.as_str(), &config);
let reasoning_effort = config.model_reasoning_effort;
let collaboration_mode = CollaborationMode {
mode: ModeKind::Default,
@ -6197,8 +6200,9 @@ mod tests {
let agent_control = AgentControl::default();
let exec_policy = ExecPolicyManager::default();
let (agent_status_tx, _agent_status_rx) = watch::channel(AgentStatus::PendingInit);
let model = ModelsManager::get_model_offline(config.model.as_deref());
let model_info = ModelsManager::construct_model_info_offline(model.as_str(), &config);
let model = ModelsManager::get_model_offline_for_tests(config.model.as_deref());
let model_info =
ModelsManager::construct_model_info_offline_for_tests(model.as_str(), &config);
let reasoning_effort = config.model_reasoning_effort;
let collaboration_mode = CollaborationMode {
mode: ModeKind::Default,
@ -6231,7 +6235,7 @@ mod tests {
dynamic_tools: Vec::new(),
};
let per_turn_config = Session::build_per_turn_config(&session_configuration);
let model_info = ModelsManager::construct_model_info_offline(
let model_info = ModelsManager::construct_model_info_offline_for_tests(
session_configuration.collaboration_mode.model(),
&per_turn_config,
);
@ -6333,8 +6337,9 @@ mod tests {
let agent_control = AgentControl::default();
let exec_policy = ExecPolicyManager::default();
let (agent_status_tx, _agent_status_rx) = watch::channel(AgentStatus::PendingInit);
let model = ModelsManager::get_model_offline(config.model.as_deref());
let model_info = ModelsManager::construct_model_info_offline(model.as_str(), &config);
let model = ModelsManager::get_model_offline_for_tests(config.model.as_deref());
let model_info =
ModelsManager::construct_model_info_offline_for_tests(model.as_str(), &config);
let reasoning_effort = config.model_reasoning_effort;
let collaboration_mode = CollaborationMode {
mode: ModeKind::Default,
@ -6367,7 +6372,7 @@ mod tests {
dynamic_tools: Vec::new(),
};
let per_turn_config = Session::build_per_turn_config(&session_configuration);
let model_info = ModelsManager::construct_model_info_offline(
let model_info = ModelsManager::construct_model_info_offline_for_tests(
session_configuration.collaboration_mode.model(),
&per_turn_config,
);

View file

@ -59,6 +59,7 @@ mod session_prefix;
mod shell_detect;
mod stream_events_utils;
mod tagged_block_parser;
pub mod test_support;
mod text_encoding;
pub mod token_data;
mod truncate;
@ -145,8 +146,6 @@ pub use file_watcher::FileWatcherEvent;
pub use safety::get_platform_sandbox;
pub use tools::spec::parse_tool_input_schema;
pub use turn_metadata::build_turn_metadata_header;
#[cfg(any(test, feature = "test-support"))]
pub use unified_exec::set_deterministic_process_ids_for_tests;
// Re-export the protocol types from the standalone `codex-protocol` crate so existing
// `codex_core::protocol::...` references continue to work across the workspace.
pub use codex_protocol::protocol;

View file

@ -208,7 +208,7 @@ mod tests {
.await
.expect("initialize state db");
let manager = ThreadManager::with_models_provider_and_home(
let manager = ThreadManager::with_models_provider_and_home_for_tests(
CodexAuth::from_api_key("dummy"),
config.model_provider.clone(),
config.codex_home.clone(),

View file

@ -9,15 +9,10 @@ const COLLABORATION_MODE_DEFAULT: &str =
const KNOWN_MODE_NAMES_PLACEHOLDER: &str = "{{KNOWN_MODE_NAMES}}";
const REQUEST_USER_INPUT_AVAILABILITY_PLACEHOLDER: &str = "{{REQUEST_USER_INPUT_AVAILABILITY}}";
pub(super) fn builtin_collaboration_mode_presets() -> Vec<CollaborationModeMask> {
/// Returns the built-in collaboration mode presets, in display order:
/// the Plan preset followed by the Default preset.
pub(crate) fn builtin_collaboration_mode_presets() -> Vec<CollaborationModeMask> {
    vec![plan_preset(), default_preset()]
}
#[cfg(any(test, feature = "test-support"))]
pub fn test_builtin_collaboration_mode_presets() -> Vec<CollaborationModeMask> {
builtin_collaboration_mode_presets()
}
fn plan_preset() -> CollaborationModeMask {
CollaborationModeMask {
name: ModeKind::Plan.display_name().to_string(),

View file

@ -336,9 +336,8 @@ impl ModelsManager {
}
}
#[cfg(any(test, feature = "test-support"))]
/// Construct a manager with a specific provider for testing.
pub fn with_provider(
pub(crate) fn with_provider_for_tests(
codex_home: PathBuf,
auth_manager: Arc<AuthManager>,
provider: ModelProviderInfo,
@ -355,9 +354,8 @@ impl ModelsManager {
}
}
#[cfg(any(test, feature = "test-support"))]
/// Get model identifier without consulting remote state or cache.
pub fn get_model_offline(model: Option<&str>) -> String {
pub(crate) fn get_model_offline_for_tests(model: Option<&str>) -> String {
if let Some(model) = model {
return model.to_string();
}
@ -370,9 +368,11 @@ impl ModelsManager {
.unwrap_or_default()
}
#[cfg(any(test, feature = "test-support"))]
/// Build `ModelInfo` without consulting remote state or cache.
pub fn construct_model_info_offline(model: &str, config: &Config) -> ModelInfo {
pub(crate) fn construct_model_info_offline_for_tests(
model: &str,
config: &Config,
) -> ModelInfo {
model_info::with_config_overrides(model_info::model_info_from_slug(model), config)
}
}
@ -482,8 +482,11 @@ mod tests {
let auth_manager =
AuthManager::from_auth_for_testing(CodexAuth::create_dummy_chatgpt_auth_for_testing());
let provider = provider_for(server.uri());
let manager =
ModelsManager::with_provider(codex_home.path().to_path_buf(), auth_manager, provider);
let manager = ModelsManager::with_provider_for_tests(
codex_home.path().to_path_buf(),
auth_manager,
provider,
);
manager
.refresh_available_models(&config, RefreshStrategy::OnlineIfUncached)
@ -536,8 +539,11 @@ mod tests {
let auth_manager =
AuthManager::from_auth_for_testing(CodexAuth::create_dummy_chatgpt_auth_for_testing());
let provider = provider_for(server.uri());
let manager =
ModelsManager::with_provider(codex_home.path().to_path_buf(), auth_manager, provider);
let manager = ModelsManager::with_provider_for_tests(
codex_home.path().to_path_buf(),
auth_manager,
provider,
);
manager
.refresh_available_models(&config, RefreshStrategy::OnlineIfUncached)
@ -580,8 +586,11 @@ mod tests {
let auth_manager =
AuthManager::from_auth_for_testing(CodexAuth::create_dummy_chatgpt_auth_for_testing());
let provider = provider_for(server.uri());
let manager =
ModelsManager::with_provider(codex_home.path().to_path_buf(), auth_manager, provider);
let manager = ModelsManager::with_provider_for_tests(
codex_home.path().to_path_buf(),
auth_manager,
provider,
);
manager
.refresh_available_models(&config, RefreshStrategy::OnlineIfUncached)
@ -646,8 +655,11 @@ mod tests {
let auth_manager =
AuthManager::from_auth_for_testing(CodexAuth::create_dummy_chatgpt_auth_for_testing());
let provider = provider_for(server.uri());
let manager =
ModelsManager::with_provider(codex_home.path().to_path_buf(), auth_manager, provider);
let manager = ModelsManager::with_provider_for_tests(
codex_home.path().to_path_buf(),
auth_manager,
provider,
);
manager
.refresh_available_models(&config, RefreshStrategy::OnlineIfUncached)
@ -712,8 +724,11 @@ mod tests {
let auth_manager =
AuthManager::from_auth_for_testing(CodexAuth::create_dummy_chatgpt_auth_for_testing());
let provider = provider_for(server.uri());
let mut manager =
ModelsManager::with_provider(codex_home.path().to_path_buf(), auth_manager, provider);
let mut manager = ModelsManager::with_provider_for_tests(
codex_home.path().to_path_buf(),
auth_manager,
provider,
);
manager.cache_manager.set_ttl(Duration::ZERO);
manager
@ -784,8 +799,11 @@ mod tests {
AuthCredentialsStoreMode::File,
));
let provider = provider_for(server.uri());
let manager =
ModelsManager::with_provider(codex_home.path().to_path_buf(), auth_manager, provider);
let manager = ModelsManager::with_provider_for_tests(
codex_home.path().to_path_buf(),
auth_manager,
provider,
);
manager
.refresh_available_models(&config, RefreshStrategy::Online)
@ -811,8 +829,11 @@ mod tests {
let auth_manager =
AuthManager::from_auth_for_testing(CodexAuth::from_api_key("Test API Key"));
let provider = provider_for("http://example.test".to_string());
let mut manager =
ModelsManager::with_provider(codex_home.path().to_path_buf(), auth_manager, provider);
let mut manager = ModelsManager::with_provider_for_tests(
codex_home.path().to_path_buf(),
auth_manager,
provider,
);
manager.local_models = Vec::new();
let hidden_model = remote_model_with_visibility("hidden", "Hidden", 0, "hide");

View file

@ -4,9 +4,6 @@ pub mod manager;
pub mod model_info;
pub mod model_presets;
#[cfg(any(test, feature = "test-support"))]
pub use collaboration_mode_presets::test_builtin_collaboration_mode_presets;
/// Convert the client version string to a whole version string (e.g. "1.2.3-alpha.4" -> "1.2.3").
pub fn client_version_to_whole() -> String {
format!(

View file

@ -11,7 +11,7 @@ pub const HIDE_GPT5_1_MIGRATION_PROMPT_CONFIG: &str = "hide_gpt5_1_migration_pro
pub const HIDE_GPT_5_1_CODEX_MAX_MIGRATION_PROMPT_CONFIG: &str =
"hide_gpt-5.1-codex-max_migration_prompt";
static PRESETS: Lazy<Vec<ModelPreset>> = Lazy::new(|| {
pub(crate) static PRESETS: Lazy<Vec<ModelPreset>> = Lazy::new(|| {
vec![
ModelPreset {
id: "gpt-5.2-codex".to_string(),
@ -359,11 +359,6 @@ pub(super) fn builtin_model_presets(_auth_mode: Option<AuthMode>) -> Vec<ModelPr
PRESETS.iter().cloned().collect()
}
#[cfg(any(test, feature = "test-support"))]
pub fn all_model_presets() -> &'static Vec<ModelPreset> {
&PRESETS
}
#[cfg(test)]
mod tests {
use super::*;

View file

@ -0,0 +1,78 @@
//! Test-only helpers exposed for cross-crate integration tests.
//!
//! Production code should not depend on this module.
//! We prefer this to using a crate feature to avoid building multiple
//! permutations of the crate.
use std::path::PathBuf;
use std::sync::Arc;
use codex_protocol::config_types::CollaborationModeMask;
use codex_protocol::openai_models::ModelInfo;
use codex_protocol::openai_models::ModelPreset;
use crate::AuthManager;
use crate::CodexAuth;
use crate::ModelProviderInfo;
use crate::ThreadManager;
use crate::config::Config;
use crate::models_manager::collaboration_mode_presets;
use crate::models_manager::manager::ModelsManager;
use crate::models_manager::model_presets;
use crate::thread_manager;
use crate::unified_exec;
/// Enables or disables test-only thread-manager behaviors by delegating to
/// the crate-internal, process-wide toggle.
///
/// Call with `true` at the start of an integration test that relies on
/// test-mode behaviors; production code must never call this.
pub fn set_thread_manager_test_mode(enabled: bool) {
    thread_manager::set_thread_manager_test_mode_for_tests(enabled);
}
/// Toggles deterministic process-id assignment in the unified exec layer,
/// for tests that need stable identifiers across runs.
pub fn set_deterministic_process_ids(enabled: bool) {
    unified_exec::set_deterministic_process_ids_for_tests(enabled);
}
/// Builds an [`AuthManager`] pre-seeded with the given [`CodexAuth`].
/// Test-only: bypasses normal auth loading.
pub fn auth_manager_from_auth(auth: CodexAuth) -> Arc<AuthManager> {
    AuthManager::from_auth_for_testing(auth)
}
/// Builds an [`AuthManager`] pre-seeded with the given [`CodexAuth`] and
/// rooted at `codex_home`. Test-only: bypasses normal auth loading.
pub fn auth_manager_from_auth_with_home(auth: CodexAuth, codex_home: PathBuf) -> Arc<AuthManager> {
    AuthManager::from_auth_for_testing_with_home(auth, codex_home)
}
/// Builds a [`ThreadManager`] for tests, backed by the given auth and model
/// provider and a freshly created temporary codex home that is removed when
/// the manager is dropped.
pub fn thread_manager_with_models_provider(
    auth: CodexAuth,
    provider: ModelProviderInfo,
) -> ThreadManager {
    ThreadManager::with_models_provider_for_tests(auth, provider)
}
/// Builds a [`ThreadManager`] for tests, backed by the given auth, model
/// provider, and an explicit `codex_home` directory supplied by the caller
/// (the caller also owns its cleanup).
pub fn thread_manager_with_models_provider_and_home(
    auth: CodexAuth,
    provider: ModelProviderInfo,
    codex_home: PathBuf,
) -> ThreadManager {
    ThreadManager::with_models_provider_and_home_for_tests(auth, provider, codex_home)
}
/// Builds a [`ModelsManager`] wired to a specific provider, for tests that
/// point model refreshes at a mock server.
pub fn models_manager_with_provider(
    codex_home: PathBuf,
    auth_manager: Arc<AuthManager>,
    provider: ModelProviderInfo,
) -> ModelsManager {
    ModelsManager::with_provider_for_tests(codex_home, auth_manager, provider)
}
/// Resolves the model slug without consulting remote state or cache:
/// returns `model` when provided, otherwise the built-in default.
pub fn get_model_offline(model: Option<&str>) -> String {
    ModelsManager::get_model_offline_for_tests(model)
}
/// Builds a [`ModelInfo`] for `model` (with `config` overrides applied)
/// without consulting remote state or cache.
pub fn construct_model_info_offline(model: &str, config: &Config) -> ModelInfo {
    ModelsManager::construct_model_info_offline_for_tests(model, config)
}
/// Returns the full static list of built-in model presets, including ones a
/// production code path might filter out.
pub fn all_model_presets() -> &'static Vec<ModelPreset> {
    &model_presets::PRESETS
}
/// Returns the built-in collaboration mode presets for assertions in
/// cross-crate tests.
pub fn builtin_collaboration_mode_presets() -> Vec<CollaborationModeMask> {
    collaboration_mode_presets::builtin_collaboration_mode_presets()
}

View file

@ -1,7 +1,5 @@
use crate::AuthManager;
#[cfg(any(test, feature = "test-support"))]
use crate::CodexAuth;
#[cfg(any(test, feature = "test-support"))]
use crate::ModelProviderInfo;
use crate::agent::AgentControl;
use crate::codex::Codex;
@ -31,25 +29,50 @@ use codex_protocol::protocol::SessionSource;
use std::collections::HashMap;
use std::path::PathBuf;
use std::sync::Arc;
#[cfg(any(test, feature = "test-support"))]
use tempfile::TempDir;
use std::sync::atomic::AtomicBool;
use std::sync::atomic::Ordering;
use tokio::runtime::Handle;
#[cfg(any(test, feature = "test-support"))]
use tokio::runtime::RuntimeFlavor;
use tokio::sync::RwLock;
use tokio::sync::broadcast;
use tracing::warn;
const THREAD_CREATED_CHANNEL_CAPACITY: usize = 1024;
/// Test-only override for enabling thread-manager behaviors used by integration
/// tests.
///
/// In production builds this value should remain at its default (`false`) and
/// must not be toggled.
static FORCE_TEST_THREAD_MANAGER_BEHAVIOR: AtomicBool = AtomicBool::new(false);
type CapturedOps = Vec<(ThreadId, Op)>;
type SharedCapturedOps = Arc<std::sync::Mutex<CapturedOps>>;
/// Turns thread-manager test mode on or off for the whole process.
/// `Relaxed` ordering suffices: the flag is a standalone boolean with no
/// ordering relationship to other memory.
pub(crate) fn set_thread_manager_test_mode_for_tests(enabled: bool) {
    FORCE_TEST_THREAD_MANAGER_BEHAVIOR.store(enabled, Ordering::Relaxed);
}
/// Reads the process-wide test-mode flag set via
/// `set_thread_manager_test_mode_for_tests`.
fn should_use_test_thread_manager_behavior() -> bool {
    FORCE_TEST_THREAD_MANAGER_BEHAVIOR.load(Ordering::Relaxed)
}
/// Owns a test-created codex home directory and deletes it on drop.
struct TempCodexHomeGuard {
    // Absolute path of the directory to remove when the guard drops.
    path: PathBuf,
}

impl Drop for TempCodexHomeGuard {
    fn drop(&mut self) {
        // Best-effort cleanup: ignore errors (the directory may already be
        // gone, or removal may race with another process during teardown).
        let _ = std::fs::remove_dir_all(&self.path);
    }
}
fn build_file_watcher(codex_home: PathBuf, skills_manager: Arc<SkillsManager>) -> Arc<FileWatcher> {
#[cfg(any(test, feature = "test-support"))]
if let Ok(handle) = Handle::try_current()
if should_use_test_thread_manager_behavior()
&& let Ok(handle) = Handle::try_current()
&& handle.runtime_flavor() == RuntimeFlavor::CurrentThread
{
// The real watcher spins background tasks that can starve the
// current-thread test runtime and cause event waits to time out.
// Integration tests compile with the `test-support` feature.
warn!("using noop file watcher under current-thread test runtime");
return Arc::new(FileWatcher::noop());
}
@ -95,8 +118,7 @@ pub struct NewThread {
/// them in memory.
pub struct ThreadManager {
state: Arc<ThreadManagerState>,
#[cfg(any(test, feature = "test-support"))]
_test_codex_home_guard: Option<TempDir>,
_test_codex_home_guard: Option<TempCodexHomeGuard>,
}
/// Shared, `Arc`-owned state for [`ThreadManager`]. This `Arc` is required to have a single
@ -110,10 +132,8 @@ pub(crate) struct ThreadManagerState {
skills_manager: Arc<SkillsManager>,
file_watcher: Arc<FileWatcher>,
session_source: SessionSource,
#[cfg(any(test, feature = "test-support"))]
#[allow(dead_code)]
// Captures submitted ops for testing purpose.
ops_log: Arc<std::sync::Mutex<Vec<(ThreadId, Op)>>>,
// Captures submitted ops for testing purpose when test mode is enabled.
ops_log: Option<SharedCapturedOps>,
}
impl ThreadManager {
@ -134,33 +154,40 @@ impl ThreadManager {
file_watcher,
auth_manager,
session_source,
#[cfg(any(test, feature = "test-support"))]
ops_log: Arc::new(std::sync::Mutex::new(Vec::new())),
ops_log: should_use_test_thread_manager_behavior()
.then(|| Arc::new(std::sync::Mutex::new(Vec::new()))),
}),
#[cfg(any(test, feature = "test-support"))]
_test_codex_home_guard: None,
}
}
#[cfg(any(test, feature = "test-support"))]
/// Construct with a dummy AuthManager containing the provided CodexAuth.
/// Used for integration tests: should not be used by ordinary business logic.
pub fn with_models_provider(auth: CodexAuth, provider: ModelProviderInfo) -> Self {
let temp_dir = tempfile::tempdir().unwrap_or_else(|err| panic!("temp codex home: {err}"));
let codex_home = temp_dir.path().to_path_buf();
let mut manager = Self::with_models_provider_and_home(auth, provider, codex_home);
manager._test_codex_home_guard = Some(temp_dir);
/// Constructs a `ThreadManager` for tests with the given auth and model
/// provider, backed by a uniquely named temporary codex home under the OS
/// temp directory.
///
/// Side effects: globally enables thread-manager test mode for the whole
/// process, and creates the temp directory on disk (panicking on failure,
/// which is acceptable in test-only code). The returned manager carries a
/// guard that removes the directory when it is dropped.
pub(crate) fn with_models_provider_for_tests(
    auth: CodexAuth,
    provider: ModelProviderInfo,
) -> Self {
    // Enable test mode before construction so test-only toggles consulted
    // during construction observe it as already enabled.
    set_thread_manager_test_mode_for_tests(true);
    // A fresh UUID per call avoids collisions between concurrent tests.
    let codex_home = std::env::temp_dir().join(format!(
        "codex-thread-manager-test-{}",
        uuid::Uuid::new_v4()
    ));
    std::fs::create_dir_all(&codex_home)
        .unwrap_or_else(|err| panic!("temp codex home dir create failed: {err}"));
    let mut manager =
        Self::with_models_provider_and_home_for_tests(auth, provider, codex_home.clone());
    // The guard deletes `codex_home` when the manager is dropped.
    manager._test_codex_home_guard = Some(TempCodexHomeGuard { path: codex_home });
    manager
}
#[cfg(any(test, feature = "test-support"))]
/// Construct with a dummy AuthManager containing the provided CodexAuth and codex home.
/// Used for integration tests: should not be used by ordinary business logic.
pub fn with_models_provider_and_home(
pub(crate) fn with_models_provider_and_home_for_tests(
auth: CodexAuth,
provider: ModelProviderInfo,
codex_home: PathBuf,
) -> Self {
set_thread_manager_test_mode_for_tests(true);
let auth_manager = AuthManager::from_auth_for_testing(auth);
let (thread_created_tx, _) = broadcast::channel(THREAD_CREATED_CHANNEL_CAPACITY);
let skills_manager = Arc::new(SkillsManager::new(codex_home.clone()));
@ -169,7 +196,7 @@ impl ThreadManager {
state: Arc::new(ThreadManagerState {
threads: Arc::new(RwLock::new(HashMap::new())),
thread_created_tx,
models_manager: Arc::new(ModelsManager::with_provider(
models_manager: Arc::new(ModelsManager::with_provider_for_tests(
codex_home,
auth_manager.clone(),
provider,
@ -178,8 +205,8 @@ impl ThreadManager {
file_watcher,
auth_manager,
session_source: SessionSource::Exec,
#[cfg(any(test, feature = "test-support"))]
ops_log: Arc::new(std::sync::Mutex::new(Vec::new())),
ops_log: should_use_test_thread_manager_behavior()
.then(|| Arc::new(std::sync::Mutex::new(Vec::new()))),
}),
_test_codex_home_guard: None,
}
@ -340,13 +367,12 @@ impl ThreadManager {
AgentControl::new(Arc::downgrade(&self.state))
}
#[cfg(any(test, feature = "test-support"))]
#[allow(dead_code)]
#[cfg(test)]
pub(crate) fn captured_ops(&self) -> Vec<(ThreadId, Op)> {
self.state
.ops_log
.lock()
.map(|log| log.clone())
.as_ref()
.and_then(|ops_log| ops_log.lock().ok().map(|log| log.clone()))
.unwrap_or_default()
}
}
@ -364,11 +390,10 @@ impl ThreadManagerState {
/// Send an operation to a thread by ID.
pub(crate) async fn send_op(&self, thread_id: ThreadId, op: Op) -> CodexResult<String> {
let thread = self.get_thread(thread_id).await?;
#[cfg(any(test, feature = "test-support"))]
if let Some(ops_log) = &self.ops_log
&& let Ok(mut log) = ops_log.lock()
{
if let Ok(mut log) = self.ops_log.lock() {
log.push((thread_id, op.clone()));
}
log.push((thread_id, op.clone()));
}
thread.submit(op).await
}

View file

@ -895,7 +895,7 @@ mod tests {
}
fn thread_manager() -> ThreadManager {
ThreadManager::with_models_provider(
ThreadManager::with_models_provider_for_tests(
CodexAuth::from_api_key("dummy"),
built_in_model_providers()["openai"].clone(),
)

View file

@ -165,10 +165,10 @@ fn create_approval_parameters(include_prefix_rule: bool) -> BTreeMap<String, Jso
"justification".to_string(),
JsonSchema::String {
description: Some(
r#"Only set if sandbox_permissions is \"require_escalated\".
Request approval from the user to run this command outside the sandbox.
Phrased as a simple question that summarizes the purpose of the
command as it relates to the task at hand - e.g. 'Do you want to
r#"Only set if sandbox_permissions is \"require_escalated\".
Request approval from the user to run this command outside the sandbox.
Phrased as a simple question that summarizes the purpose of the
command as it relates to the task at hand - e.g. 'Do you want to
fetch and pull the latest version of this git branch?'"#
.to_string(),
),
@ -182,7 +182,7 @@ fn create_approval_parameters(include_prefix_rule: bool) -> BTreeMap<String, Jso
JsonSchema::Array {
items: Box::new(JsonSchema::String { description: None }),
description: Some(
r#"Only specify when sandbox_permissions is `require_escalated`.
r#"Only specify when sandbox_permissions is `require_escalated`.
Suggest a prefix command pattern that will allow you to fulfill similar requests from the user in the future.
Should be a short but reasonable prefix, e.g. [\"git\", \"pull\"] or [\"uv\", \"run\"] or [\"pytest\"]."#.to_string(),
),
@ -339,7 +339,7 @@ fn create_shell_tool(include_prefix_rule: bool) -> ToolSpec {
let description = if cfg!(windows) {
r#"Runs a Powershell command (Windows) and returns its output. Arguments to `shell` will be passed to CreateProcessW(). Most commands should be prefixed with ["powershell.exe", "-Command"].
Examples of valid command strings:
- ls -a (show hidden): ["powershell.exe", "-Command", "Get-ChildItem -Force"]
@ -402,7 +402,7 @@ fn create_shell_command_tool(include_prefix_rule: bool) -> ToolSpec {
let description = if cfg!(windows) {
r#"Runs a Powershell command (Windows) and returns its output.
Examples of valid command strings:
- ls -a (show hidden): "Get-ChildItem -Force"
@ -1766,7 +1766,8 @@ mod tests {
#[test]
fn test_build_specs_collab_tools_enabled() {
let config = test_config();
let model_info = ModelsManager::construct_model_info_offline("gpt-5-codex", &config);
let model_info =
ModelsManager::construct_model_info_offline_for_tests("gpt-5-codex", &config);
let mut features = Features::with_defaults();
features.enable(Feature::Collab);
features.enable(Feature::CollaborationModes);
@ -1791,7 +1792,8 @@ mod tests {
#[test]
fn request_user_input_requires_collaboration_modes_feature() {
let config = test_config();
let model_info = ModelsManager::construct_model_info_offline("gpt-5-codex", &config);
let model_info =
ModelsManager::construct_model_info_offline_for_tests("gpt-5-codex", &config);
let mut features = Features::with_defaults();
features.disable(Feature::CollaborationModes);
let tools_config = ToolsConfig::new(&ToolsConfigParams {
@ -1851,7 +1853,8 @@ mod tests {
#[test]
fn web_search_mode_cached_sets_external_web_access_false() {
let config = test_config();
let model_info = ModelsManager::construct_model_info_offline("gpt-5-codex", &config);
let model_info =
ModelsManager::construct_model_info_offline_for_tests("gpt-5-codex", &config);
let features = Features::with_defaults();
let tools_config = ToolsConfig::new(&ToolsConfigParams {
@ -1873,7 +1876,8 @@ mod tests {
#[test]
fn web_search_mode_live_sets_external_web_access_true() {
let config = test_config();
let model_info = ModelsManager::construct_model_info_offline("gpt-5-codex", &config);
let model_info =
ModelsManager::construct_model_info_offline_for_tests("gpt-5-codex", &config);
let features = Features::with_defaults();
let tools_config = ToolsConfig::new(&ToolsConfigParams {
@ -2098,7 +2102,7 @@ mod tests {
#[test]
fn test_build_specs_default_shell_present() {
let config = test_config();
let model_info = ModelsManager::construct_model_info_offline("o3", &config);
let model_info = ModelsManager::construct_model_info_offline_for_tests("o3", &config);
let mut features = Features::with_defaults();
features.enable(Feature::UnifiedExec);
let tools_config = ToolsConfig::new(&ToolsConfigParams {
@ -2120,7 +2124,8 @@ mod tests {
#[ignore]
fn test_parallel_support_flags() {
let config = test_config();
let model_info = ModelsManager::construct_model_info_offline("gpt-5-codex", &config);
let model_info =
ModelsManager::construct_model_info_offline_for_tests("gpt-5-codex", &config);
let mut features = Features::with_defaults();
features.enable(Feature::UnifiedExec);
let tools_config = ToolsConfig::new(&ToolsConfigParams {
@ -2175,7 +2180,7 @@ mod tests {
#[test]
fn test_build_specs_mcp_tools_converted() {
let config = test_config();
let model_info = ModelsManager::construct_model_info_offline("o3", &config);
let model_info = ModelsManager::construct_model_info_offline_for_tests("o3", &config);
let mut features = Features::with_defaults();
features.enable(Feature::UnifiedExec);
let tools_config = ToolsConfig::new(&ToolsConfigParams {
@ -2260,7 +2265,7 @@ mod tests {
#[test]
fn test_build_specs_mcp_tools_sorted_by_name() {
let config = test_config();
let model_info = ModelsManager::construct_model_info_offline("o3", &config);
let model_info = ModelsManager::construct_model_info_offline_for_tests("o3", &config);
let mut features = Features::with_defaults();
features.enable(Feature::UnifiedExec);
let tools_config = ToolsConfig::new(&ToolsConfigParams {
@ -2304,7 +2309,8 @@ mod tests {
#[test]
fn test_mcp_tool_property_missing_type_defaults_to_string() {
let config = test_config();
let model_info = ModelsManager::construct_model_info_offline("gpt-5-codex", &config);
let model_info =
ModelsManager::construct_model_info_offline_for_tests("gpt-5-codex", &config);
let mut features = Features::with_defaults();
features.enable(Feature::UnifiedExec);
let tools_config = ToolsConfig::new(&ToolsConfigParams {
@ -2356,7 +2362,8 @@ mod tests {
#[test]
fn test_mcp_tool_integer_normalized_to_number() {
let config = test_config();
let model_info = ModelsManager::construct_model_info_offline("gpt-5-codex", &config);
let model_info =
ModelsManager::construct_model_info_offline_for_tests("gpt-5-codex", &config);
let mut features = Features::with_defaults();
features.enable(Feature::UnifiedExec);
let tools_config = ToolsConfig::new(&ToolsConfigParams {
@ -2404,7 +2411,8 @@ mod tests {
#[test]
fn test_mcp_tool_array_without_items_gets_default_string_items() {
let config = test_config();
let model_info = ModelsManager::construct_model_info_offline("gpt-5-codex", &config);
let model_info =
ModelsManager::construct_model_info_offline_for_tests("gpt-5-codex", &config);
let mut features = Features::with_defaults();
features.enable(Feature::UnifiedExec);
features.enable(Feature::ApplyPatchFreeform);
@ -2456,7 +2464,8 @@ mod tests {
#[test]
fn test_mcp_tool_anyof_defaults_to_string() {
let config = test_config();
let model_info = ModelsManager::construct_model_info_offline("gpt-5-codex", &config);
let model_info =
ModelsManager::construct_model_info_offline_for_tests("gpt-5-codex", &config);
let mut features = Features::with_defaults();
features.enable(Feature::UnifiedExec);
let tools_config = ToolsConfig::new(&ToolsConfigParams {
@ -2516,7 +2525,7 @@ mod tests {
let expected = if cfg!(windows) {
r#"Runs a Powershell command (Windows) and returns its output. Arguments to `shell` will be passed to CreateProcessW(). Most commands should be prefixed with ["powershell.exe", "-Command"].
Examples of valid command strings:
- ls -a (show hidden): ["powershell.exe", "-Command", "Get-ChildItem -Force"]
@ -2546,7 +2555,7 @@ Examples of valid command strings:
let expected = if cfg!(windows) {
r#"Runs a Powershell command (Windows) and returns its output.
Examples of valid command strings:
- ls -a (show hidden): "Get-ChildItem -Force"
@ -2565,7 +2574,8 @@ Examples of valid command strings:
#[test]
fn test_get_openai_tools_mcp_tools_with_additional_properties_schema() {
let config = test_config();
let model_info = ModelsManager::construct_model_info_offline("gpt-5-codex", &config);
let model_info =
ModelsManager::construct_model_info_offline_for_tests("gpt-5-codex", &config);
let mut features = Features::with_defaults();
features.enable(Feature::UnifiedExec);
let tools_config = ToolsConfig::new(&ToolsConfigParams {

View file

@ -42,8 +42,7 @@ mod head_tail_buffer;
mod process;
mod process_manager;
#[cfg(any(test, feature = "test-support"))]
pub fn set_deterministic_process_ids_for_tests(enabled: bool) {
pub(crate) fn set_deterministic_process_ids_for_tests(enabled: bool) {
process_manager::set_deterministic_process_ids_for_tests(enabled);
}

View file

@ -62,28 +62,20 @@ const UNIFIED_EXEC_ENV: [(&str, &str); 10] = [
("CODEX_CI", "1"),
];
#[cfg(any(test, feature = "test-support"))]
/// Test-only override for deterministic unified exec process IDs.
///
/// In production builds this value should remain at its default (`false`) and
/// must not be toggled.
// Written by `set_deterministic_process_ids_for_tests` and read by
// `deterministic_process_ids_forced_for_tests`; both use Relaxed ordering
// since the flag is an independent toggle.
static FORCE_DETERMINISTIC_PROCESS_IDS: AtomicBool = AtomicBool::new(false);
#[cfg(any(test, feature = "test-support"))]
/// Enables or disables the test-only override that forces deterministic
/// unified exec process IDs. Compiled only in test / test-support builds.
pub(super) fn set_deterministic_process_ids_for_tests(enabled: bool) {
    // Relaxed ordering is sufficient: no other memory operations need to be
    // ordered relative to this standalone flag.
    FORCE_DETERMINISTIC_PROCESS_IDS.store(enabled, Ordering::Relaxed);
}
#[cfg(any(test, feature = "test-support"))]
/// Reads the test-only override flag set by
/// `set_deterministic_process_ids_for_tests`.
fn deterministic_process_ids_forced_for_tests() -> bool {
    FORCE_DETERMINISTIC_PROCESS_IDS.load(Ordering::Relaxed)
}
#[cfg(not(any(test, feature = "test-support")))]
/// Production builds have no override: deterministic process IDs are never
/// forced. This stub keeps `should_use_deterministic_process_ids` compiling
/// in every configuration.
fn deterministic_process_ids_forced_for_tests() -> bool {
    false
}
/// Deterministic process IDs apply in unit-test builds, or when a test
/// harness has explicitly forced them via the override flag.
fn should_use_deterministic_process_ids() -> bool {
    if cfg!(test) {
        // Unit tests always get deterministic IDs, no opt-in required.
        true
    } else {
        // Otherwise defer to the (possibly compiled-out) test override.
        deterministic_process_ids_forced_for_tests()
    }
}

View file

@ -11,7 +11,7 @@ path = "lib.rs"
anyhow = { workspace = true }
assert_cmd = { workspace = true }
base64 = { workspace = true }
codex-core = { workspace = true, features = ["test-support"] }
codex-core = { workspace = true }
codex-protocol = { workspace = true }
codex-utils-absolute-path = { workspace = true }
codex-utils-cargo-bin = { workspace = true }

View file

@ -20,7 +20,8 @@ pub mod test_codex_exec;
/// Runs once at test-binary startup (via `ctor`): puts the thread manager in
/// test mode and forces deterministic unified exec process IDs so process-id
/// assertions are stable across runs.
///
/// The stale direct call to `codex_core::set_deterministic_process_ids_for_tests`
/// is gone; all test toggles now go through `codex_core::test_support`.
#[ctor]
fn enable_deterministic_unified_exec_process_ids_for_tests() {
    codex_core::test_support::set_thread_manager_test_mode(true);
    codex_core::test_support::set_deterministic_process_ids(true);
}
#[track_caller]

View file

@ -170,7 +170,7 @@ impl TestCodexBuilder {
resume_from: Option<PathBuf>,
) -> anyhow::Result<TestCodex> {
let auth = self.auth.clone();
let thread_manager = ThreadManager::with_models_provider_and_home(
let thread_manager = codex_core::test_support::thread_manager_with_models_provider_and_home(
auth.clone(),
config.model_provider.clone(),
config.codex_home.clone(),
@ -179,7 +179,7 @@ impl TestCodexBuilder {
let new_conversation = match resume_from {
Some(path) => {
let auth_manager = codex_core::AuthManager::from_auth_for_testing(auth);
let auth_manager = codex_core::test_support::auth_manager_from_auth(auth);
thread_manager
.resume_thread_from_rollout(config.clone(), path, auth_manager)
.await?

View file

@ -1,7 +1,6 @@
use std::process::Command;
use std::sync::Arc;
use codex_core::AuthManager;
use codex_core::CodexAuth;
use codex_core::ContentItem;
use codex_core::ModelClient;
@ -10,7 +9,6 @@ use codex_core::Prompt;
use codex_core::ResponseEvent;
use codex_core::ResponseItem;
use codex_core::WireApi;
use codex_core::models_manager::manager::ModelsManager;
use codex_otel::OtelManager;
use codex_otel::TelemetryAuthMode;
use codex_protocol::ThreadId;
@ -65,14 +63,15 @@ async fn responses_stream_includes_subagent_header_on_review() {
config.model_provider = provider.clone();
let effort = config.model_reasoning_effort;
let summary = config.model_reasoning_summary;
let model = ModelsManager::get_model_offline(config.model.as_deref());
let model = codex_core::test_support::get_model_offline(config.model.as_deref());
config.model = Some(model.clone());
let config = Arc::new(config);
let conversation_id = ThreadId::new();
let auth_mode = TelemetryAuthMode::Chatgpt;
let session_source = SessionSource::SubAgent(SubAgentSource::Review);
let model_info = ModelsManager::construct_model_info_offline(model.as_str(), &config);
let model_info =
codex_core::test_support::construct_model_info_offline(model.as_str(), &config);
let otel_manager = OtelManager::new(
conversation_id,
model.as_str(),
@ -169,14 +168,15 @@ async fn responses_stream_includes_subagent_header_on_other() {
config.model_provider = provider.clone();
let effort = config.model_reasoning_effort;
let summary = config.model_reasoning_summary;
let model = ModelsManager::get_model_offline(config.model.as_deref());
let model = codex_core::test_support::get_model_offline(config.model.as_deref());
config.model = Some(model.clone());
let config = Arc::new(config);
let conversation_id = ThreadId::new();
let auth_mode = TelemetryAuthMode::Chatgpt;
let session_source = SessionSource::SubAgent(SubAgentSource::Other("my-task".to_string()));
let model_info = ModelsManager::construct_model_info_offline(model.as_str(), &config);
let model_info =
codex_core::test_support::construct_model_info_offline(model.as_str(), &config);
let otel_manager = OtelManager::new(
conversation_id,
@ -275,12 +275,14 @@ async fn responses_respects_model_info_overrides_from_config() {
let config = Arc::new(config);
let conversation_id = ThreadId::new();
let auth_mode = AuthManager::from_auth_for_testing(CodexAuth::from_api_key("Test API Key"))
.auth_mode()
.map(TelemetryAuthMode::from);
let auth_mode =
codex_core::test_support::auth_manager_from_auth(CodexAuth::from_api_key("Test API Key"))
.auth_mode()
.map(TelemetryAuthMode::from);
let session_source =
SessionSource::SubAgent(SubAgentSource::Other("override-check".to_string()));
let model_info = ModelsManager::construct_model_info_offline(model.as_str(), &config);
let model_info =
codex_core::test_support::construct_model_info_offline(model.as_str(), &config);
let otel_manager = OtelManager::new(
conversation_id,
model.as_str(),

View file

@ -1,4 +1,3 @@
use codex_core::AuthManager;
use codex_core::CodexAuth;
use codex_core::ContentItem;
use codex_core::LocalShellAction;
@ -17,7 +16,6 @@ use codex_core::built_in_model_providers;
use codex_core::default_client::originator;
use codex_core::error::CodexErr;
use codex_core::features::Feature;
use codex_core::models_manager::manager::ModelsManager;
use codex_core::protocol::EventMsg;
use codex_core::protocol::Op;
use codex_core::protocol::SessionSource;
@ -573,7 +571,7 @@ async fn prefers_apikey_when_config_prefers_apikey_even_with_chatgpt_tokens() {
let auth_manager =
match CodexAuth::from_auth_storage(codex_home.path(), AuthCredentialsStoreMode::File) {
Ok(Some(auth)) => codex_core::AuthManager::from_auth_for_testing(auth),
Ok(Some(auth)) => codex_core::test_support::auth_manager_from_auth(auth),
Ok(None) => panic!("No CodexAuth found in codex_home"),
Err(e) => panic!("Failed to load CodexAuth: {e}"),
};
@ -1318,12 +1316,14 @@ async fn azure_responses_request_includes_store_and_reasoning_ids() {
config.model_provider = provider.clone();
let effort = config.model_reasoning_effort;
let summary = config.model_reasoning_summary;
let model = ModelsManager::get_model_offline(config.model.as_deref());
let model = codex_core::test_support::get_model_offline(config.model.as_deref());
config.model = Some(model.clone());
let config = Arc::new(config);
let model_info = ModelsManager::construct_model_info_offline(model.as_str(), &config);
let model_info =
codex_core::test_support::construct_model_info_offline(model.as_str(), &config);
let conversation_id = ThreadId::new();
let auth_manager = AuthManager::from_auth_for_testing(CodexAuth::from_api_key("Test API Key"));
let auth_manager =
codex_core::test_support::auth_manager_from_auth(CodexAuth::from_api_key("Test API Key"));
let otel_manager = OtelManager::new(
conversation_id,
model.as_str(),

View file

@ -1,5 +1,4 @@
#![allow(clippy::expect_used, clippy::unwrap_used)]
use codex_core::AuthManager;
use codex_core::CodexAuth;
use codex_core::ContentItem;
use codex_core::ModelClient;
@ -11,7 +10,6 @@ use codex_core::ResponseItem;
use codex_core::WireApi;
use codex_core::X_RESPONSESAPI_INCLUDE_TIMING_METRICS_HEADER;
use codex_core::features::Feature;
use codex_core::models_manager::manager::ModelsManager;
use codex_core::protocol::EventMsg;
use codex_core::protocol::Op;
use codex_core::protocol::SessionSource;
@ -979,10 +977,11 @@ async fn websocket_harness_with_options(
config.features.enable(Feature::ResponsesWebsocketsV2);
}
let config = Arc::new(config);
let mut model_info = ModelsManager::construct_model_info_offline(MODEL, &config);
let mut model_info = codex_core::test_support::construct_model_info_offline(MODEL, &config);
model_info.prefer_websockets = prefer_websockets;
let conversation_id = ThreadId::new();
let auth_manager = AuthManager::from_auth_for_testing(CodexAuth::from_api_key("Test API Key"));
let auth_manager =
codex_core::test_support::auth_manager_from_auth(CodexAuth::from_api_key("Test API Key"));
let exporter = InMemoryMetricExporter::default();
let metrics = MetricsClient::new(
MetricsConfig::in_memory("test", "codex-core", env!("CARGO_PKG_VERSION"), exporter)

View file

@ -1001,7 +1001,7 @@ async fn resume_conversation(
config: &Config,
path: std::path::PathBuf,
) -> Arc<CodexThread> {
let auth_manager = codex_core::AuthManager::from_auth_for_testing(
let auth_manager = codex_core::test_support::auth_manager_from_auth(
codex_core::CodexAuth::from_api_key("dummy"),
);
manager

View file

@ -11,7 +11,7 @@ async fn offline_model_info_without_tool_output_override() {
let codex_home = TempDir::new().expect("create temp dir");
let mut config = load_default_config_for_test(&codex_home).await;
config.features.enable(Feature::RemoteModels);
let auth_manager = codex_core::AuthManager::from_auth_for_testing(
let auth_manager = codex_core::test_support::auth_manager_from_auth(
CodexAuth::create_dummy_chatgpt_auth_for_testing(),
);
let manager = ModelsManager::new(config.codex_home.clone(), auth_manager);
@ -30,7 +30,7 @@ async fn offline_model_info_with_tool_output_override() {
let mut config = load_default_config_for_test(&codex_home).await;
config.features.enable(Feature::RemoteModels);
config.tool_output_token_limit = Some(123);
let auth_manager = codex_core::AuthManager::from_auth_for_testing(
let auth_manager = codex_core::test_support::auth_manager_from_auth(
CodexAuth::create_dummy_chatgpt_auth_for_testing(),
);
let manager = ModelsManager::new(config.codex_home.clone(), auth_manager);

View file

@ -47,7 +47,7 @@ async fn personality_does_not_mutate_base_instructions_without_template() {
config.features.enable(Feature::Personality);
config.personality = Some(Personality::Friendly);
let model_info = ModelsManager::construct_model_info_offline("gpt-5.1", &config);
let model_info = codex_core::test_support::construct_model_info_offline("gpt-5.1", &config);
assert_eq!(
model_info.get_model_instructions(config.personality),
model_info.base_instructions
@ -62,7 +62,8 @@ async fn base_instructions_override_disables_personality_template() {
config.personality = Some(Personality::Friendly);
config.base_instructions = Some("override instructions".to_string());
let model_info = ModelsManager::construct_model_info_offline("gpt-5.2-codex", &config);
let model_info =
codex_core::test_support::construct_model_info_offline("gpt-5.2-codex", &config);
assert_eq!(model_info.base_instructions, "override instructions");
assert_eq!(
@ -470,7 +471,8 @@ async fn instructions_uses_base_if_feature_disabled() -> anyhow::Result<()> {
config.features.disable(Feature::Personality);
config.personality = Some(Personality::Friendly);
let model_info = ModelsManager::construct_model_info_offline("gpt-5.2-codex", &config);
let model_info =
codex_core::test_support::construct_model_info_offline("gpt-5.2-codex", &config);
assert_eq!(
model_info.get_model_instructions(config.personality),
model_info.base_instructions

View file

@ -100,9 +100,9 @@ async fn remote_models_get_model_info_uses_longest_matching_prefix() -> Result<(
base_url: Some(format!("{}/v1", server.uri())),
..built_in_model_providers()["openai"].clone()
};
let manager = ModelsManager::with_provider(
let manager = codex_core::test_support::models_manager_with_provider(
codex_home.path().to_path_buf(),
codex_core::auth::AuthManager::from_auth_for_testing(auth),
codex_core::test_support::auth_manager_from_auth(auth),
provider,
);
@ -497,9 +497,9 @@ async fn remote_models_preserve_builtin_presets() -> Result<()> {
base_url: Some(format!("{}/v1", server.uri())),
..built_in_model_providers()["openai"].clone()
};
let manager = ModelsManager::with_provider(
let manager = codex_core::test_support::models_manager_with_provider(
codex_home.path().to_path_buf(),
codex_core::auth::AuthManager::from_auth_for_testing(auth),
codex_core::test_support::auth_manager_from_auth(auth),
provider,
);
@ -562,9 +562,9 @@ async fn remote_models_merge_adds_new_high_priority_first() -> Result<()> {
base_url: Some(format!("{}/v1", server.uri())),
..built_in_model_providers()["openai"].clone()
};
let manager = ModelsManager::with_provider(
let manager = codex_core::test_support::models_manager_with_provider(
codex_home.path().to_path_buf(),
codex_core::auth::AuthManager::from_auth_for_testing(auth),
codex_core::test_support::auth_manager_from_auth(auth),
provider,
);
@ -613,9 +613,9 @@ async fn remote_models_merge_replaces_overlapping_model() -> Result<()> {
base_url: Some(format!("{}/v1", server.uri())),
..built_in_model_providers()["openai"].clone()
};
let manager = ModelsManager::with_provider(
let manager = codex_core::test_support::models_manager_with_provider(
codex_home.path().to_path_buf(),
codex_core::auth::AuthManager::from_auth_for_testing(auth),
codex_core::test_support::auth_manager_from_auth(auth),
provider,
);
@ -661,9 +661,9 @@ async fn remote_models_merge_preserves_bundled_models_on_empty_response() -> Res
base_url: Some(format!("{}/v1", server.uri())),
..built_in_model_providers()["openai"].clone()
};
let manager = ModelsManager::with_provider(
let manager = codex_core::test_support::models_manager_with_provider(
codex_home.path().to_path_buf(),
codex_core::auth::AuthManager::from_auth_for_testing(auth),
codex_core::test_support::auth_manager_from_auth(auth),
provider,
);
@ -706,9 +706,9 @@ async fn remote_models_request_times_out_after_5s() -> Result<()> {
base_url: Some(format!("{}/v1", server.uri())),
..built_in_model_providers()["openai"].clone()
};
let manager = ModelsManager::with_provider(
let manager = codex_core::test_support::models_manager_with_provider(
codex_home.path().to_path_buf(),
codex_core::auth::AuthManager::from_auth_for_testing(auth),
codex_core::test_support::auth_manager_from_auth(auth),
provider,
);
@ -773,9 +773,9 @@ async fn remote_models_hide_picker_only_models() -> Result<()> {
base_url: Some(format!("{}/v1", server.uri())),
..built_in_model_providers()["openai"].clone()
};
let manager = ModelsManager::with_provider(
let manager = codex_core::test_support::models_manager_with_provider(
codex_home.path().to_path_buf(),
codex_core::auth::AuthManager::from_auth_for_testing(auth),
codex_core::test_support::auth_manager_from_auth(auth),
provider,
);

View file

@ -1,9 +1,7 @@
#![allow(clippy::unwrap_used, clippy::expect_used)]
use codex_core::AuthManager;
use codex_core::CodexAuth;
use codex_core::NewThread;
use codex_core::ThreadManager;
use codex_core::protocol::EventMsg;
use codex_core::protocol::InitialHistory;
use codex_core::protocol::ResumedHistory;
@ -58,11 +56,12 @@ async fn emits_warning_when_resumed_model_differs() {
let initial_history = resume_history(&config, "previous-model", &rollout_path);
let thread_manager = ThreadManager::with_models_provider(
let thread_manager = codex_core::test_support::thread_manager_with_models_provider(
CodexAuth::from_api_key("test"),
config.model_provider.clone(),
);
let auth_manager = AuthManager::from_auth_for_testing(CodexAuth::from_api_key("test"));
let auth_manager =
codex_core::test_support::auth_manager_from_auth(CodexAuth::from_api_key("test"));
// Act: resume the conversation.
let NewThread {

View file

@ -1,9 +1,7 @@
#![allow(clippy::unwrap_used, clippy::expect_used)]
use codex_core::AuthManager;
use codex_core::CodexAuth;
use codex_core::NewThread;
use codex_core::ThreadManager;
use codex_core::config::CONFIG_TOML_FILE;
use codex_core::features::Feature;
use codex_core::protocol::EventMsg;
@ -30,11 +28,12 @@ async fn emits_warning_when_unstable_features_enabled_via_config() {
toml! { features = { child_agents_md = true } }.into(),
);
let thread_manager = ThreadManager::with_models_provider(
let thread_manager = codex_core::test_support::thread_manager_with_models_provider(
CodexAuth::from_api_key("test"),
config.model_provider.clone(),
);
let auth_manager = AuthManager::from_auth_for_testing(CodexAuth::from_api_key("test"));
let auth_manager =
codex_core::test_support::auth_manager_from_auth(CodexAuth::from_api_key("test"));
let NewThread {
thread: conversation,
@ -67,11 +66,12 @@ async fn suppresses_warning_when_configured() {
toml! { features = { child_agents_md = true } }.into(),
);
let thread_manager = ThreadManager::with_models_provider(
let thread_manager = codex_core::test_support::thread_manager_with_models_provider(
CodexAuth::from_api_key("test"),
config.model_provider.clone(),
);
let auth_manager = AuthManager::from_auth_for_testing(CodexAuth::from_api_key("test"));
let auth_manager =
codex_core::test_support::auth_manager_from_auth(CodexAuth::from_api_key("test"));
let NewThread {
thread: conversation,

View file

@ -120,7 +120,7 @@ arboard = { workspace = true }
[dev-dependencies]
codex-cli = { workspace = true }
codex-core = { workspace = true, features = ["test-support"] }
codex-core = { workspace = true }
codex-utils-cargo-bin = { workspace = true }
codex-utils-pty = { workspace = true }
assert_matches = { workspace = true }

View file

@ -2624,12 +2624,9 @@ mod tests {
use crate::history_cell::HistoryCell;
use crate::history_cell::UserHistoryCell;
use crate::history_cell::new_session_info;
use codex_core::AuthManager;
use codex_core::CodexAuth;
use codex_core::ThreadManager;
use codex_core::config::ConfigBuilder;
use codex_core::config::ConfigOverrides;
use codex_core::models_manager::manager::ModelsManager;
use codex_core::protocol::AskForApproval;
use codex_core::protocol::Event;
use codex_core::protocol::EventMsg;
@ -2726,14 +2723,17 @@ mod tests {
async fn make_test_app() -> App {
let (chat_widget, app_event_tx, _rx, _op_rx) = make_chatwidget_manual_with_sender().await;
let config = chat_widget.config_ref().clone();
let server = Arc::new(ThreadManager::with_models_provider(
let server = Arc::new(
codex_core::test_support::thread_manager_with_models_provider(
CodexAuth::from_api_key("Test API Key"),
config.model_provider.clone(),
),
);
let auth_manager = codex_core::test_support::auth_manager_from_auth(
CodexAuth::from_api_key("Test API Key"),
config.model_provider.clone(),
));
let auth_manager =
AuthManager::from_auth_for_testing(CodexAuth::from_api_key("Test API Key"));
);
let file_search = FileSearchManager::new(config.cwd.clone(), app_event_tx.clone());
let model = ModelsManager::get_model_offline(config.model.as_deref());
let model = codex_core::test_support::get_model_offline(config.model.as_deref());
let otel_manager = test_otel_manager(&config, model.as_str());
App {
@ -2779,14 +2779,17 @@ mod tests {
) {
let (chat_widget, app_event_tx, rx, op_rx) = make_chatwidget_manual_with_sender().await;
let config = chat_widget.config_ref().clone();
let server = Arc::new(ThreadManager::with_models_provider(
let server = Arc::new(
codex_core::test_support::thread_manager_with_models_provider(
CodexAuth::from_api_key("Test API Key"),
config.model_provider.clone(),
),
);
let auth_manager = codex_core::test_support::auth_manager_from_auth(
CodexAuth::from_api_key("Test API Key"),
config.model_provider.clone(),
));
let auth_manager =
AuthManager::from_auth_for_testing(CodexAuth::from_api_key("Test API Key"));
);
let file_search = FileSearchManager::new(config.cwd.clone(), app_event_tx.clone());
let model = ModelsManager::get_model_offline(config.model.as_deref());
let model = codex_core::test_support::get_model_offline(config.model.as_deref());
let otel_manager = test_otel_manager(&config, model.as_str());
(
@ -2830,7 +2833,7 @@ mod tests {
}
fn test_otel_manager(config: &Config, model: &str) -> OtelManager {
let model_info = ModelsManager::construct_model_info_offline(model, config);
let model_info = codex_core::test_support::construct_model_info_offline(model, config);
OtelManager::new(
ThreadId::new(),
model,
@ -2846,7 +2849,7 @@ mod tests {
}
/// Test helper: fetches the built-in model presets through the public
/// test-support surface (the `models_manager::model_presets` path is no
/// longer exported from `codex-core`).
fn all_model_presets() -> Vec<ModelPreset> {
    // Clone so the test owns its own copy of the preset list.
    codex_core::test_support::all_model_presets().clone()
}
fn model_migration_copy_to_plain_text(

View file

@ -16,7 +16,6 @@ use crate::test_backend::VT100Backend;
use crate::tui::FrameRequester;
use assert_matches::assert_matches;
use codex_common::approval_presets::builtin_approval_presets;
use codex_core::AuthManager;
use codex_core::CodexAuth;
use codex_core::config::Config;
use codex_core::config::ConfigBuilder;
@ -965,13 +964,16 @@ async fn helpers_are_available_and_do_not_panic() {
let (tx_raw, _rx) = unbounded_channel::<AppEvent>();
let tx = AppEventSender::new(tx_raw);
let cfg = test_config().await;
let resolved_model = ModelsManager::get_model_offline(cfg.model.as_deref());
let resolved_model = codex_core::test_support::get_model_offline(cfg.model.as_deref());
let otel_manager = test_otel_manager(&cfg, resolved_model.as_str());
let thread_manager = Arc::new(ThreadManager::with_models_provider(
CodexAuth::from_api_key("test"),
cfg.model_provider.clone(),
));
let auth_manager = AuthManager::from_auth_for_testing(CodexAuth::from_api_key("test"));
let thread_manager = Arc::new(
codex_core::test_support::thread_manager_with_models_provider(
CodexAuth::from_api_key("test"),
cfg.model_provider.clone(),
),
);
let auth_manager =
codex_core::test_support::auth_manager_from_auth(CodexAuth::from_api_key("test"));
let init = ChatWidgetInit {
config: cfg,
frame_requester: FrameRequester::test_dummy(),
@ -993,7 +995,7 @@ async fn helpers_are_available_and_do_not_panic() {
}
fn test_otel_manager(config: &Config, model: &str) -> OtelManager {
let model_info = ModelsManager::construct_model_info_offline(model, config);
let model_info = codex_core::test_support::construct_model_info_offline(model, config);
OtelManager::new(
ThreadId::new(),
model,
@ -1022,7 +1024,7 @@ async fn make_chatwidget_manual(
let mut cfg = test_config().await;
let resolved_model = model_override
.map(str::to_owned)
.unwrap_or_else(|| ModelsManager::get_model_offline(cfg.model.as_deref()));
.unwrap_or_else(|| codex_core::test_support::get_model_offline(cfg.model.as_deref()));
if let Some(model) = model_override {
cfg.model = Some(model.to_string());
}
@ -1039,7 +1041,8 @@ async fn make_chatwidget_manual(
});
bottom.set_steer_enabled(true);
bottom.set_collaboration_modes_enabled(cfg.features.enabled(Feature::CollaborationModes));
let auth_manager = AuthManager::from_auth_for_testing(CodexAuth::from_api_key("test"));
let auth_manager =
codex_core::test_support::auth_manager_from_auth(CodexAuth::from_api_key("test"));
let codex_home = cfg.codex_home.clone();
let models_manager = Arc::new(ModelsManager::new(codex_home, auth_manager.clone()));
let reasoning_effort = None;
@ -1144,8 +1147,9 @@ fn next_submit_op(op_rx: &mut tokio::sync::mpsc::UnboundedReceiver<Op>) -> Op {
}
fn set_chatgpt_auth(chat: &mut ChatWidget) {
chat.auth_manager =
AuthManager::from_auth_for_testing(CodexAuth::create_dummy_chatgpt_auth_for_testing());
chat.auth_manager = codex_core::test_support::auth_manager_from_auth(
CodexAuth::create_dummy_chatgpt_auth_for_testing(),
);
chat.models_manager = Arc::new(ModelsManager::new(
chat.config.codex_home.clone(),
chat.auth_manager.clone(),
@ -1448,8 +1452,9 @@ async fn rate_limit_snapshots_keep_separate_entries_per_limit_id() {
#[tokio::test]
async fn rate_limit_switch_prompt_skips_when_on_lower_cost_model() {
let (mut chat, _, _) = make_chatwidget_manual(Some(NUDGE_MODEL_SLUG)).await;
chat.auth_manager =
AuthManager::from_auth_for_testing(CodexAuth::create_dummy_chatgpt_auth_for_testing());
chat.auth_manager = codex_core::test_support::auth_manager_from_auth(
CodexAuth::create_dummy_chatgpt_auth_for_testing(),
);
chat.on_rate_limit_snapshot(Some(snapshot(95.0)));
@ -1463,7 +1468,7 @@ async fn rate_limit_switch_prompt_skips_when_on_lower_cost_model() {
async fn rate_limit_switch_prompt_skips_non_codex_limit() {
let auth = CodexAuth::create_dummy_chatgpt_auth_for_testing();
let (mut chat, _, _) = make_chatwidget_manual(Some("gpt-5")).await;
chat.auth_manager = AuthManager::from_auth_for_testing(auth);
chat.auth_manager = codex_core::test_support::auth_manager_from_auth(auth);
chat.on_rate_limit_snapshot(Some(RateLimitSnapshot {
limit_id: Some("codex_other".to_string()),
@ -1488,7 +1493,7 @@ async fn rate_limit_switch_prompt_skips_non_codex_limit() {
async fn rate_limit_switch_prompt_shows_once_per_session() {
let auth = CodexAuth::create_dummy_chatgpt_auth_for_testing();
let (mut chat, _, _) = make_chatwidget_manual(Some("gpt-5")).await;
chat.auth_manager = AuthManager::from_auth_for_testing(auth);
chat.auth_manager = codex_core::test_support::auth_manager_from_auth(auth);
chat.on_rate_limit_snapshot(Some(snapshot(90.0)));
assert!(
@ -1512,7 +1517,7 @@ async fn rate_limit_switch_prompt_shows_once_per_session() {
async fn rate_limit_switch_prompt_respects_hidden_notice() {
let auth = CodexAuth::create_dummy_chatgpt_auth_for_testing();
let (mut chat, _, _) = make_chatwidget_manual(Some("gpt-5")).await;
chat.auth_manager = AuthManager::from_auth_for_testing(auth);
chat.auth_manager = codex_core::test_support::auth_manager_from_auth(auth);
chat.config.notices.hide_rate_limit_model_nudge = Some(true);
chat.on_rate_limit_snapshot(Some(snapshot(95.0)));
@ -1527,7 +1532,7 @@ async fn rate_limit_switch_prompt_respects_hidden_notice() {
async fn rate_limit_switch_prompt_defers_until_task_complete() {
let auth = CodexAuth::create_dummy_chatgpt_auth_for_testing();
let (mut chat, _, _) = make_chatwidget_manual(Some("gpt-5")).await;
chat.auth_manager = AuthManager::from_auth_for_testing(auth);
chat.auth_manager = codex_core::test_support::auth_manager_from_auth(auth);
chat.bottom_pane.set_task_running(true);
chat.on_rate_limit_snapshot(Some(snapshot(90.0)));
@ -1547,8 +1552,9 @@ async fn rate_limit_switch_prompt_defers_until_task_complete() {
#[tokio::test]
async fn rate_limit_switch_prompt_popup_snapshot() {
let (mut chat, _rx, _op_rx) = make_chatwidget_manual(Some("gpt-5")).await;
chat.auth_manager =
AuthManager::from_auth_for_testing(CodexAuth::create_dummy_chatgpt_auth_for_testing());
chat.auth_manager = codex_core::test_support::auth_manager_from_auth(
CodexAuth::create_dummy_chatgpt_auth_for_testing(),
);
chat.on_rate_limit_snapshot(Some(snapshot(92.0)));
chat.maybe_show_pending_rate_limit_prompt();
@ -1881,8 +1887,9 @@ async fn plan_implementation_popup_shows_after_proposed_plan_output() {
#[tokio::test]
async fn plan_implementation_popup_skips_when_rate_limit_prompt_pending() {
let (mut chat, _rx, _op_rx) = make_chatwidget_manual(Some("gpt-5")).await;
chat.auth_manager =
AuthManager::from_auth_for_testing(CodexAuth::create_dummy_chatgpt_auth_for_testing());
chat.auth_manager = codex_core::test_support::auth_manager_from_auth(
CodexAuth::create_dummy_chatgpt_auth_for_testing(),
);
chat.set_feature_enabled(Feature::CollaborationModes, true);
let plan_mask =
collaboration_modes::mask_for_kind(chat.models_manager.as_ref(), ModeKind::Plan)
@ -3143,13 +3150,16 @@ async fn collaboration_modes_defaults_to_code_on_startup() {
.build()
.await
.expect("config");
let resolved_model = ModelsManager::get_model_offline(cfg.model.as_deref());
let resolved_model = codex_core::test_support::get_model_offline(cfg.model.as_deref());
let otel_manager = test_otel_manager(&cfg, resolved_model.as_str());
let thread_manager = Arc::new(ThreadManager::with_models_provider(
CodexAuth::from_api_key("test"),
cfg.model_provider.clone(),
));
let auth_manager = AuthManager::from_auth_for_testing(CodexAuth::from_api_key("test"));
let thread_manager = Arc::new(
codex_core::test_support::thread_manager_with_models_provider(
CodexAuth::from_api_key("test"),
cfg.model_provider.clone(),
),
);
let auth_manager =
codex_core::test_support::auth_manager_from_auth(CodexAuth::from_api_key("test"));
let init = ChatWidgetInit {
config: cfg,
frame_requester: FrameRequester::test_dummy(),
@ -3189,13 +3199,16 @@ async fn experimental_mode_plan_applies_on_startup() {
.build()
.await
.expect("config");
let resolved_model = ModelsManager::get_model_offline(cfg.model.as_deref());
let resolved_model = codex_core::test_support::get_model_offline(cfg.model.as_deref());
let otel_manager = test_otel_manager(&cfg, resolved_model.as_str());
let thread_manager = Arc::new(ThreadManager::with_models_provider(
CodexAuth::from_api_key("test"),
cfg.model_provider.clone(),
));
let auth_manager = AuthManager::from_auth_for_testing(CodexAuth::from_api_key("test"));
let thread_manager = Arc::new(
codex_core::test_support::thread_manager_with_models_provider(
CodexAuth::from_api_key("test"),
cfg.model_provider.clone(),
),
);
let auth_manager =
codex_core::test_support::auth_manager_from_auth(CodexAuth::from_api_key("test"));
let init = ChatWidgetInit {
config: cfg,
frame_requester: FrameRequester::test_dummy(),

View file

@ -7,7 +7,6 @@ use chrono::Utc;
use codex_core::AuthManager;
use codex_core::config::Config;
use codex_core::config::ConfigBuilder;
use codex_core::models_manager::manager::ModelsManager;
use codex_core::protocol::CreditsSnapshot;
use codex_core::protocol::RateLimitSnapshot;
use codex_core::protocol::RateLimitWindow;
@ -40,7 +39,7 @@ fn test_auth_manager(config: &Config) -> AuthManager {
fn token_info_for(model_slug: &str, config: &Config, usage: &TokenUsage) -> TokenUsageInfo {
let context_window =
ModelsManager::construct_model_info_offline(model_slug, config).context_window;
codex_core::test_support::construct_model_info_offline(model_slug, config).context_window;
TokenUsageInfo {
total_token_usage: usage.clone(),
last_token_usage: usage.clone(),
@ -139,7 +138,7 @@ async fn status_snapshot_includes_reasoning_details() {
};
let rate_display = rate_limit_snapshot_display(&snapshot, captured_at);
let model_slug = ModelsManager::get_model_offline(config.model.as_deref());
let model_slug = codex_core::test_support::get_model_offline(config.model.as_deref());
let token_info = token_info_for(&model_slug, &config, &usage);
let reasoning_effort_override = Some(Some(ReasoningEffort::High));
@ -190,7 +189,7 @@ async fn status_snapshot_includes_forked_from() {
.single()
.expect("valid time");
let model_slug = ModelsManager::get_model_offline(config.model.as_deref());
let model_slug = codex_core::test_support::get_model_offline(config.model.as_deref());
let token_info = token_info_for(&model_slug, &config, &usage);
let session_id =
ThreadId::from_string("0f0f3c13-6cf9-4aa4-8b80-7d49c2f1be2e").expect("session id");
@ -257,7 +256,7 @@ async fn status_snapshot_includes_monthly_limit() {
};
let rate_display = rate_limit_snapshot_display(&snapshot, captured_at);
let model_slug = ModelsManager::get_model_offline(config.model.as_deref());
let model_slug = codex_core::test_support::get_model_offline(config.model.as_deref());
let token_info = token_info_for(&model_slug, &config, &usage);
let composite = new_status_output(
&config,
@ -307,7 +306,7 @@ async fn status_snapshot_shows_unlimited_credits() {
plan_type: None,
};
let rate_display = rate_limit_snapshot_display(&snapshot, captured_at);
let model_slug = ModelsManager::get_model_offline(config.model.as_deref());
let model_slug = codex_core::test_support::get_model_offline(config.model.as_deref());
let token_info = token_info_for(&model_slug, &config, &usage);
let composite = new_status_output(
&config,
@ -356,7 +355,7 @@ async fn status_snapshot_shows_positive_credits() {
plan_type: None,
};
let rate_display = rate_limit_snapshot_display(&snapshot, captured_at);
let model_slug = ModelsManager::get_model_offline(config.model.as_deref());
let model_slug = codex_core::test_support::get_model_offline(config.model.as_deref());
let token_info = token_info_for(&model_slug, &config, &usage);
let composite = new_status_output(
&config,
@ -405,7 +404,7 @@ async fn status_snapshot_hides_zero_credits() {
plan_type: None,
};
let rate_display = rate_limit_snapshot_display(&snapshot, captured_at);
let model_slug = ModelsManager::get_model_offline(config.model.as_deref());
let model_slug = codex_core::test_support::get_model_offline(config.model.as_deref());
let token_info = token_info_for(&model_slug, &config, &usage);
let composite = new_status_output(
&config,
@ -452,7 +451,7 @@ async fn status_snapshot_hides_when_has_no_credits_flag() {
plan_type: None,
};
let rate_display = rate_limit_snapshot_display(&snapshot, captured_at);
let model_slug = ModelsManager::get_model_offline(config.model.as_deref());
let model_slug = codex_core::test_support::get_model_offline(config.model.as_deref());
let token_info = token_info_for(&model_slug, &config, &usage);
let composite = new_status_output(
&config,
@ -497,7 +496,7 @@ async fn status_card_token_usage_excludes_cached_tokens() {
.single()
.expect("timestamp");
let model_slug = ModelsManager::get_model_offline(config.model.as_deref());
let model_slug = codex_core::test_support::get_model_offline(config.model.as_deref());
let token_info = token_info_for(&model_slug, &config, &usage);
let composite = new_status_output(
&config,
@ -558,7 +557,7 @@ async fn status_snapshot_truncates_in_narrow_terminal() {
};
let rate_display = rate_limit_snapshot_display(&snapshot, captured_at);
let model_slug = ModelsManager::get_model_offline(config.model.as_deref());
let model_slug = codex_core::test_support::get_model_offline(config.model.as_deref());
let token_info = token_info_for(&model_slug, &config, &usage);
let reasoning_effort_override = Some(Some(ReasoningEffort::High));
let composite = new_status_output(
@ -608,7 +607,7 @@ async fn status_snapshot_shows_missing_limits_message() {
.single()
.expect("timestamp");
let model_slug = ModelsManager::get_model_offline(config.model.as_deref());
let model_slug = codex_core::test_support::get_model_offline(config.model.as_deref());
let token_info = token_info_for(&model_slug, &config, &usage);
let composite = new_status_output(
&config,
@ -677,7 +676,7 @@ async fn status_snapshot_includes_credits_and_limits() {
};
let rate_display = rate_limit_snapshot_display(&snapshot, captured_at);
let model_slug = ModelsManager::get_model_offline(config.model.as_deref());
let model_slug = codex_core::test_support::get_model_offline(config.model.as_deref());
let token_info = token_info_for(&model_slug, &config, &usage);
let composite = new_status_output(
&config,
@ -734,7 +733,7 @@ async fn status_snapshot_shows_empty_limits_message() {
.expect("timestamp");
let rate_display = rate_limit_snapshot_display(&snapshot, captured_at);
let model_slug = ModelsManager::get_model_offline(config.model.as_deref());
let model_slug = codex_core::test_support::get_model_offline(config.model.as_deref());
let token_info = token_info_for(&model_slug, &config, &usage);
let composite = new_status_output(
&config,
@ -800,7 +799,7 @@ async fn status_snapshot_shows_stale_limits_message() {
let rate_display = rate_limit_snapshot_display(&snapshot, captured_at);
let now = captured_at + ChronoDuration::minutes(20);
let model_slug = ModelsManager::get_model_offline(config.model.as_deref());
let model_slug = codex_core::test_support::get_model_offline(config.model.as_deref());
let token_info = token_info_for(&model_slug, &config, &usage);
let composite = new_status_output(
&config,
@ -870,7 +869,7 @@ async fn status_snapshot_cached_limits_hide_credits_without_flag() {
let rate_display = rate_limit_snapshot_display(&snapshot, captured_at);
let now = captured_at + ChronoDuration::minutes(20);
let model_slug = ModelsManager::get_model_offline(config.model.as_deref());
let model_slug = codex_core::test_support::get_model_offline(config.model.as_deref());
let token_info = token_info_for(&model_slug, &config, &usage);
let composite = new_status_output(
&config,
@ -924,7 +923,7 @@ async fn status_context_window_uses_last_usage() {
.single()
.expect("timestamp");
let model_slug = ModelsManager::get_model_offline(config.model.as_deref());
let model_slug = codex_core::test_support::get_model_offline(config.model.as_deref());
let token_info = TokenUsageInfo {
total_token_usage: total_usage.clone(),
last_token_usage: last_usage,