chore: migrate from Config::load_from_base_config_with_overrides to ConfigBuilder (#8276)

https://github.com/openai/codex/pull/8235 introduced `ConfigBuilder` and
this PR updates all non-test call sites to use it instead of
`Config::load_from_base_config_with_overrides()`.

This is important because `load_from_base_config_with_overrides()` uses
an empty `ConfigRequirements`, which is a reasonable default for testing
because it keeps tests from being influenced by the settings on the
host. This method is now guarded by `#[cfg(test)]` so it cannot be used
by business logic.

Because `ConfigBuilder::build()` is `async`, many of the test methods
had to be migrated to be `async`, as well. On the bright side, this made
it possible to eliminate a bunch of `block_on_future()` stuff.
This commit is contained in:
Michael Bolin 2025-12-18 16:12:52 -08:00 committed by GitHub
parent 2d9826098e
commit 3d4ced3ff5
No known key found for this signature in database
GPG key ID: B5690EEEBB952194
42 changed files with 1081 additions and 1176 deletions

1
codex-rs/Cargo.lock generated
View file

@ -1323,7 +1323,6 @@ dependencies = [
"thiserror 2.0.17",
"time",
"tokio",
"tokio-test",
"tokio-util",
"toml 0.9.5",
"toml_edit",

View file

@ -132,7 +132,6 @@ predicates = { workspace = true }
pretty_assertions = { workspace = true }
serial_test = { workspace = true }
tempfile = { workspace = true }
tokio-test = { workspace = true }
tracing-subscriber = { workspace = true }
tracing-test = { workspace = true, features = ["no-env-filter"] }
walkdir = { workspace = true }

View file

@ -636,8 +636,7 @@ mod tests {
use crate::auth::storage::FileAuthStorage;
use crate::auth::storage::get_auth_file;
use crate::config::Config;
use crate::config::ConfigOverrides;
use crate::config::ConfigToml;
use crate::config::ConfigBuilder;
use crate::token_data::IdTokenInfo;
use crate::token_data::KnownPlan as InternalKnownPlan;
use crate::token_data::PlanType as InternalPlanType;
@ -862,17 +861,16 @@ mod tests {
Ok(fake_jwt)
}
fn build_config(
async fn build_config(
codex_home: &Path,
forced_login_method: Option<ForcedLoginMethod>,
forced_chatgpt_workspace_id: Option<String>,
) -> Config {
let mut config = Config::load_from_base_config_with_overrides(
ConfigToml::default(),
ConfigOverrides::default(),
codex_home.to_path_buf(),
)
.expect("config should load");
let mut config = ConfigBuilder::default()
.codex_home(codex_home.to_path_buf())
.build()
.await
.expect("config should load");
config.forced_login_method = forced_login_method;
config.forced_chatgpt_workspace_id = forced_chatgpt_workspace_id;
config
@ -915,7 +913,7 @@ mod tests {
login_with_api_key(codex_home.path(), "sk-test", AuthCredentialsStoreMode::File)
.expect("seed api key");
let config = build_config(codex_home.path(), Some(ForcedLoginMethod::Chatgpt), None);
let config = build_config(codex_home.path(), Some(ForcedLoginMethod::Chatgpt), None).await;
let err = super::enforce_login_restrictions(&config)
.await
@ -941,7 +939,7 @@ mod tests {
)
.expect("failed to write auth file");
let config = build_config(codex_home.path(), None, Some("org_mine".to_string()));
let config = build_config(codex_home.path(), None, Some("org_mine".to_string())).await;
let err = super::enforce_login_restrictions(&config)
.await
@ -967,7 +965,7 @@ mod tests {
)
.expect("failed to write auth file");
let config = build_config(codex_home.path(), None, Some("org_mine".to_string()));
let config = build_config(codex_home.path(), None, Some("org_mine".to_string())).await;
super::enforce_login_restrictions(&config)
.await
@ -985,7 +983,7 @@ mod tests {
login_with_api_key(codex_home.path(), "sk-test", AuthCredentialsStoreMode::File)
.expect("seed api key");
let config = build_config(codex_home.path(), None, Some("org_mine".to_string()));
let config = build_config(codex_home.path(), None, Some("org_mine".to_string())).await;
super::enforce_login_restrictions(&config)
.await
@ -1002,7 +1000,7 @@ mod tests {
let _guard = EnvVarGuard::set(CODEX_API_KEY_ENV_VAR, "sk-env");
let codex_home = tempdir().unwrap();
let config = build_config(codex_home.path(), Some(ForcedLoginMethod::Chatgpt), None);
let config = build_config(codex_home.path(), Some(ForcedLoginMethod::Chatgpt), None).await;
let err = super::enforce_login_restrictions(&config)
.await

View file

@ -2750,8 +2750,7 @@ pub(crate) use tests::make_session_and_context_with_rx;
mod tests {
use super::*;
use crate::CodexAuth;
use crate::config::ConfigOverrides;
use crate::config::ConfigToml;
use crate::config::ConfigBuilder;
use crate::exec::ExecToolCallOutput;
use crate::function_tool::FunctionCallError;
use crate::shell::default_user_shell;
@ -2778,6 +2777,7 @@ mod tests {
use codex_app_server_protocol::AuthMode;
use codex_protocol::models::ContentItem;
use codex_protocol::models::ResponseItem;
use std::path::Path;
use std::time::Duration;
use tokio::time::sleep;
@ -2790,9 +2790,9 @@ mod tests {
use std::sync::Arc;
use std::time::Duration as StdDuration;
#[test]
fn reconstruct_history_matches_live_compactions() {
let (session, turn_context) = make_session_and_context();
#[tokio::test]
async fn reconstruct_history_matches_live_compactions() {
let (session, turn_context) = make_session_and_context().await;
let (rollout_items, expected) = sample_rollout(&session, &turn_context);
let reconstructed = session.reconstruct_history_from_rollout(&turn_context, &rollout_items);
@ -2800,47 +2800,40 @@ mod tests {
assert_eq!(expected, reconstructed);
}
#[test]
fn record_initial_history_reconstructs_resumed_transcript() {
let (session, turn_context) = make_session_and_context();
#[tokio::test]
async fn record_initial_history_reconstructs_resumed_transcript() {
let (session, turn_context) = make_session_and_context().await;
let (rollout_items, expected) = sample_rollout(&session, &turn_context);
tokio_test::block_on(session.record_initial_history(InitialHistory::Resumed(
ResumedHistory {
session
.record_initial_history(InitialHistory::Resumed(ResumedHistory {
conversation_id: ConversationId::default(),
history: rollout_items,
rollout_path: PathBuf::from("/tmp/resume.jsonl"),
},
)));
}))
.await;
let actual = tokio_test::block_on(async {
session.state.lock().await.clone_history().get_history()
});
let actual = session.state.lock().await.clone_history().get_history();
assert_eq!(expected, actual);
}
#[test]
fn record_initial_history_reconstructs_forked_transcript() {
let (session, turn_context) = make_session_and_context();
#[tokio::test]
async fn record_initial_history_reconstructs_forked_transcript() {
let (session, turn_context) = make_session_and_context().await;
let (rollout_items, expected) = sample_rollout(&session, &turn_context);
tokio_test::block_on(session.record_initial_history(InitialHistory::Forked(rollout_items)));
session
.record_initial_history(InitialHistory::Forked(rollout_items))
.await;
let actual = tokio_test::block_on(async {
session.state.lock().await.clone_history().get_history()
});
let actual = session.state.lock().await.clone_history().get_history();
assert_eq!(expected, actual);
}
#[test]
fn set_rate_limits_retains_previous_credits() {
#[tokio::test]
async fn set_rate_limits_retains_previous_credits() {
let codex_home = tempfile::tempdir().expect("create temp dir");
let config = Config::load_from_base_config_with_overrides(
ConfigToml::default(),
ConfigOverrides::default(),
codex_home.path().to_path_buf(),
)
.expect("load default test config");
let config = build_test_config(codex_home.path()).await;
let config = Arc::new(config);
let model = ModelsManager::get_model_offline(config.model.as_deref());
let session_configuration = SessionConfiguration {
@ -2904,15 +2897,10 @@ mod tests {
);
}
#[test]
fn set_rate_limits_updates_plan_type_when_present() {
#[tokio::test]
async fn set_rate_limits_updates_plan_type_when_present() {
let codex_home = tempfile::tempdir().expect("create temp dir");
let config = Config::load_from_base_config_with_overrides(
ConfigToml::default(),
ConfigOverrides::default(),
codex_home.path().to_path_buf(),
)
.expect("load default test config");
let config = build_test_config(codex_home.path()).await;
let config = Arc::new(config);
let model = ModelsManager::get_model_offline(config.model.as_deref());
let session_configuration = SessionConfiguration {
@ -3002,8 +2990,8 @@ mod tests {
assert_eq!(expected, got);
}
#[test]
fn includes_timed_out_message() {
#[tokio::test]
async fn includes_timed_out_message() {
let exec = ExecToolCallOutput {
exit_code: 0,
stdout: StreamOutput::new(String::new()),
@ -3012,7 +3000,7 @@ mod tests {
duration: StdDuration::from_secs(1),
timed_out: true,
};
let (_, turn_context) = make_session_and_context();
let (_, turn_context) = make_session_and_context().await;
let out = format_exec_output_str(&exec, turn_context.truncation_policy);
@ -3085,6 +3073,14 @@ mod tests {
})
}
async fn build_test_config(codex_home: &Path) -> Config {
ConfigBuilder::default()
.codex_home(codex_home.to_path_buf())
.build()
.await
.expect("load default test config")
}
fn otel_manager(
conversation_id: ConversationId,
config: &Config,
@ -3104,15 +3100,10 @@ mod tests {
)
}
pub(crate) fn make_session_and_context() -> (Session, TurnContext) {
pub(crate) async fn make_session_and_context() -> (Session, TurnContext) {
let (tx_event, _rx_event) = async_channel::unbounded();
let codex_home = tempfile::tempdir().expect("create temp dir");
let config = Config::load_from_base_config_with_overrides(
ConfigToml::default(),
ConfigOverrides::default(),
codex_home.path().to_path_buf(),
)
.expect("load default test config");
let config = build_test_config(codex_home.path()).await;
let config = Arc::new(config);
let conversation_id = ConversationId::default();
let auth_manager =
@ -3191,19 +3182,14 @@ mod tests {
// Like make_session_and_context, but returns Arc<Session> and the event receiver
// so tests can assert on emitted events.
pub(crate) fn make_session_and_context_with_rx() -> (
pub(crate) async fn make_session_and_context_with_rx() -> (
Arc<Session>,
Arc<TurnContext>,
async_channel::Receiver<Event>,
) {
let (tx_event, rx_event) = async_channel::unbounded();
let codex_home = tempfile::tempdir().expect("create temp dir");
let config = Config::load_from_base_config_with_overrides(
ConfigToml::default(),
ConfigOverrides::default(),
codex_home.path().to_path_buf(),
)
.expect("load default test config");
let config = build_test_config(codex_home.path()).await;
let config = Arc::new(config);
let conversation_id = ConversationId::default();
let auth_manager =
@ -3282,7 +3268,7 @@ mod tests {
#[tokio::test]
async fn record_model_warning_appends_user_message() {
let (mut session, turn_context) = make_session_and_context();
let (mut session, turn_context) = make_session_and_context().await;
let mut features = Features::with_defaults();
features.enable(Feature::ModelWarnings);
session.features = features;
@ -3341,7 +3327,7 @@ mod tests {
#[tokio::test(flavor = "multi_thread", worker_threads = 2)]
#[test_log::test]
async fn abort_regular_task_emits_turn_aborted_only() {
let (sess, tc, rx) = make_session_and_context_with_rx();
let (sess, tc, rx) = make_session_and_context_with_rx().await;
let input = vec![UserInput::Text {
text: "hello".to_string(),
}];
@ -3370,7 +3356,7 @@ mod tests {
#[tokio::test]
async fn abort_gracefuly_emits_turn_aborted_only() {
let (sess, tc, rx) = make_session_and_context_with_rx();
let (sess, tc, rx) = make_session_and_context_with_rx().await;
let input = vec![UserInput::Text {
text: "hello".to_string(),
}];
@ -3396,7 +3382,7 @@ mod tests {
#[tokio::test(flavor = "multi_thread", worker_threads = 2)]
async fn abort_review_task_emits_exited_then_aborted_and_records_history() {
let (sess, tc, rx) = make_session_and_context_with_rx();
let (sess, tc, rx) = make_session_and_context_with_rx().await;
let input = vec![UserInput::Text {
text: "start review".to_string(),
}];
@ -3444,7 +3430,7 @@ mod tests {
#[tokio::test]
async fn fatal_tool_error_stops_turn_and_reports_error() {
let (session, turn_context, _rx) = make_session_and_context_with_rx();
let (session, turn_context, _rx) = make_session_and_context_with_rx().await;
let tools = {
session
.services
@ -3607,7 +3593,7 @@ mod tests {
use crate::turn_diff_tracker::TurnDiffTracker;
use std::collections::HashMap;
let (session, mut turn_context_raw) = make_session_and_context();
let (session, mut turn_context_raw) = make_session_and_context().await;
// Ensure policy is NOT OnRequest so the early rejection path triggers
turn_context_raw.approval_policy = AskForApproval::OnFailure;
let session = Arc::new(session);
@ -3738,7 +3724,7 @@ mod tests {
use crate::sandboxing::SandboxPermissions;
use crate::turn_diff_tracker::TurnDiffTracker;
let (session, mut turn_context_raw) = make_session_and_context();
let (session, mut turn_context_raw) = make_session_and_context().await;
turn_context_raw.approval_policy = AskForApproval::OnFailure;
let session = Arc::new(session);
let turn_context = Arc::new(turn_context_raw);

View file

@ -366,7 +366,7 @@ mod tests {
rx_event: rx_events,
});
let (session, ctx, _rx_evt) = crate::codex::make_session_and_context_with_rx();
let (session, ctx, _rx_evt) = crate::codex::make_session_and_context_with_rx().await;
let (tx_out, rx_out) = bounded(1);
tx_out

View file

@ -694,7 +694,6 @@ mod tests {
use codex_protocol::openai_models::ReasoningEffort;
use pretty_assertions::assert_eq;
use tempfile::tempdir;
use tokio::runtime::Builder;
use toml::Value as TomlValue;
#[test]
@ -1455,22 +1454,16 @@ model_reasoning_effort = "high"
assert_eq!(contents, initial_expected);
}
#[test]
fn blocking_set_asynchronous_helpers_available() {
let rt = Builder::new_current_thread()
.enable_all()
.build()
.expect("runtime");
#[tokio::test]
async fn blocking_set_asynchronous_helpers_available() {
let tmp = tempdir().expect("tmpdir");
let codex_home = tmp.path().to_path_buf();
rt.block_on(async {
ConfigEditsBuilder::new(&codex_home)
.set_hide_full_access_warning(true)
.apply()
.await
.expect("persist");
});
ConfigEditsBuilder::new(&codex_home)
.set_hide_full_access_warning(true)
.apply()
.await
.expect("persist");
let raw = std::fs::read_to_string(codex_home.join(CONFIG_TOML_FILE)).expect("read config");
let notice = toml::from_str::<TomlValue>(&raw)

View file

@ -992,14 +992,13 @@ pub fn resolve_oss_provider(
}
impl Config {
/// Meant to be used exclusively for tests. For new tests, prefer using
/// [ConfigBuilder::build()], if possible, so ultimately we can make this
/// method private to this file.
pub fn load_from_base_config_with_overrides(
#[cfg(test)]
fn load_from_base_config_with_overrides(
cfg: ConfigToml,
overrides: ConfigOverrides,
codex_home: PathBuf,
) -> std::io::Result<Self> {
// Note this ignores requirements.toml enforcement for tests.
let requirements = ConfigRequirements::default();
Self::load_config_with_requirements(cfg, overrides, codex_home, requirements)
}

View file

@ -379,9 +379,9 @@ mod tests {
assert_matches!(truncated2, InitialHistory::New);
}
#[test]
fn ignores_session_prefix_messages_when_truncating() {
let (session, turn_context) = make_session_and_context();
#[tokio::test]
async fn ignores_session_prefix_messages_when_truncating() {
let (session, turn_context) = make_session_and_context().await;
let mut items = session.build_initial_context(&turn_context);
items.push(user_msg("feature request"));
items.push(assistant_msg("ack"));

View file

@ -401,9 +401,7 @@ fn history_log_id(_metadata: &std::fs::Metadata) -> Option<u64> {
#[cfg(test)]
mod tests {
use super::*;
use crate::config::Config;
use crate::config::ConfigOverrides;
use crate::config::ConfigToml;
use crate::config::ConfigBuilder;
use codex_protocol::ConversationId;
use pretty_assertions::assert_eq;
use std::fs::File;
@ -493,12 +491,11 @@ mod tests {
async fn append_entry_trims_history_when_beyond_max_bytes() {
let codex_home = TempDir::new().expect("create temp dir");
let mut config = Config::load_from_base_config_with_overrides(
ConfigToml::default(),
ConfigOverrides::default(),
codex_home.path().to_path_buf(),
)
.expect("load config");
let mut config = ConfigBuilder::default()
.codex_home(codex_home.path().to_path_buf())
.build()
.await
.expect("load config");
let conversation_id = ConversationId::new();
@ -541,12 +538,11 @@ mod tests {
async fn append_entry_trims_history_to_soft_cap() {
let codex_home = TempDir::new().expect("create temp dir");
let mut config = Config::load_from_base_config_with_overrides(
ConfigToml::default(),
ConfigOverrides::default(),
codex_home.path().to_path_buf(),
)
.expect("load config");
let mut config = ConfigBuilder::default()
.codex_home(codex_home.path().to_path_buf())
.build()
.await
.expect("load config");
let conversation_id = ConversationId::new();

View file

@ -314,9 +314,7 @@ mod tests {
use super::*;
use crate::CodexAuth;
use crate::auth::AuthCredentialsStoreMode;
use crate::config::Config;
use crate::config::ConfigOverrides;
use crate::config::ConfigToml;
use crate::config::ConfigBuilder;
use crate::features::Feature;
use crate::model_provider_info::WireApi;
use codex_protocol::openai_models::ModelsResponse;
@ -397,12 +395,11 @@ mod tests {
.await;
let codex_home = tempdir().expect("temp dir");
let mut config = Config::load_from_base_config_with_overrides(
ConfigToml::default(),
ConfigOverrides::default(),
codex_home.path().to_path_buf(),
)
.expect("load default test config");
let mut config = ConfigBuilder::default()
.codex_home(codex_home.path().to_path_buf())
.build()
.await
.expect("load default test config");
config.features.enable(Feature::RemoteModels);
let auth_manager =
AuthManager::from_auth_for_testing(CodexAuth::create_dummy_chatgpt_auth_for_testing());
@ -455,12 +452,11 @@ mod tests {
.await;
let codex_home = tempdir().expect("temp dir");
let mut config = Config::load_from_base_config_with_overrides(
ConfigToml::default(),
ConfigOverrides::default(),
codex_home.path().to_path_buf(),
)
.expect("load default test config");
let mut config = ConfigBuilder::default()
.codex_home(codex_home.path().to_path_buf())
.build()
.await
.expect("load default test config");
config.features.enable(Feature::RemoteModels);
let auth_manager = Arc::new(AuthManager::new(
codex_home.path().to_path_buf(),
@ -511,12 +507,11 @@ mod tests {
.await;
let codex_home = tempdir().expect("temp dir");
let mut config = Config::load_from_base_config_with_overrides(
ConfigToml::default(),
ConfigOverrides::default(),
codex_home.path().to_path_buf(),
)
.expect("load default test config");
let mut config = ConfigBuilder::default()
.codex_home(codex_home.path().to_path_buf())
.build()
.await
.expect("load default test config");
config.features.enable(Feature::RemoteModels);
let auth_manager = Arc::new(AuthManager::new(
codex_home.path().to_path_buf(),
@ -587,12 +582,11 @@ mod tests {
.await;
let codex_home = tempdir().expect("temp dir");
let mut config = Config::load_from_base_config_with_overrides(
ConfigToml::default(),
ConfigOverrides::default(),
codex_home.path().to_path_buf(),
)
.expect("load default test config");
let mut config = ConfigBuilder::default()
.codex_home(codex_home.path().to_path_buf())
.build()
.await
.expect("load default test config");
config.features.enable(Feature::RemoteModels);
let auth_manager =
AuthManager::from_auth_for_testing(CodexAuth::create_dummy_chatgpt_auth_for_testing());

View file

@ -232,8 +232,7 @@ fn merge_project_docs_with_skills(
#[cfg(test)]
mod tests {
use super::*;
use crate::config::ConfigOverrides;
use crate::config::ConfigToml;
use crate::config::ConfigBuilder;
use crate::skills::load_skills;
use std::fs;
use std::path::PathBuf;
@ -244,14 +243,13 @@ mod tests {
/// optionally specify a custom `instructions` string when `None` the
/// value is cleared to mimic a scenario where no system instructions have
/// been configured.
fn make_config(root: &TempDir, limit: usize, instructions: Option<&str>) -> Config {
async fn make_config(root: &TempDir, limit: usize, instructions: Option<&str>) -> Config {
let codex_home = TempDir::new().unwrap();
let mut config = Config::load_from_base_config_with_overrides(
ConfigToml::default(),
ConfigOverrides::default(),
codex_home.path().to_path_buf(),
)
.expect("defaults for test should always succeed");
let mut config = ConfigBuilder::default()
.codex_home(codex_home.path().to_path_buf())
.build()
.await
.expect("defaults for test should always succeed");
config.cwd = root.path().to_path_buf();
config.project_doc_max_bytes = limit;
@ -260,13 +258,13 @@ mod tests {
config
}
fn make_config_with_fallback(
async fn make_config_with_fallback(
root: &TempDir,
limit: usize,
instructions: Option<&str>,
fallbacks: &[&str],
) -> Config {
let mut config = make_config(root, limit, instructions);
let mut config = make_config(root, limit, instructions).await;
config.project_doc_fallback_filenames = fallbacks
.iter()
.map(std::string::ToString::to_string)
@ -279,7 +277,7 @@ mod tests {
async fn no_doc_file_returns_none() {
let tmp = tempfile::tempdir().expect("tempdir");
let res = get_user_instructions(&make_config(&tmp, 4096, None), None).await;
let res = get_user_instructions(&make_config(&tmp, 4096, None).await, None).await;
assert!(
res.is_none(),
"Expected None when AGENTS.md is absent and no system instructions provided"
@ -293,7 +291,7 @@ mod tests {
let tmp = tempfile::tempdir().expect("tempdir");
fs::write(tmp.path().join("AGENTS.md"), "hello world").unwrap();
let res = get_user_instructions(&make_config(&tmp, 4096, None), None)
let res = get_user_instructions(&make_config(&tmp, 4096, None).await, None)
.await
.expect("doc expected");
@ -312,7 +310,7 @@ mod tests {
let huge = "A".repeat(LIMIT * 2); // 2 KiB
fs::write(tmp.path().join("AGENTS.md"), &huge).unwrap();
let res = get_user_instructions(&make_config(&tmp, LIMIT, None), None)
let res = get_user_instructions(&make_config(&tmp, LIMIT, None).await, None)
.await
.expect("doc expected");
@ -341,7 +339,7 @@ mod tests {
std::fs::create_dir_all(&nested).unwrap();
// Build config pointing at the nested dir.
let mut cfg = make_config(&repo, 4096, None);
let mut cfg = make_config(&repo, 4096, None).await;
cfg.cwd = nested;
let res = get_user_instructions(&cfg, None)
@ -356,7 +354,7 @@ mod tests {
let tmp = tempfile::tempdir().expect("tempdir");
fs::write(tmp.path().join("AGENTS.md"), "something").unwrap();
let res = get_user_instructions(&make_config(&tmp, 0, None), None).await;
let res = get_user_instructions(&make_config(&tmp, 0, None).await, None).await;
assert!(
res.is_none(),
"With limit 0 the function should return None"
@ -372,7 +370,7 @@ mod tests {
const INSTRUCTIONS: &str = "base instructions";
let res = get_user_instructions(&make_config(&tmp, 4096, Some(INSTRUCTIONS)), None)
let res = get_user_instructions(&make_config(&tmp, 4096, Some(INSTRUCTIONS)).await, None)
.await
.expect("should produce a combined instruction string");
@ -389,7 +387,8 @@ mod tests {
const INSTRUCTIONS: &str = "some instructions";
let res = get_user_instructions(&make_config(&tmp, 4096, Some(INSTRUCTIONS)), None).await;
let res =
get_user_instructions(&make_config(&tmp, 4096, Some(INSTRUCTIONS)).await, None).await;
assert_eq!(res, Some(INSTRUCTIONS.to_string()));
}
@ -415,7 +414,7 @@ mod tests {
std::fs::create_dir_all(&nested).unwrap();
fs::write(nested.join("AGENTS.md"), "crate doc").unwrap();
let mut cfg = make_config(&repo, 4096, None);
let mut cfg = make_config(&repo, 4096, None).await;
cfg.cwd = nested;
let res = get_user_instructions(&cfg, None)
@ -431,7 +430,7 @@ mod tests {
fs::write(tmp.path().join(DEFAULT_PROJECT_DOC_FILENAME), "versioned").unwrap();
fs::write(tmp.path().join(LOCAL_PROJECT_DOC_FILENAME), "local").unwrap();
let cfg = make_config(&tmp, 4096, None);
let cfg = make_config(&tmp, 4096, None).await;
let res = get_user_instructions(&cfg, None)
.await
@ -453,7 +452,7 @@ mod tests {
let tmp = tempfile::tempdir().expect("tempdir");
fs::write(tmp.path().join("EXAMPLE.md"), "example instructions").unwrap();
let cfg = make_config_with_fallback(&tmp, 4096, None, &["EXAMPLE.md"]);
let cfg = make_config_with_fallback(&tmp, 4096, None, &["EXAMPLE.md"]).await;
let res = get_user_instructions(&cfg, None)
.await
@ -469,7 +468,7 @@ mod tests {
fs::write(tmp.path().join("AGENTS.md"), "primary").unwrap();
fs::write(tmp.path().join("EXAMPLE.md"), "secondary").unwrap();
let cfg = make_config_with_fallback(&tmp, 4096, None, &["EXAMPLE.md", ".example.md"]);
let cfg = make_config_with_fallback(&tmp, 4096, None, &["EXAMPLE.md", ".example.md"]).await;
let res = get_user_instructions(&cfg, None)
.await
@ -493,7 +492,7 @@ mod tests {
let tmp = tempfile::tempdir().expect("tempdir");
fs::write(tmp.path().join("AGENTS.md"), "base doc").unwrap();
let cfg = make_config(&tmp, 4096, None);
let cfg = make_config(&tmp, 4096, None).await;
create_skill(
cfg.codex_home.clone(),
"pdf-processing",
@ -524,7 +523,7 @@ mod tests {
#[tokio::test]
async fn skills_render_without_project_doc() {
let tmp = tempfile::tempdir().expect("tempdir");
let cfg = make_config(&tmp, 4096, None);
let cfg = make_config(&tmp, 4096, None).await;
create_skill(cfg.codex_home.clone(), "linting", "run clippy");
let skills = load_skills(&cfg);

View file

@ -302,21 +302,19 @@ fn extract_frontmatter(contents: &str) -> Option<String> {
#[cfg(test)]
mod tests {
use super::*;
use crate::config::ConfigOverrides;
use crate::config::ConfigToml;
use crate::config::ConfigBuilder;
use codex_protocol::protocol::SkillScope;
use pretty_assertions::assert_eq;
use std::path::Path;
use std::process::Command;
use tempfile::TempDir;
fn make_config(codex_home: &TempDir) -> Config {
let mut config = Config::load_from_base_config_with_overrides(
ConfigToml::default(),
ConfigOverrides::default(),
codex_home.path().to_path_buf(),
)
.expect("defaults for test should always succeed");
async fn make_config(codex_home: &TempDir) -> Config {
let mut config = ConfigBuilder::default()
.codex_home(codex_home.path().to_path_buf())
.build()
.await
.expect("defaults for test should always succeed");
config.cwd = codex_home.path().to_path_buf();
config
@ -352,11 +350,11 @@ mod tests {
path
}
#[test]
fn loads_valid_skill() {
#[tokio::test]
async fn loads_valid_skill() {
let codex_home = tempfile::tempdir().expect("tempdir");
write_skill(&codex_home, "demo", "demo-skill", "does things\ncarefully");
let cfg = make_config(&codex_home);
let cfg = make_config(&codex_home).await;
let outcome = load_skills(&cfg);
assert!(
@ -376,15 +374,15 @@ mod tests {
);
}
#[test]
fn loads_short_description_from_metadata() {
#[tokio::test]
async fn loads_short_description_from_metadata() {
let codex_home = tempfile::tempdir().expect("tempdir");
let skill_dir = codex_home.path().join("skills/demo");
fs::create_dir_all(&skill_dir).unwrap();
let contents = "---\nname: demo-skill\ndescription: long description\nmetadata:\n short-description: short summary\n---\n\n# Body\n";
fs::write(skill_dir.join(SKILLS_FILENAME), contents).unwrap();
let cfg = make_config(&codex_home);
let cfg = make_config(&codex_home).await;
let outcome = load_skills(&cfg);
assert!(
outcome.errors.is_empty(),
@ -398,8 +396,8 @@ mod tests {
);
}
#[test]
fn enforces_short_description_length_limits() {
#[tokio::test]
async fn enforces_short_description_length_limits() {
let codex_home = tempfile::tempdir().expect("tempdir");
let skill_dir = codex_home.path().join("skills/demo");
fs::create_dir_all(&skill_dir).unwrap();
@ -409,7 +407,7 @@ mod tests {
);
fs::write(skill_dir.join(SKILLS_FILENAME), contents).unwrap();
let cfg = make_config(&codex_home);
let cfg = make_config(&codex_home).await;
let outcome = load_skills(&cfg);
assert_eq!(outcome.skills.len(), 0);
assert_eq!(outcome.errors.len(), 1);
@ -422,8 +420,8 @@ mod tests {
);
}
#[test]
fn skips_hidden_and_invalid() {
#[tokio::test]
async fn skips_hidden_and_invalid() {
let codex_home = tempfile::tempdir().expect("tempdir");
let hidden_dir = codex_home.path().join("skills/.hidden");
fs::create_dir_all(&hidden_dir).unwrap();
@ -438,7 +436,7 @@ mod tests {
fs::create_dir_all(&invalid_dir).unwrap();
fs::write(invalid_dir.join(SKILLS_FILENAME), "---\nname: bad").unwrap();
let cfg = make_config(&codex_home);
let cfg = make_config(&codex_home).await;
let outcome = load_skills(&cfg);
assert_eq!(outcome.skills.len(), 0);
assert_eq!(outcome.errors.len(), 1);
@ -450,12 +448,12 @@ mod tests {
);
}
#[test]
fn enforces_length_limits() {
#[tokio::test]
async fn enforces_length_limits() {
let codex_home = tempfile::tempdir().expect("tempdir");
let max_desc = "\u{1F4A1}".repeat(MAX_DESCRIPTION_LEN);
write_skill(&codex_home, "max-len", "max-len", &max_desc);
let cfg = make_config(&codex_home);
let cfg = make_config(&codex_home).await;
let outcome = load_skills(&cfg);
assert!(
@ -476,8 +474,8 @@ mod tests {
);
}
#[test]
fn loads_skills_from_repo_root() {
#[tokio::test]
async fn loads_skills_from_repo_root() {
let codex_home = tempfile::tempdir().expect("tempdir");
let repo_dir = tempfile::tempdir().expect("tempdir");
@ -493,7 +491,7 @@ mod tests {
.join(REPO_ROOT_CONFIG_DIR_NAME)
.join(SKILLS_DIR_NAME);
write_skill_at(&skills_root, "repo", "repo-skill", "from repo");
let mut cfg = make_config(&codex_home);
let mut cfg = make_config(&codex_home).await;
cfg.cwd = repo_dir.path().to_path_buf();
let repo_root = normalize_path(&skills_root).unwrap_or_else(|_| skills_root.clone());
@ -509,8 +507,8 @@ mod tests {
assert!(skill.path.starts_with(&repo_root));
}
#[test]
fn loads_skills_from_nearest_codex_dir_under_repo_root() {
#[tokio::test]
async fn loads_skills_from_nearest_codex_dir_under_repo_root() {
let codex_home = tempfile::tempdir().expect("tempdir");
let repo_dir = tempfile::tempdir().expect("tempdir");
@ -544,7 +542,7 @@ mod tests {
"from nested",
);
let mut cfg = make_config(&codex_home);
let mut cfg = make_config(&codex_home).await;
cfg.cwd = nested_dir;
let outcome = load_skills(&cfg);
@ -557,8 +555,8 @@ mod tests {
assert_eq!(outcome.skills[0].name, "nested-skill");
}
#[test]
fn loads_skills_from_codex_dir_when_not_git_repo() {
#[tokio::test]
async fn loads_skills_from_codex_dir_when_not_git_repo() {
let codex_home = tempfile::tempdir().expect("tempdir");
let work_dir = tempfile::tempdir().expect("tempdir");
@ -572,7 +570,7 @@ mod tests {
"from cwd",
);
let mut cfg = make_config(&codex_home);
let mut cfg = make_config(&codex_home).await;
cfg.cwd = work_dir.path().to_path_buf();
let outcome = load_skills(&cfg);
@ -586,8 +584,8 @@ mod tests {
assert_eq!(outcome.skills[0].scope, SkillScope::Repo);
}
#[test]
fn deduplicates_by_name_preferring_repo_over_user() {
#[tokio::test]
async fn deduplicates_by_name_preferring_repo_over_user() {
let codex_home = tempfile::tempdir().expect("tempdir");
let repo_dir = tempfile::tempdir().expect("tempdir");
@ -609,7 +607,7 @@ mod tests {
"from repo",
);
let mut cfg = make_config(&codex_home);
let mut cfg = make_config(&codex_home).await;
cfg.cwd = repo_dir.path().to_path_buf();
let outcome = load_skills(&cfg);
@ -623,14 +621,14 @@ mod tests {
assert_eq!(outcome.skills[0].scope, SkillScope::Repo);
}
#[test]
fn loads_system_skills_with_lowest_priority() {
#[tokio::test]
async fn loads_system_skills_with_lowest_priority() {
let codex_home = tempfile::tempdir().expect("tempdir");
write_system_skill(&codex_home, "system", "dupe-skill", "from system");
write_skill(&codex_home, "user", "dupe-skill", "from user");
let cfg = make_config(&codex_home);
let cfg = make_config(&codex_home).await;
let outcome = load_skills(&cfg);
assert!(
outcome.errors.is_empty(),
@ -642,8 +640,8 @@ mod tests {
assert_eq!(outcome.skills[0].scope, SkillScope::User);
}
#[test]
fn repo_skills_search_does_not_escape_repo_root() {
#[tokio::test]
async fn repo_skills_search_does_not_escape_repo_root() {
let codex_home = tempfile::tempdir().expect("tempdir");
let outer_dir = tempfile::tempdir().expect("tempdir");
let repo_dir = outer_dir.path().join("repo");
@ -666,7 +664,7 @@ mod tests {
.expect("git init");
assert!(status.success(), "git init failed");
let mut cfg = make_config(&codex_home);
let mut cfg = make_config(&codex_home).await;
cfg.cwd = repo_dir;
let outcome = load_skills(&cfg);
@ -678,8 +676,8 @@ mod tests {
assert_eq!(outcome.skills.len(), 0);
}
#[test]
fn loads_skills_when_cwd_is_file_in_repo() {
#[tokio::test]
async fn loads_skills_when_cwd_is_file_in_repo() {
let codex_home = tempfile::tempdir().expect("tempdir");
let repo_dir = tempfile::tempdir().expect("tempdir");
@ -702,7 +700,7 @@ mod tests {
let file_path = repo_dir.path().join("some-file.txt");
fs::write(&file_path, "contents").unwrap();
let mut cfg = make_config(&codex_home);
let mut cfg = make_config(&codex_home).await;
cfg.cwd = file_path;
let outcome = load_skills(&cfg);
@ -716,8 +714,8 @@ mod tests {
assert_eq!(outcome.skills[0].scope, SkillScope::Repo);
}
#[test]
fn non_git_repo_skills_search_does_not_walk_parents() {
#[tokio::test]
async fn non_git_repo_skills_search_does_not_walk_parents() {
let codex_home = tempfile::tempdir().expect("tempdir");
let outer_dir = tempfile::tempdir().expect("tempdir");
let nested_dir = outer_dir.path().join("nested/inner");
@ -733,7 +731,7 @@ mod tests {
"from outer",
);
let mut cfg = make_config(&codex_home);
let mut cfg = make_config(&codex_home).await;
cfg.cwd = nested_dir;
let outcome = load_skills(&cfg);
@ -745,14 +743,14 @@ mod tests {
assert_eq!(outcome.skills.len(), 0);
}
#[test]
fn loads_skills_from_system_cache_when_present() {
#[tokio::test]
async fn loads_skills_from_system_cache_when_present() {
let codex_home = tempfile::tempdir().expect("tempdir");
let work_dir = tempfile::tempdir().expect("tempdir");
write_system_skill(&codex_home, "system", "system-skill", "from system");
let mut cfg = make_config(&codex_home);
let mut cfg = make_config(&codex_home).await;
cfg.cwd = work_dir.path().to_path_buf();
let outcome = load_skills(&cfg);
@ -766,15 +764,15 @@ mod tests {
assert_eq!(outcome.skills[0].scope, SkillScope::System);
}
#[test]
fn deduplicates_by_name_preferring_user_over_system() {
#[tokio::test]
async fn deduplicates_by_name_preferring_user_over_system() {
let codex_home = tempfile::tempdir().expect("tempdir");
let work_dir = tempfile::tempdir().expect("tempdir");
write_skill(&codex_home, "user", "dupe-skill", "from user");
write_system_skill(&codex_home, "system", "dupe-skill", "from system");
let mut cfg = make_config(&codex_home);
let mut cfg = make_config(&codex_home).await;
cfg.cwd = work_dir.path().to_path_buf();
let outcome = load_skills(&cfg);
@ -788,8 +786,8 @@ mod tests {
assert_eq!(outcome.skills[0].scope, SkillScope::User);
}
#[test]
fn deduplicates_by_name_preferring_repo_over_system() {
#[tokio::test]
async fn deduplicates_by_name_preferring_repo_over_system() {
let codex_home = tempfile::tempdir().expect("tempdir");
let repo_dir = tempfile::tempdir().expect("tempdir");
@ -811,7 +809,7 @@ mod tests {
);
write_system_skill(&codex_home, "system", "dupe-skill", "from system");
let mut cfg = make_config(&codex_home);
let mut cfg = make_config(&codex_home).await;
cfg.cwd = repo_dir.path().to_path_buf();
let outcome = load_skills(&cfg);

View file

@ -358,9 +358,9 @@ mod tests {
));
}
#[test]
fn shell_command_handler_to_exec_params_uses_session_shell_and_turn_context() {
let (session, turn_context) = make_session_and_context();
#[tokio::test]
async fn shell_command_handler_to_exec_params_uses_session_shell_and_turn_context() {
let (session, turn_context) = make_session_and_context().await;
let command = "echo hello".to_string();
let workdir = Some("subdir".to_string());

View file

@ -187,8 +187,8 @@ mod tests {
use super::session::OutputBufferState;
fn test_session_and_turn() -> (Arc<Session>, Arc<TurnContext>) {
let (session, mut turn) = make_session_and_context();
async fn test_session_and_turn() -> (Arc<Session>, Arc<TurnContext>) {
let (session, mut turn) = make_session_and_context().await;
turn.approval_policy = AskForApproval::Never;
turn.sandbox_policy = SandboxPolicy::DangerFullAccess;
(Arc::new(session), Arc::new(turn))
@ -266,7 +266,7 @@ mod tests {
async fn unified_exec_persists_across_requests() -> anyhow::Result<()> {
skip_if_sandbox!(Ok(()));
let (session, turn) = test_session_and_turn();
let (session, turn) = test_session_and_turn().await;
let open_shell = exec_command(&session, &turn, "bash -i", 2_500).await?;
let process_id = open_shell
@ -302,7 +302,7 @@ mod tests {
async fn multi_unified_exec_sessions() -> anyhow::Result<()> {
skip_if_sandbox!(Ok(()));
let (session, turn) = test_session_and_turn();
let (session, turn) = test_session_and_turn().await;
let shell_a = exec_command(&session, &turn, "bash -i", 2_500).await?;
let session_a = shell_a
@ -354,7 +354,7 @@ mod tests {
async fn unified_exec_timeouts() -> anyhow::Result<()> {
skip_if_sandbox!(Ok(()));
let (session, turn) = test_session_and_turn();
let (session, turn) = test_session_and_turn().await;
let open_shell = exec_command(&session, &turn, "bash -i", 2_500).await?;
let process_id = open_shell
@ -398,7 +398,7 @@ mod tests {
#[tokio::test]
#[ignore] // Ignored while we have a better way to test this.
async fn requests_with_large_timeout_are_capped() -> anyhow::Result<()> {
let (session, turn) = test_session_and_turn();
let (session, turn) = test_session_and_turn().await;
let result = exec_command(&session, &turn, "echo codex", 120_000).await?;
@ -411,7 +411,7 @@ mod tests {
#[tokio::test]
#[ignore] // Ignored while we have a better way to test this.
async fn completed_commands_do_not_persist_sessions() -> anyhow::Result<()> {
let (session, turn) = test_session_and_turn();
let (session, turn) = test_session_and_turn().await;
let result = exec_command(&session, &turn, "echo codex", 2_500).await?;
assert!(
@ -438,7 +438,7 @@ mod tests {
async fn reusing_completed_session_returns_unknown_session() -> anyhow::Result<()> {
skip_if_sandbox!(Ok(()));
let (session, turn) = test_session_and_turn();
let (session, turn) = test_session_and_turn().await;
let open_shell = exec_command(&session, &turn, "bash -i", 2_500).await?;
let process_id = open_shell

View file

@ -80,8 +80,8 @@ mod tests {
assert!(!is_user_shell_command_text("echo hi"));
}
#[test]
fn formats_basic_record() {
#[tokio::test]
async fn formats_basic_record() {
let exec_output = ExecToolCallOutput {
exit_code: 0,
stdout: StreamOutput::new("hi".to_string()),
@ -90,7 +90,7 @@ mod tests {
duration: Duration::from_secs(1),
timed_out: false,
};
let (_, turn_context) = make_session_and_context();
let (_, turn_context) = make_session_and_context().await;
let item = user_shell_command_record_item("echo hi", &exec_output, &turn_context);
let ResponseItem::Message { content, .. } = item else {
panic!("expected message");
@ -104,8 +104,8 @@ mod tests {
);
}
#[test]
fn uses_aggregated_output_over_streams() {
#[tokio::test]
async fn uses_aggregated_output_over_streams() {
let exec_output = ExecToolCallOutput {
exit_code: 42,
stdout: StreamOutput::new("stdout-only".to_string()),
@ -114,7 +114,7 @@ mod tests {
duration: Duration::from_millis(120),
timed_out: false,
};
let (_, turn_context) = make_session_and_context();
let (_, turn_context) = make_session_and_context().await;
let record = format_user_shell_command_record("false", &exec_output, &turn_context);
assert_eq!(
record,

View file

@ -65,7 +65,7 @@ async fn run_request(input: Vec<ResponseItem>) -> Value {
Ok(dir) => dir,
Err(e) => panic!("failed to create TempDir: {e}"),
};
let mut config = load_default_config_for_test(&codex_home);
let mut config = load_default_config_for_test(&codex_home).await;
config.model_provider_id = provider.name.clone();
config.model_provider = provider.clone();
config.show_raw_agent_reasoning = true;

View file

@ -64,7 +64,7 @@ async fn run_stream_with_bytes(sse_body: &[u8]) -> Vec<ResponseEvent> {
Ok(dir) => dir,
Err(e) => panic!("failed to create TempDir: {e}"),
};
let mut config = load_default_config_for_test(&codex_home);
let mut config = load_default_config_for_test(&codex_home).await;
config.model_provider_id = provider.name.clone();
config.model_provider = provider.clone();
config.show_raw_agent_reasoning = true;

View file

@ -4,8 +4,8 @@ use tempfile::TempDir;
use codex_core::CodexConversation;
use codex_core::config::Config;
use codex_core::config::ConfigBuilder;
use codex_core::config::ConfigOverrides;
use codex_core::config::ConfigToml;
use codex_utils_absolute_path::AbsolutePathBuf;
use regex_lite::Regex;
use std::path::PathBuf;
@ -75,13 +75,13 @@ pub fn test_tmp_path_buf() -> PathBuf {
/// Returns a default `Config` whose on-disk state is confined to the provided
/// temporary directory. Using a per-test directory keeps tests hermetic and
/// avoids clobbering a developers real `~/.codex`.
pub fn load_default_config_for_test(codex_home: &TempDir) -> Config {
Config::load_from_base_config_with_overrides(
ConfigToml::default(),
default_test_overrides(),
codex_home.path().to_path_buf(),
)
.expect("defaults for test should always succeed")
pub async fn load_default_config_for_test(codex_home: &TempDir) -> Config {
ConfigBuilder::default()
.codex_home(codex_home.path().to_path_buf())
.harness_overrides(default_test_overrides())
.build()
.await
.expect("defaults for test should always succeed")
}
#[cfg(target_os = "linux")]

View file

@ -178,7 +178,7 @@ impl TestCodexBuilder {
..built_in_model_providers()["openai"].clone()
};
let cwd = Arc::new(TempDir::new()?);
let mut config = load_default_config_for_test(home);
let mut config = load_default_config_for_test(home).await;
config.cwd = cwd.path().to_path_buf();
config.model_provider = model_provider;
for hook in self.pre_build_hooks.drain(..) {

View file

@ -57,7 +57,7 @@ async fn responses_stream_includes_subagent_header_on_review() {
};
let codex_home = TempDir::new().expect("failed to create TempDir");
let mut config = load_default_config_for_test(&codex_home);
let mut config = load_default_config_for_test(&codex_home).await;
config.model_provider_id = provider.name.clone();
config.model_provider = provider.clone();
let effort = config.model_reasoning_effort;
@ -151,7 +151,7 @@ async fn responses_stream_includes_subagent_header_on_other() {
};
let codex_home = TempDir::new().expect("failed to create TempDir");
let mut config = load_default_config_for_test(&codex_home);
let mut config = load_default_config_for_test(&codex_home).await;
config.model_provider_id = provider.name.clone();
config.model_provider = provider.clone();
let effort = config.model_reasoning_effort;
@ -241,7 +241,7 @@ async fn responses_respects_model_family_overrides_from_config() {
};
let codex_home = TempDir::new().expect("failed to create TempDir");
let mut config = load_default_config_for_test(&codex_home);
let mut config = load_default_config_for_test(&codex_home).await;
config.model = Some("gpt-3.5-turbo".to_string());
config.model_provider_id = provider.name.clone();
config.model_provider = provider.clone();

View file

@ -254,7 +254,7 @@ async fn resume_includes_initial_messages_and_sends_prior_items() {
..built_in_model_providers()["openai"].clone()
};
let codex_home = TempDir::new().unwrap();
let mut config = load_default_config_for_test(&codex_home);
let mut config = load_default_config_for_test(&codex_home).await;
config.model_provider = model_provider;
// Also configure user instructions to ensure they are NOT delivered on resume.
config.user_instructions = Some("be nice".to_string());
@ -343,7 +343,7 @@ async fn includes_conversation_id_and_model_headers_in_request() {
// Init session
let codex_home = TempDir::new().unwrap();
let mut config = load_default_config_for_test(&codex_home);
let mut config = load_default_config_for_test(&codex_home).await;
config.model_provider = model_provider;
let conversation_manager = ConversationManager::with_models_provider_and_home(
@ -403,7 +403,7 @@ async fn includes_base_instructions_override_in_request() {
..built_in_model_providers()["openai"].clone()
};
let codex_home = TempDir::new().unwrap();
let mut config = load_default_config_for_test(&codex_home);
let mut config = load_default_config_for_test(&codex_home).await;
config.base_instructions = Some("test instructions".to_string());
config.model_provider = model_provider;
@ -467,7 +467,7 @@ async fn chatgpt_auth_sends_correct_request() {
// Init session
let codex_home = TempDir::new().unwrap();
let mut config = load_default_config_for_test(&codex_home);
let mut config = load_default_config_for_test(&codex_home).await;
config.model_provider = model_provider;
let conversation_manager = ConversationManager::with_models_provider_and_home(
create_dummy_codex_auth(),
@ -559,7 +559,7 @@ async fn prefers_apikey_when_config_prefers_apikey_even_with_chatgpt_tokens() {
Some("acc-123"),
);
let mut config = load_default_config_for_test(&codex_home);
let mut config = load_default_config_for_test(&codex_home).await;
config.model_provider = model_provider;
let auth_manager =
@ -602,7 +602,7 @@ async fn includes_user_instructions_message_in_request() {
};
let codex_home = TempDir::new().unwrap();
let mut config = load_default_config_for_test(&codex_home);
let mut config = load_default_config_for_test(&codex_home).await;
config.model_provider = model_provider;
config.user_instructions = Some("be nice".to_string());
@ -671,7 +671,7 @@ async fn skills_append_to_instructions() {
)
.expect("write skill");
let mut config = load_default_config_for_test(&codex_home);
let mut config = load_default_config_for_test(&codex_home).await;
config.model_provider = model_provider;
config.cwd = codex_home.path().to_path_buf();
config.features.enable(Feature::Skills);
@ -1029,7 +1029,7 @@ async fn includes_developer_instructions_message_in_request() {
};
let codex_home = TempDir::new().unwrap();
let mut config = load_default_config_for_test(&codex_home);
let mut config = load_default_config_for_test(&codex_home).await;
config.model_provider = model_provider;
config.user_instructions = Some("be nice".to_string());
config.developer_instructions = Some("be useful".to_string());
@ -1119,7 +1119,7 @@ async fn azure_responses_request_includes_store_and_reasoning_ids() {
};
let codex_home = TempDir::new().unwrap();
let mut config = load_default_config_for_test(&codex_home);
let mut config = load_default_config_for_test(&codex_home).await;
config.model_provider_id = provider.name.clone();
config.model_provider = provider.clone();
let effort = config.model_reasoning_effort;
@ -1261,7 +1261,7 @@ async fn token_count_includes_rate_limits_snapshot() {
provider.base_url = Some(format!("{}/v1", server.uri()));
let home = TempDir::new().unwrap();
let mut config = load_default_config_for_test(&home);
let mut config = load_default_config_for_test(&home).await;
config.model_provider = provider;
let conversation_manager = ConversationManager::with_models_provider_and_home(
@ -1616,7 +1616,7 @@ async fn azure_overrides_assign_properties_used_for_responses_url() {
// Init session
let codex_home = TempDir::new().unwrap();
let mut config = load_default_config_for_test(&codex_home);
let mut config = load_default_config_for_test(&codex_home).await;
config.model_provider = provider;
let conversation_manager = ConversationManager::with_models_provider_and_home(
@ -1698,7 +1698,7 @@ async fn env_var_overrides_loaded_auth() {
// Init session
let codex_home = TempDir::new().unwrap();
let mut config = load_default_config_for_test(&codex_home);
let mut config = load_default_config_for_test(&codex_home).await;
config.model_provider = provider;
let conversation_manager = ConversationManager::with_models_provider_and_home(
@ -1780,7 +1780,7 @@ async fn history_dedupes_streamed_and_final_messages_across_turns() {
// Init session with isolated codex home.
let codex_home = TempDir::new().unwrap();
let mut config = load_default_config_for_test(&codex_home);
let mut config = load_default_config_for_test(&codex_home).await;
config.model_provider = model_provider;
let conversation_manager = ConversationManager::with_models_provider_and_home(

View file

@ -137,7 +137,7 @@ async fn summarize_context_three_requests_and_instructions() {
// Build config pointing to the mock server and spawn Codex.
let model_provider = non_openai_model_provider(&server);
let home = TempDir::new().unwrap();
let mut config = load_default_config_for_test(&home);
let mut config = load_default_config_for_test(&home).await;
config.model_provider = model_provider;
set_test_compact_prompt(&mut config);
config.model_auto_compact_token_limit = Some(200_000);
@ -331,7 +331,7 @@ async fn manual_compact_uses_custom_prompt() {
let model_provider = non_openai_model_provider(&server);
let home = TempDir::new().unwrap();
let mut config = load_default_config_for_test(&home);
let mut config = load_default_config_for_test(&home).await;
config.model_provider = model_provider;
config.compact_prompt = Some(custom_prompt.to_string());
@ -411,7 +411,7 @@ async fn manual_compact_emits_api_and_local_token_usage_events() {
let model_provider = non_openai_model_provider(&server);
let home = TempDir::new().unwrap();
let mut config = load_default_config_for_test(&home);
let mut config = load_default_config_for_test(&home).await;
config.model_provider = model_provider;
set_test_compact_prompt(&mut config);
@ -1062,7 +1062,7 @@ async fn auto_compact_runs_after_token_limit_hit() {
let model_provider = non_openai_model_provider(&server);
let home = TempDir::new().unwrap();
let mut config = load_default_config_for_test(&home);
let mut config = load_default_config_for_test(&home).await;
config.model_provider = model_provider;
set_test_compact_prompt(&mut config);
config.model_auto_compact_token_limit = Some(200_000);
@ -1285,7 +1285,7 @@ async fn auto_compact_persists_rollout_entries() {
let model_provider = non_openai_model_provider(&server);
let home = TempDir::new().unwrap();
let mut config = load_default_config_for_test(&home);
let mut config = load_default_config_for_test(&home).await;
config.model_provider = model_provider;
set_test_compact_prompt(&mut config);
config.model_auto_compact_token_limit = Some(200_000);
@ -1397,7 +1397,7 @@ async fn manual_compact_retries_after_context_window_error() {
let model_provider = non_openai_model_provider(&server);
let home = TempDir::new().unwrap();
let mut config = load_default_config_for_test(&home);
let mut config = load_default_config_for_test(&home).await;
config.model_provider = model_provider;
set_test_compact_prompt(&mut config);
config.model_auto_compact_token_limit = Some(200_000);
@ -1530,7 +1530,7 @@ async fn manual_compact_twice_preserves_latest_user_messages() {
let model_provider = non_openai_model_provider(&server);
let home = TempDir::new().unwrap();
let mut config = load_default_config_for_test(&home);
let mut config = load_default_config_for_test(&home).await;
config.model_provider = model_provider;
set_test_compact_prompt(&mut config);
let codex = ConversationManager::with_models_provider(
@ -1733,7 +1733,7 @@ async fn auto_compact_allows_multiple_attempts_when_interleaved_with_other_turn_
let model_provider = non_openai_model_provider(&server);
let home = TempDir::new().unwrap();
let mut config = load_default_config_for_test(&home);
let mut config = load_default_config_for_test(&home).await;
config.model_provider = model_provider;
set_test_compact_prompt(&mut config);
config.model_auto_compact_token_limit = Some(200);
@ -1844,7 +1844,7 @@ async fn auto_compact_triggers_after_function_call_over_95_percent_usage() {
let model_provider = non_openai_model_provider(&server);
let home = TempDir::new().unwrap();
let mut config = load_default_config_for_test(&home);
let mut config = load_default_config_for_test(&home).await;
config.model_provider = model_provider;
set_test_compact_prompt(&mut config);
config.model_context_window = Some(context_window);

View file

@ -862,7 +862,7 @@ async fn start_test_conversation(
..built_in_model_providers()["openai"].clone()
};
let home = TempDir::new().expect("create temp dir");
let mut config = load_default_config_for_test(&home);
let mut config = load_default_config_for_test(&home).await;
config.model_provider = model_provider;
config.compact_prompt = Some(SUMMARIZATION_PROMPT.to_string());
if let Some(model) = model {

View file

@ -51,7 +51,7 @@ async fn fork_conversation_twice_drops_to_first_message() {
};
let home = TempDir::new().unwrap();
let mut config = load_default_config_for_test(&home);
let mut config = load_default_config_for_test(&home).await;
config.model_provider = model_provider.clone();
let config_for_fork = config.clone();

View file

@ -12,7 +12,7 @@ use tempfile::tempdir;
#[tokio::test(flavor = "multi_thread", worker_threads = 2)]
async fn list_models_returns_api_key_models() -> Result<()> {
let codex_home = tempdir()?;
let config = load_default_config_for_test(&codex_home);
let config = load_default_config_for_test(&codex_home).await;
let manager = ConversationManager::with_models_provider(
CodexAuth::from_api_key("sk-test"),
built_in_model_providers()["openai"].clone(),
@ -28,7 +28,7 @@ async fn list_models_returns_api_key_models() -> Result<()> {
#[tokio::test(flavor = "multi_thread", worker_threads = 2)]
async fn list_models_returns_chatgpt_models() -> Result<()> {
let codex_home = tempdir()?;
let config = load_default_config_for_test(&codex_home);
let config = load_default_config_for_test(&codex_home).await;
let manager = ConversationManager::with_models_provider(
CodexAuth::create_dummy_chatgpt_auth_for_testing(),
built_in_model_providers()["openai"].clone(),

View file

@ -19,7 +19,7 @@ async fn override_turn_context_does_not_persist_when_config_exists() {
.await
.expect("seed config.toml");
let mut config = load_default_config_for_test(&codex_home);
let mut config = load_default_config_for_test(&codex_home).await;
config.model = Some("gpt-4o".to_string());
let conversation_manager = ConversationManager::with_models_provider(
@ -62,7 +62,7 @@ async fn override_turn_context_does_not_create_config_file() {
"test setup should start without config"
);
let config = load_default_config_for_test(&codex_home);
let config = load_default_config_for_test(&codex_home).await;
let conversation_manager = ConversationManager::with_models_provider(
CodexAuth::from_api_key("Test API Key"),

View file

@ -316,7 +316,7 @@ async fn remote_models_preserve_builtin_presets() -> Result<()> {
.await;
let codex_home = TempDir::new()?;
let mut config = load_default_config_for_test(&codex_home);
let mut config = load_default_config_for_test(&codex_home).await;
config.features.enable(Feature::RemoteModels);
let auth = CodexAuth::create_dummy_chatgpt_auth_for_testing();
@ -374,7 +374,7 @@ async fn remote_models_hide_picker_only_models() -> Result<()> {
.await;
let codex_home = TempDir::new()?;
let mut config = load_default_config_for_test(&codex_home);
let mut config = load_default_config_for_test(&codex_home).await;
config.features.enable(Feature::RemoteModels);
let auth = CodexAuth::create_dummy_chatgpt_auth_for_testing();
@ -440,7 +440,7 @@ where
let home = Arc::new(TempDir::new()?);
let cwd = Arc::new(TempDir::new()?);
let mut config = load_default_config_for_test(&home);
let mut config = load_default_config_for_test(&home).await;
config.cwd = cwd.path().to_path_buf();
config.features.enable(Feature::RemoteModels);

View file

@ -42,7 +42,7 @@ fn resume_history(
async fn emits_warning_when_resumed_model_differs() {
// Arrange a config with a current model and a prior rollout recorded under a different model.
let home = TempDir::new().expect("tempdir");
let mut config = load_default_config_for_test(&home);
let mut config = load_default_config_for_test(&home).await;
config.model = Some("current-model".to_string());
// Ensure cwd is absolute (the helper sets it to the temp dir already).
assert!(config.cwd.is_absolute());

View file

@ -453,7 +453,7 @@ async fn review_input_isolated_from_parent_history() {
// Seed a parent session history via resume file with both user + assistant items.
let codex_home = TempDir::new().unwrap();
let mut config = load_default_config_for_test(&codex_home);
let mut config = load_default_config_for_test(&codex_home).await;
config.model_provider = ModelProviderInfo {
base_url: Some(format!("{}/v1", server.uri())),
..built_in_model_providers()["openai"].clone()
@ -740,7 +740,7 @@ where
base_url: Some(format!("{}/v1", server.uri())),
..built_in_model_providers()["openai"].clone()
};
let mut config = load_default_config_for_test(codex_home);
let mut config = load_default_config_for_test(codex_home).await;
config.model_provider = model_provider;
mutator(&mut config);
let conversation_manager = ConversationManager::with_models_provider(
@ -769,7 +769,7 @@ where
base_url: Some(format!("{}/v1", server.uri())),
..built_in_model_providers()["openai"].clone()
};
let mut config = load_default_config_for_test(codex_home);
let mut config = load_default_config_for_test(codex_home).await;
config.model_provider = model_provider;
mutator(&mut config);
let conversation_manager = ConversationManager::with_models_provider(

View file

@ -39,7 +39,7 @@ async fn user_shell_cmd_ls_and_cat_in_temp_dir() {
// Load config and pin cwd to the temp dir so ls/cat operate there.
let codex_home = TempDir::new().unwrap();
let mut config = load_default_config_for_test(&codex_home);
let mut config = load_default_config_for_test(&codex_home).await;
config.cwd = cwd.path().to_path_buf();
let conversation_manager = ConversationManager::with_models_provider(
@ -100,7 +100,7 @@ async fn user_shell_cmd_ls_and_cat_in_temp_dir() {
async fn user_shell_cmd_can_be_interrupted() {
// Set up isolated config and conversation.
let codex_home = TempDir::new().unwrap();
let config = load_default_config_for_test(&codex_home);
let config = load_default_config_for_test(&codex_home).await;
let conversation_manager = ConversationManager::with_models_provider(
codex_core::CodexAuth::from_api_key("dummy"),
config.model_provider.clone(),

View file

@ -1252,8 +1252,8 @@ mod tests {
use std::sync::Arc;
use std::sync::atomic::AtomicBool;
fn make_test_app() -> App {
let (chat_widget, app_event_tx, _rx, _op_rx) = make_chatwidget_manual_with_sender();
async fn make_test_app() -> App {
let (chat_widget, app_event_tx, _rx, _op_rx) = make_chatwidget_manual_with_sender().await;
let config = chat_widget.config_ref().clone();
let current_model = chat_widget.get_model_family().get_model_slug().to_string();
let server = Arc::new(ConversationManager::with_models_provider(
@ -1287,12 +1287,12 @@ mod tests {
}
}
fn make_test_app_with_channels() -> (
async fn make_test_app_with_channels() -> (
App,
tokio::sync::mpsc::UnboundedReceiver<AppEvent>,
tokio::sync::mpsc::UnboundedReceiver<Op>,
) {
let (chat_widget, app_event_tx, rx, op_rx) = make_chatwidget_manual_with_sender();
let (chat_widget, app_event_tx, rx, op_rx) = make_chatwidget_manual_with_sender().await;
let config = chat_widget.config_ref().clone();
let current_model = chat_widget.get_model_family().get_model_slug().to_string();
let server = Arc::new(ConversationManager::with_models_provider(
@ -1334,8 +1334,8 @@ mod tests {
codex_core::openai_models::model_presets::all_model_presets().clone()
}
#[test]
fn model_migration_prompt_only_shows_for_deprecated_models() {
#[tokio::test]
async fn model_migration_prompt_only_shows_for_deprecated_models() {
let seen = BTreeMap::new();
assert!(should_show_model_migration_prompt(
"gpt-5",
@ -1369,8 +1369,8 @@ mod tests {
));
}
#[test]
fn model_migration_prompt_respects_hide_flag_and_self_target() {
#[tokio::test]
async fn model_migration_prompt_respects_hide_flag_and_self_target() {
let mut seen = BTreeMap::new();
seen.insert("gpt-5".to_string(), "gpt-5.1".to_string());
assert!(!should_show_model_migration_prompt(
@ -1387,8 +1387,8 @@ mod tests {
));
}
#[test]
fn model_migration_prompt_skips_when_target_missing() {
#[tokio::test]
async fn model_migration_prompt_skips_when_target_missing() {
let mut available = all_model_presets();
let mut current = available
.iter()
@ -1415,9 +1415,9 @@ mod tests {
assert!(target_preset_for_upgrade(&available, "missing-target").is_none());
}
#[test]
fn update_reasoning_effort_updates_config() {
let mut app = make_test_app();
#[tokio::test]
async fn update_reasoning_effort_updates_config() {
let mut app = make_test_app().await;
app.config.model_reasoning_effort = Some(ReasoningEffortConfig::Medium);
app.chat_widget
.set_reasoning_effort(Some(ReasoningEffortConfig::Medium));
@ -1434,9 +1434,9 @@ mod tests {
);
}
#[test]
fn backtrack_selection_with_duplicate_history_targets_unique_turn() {
let mut app = make_test_app();
#[tokio::test]
async fn backtrack_selection_with_duplicate_history_targets_unique_turn() {
let mut app = make_test_app().await;
let user_cell = |text: &str| -> Arc<dyn HistoryCell> {
Arc::new(UserHistoryCell {
@ -1503,7 +1503,7 @@ mod tests {
#[tokio::test]
async fn new_session_requests_shutdown_for_previous_conversation() {
let (mut app, mut app_event_rx, mut op_rx) = make_test_app_with_channels();
let (mut app, mut app_event_rx, mut op_rx) = make_test_app_with_channels().await;
let conversation_id = ConversationId::new();
let event = SessionConfiguredEvent {
@ -1537,13 +1537,13 @@ mod tests {
}
}
#[test]
fn session_summary_skip_zero_usage() {
#[tokio::test]
async fn session_summary_skip_zero_usage() {
assert!(session_summary(TokenUsage::default(), None).is_none());
}
#[test]
fn session_summary_includes_resume_hint() {
#[tokio::test]
async fn session_summary_includes_resume_hint() {
let usage = TokenUsage {
input_tokens: 10,
output_tokens: 2,

File diff suppressed because it is too large Load diff

View file

@ -1682,8 +1682,7 @@ mod tests {
use crate::exec_cell::ExecCall;
use crate::exec_cell::ExecCell;
use codex_core::config::Config;
use codex_core::config::ConfigOverrides;
use codex_core::config::ConfigToml;
use codex_core::config::ConfigBuilder;
use codex_core::config::types::McpServerConfig;
use codex_core::config::types::McpServerTransportConfig;
use codex_core::openai_models::models_manager::ModelsManager;
@ -1700,14 +1699,13 @@ mod tests {
use mcp_types::TextContent;
use mcp_types::Tool;
use mcp_types::ToolInputSchema;
fn test_config() -> Config {
Config::load_from_base_config_with_overrides(
ConfigToml::default(),
ConfigOverrides::default(),
std::env::temp_dir(),
)
.expect("config")
async fn test_config() -> Config {
let codex_home = std::env::temp_dir();
ConfigBuilder::default()
.codex_home(codex_home.clone())
.build()
.await
.expect("config")
}
fn render_lines(lines: &[Line<'static>]) -> Vec<String> {
@ -1785,9 +1783,9 @@ mod tests {
insta::assert_snapshot!(rendered);
}
#[test]
fn mcp_tools_output_masks_sensitive_values() {
let mut config = test_config();
#[tokio::test]
async fn mcp_tools_output_masks_sensitive_values() {
let mut config = test_config().await;
let mut env = HashMap::new();
env.insert("TOKEN".to_string(), "secret".to_string());
let stdio_config = McpServerConfig {
@ -2618,9 +2616,9 @@ mod tests {
assert_eq!(rendered, vec!["• Detailed reasoning goes here."]);
}
#[test]
fn reasoning_summary_block_respects_config_overrides() {
let mut config = test_config();
#[tokio::test]
async fn reasoning_summary_block_respects_config_overrides() {
let mut config = test_config().await;
config.model = Some("gpt-3.5-turbo".to_string());
config.model_supports_reasoning_summaries = Some(true);
config.model_reasoning_summary_format = Some(ReasoningSummaryFormat::Experimental);

View file

@ -596,21 +596,23 @@ fn should_show_login_screen(login_status: LoginStatus, config: &Config) -> bool
#[cfg(test)]
mod tests {
use super::*;
use codex_core::config::ConfigOverrides;
use codex_core::config::ConfigToml;
use codex_core::config::ConfigBuilder;
use codex_core::config::ProjectConfig;
use serial_test::serial;
use tempfile::TempDir;
#[test]
async fn build_config(temp_dir: &TempDir) -> std::io::Result<Config> {
ConfigBuilder::default()
.codex_home(temp_dir.path().to_path_buf())
.build()
.await
}
#[tokio::test]
#[serial]
fn windows_skips_trust_prompt_without_sandbox() -> std::io::Result<()> {
async fn windows_skips_trust_prompt_without_sandbox() -> std::io::Result<()> {
let temp_dir = TempDir::new()?;
let mut config = Config::load_from_base_config_with_overrides(
ConfigToml::default(),
ConfigOverrides::default(),
temp_dir.path().to_path_buf(),
)?;
let mut config = build_config(&temp_dir).await?;
config.did_user_set_custom_approval_policy_or_sandbox_mode = false;
config.active_project = ProjectConfig { trust_level: None };
config.set_windows_sandbox_globally(false);
@ -629,15 +631,11 @@ mod tests {
}
Ok(())
}
#[test]
#[tokio::test]
#[serial]
fn windows_shows_trust_prompt_with_sandbox() -> std::io::Result<()> {
async fn windows_shows_trust_prompt_with_sandbox() -> std::io::Result<()> {
let temp_dir = TempDir::new()?;
let mut config = Config::load_from_base_config_with_overrides(
ConfigToml::default(),
ConfigOverrides::default(),
temp_dir.path().to_path_buf(),
)?;
let mut config = build_config(&temp_dir).await?;
config.did_user_set_custom_approval_policy_or_sandbox_mode = false;
config.active_project = ProjectConfig { trust_level: None };
config.set_windows_sandbox_globally(true);
@ -656,15 +654,11 @@ mod tests {
}
Ok(())
}
#[test]
fn untrusted_project_skips_trust_prompt() -> std::io::Result<()> {
#[tokio::test]
async fn untrusted_project_skips_trust_prompt() -> std::io::Result<()> {
use codex_protocol::config_types::TrustLevel;
let temp_dir = TempDir::new()?;
let mut config = Config::load_from_base_config_with_overrides(
ConfigToml::default(),
ConfigOverrides::default(),
temp_dir.path().to_path_buf(),
)?;
let mut config = build_config(&temp_dir).await?;
config.did_user_set_custom_approval_policy_or_sandbox_mode = false;
config.active_project = ProjectConfig {
trust_level: Some(TrustLevel::Untrusted),

View file

@ -1059,7 +1059,6 @@ mod tests {
use crossterm::event::KeyModifiers;
use insta::assert_snapshot;
use serde_json::json;
use std::future::Future;
use std::path::PathBuf;
use std::sync::Arc;
use std::sync::Mutex;
@ -1106,14 +1105,6 @@ mod tests {
}
}
fn block_on_future<F: Future<Output = T>, T>(future: F) -> T {
tokio::runtime::Builder::new_current_thread()
.enable_all()
.build()
.unwrap()
.block_on(future)
}
#[test]
fn preview_uses_first_message_input_text() {
let head = vec![
@ -1267,8 +1258,8 @@ mod tests {
assert_snapshot!("resume_picker_table", snapshot);
}
#[test]
fn resume_picker_screen_snapshot() {
#[tokio::test]
async fn resume_picker_screen_snapshot() {
use crate::custom_terminal::Terminal;
use crate::test_backend::VT100Backend;
use uuid::Uuid;
@ -1360,14 +1351,15 @@ mod tests {
None,
);
let page = block_on_future(RolloutRecorder::list_conversations(
let page = RolloutRecorder::list_conversations(
&state.codex_home,
PAGE_SIZE,
None,
INTERACTIVE_SESSION_SOURCES,
Some(&[String::from("openai")]),
"openai",
))
)
.await
.expect("list conversations");
let rows = rows_from_items(page.items);
@ -1526,8 +1518,8 @@ mod tests {
assert!(guard[0].search_token.is_none());
}
#[test]
fn page_navigation_uses_view_rows() {
#[tokio::test]
async fn page_navigation_uses_view_rows() {
let loader: PageLoader = Arc::new(|_| {});
let mut state = PickerState::new(
PathBuf::from("/tmp"),
@ -1551,33 +1543,27 @@ mod tests {
state.update_view_rows(5);
assert_eq!(state.selected, 0);
block_on_future(async {
state
.handle_key(KeyEvent::new(KeyCode::PageDown, KeyModifiers::NONE))
.await
.unwrap();
});
state
.handle_key(KeyEvent::new(KeyCode::PageDown, KeyModifiers::NONE))
.await
.unwrap();
assert_eq!(state.selected, 5);
block_on_future(async {
state
.handle_key(KeyEvent::new(KeyCode::PageDown, KeyModifiers::NONE))
.await
.unwrap();
});
state
.handle_key(KeyEvent::new(KeyCode::PageDown, KeyModifiers::NONE))
.await
.unwrap();
assert_eq!(state.selected, 10);
block_on_future(async {
state
.handle_key(KeyEvent::new(KeyCode::PageUp, KeyModifiers::NONE))
.await
.unwrap();
});
state
.handle_key(KeyEvent::new(KeyCode::PageUp, KeyModifiers::NONE))
.await
.unwrap();
assert_eq!(state.selected, 5);
}
#[test]
fn up_at_bottom_does_not_scroll_when_visible() {
#[tokio::test]
async fn up_at_bottom_does_not_scroll_when_visible() {
let loader: PageLoader = Arc::new(|_| {});
let mut state = PickerState::new(
PathBuf::from("/tmp"),
@ -1606,12 +1592,10 @@ mod tests {
let initial_top = state.scroll_top;
assert_eq!(initial_top, state.filtered_rows.len().saturating_sub(5));
block_on_future(async {
state
.handle_key(KeyEvent::new(KeyCode::Up, KeyModifiers::NONE))
.await
.unwrap();
});
state
.handle_key(KeyEvent::new(KeyCode::Up, KeyModifiers::NONE))
.await
.unwrap();
assert_eq!(state.scroll_top, initial_top);
assert_eq!(state.selected, state.filtered_rows.len().saturating_sub(2));

View file

@ -6,8 +6,7 @@ use chrono::TimeZone;
use chrono::Utc;
use codex_core::AuthManager;
use codex_core::config::Config;
use codex_core::config::ConfigOverrides;
use codex_core::config::ConfigToml;
use codex_core::config::ConfigBuilder;
use codex_core::openai_models::model_family::ModelFamily;
use codex_core::openai_models::models_manager::ModelsManager;
use codex_core::protocol::CreditsSnapshot;
@ -22,13 +21,12 @@ use ratatui::prelude::*;
use std::path::PathBuf;
use tempfile::TempDir;
fn test_config(temp_home: &TempDir) -> Config {
Config::load_from_base_config_with_overrides(
ConfigToml::default(),
ConfigOverrides::default(),
temp_home.path().to_path_buf(),
)
.expect("load config")
async fn test_config(temp_home: &TempDir) -> Config {
ConfigBuilder::default()
.codex_home(temp_home.path().to_path_buf())
.build()
.await
.expect("load config")
}
fn test_auth_manager(config: &Config) -> AuthManager {
@ -84,10 +82,10 @@ fn reset_at_from(captured_at: &chrono::DateTime<chrono::Local>, seconds: i64) ->
.timestamp()
}
#[test]
fn status_snapshot_includes_reasoning_details() {
#[tokio::test]
async fn status_snapshot_includes_reasoning_details() {
let temp_home = TempDir::new().expect("temp home");
let mut config = test_config(&temp_home);
let mut config = test_config(&temp_home).await;
config.model = Some("gpt-5.1-codex-max".to_string());
config.model_provider_id = "openai".to_string();
config.model_reasoning_effort = Some(ReasoningEffort::High);
@ -155,10 +153,10 @@ fn status_snapshot_includes_reasoning_details() {
assert_snapshot!(sanitized);
}
#[test]
fn status_snapshot_includes_monthly_limit() {
#[tokio::test]
async fn status_snapshot_includes_monthly_limit() {
let temp_home = TempDir::new().expect("temp home");
let mut config = test_config(&temp_home);
let mut config = test_config(&temp_home).await;
config.model = Some("gpt-5.1-codex-max".to_string());
config.model_provider_id = "openai".to_string();
config.cwd = PathBuf::from("/workspace/tests");
@ -212,10 +210,10 @@ fn status_snapshot_includes_monthly_limit() {
assert_snapshot!(sanitized);
}
#[test]
fn status_snapshot_shows_unlimited_credits() {
#[tokio::test]
async fn status_snapshot_shows_unlimited_credits() {
let temp_home = TempDir::new().expect("temp home");
let config = test_config(&temp_home);
let config = test_config(&temp_home).await;
let auth_manager = test_auth_manager(&config);
let usage = TokenUsage::default();
let captured_at = chrono::Local
@ -256,10 +254,10 @@ fn status_snapshot_shows_unlimited_credits() {
);
}
#[test]
fn status_snapshot_shows_positive_credits() {
#[tokio::test]
async fn status_snapshot_shows_positive_credits() {
let temp_home = TempDir::new().expect("temp home");
let config = test_config(&temp_home);
let config = test_config(&temp_home).await;
let auth_manager = test_auth_manager(&config);
let usage = TokenUsage::default();
let captured_at = chrono::Local
@ -300,10 +298,10 @@ fn status_snapshot_shows_positive_credits() {
);
}
#[test]
fn status_snapshot_hides_zero_credits() {
#[tokio::test]
async fn status_snapshot_hides_zero_credits() {
let temp_home = TempDir::new().expect("temp home");
let config = test_config(&temp_home);
let config = test_config(&temp_home).await;
let auth_manager = test_auth_manager(&config);
let usage = TokenUsage::default();
let captured_at = chrono::Local
@ -342,10 +340,10 @@ fn status_snapshot_hides_zero_credits() {
);
}
#[test]
fn status_snapshot_hides_when_has_no_credits_flag() {
#[tokio::test]
async fn status_snapshot_hides_when_has_no_credits_flag() {
let temp_home = TempDir::new().expect("temp home");
let config = test_config(&temp_home);
let config = test_config(&temp_home).await;
let auth_manager = test_auth_manager(&config);
let usage = TokenUsage::default();
let captured_at = chrono::Local
@ -384,10 +382,10 @@ fn status_snapshot_hides_when_has_no_credits_flag() {
);
}
#[test]
fn status_card_token_usage_excludes_cached_tokens() {
#[tokio::test]
async fn status_card_token_usage_excludes_cached_tokens() {
let temp_home = TempDir::new().expect("temp home");
let mut config = test_config(&temp_home);
let mut config = test_config(&temp_home).await;
config.model = Some("gpt-5.1-codex-max".to_string());
config.cwd = PathBuf::from("/workspace/tests");
@ -427,10 +425,10 @@ fn status_card_token_usage_excludes_cached_tokens() {
);
}
#[test]
fn status_snapshot_truncates_in_narrow_terminal() {
#[tokio::test]
async fn status_snapshot_truncates_in_narrow_terminal() {
let temp_home = TempDir::new().expect("temp home");
let mut config = test_config(&temp_home);
let mut config = test_config(&temp_home).await;
config.model = Some("gpt-5.1-codex-max".to_string());
config.model_provider_id = "openai".to_string();
config.model_reasoning_effort = Some(ReasoningEffort::High);
@ -487,10 +485,10 @@ fn status_snapshot_truncates_in_narrow_terminal() {
assert_snapshot!(sanitized);
}
#[test]
fn status_snapshot_shows_missing_limits_message() {
#[tokio::test]
async fn status_snapshot_shows_missing_limits_message() {
let temp_home = TempDir::new().expect("temp home");
let mut config = test_config(&temp_home);
let mut config = test_config(&temp_home).await;
config.model = Some("gpt-5.1-codex-max".to_string());
config.cwd = PathBuf::from("/workspace/tests");
@ -532,10 +530,10 @@ fn status_snapshot_shows_missing_limits_message() {
assert_snapshot!(sanitized);
}
#[test]
fn status_snapshot_includes_credits_and_limits() {
#[tokio::test]
async fn status_snapshot_includes_credits_and_limits() {
let temp_home = TempDir::new().expect("temp home");
let mut config = test_config(&temp_home);
let mut config = test_config(&temp_home).await;
config.model = Some("gpt-5.1-codex".to_string());
config.cwd = PathBuf::from("/workspace/tests");
@ -596,10 +594,10 @@ fn status_snapshot_includes_credits_and_limits() {
assert_snapshot!(sanitized);
}
#[test]
fn status_snapshot_shows_empty_limits_message() {
#[tokio::test]
async fn status_snapshot_shows_empty_limits_message() {
let temp_home = TempDir::new().expect("temp home");
let mut config = test_config(&temp_home);
let mut config = test_config(&temp_home).await;
config.model = Some("gpt-5.1-codex-max".to_string());
config.cwd = PathBuf::from("/workspace/tests");
@ -648,10 +646,10 @@ fn status_snapshot_shows_empty_limits_message() {
assert_snapshot!(sanitized);
}
#[test]
fn status_snapshot_shows_stale_limits_message() {
#[tokio::test]
async fn status_snapshot_shows_stale_limits_message() {
let temp_home = TempDir::new().expect("temp home");
let mut config = test_config(&temp_home);
let mut config = test_config(&temp_home).await;
config.model = Some("gpt-5.1-codex-max".to_string());
config.cwd = PathBuf::from("/workspace/tests");
@ -709,10 +707,10 @@ fn status_snapshot_shows_stale_limits_message() {
assert_snapshot!(sanitized);
}
#[test]
fn status_snapshot_cached_limits_hide_credits_without_flag() {
#[tokio::test]
async fn status_snapshot_cached_limits_hide_credits_without_flag() {
let temp_home = TempDir::new().expect("temp home");
let mut config = test_config(&temp_home);
let mut config = test_config(&temp_home).await;
config.model = Some("gpt-5.1-codex".to_string());
config.cwd = PathBuf::from("/workspace/tests");
@ -774,10 +772,10 @@ fn status_snapshot_cached_limits_hide_credits_without_flag() {
assert_snapshot!(sanitized);
}
#[test]
fn status_context_window_uses_last_usage() {
#[tokio::test]
async fn status_context_window_uses_last_usage() {
let temp_home = TempDir::new().expect("temp home");
let mut config = test_config(&temp_home);
let mut config = test_config(&temp_home).await;
config.model_context_window = Some(272_000);
let auth_manager = test_auth_manager(&config);

View file

@ -2134,8 +2134,8 @@ mod tests {
use std::sync::Arc;
use std::sync::atomic::AtomicBool;
fn make_test_app() -> App {
let (chat_widget, app_event_tx, _rx, _op_rx) = make_chatwidget_manual_with_sender();
async fn make_test_app() -> App {
let (chat_widget, app_event_tx, _rx, _op_rx) = make_chatwidget_manual_with_sender().await;
let config = chat_widget.config_ref().clone();
let current_model = chat_widget.get_model_family().get_model_slug().to_string();
let server = Arc::new(ConversationManager::with_models_provider(
@ -2173,12 +2173,12 @@ mod tests {
}
}
fn make_test_app_with_channels() -> (
async fn make_test_app_with_channels() -> (
App,
tokio::sync::mpsc::UnboundedReceiver<AppEvent>,
tokio::sync::mpsc::UnboundedReceiver<Op>,
) {
let (chat_widget, app_event_tx, rx, op_rx) = make_chatwidget_manual_with_sender();
let (chat_widget, app_event_tx, rx, op_rx) = make_chatwidget_manual_with_sender().await;
let config = chat_widget.config_ref().clone();
let current_model = chat_widget.get_model_family().get_model_slug().to_string();
let server = Arc::new(ConversationManager::with_models_provider(
@ -2224,8 +2224,8 @@ mod tests {
codex_core::openai_models::model_presets::all_model_presets().clone()
}
#[test]
fn model_migration_prompt_only_shows_for_deprecated_models() {
#[tokio::test]
async fn model_migration_prompt_only_shows_for_deprecated_models() {
let seen = BTreeMap::new();
assert!(should_show_model_migration_prompt(
"gpt-5",
@ -2259,8 +2259,8 @@ mod tests {
));
}
#[test]
fn model_migration_prompt_respects_hide_flag_and_self_target() {
#[tokio::test]
async fn model_migration_prompt_respects_hide_flag_and_self_target() {
let mut seen = BTreeMap::new();
seen.insert("gpt-5".to_string(), "gpt-5.1".to_string());
assert!(!should_show_model_migration_prompt(
@ -2277,9 +2277,9 @@ mod tests {
));
}
#[test]
fn update_reasoning_effort_updates_config() {
let mut app = make_test_app();
#[tokio::test]
async fn update_reasoning_effort_updates_config() {
let mut app = make_test_app().await;
app.config.model_reasoning_effort = Some(ReasoningEffortConfig::Medium);
app.chat_widget
.set_reasoning_effort(Some(ReasoningEffortConfig::Medium));
@ -2296,9 +2296,9 @@ mod tests {
);
}
#[test]
fn backtrack_selection_with_duplicate_history_targets_unique_turn() {
let mut app = make_test_app();
#[tokio::test]
async fn backtrack_selection_with_duplicate_history_targets_unique_turn() {
let mut app = make_test_app().await;
let user_cell = |text: &str| -> Arc<dyn HistoryCell> {
Arc::new(UserHistoryCell {
@ -2363,12 +2363,12 @@ mod tests {
assert_eq!(prefill, "follow-up (edited)");
}
#[test]
fn transcript_selection_moves_with_scroll() {
#[tokio::test]
async fn transcript_selection_moves_with_scroll() {
use ratatui::buffer::Buffer;
use ratatui::layout::Rect;
let mut app = make_test_app();
let mut app = make_test_app().await;
app.transcript_total_lines = 3;
let area = Rect {
@ -2427,7 +2427,7 @@ mod tests {
#[tokio::test]
async fn new_session_requests_shutdown_for_previous_conversation() {
let (mut app, mut app_event_rx, mut op_rx) = make_test_app_with_channels();
let (mut app, mut app_event_rx, mut op_rx) = make_test_app_with_channels().await;
let conversation_id = ConversationId::new();
let event = SessionConfiguredEvent {
@ -2461,13 +2461,13 @@ mod tests {
}
}
#[test]
fn session_summary_skip_zero_usage() {
#[tokio::test]
async fn session_summary_skip_zero_usage() {
assert!(session_summary(TokenUsage::default(), None).is_none());
}
#[test]
fn render_lines_to_ansi_pads_user_rows_to_full_width() {
#[tokio::test]
async fn render_lines_to_ansi_pads_user_rows_to_full_width() {
let line: Line<'static> = Line::from("hi");
let lines = vec![line];
let line_meta = vec![TranscriptLineMeta::CellLine {
@ -2482,8 +2482,8 @@ mod tests {
assert!(rendered[0].contains("hi"));
}
#[test]
fn session_summary_includes_resume_hint() {
#[tokio::test]
async fn session_summary_includes_resume_hint() {
let usage = TokenUsage {
input_tokens: 10,
output_tokens: 2,

File diff suppressed because it is too large Load diff

View file

@ -1514,8 +1514,7 @@ mod tests {
use crate::exec_cell::ExecCall;
use crate::exec_cell::ExecCell;
use codex_core::config::Config;
use codex_core::config::ConfigOverrides;
use codex_core::config::ConfigToml;
use codex_core::config::ConfigBuilder;
use codex_core::config::types::McpServerConfig;
use codex_core::config::types::McpServerTransportConfig;
use codex_core::openai_models::models_manager::ModelsManager;
@ -1532,14 +1531,13 @@ mod tests {
use mcp_types::TextContent;
use mcp_types::Tool;
use mcp_types::ToolInputSchema;
fn test_config() -> Config {
Config::load_from_base_config_with_overrides(
ConfigToml::default(),
ConfigOverrides::default(),
std::env::temp_dir(),
)
.expect("config")
async fn test_config() -> Config {
let codex_home = std::env::temp_dir();
ConfigBuilder::default()
.codex_home(codex_home.clone())
.build()
.await
.expect("config")
}
fn render_lines(lines: &[Line<'static>]) -> Vec<String> {
@ -1558,9 +1556,9 @@ mod tests {
render_lines(&cell.transcript_lines(u16::MAX))
}
#[test]
fn mcp_tools_output_masks_sensitive_values() {
let mut config = test_config();
#[tokio::test]
async fn mcp_tools_output_masks_sensitive_values() {
let mut config = test_config().await;
let mut env = HashMap::new();
env.insert("TOKEN".to_string(), "secret".to_string());
let stdio_config = McpServerConfig {
@ -2391,9 +2389,9 @@ mod tests {
assert_eq!(rendered, vec!["• Detailed reasoning goes here."]);
}
#[test]
fn reasoning_summary_block_respects_config_overrides() {
let mut config = test_config();
#[tokio::test]
async fn reasoning_summary_block_respects_config_overrides() {
let mut config = test_config().await;
config.model = Some("gpt-3.5-turbo".to_string());
config.model_supports_reasoning_summaries = Some(true);
config.model_reasoning_summary_format = Some(ReasoningSummaryFormat::Experimental);

View file

@ -625,21 +625,23 @@ fn should_show_login_screen(login_status: LoginStatus, config: &Config) -> bool
#[cfg(test)]
mod tests {
use super::*;
use codex_core::config::ConfigOverrides;
use codex_core::config::ConfigToml;
use codex_core::config::ConfigBuilder;
use codex_core::config::ProjectConfig;
use serial_test::serial;
use tempfile::TempDir;
#[test]
async fn build_config(temp_dir: &TempDir) -> std::io::Result<Config> {
ConfigBuilder::default()
.codex_home(temp_dir.path().to_path_buf())
.build()
.await
}
#[tokio::test]
#[serial]
fn windows_skips_trust_prompt_without_sandbox() -> std::io::Result<()> {
async fn windows_skips_trust_prompt_without_sandbox() -> std::io::Result<()> {
let temp_dir = TempDir::new()?;
let mut config = Config::load_from_base_config_with_overrides(
ConfigToml::default(),
ConfigOverrides::default(),
temp_dir.path().to_path_buf(),
)?;
let mut config = build_config(&temp_dir).await?;
config.did_user_set_custom_approval_policy_or_sandbox_mode = false;
config.active_project = ProjectConfig { trust_level: None };
config.set_windows_sandbox_globally(false);
@ -658,15 +660,11 @@ mod tests {
}
Ok(())
}
#[test]
#[tokio::test]
#[serial]
fn windows_shows_trust_prompt_with_sandbox() -> std::io::Result<()> {
async fn windows_shows_trust_prompt_with_sandbox() -> std::io::Result<()> {
let temp_dir = TempDir::new()?;
let mut config = Config::load_from_base_config_with_overrides(
ConfigToml::default(),
ConfigOverrides::default(),
temp_dir.path().to_path_buf(),
)?;
let mut config = build_config(&temp_dir).await?;
config.did_user_set_custom_approval_policy_or_sandbox_mode = false;
config.active_project = ProjectConfig { trust_level: None };
config.set_windows_sandbox_globally(true);
@ -685,15 +683,11 @@ mod tests {
}
Ok(())
}
#[test]
fn untrusted_project_skips_trust_prompt() -> std::io::Result<()> {
#[tokio::test]
async fn untrusted_project_skips_trust_prompt() -> std::io::Result<()> {
use codex_protocol::config_types::TrustLevel;
let temp_dir = TempDir::new()?;
let mut config = Config::load_from_base_config_with_overrides(
ConfigToml::default(),
ConfigOverrides::default(),
temp_dir.path().to_path_buf(),
)?;
let mut config = build_config(&temp_dir).await?;
config.did_user_set_custom_approval_policy_or_sandbox_mode = false;
config.active_project = ProjectConfig {
trust_level: Some(TrustLevel::Untrusted),

View file

@ -1059,7 +1059,6 @@ mod tests {
use crossterm::event::KeyModifiers;
use insta::assert_snapshot;
use serde_json::json;
use std::future::Future;
use std::path::PathBuf;
use std::sync::Arc;
use std::sync::Mutex;
@ -1106,14 +1105,6 @@ mod tests {
}
}
fn block_on_future<F: Future<Output = T>, T>(future: F) -> T {
tokio::runtime::Builder::new_current_thread()
.enable_all()
.build()
.unwrap()
.block_on(future)
}
#[test]
fn preview_uses_first_message_input_text() {
let head = vec![
@ -1267,8 +1258,8 @@ mod tests {
assert_snapshot!("resume_picker_table", snapshot);
}
#[test]
fn resume_picker_screen_snapshot() {
#[tokio::test]
async fn resume_picker_screen_snapshot() {
use crate::custom_terminal::Terminal;
use crate::test_backend::VT100Backend;
use uuid::Uuid;
@ -1360,14 +1351,15 @@ mod tests {
None,
);
let page = block_on_future(RolloutRecorder::list_conversations(
let page = RolloutRecorder::list_conversations(
&state.codex_home,
PAGE_SIZE,
None,
INTERACTIVE_SESSION_SOURCES,
Some(&[String::from("openai")]),
"openai",
))
)
.await
.expect("list conversations");
let rows = rows_from_items(page.items);
@ -1526,8 +1518,8 @@ mod tests {
assert!(guard[0].search_token.is_none());
}
#[test]
fn page_navigation_uses_view_rows() {
#[tokio::test]
async fn page_navigation_uses_view_rows() {
let loader: PageLoader = Arc::new(|_| {});
let mut state = PickerState::new(
PathBuf::from("/tmp"),
@ -1551,33 +1543,27 @@ mod tests {
state.update_view_rows(5);
assert_eq!(state.selected, 0);
block_on_future(async {
state
.handle_key(KeyEvent::new(KeyCode::PageDown, KeyModifiers::NONE))
.await
.unwrap();
});
state
.handle_key(KeyEvent::new(KeyCode::PageDown, KeyModifiers::NONE))
.await
.unwrap();
assert_eq!(state.selected, 5);
block_on_future(async {
state
.handle_key(KeyEvent::new(KeyCode::PageDown, KeyModifiers::NONE))
.await
.unwrap();
});
state
.handle_key(KeyEvent::new(KeyCode::PageDown, KeyModifiers::NONE))
.await
.unwrap();
assert_eq!(state.selected, 10);
block_on_future(async {
state
.handle_key(KeyEvent::new(KeyCode::PageUp, KeyModifiers::NONE))
.await
.unwrap();
});
state
.handle_key(KeyEvent::new(KeyCode::PageUp, KeyModifiers::NONE))
.await
.unwrap();
assert_eq!(state.selected, 5);
}
#[test]
fn up_at_bottom_does_not_scroll_when_visible() {
#[tokio::test]
async fn up_at_bottom_does_not_scroll_when_visible() {
let loader: PageLoader = Arc::new(|_| {});
let mut state = PickerState::new(
PathBuf::from("/tmp"),
@ -1606,12 +1592,10 @@ mod tests {
let initial_top = state.scroll_top;
assert_eq!(initial_top, state.filtered_rows.len().saturating_sub(5));
block_on_future(async {
state
.handle_key(KeyEvent::new(KeyCode::Up, KeyModifiers::NONE))
.await
.unwrap();
});
state
.handle_key(KeyEvent::new(KeyCode::Up, KeyModifiers::NONE))
.await
.unwrap();
assert_eq!(state.scroll_top, initial_top);
assert_eq!(state.selected, state.filtered_rows.len().saturating_sub(2));

View file

@ -6,8 +6,7 @@ use chrono::TimeZone;
use chrono::Utc;
use codex_core::AuthManager;
use codex_core::config::Config;
use codex_core::config::ConfigOverrides;
use codex_core::config::ConfigToml;
use codex_core::config::ConfigBuilder;
use codex_core::openai_models::model_family::ModelFamily;
use codex_core::openai_models::models_manager::ModelsManager;
use codex_core::protocol::CreditsSnapshot;
@ -22,13 +21,12 @@ use ratatui::prelude::*;
use std::path::PathBuf;
use tempfile::TempDir;
fn test_config(temp_home: &TempDir) -> Config {
Config::load_from_base_config_with_overrides(
ConfigToml::default(),
ConfigOverrides::default(),
temp_home.path().to_path_buf(),
)
.expect("load config")
async fn test_config(temp_home: &TempDir) -> Config {
ConfigBuilder::default()
.codex_home(temp_home.path().to_path_buf())
.build()
.await
.expect("load config")
}
fn test_auth_manager(config: &Config) -> AuthManager {
@ -84,10 +82,10 @@ fn reset_at_from(captured_at: &chrono::DateTime<chrono::Local>, seconds: i64) ->
.timestamp()
}
#[test]
fn status_snapshot_includes_reasoning_details() {
#[tokio::test]
async fn status_snapshot_includes_reasoning_details() {
let temp_home = TempDir::new().expect("temp home");
let mut config = test_config(&temp_home);
let mut config = test_config(&temp_home).await;
config.model = Some("gpt-5.1-codex-max".to_string());
config.model_provider_id = "openai".to_string();
config.model_reasoning_effort = Some(ReasoningEffort::High);
@ -155,10 +153,10 @@ fn status_snapshot_includes_reasoning_details() {
assert_snapshot!(sanitized);
}
#[test]
fn status_snapshot_includes_monthly_limit() {
#[tokio::test]
async fn status_snapshot_includes_monthly_limit() {
let temp_home = TempDir::new().expect("temp home");
let mut config = test_config(&temp_home);
let mut config = test_config(&temp_home).await;
config.model = Some("gpt-5.1-codex-max".to_string());
config.model_provider_id = "openai".to_string();
config.cwd = PathBuf::from("/workspace/tests");
@ -212,10 +210,10 @@ fn status_snapshot_includes_monthly_limit() {
assert_snapshot!(sanitized);
}
#[test]
fn status_snapshot_shows_unlimited_credits() {
#[tokio::test]
async fn status_snapshot_shows_unlimited_credits() {
let temp_home = TempDir::new().expect("temp home");
let config = test_config(&temp_home);
let config = test_config(&temp_home).await;
let auth_manager = test_auth_manager(&config);
let usage = TokenUsage::default();
let captured_at = chrono::Local
@ -256,10 +254,10 @@ fn status_snapshot_shows_unlimited_credits() {
);
}
#[test]
fn status_snapshot_shows_positive_credits() {
#[tokio::test]
async fn status_snapshot_shows_positive_credits() {
let temp_home = TempDir::new().expect("temp home");
let config = test_config(&temp_home);
let config = test_config(&temp_home).await;
let auth_manager = test_auth_manager(&config);
let usage = TokenUsage::default();
let captured_at = chrono::Local
@ -300,10 +298,10 @@ fn status_snapshot_shows_positive_credits() {
);
}
#[test]
fn status_snapshot_hides_zero_credits() {
#[tokio::test]
async fn status_snapshot_hides_zero_credits() {
let temp_home = TempDir::new().expect("temp home");
let config = test_config(&temp_home);
let config = test_config(&temp_home).await;
let auth_manager = test_auth_manager(&config);
let usage = TokenUsage::default();
let captured_at = chrono::Local
@ -342,10 +340,10 @@ fn status_snapshot_hides_zero_credits() {
);
}
#[test]
fn status_snapshot_hides_when_has_no_credits_flag() {
#[tokio::test]
async fn status_snapshot_hides_when_has_no_credits_flag() {
let temp_home = TempDir::new().expect("temp home");
let config = test_config(&temp_home);
let config = test_config(&temp_home).await;
let auth_manager = test_auth_manager(&config);
let usage = TokenUsage::default();
let captured_at = chrono::Local
@ -384,10 +382,10 @@ fn status_snapshot_hides_when_has_no_credits_flag() {
);
}
#[test]
fn status_card_token_usage_excludes_cached_tokens() {
#[tokio::test]
async fn status_card_token_usage_excludes_cached_tokens() {
let temp_home = TempDir::new().expect("temp home");
let mut config = test_config(&temp_home);
let mut config = test_config(&temp_home).await;
config.model = Some("gpt-5.1-codex-max".to_string());
config.cwd = PathBuf::from("/workspace/tests");
@ -427,10 +425,10 @@ fn status_card_token_usage_excludes_cached_tokens() {
);
}
#[test]
fn status_snapshot_truncates_in_narrow_terminal() {
#[tokio::test]
async fn status_snapshot_truncates_in_narrow_terminal() {
let temp_home = TempDir::new().expect("temp home");
let mut config = test_config(&temp_home);
let mut config = test_config(&temp_home).await;
config.model = Some("gpt-5.1-codex-max".to_string());
config.model_provider_id = "openai".to_string();
config.model_reasoning_effort = Some(ReasoningEffort::High);
@ -487,10 +485,10 @@ fn status_snapshot_truncates_in_narrow_terminal() {
assert_snapshot!(sanitized);
}
#[test]
fn status_snapshot_shows_missing_limits_message() {
#[tokio::test]
async fn status_snapshot_shows_missing_limits_message() {
let temp_home = TempDir::new().expect("temp home");
let mut config = test_config(&temp_home);
let mut config = test_config(&temp_home).await;
config.model = Some("gpt-5.1-codex-max".to_string());
config.cwd = PathBuf::from("/workspace/tests");
@ -532,10 +530,10 @@ fn status_snapshot_shows_missing_limits_message() {
assert_snapshot!(sanitized);
}
#[test]
fn status_snapshot_includes_credits_and_limits() {
#[tokio::test]
async fn status_snapshot_includes_credits_and_limits() {
let temp_home = TempDir::new().expect("temp home");
let mut config = test_config(&temp_home);
let mut config = test_config(&temp_home).await;
config.model = Some("gpt-5.1-codex".to_string());
config.cwd = PathBuf::from("/workspace/tests");
@ -596,10 +594,10 @@ fn status_snapshot_includes_credits_and_limits() {
assert_snapshot!(sanitized);
}
#[test]
fn status_snapshot_shows_empty_limits_message() {
#[tokio::test]
async fn status_snapshot_shows_empty_limits_message() {
let temp_home = TempDir::new().expect("temp home");
let mut config = test_config(&temp_home);
let mut config = test_config(&temp_home).await;
config.model = Some("gpt-5.1-codex-max".to_string());
config.cwd = PathBuf::from("/workspace/tests");
@ -648,10 +646,10 @@ fn status_snapshot_shows_empty_limits_message() {
assert_snapshot!(sanitized);
}
#[test]
fn status_snapshot_shows_stale_limits_message() {
#[tokio::test]
async fn status_snapshot_shows_stale_limits_message() {
let temp_home = TempDir::new().expect("temp home");
let mut config = test_config(&temp_home);
let mut config = test_config(&temp_home).await;
config.model = Some("gpt-5.1-codex-max".to_string());
config.cwd = PathBuf::from("/workspace/tests");
@ -709,10 +707,10 @@ fn status_snapshot_shows_stale_limits_message() {
assert_snapshot!(sanitized);
}
#[test]
fn status_snapshot_cached_limits_hide_credits_without_flag() {
#[tokio::test]
async fn status_snapshot_cached_limits_hide_credits_without_flag() {
let temp_home = TempDir::new().expect("temp home");
let mut config = test_config(&temp_home);
let mut config = test_config(&temp_home).await;
config.model = Some("gpt-5.1-codex".to_string());
config.cwd = PathBuf::from("/workspace/tests");
@ -774,10 +772,10 @@ fn status_snapshot_cached_limits_hide_credits_without_flag() {
assert_snapshot!(sanitized);
}
#[test]
fn status_context_window_uses_last_usage() {
#[tokio::test]
async fn status_context_window_uses_last_usage() {
let temp_home = TempDir::new().expect("temp home");
let mut config = test_config(&temp_home);
let mut config = test_config(&temp_home).await;
config.model_context_window = Some(272_000);
let auth_manager = test_auth_manager(&config);