diff --git a/codex-rs/core/config.schema.json b/codex-rs/core/config.schema.json index 1bd7c1a93..b95a36a78 100644 --- a/codex-rs/core/config.schema.json +++ b/codex-rs/core/config.schema.json @@ -188,6 +188,9 @@ "apps": { "type": "boolean" }, + "apps_mcp_gateway": { + "type": "boolean" + }, "child_agents_md": { "type": "boolean" }, @@ -1275,6 +1278,9 @@ "apps": { "type": "boolean" }, + "apps_mcp_gateway": { + "type": "boolean" + }, "child_agents_md": { "type": "boolean" }, diff --git a/codex-rs/core/src/features.rs b/codex-rs/core/src/features.rs index 9df518b81..2fbf81403 100644 --- a/codex-rs/core/src/features.rs +++ b/codex-rs/core/src/features.rs @@ -121,6 +121,8 @@ pub enum Feature { Collab, /// Enable apps. Apps, + /// Route apps MCP calls through the OpenAI connectors MCP gateway instead of the legacy ChatGPT backend endpoint. + AppsMcpGateway, /// Allow prompting and installing missing MCP dependencies. SkillMcpDependencyInstall, /// Prompt for missing skill env var dependencies. @@ -566,6 +568,12 @@ pub const FEATURES: &[FeatureSpec] = &[ }, default_enabled: false, }, + FeatureSpec { + id: Feature::AppsMcpGateway, + key: "apps_mcp_gateway", + stage: Stage::UnderDevelopment, + default_enabled: false, + }, FeatureSpec { id: Feature::SkillMcpDependencyInstall, key: "skill_mcp_dependency_install", diff --git a/codex-rs/core/src/mcp/mod.rs b/codex-rs/core/src/mcp/mod.rs index 0ed0d221d..1365b5da8 100644 --- a/codex-rs/core/src/mcp/mod.rs +++ b/codex-rs/core/src/mcp/mod.rs @@ -30,6 +30,15 @@ const MCP_TOOL_NAME_PREFIX: &str = "mcp"; const MCP_TOOL_NAME_DELIMITER: &str = "__"; pub(crate) const CODEX_APPS_MCP_SERVER_NAME: &str = "codex_apps"; const CODEX_CONNECTORS_TOKEN_ENV_VAR: &str = "CODEX_CONNECTORS_TOKEN"; +const OPENAI_CONNECTORS_MCP_BASE_URL: &str = "https://api.openai.com"; +const OPENAI_CONNECTORS_MCP_PATH: &str = "/v1/connectors/mcp/"; + +// Selects whether apps MCP traffic targets the legacy ChatGPT backend endpoint or the new OpenAI connectors gateway (gated by the `apps_mcp_gateway` feature). +#[derive(Debug, Clone, Copy, PartialEq, Eq)] +enum CodexAppsMcpGateway { + LegacyMCPGateway, + MCPGateway, +} fn
codex_apps_mcp_bearer_token_env_var() -> Option { match env::var(CODEX_CONNECTORS_TOKEN_ENV_VAR) { @@ -65,7 +74,15 @@ fn codex_apps_mcp_http_headers(auth: Option<&CodexAuth>) -> Option String { +fn selected_config_codex_apps_mcp_gateway(config: &Config) -> CodexAppsMcpGateway { + if config.features.enabled(Feature::AppsMcpGateway) { + CodexAppsMcpGateway::MCPGateway + } else { + CodexAppsMcpGateway::LegacyMCPGateway + } +} + +fn normalize_codex_apps_base_url(base_url: &str) -> String { let mut base_url = base_url.trim_end_matches('/').to_string(); if (base_url.starts_with("https://chatgpt.com") || base_url.starts_with("https://chat.openai.com")) @@ -73,6 +90,15 @@ fn codex_apps_mcp_url(base_url: &str) -> String { { base_url = format!("{base_url}/backend-api"); } + base_url +} + +fn codex_apps_mcp_url_for_gateway(base_url: &str, gateway: CodexAppsMcpGateway) -> String { + if gateway == CodexAppsMcpGateway::MCPGateway { + return format!("{OPENAI_CONNECTORS_MCP_BASE_URL}{OPENAI_CONNECTORS_MCP_PATH}"); + } + + let base_url = normalize_codex_apps_base_url(base_url); if base_url.contains("/backend-api") { format!("{base_url}/wham/apps") } else if base_url.contains("/api/codex") { @@ -82,6 +108,13 @@ fn codex_apps_mcp_url(base_url: &str) -> String { } } +pub(crate) fn codex_apps_mcp_url(config: &Config) -> String { + codex_apps_mcp_url_for_gateway( + &config.chatgpt_base_url, + selected_config_codex_apps_mcp_gateway(config), + ) +} + fn codex_apps_mcp_server_config(config: &Config, auth: Option<&CodexAuth>) -> McpServerConfig { let bearer_token_env_var = codex_apps_mcp_bearer_token_env_var(); let http_headers = if bearer_token_env_var.is_some() { @@ -89,7 +122,7 @@ fn codex_apps_mcp_server_config(config: &Config, auth: Option<&CodexAuth>) -> Mc } else { codex_apps_mcp_http_headers(auth) }; - let url = codex_apps_mcp_url(&config.chatgpt_base_url); + let url = codex_apps_mcp_url(config); McpServerConfig { transport: McpServerTransportConfig::StreamableHttp { @@ -385,4 
+418,128 @@ mod tests { assert_eq!(group_tools_by_server(&tools), expected); } + + #[test] + fn codex_apps_mcp_url_for_default_gateway_keeps_existing_paths() { + assert_eq!( + codex_apps_mcp_url_for_gateway( + "https://chatgpt.com/backend-api", + CodexAppsMcpGateway::LegacyMCPGateway + ), + "https://chatgpt.com/backend-api/wham/apps" + ); + assert_eq!( + codex_apps_mcp_url_for_gateway( + "https://chat.openai.com", + CodexAppsMcpGateway::LegacyMCPGateway + ), + "https://chat.openai.com/backend-api/wham/apps" + ); + assert_eq!( + codex_apps_mcp_url_for_gateway( + "http://localhost:8080/api/codex", + CodexAppsMcpGateway::LegacyMCPGateway + ), + "http://localhost:8080/api/codex/apps" + ); + assert_eq!( + codex_apps_mcp_url_for_gateway( + "http://localhost:8080", + CodexAppsMcpGateway::LegacyMCPGateway + ), + "http://localhost:8080/api/codex/apps" + ); + } + + #[test] + fn codex_apps_mcp_url_for_gateway_uses_openai_connectors_gateway() { + let expected_url = format!("{OPENAI_CONNECTORS_MCP_BASE_URL}{OPENAI_CONNECTORS_MCP_PATH}"); + + assert_eq!( + codex_apps_mcp_url_for_gateway( + "https://chatgpt.com/backend-api", + CodexAppsMcpGateway::MCPGateway + ), + expected_url.as_str() + ); + assert_eq!( + codex_apps_mcp_url_for_gateway( + "https://chat.openai.com", + CodexAppsMcpGateway::MCPGateway + ), + expected_url.as_str() + ); + assert_eq!( + codex_apps_mcp_url_for_gateway( + "http://localhost:8080/api/codex", + CodexAppsMcpGateway::MCPGateway + ), + expected_url.as_str() + ); + assert_eq!( + codex_apps_mcp_url_for_gateway( + "http://localhost:8080", + CodexAppsMcpGateway::MCPGateway + ), + expected_url.as_str() + ); + } + + #[test] + fn codex_apps_mcp_url_uses_default_gateway_when_feature_is_disabled() { + let mut config = crate::config::test_config(); + config.chatgpt_base_url = "https://chatgpt.com".to_string(); + + assert_eq!( + codex_apps_mcp_url(&config), + "https://chatgpt.com/backend-api/wham/apps" + ); + } + + #[test] + fn 
codex_apps_mcp_url_uses_openai_connectors_gateway_when_feature_is_enabled() { + let mut config = crate::config::test_config(); + config.chatgpt_base_url = "https://chatgpt.com".to_string(); + config.features.enable(Feature::AppsMcpGateway); + + assert_eq!( + codex_apps_mcp_url(&config), + format!("{OPENAI_CONNECTORS_MCP_BASE_URL}{OPENAI_CONNECTORS_MCP_PATH}") + ); + } + + #[test] + fn codex_apps_server_config_switches_gateway_with_flags() { + let mut config = crate::config::test_config(); + config.chatgpt_base_url = "https://chatgpt.com".to_string(); + + let mut servers = with_codex_apps_mcp(HashMap::new(), false, None, &config); + assert!(!servers.contains_key(CODEX_APPS_MCP_SERVER_NAME)); + + config.features.enable(Feature::Apps); + + servers = with_codex_apps_mcp(servers, true, None, &config); + let server = servers + .get(CODEX_APPS_MCP_SERVER_NAME) + .expect("codex apps should be present when apps is enabled"); + let url = match &server.transport { + McpServerTransportConfig::StreamableHttp { url, .. } => url, + _ => panic!("expected streamable http transport for codex apps"), + }; + + assert_eq!(url, "https://chatgpt.com/backend-api/wham/apps"); + + config.features.enable(Feature::AppsMcpGateway); + servers = with_codex_apps_mcp(servers, true, None, &config); + let server = servers + .get(CODEX_APPS_MCP_SERVER_NAME) + .expect("codex apps should remain present when apps stays enabled"); + let url = match &server.transport { + McpServerTransportConfig::StreamableHttp { url, .. 
} => url, + _ => panic!("expected streamable http transport for codex apps"), + }; + + let expected_url = format!("{OPENAI_CONNECTORS_MCP_BASE_URL}{OPENAI_CONNECTORS_MCP_PATH}"); + assert_eq!(url, &expected_url); + } } diff --git a/codex-rs/core/src/mcp_connection_manager.rs b/codex-rs/core/src/mcp_connection_manager.rs index edd1e13f6..5b9d3b997 100644 --- a/codex-rs/core/src/mcp_connection_manager.rs +++ b/codex-rs/core/src/mcp_connection_manager.rs @@ -1123,7 +1123,12 @@ fn read_cached_codex_apps_tools() -> Option> { return Some(cached.tools.clone()); } - *cache_guard = None; + if cache_guard + .as_ref() + .is_some_and(|cached| now >= cached.expires_at) + { + *cache_guard = None; + } None } @@ -1272,6 +1277,21 @@ mod tests { } } + fn with_clean_codex_apps_tools_cache(f: impl FnOnce() -> T) -> T { + let previous_cache = { + let mut cache_guard = CODEX_APPS_TOOLS_CACHE + .lock() + .unwrap_or_else(std::sync::PoisonError::into_inner); + cache_guard.take() + }; + let result = f(); + let mut cache_guard = CODEX_APPS_TOOLS_CACHE + .lock() + .unwrap_or_else(std::sync::PoisonError::into_inner); + *cache_guard = previous_cache; + result + } + #[test] fn test_qualify_tools_short_non_duplicated_names() { let tools = vec![ @@ -1424,6 +1444,47 @@ mod tests { assert_eq!(filtered[0].tool_name, "tool_a"); } + #[test] + fn codex_apps_tools_cache_is_overwritten_by_last_write() { + with_clean_codex_apps_tools_cache(|| { + let tools_gateway_1 = vec![create_test_tool(CODEX_APPS_MCP_SERVER_NAME, "one")]; + let tools_gateway_2 = vec![create_test_tool(CODEX_APPS_MCP_SERVER_NAME, "two")]; + + write_cached_codex_apps_tools(&tools_gateway_1); + let cached_gateway_1 = + read_cached_codex_apps_tools().expect("cache entry exists for first write"); + assert_eq!(cached_gateway_1[0].tool_name, "one"); + + write_cached_codex_apps_tools(&tools_gateway_2); + let cached_gateway_2 = + read_cached_codex_apps_tools().expect("cache entry exists for second write"); + 
assert_eq!(cached_gateway_2[0].tool_name, "two"); + }); + } + + #[test] + fn codex_apps_tools_cache_is_cleared_when_expired() { + with_clean_codex_apps_tools_cache(|| { + let tools = vec![create_test_tool(CODEX_APPS_MCP_SERVER_NAME, "stale_tool")]; + write_cached_codex_apps_tools(&tools); + + { + let mut cache_guard = CODEX_APPS_TOOLS_CACHE + .lock() + .unwrap_or_else(std::sync::PoisonError::into_inner); + cache_guard.as_mut().expect("cache exists").expires_at = + Instant::now() - Duration::from_secs(1); + } + + assert!(read_cached_codex_apps_tools().is_none()); + + let cache_guard = CODEX_APPS_TOOLS_CACHE + .lock() + .unwrap_or_else(std::sync::PoisonError::into_inner); + assert!(cache_guard.is_none()); + }); + } + #[test] fn mcp_init_error_display_prompts_for_github_pat() { let server_name = "github";