Add new apps_mcp_gateway (#11630)

Adds a new apps_mcp_gateway flag that routes Apps MCP calls through
https://api.openai.com/v1/connectors/mcp/ when enabled, while keeping
legacy MCP routing as the default.
This commit is contained in:
canvrno-oai 2026-02-12 16:54:11 -08:00 committed by GitHub
parent c37560069a
commit 46b2da35d5
No known key found for this signature in database
GPG key ID: B5690EEEBB952194
4 changed files with 235 additions and 3 deletions

View file

@ -188,6 +188,9 @@
"apps": {
"type": "boolean"
},
"apps_mcp_gateway": {
"type": "boolean"
},
"child_agents_md": {
"type": "boolean"
},
@ -1275,6 +1278,9 @@
"apps": {
"type": "boolean"
},
"apps_mcp_gateway": {
"type": "boolean"
},
"child_agents_md": {
"type": "boolean"
},

View file

@ -121,6 +121,8 @@ pub enum Feature {
Collab,
/// Enable apps.
Apps,
/// Route apps MCP calls through the configured gateway.
AppsMcpGateway,
/// Allow prompting and installing missing MCP dependencies.
SkillMcpDependencyInstall,
/// Prompt for missing skill env var dependencies.
@ -566,6 +568,12 @@ pub const FEATURES: &[FeatureSpec] = &[
},
default_enabled: false,
},
FeatureSpec {
id: Feature::AppsMcpGateway,
key: "apps_mcp_gateway",
stage: Stage::UnderDevelopment,
default_enabled: false,
},
FeatureSpec {
id: Feature::SkillMcpDependencyInstall,
key: "skill_mcp_dependency_install",

View file

@ -30,6 +30,15 @@ const MCP_TOOL_NAME_PREFIX: &str = "mcp";
const MCP_TOOL_NAME_DELIMITER: &str = "__";
pub(crate) const CODEX_APPS_MCP_SERVER_NAME: &str = "codex_apps";
const CODEX_CONNECTORS_TOKEN_ENV_VAR: &str = "CODEX_CONNECTORS_TOKEN";
const OPENAI_CONNECTORS_MCP_BASE_URL: &str = "https://api.openai.com";
const OPENAI_CONNECTORS_MCP_PATH: &str = "/v1/connectors/mcp/";
// Legacy vs new MCP gateway
// Which backend endpoint the Codex Apps MCP client should talk to.
// Selected per-config by `selected_config_codex_apps_mcp_gateway`.
#[derive(Debug, Clone, Copy, PartialEq, Eq)]
enum CodexAppsMcpGateway {
// Original routing through the ChatGPT base URL (wham/apps or api/codex/apps paths).
LegacyMCPGateway,
// New routing through the fixed OpenAI connectors endpoint (/v1/connectors/mcp/).
MCPGateway,
}
fn codex_apps_mcp_bearer_token_env_var() -> Option<String> {
match env::var(CODEX_CONNECTORS_TOKEN_ENV_VAR) {
@ -65,7 +74,15 @@ fn codex_apps_mcp_http_headers(auth: Option<&CodexAuth>) -> Option<HashMap<Strin
}
}
fn codex_apps_mcp_url(base_url: &str) -> String {
/// Determine which Codex Apps MCP gateway the given config selects.
///
/// The connectors-based gateway is chosen only when the
/// `apps_mcp_gateway` feature flag is enabled; otherwise the legacy
/// gateway remains in effect.
fn selected_config_codex_apps_mcp_gateway(config: &Config) -> CodexAppsMcpGateway {
    match config.features.enabled(Feature::AppsMcpGateway) {
        true => CodexAppsMcpGateway::MCPGateway,
        false => CodexAppsMcpGateway::LegacyMCPGateway,
    }
}
fn normalize_codex_apps_base_url(base_url: &str) -> String {
let mut base_url = base_url.trim_end_matches('/').to_string();
if (base_url.starts_with("https://chatgpt.com")
|| base_url.starts_with("https://chat.openai.com"))
@ -73,6 +90,15 @@ fn codex_apps_mcp_url(base_url: &str) -> String {
{
base_url = format!("{base_url}/backend-api");
}
base_url
}
fn codex_apps_mcp_url_for_gateway(base_url: &str, gateway: CodexAppsMcpGateway) -> String {
if gateway == CodexAppsMcpGateway::MCPGateway {
return format!("{OPENAI_CONNECTORS_MCP_BASE_URL}{OPENAI_CONNECTORS_MCP_PATH}");
}
let base_url = normalize_codex_apps_base_url(base_url);
if base_url.contains("/backend-api") {
format!("{base_url}/wham/apps")
} else if base_url.contains("/api/codex") {
@ -82,6 +108,13 @@ fn codex_apps_mcp_url(base_url: &str) -> String {
}
}
/// Resolve the Codex Apps MCP endpoint URL for this config.
///
/// Honors the `apps_mcp_gateway` feature flag: when enabled the fixed
/// OpenAI connectors URL is returned, otherwise the URL is derived from
/// `config.chatgpt_base_url`.
pub(crate) fn codex_apps_mcp_url(config: &Config) -> String {
    let gateway = selected_config_codex_apps_mcp_gateway(config);
    codex_apps_mcp_url_for_gateway(&config.chatgpt_base_url, gateway)
}
fn codex_apps_mcp_server_config(config: &Config, auth: Option<&CodexAuth>) -> McpServerConfig {
let bearer_token_env_var = codex_apps_mcp_bearer_token_env_var();
let http_headers = if bearer_token_env_var.is_some() {
@ -89,7 +122,7 @@ fn codex_apps_mcp_server_config(config: &Config, auth: Option<&CodexAuth>) -> Mc
} else {
codex_apps_mcp_http_headers(auth)
};
let url = codex_apps_mcp_url(&config.chatgpt_base_url);
let url = codex_apps_mcp_url(config);
McpServerConfig {
transport: McpServerTransportConfig::StreamableHttp {
@ -385,4 +418,128 @@ mod tests {
assert_eq!(group_tools_by_server(&tools), expected);
}
#[test]
fn codex_apps_mcp_url_for_default_gateway_keeps_existing_paths() {
    // (input base URL, expected legacy endpoint)
    let cases = [
        (
            "https://chatgpt.com/backend-api",
            "https://chatgpt.com/backend-api/wham/apps",
        ),
        (
            "https://chat.openai.com",
            "https://chat.openai.com/backend-api/wham/apps",
        ),
        (
            "http://localhost:8080/api/codex",
            "http://localhost:8080/api/codex/apps",
        ),
        ("http://localhost:8080", "http://localhost:8080/api/codex/apps"),
    ];
    for (base_url, expected) in cases {
        assert_eq!(
            codex_apps_mcp_url_for_gateway(base_url, CodexAppsMcpGateway::LegacyMCPGateway),
            expected,
            "unexpected legacy gateway URL for base {base_url}"
        );
    }
}
#[test]
fn codex_apps_mcp_url_for_gateway_uses_openai_connectors_gateway() {
    // The connectors gateway resolves to one fixed URL regardless of base.
    let expected_url = format!("{OPENAI_CONNECTORS_MCP_BASE_URL}{OPENAI_CONNECTORS_MCP_PATH}");
    let base_urls = [
        "https://chatgpt.com/backend-api",
        "https://chat.openai.com",
        "http://localhost:8080/api/codex",
        "http://localhost:8080",
    ];
    for base_url in base_urls {
        assert_eq!(
            codex_apps_mcp_url_for_gateway(base_url, CodexAppsMcpGateway::MCPGateway),
            expected_url,
            "connectors gateway should ignore base {base_url}"
        );
    }
}
#[test]
fn codex_apps_mcp_url_uses_default_gateway_when_feature_is_disabled() {
    let mut config = crate::config::test_config();
    config.chatgpt_base_url = String::from("https://chatgpt.com");
    // Without the flag, the URL is derived from the ChatGPT base URL.
    let url = codex_apps_mcp_url(&config);
    assert_eq!(url, "https://chatgpt.com/backend-api/wham/apps");
}
#[test]
fn codex_apps_mcp_url_uses_openai_connectors_gateway_when_feature_is_enabled() {
    let mut config = crate::config::test_config();
    config.chatgpt_base_url = String::from("https://chatgpt.com");
    config.features.enable(Feature::AppsMcpGateway);
    // With the flag on, the base URL is ignored in favor of the connectors endpoint.
    let expected = format!("{OPENAI_CONNECTORS_MCP_BASE_URL}{OPENAI_CONNECTORS_MCP_PATH}");
    assert_eq!(codex_apps_mcp_url(&config), expected);
}
#[test]
fn codex_apps_server_config_switches_gateway_with_flags() {
    let mut config = crate::config::test_config();
    config.chatgpt_base_url = String::from("https://chatgpt.com");

    // With apps disabled, no codex apps server entry is produced.
    let mut servers = with_codex_apps_mcp(HashMap::new(), false, None, &config);
    assert!(!servers.contains_key(CODEX_APPS_MCP_SERVER_NAME));

    // Enabling apps alone routes through the legacy gateway URL.
    config.features.enable(Feature::Apps);
    servers = with_codex_apps_mcp(servers, true, None, &config);
    let server = servers
        .get(CODEX_APPS_MCP_SERVER_NAME)
        .expect("codex apps should be present when apps is enabled");
    let McpServerTransportConfig::StreamableHttp { url, .. } = &server.transport else {
        panic!("expected streamable http transport for codex apps");
    };
    assert_eq!(url, "https://chatgpt.com/backend-api/wham/apps");

    // Additionally enabling the gateway flag switches to the connectors endpoint.
    config.features.enable(Feature::AppsMcpGateway);
    servers = with_codex_apps_mcp(servers, true, None, &config);
    let server = servers
        .get(CODEX_APPS_MCP_SERVER_NAME)
        .expect("codex apps should remain present when apps stays enabled");
    let McpServerTransportConfig::StreamableHttp { url, .. } = &server.transport else {
        panic!("expected streamable http transport for codex apps");
    };
    let expected_url = format!("{OPENAI_CONNECTORS_MCP_BASE_URL}{OPENAI_CONNECTORS_MCP_PATH}");
    assert_eq!(url, &expected_url);
}
}

View file

@ -1123,7 +1123,12 @@ fn read_cached_codex_apps_tools() -> Option<Vec<ToolInfo>> {
return Some(cached.tools.clone());
}
*cache_guard = None;
if cache_guard
.as_ref()
.is_some_and(|cached| now >= cached.expires_at)
{
*cache_guard = None;
}
None
}
@ -1272,6 +1277,21 @@ mod tests {
}
}
/// Run `f` with the shared codex apps tools cache temporarily emptied,
/// restoring the previous cache contents afterwards so tests that touch
/// the cache stay isolated from each other.
///
/// NOTE(review): if `f` panics the previous cache is not restored —
/// acceptable for tests, where a panic already fails the test.
fn with_clean_codex_apps_tools_cache<T>(f: impl FnOnce() -> T) -> T {
    // A poisoned mutex only means another test panicked; the data is still usable.
    let saved = CODEX_APPS_TOOLS_CACHE
        .lock()
        .unwrap_or_else(std::sync::PoisonError::into_inner)
        .take();
    let result = f();
    *CODEX_APPS_TOOLS_CACHE
        .lock()
        .unwrap_or_else(std::sync::PoisonError::into_inner) = saved;
    result
}
#[test]
fn test_qualify_tools_short_non_duplicated_names() {
let tools = vec![
@ -1424,6 +1444,47 @@ mod tests {
assert_eq!(filtered[0].tool_name, "tool_a");
}
#[test]
fn codex_apps_tools_cache_is_overwritten_by_last_write() {
    with_clean_codex_apps_tools_cache(|| {
        // Each successive write replaces the cached tool list wholesale.
        for tool_name in ["one", "two"] {
            let tools = vec![create_test_tool(CODEX_APPS_MCP_SERVER_NAME, tool_name)];
            write_cached_codex_apps_tools(&tools);
            let cached =
                read_cached_codex_apps_tools().expect("cache entry exists after write");
            assert_eq!(cached[0].tool_name, tool_name);
        }
    });
}
#[test]
fn codex_apps_tools_cache_is_cleared_when_expired() {
    with_clean_codex_apps_tools_cache(|| {
        let stale = vec![create_test_tool(CODEX_APPS_MCP_SERVER_NAME, "stale_tool")];
        write_cached_codex_apps_tools(&stale);

        // Force the freshly written entry's expiry into the past.
        CODEX_APPS_TOOLS_CACHE
            .lock()
            .unwrap_or_else(std::sync::PoisonError::into_inner)
            .as_mut()
            .expect("cache exists")
            .expires_at = Instant::now() - Duration::from_secs(1);

        // Reading an expired entry yields nothing and evicts it from the cache.
        assert!(read_cached_codex_apps_tools().is_none());
        let guard = CODEX_APPS_TOOLS_CACHE
            .lock()
            .unwrap_or_else(std::sync::PoisonError::into_inner);
        assert!(guard.is_none());
    });
}
#[test]
fn mcp_init_error_display_prompts_for_github_pat() {
let server_name = "github";