fix(tui): implement /mcp inventory for tui_app_server (#14931)

## Problem

The `/mcp` command did not work in the app-server TUI (remote mode). On
`main`, `add_mcp_output()` called `McpManager::effective_servers()`
in-process, which only sees locally configured servers, and then emitted
a generic stub message for the app-server to handle. In remote usage,
that left `/mcp` without a real inventory view.

## Solution

Implement `/mcp` for the app-server TUI by fetching MCP server inventory
directly from the app-server via the paginated `mcpServerStatus/list`
RPC and rendering the results into chat history.

The command now follows a three-phase lifecycle:

1. Loading: `ChatWidget::add_mcp_output()` inserts a transient
`McpInventoryLoadingCell` and emits `AppEvent::FetchMcpInventory`. This
gives immediate feedback that the command registered.
2. Fetch: `App::fetch_mcp_inventory()` spawns a background task that
calls `fetch_all_mcp_server_statuses()` over an app-server request
handle. When the RPC completes, it sends `AppEvent::McpInventoryLoaded {
result }`.
3. Resolve: `App::handle_mcp_inventory_result()` clears the loading cell
and renders either `new_mcp_tools_output_from_statuses(...)` or an error
message.

This keeps the main app event loop responsive, so the TUI can repaint
before the remote RPC finishes.

## Notes

- No `app-server` changes were required.
- The rendered inventory includes auth, tools, resources, and resource
templates, plus transport details when they are available from local
config for display enrichment.
- The app-server RPC does not expose authoritative `enabled` or
`disabled_reason` state for MCP servers, so the remote `/mcp` view omits
the `Status:` row rather than guessing from local config.
- RPC failures surface in history as `Failed to load MCP inventory:
...`.

## Tests

- `slash_mcp_requests_inventory_via_app_server`
- `mcp_inventory_maps_prefix_tool_names_by_server`
- `handle_mcp_inventory_result_clears_committed_loading_cell`
- `mcp_tools_output_from_statuses_renders_status_only_servers`
- `mcp_inventory_loading_snapshot`
This commit is contained in:
Felipe Coury 2026-03-17 19:11:27 -03:00 committed by GitHub
parent 0d2ff40a58
commit 43ee72a9b9
No known key found for this signature in database
GPG key ID: B5690EEEBB952194
7 changed files with 581 additions and 9 deletions

View file

@ -44,7 +44,13 @@ use crate::tui::TuiEvent;
use crate::update_action::UpdateAction;
use crate::version::CODEX_CLI_VERSION;
use codex_ansi_escape::ansi_escape_line;
use codex_app_server_client::AppServerRequestHandle;
use codex_app_server_protocol::ClientRequest;
use codex_app_server_protocol::ConfigLayerSource;
use codex_app_server_protocol::ListMcpServerStatusParams;
use codex_app_server_protocol::ListMcpServerStatusResponse;
use codex_app_server_protocol::McpServerStatus;
use codex_app_server_protocol::RequestId;
use codex_core::config::Config;
use codex_core::config::ConfigBuilder;
use codex_core::config::ConfigOverrides;
@ -75,6 +81,8 @@ use codex_protocol::protocol::EventMsg;
use codex_protocol::protocol::FinalOutput;
use codex_protocol::protocol::ListSkillsResponseEvent;
#[cfg(test)]
use codex_protocol::protocol::McpAuthStatus;
#[cfg(test)]
use codex_protocol::protocol::Op;
use codex_protocol::protocol::SandboxPolicy;
use codex_protocol::protocol::SessionConfiguredEvent;
@ -111,6 +119,7 @@ use tokio::sync::mpsc::error::TrySendError;
use tokio::sync::mpsc::unbounded_channel;
use tokio::task::JoinHandle;
use toml::Value as TomlValue;
use uuid::Uuid;
mod agent_navigation;
mod app_server_adapter;
mod app_server_requests;
@ -1536,6 +1545,72 @@ impl App {
Ok(())
}
/// Spawn a background task that fetches the full MCP server inventory from the
/// app-server via paginated RPCs, then delivers the result back through
/// `AppEvent::McpInventoryLoaded`.
///
/// The spawned task is fire-and-forget: no `JoinHandle` is retained, so a stale
/// result can still arrive after the user has moved on. That tradeoff is
/// accepted because the effect is limited to stale inventory output in history,
/// while request-token invalidation would add cross-cutting async state for a
/// low-severity path.
fn fetch_mcp_inventory(&mut self, app_server: &AppServerSession) {
    let handle = app_server.request_handle();
    let events = self.app_event_tx.clone();
    tokio::spawn(async move {
        // Flatten the RPC error to a String so the event stays cheap to clone.
        let result = fetch_all_mcp_server_statuses(handle)
            .await
            .map_err(|err| err.to_string());
        events.send(AppEvent::McpInventoryLoaded { result });
    });
}
/// Process the completed MCP inventory fetch: clear the loading spinner, then
/// render either the full tool/resource listing or an error into chat history.
///
/// When both the local config and the app-server report zero servers, a special
/// "empty" cell is shown instead of the full table.
fn handle_mcp_inventory_result(&mut self, result: Result<Vec<McpServerStatus>, String>) {
    // Always clear the loading spinner first, regardless of outcome: both the
    // in-flight active cell and any copy already committed to the transcript.
    self.chat_widget.clear_mcp_inventory_loading();
    self.clear_committed_mcp_inventory_loading();
    let statuses = match result {
        Ok(statuses) => statuses,
        Err(err) => {
            self.chat_widget
                .add_error_message(format!("Failed to load MCP inventory: {err}"));
            return;
        }
    };
    // Clone the config only on the success path; the error return above does
    // not need it, so it should not pay for a full Config clone.
    let config = self.chat_widget.config_ref().clone();
    if config.mcp_servers.get().is_empty() && statuses.is_empty() {
        self.chat_widget
            .add_to_history(history_cell::empty_mcp_output());
        return;
    }
    self.chat_widget
        .add_to_history(history_cell::new_mcp_tools_output_from_statuses(
            &config, &statuses,
        ));
}
/// Remove the most recently committed `McpInventoryLoadingCell` from the
/// transcript, if one exists, and refresh an open transcript overlay to match.
fn clear_committed_mcp_inventory_loading(&mut self) {
    // Search from the back: the latest loading cell is the one to remove.
    let loading_index = self
        .transcript_cells
        .iter()
        .rposition(|cell| cell.as_any().is::<history_cell::McpInventoryLoadingCell>());
    if let Some(index) = loading_index {
        self.transcript_cells.remove(index);
        // Keep an open transcript overlay in sync with the mutated cells.
        if let Some(Overlay::Transcript(overlay)) = &mut self.overlay {
            overlay.replace_cells(self.transcript_cells.clone());
        }
    }
}
async fn try_submit_active_thread_op_via_app_server(
&mut self,
app_server: &mut AppServerSession,
@ -3047,6 +3122,12 @@ impl App {
AppEvent::RefreshConnectors { force_refetch } => {
self.chat_widget.refresh_connectors(force_refetch);
}
AppEvent::FetchMcpInventory => {
self.fetch_mcp_inventory(app_server);
}
AppEvent::McpInventoryLoaded { result } => {
self.handle_mcp_inventory_result(result);
}
AppEvent::StartFileSearch(query) => {
self.file_search.on_user_query(query);
}
@ -4469,6 +4550,80 @@ impl App {
}
}
/// Collect every MCP server status from the app-server by walking the paginated
/// `mcpServerStatus/list` RPC until no `next_cursor` is returned.
///
/// All pages are eagerly gathered into a single `Vec` so the caller can render
/// the inventory atomically. Each page requests up to 100 entries.
async fn fetch_all_mcp_server_statuses(
    request_handle: AppServerRequestHandle,
) -> Result<Vec<McpServerStatus>> {
    let mut cursor = None;
    let mut statuses = Vec::new();
    loop {
        // Each page gets a unique request id for correlation on the wire.
        let request_id = RequestId::String(format!("mcp-inventory-{}", Uuid::new_v4()));
        let response: ListMcpServerStatusResponse = request_handle
            .request_typed(ClientRequest::McpServerStatusList {
                request_id,
                params: ListMcpServerStatusParams {
                    // Move the cursor into the request instead of cloning it;
                    // it is reassigned from `next_cursor` below before the
                    // next iteration reads it.
                    cursor: cursor.take(),
                    limit: Some(100),
                },
            })
            .await
            .wrap_err("mcpServerStatus/list failed in app-server TUI")?;
        statuses.extend(response.data);
        cursor = response.next_cursor;
        if cursor.is_none() {
            break;
        }
    }
    Ok(statuses)
}
/// Convert flat `McpServerStatus` responses into the per-server maps used by the
/// in-process MCP subsystem (tools keyed as `mcp__{server}__{tool}`, plus
/// per-server resource/template/auth maps). Test-only because the app-server TUI
/// renders directly from `McpServerStatus` rather than these maps.
#[cfg(test)]
type McpInventoryMaps = (
    HashMap<String, codex_protocol::mcp::Tool>,
    HashMap<String, Vec<codex_protocol::mcp::Resource>>,
    HashMap<String, Vec<codex_protocol::mcp::ResourceTemplate>>,
    HashMap<String, McpAuthStatus>,
);

#[cfg(test)]
fn mcp_inventory_maps_from_statuses(statuses: Vec<McpServerStatus>) -> McpInventoryMaps {
    // Translate the app-server protocol auth enum into the core protocol enum.
    fn convert_auth(auth: codex_app_server_protocol::McpAuthStatus) -> McpAuthStatus {
        match auth {
            codex_app_server_protocol::McpAuthStatus::Unsupported => McpAuthStatus::Unsupported,
            codex_app_server_protocol::McpAuthStatus::NotLoggedIn => McpAuthStatus::NotLoggedIn,
            codex_app_server_protocol::McpAuthStatus::BearerToken => McpAuthStatus::BearerToken,
            codex_app_server_protocol::McpAuthStatus::OAuth => McpAuthStatus::OAuth,
        }
    }

    let mut tools = HashMap::new();
    let mut resources = HashMap::new();
    let mut resource_templates = HashMap::new();
    let mut auth_statuses = HashMap::new();
    for status in statuses {
        let server = status.name;
        auth_statuses.insert(server.clone(), convert_auth(status.auth_status));
        resources.insert(server.clone(), status.resources);
        resource_templates.insert(server.clone(), status.resource_templates);
        // Tool names are namespaced per server, matching the in-process
        // `mcp__{server}__{tool}` key scheme.
        for (tool_name, tool) in status.tools {
            tools.insert(format!("mcp__{server}__{tool_name}"), tool);
        }
    }
    (tools, resources, resource_templates, auth_statuses)
}
#[cfg(test)]
mod tests {
use super::*;
@ -4500,11 +4655,13 @@ mod tests {
use codex_protocol::config_types::CollaborationModeMask;
use codex_protocol::config_types::ModeKind;
use codex_protocol::config_types::Settings;
use codex_protocol::mcp::Tool;
use codex_protocol::openai_models::ModelAvailabilityNux;
use codex_protocol::protocol::AgentMessageDeltaEvent;
use codex_protocol::protocol::AskForApproval;
use codex_protocol::protocol::Event;
use codex_protocol::protocol::EventMsg;
use codex_protocol::protocol::McpAuthStatus;
use codex_protocol::protocol::SandboxPolicy;
use codex_protocol::protocol::SessionConfiguredEvent;
use codex_protocol::protocol::SessionSource;
@ -4545,6 +4702,75 @@ mod tests {
Ok(())
}
// `mcp_inventory_maps_from_statuses` must namespace tool keys as
// `mcp__{server}__{tool}`, and must still emit per-server resource,
// template, and auth entries for servers that expose no tools.
#[test]
fn mcp_inventory_maps_prefix_tool_names_by_server() {
let statuses = vec![
// A server with a single tool named "list".
McpServerStatus {
name: "docs".to_string(),
tools: HashMap::from([(
"list".to_string(),
Tool {
description: None,
name: "list".to_string(),
title: None,
input_schema: serde_json::json!({"type": "object"}),
output_schema: None,
annotations: None,
icons: None,
meta: None,
},
)]),
resources: Vec::new(),
resource_templates: Vec::new(),
auth_status: codex_app_server_protocol::McpAuthStatus::Unsupported,
},
// A server with no tools at all; it must still appear in the
// resource/template/auth maps.
McpServerStatus {
name: "disabled".to_string(),
tools: HashMap::new(),
resources: Vec::new(),
resource_templates: Vec::new(),
auth_status: codex_app_server_protocol::McpAuthStatus::Unsupported,
},
];
let (tools, resources, resource_templates, auth_statuses) =
mcp_inventory_maps_from_statuses(statuses);
// Sort the map keys so assertions are independent of HashMap order.
let mut resource_names = resources.keys().cloned().collect::<Vec<_>>();
resource_names.sort();
let mut template_names = resource_templates.keys().cloned().collect::<Vec<_>>();
template_names.sort();
assert_eq!(
tools.keys().cloned().collect::<Vec<_>>(),
vec!["mcp__docs__list".to_string()]
);
assert_eq!(resource_names, vec!["disabled", "docs"]);
assert_eq!(template_names, vec!["disabled", "docs"]);
assert_eq!(
auth_statuses.get("disabled"),
Some(&McpAuthStatus::Unsupported)
);
}
// A loading cell that was already committed to the transcript must be
// removed when the inventory result arrives, even on the success path.
#[tokio::test]
async fn handle_mcp_inventory_result_clears_committed_loading_cell() {
let mut app = make_test_app().await;
// Simulate a loading cell that has been committed to transcript history.
app.transcript_cells
.push(Arc::new(history_cell::new_mcp_inventory_loading(
/*animations_enabled*/ false,
)));
app.handle_mcp_inventory_result(Ok(vec![McpServerStatus {
name: "docs".to_string(),
tools: HashMap::new(),
resources: Vec::new(),
resource_templates: Vec::new(),
auth_status: codex_app_server_protocol::McpAuthStatus::Unsupported,
}]));
// The committed loading cell is gone from the transcript.
assert_eq!(app.transcript_cells.len(), 0);
}
#[test]
fn startup_waiting_gate_is_only_for_fresh_or_exit_session_selection() {
assert_eq!(

View file

@ -10,6 +10,7 @@
use std::path::PathBuf;
use codex_app_server_protocol::McpServerStatus;
use codex_chatgpt::connectors::AppInfo;
use codex_file_search::FileMatch;
use codex_protocol::ThreadId;
@ -165,6 +166,14 @@ pub(crate) enum AppEvent {
force_refetch: bool,
},
/// Fetch MCP inventory via app-server RPCs and render it into history.
FetchMcpInventory,
/// Result of fetching MCP inventory via app-server RPCs.
McpInventoryLoaded {
result: Result<Vec<McpServerStatus>, String>,
},
InsertHistoryCell(Box<dyn HistoryCell>),
/// Apply rollback semantics to local transcript cells.

View file

@ -67,7 +67,6 @@ use codex_core::find_thread_name_by_id;
use codex_core::git_info::current_branch_name;
use codex_core::git_info::get_git_repo_root;
use codex_core::git_info::local_git_branches;
use codex_core::mcp::McpManager;
use codex_core::plugins::PluginsManager;
use codex_core::project_doc::DEFAULT_PROJECT_DOC_FILENAME;
use codex_core::skills::model::SkillMetadata;
@ -8243,18 +8242,39 @@ impl ChatWidget {
PlainHistoryCell::new(vec![line.into()])
}
/// Begin the asynchronous MCP inventory flow: show a loading spinner and
/// request the app-server fetch via `AppEvent::FetchMcpInventory`.
///
/// The spinner lives in `active_cell` and is cleared by
/// [`clear_mcp_inventory_loading`] once the result arrives.
pub(crate) fn add_mcp_output(&mut self) {
let mcp_manager = McpManager::new(Arc::new(PluginsManager::new(
self.config.codex_home.clone(),
self.flush_answer_stream_with_separator();
self.flush_active_cell();
self.active_cell = Some(Box::new(history_cell::new_mcp_inventory_loading(
self.config.animations,
)));
if mcp_manager
.effective_servers(&self.config, /*auth*/ None)
.is_empty()
self.bump_active_cell_revision();
self.request_redraw();
self.app_event_tx.send(AppEvent::FetchMcpInventory);
}
/// Remove the MCP loading spinner if it is still the active cell.
///
/// Uses `Any`-based type checking so that a late-arriving inventory result
/// does not accidentally clear an unrelated cell that was set in the meantime.
pub(crate) fn clear_mcp_inventory_loading(&mut self) {
let Some(active) = self.active_cell.as_ref() else {
return;
};
if !active
.as_any()
.is::<history_cell::McpInventoryLoadingCell>()
{
self.add_to_history(history_cell::empty_mcp_output());
} else {
self.add_app_server_stub_message("MCP tool inventory");
return;
}
self.active_cell = None;
self.bump_active_cell_revision();
self.request_redraw();
}
pub(crate) fn add_connectors_output(&mut self) {

View file

@ -6042,6 +6042,17 @@ async fn slash_memory_drop_reports_stubbed_feature() {
);
}
// `/mcp` must show the loading cell immediately and delegate the fetch to
// the app via `AppEvent::FetchMcpInventory`, without sending any core op.
#[tokio::test]
async fn slash_mcp_requests_inventory_via_app_server() {
let (mut chat, mut rx, mut op_rx) = make_chatwidget_manual(None).await;
chat.dispatch_command(SlashCommand::Mcp);
// The transient loading cell is visible right away.
assert!(active_blob(&chat).contains("Loading MCP inventory"));
// The fetch is delegated to the app event loop, not the core op channel.
assert_matches!(rx.try_recv(), Ok(AppEvent::FetchMcpInventory));
assert!(op_rx.try_recv().is_err(), "expected no core op to be sent");
}
#[tokio::test]
async fn slash_memory_update_reports_stubbed_feature() {
let (mut chat, mut rx, mut op_rx) = make_chatwidget_manual(None).await;

View file

@ -37,6 +37,7 @@ use crate::wrapping::RtOptions;
use crate::wrapping::adaptive_wrap_line;
use crate::wrapping::adaptive_wrap_lines;
use base64::Engine;
use codex_app_server_protocol::McpServerStatus;
use codex_core::config::Config;
use codex_core::config::types::McpServerTransportConfig;
use codex_core::mcp::McpManager;
@ -1963,6 +1964,179 @@ pub(crate) fn new_mcp_tools_output(
PlainHistoryCell { lines }
}
/// Build the `/mcp` history cell from app-server `McpServerStatus` responses.
///
/// The server list comes directly from the app-server status response, sorted
/// alphabetically. Local config is only used to enrich returned servers with
/// transport details such as command, URL, cwd, and environment display.
///
/// This mirrors the layout of [`new_mcp_tools_output`] but sources data from
/// the paginated RPC response rather than the in-process `McpManager`.
pub(crate) fn new_mcp_tools_output_from_statuses(
config: &Config,
statuses: &[McpServerStatus],
) -> PlainHistoryCell {
let mut lines: Vec<Line<'static>> = vec![
"/mcp".magenta().into(),
"".into(),
vec!["🔌 ".into(), "MCP Tools".bold()].into(),
"".into(),
];
let mut statuses_by_name = HashMap::new();
for status in statuses {
statuses_by_name.insert(status.name.as_str(), status);
}
let mut server_names: Vec<String> = statuses.iter().map(|status| status.name.clone()).collect();
server_names.sort();
let has_any_tools = statuses.iter().any(|status| !status.tools.is_empty());
if !has_any_tools {
lines.push(" • No MCP tools available.".italic().into());
lines.push("".into());
}
for server in server_names {
let cfg = config.mcp_servers.get().get(server.as_str());
let status = statuses_by_name.get(server.as_str()).copied();
let header: Vec<Span<'static>> = vec!["".into(), server.clone().into()];
lines.push(header.into());
let auth_status = status
.map(|status| match status.auth_status {
codex_app_server_protocol::McpAuthStatus::Unsupported => McpAuthStatus::Unsupported,
codex_app_server_protocol::McpAuthStatus::NotLoggedIn => McpAuthStatus::NotLoggedIn,
codex_app_server_protocol::McpAuthStatus::BearerToken => McpAuthStatus::BearerToken,
codex_app_server_protocol::McpAuthStatus::OAuth => McpAuthStatus::OAuth,
})
.unwrap_or(McpAuthStatus::Unsupported);
lines.push(vec![" • Auth: ".into(), auth_status.to_string().into()].into());
if let Some(cfg) = cfg {
match &cfg.transport {
McpServerTransportConfig::Stdio {
command,
args,
env,
env_vars,
cwd,
} => {
let args_suffix = if args.is_empty() {
String::new()
} else {
format!(" {}", args.join(" "))
};
let cmd_display = format!("{command}{args_suffix}");
lines.push(vec![" • Command: ".into(), cmd_display.into()].into());
if let Some(cwd) = cwd.as_ref() {
lines.push(
vec![" • Cwd: ".into(), cwd.display().to_string().into()].into(),
);
}
let env_display = format_env_display(env.as_ref(), env_vars.as_slice());
if env_display != "-" {
lines.push(vec![" • Env: ".into(), env_display.into()].into());
}
}
McpServerTransportConfig::StreamableHttp {
url,
http_headers,
env_http_headers,
..
} => {
lines.push(vec![" • URL: ".into(), url.clone().into()].into());
if let Some(headers) = http_headers.as_ref()
&& !headers.is_empty()
{
let mut pairs: Vec<_> = headers.iter().collect();
pairs.sort_by(|(a, _), (b, _)| a.cmp(b));
let display = pairs
.into_iter()
.map(|(name, _)| format!("{name}=*****"))
.collect::<Vec<_>>()
.join(", ");
lines.push(vec![" • HTTP headers: ".into(), display.into()].into());
}
if let Some(headers) = env_http_headers.as_ref()
&& !headers.is_empty()
{
let mut pairs: Vec<_> = headers.iter().collect();
pairs.sort_by(|(a, _), (b, _)| a.cmp(b));
let display = pairs
.into_iter()
.map(|(name, var)| format!("{name}={var}"))
.collect::<Vec<_>>()
.join(", ");
lines.push(vec![" • Env HTTP headers: ".into(), display.into()].into());
}
}
}
}
let mut names = status
.map(|status| status.tools.keys().cloned().collect::<Vec<_>>())
.unwrap_or_default();
names.sort();
if names.is_empty() {
lines.push(" • Tools: (none)".into());
} else {
lines.push(vec![" • Tools: ".into(), names.join(", ").into()].into());
}
let server_resources = status
.map(|status| status.resources.clone())
.unwrap_or_default();
if server_resources.is_empty() {
lines.push(" • Resources: (none)".into());
} else {
let mut spans: Vec<Span<'static>> = vec![" • Resources: ".into()];
for (idx, resource) in server_resources.iter().enumerate() {
if idx > 0 {
spans.push(", ".into());
}
let label = resource.title.as_ref().unwrap_or(&resource.name);
spans.push(label.clone().into());
spans.push(" ".into());
spans.push(format!("({})", resource.uri).dim());
}
lines.push(spans.into());
}
let server_templates = status
.map(|status| status.resource_templates.clone())
.unwrap_or_default();
if server_templates.is_empty() {
lines.push(" • Resource templates: (none)".into());
} else {
let mut spans: Vec<Span<'static>> = vec![" • Resource templates: ".into()];
for (idx, template) in server_templates.iter().enumerate() {
if idx > 0 {
spans.push(", ".into());
}
let label = template.title.as_ref().unwrap_or(&template.name);
spans.push(label.clone().into());
spans.push(" ".into());
spans.push(format!("({})", template.uri_template).dim());
}
lines.push(spans.into());
}
lines.push(Line::from(""));
}
PlainHistoryCell { lines }
}
pub(crate) fn new_info_event(message: String, hint: Option<String>) -> PlainHistoryCell {
let mut line = vec!["".dim(), message.into()];
if let Some(hint) = hint {
@ -1981,6 +2155,54 @@ pub(crate) fn new_error_event(message: String) -> PlainHistoryCell {
PlainHistoryCell { lines }
}
/// A transient history cell that shows an animated spinner while the MCP
/// inventory RPC is in flight.
///
/// Inserted as the `active_cell` by `ChatWidget::add_mcp_output()` and removed
/// once the fetch completes. The app removes committed copies from transcript
/// history, while `ChatWidget::clear_mcp_inventory_loading()` only clears the
/// in-flight `active_cell`.
#[derive(Debug)]
pub(crate) struct McpInventoryLoadingCell {
    // When the spinner started; drives the animation frame below.
    start_time: Instant,
    // Whether the TUI has animations enabled; when false no tick is emitted.
    animations_enabled: bool,
}

impl McpInventoryLoadingCell {
    pub(crate) fn new(animations_enabled: bool) -> Self {
        Self {
            start_time: Instant::now(),
            animations_enabled,
        }
    }
}

impl HistoryCell for McpInventoryLoadingCell {
    fn display_lines(&self, _width: u16) -> Vec<Line<'static>> {
        let spans = vec![
            spinner(Some(self.start_time), self.animations_enabled),
            " ".into(),
            "Loading MCP inventory".bold(),
            "".dim(),
        ];
        vec![spans.into()]
    }

    fn transcript_animation_tick(&self) -> Option<u64> {
        // One animation frame per 50ms of elapsed time, only while
        // animations are enabled.
        self.animations_enabled
            .then(|| (self.start_time.elapsed().as_millis() / 50) as u64)
    }
}

/// Convenience constructor for [`McpInventoryLoadingCell`].
pub(crate) fn new_mcp_inventory_loading(animations_enabled: bool) -> McpInventoryLoadingCell {
    McpInventoryLoadingCell::new(animations_enabled)
}
/// Renders a completed (or interrupted) request_user_input exchange in history.
#[derive(Debug)]
pub(crate) struct RequestUserInputResultCell {
@ -2542,6 +2764,7 @@ mod tests {
use codex_core::config::Config;
use codex_core::config::ConfigBuilder;
use codex_core::config::types::McpServerConfig;
use codex_core::config::types::McpServerDisabledReason;
use codex_core::config::types::McpServerTransportConfig;
use codex_otel::RuntimeMetricTotals;
use codex_otel::RuntimeMetricsSummary;
@ -2961,6 +3184,61 @@ mod tests {
insta::assert_snapshot!(rendered);
}
// Snapshot of `/mcp` output for a server present in both local config and
// the app-server status response: local config enriches the row with
// transport details while the status response supplies tools and auth.
#[tokio::test]
async fn mcp_tools_output_from_statuses_renders_status_only_servers() {
let mut config = test_config().await;
// Local config supplies transport details (command/args) for enrichment.
// `enabled: false` here exercises the point that local enabled state is
// not rendered; the status response drives the listing.
let servers = HashMap::from([(
"plugin_docs".to_string(),
McpServerConfig {
transport: McpServerTransportConfig::Stdio {
command: "docs-server".to_string(),
args: vec!["--stdio".to_string()],
env: None,
env_vars: vec![],
cwd: None,
},
enabled: false,
required: false,
disabled_reason: Some(McpServerDisabledReason::Unknown),
startup_timeout_sec: None,
tool_timeout_sec: None,
enabled_tools: None,
disabled_tools: None,
scopes: None,
oauth_resource: None,
},
)]);
config
.mcp_servers
.set(servers)
.expect("test mcp servers should accept any configuration");
// The app-server status response is the source for tools and auth.
let statuses = vec![McpServerStatus {
name: "plugin_docs".to_string(),
tools: HashMap::from([(
"lookup".to_string(),
Tool {
description: None,
name: "lookup".to_string(),
title: None,
input_schema: serde_json::json!({"type": "object", "properties": {}}),
output_schema: None,
annotations: None,
icons: None,
meta: None,
},
)]),
resources: Vec::new(),
resource_templates: Vec::new(),
auth_status: codex_app_server_protocol::McpAuthStatus::Unsupported,
}];
let cell = new_mcp_tools_output_from_statuses(&config, &statuses);
let rendered = render_lines(&cell.display_lines(120)).join("\n");
insta::assert_snapshot!(rendered);
}
#[test]
fn empty_agent_message_cell_transcript() {
let cell = AgentMessageCell::new(vec![Line::default()], false);
@ -3188,6 +3466,14 @@ mod tests {
insta::assert_snapshot!(rendered);
}
#[test]
fn mcp_inventory_loading_snapshot() {
let cell = new_mcp_inventory_loading(/*animations_enabled*/ true);
let rendered = render_lines(&cell.display_lines(80)).join("\n");
insta::assert_snapshot!(rendered);
}
#[test]
fn completed_mcp_tool_call_success_snapshot() {
let invocation = McpInvocation {

View file

@ -0,0 +1,6 @@
---
source: tui_app_server/src/history_cell.rs
assertion_line: 3477
expression: rendered
---
• Loading MCP inventory…

View file

@ -0,0 +1,14 @@
---
source: tui_app_server/src/history_cell.rs
expression: rendered
---
/mcp
🔌 MCP Tools
• plugin_docs
• Auth: Unsupported
• Command: docs-server --stdio
• Tools: lookup
• Resources: (none)
• Resource templates: (none)