Show spawned agent model and effort in TUI (#14273)

- include the requested sub-agent model and reasoning effort in the
spawn begin event
- render that metadata next to the spawned agent name
and role in the TUI transcript

---------

Co-authored-by: Codex <noreply@openai.com>
This commit is contained in:
Ahmed Ibrahim 2026-03-10 17:46:25 -07:00 committed by Michael Bolin
parent 8a099b3dfb
commit 285b3a5143
12 changed files with 186 additions and 25 deletions

View file

@ -3015,10 +3015,16 @@
"description": "Identifier for the collab tool call.",
"type": "string"
},
"model": {
"type": "string"
},
"prompt": {
"description": "Initial prompt sent to the agent. Can be empty to prevent CoT leaking at the beginning.",
"type": "string"
},
"reasoning_effort": {
"$ref": "#/definitions/ReasoningEffort"
},
"sender_thread_id": {
"allOf": [
{
@ -3037,7 +3043,9 @@
},
"required": [
"call_id",
"model",
"prompt",
"reasoning_effort",
"sender_thread_id",
"type"
],
@ -9144,10 +9152,16 @@
"description": "Identifier for the collab tool call.",
"type": "string"
},
"model": {
"type": "string"
},
"prompt": {
"description": "Initial prompt sent to the agent. Can be empty to prevent CoT leaking at the beginning.",
"type": "string"
},
"reasoning_effort": {
"$ref": "#/definitions/ReasoningEffort"
},
"sender_thread_id": {
"allOf": [
{
@ -9166,7 +9180,9 @@
},
"required": [
"call_id",
"model",
"prompt",
"reasoning_effort",
"sender_thread_id",
"type"
],

View file

@ -4378,10 +4378,16 @@
"description": "Identifier for the collab tool call.",
"type": "string"
},
"model": {
"type": "string"
},
"prompt": {
"description": "Initial prompt sent to the agent. Can be empty to prevent CoT leaking at the beginning.",
"type": "string"
},
"reasoning_effort": {
"$ref": "#/definitions/v2/ReasoningEffort"
},
"sender_thread_id": {
"allOf": [
{
@ -4400,7 +4406,9 @@
},
"required": [
"call_id",
"model",
"prompt",
"reasoning_effort",
"sender_thread_id",
"type"
],

View file

@ -6180,10 +6180,16 @@
"description": "Identifier for the collab tool call.",
"type": "string"
},
"model": {
"type": "string"
},
"prompt": {
"description": "Initial prompt sent to the agent. Can be empty to prevent CoT leaking at the beginning.",
"type": "string"
},
"reasoning_effort": {
"$ref": "#/definitions/ReasoningEffort"
},
"sender_thread_id": {
"allOf": [
{
@ -6202,7 +6208,9 @@
},
"required": [
"call_id",
"model",
"prompt",
"reasoning_effort",
"sender_thread_id",
"type"
],

View file

@ -1,6 +1,7 @@
// GENERATED CODE! DO NOT MODIFY BY HAND!
// This file was generated by [ts-rs](https://github.com/Aleph-Alpha/ts-rs). Do not edit this file manually.
import type { ReasoningEffort } from "./ReasoningEffort";
import type { ThreadId } from "./ThreadId";
export type CollabAgentSpawnBeginEvent = {
@ -16,4 +17,4 @@ sender_thread_id: ThreadId,
* Initial prompt sent to the agent. Can be empty to prevent CoT leaking at the
* beginning.
*/
prompt: string, };
prompt: string, model: string, reasoning_effort: ReasoningEffort, };

View file

@ -157,6 +157,8 @@ mod spawn {
call_id: call_id.clone(),
sender_thread_id: session.conversation_id,
prompt: prompt.clone(),
model: args.model.clone().unwrap_or_default(),
reasoning_effort: args.reasoning_effort.unwrap_or_default(),
}
.into(),
)

View file

@ -698,6 +698,7 @@ impl EventProcessor for EventProcessorWithHumanOutput {
call_id,
sender_thread_id: _,
prompt,
..
}) => {
ts_msg!(
self,

View file

@ -34,6 +34,7 @@ use codex_protocol::ThreadId;
use codex_protocol::config_types::ModeKind;
use codex_protocol::mcp::CallToolResult;
use codex_protocol::models::WebSearchAction;
use codex_protocol::openai_models::ReasoningEffort as ReasoningEffortConfig;
use codex_protocol::plan_tool::PlanItemArg;
use codex_protocol::plan_tool::StepStatus;
use codex_protocol::plan_tool::UpdatePlanArgs;
@ -547,6 +548,8 @@ fn collab_spawn_begin_and_end_emit_item_events() {
call_id: "call-10".to_string(),
sender_thread_id,
prompt: prompt.clone(),
model: "gpt-5".to_string(),
reasoning_effort: ReasoningEffortConfig::default(),
}),
);
let begin_events = ep.collect_thread_events(&begin);

View file

@ -3132,6 +3132,8 @@ pub struct CollabAgentSpawnBeginEvent {
/// Initial prompt sent to the agent. Can be empty to prevent CoT leaking at the
/// beginning.
pub prompt: String,
pub model: String,
pub reasoning_effort: ReasoningEffortConfig,
}
#[derive(Debug, Clone, Deserialize, Serialize, PartialEq, Eq, JsonSchema, TS)]

View file

@ -101,6 +101,7 @@ use codex_protocol::protocol::AgentReasoningRawContentEvent;
use codex_protocol::protocol::ApplyPatchApprovalRequestEvent;
use codex_protocol::protocol::BackgroundEventEvent;
use codex_protocol::protocol::CodexErrorInfo;
use codex_protocol::protocol::CollabAgentSpawnBeginEvent;
use codex_protocol::protocol::CreditsSnapshot;
use codex_protocol::protocol::DeprecationNoticeEvent;
use codex_protocol::protocol::ErrorEvent;
@ -579,6 +580,7 @@ pub(crate) struct ChatWidget {
// Latest completed user-visible Codex output that `/copy` should place on the clipboard.
last_copyable_output: Option<String>,
running_commands: HashMap<String, RunningCommand>,
pending_collab_spawn_requests: HashMap<String, multi_agents::SpawnRequestSummary>,
suppressed_exec_calls: HashSet<String>,
skills_all: Vec<ProtocolSkillMetadata>,
skills_initial_state: Option<HashMap<PathBuf, bool>>,
@ -3243,6 +3245,7 @@ impl ChatWidget {
plan_stream_controller: None,
last_copyable_output: None,
running_commands: HashMap::new(),
pending_collab_spawn_requests: HashMap::new(),
suppressed_exec_calls: HashSet::new(),
last_unified_wait: None,
unified_exec_wait_streak: None,
@ -3427,6 +3430,7 @@ impl ChatWidget {
plan_stream_controller: None,
last_copyable_output: None,
running_commands: HashMap::new(),
pending_collab_spawn_requests: HashMap::new(),
suppressed_exec_calls: HashSet::new(),
last_unified_wait: None,
unified_exec_wait_streak: None,
@ -3603,6 +3607,7 @@ impl ChatWidget {
plan_stream_controller: None,
last_copyable_output: None,
running_commands: HashMap::new(),
pending_collab_spawn_requests: HashMap::new(),
suppressed_exec_calls: HashSet::new(),
last_unified_wait: None,
unified_exec_wait_streak: None,
@ -4999,8 +5004,24 @@ impl ChatWidget {
}
EventMsg::ExitedReviewMode(review) => self.on_exited_review_mode(review),
EventMsg::ContextCompacted(_) => self.on_agent_message("Context compacted".to_owned()),
EventMsg::CollabAgentSpawnBegin(_) => {}
EventMsg::CollabAgentSpawnEnd(ev) => self.on_collab_event(multi_agents::spawn_end(ev)),
EventMsg::CollabAgentSpawnBegin(CollabAgentSpawnBeginEvent {
call_id,
model,
reasoning_effort,
..
}) => {
self.pending_collab_spawn_requests.insert(
call_id,
multi_agents::SpawnRequestSummary {
model,
reasoning_effort,
},
);
}
EventMsg::CollabAgentSpawnEnd(ev) => {
let spawn_request = self.pending_collab_spawn_requests.remove(&ev.call_id);
self.on_collab_event(multi_agents::spawn_end(ev, spawn_request.as_ref()));
}
EventMsg::CollabAgentInteractionBegin(_) => {}
EventMsg::CollabAgentInteractionEnd(ev) => {
self.on_collab_event(multi_agents::interaction_end(ev))

View file

@ -58,9 +58,12 @@ use codex_protocol::protocol::AgentMessageDeltaEvent;
use codex_protocol::protocol::AgentMessageEvent;
use codex_protocol::protocol::AgentReasoningDeltaEvent;
use codex_protocol::protocol::AgentReasoningEvent;
use codex_protocol::protocol::AgentStatus;
use codex_protocol::protocol::ApplyPatchApprovalRequestEvent;
use codex_protocol::protocol::BackgroundEventEvent;
use codex_protocol::protocol::CodexErrorInfo;
use codex_protocol::protocol::CollabAgentSpawnBeginEvent;
use codex_protocol::protocol::CollabAgentSpawnEndEvent;
use codex_protocol::protocol::CreditsSnapshot;
use codex_protocol::protocol::Event;
use codex_protocol::protocol::EventMsg;
@ -1838,6 +1841,7 @@ async fn make_chatwidget_manual(
plan_stream_controller: None,
last_copyable_output: None,
running_commands: HashMap::new(),
pending_collab_spawn_requests: HashMap::new(),
suppressed_exec_calls: HashSet::new(),
skills_all: Vec::new(),
skills_initial_state: None,
@ -2011,6 +2015,48 @@ fn lines_to_single_string(lines: &[ratatui::text::Line<'static>]) -> String {
s
}
/// End-to-end check that the spawn-end transcript line includes the model
/// and reasoning effort requested in the preceding spawn-begin event.
#[tokio::test]
async fn collab_spawn_end_shows_requested_model_and_effort() {
    let (mut chat, mut rx, _ops) = make_chatwidget_manual(None).await;
    let sender_thread_id = ThreadId::new();
    let spawned_thread_id = ThreadId::new();
    // Spawn-begin carries the requested model/effort; the widget stashes it
    // keyed by call_id until the matching end event arrives.
    chat.handle_codex_event(Event {
        id: "spawn-begin".into(),
        msg: EventMsg::CollabAgentSpawnBegin(CollabAgentSpawnBeginEvent {
            call_id: "call-spawn".to_string(),
            sender_thread_id,
            prompt: "Explore the repo".to_string(),
            model: "gpt-5".to_string(),
            reasoning_effort: ReasoningEffortConfig::High,
        }),
    });
    // Spawn-end with the same call_id should pick up the stashed metadata.
    chat.handle_codex_event(Event {
        id: "spawn-end".into(),
        msg: EventMsg::CollabAgentSpawnEnd(CollabAgentSpawnEndEvent {
            call_id: "call-spawn".to_string(),
            sender_thread_id,
            new_thread_id: Some(spawned_thread_id),
            new_agent_nickname: Some("Robie".to_string()),
            new_agent_role: Some("explorer".to_string()),
            prompt: "Explore the repo".to_string(),
            status: AgentStatus::PendingInit,
        }),
    });
    // Flatten all emitted history cells into one string for substring matching.
    let cells = drain_insert_history(&mut rx);
    let rendered = cells
        .iter()
        .map(|lines| lines_to_single_string(lines))
        .collect::<Vec<_>>()
        .join("\n");
    assert!(
        rendered.contains("Spawned Robie [explorer] (gpt-5 high)"),
        "expected spawn line to include agent metadata and requested model, got {rendered:?}"
    );
}
fn status_line_text(chat: &ChatWidget) -> Option<String> {
chat.status_line_text()
}

View file

@ -2,6 +2,7 @@ use crate::history_cell::PlainHistoryCell;
use crate::render::line_utils::prefix_lines;
use crate::text_formatting::truncate_text;
use codex_protocol::ThreadId;
use codex_protocol::openai_models::ReasoningEffort as ReasoningEffortConfig;
use codex_protocol::protocol::AgentStatus;
use codex_protocol::protocol::CollabAgentInteractionEndEvent;
use codex_protocol::protocol::CollabAgentRef;
@ -36,6 +37,12 @@ struct AgentLabel<'a> {
role: Option<&'a str>,
}
/// Model and reasoning effort recorded from a collab spawn-begin event,
/// kept so the matching spawn-end transcript line can display what was
/// requested for the spawned agent.
#[derive(Debug, Clone, PartialEq, Eq)]
pub(crate) struct SpawnRequestSummary {
    /// Requested model name; may be empty when the caller did not specify one.
    pub(crate) model: String,
    /// Requested reasoning effort for the spawned agent.
    pub(crate) reasoning_effort: ReasoningEffortConfig,
}
pub(crate) fn agent_picker_status_dot_spans(is_closed: bool) -> Vec<Span<'static>> {
let dot = if is_closed {
"".into()
@ -74,7 +81,10 @@ pub(crate) fn sort_agent_picker_threads(agent_threads: &mut [(ThreadId, AgentPic
});
}
pub(crate) fn spawn_end(ev: CollabAgentSpawnEndEvent) -> PlainHistoryCell {
pub(crate) fn spawn_end(
ev: CollabAgentSpawnEndEvent,
spawn_request: Option<&SpawnRequestSummary>,
) -> PlainHistoryCell {
let CollabAgentSpawnEndEvent {
call_id: _,
sender_thread_id: _,
@ -93,6 +103,7 @@ pub(crate) fn spawn_end(ev: CollabAgentSpawnEndEvent) -> PlainHistoryCell {
nickname: new_agent_nickname.as_deref(),
role: new_agent_role.as_deref(),
},
spawn_request,
),
None => title_text("Agent spawn failed"),
};
@ -122,6 +133,7 @@ pub(crate) fn interaction_end(ev: CollabAgentInteractionEndEvent) -> PlainHistor
nickname: receiver_agent_nickname.as_deref(),
role: receiver_agent_role.as_deref(),
},
None,
);
let mut details = Vec::new();
@ -141,7 +153,7 @@ pub(crate) fn waiting_begin(ev: CollabWaitingBeginEvent) -> PlainHistoryCell {
let receiver_agents = merge_wait_receivers(&receiver_thread_ids, receiver_agents);
let title = match receiver_agents.as_slice() {
[receiver] => title_with_agent("Waiting for", agent_label_from_ref(receiver)),
[receiver] => title_with_agent("Waiting for", agent_label_from_ref(receiver), None),
[] => title_text("Waiting for agents"),
_ => title_text(format!("Waiting for {} agents", receiver_agents.len())),
};
@ -187,6 +199,7 @@ pub(crate) fn close_end(ev: CollabCloseEndEvent) -> PlainHistoryCell {
nickname: receiver_agent_nickname.as_deref(),
role: receiver_agent_role.as_deref(),
},
None,
),
Vec::new(),
)
@ -209,6 +222,7 @@ pub(crate) fn resume_begin(ev: CollabResumeBeginEvent) -> PlainHistoryCell {
nickname: receiver_agent_nickname.as_deref(),
role: receiver_agent_role.as_deref(),
},
None,
),
Vec::new(),
)
@ -232,6 +246,7 @@ pub(crate) fn resume_end(ev: CollabResumeEndEvent) -> PlainHistoryCell {
nickname: receiver_agent_nickname.as_deref(),
role: receiver_agent_role.as_deref(),
},
None,
),
vec![status_summary_line(&status)],
)
@ -249,9 +264,14 @@ fn title_text(title: impl Into<String>) -> Line<'static> {
title_spans_line(vec![Span::from(title.into()).bold()])
}
fn title_with_agent(prefix: &str, agent: AgentLabel<'_>) -> Line<'static> {
fn title_with_agent(
prefix: &str,
agent: AgentLabel<'_>,
spawn_request: Option<&SpawnRequestSummary>,
) -> Line<'static> {
let mut spans = vec![Span::from(format!("{prefix} ")).bold()];
spans.extend(agent_label_spans(agent));
spans.extend(spawn_request_spans(spawn_request));
title_spans_line(spans)
}
@ -298,6 +318,25 @@ fn agent_label_spans(agent: AgentLabel<'_>) -> Vec<Span<'static>> {
spans
}
/// Render the dimmed-space plus magenta "(model effort)" suffix spans for a
/// spawn title line.
///
/// Returns no spans when there is no recorded request, or when the request
/// carries neither a model name nor a non-default reasoning effort.
fn spawn_request_spans(spawn_request: Option<&SpawnRequestSummary>) -> Vec<Span<'static>> {
    match spawn_request {
        None => Vec::new(),
        Some(request) => {
            let model = request.model.trim();
            // Nothing worth showing: no model and the default effort level.
            if model.is_empty() && request.reasoning_effort == ReasoningEffortConfig::default() {
                return Vec::new();
            }
            let details = if model.is_empty() {
                format!("({})", request.reasoning_effort)
            } else {
                format!("({model} {})", request.reasoning_effort)
            };
            vec![Span::from(" ").dim(), Span::from(details).magenta()]
        }
    }
}
fn prompt_line(prompt: &str) -> Option<Line<'static>> {
let trimmed = prompt.trim();
if trimmed.is_empty() {
@ -460,15 +499,21 @@ mod tests {
let bob_id = ThreadId::from_string("00000000-0000-0000-0000-000000000003")
.expect("valid bob thread id");
let spawn = spawn_end(CollabAgentSpawnEndEvent {
call_id: "call-spawn".to_string(),
sender_thread_id,
new_thread_id: Some(robie_id),
new_agent_nickname: Some("Robie".to_string()),
new_agent_role: Some("explorer".to_string()),
prompt: "Compute 11! and reply with just the integer result.".to_string(),
status: AgentStatus::PendingInit,
});
let spawn = spawn_end(
CollabAgentSpawnEndEvent {
call_id: "call-spawn".to_string(),
sender_thread_id,
new_thread_id: Some(robie_id),
new_agent_nickname: Some("Robie".to_string()),
new_agent_role: Some("explorer".to_string()),
prompt: "Compute 11! and reply with just the integer result.".to_string(),
status: AgentStatus::PendingInit,
},
Some(&SpawnRequestSummary {
model: "gpt-5".to_string(),
reasoning_effort: ReasoningEffortConfig::High,
}),
);
let send = interaction_end(CollabAgentInteractionEndEvent {
call_id: "call-send".to_string(),
@ -540,15 +585,21 @@ mod tests {
.expect("valid sender thread id");
let robie_id = ThreadId::from_string("00000000-0000-0000-0000-000000000002")
.expect("valid robie thread id");
let cell = spawn_end(CollabAgentSpawnEndEvent {
call_id: "call-spawn".to_string(),
sender_thread_id,
new_thread_id: Some(robie_id),
new_agent_nickname: Some("Robie".to_string()),
new_agent_role: Some("explorer".to_string()),
prompt: String::new(),
status: AgentStatus::PendingInit,
});
let cell = spawn_end(
CollabAgentSpawnEndEvent {
call_id: "call-spawn".to_string(),
sender_thread_id,
new_thread_id: Some(robie_id),
new_agent_nickname: Some("Robie".to_string()),
new_agent_role: Some("explorer".to_string()),
prompt: String::new(),
status: AgentStatus::PendingInit,
},
Some(&SpawnRequestSummary {
model: "gpt-5".to_string(),
reasoning_effort: ReasoningEffortConfig::High,
}),
);
let lines = cell.display_lines(200);
let title = &lines[0];
@ -558,6 +609,8 @@ mod tests {
assert_eq!(title.spans[4].content.as_ref(), "[explorer]");
assert_eq!(title.spans[4].style.fg, None);
assert!(!title.spans[4].style.add_modifier.contains(Modifier::DIM));
assert_eq!(title.spans[6].content.as_ref(), "(gpt-5 high)");
assert_eq!(title.spans[6].style.fg, Some(Color::Magenta));
}
fn cell_to_text(cell: &PlainHistoryCell) -> String {

View file

@ -2,7 +2,7 @@
source: tui/src/multi_agents.rs
expression: snapshot
---
• Spawned Robie [explorer]
• Spawned Robie [explorer] (gpt-5 high)
└ Compute 11! and reply with just the integer result.
• Sent input to Robie [explorer]