From fd4a67352542c8d95c8599f0914ba351f8ca1856 Mon Sep 17 00:00:00 2001 From: Channing Conger Date: Tue, 10 Mar 2026 23:46:05 -0700 Subject: [PATCH] Responses: set x-client-request-id as conversation_id when talking to responses (#14312) Right now we're sending the header session_id to responses which is ignored/dropped. This sets a useful x-client-request-id to the conversation_id. --- codex-rs/codex-api/src/endpoint/responses.rs | 3 +++ codex-rs/core/src/client.rs | 8 +++++--- codex-rs/core/tests/suite/client_websockets.rs | 7 +++++++ 3 files changed, 15 insertions(+), 3 deletions(-) diff --git a/codex-rs/codex-api/src/endpoint/responses.rs b/codex-rs/codex-api/src/endpoint/responses.rs index 0dff795b0..d21208619 100644 --- a/codex-rs/codex-api/src/endpoint/responses.rs +++ b/codex-rs/codex-api/src/endpoint/responses.rs @@ -75,6 +75,9 @@ impl ResponsesClient { } let mut headers = extra_headers; + if let Some(ref conv_id) = conversation_id { + insert_header(&mut headers, "x-client-request-id", conv_id); + } headers.extend(build_conversation_headers(conversation_id)); if let Some(subagent) = subagent_header(&session_source) { insert_header(&mut headers, "x-openai-subagent", &subagent); diff --git a/codex-rs/core/src/client.rs b/codex-rs/core/src/client.rs index fa0107005..9fb598117 100644 --- a/codex-rs/core/src/client.rs +++ b/codex-rs/core/src/client.rs @@ -487,14 +487,16 @@ impl ModelClient { turn_metadata_header: Option<&str>, ) -> ApiHeaderMap { let turn_metadata_header = parse_turn_metadata_header(turn_metadata_header); + let conversation_id = self.state.conversation_id.to_string(); let mut headers = build_responses_headers( self.state.beta_features_header.as_deref(), turn_state, turn_metadata_header.as_ref(), ); - headers.extend(build_conversation_headers(Some( - self.state.conversation_id.to_string(), - ))); + if let Ok(header_value) = HeaderValue::from_str(&conversation_id) { + headers.insert("x-client-request-id", header_value); + } + 
headers.extend(build_conversation_headers(Some(conversation_id))); headers.insert( OPENAI_BETA_HEADER, HeaderValue::from_static(RESPONSES_WEBSOCKETS_V2_BETA_HEADER_VALUE), diff --git a/codex-rs/core/tests/suite/client_websockets.rs b/codex-rs/core/tests/suite/client_websockets.rs index 0850f6e54..2c7b4d48e 100755 --- a/codex-rs/core/tests/suite/client_websockets.rs +++ b/codex-rs/core/tests/suite/client_websockets.rs @@ -49,10 +49,12 @@ use tracing_test::traced_test; const MODEL: &str = "gpt-5.2-codex"; const OPENAI_BETA_HEADER: &str = "OpenAI-Beta"; const WS_V2_BETA_HEADER_VALUE: &str = "responses_websockets=2026-02-06"; +const X_CLIENT_REQUEST_ID_HEADER: &str = "x-client-request-id"; struct WebsocketTestHarness { _codex_home: TempDir, client: ModelClient, + conversation_id: ThreadId, model_info: ModelInfo, effort: Option, summary: ReasoningSummary, @@ -88,6 +90,10 @@ async fn responses_websocket_streams_request() { handshake.header(OPENAI_BETA_HEADER), Some(WS_V2_BETA_HEADER_VALUE.to_string()) ); + assert_eq!( + handshake.header(X_CLIENT_REQUEST_ID_HEADER), + Some(harness.conversation_id.to_string()) + ); server.shutdown().await; } @@ -1606,6 +1612,7 @@ async fn websocket_harness_with_options( WebsocketTestHarness { _codex_home: codex_home, client, + conversation_id, model_info, effort, summary,