chore(instructions): Remove unread SessionMeta.instructions field (#9423)

### Description
- Remove the now-unused `instructions` field from the session metadata
to simplify SessionMeta and stop propagating transient instruction text
through the rollout recorder API. This was only saving
user_instructions, and was never being read.
- Stop passing user instructions into the rollout writer at session
creation so the rollout header only contains canonical session metadata.

### Testing

- Ran `just fmt` which completed successfully.
- Ran `just fix -p codex-protocol`, `just fix -p codex-core`, `just fix
-p codex-app-server`, `just fix -p codex-tui`, and `just fix -p
codex-tui2` which completed (Clippy fixes applied) as part of
verification.
- Ran `cargo test -p codex-protocol` which passed (28 tests).
- Ran `cargo test -p codex-core` which showed failures in a small set of
tests (not caused by the protocol type change directly):
`default_client::tests::test_create_client_sets_default_headers`,
several `models_manager::manager::tests::refresh_available_models_*`,
and `shell_snapshot::tests::linux_sh_snapshot_includes_sections` (these
tests failed in this CI run).
- Ran `cargo test -p codex-app-server` which reported several failing
integration tests (including
`suite::codex_message_processor_flow::test_codex_jsonrpc_conversation_flow`,
`suite::output_schema::send_user_turn_*`, and
`suite::user_agent::get_user_agent_returns_current_codex_user_agent`).
- `cargo test -p codex-tui` and `cargo test -p codex-tui2` were
attempted but aborted due to disk space exhaustion (`No space left on
device`).

------
[Codex
Task](https://chatgpt.com/codex/tasks/task_i_696bd8ce632483228d298cf07c7eb41c)
This commit is contained in:
Dylan Hurd 2026-01-17 16:02:28 -08:00 committed by GitHub
parent bffe9b33e9
commit 80d7a5d7fe
No known key found for this signature in database
GPG key ID: B5690EEEBB952194
10 changed files with 1 addition and 33 deletions

View file

@@ -4272,7 +4272,6 @@ mod tests {
"cwd": "/",
"originator": "codex",
"cli_version": "0.0.0",
"instructions": null,
"model_provider": "test-provider"
}),
json!({

View file

@@ -57,7 +57,6 @@ pub fn create_fake_rollout(
cwd: PathBuf::from("/"),
originator: "codex".to_string(),
cli_version: "0.0.0".to_string(),
instructions: None,
source: SessionSource::Cli,
model_provider: model_provider.map(str::to_string),
};
@@ -135,7 +134,6 @@ pub fn create_fake_rollout_with_text_elements(
cwd: PathBuf::from("/"),
originator: "codex".to_string(),
cli_version: "0.0.0".to_string(),
instructions: None,
source: SessionSource::Cli,
model_provider: model_provider.map(str::to_string),
};

View file

@@ -598,12 +598,7 @@ impl Session {
let conversation_id = ThreadId::default();
(
conversation_id,
RolloutRecorderParams::new(
conversation_id,
forked_from_id,
session_configuration.user_instructions.clone(),
session_source,
),
RolloutRecorderParams::new(conversation_id, forked_from_id, session_source),
)
}
InitialHistory::Resumed(resumed_history) => (

View file

@@ -58,7 +58,6 @@ pub enum RolloutRecorderParams {
Create {
conversation_id: ThreadId,
forked_from_id: Option<ThreadId>,
instructions: Option<String>,
source: SessionSource,
},
Resume {
@@ -81,13 +80,11 @@ impl RolloutRecorderParams {
pub fn new(
conversation_id: ThreadId,
forked_from_id: Option<ThreadId>,
instructions: Option<String>,
source: SessionSource,
) -> Self {
Self::Create {
conversation_id,
forked_from_id,
instructions,
source,
}
}
@@ -162,7 +159,6 @@ impl RolloutRecorder {
RolloutRecorderParams::Create {
conversation_id,
forked_from_id,
instructions,
source,
} => {
let LogFileInfo {
@@ -190,7 +186,6 @@ impl RolloutRecorder {
cwd: config.cwd.clone(),
originator: originator().value,
cli_version: env!("CARGO_PKG_VERSION").to_string(),
instructions,
source,
model_provider: Some(config.model_provider_id.clone()),
}),

View file

@@ -86,7 +86,6 @@ fn write_session_file_with_provider(
let mut payload = serde_json::json!({
"id": uuid,
"timestamp": ts_str,
"instructions": null,
"cwd": ".",
"originator": "test_originator",
"cli_version": "test_version",
@@ -202,7 +201,6 @@ async fn test_list_conversations_latest_first() {
let head_3 = vec![serde_json::json!({
"id": u3,
"timestamp": "2025-01-03T12-00-00",
"instructions": null,
"cwd": ".",
"originator": "test_originator",
"cli_version": "test_version",
@@ -212,7 +210,6 @@ async fn test_list_conversations_latest_first() {
let head_2 = vec![serde_json::json!({
"id": u2,
"timestamp": "2025-01-02T12-00-00",
"instructions": null,
"cwd": ".",
"originator": "test_originator",
"cli_version": "test_version",
@@ -222,7 +219,6 @@ async fn test_list_conversations_latest_first() {
let head_1 = vec![serde_json::json!({
"id": u1,
"timestamp": "2025-01-01T12-00-00",
"instructions": null,
"cwd": ".",
"originator": "test_originator",
"cli_version": "test_version",
@@ -343,7 +339,6 @@ async fn test_pagination_cursor() {
let head_5 = vec![serde_json::json!({
"id": u5,
"timestamp": "2025-03-05T09-00-00",
"instructions": null,
"cwd": ".",
"originator": "test_originator",
"cli_version": "test_version",
@@ -353,7 +348,6 @@ async fn test_pagination_cursor() {
let head_4 = vec![serde_json::json!({
"id": u4,
"timestamp": "2025-03-04T09-00-00",
"instructions": null,
"cwd": ".",
"originator": "test_originator",
"cli_version": "test_version",
@@ -411,7 +405,6 @@ async fn test_pagination_cursor() {
let head_3 = vec![serde_json::json!({
"id": u3,
"timestamp": "2025-03-03T09-00-00",
"instructions": null,
"cwd": ".",
"originator": "test_originator",
"cli_version": "test_version",
@@ -421,7 +414,6 @@ async fn test_pagination_cursor() {
let head_2 = vec![serde_json::json!({
"id": u2,
"timestamp": "2025-03-02T09-00-00",
"instructions": null,
"cwd": ".",
"originator": "test_originator",
"cli_version": "test_version",
@@ -473,7 +465,6 @@ async fn test_pagination_cursor() {
let head_1 = vec![serde_json::json!({
"id": u1,
"timestamp": "2025-03-01T09-00-00",
"instructions": null,
"cwd": ".",
"originator": "test_originator",
"cli_version": "test_version",
@@ -531,7 +522,6 @@ async fn test_get_thread_contents() {
let expected_head = vec![serde_json::json!({
"id": uuid,
"timestamp": ts,
"instructions": null,
"cwd": ".",
"originator": "test_originator",
"cli_version": "test_version",
@@ -558,7 +548,6 @@ async fn test_get_thread_contents() {
"payload": {
"id": uuid,
"timestamp": ts,
"instructions": null,
"cwd": ".",
"originator": "test_originator",
"cli_version": "test_version",
@@ -643,7 +632,6 @@ async fn test_updated_at_uses_file_mtime() -> Result<()> {
id: conversation_id,
forked_from_id: None,
timestamp: ts.to_string(),
instructions: None,
cwd: ".".into(),
originator: "test_originator".into(),
cli_version: "test_version".into(),
@@ -751,7 +739,6 @@ async fn test_stable_ordering_same_second_pagination() {
vec![serde_json::json!({
"id": u,
"timestamp": ts,
"instructions": null,
"cwd": ".",
"originator": "test_originator",
"cli_version": "test_version",

View file

@@ -515,7 +515,6 @@ async fn review_input_isolated_from_parent_history() {
"payload": {
"id": convo_id,
"timestamp": "2024-01-01T00:00:00Z",
"instructions": null,
"cwd": ".",
"originator": "test_originator",
"cli_version": "test_version",

View file

@@ -25,7 +25,6 @@ fn write_minimal_rollout_with_id(codex_home: &Path, id: Uuid) -> PathBuf {
"payload": {
"id": id,
"timestamp": "2024-01-01T00:00:00Z",
"instructions": null,
"cwd": ".",
"originator": "test",
"cli_version": "test",

View file

@@ -1492,7 +1492,6 @@ pub struct SessionMeta {
pub cwd: PathBuf,
pub originator: String,
pub cli_version: String,
pub instructions: Option<String>,
#[serde(default)]
pub source: SessionSource,
pub model_provider: Option<String>,
@@ -1507,7 +1506,6 @@ impl Default for SessionMeta {
cwd: PathBuf::new(),
originator: String::new(),
cli_version: String::new(),
instructions: None,
source: SessionSource::default(),
model_provider: None,
}

View file

@@ -1362,7 +1362,6 @@ mod tests {
"cwd": cwd,
"originator": "user",
"cli_version": "0.0.0",
"instructions": null,
"source": "Cli",
"model_provider": "openai",
}

View file

@@ -1362,7 +1362,6 @@ mod tests {
"cwd": cwd,
"originator": "user",
"cli_version": "0.0.0",
"instructions": null,
"source": "Cli",
"model_provider": "openai",
}