Override base instructions using ModelInfo (#7754)

Making sure we can override the base instructions supplied by a remote `ModelInfo`.
This commit is contained in:
Ahmed Ibrahim 2025-12-08 17:30:42 -08:00 committed by GitHub
parent 0f2b589d5e
commit cacfd003ac
No known key found for this signature in database
GPG key ID: B5690EEEBB952194
5 changed files with 107 additions and 0 deletions

View file

@ -77,6 +77,7 @@ async fn models_client_hits_models_endpoint() {
supported_in_api: true,
priority: 1,
upgrade: None,
base_instructions: None,
}],
etag: String::new(),
};

View file

@ -102,6 +102,7 @@ impl ModelFamily {
if model.slug == self.slug {
self.default_reasoning_effort = Some(model.default_reasoning_level);
self.shell_type = model.shell_type;
self.base_instructions = model.base_instructions.unwrap_or(self.base_instructions);
}
}
self
@ -357,6 +358,7 @@ mod tests {
supported_in_api: true,
priority: 1,
upgrade: None,
base_instructions: None,
}
}

View file

@ -216,6 +216,7 @@ mod tests {
"supported_in_api": true,
"priority": priority,
"upgrade": null,
"base_instructions": null,
}))
.expect("valid model")
}

View file

@ -25,6 +25,7 @@ use core_test_support::responses::ev_completed;
use core_test_support::responses::ev_function_call;
use core_test_support::responses::ev_response_created;
use core_test_support::responses::mount_models_once;
use core_test_support::responses::mount_sse_once;
use core_test_support::responses::mount_sse_sequence;
use core_test_support::responses::sse;
use core_test_support::skip_if_no_network;
@ -67,6 +68,7 @@ async fn remote_models_remote_model_uses_unified_exec() -> Result<()> {
supported_in_api: true,
priority: 1,
upgrade: None,
base_instructions: None,
};
let models_mock = mount_models_once(
@ -167,6 +169,105 @@ async fn remote_models_remote_model_uses_unified_exec() -> Result<()> {
Ok(())
}
// End-to-end check that a remote model's `base_instructions` override the
// client's default instructions: the value advertised by the `/models`
// endpoint must be sent verbatim as `instructions` in the completion request.
#[tokio::test(flavor = "multi_thread", worker_threads = 2)]
async fn remote_models_apply_remote_base_instructions() -> Result<()> {
    // These tests require network access and must not run inside a sandbox.
    skip_if_no_network!(Ok(()));
    skip_if_sandbox!(Ok(()));
    let server = MockServer::builder()
        // Raise the body print limit so large request bodies are visible in
        // mock-mismatch diagnostics.
        .body_print_limit(BodyPrintLimit::Limited(80_000))
        .start()
        .await;
    let model = "test-gpt-5-remote";
    // The exact string we expect to be echoed back in the request body.
    let remote_base = "Use the remote base instructions only.";
    // A remote model definition that carries custom base instructions.
    let remote_model = ModelInfo {
        slug: model.to_string(),
        display_name: "Parallel Remote".to_string(),
        description: Some("A remote model with custom instructions".to_string()),
        default_reasoning_level: ReasoningEffort::Medium,
        supported_reasoning_levels: vec![ReasoningEffortPreset {
            effort: ReasoningEffort::Medium,
            description: ReasoningEffort::Medium.to_string(),
        }],
        shell_type: ConfigShellToolType::ShellCommand,
        visibility: ModelVisibility::List,
        minimal_client_version: ClientVersion(0, 1, 0),
        supported_in_api: true,
        priority: 1,
        upgrade: None,
        base_instructions: Some(remote_base.to_string()),
    };
    // Serve the remote model from the mocked `/models` endpoint.
    mount_models_once(
        &server,
        ModelsResponse {
            models: vec![remote_model],
            etag: String::new(),
        },
    )
    .await;
    // Mock a minimal SSE completion so the turn can finish; we only care
    // about inspecting the request this mock receives.
    let response_mock = mount_sse_once(
        &server,
        sse(vec![
            ev_response_created("resp-1"),
            ev_assistant_message("msg-1", "done"),
            ev_completed("resp-1"),
        ]),
    )
    .await;
    // Start with a different (local) model so switching to the remote model
    // is an actual override, and enable the remote-models feature.
    let mut builder = test_codex().with_config(|config| {
        config.features.enable(Feature::RemoteModels);
        config.model = "gpt-5.1".to_string();
    });
    let TestCodex {
        codex,
        cwd,
        conversation_manager,
        ..
    } = builder.build(&server).await?;
    // The remote model list is fetched asynchronously; wait until the slug
    // is known before switching to it.
    let models_manager = conversation_manager.get_models_manager();
    wait_for_model_available(&models_manager, model).await;
    // Switch the turn context to the remote model.
    codex
        .submit(Op::OverrideTurnContext {
            cwd: None,
            approval_policy: None,
            sandbox_policy: None,
            model: Some(model.to_string()),
            effort: None,
            summary: None,
        })
        .await?;
    // Run one user turn against the remote model.
    codex
        .submit(Op::UserTurn {
            items: vec![UserInput::Text {
                text: "hello remote".into(),
            }],
            final_output_json_schema: None,
            cwd: cwd.path().to_path_buf(),
            approval_policy: AskForApproval::Never,
            sandbox_policy: SandboxPolicy::DangerFullAccess,
            model: model.to_string(),
            effort: None,
            summary: ReasoningSummary::Auto,
        })
        .await?;
    wait_for_event(&codex, |event| matches!(event, EventMsg::TaskComplete(_))).await;
    // The completion mock should have received exactly one request whose
    // `instructions` field is the remote base instructions, unchanged.
    let body = response_mock.single_request().body_json();
    let instructions = body["instructions"].as_str().unwrap();
    assert_eq!(instructions, remote_base);
    Ok(())
}
async fn wait_for_model_available(manager: &Arc<ModelsManager>, slug: &str) -> ModelPreset {
let deadline = Instant::now() + Duration::from_secs(2);
loop {

View file

@ -135,6 +135,8 @@ pub struct ModelInfo {
pub priority: i32,
#[serde(default)]
pub upgrade: Option<String>,
#[serde(default)]
pub base_instructions: Option<String>,
}
/// Response wrapper for `/models`.