feat: bind package manager (#13571)

This commit is contained in:
jif-oai 2026-03-05 11:57:13 +00:00 committed by GitHub
parent b4cb989563
commit f304b2ef62
No known key found for this signature in database
GPG key ID: B5690EEEBB952194
12 changed files with 793 additions and 102 deletions

2
codex-rs/Cargo.lock generated
View file

@ -1548,6 +1548,7 @@ dependencies = [
"thiserror 2.0.18",
"tokio",
"url",
"which",
"wiremock",
"zip",
]
@ -2212,6 +2213,7 @@ dependencies = [
name = "codex-package-manager"
version = "0.0.0"
dependencies = [
"fd-lock",
"flate2",
"pretty_assertions",
"reqwest",

View file

@ -178,6 +178,7 @@ dirs = "6"
dotenvy = "0.15.7"
dunce = "1.0.4"
encoding_rs = "0.8.35"
fd-lock = "4.0.4"
env-flags = "0.1.1"
env_logger = "0.11.9"
eventsource-stream = "0.2.3"

View file

@ -13,6 +13,7 @@ tempfile = { workspace = true }
thiserror = { workspace = true }
tokio = { workspace = true, features = ["fs", "io-util", "process", "time"] }
url = { workspace = true }
which = { workspace = true }
[lints]
workspace = true

View file

@ -43,6 +43,7 @@ impl ArtifactsClient {
request: ArtifactBuildRequest,
) -> Result<ArtifactCommandOutput, ArtifactsError> {
let runtime = self.resolve_runtime().await?;
let js_runtime = runtime.resolve_js_runtime()?;
let staging_dir = TempDir::new().map_err(|source| ArtifactsError::Io {
context: "failed to create build staging directory".to_string(),
source,
@ -56,7 +57,7 @@ impl ArtifactsClient {
source,
})?;
let mut command = Command::new(runtime.node_path());
let mut command = Command::new(js_runtime.executable_path());
command
.arg(&script_path)
.current_dir(&request.cwd)
@ -67,6 +68,9 @@ impl ArtifactsClient {
)
.stdout(Stdio::piped())
.stderr(Stdio::piped());
if js_runtime.requires_electron_run_as_node() {
command.env("ELECTRON_RUN_AS_NODE", "1");
}
for (key, value) in &request.env {
command.env(key, value);
}
@ -83,13 +87,17 @@ impl ArtifactsClient {
request: ArtifactRenderCommandRequest,
) -> Result<ArtifactCommandOutput, ArtifactsError> {
let runtime = self.resolve_runtime().await?;
let mut command = Command::new(runtime.node_path());
let js_runtime = runtime.resolve_js_runtime()?;
let mut command = Command::new(js_runtime.executable_path());
command
.arg(runtime.render_cli_path())
.args(request.target.to_args())
.current_dir(&request.cwd)
.stdout(Stdio::piped())
.stderr(Stdio::piped());
if js_runtime.requires_electron_run_as_node() {
command.env("ELECTRON_RUN_AS_NODE", "1");
}
for (key, value) in &request.env {
command.env(key, value);
}

View file

@ -15,9 +15,14 @@ pub use runtime::ArtifactRuntimeManagerConfig;
pub use runtime::ArtifactRuntimePlatform;
pub use runtime::ArtifactRuntimeReleaseLocator;
pub use runtime::DEFAULT_CACHE_ROOT_RELATIVE;
pub use runtime::DEFAULT_RELEASE_BASE_URL;
pub use runtime::DEFAULT_RELEASE_TAG_PREFIX;
pub use runtime::ExtractedRuntimeManifest;
pub use runtime::InstalledArtifactRuntime;
pub use runtime::JsRuntime;
pub use runtime::JsRuntimeKind;
pub use runtime::ReleaseManifest;
pub use runtime::RuntimeEntrypoints;
pub use runtime::RuntimePathEntry;
pub use runtime::is_js_runtime_available;
pub use runtime::load_cached_runtime;

View file

@ -13,9 +13,82 @@ use std::path::Path;
use std::path::PathBuf;
use thiserror::Error;
use url::Url;
use which::which;
/// Version prefix for artifact-runtime release tags; the full tag is this
/// prefix plus the runtime version (e.g. `artifact-runtime-v0.1.0`).
pub const DEFAULT_RELEASE_TAG_PREFIX: &str = "artifact-runtime-v";
/// Where downloaded artifact runtimes are cached, relative to `codex_home`.
pub const DEFAULT_CACHE_ROOT_RELATIVE: &str = "packages/artifacts";
/// Default download base for artifact runtime releases (openai/codex GitHub
/// releases).
pub const DEFAULT_RELEASE_BASE_URL: &str = "https://github.com/openai/codex/releases/download/";
/// Product names under which the Codex desktop app may be installed; used to
/// probe platform-standard install directories for a bundled Electron binary
/// (see `codex_app_runtime_candidates`).
const CODEX_APP_PRODUCT_NAMES: [&str; 6] = [
    "Codex",
    "Codex (Dev)",
    "Codex (Agent)",
    "Codex (Nightly)",
    "Codex (Alpha)",
    "Codex (Beta)",
];
/// Which flavor of JavaScript runtime backs a resolved executable.
#[derive(Clone, Debug, PartialEq, Eq)]
pub enum JsRuntimeKind {
    /// A plain Node.js binary.
    Node,
    /// An Electron binary; must be launched with `ELECTRON_RUN_AS_NODE=1`
    /// to behave like Node (see `requires_electron_run_as_node`).
    Electron,
}

/// A resolved JavaScript runtime: the executable to spawn plus its flavor.
#[derive(Clone, Debug, PartialEq, Eq)]
pub struct JsRuntime {
    executable_path: PathBuf,
    kind: JsRuntimeKind,
}

impl JsRuntime {
    /// Wraps `executable_path` as a plain Node runtime.
    fn node(executable_path: PathBuf) -> Self {
        Self::with_kind(executable_path, JsRuntimeKind::Node)
    }

    /// Wraps `executable_path` as an Electron-based runtime.
    fn electron(executable_path: PathBuf) -> Self {
        Self::with_kind(executable_path, JsRuntimeKind::Electron)
    }

    /// Shared constructor for both flavors.
    fn with_kind(executable_path: PathBuf, kind: JsRuntimeKind) -> Self {
        Self {
            executable_path,
            kind,
        }
    }

    /// Path of the executable to spawn.
    pub fn executable_path(&self) -> &Path {
        self.executable_path.as_path()
    }

    /// True when the caller must set `ELECTRON_RUN_AS_NODE=1` so the
    /// Electron binary runs the script like Node would.
    pub fn requires_electron_run_as_node(&self) -> bool {
        matches!(self.kind, JsRuntimeKind::Electron)
    }
}
/// Reports whether artifact tooling has a usable JavaScript runtime: either
/// the cached artifact runtime for `runtime_version` under `codex_home`
/// resolves one, or a machine-wide runtime (system node/electron or a Codex
/// desktop app binary) is present.
pub fn is_js_runtime_available(codex_home: &Path, runtime_version: &str) -> bool {
    load_cached_runtime(codex_home, runtime_version)
        .ok()
        .and_then(|runtime| runtime.resolve_js_runtime().ok())
        // Fall back to machine-wide detection when the cache is missing or
        // the cached runtime cannot resolve an executable.
        .or_else(resolve_machine_js_runtime)
        .is_some()
}
/// Loads the artifact runtime cached under `codex_home` for `runtime_version`
/// and the current platform.
///
/// # Errors
/// Propagates platform-detection failures, returns an `Io` error when the
/// per-version install directory does not exist, and otherwise surfaces
/// whatever `InstalledArtifactRuntime::load` reports.
pub fn load_cached_runtime(
    codex_home: &Path,
    runtime_version: &str,
) -> Result<InstalledArtifactRuntime, ArtifactRuntimeError> {
    let platform = ArtifactRuntimePlatform::detect_current()?;
    let install_dir = cached_runtime_install_dir(codex_home, runtime_version, platform);
    if install_dir.exists() {
        InstalledArtifactRuntime::load(install_dir, platform)
    } else {
        let context = format!(
            "artifact runtime {runtime_version} is not installed at {}",
            install_dir.display()
        );
        let source = std::io::Error::new(std::io::ErrorKind::NotFound, "missing artifact runtime");
        Err(ArtifactRuntimeError::Io { context, source })
    }
}
#[derive(Clone, Debug, PartialEq, Eq)]
pub struct ArtifactRuntimeReleaseLocator {
@ -56,9 +129,25 @@ impl ArtifactRuntimeReleaseLocator {
pub fn manifest_url(&self) -> Result<Url, PackageManagerError> {
self.base_url
.join(&self.manifest_file_name())
.join(&format!(
"{}/{}",
self.release_tag(),
self.manifest_file_name()
))
.map_err(PackageManagerError::InvalidBaseUrl)
}
pub fn default(runtime_version: impl Into<String>) -> Self {
Self::new(
match Url::parse(DEFAULT_RELEASE_BASE_URL) {
Ok(url) => url,
Err(error) => {
panic!("hard-coded artifact runtime release base URL must be valid: {error}")
}
},
runtime_version,
)
}
}
#[derive(Clone, Debug, PartialEq, Eq)]
@ -76,6 +165,13 @@ impl ArtifactRuntimeManagerConfig {
}
}
/// Convenience constructor: a manager config rooted at `codex_home`,
/// pointed at the default release locator for `runtime_version`.
pub fn with_default_release(codex_home: PathBuf, runtime_version: impl Into<String>) -> Self {
    let release = ArtifactRuntimeReleaseLocator::default(runtime_version);
    Self::new(codex_home, release)
}
pub fn with_cache_root(mut self, cache_root: PathBuf) -> Self {
self.package_manager = self.package_manager.with_cache_root(cache_root);
self
@ -163,7 +259,11 @@ impl ManagedPackage for ArtifactRuntimePackage {
fn archive_url(&self, archive: &PackageReleaseArchive) -> Result<Url, PackageManagerError> {
self.release
.base_url()
.join(&archive.archive)
.join(&format!(
"{}/{}",
self.release.release_tag(),
archive.archive
))
.map_err(PackageManagerError::InvalidBaseUrl)
}
@ -285,6 +385,8 @@ impl InstalledArtifactRuntime {
&root_dir,
&manifest.entrypoints.render_cli.relative_path,
)?;
verify_required_runtime_path(&build_js_path)?;
verify_required_runtime_path(&render_cli_path)?;
Ok(Self::new(
root_dir,
@ -324,6 +426,18 @@ impl InstalledArtifactRuntime {
pub fn render_cli_path(&self) -> &Path {
&self.render_cli_path
}
/// Picks the JavaScript runtime used to execute this installed runtime's
/// scripts, preferring the bundled node binary and falling back to
/// machine-wide candidates (system node/electron, Codex app binaries).
///
/// # Errors
/// Returns `MissingJsRuntime` (carrying this runtime's root dir) when no
/// candidate executable exists.
pub fn resolve_js_runtime(&self) -> Result<JsRuntime, ArtifactRuntimeError> {
    let resolved = resolve_js_runtime_from_candidates(
        Some(self.node_path()),
        system_node_runtime(),
        system_electron_runtime(),
        codex_app_runtime_candidates(),
    );
    match resolved {
        Some(runtime) => Ok(runtime),
        None => Err(ArtifactRuntimeError::MissingJsRuntime {
            root_dir: self.root_dir.clone(),
        }),
    }
}
}
#[derive(Debug, Error)]
@ -344,6 +458,125 @@ pub enum ArtifactRuntimeError {
},
#[error("runtime path `{0}` is invalid")]
InvalidRuntimePath(String),
#[error(
"no compatible JavaScript runtime found for artifact runtime at {root_dir}; install Node or the Codex desktop app"
)]
MissingJsRuntime { root_dir: PathBuf },
}
/// Directory where a runtime version for `platform` is cached:
/// `<codex_home>/packages/artifacts/<runtime_version>/<platform>`.
fn cached_runtime_install_dir(
    codex_home: &Path,
    runtime_version: &str,
    platform: ArtifactRuntimePlatform,
) -> PathBuf {
    let mut install_dir = codex_home.join(DEFAULT_CACHE_ROOT_RELATIVE);
    install_dir.push(runtime_version);
    install_dir.push(platform.as_str());
    install_dir
}
/// Resolves a JavaScript runtime from machine-wide sources only: system
/// `node`, then system `electron`, then Codex desktop app install locations.
fn resolve_machine_js_runtime() -> Option<JsRuntime> {
    resolve_js_runtime_from_candidates(
        // No preferred bundled node path — probe the machine only.
        None,
        system_node_runtime(),
        system_electron_runtime(),
        codex_app_runtime_candidates(),
    )
}
/// Picks the first usable JS runtime in priority order: the caller-preferred
/// node path (when it exists as a file), then `node_runtime`, then
/// `electron_runtime`, then the first existing Codex app binary from
/// `codex_app_candidates`.
fn resolve_js_runtime_from_candidates(
    preferred_node_path: Option<&Path>,
    node_runtime: Option<JsRuntime>,
    electron_runtime: Option<JsRuntime>,
    codex_app_candidates: Vec<PathBuf>,
) -> Option<JsRuntime> {
    if let Some(runtime) = preferred_node_path.and_then(node_runtime_from_path) {
        return Some(runtime);
    }
    if let Some(runtime) = node_runtime {
        return Some(runtime);
    }
    if let Some(runtime) = electron_runtime {
        return Some(runtime);
    }
    codex_app_candidates
        .iter()
        .find_map(|candidate| electron_runtime_from_path(candidate))
}
/// Locates a `node` executable on `PATH`, if any.
fn system_node_runtime() -> Option<JsRuntime> {
    match which("node") {
        Ok(path) => node_runtime_from_path(&path),
        Err(_) => None,
    }
}
/// Locates an `electron` executable on `PATH`, if any.
fn system_electron_runtime() -> Option<JsRuntime> {
    match which("electron") {
        Ok(path) => electron_runtime_from_path(&path),
        Err(_) => None,
    }
}
/// Wraps `path` as a Node runtime when it points at an existing file.
fn node_runtime_from_path(path: &Path) -> Option<JsRuntime> {
    if path.is_file() {
        Some(JsRuntime::node(path.to_path_buf()))
    } else {
        None
    }
}
/// Wraps `path` as an Electron runtime when it points at an existing file.
fn electron_runtime_from_path(path: &Path) -> Option<JsRuntime> {
    if path.is_file() {
        Some(JsRuntime::electron(path.to_path_buf()))
    } else {
        None
    }
}
/// Candidate executable paths for an installed Codex desktop app, for the
/// current OS: every known product flavor under each platform-standard
/// install root. Paths are not checked for existence here; callers filter
/// with `electron_runtime_from_path`.
fn codex_app_runtime_candidates() -> Vec<PathBuf> {
    let mut candidates = Vec::new();
    match std::env::consts::OS {
        "macos" => {
            // System-wide /Applications plus the per-user ~/Applications.
            let mut roots = vec![PathBuf::from("/Applications")];
            if let Some(home) = std::env::var_os("HOME") {
                roots.push(PathBuf::from(home).join("Applications"));
            }
            for root in roots {
                for product_name in CODEX_APP_PRODUCT_NAMES {
                    candidates.push(
                        root.join(format!("{product_name}.app"))
                            .join("Contents")
                            .join("MacOS")
                            .join(product_name),
                    );
                }
            }
        }
        "windows" => {
            // Per-user installs first, then machine-wide Program Files roots.
            let mut roots = Vec::new();
            if let Some(local_app_data) = std::env::var_os("LOCALAPPDATA") {
                roots.push(PathBuf::from(local_app_data).join("Programs"));
            }
            if let Some(program_files) = std::env::var_os("ProgramFiles") {
                roots.push(PathBuf::from(program_files));
            }
            if let Some(program_files_x86) = std::env::var_os("ProgramFiles(x86)") {
                roots.push(PathBuf::from(program_files_x86));
            }
            for root in roots {
                for product_name in CODEX_APP_PRODUCT_NAMES {
                    candidates.push(root.join(product_name).join(format!("{product_name}.exe")));
                }
            }
        }
        "linux" => {
            for root in [PathBuf::from("/opt"), PathBuf::from("/usr/lib")] {
                for product_name in CODEX_APP_PRODUCT_NAMES {
                    candidates.push(root.join(product_name).join(product_name));
                }
            }
        }
        // Unsupported platforms contribute no app candidates.
        _ => {}
    }
    candidates
}
fn resolve_relative_runtime_path(
@ -369,6 +602,17 @@ fn resolve_relative_runtime_path(
Ok(root_dir.join(relative))
}
/// Ensures a runtime file required by the manifest actually exists on disk.
///
/// # Errors
/// Returns an `Io` error carrying the missing path when `path` is not an
/// existing regular file.
fn verify_required_runtime_path(path: &Path) -> Result<(), ArtifactRuntimeError> {
    if path.is_file() {
        Ok(())
    } else {
        Err(ArtifactRuntimeError::Io {
            context: format!("required runtime file is missing: {}", path.display()),
            source: std::io::Error::new(std::io::ErrorKind::NotFound, "missing runtime file"),
        })
    }
}
#[cfg(test)]
mod tests {
use super::*;
@ -397,7 +641,20 @@ mod tests {
.unwrap_or_else(|error| panic!("{error}"));
assert_eq!(
url.as_str(),
"https://example.test/releases/artifact-runtime-v0.1.0-manifest.json"
"https://example.test/releases/artifact-runtime-v0.1.0/artifact-runtime-v0.1.0-manifest.json"
);
}
// The default locator must target the openai/codex GitHub releases download
// URL, with both the release tag and the manifest file name in the path.
#[test]
fn default_release_locator_uses_openai_codex_github_releases() {
    let locator = ArtifactRuntimeReleaseLocator::default("0.1.0");
    let url = locator
        .manifest_url()
        .unwrap_or_else(|error| panic!("{error}"));
    assert_eq!(
        url.as_str(),
        "https://github.com/openai/codex/releases/download/artifact-runtime-v0.1.0/artifact-runtime-v0.1.0-manifest.json"
    );
}
@ -430,13 +687,15 @@ mod tests {
};
Mock::given(method("GET"))
.and(path(format!(
"/artifact-runtime-v{runtime_version}-manifest.json"
"/artifact-runtime-v{runtime_version}/artifact-runtime-v{runtime_version}-manifest.json"
)))
.respond_with(ResponseTemplate::new(200).set_body_json(&manifest))
.mount(&server)
.await;
Mock::given(method("GET"))
.and(path(format!("/{archive_name}")))
.and(path(format!(
"/artifact-runtime-v{runtime_version}/{archive_name}"
)))
.respond_with(ResponseTemplate::new(200).set_body_bytes(archive_bytes))
.mount(&server)
.await;
@ -467,8 +726,31 @@ mod tests {
assert!(
runtime
.render_cli_path()
.ends_with(Path::new("granola-render/dist/cli.mjs"))
.ends_with(Path::new("granola-render/dist/render_cli.mjs"))
);
assert_eq!(
runtime.resolve_js_runtime().expect("resolve js runtime"),
JsRuntime::node(runtime.node_path().to_path_buf())
);
}
// When the preferred node path does not exist on disk and no system
// runtimes are supplied, resolution should fall through to a Codex app
// Electron candidate, which then requires ELECTRON_RUN_AS_NODE.
#[test]
fn resolve_js_runtime_uses_codex_app_electron_candidate() {
    let temp_dir = TempDir::new().unwrap_or_else(|error| panic!("{error}"));
    let electron_path = temp_dir.path().join("Codex");
    // Deliberately never created, so the preferred-node branch misses.
    let missing_node = temp_dir.path().join("missing-node");
    std::fs::write(&electron_path, "#!/bin/sh\n").unwrap_or_else(|error| panic!("{error}"));
    let runtime = resolve_js_runtime_from_candidates(
        Some(missing_node.as_path()),
        None,
        None,
        vec![electron_path.clone()],
    )
    .expect("resolve js runtime");
    assert_eq!(runtime, JsRuntime::electron(electron_path));
    assert!(runtime.requires_electron_run_as_node());
}
fn build_zip_archive(runtime_version: &str) -> Vec<u8> {
@ -482,8 +764,11 @@ mod tests {
.unwrap_or_else(|error| panic!("{error}"));
zip.write_all(&manifest)
.unwrap_or_else(|error| panic!("{error}"));
zip.start_file("artifact-runtime/node/bin/node", options)
.unwrap_or_else(|error| panic!("{error}"));
zip.start_file(
"artifact-runtime/node/bin/node",
options.unix_permissions(0o755),
)
.unwrap_or_else(|error| panic!("{error}"));
zip.write_all(b"#!/bin/sh\n")
.unwrap_or_else(|error| panic!("{error}"));
zip.start_file(
@ -493,8 +778,11 @@ mod tests {
.unwrap_or_else(|error| panic!("{error}"));
zip.write_all(b"export const ok = true;\n")
.unwrap_or_else(|error| panic!("{error}"));
zip.start_file("artifact-runtime/granola-render/dist/cli.mjs", options)
.unwrap_or_else(|error| panic!("{error}"));
zip.start_file(
"artifact-runtime/granola-render/dist/render_cli.mjs",
options,
)
.unwrap_or_else(|error| panic!("{error}"));
zip.write_all(b"export const ok = true;\n")
.unwrap_or_else(|error| panic!("{error}"));
zip.finish().unwrap_or_else(|error| panic!("{error}"));
@ -514,7 +802,7 @@ mod tests {
relative_path: "artifact-tool/dist/artifact_tool.mjs".to_string(),
},
render_cli: RuntimePathEntry {
relative_path: "granola-render/dist/cli.mjs".to_string(),
relative_path: "granola-render/dist/render_cli.mjs".to_string(),
},
},
}

View file

@ -730,6 +730,7 @@ impl TurnContext {
.with_updates(Some(model.clone()), Some(reasoning_effort), None);
let features = self.features.clone();
let tools_config = ToolsConfig::new(&ToolsConfigParams {
codex_home: &config.codex_home,
model_info: &model_info,
features: &features,
web_search_mode: self.tools_config.web_search_mode,
@ -1103,6 +1104,7 @@ impl Session {
let per_turn_config = Arc::new(per_turn_config);
let tools_config = ToolsConfig::new(&ToolsConfigParams {
codex_home: &per_turn_config.codex_home,
model_info: &model_info,
features: &per_turn_config.features,
web_search_mode: Some(per_turn_config.web_search_mode.value()),
@ -4710,6 +4712,7 @@ async fn spawn_review_thread(
let _ = review_features.disable(crate::features::Feature::WebSearchCached);
let review_web_search_mode = WebSearchMode::Disabled;
let tools_config = ToolsConfig::new(&ToolsConfigParams {
codex_home: &config.codex_home,
model_info: &review_model_info,
features: &review_features,
web_search_mode: Some(review_web_search_mode),

View file

@ -1,13 +1,11 @@
use async_trait::async_trait;
use codex_artifacts::ArtifactBuildRequest;
use codex_artifacts::ArtifactCommandOutput;
use codex_artifacts::ArtifactRuntimeError;
use codex_artifacts::ArtifactRuntimePlatform;
use codex_artifacts::ArtifactRuntimeManager;
use codex_artifacts::ArtifactRuntimeManagerConfig;
use codex_artifacts::ArtifactsClient;
use codex_artifacts::ArtifactsError;
use codex_artifacts::InstalledArtifactRuntime;
use serde_json::Value as JsonValue;
use std::path::Path;
use std::time::Duration;
use std::time::Instant;
@ -31,7 +29,7 @@ use codex_protocol::models::FunctionCallOutputBody;
const ARTIFACTS_TOOL_NAME: &str = "artifacts";
const ARTIFACTS_PRAGMA_PREFIXES: [&str; 2] = ["// codex-artifacts:", "// codex-artifact-tool:"];
const PINNED_ARTIFACT_RUNTIME_VERSION: &str = "2.4.0";
pub(crate) const PINNED_ARTIFACT_RUNTIME_VERSION: &str = "2.4.0";
const DEFAULT_EXECUTION_TIMEOUT: Duration = Duration::from_secs(30);
pub struct ArtifactsHandler;
@ -80,10 +78,9 @@ impl ToolHandler for ArtifactsHandler {
}
};
let runtime = resolve_preinstalled_runtime(&turn.config.codex_home)
.await
.map_err(artifacts_error)?;
let client = ArtifactsClient::from_installed_runtime(runtime);
let client = ArtifactsClient::from_runtime_manager(default_runtime_manager(
turn.config.codex_home.clone(),
));
let started_at = Instant::now();
emit_exec_begin(session.as_ref(), turn.as_ref(), &call_id).await;
@ -122,31 +119,6 @@ impl ToolHandler for ArtifactsHandler {
}
}
async fn resolve_preinstalled_runtime(
codex_home: &Path,
) -> Result<InstalledArtifactRuntime, ArtifactsError> {
let platform = ArtifactRuntimePlatform::detect_current()
.map_err(ArtifactRuntimeError::from)
.map_err(ArtifactsError::Runtime)?;
let install_dir = codex_home
.join("packages")
.join("artifacts")
.join(PINNED_ARTIFACT_RUNTIME_VERSION)
.join(platform.as_str());
if !install_dir.exists() {
return Err(ArtifactsError::Io {
context: format!(
"artifact runtime {} is not installed at {}",
PINNED_ARTIFACT_RUNTIME_VERSION,
install_dir.display()
),
source: std::io::Error::new(std::io::ErrorKind::NotFound, "missing artifact runtime"),
});
}
InstalledArtifactRuntime::load(install_dir, platform).map_err(ArtifactsError::Runtime)
}
fn parse_freeform_args(input: &str) -> Result<ArtifactsToolArgs, FunctionCallError> {
if input.trim().is_empty() {
return Err(FunctionCallError::RespondToModel(
@ -240,8 +212,11 @@ fn parse_pragma_prefix(line: &str) -> Option<&str> {
.find_map(|prefix| line.strip_prefix(prefix))
}
fn artifacts_error(error: ArtifactsError) -> FunctionCallError {
FunctionCallError::RespondToModel(error.to_string())
fn default_runtime_manager(codex_home: std::path::PathBuf) -> ArtifactRuntimeManager {
ArtifactRuntimeManager::new(ArtifactRuntimeManagerConfig::with_default_release(
codex_home,
PINNED_ARTIFACT_RUNTIME_VERSION,
))
}
async fn emit_exec_begin(session: &Session, turn: &TurnContext, call_id: &str) {
@ -357,10 +332,26 @@ mod tests {
);
}
#[tokio::test]
async fn resolve_preinstalled_runtime_reads_pinned_cache_path() {
#[test]
fn default_runtime_manager_uses_openai_codex_release_base() {
let codex_home = TempDir::new().expect("create temp codex home");
let platform = ArtifactRuntimePlatform::detect_current().expect("detect platform");
let manager = default_runtime_manager(codex_home.path().to_path_buf());
assert_eq!(
manager.config().release().base_url().as_str(),
"https://github.com/openai/codex/releases/download/"
);
assert_eq!(
manager.config().release().runtime_version(),
PINNED_ARTIFACT_RUNTIME_VERSION
);
}
#[test]
fn load_cached_runtime_reads_pinned_cache_path() {
let codex_home = TempDir::new().expect("create temp codex home");
let platform =
codex_artifacts::ArtifactRuntimePlatform::detect_current().expect("detect platform");
let install_dir = codex_home
.path()
.join("packages")
@ -382,10 +373,26 @@ mod tests {
.to_string(),
)
.expect("write manifest");
std::fs::create_dir_all(install_dir.join("artifact-tool/dist"))
.expect("create build entrypoint dir");
std::fs::create_dir_all(install_dir.join("granola-render/dist"))
.expect("create render entrypoint dir");
std::fs::write(
install_dir.join("artifact-tool/dist/artifact_tool.mjs"),
"export const ok = true;\n",
)
.expect("write build entrypoint");
std::fs::write(
install_dir.join("granola-render/dist/render_cli.mjs"),
"export const ok = true;\n",
)
.expect("write render entrypoint");
let runtime = resolve_preinstalled_runtime(codex_home.path())
.await
.expect("resolve runtime");
let runtime = codex_artifacts::load_cached_runtime(
codex_home.path(),
PINNED_ARTIFACT_RUNTIME_VERSION,
)
.expect("resolve runtime");
assert_eq!(runtime.runtime_version(), PINNED_ARTIFACT_RUNTIME_VERSION);
assert_eq!(
runtime.manifest().entrypoints,

View file

@ -29,6 +29,7 @@ use crate::sandboxing::SandboxPermissions;
use crate::sandboxing::normalize_additional_permissions;
pub use apply_patch::ApplyPatchHandler;
pub use artifacts::ArtifactsHandler;
pub(crate) use artifacts::PINNED_ARTIFACT_RUNTIME_VERSION;
use codex_protocol::models::PermissionProfile;
use codex_protocol::protocol::AskForApproval;
pub use dynamic::DynamicToolHandler;

View file

@ -7,6 +7,7 @@ use crate::features::Feature;
use crate::features::Features;
use crate::mcp_connection_manager::ToolInfo;
use crate::models_manager::collaboration_mode_presets::CollaborationModesConfig;
use crate::tools::handlers::PINNED_ARTIFACT_RUNTIME_VERSION;
use crate::tools::handlers::PLAN_TOOL;
use crate::tools::handlers::SEARCH_TOOL_BM25_DEFAULT_LIMIT;
use crate::tools::handlers::SEARCH_TOOL_BM25_TOOL_NAME;
@ -34,6 +35,7 @@ use serde_json::Value as JsonValue;
use serde_json::json;
use std::collections::BTreeMap;
use std::collections::HashMap;
use std::path::Path;
const SEARCH_TOOL_BM25_DESCRIPTION_TEMPLATE: &str =
include_str!("../../templates/search_tool/tool_description.md");
@ -75,6 +77,7 @@ pub(crate) struct ToolsConfig {
}
pub(crate) struct ToolsConfigParams<'a> {
pub(crate) codex_home: &'a Path,
pub(crate) model_info: &'a ModelInfo,
pub(crate) features: &'a Features,
pub(crate) web_search_mode: Option<WebSearchMode>,
@ -84,6 +87,7 @@ pub(crate) struct ToolsConfigParams<'a> {
impl ToolsConfig {
pub fn new(params: &ToolsConfigParams) -> Self {
let ToolsConfigParams {
codex_home,
model_info,
features,
web_search_mode,
@ -98,7 +102,11 @@ impl ToolsConfig {
let include_default_mode_request_user_input =
include_request_user_input && features.enabled(Feature::DefaultModeRequestUserInput);
let include_search_tool = features.enabled(Feature::Apps);
let include_artifact_tools = features.enabled(Feature::Artifact);
let include_artifact_tools = features.enabled(Feature::Artifact)
&& codex_artifacts::is_js_runtime_available(
codex_home,
PINNED_ARTIFACT_RUNTIME_VERSION,
);
let include_image_gen_tool =
features.enabled(Feature::ImageGeneration) && supports_image_generation(model_info);
let include_agent_jobs = include_collab_tools && features.enabled(Feature::Sqlite);
@ -1405,7 +1413,7 @@ JS_SOURCE: /(?:\s*)(?:[^\s{\"`]|`[^`]|``[^`])[\s\S]*/
ToolSpec::Freeform(FreeformTool {
name: "artifacts".to_string(),
description: "Runs raw JavaScript against the preinstalled Codex @oai/artifact-tool runtime for creating presentations or spreadsheets. This is plain JavaScript executed by Node with top-level await, not TypeScript: do not use type annotations, `interface`, `type`, or `import type`. Author code the same way you would for `import { Presentation, Workbook, PresentationFile, SpreadsheetFile, FileBlob, ... } from \"@oai/artifact-tool\"`, but omit that import line because the package surface is already preloaded. Named exports are available directly on `globalThis`, and the full module is available as `globalThis.artifactTool` (also aliased as `globalThis.artifacts` and `globalThis.codexArtifacts`). Node built-ins such as `node:fs/promises` may still be imported when needed for saving preview bytes. This is a freeform tool: send raw JavaScript source text, optionally with a first-line pragma like `// codex-artifacts: timeout_ms=15000` or `// codex-artifact-tool: timeout_ms=15000`; do not send JSON/quotes/markdown fences."
description: "Runs raw JavaScript against the preinstalled Codex @oai/artifact-tool runtime for creating presentations or spreadsheets. This is plain JavaScript executed by a local Node-compatible runtime with top-level await, not TypeScript: do not use type annotations, `interface`, `type`, or `import type`. Author code the same way you would for `import { Presentation, Workbook, PresentationFile, SpreadsheetFile, FileBlob, ... } from \"@oai/artifact-tool\"`, but omit that import line because the package surface is already preloaded. Named exports are available directly on `globalThis`, and the full module is available as `globalThis.artifactTool` (also aliased as `globalThis.artifacts` and `globalThis.codexArtifacts`). Node built-ins such as `node:fs/promises` may still be imported when needed for saving preview bytes. This is a freeform tool: send raw JavaScript source text, optionally with a first-line pragma like `// codex-artifacts: timeout_ms=15000` or `// codex-artifact-tool: timeout_ms=15000`; do not send JSON/quotes/markdown fences."
.to_string(),
format: FreeformToolFormat {
r#type: "grammar".to_string(),
@ -2166,6 +2174,7 @@ mod tests {
let mut features = Features::with_defaults();
features.enable(Feature::UnifiedExec);
let config = ToolsConfig::new(&ToolsConfigParams {
codex_home: Path::new("."),
model_info: &model_info,
features: &features,
web_search_mode: Some(WebSearchMode::Live),
@ -2229,6 +2238,7 @@ mod tests {
let mut features = Features::with_defaults();
features.enable(Feature::Collab);
let tools_config = ToolsConfig::new(&ToolsConfigParams {
codex_home: &config.codex_home,
model_info: &model_info,
features: &features,
web_search_mode: Some(WebSearchMode::Cached),
@ -2249,12 +2259,53 @@ mod tests {
#[test]
fn test_build_specs_artifact_tool_enabled() {
let config = test_config();
let mut config = test_config();
let runtime_root = tempfile::TempDir::new().expect("create temp codex home");
let platform = codex_artifacts::ArtifactRuntimePlatform::detect_current()
.expect("detect artifact platform");
let install_dir = runtime_root
.path()
.join("packages")
.join("artifacts")
.join(PINNED_ARTIFACT_RUNTIME_VERSION)
.join(platform.as_str());
std::fs::create_dir_all(install_dir.join("node/bin")).expect("create runtime dir");
std::fs::create_dir_all(install_dir.join("artifact-tool/dist"))
.expect("create build entrypoint dir");
std::fs::create_dir_all(install_dir.join("granola-render/dist"))
.expect("create render entrypoint dir");
std::fs::write(
install_dir.join("manifest.json"),
serde_json::json!({
"schema_version": 1,
"runtime_version": PINNED_ARTIFACT_RUNTIME_VERSION,
"node": { "relative_path": "node/bin/node" },
"entrypoints": {
"build_js": { "relative_path": "artifact-tool/dist/artifact_tool.mjs" },
"render_cli": { "relative_path": "granola-render/dist/render_cli.mjs" }
}
})
.to_string(),
)
.expect("write manifest");
std::fs::write(install_dir.join("node/bin/node"), "#!/bin/sh\n").expect("write node");
std::fs::write(
install_dir.join("artifact-tool/dist/artifact_tool.mjs"),
"export const ok = true;\n",
)
.expect("write build entrypoint");
std::fs::write(
install_dir.join("granola-render/dist/render_cli.mjs"),
"export const ok = true;\n",
)
.expect("write render entrypoint");
config.codex_home = runtime_root.path().to_path_buf();
let model_info =
ModelsManager::construct_model_info_offline_for_tests("gpt-5-codex", &config);
let mut features = Features::with_defaults();
features.enable(Feature::Artifact);
let tools_config = ToolsConfig::new(&ToolsConfigParams {
codex_home: &config.codex_home,
model_info: &model_info,
features: &features,
web_search_mode: Some(WebSearchMode::Cached),
@ -2273,6 +2324,7 @@ mod tests {
features.enable(Feature::Collab);
features.enable(Feature::Sqlite);
let tools_config = ToolsConfig::new(&ToolsConfigParams {
codex_home: &config.codex_home,
model_info: &model_info,
features: &features,
web_search_mode: Some(WebSearchMode::Cached),
@ -2303,6 +2355,7 @@ mod tests {
ModelsManager::construct_model_info_offline_for_tests("gpt-5-codex", &config);
let mut features = Features::with_defaults();
let tools_config = ToolsConfig::new(&ToolsConfigParams {
codex_home: &config.codex_home,
model_info: &model_info,
features: &features,
web_search_mode: Some(WebSearchMode::Cached),
@ -2317,6 +2370,7 @@ mod tests {
features.enable(Feature::DefaultModeRequestUserInput);
let tools_config = ToolsConfig::new(&ToolsConfigParams {
codex_home: &config.codex_home,
model_info: &model_info,
features: &features,
web_search_mode: Some(WebSearchMode::Cached),
@ -2340,6 +2394,7 @@ mod tests {
let mut features = Features::with_defaults();
features.disable(Feature::MemoryTool);
let tools_config = ToolsConfig::new(&ToolsConfigParams {
codex_home: &config.codex_home,
model_info: &model_info,
features: &features,
web_search_mode: Some(WebSearchMode::Cached),
@ -2360,6 +2415,7 @@ mod tests {
let features = Features::with_defaults();
let tools_config = ToolsConfig::new(&ToolsConfigParams {
codex_home: &config.codex_home,
model_info: &model_info,
features: &features,
web_search_mode: Some(WebSearchMode::Cached),
@ -2386,6 +2442,7 @@ mod tests {
features.enable(Feature::JsRepl);
let tools_config = ToolsConfig::new(&ToolsConfigParams {
codex_home: &config.codex_home,
model_info: &model_info,
features: &features,
web_search_mode: Some(WebSearchMode::Cached),
@ -2408,6 +2465,7 @@ mod tests {
image_generation_features.enable(Feature::ImageGeneration);
let default_tools_config = ToolsConfig::new(&ToolsConfigParams {
codex_home: &config.codex_home,
model_info: &supported_model_info,
features: &default_features,
web_search_mode: Some(WebSearchMode::Cached),
@ -2422,6 +2480,7 @@ mod tests {
);
let supported_tools_config = ToolsConfig::new(&ToolsConfigParams {
codex_home: &config.codex_home,
model_info: &supported_model_info,
features: &image_generation_features,
web_search_mode: Some(WebSearchMode::Cached),
@ -2431,6 +2490,7 @@ mod tests {
assert_contains_tool_names(&supported_tools, &["image_generation"]);
let tools_config = ToolsConfig::new(&ToolsConfigParams {
codex_home: &config.codex_home,
model_info: &unsupported_model_info,
features: &image_generation_features,
web_search_mode: Some(WebSearchMode::Cached),
@ -2466,8 +2526,10 @@ mod tests {
web_search_mode: Option<WebSearchMode>,
expected_tools: &[&str],
) {
let config = test_config();
let model_info = model_info_from_models_json(model_slug);
let tools_config = ToolsConfig::new(&ToolsConfigParams {
codex_home: &config.codex_home,
model_info: &model_info,
features,
web_search_mode,
@ -2502,6 +2564,7 @@ mod tests {
let features = Features::with_defaults();
let tools_config = ToolsConfig::new(&ToolsConfigParams {
codex_home: &config.codex_home,
model_info: &model_info,
features: &features,
web_search_mode: Some(WebSearchMode::Cached),
@ -2527,6 +2590,7 @@ mod tests {
let features = Features::with_defaults();
let tools_config = ToolsConfig::new(&ToolsConfigParams {
codex_home: &config.codex_home,
model_info: &model_info,
features: &features,
web_search_mode: Some(WebSearchMode::Live),
@ -2553,6 +2617,7 @@ mod tests {
let features = Features::with_defaults();
let tools_config = ToolsConfig::new(&ToolsConfigParams {
codex_home: &config.codex_home,
model_info: &model_info,
features: &features,
web_search_mode: Some(WebSearchMode::Live),
@ -2582,6 +2647,7 @@ mod tests {
ModelsManager::construct_model_info_offline_for_tests("gpt-5-codex", &config);
let features = Features::with_defaults();
let tools_config = ToolsConfig::new(&ToolsConfigParams {
codex_home: &config.codex_home,
model_info: &model_info,
features: &features,
web_search_mode: Some(WebSearchMode::Cached),
@ -2605,6 +2671,7 @@ mod tests {
ModelsManager::construct_model_info_offline_for_tests("gpt-5-codex", &config);
let features = Features::with_defaults();
let tools_config = ToolsConfig::new(&ToolsConfigParams {
codex_home: &config.codex_home,
model_info: &model_info,
features: &features,
web_search_mode: Some(WebSearchMode::Cached),
@ -2796,6 +2863,7 @@ mod tests {
let mut features = Features::with_defaults();
features.enable(Feature::UnifiedExec);
let tools_config = ToolsConfig::new(&ToolsConfigParams {
codex_home: &config.codex_home,
model_info: &model_info,
features: &features,
web_search_mode: Some(WebSearchMode::Live),
@ -2820,6 +2888,7 @@ mod tests {
features.enable(Feature::ShellZshFork);
let tools_config = ToolsConfig::new(&ToolsConfigParams {
codex_home: &config.codex_home,
model_info: &model_info,
features: &features,
web_search_mode: Some(WebSearchMode::Live),
@ -2846,6 +2915,7 @@ mod tests {
let mut features = Features::with_defaults();
features.enable(Feature::UnifiedExec);
let tools_config = ToolsConfig::new(&ToolsConfigParams {
codex_home: &config.codex_home,
model_info: &model_info,
features: &features,
web_search_mode: Some(WebSearchMode::Cached),
@ -2862,6 +2932,7 @@ mod tests {
#[test]
fn test_test_model_info_includes_sync_tool() {
let config = test_config();
let mut model_info = model_info_from_models_json("gpt-5-codex");
model_info.experimental_supported_tools = vec![
"test_sync_tool".to_string(),
@ -2871,6 +2942,7 @@ mod tests {
];
let features = Features::with_defaults();
let tools_config = ToolsConfig::new(&ToolsConfigParams {
codex_home: &config.codex_home,
model_info: &model_info,
features: &features,
web_search_mode: Some(WebSearchMode::Cached),
@ -2903,6 +2975,7 @@ mod tests {
let mut features = Features::with_defaults();
features.enable(Feature::UnifiedExec);
let tools_config = ToolsConfig::new(&ToolsConfigParams {
codex_home: &config.codex_home,
model_info: &model_info,
features: &features,
web_search_mode: Some(WebSearchMode::Live),
@ -2990,6 +3063,7 @@ mod tests {
let mut features = Features::with_defaults();
features.enable(Feature::UnifiedExec);
let tools_config = ToolsConfig::new(&ToolsConfigParams {
codex_home: &config.codex_home,
model_info: &model_info,
features: &features,
web_search_mode: Some(WebSearchMode::Cached),
@ -3036,6 +3110,7 @@ mod tests {
let mut features = Features::with_defaults();
features.enable(Feature::Apps);
let tools_config = ToolsConfig::new(&ToolsConfigParams {
codex_home: &config.codex_home,
model_info: &model_info,
features: &features,
web_search_mode: Some(WebSearchMode::Cached),
@ -3104,6 +3179,7 @@ mod tests {
let mut features = Features::with_defaults();
features.enable(Feature::UnifiedExec);
let tools_config = ToolsConfig::new(&ToolsConfigParams {
codex_home: &config.codex_home,
model_info: &model_info,
features: &features,
web_search_mode: Some(WebSearchMode::Cached),
@ -3159,6 +3235,7 @@ mod tests {
let mut features = Features::with_defaults();
features.enable(Feature::UnifiedExec);
let tools_config = ToolsConfig::new(&ToolsConfigParams {
codex_home: &config.codex_home,
model_info: &model_info,
features: &features,
web_search_mode: Some(WebSearchMode::Cached),
@ -3211,6 +3288,7 @@ mod tests {
features.enable(Feature::UnifiedExec);
features.enable(Feature::ApplyPatchFreeform);
let tools_config = ToolsConfig::new(&ToolsConfigParams {
codex_home: &config.codex_home,
model_info: &model_info,
features: &features,
web_search_mode: Some(WebSearchMode::Cached),
@ -3265,6 +3343,7 @@ mod tests {
let mut features = Features::with_defaults();
features.enable(Feature::UnifiedExec);
let tools_config = ToolsConfig::new(&ToolsConfigParams {
codex_home: &config.codex_home,
model_info: &model_info,
features: &features,
web_search_mode: Some(WebSearchMode::Cached),
@ -3398,6 +3477,7 @@ Examples of valid command strings:
let mut features = Features::with_defaults();
features.enable(Feature::UnifiedExec);
let tools_config = ToolsConfig::new(&ToolsConfigParams {
codex_home: &config.codex_home,
model_info: &model_info,
features: &features,
web_search_mode: Some(WebSearchMode::Cached),

View file

@ -5,6 +5,7 @@ edition.workspace = true
license.workspace = true
[dependencies]
fd-lock = { workspace = true }
flate2 = { workspace = true }
reqwest = { workspace = true, features = ["json", "stream"] }
serde = { workspace = true, features = ["derive"] }
@ -12,7 +13,7 @@ sha2 = { workspace = true }
tar = { workspace = true }
tempfile = { workspace = true }
thiserror = { workspace = true }
tokio = { workspace = true, features = ["fs", "rt", "sync"] }
tokio = { workspace = true, features = ["fs", "rt", "sync", "time"] }
url = { workspace = true }
zip = { workspace = true }

View file

@ -1,19 +1,27 @@
use fd_lock::RwLock as FileRwLock;
use flate2::read::GzDecoder;
use reqwest::Client;
use serde::de::DeserializeOwned;
use sha2::Digest;
use sha2::Sha256;
use std::fs::File;
use std::fs::OpenOptions;
#[cfg(unix)]
use std::os::unix::fs::PermissionsExt;
use std::path::Component;
use std::path::Path;
use std::path::PathBuf;
use std::time::Duration;
use tar::Archive;
use tempfile::tempdir_in;
use thiserror::Error;
use tokio::fs;
use tokio::time::sleep;
use url::Url;
use zip::ZipArchive;
const INSTALL_LOCK_POLL_INTERVAL: Duration = Duration::from_millis(50);
#[derive(Clone, Debug, PartialEq, Eq)]
pub struct PackageManagerConfig<P> {
codex_home: PathBuf,
@ -86,6 +94,14 @@ impl<P: ManagedPackage> PackageManager<P> {
.config
.package()
.install_dir(&self.config.cache_root(), platform);
self.resolve_cached_at(platform, install_dir).await
}
async fn resolve_cached_at(
&self,
platform: PackagePlatform,
install_dir: PathBuf,
) -> Result<Option<P::Installed>, P::Error> {
if !fs::try_exists(&install_dir)
.await
.map_err(|source| PackageManagerError::Io {
@ -97,10 +113,10 @@ impl<P: ManagedPackage> PackageManager<P> {
return Ok(None);
}
let package = self
.config
.package()
.load_installed(install_dir, platform)?;
let package = match self.config.package().load_installed(install_dir, platform) {
Ok(package) => package,
Err(_) => return Ok(None),
};
if self.config.package().installed_version(&package) != self.config.package().version() {
return Ok(None);
}
@ -113,6 +129,61 @@ impl<P: ManagedPackage> PackageManager<P> {
}
let platform = PackagePlatform::detect_current().map_err(P::Error::from)?;
let cache_root = self.config.cache_root();
let install_dir = self.config.package().install_dir(&cache_root, platform);
if let Some(package) = self
.resolve_cached_at(platform, install_dir.clone())
.await?
{
return Ok(package);
}
if let Some(parent) = install_dir.parent() {
fs::create_dir_all(parent)
.await
.map_err(|source| PackageManagerError::Io {
context: format!("failed to create {}", parent.display()),
source,
})
.map_err(P::Error::from)?;
}
let lock_path = install_dir.with_extension("lock");
let lock_file = OpenOptions::new()
.create(true)
.read(true)
.write(true)
.truncate(false)
.open(&lock_path)
.map_err(|source| PackageManagerError::Io {
context: format!("failed to open {}", lock_path.display()),
source,
})
.map_err(P::Error::from)?;
let mut install_lock = FileRwLock::new(lock_file);
let _install_guard = loop {
match install_lock.try_write() {
Ok(guard) => break guard,
Err(source) if source.kind() == std::io::ErrorKind::WouldBlock => {
sleep(INSTALL_LOCK_POLL_INTERVAL).await;
}
Err(source) => {
return Err(PackageManagerError::Io {
context: format!("failed to lock {}", lock_path.display()),
source,
}
.into());
}
}
};
if let Some(package) = self
.resolve_cached_at(platform, install_dir.clone())
.await?
{
return Ok(package);
}
let manifest = self.fetch_release_manifest().await?;
if self.config.package().release_version(&manifest) != self.config.package().version() {
return Err(PackageManagerError::UnexpectedPackageVersion {
@ -122,6 +193,22 @@ impl<P: ManagedPackage> PackageManager<P> {
.into());
}
fs::create_dir_all(&cache_root)
.await
.map_err(|source| PackageManagerError::Io {
context: format!("failed to create {}", cache_root.display()),
source,
})
.map_err(P::Error::from)?;
let staging_root = cache_root.join(".staging");
fs::create_dir_all(&staging_root)
.await
.map_err(|source| PackageManagerError::Io {
context: format!("failed to create {}", staging_root.display()),
source,
})
.map_err(P::Error::from)?;
let platform_archive = self
.config
.package()
@ -134,41 +221,11 @@ impl<P: ManagedPackage> PackageManager<P> {
let archive_bytes = self.download_bytes(&archive_url).await?;
verify_sha256(&archive_bytes, &platform_archive.sha256).map_err(P::Error::from)?;
let install_dir = self
.config
.package()
.install_dir(&self.config.cache_root(), platform);
if fs::try_exists(&install_dir)
.await
.map_err(|source| PackageManagerError::Io {
context: format!("failed to read {}", install_dir.display()),
source,
})
.map_err(P::Error::from)?
{
fs::remove_dir_all(&install_dir)
.await
.map_err(|source| PackageManagerError::Io {
context: format!("failed to remove {}", install_dir.display()),
source,
})
.map_err(P::Error::from)?;
}
let cache_root = self.config.cache_root();
fs::create_dir_all(&cache_root)
.await
.map_err(|source| PackageManagerError::Io {
context: format!("failed to create {}", cache_root.display()),
source,
})
.map_err(P::Error::from)?;
let staging_dir = tempdir_in(&cache_root)
let staging_dir = tempdir_in(&staging_root)
.map_err(|source| PackageManagerError::Io {
context: format!(
"failed to create staging directory in {}",
cache_root.display()
staging_root.display()
),
source,
})
@ -222,17 +279,91 @@ impl<P: ManagedPackage> PackageManager<P> {
.map_err(P::Error::from)?;
}
fs::rename(&extracted_root, &install_dir)
let mut replaced_install_dir = None;
if fs::try_exists(&install_dir)
.await
.map_err(|source| PackageManagerError::Io {
context: format!(
"failed to move {} to {}",
extracted_root.display(),
install_dir.display()
),
context: format!("failed to read {}", install_dir.display()),
source,
})
.map_err(P::Error::from)?;
.map_err(P::Error::from)?
{
let install_name = install_dir.file_name().ok_or_else(|| {
PackageManagerError::ArchiveExtraction(format!(
"install path `{}` has no terminal component",
install_dir.display()
))
})?;
let install_name = install_name.to_string_lossy();
let mut suffix = 0u32;
loop {
let quarantined_path = install_dir.with_file_name(format!(
".{install_name}.replaced-{}-{suffix}",
std::process::id()
));
match fs::rename(&install_dir, &quarantined_path).await {
Ok(()) => {
replaced_install_dir = Some(quarantined_path);
break;
}
Err(source) if source.kind() == std::io::ErrorKind::AlreadyExists => {
suffix += 1;
}
Err(source) => {
return Err(PackageManagerError::Io {
context: format!(
"failed to quarantine {} to {}",
install_dir.display(),
quarantined_path.display()
),
source,
}
.into());
}
}
}
}
match fs::rename(&extracted_root, &install_dir).await {
Ok(()) => {}
Err(source)
if matches!(
source.kind(),
std::io::ErrorKind::AlreadyExists | std::io::ErrorKind::DirectoryNotEmpty
) =>
{
if let Some(package) = self
.resolve_cached_at(platform, install_dir.clone())
.await?
{
return Ok(package);
}
return Err(PackageManagerError::Io {
context: format!(
"failed to move {} to {}",
extracted_root.display(),
install_dir.display()
),
source,
}
.into());
}
Err(source) => {
return Err(PackageManagerError::Io {
context: format!(
"failed to move {} to {}",
extracted_root.display(),
install_dir.display()
),
source,
}
.into());
}
}
if let Some(replaced_install_dir) = replaced_install_dir {
let _ = fs::remove_dir_all(replaced_install_dir).await;
}
self.config.package().load_installed(install_dir, platform)
}
@ -505,10 +636,35 @@ fn extract_zip_archive(archive_path: &Path, destination: &Path) -> Result<(), Pa
context: format!("failed to write {}", output_path.display()),
source,
})?;
apply_zip_permissions(&entry, &output_path)?;
}
Ok(())
}
/// Mirror the Unix permission bits recorded in a zip entry onto the file
/// extracted at `output_path`.
///
/// Entries that carry no Unix metadata are left with whatever permissions the
/// filesystem assigned when the file was created.
///
/// # Errors
/// Returns [`PackageManagerError::Io`] when `chmod` on the extracted file
/// fails.
#[cfg(unix)]
fn apply_zip_permissions(
    entry: &zip::read::ZipFile<'_>,
    output_path: &Path,
) -> Result<(), PackageManagerError> {
    match entry.unix_mode() {
        // Archive entry recorded no Unix mode; nothing to restore.
        None => Ok(()),
        Some(mode) => {
            std::fs::set_permissions(output_path, std::fs::Permissions::from_mode(mode)).map_err(
                |source| PackageManagerError::Io {
                    context: format!("failed to set permissions on {}", output_path.display()),
                    source,
                },
            )
        }
    }
}

/// Non-Unix platforms have no Unix mode bits to restore; this is a no-op.
#[cfg(not(unix))]
fn apply_zip_permissions(
    _entry: &zip::read::ZipFile<'_>,
    _output_path: &Path,
) -> Result<(), PackageManagerError> {
    Ok(())
}
fn extract_tar_gz_archive(
archive_path: &Path,
destination: &Path,
@ -573,7 +729,9 @@ mod tests {
use std::collections::BTreeMap;
use std::io::Cursor;
use std::io::Write;
use std::sync::Arc;
use tempfile::TempDir;
use tokio::sync::Barrier;
use wiremock::Mock;
use wiremock::MockServer;
use wiremock::ResponseTemplate;
@ -732,6 +890,138 @@ mod tests {
.join(platform.as_str()),
}
);
#[cfg(unix)]
{
let executable_mode = std::fs::metadata(installed.root_dir.join("bin/tool"))
.unwrap_or_else(|error| panic!("{error}"))
.permissions()
.mode();
assert_eq!(executable_mode & 0o111, 0o111);
}
}
#[tokio::test]
async fn ensure_installed_replaces_invalid_cached_install() {
    // A cached install directory that fails validation (here: missing its
    // manifest) must be discarded and replaced with a freshly downloaded
    // archive rather than returned as-is.
    let mock_server = MockServer::start().await;
    let version = "0.1.0";
    let platform = PackagePlatform::detect_current().unwrap_or_else(|error| panic!("{error}"));
    let archive_name = format!("test-package-v{version}-{}.zip", platform.as_str());
    let payload = build_zip_archive(version);
    let payload_sha = format!("{:x}", Sha256::digest(&payload));
    let manifest = serde_json::json!({
        "package_version": version,
        "platforms": {
            platform.as_str(): {
                "archive": archive_name,
                "sha256": payload_sha,
                "format": "zip",
                "size_bytes": payload.len(),
            }
        }
    });
    Mock::given(method("GET"))
        .and(path(format!("/test-package-v{version}-manifest.json")))
        .respond_with(ResponseTemplate::new(200).set_body_json(&manifest))
        .mount(&mock_server)
        .await;
    Mock::given(method("GET"))
        .and(path(format!("/{archive_name}")))
        .respond_with(ResponseTemplate::new(200).set_body_bytes(payload))
        .mount(&mock_server)
        .await;
    let codex_home = TempDir::new().unwrap_or_else(|error| panic!("{error}"));
    // Seed a stale, invalid install tree at the exact location the manager
    // will resolve for this package/version/platform.
    let stale_install_dir = ["packages", "test-package", version, platform.as_str()]
        .iter()
        .fold(codex_home.path().to_path_buf(), |dir, part| dir.join(part));
    std::fs::create_dir_all(&stale_install_dir).unwrap_or_else(|error| panic!("{error}"));
    std::fs::write(stale_install_dir.join("broken.txt"), "stale")
        .unwrap_or_else(|error| panic!("{error}"));
    let manager = PackageManager::new(PackageManagerConfig::new(
        codex_home.path().to_path_buf(),
        TestPackage {
            base_url: Url::parse(&format!("{}/", mock_server.uri()))
                .unwrap_or_else(|error| panic!("{error}")),
            version: version.to_string(),
        },
    ));
    let installed = manager
        .ensure_installed()
        .await
        .unwrap_or_else(|error| panic!("{error}"));
    // The fresh archive contents are present and the stale marker is gone.
    assert_eq!(installed.version, version);
    assert!(installed.root_dir.join("manifest.json").exists());
    assert!(!installed.root_dir.join("broken.txt").exists());
}
#[tokio::test]
async fn ensure_installed_serializes_concurrent_installs() {
    // Two managers sharing one codex_home race to install the same package
    // version. The install path must serialize them so the network is hit at
    // most once: both mocks below carry `.expect(1)`, and wiremock verifies
    // those call counts when `server` is dropped at the end of the test.
    let server = MockServer::start().await;
    let version = "0.1.0";
    let platform = PackagePlatform::detect_current().unwrap_or_else(|error| panic!("{error}"));
    let archive_name = format!("test-package-v{version}-{}.zip", platform.as_str());
    let archive_bytes = build_zip_archive(version);
    let archive_sha = format!("{:x}", Sha256::digest(&archive_bytes));
    let manifest = serde_json::json!({
        "package_version": version,
        "platforms": {
            platform.as_str(): {
                "archive": archive_name,
                "sha256": archive_sha,
                "format": "zip",
                "size_bytes": archive_bytes.len(),
            }
        }
    });
    // Manifest endpoint: must be fetched exactly once across both installs.
    Mock::given(method("GET"))
        .and(path(format!("/test-package-v{version}-manifest.json")))
        .respond_with(ResponseTemplate::new(200).set_body_json(&manifest))
        .expect(1)
        .mount(&server)
        .await;
    // Archive endpoint: likewise downloaded exactly once.
    Mock::given(method("GET"))
        .and(path(format!("/{archive_name}")))
        .respond_with(ResponseTemplate::new(200).set_body_bytes(archive_bytes))
        .expect(1)
        .mount(&server)
        .await;
    let codex_home = TempDir::new().unwrap_or_else(|error| panic!("{error}"));
    let config = PackageManagerConfig::new(
        codex_home.path().to_path_buf(),
        TestPackage {
            base_url: Url::parse(&format!("{}/", server.uri()))
                .unwrap_or_else(|error| panic!("{error}")),
            version: version.to_string(),
        },
    );
    // Two independent manager instances over the same config/cache root.
    let manager_one = PackageManager::new(config.clone());
    let manager_two = PackageManager::new(config);
    // Barrier ensures both tasks enter ensure_installed at the same time, so
    // the race is real rather than one finishing before the other starts.
    let barrier = Arc::new(Barrier::new(2));
    let barrier_one = Arc::clone(&barrier);
    let barrier_two = Arc::clone(&barrier);
    let (first, second) = tokio::join!(
        async {
            barrier_one.wait().await;
            manager_one.ensure_installed().await
        },
        async {
            barrier_two.wait().await;
            manager_two.ensure_installed().await
        }
    );
    // Both callers succeed and observe the same installed package.
    let first = first.unwrap_or_else(|error| panic!("{error}"));
    let second = second.unwrap_or_else(|error| panic!("{error}"));
    assert_eq!(first, second);
}
#[test]
@ -759,6 +1049,10 @@ mod tests {
.unwrap_or_else(|error| panic!("{error}"));
zip.write_all(version.as_bytes())
.unwrap_or_else(|error| panic!("{error}"));
zip.start_file("test-package/bin/tool", options.unix_permissions(0o755))
.unwrap_or_else(|error| panic!("{error}"));
zip.write_all(b"#!/bin/sh\n")
.unwrap_or_else(|error| panic!("{error}"));
zip.finish().unwrap_or_else(|error| panic!("{error}"));
}
bytes.into_inner()