parent
a246dbf9d1
commit
0cc6835416
13 changed files with 1731 additions and 1167 deletions
|
|
@ -730,7 +730,6 @@ impl TurnContext {
|
|||
.with_updates(Some(model.clone()), Some(reasoning_effort), None);
|
||||
let features = self.features.clone();
|
||||
let tools_config = ToolsConfig::new(&ToolsConfigParams {
|
||||
codex_home: &config.codex_home,
|
||||
model_info: &model_info,
|
||||
features: &features,
|
||||
web_search_mode: self.tools_config.web_search_mode,
|
||||
|
|
@ -1104,7 +1103,6 @@ impl Session {
|
|||
let per_turn_config = Arc::new(per_turn_config);
|
||||
|
||||
let tools_config = ToolsConfig::new(&ToolsConfigParams {
|
||||
codex_home: &per_turn_config.codex_home,
|
||||
model_info: &model_info,
|
||||
features: &per_turn_config.features,
|
||||
web_search_mode: Some(per_turn_config.web_search_mode.value()),
|
||||
|
|
@ -4712,7 +4710,6 @@ async fn spawn_review_thread(
|
|||
let _ = review_features.disable(crate::features::Feature::WebSearchCached);
|
||||
let review_web_search_mode = WebSearchMode::Disabled;
|
||||
let tools_config = ToolsConfig::new(&ToolsConfigParams {
|
||||
codex_home: &config.codex_home,
|
||||
model_info: &review_model_info,
|
||||
features: &review_features,
|
||||
web_search_mode: Some(review_web_search_mode),
|
||||
|
|
|
|||
|
|
@ -389,7 +389,9 @@ mod tests {
|
|||
.expect("write render entrypoint");
|
||||
|
||||
let runtime = codex_artifacts::load_cached_runtime(
|
||||
codex_home.path(),
|
||||
&codex_home
|
||||
.path()
|
||||
.join(codex_artifacts::DEFAULT_CACHE_ROOT_RELATIVE),
|
||||
PINNED_ARTIFACT_RUNTIME_VERSION,
|
||||
)
|
||||
.expect("resolve runtime");
|
||||
|
|
|
|||
|
|
@ -29,7 +29,6 @@ use crate::sandboxing::SandboxPermissions;
|
|||
use crate::sandboxing::normalize_additional_permissions;
|
||||
pub use apply_patch::ApplyPatchHandler;
|
||||
pub use artifacts::ArtifactsHandler;
|
||||
pub(crate) use artifacts::PINNED_ARTIFACT_RUNTIME_VERSION;
|
||||
use codex_protocol::models::PermissionProfile;
|
||||
use codex_protocol::protocol::AskForApproval;
|
||||
pub use dynamic::DynamicToolHandler;
|
||||
|
|
|
|||
|
|
@ -7,7 +7,6 @@ use crate::features::Feature;
|
|||
use crate::features::Features;
|
||||
use crate::mcp_connection_manager::ToolInfo;
|
||||
use crate::models_manager::collaboration_mode_presets::CollaborationModesConfig;
|
||||
use crate::tools::handlers::PINNED_ARTIFACT_RUNTIME_VERSION;
|
||||
use crate::tools::handlers::PLAN_TOOL;
|
||||
use crate::tools::handlers::SEARCH_TOOL_BM25_DEFAULT_LIMIT;
|
||||
use crate::tools::handlers::SEARCH_TOOL_BM25_TOOL_NAME;
|
||||
|
|
@ -35,7 +34,6 @@ use serde_json::Value as JsonValue;
|
|||
use serde_json::json;
|
||||
use std::collections::BTreeMap;
|
||||
use std::collections::HashMap;
|
||||
use std::path::Path;
|
||||
|
||||
const SEARCH_TOOL_BM25_DESCRIPTION_TEMPLATE: &str =
|
||||
include_str!("../../templates/search_tool/tool_description.md");
|
||||
|
|
@ -77,7 +75,6 @@ pub(crate) struct ToolsConfig {
|
|||
}
|
||||
|
||||
pub(crate) struct ToolsConfigParams<'a> {
|
||||
pub(crate) codex_home: &'a Path,
|
||||
pub(crate) model_info: &'a ModelInfo,
|
||||
pub(crate) features: &'a Features,
|
||||
pub(crate) web_search_mode: Option<WebSearchMode>,
|
||||
|
|
@ -87,7 +84,6 @@ pub(crate) struct ToolsConfigParams<'a> {
|
|||
impl ToolsConfig {
|
||||
pub fn new(params: &ToolsConfigParams) -> Self {
|
||||
let ToolsConfigParams {
|
||||
codex_home,
|
||||
model_info,
|
||||
features,
|
||||
web_search_mode,
|
||||
|
|
@ -102,11 +98,8 @@ impl ToolsConfig {
|
|||
let include_default_mode_request_user_input =
|
||||
include_request_user_input && features.enabled(Feature::DefaultModeRequestUserInput);
|
||||
let include_search_tool = features.enabled(Feature::Apps);
|
||||
let include_artifact_tools = features.enabled(Feature::Artifact)
|
||||
&& codex_artifacts::is_js_runtime_available(
|
||||
codex_home,
|
||||
PINNED_ARTIFACT_RUNTIME_VERSION,
|
||||
);
|
||||
let include_artifact_tools =
|
||||
features.enabled(Feature::Artifact) && codex_artifacts::can_manage_artifact_runtime();
|
||||
let include_image_gen_tool =
|
||||
features.enabled(Feature::ImageGeneration) && supports_image_generation(model_info);
|
||||
let include_agent_jobs = include_collab_tools && features.enabled(Feature::Sqlite);
|
||||
|
|
@ -2174,7 +2167,6 @@ mod tests {
|
|||
let mut features = Features::with_defaults();
|
||||
features.enable(Feature::UnifiedExec);
|
||||
let config = ToolsConfig::new(&ToolsConfigParams {
|
||||
codex_home: Path::new("."),
|
||||
model_info: &model_info,
|
||||
features: &features,
|
||||
web_search_mode: Some(WebSearchMode::Live),
|
||||
|
|
@ -2238,7 +2230,6 @@ mod tests {
|
|||
let mut features = Features::with_defaults();
|
||||
features.enable(Feature::Collab);
|
||||
let tools_config = ToolsConfig::new(&ToolsConfigParams {
|
||||
codex_home: &config.codex_home,
|
||||
model_info: &model_info,
|
||||
features: &features,
|
||||
web_search_mode: Some(WebSearchMode::Cached),
|
||||
|
|
@ -2261,51 +2252,12 @@ mod tests {
|
|||
fn test_build_specs_artifact_tool_enabled() {
|
||||
let mut config = test_config();
|
||||
let runtime_root = tempfile::TempDir::new().expect("create temp codex home");
|
||||
let platform = codex_artifacts::ArtifactRuntimePlatform::detect_current()
|
||||
.expect("detect artifact platform");
|
||||
let install_dir = runtime_root
|
||||
.path()
|
||||
.join("packages")
|
||||
.join("artifacts")
|
||||
.join(PINNED_ARTIFACT_RUNTIME_VERSION)
|
||||
.join(platform.as_str());
|
||||
std::fs::create_dir_all(install_dir.join("node/bin")).expect("create runtime dir");
|
||||
std::fs::create_dir_all(install_dir.join("artifact-tool/dist"))
|
||||
.expect("create build entrypoint dir");
|
||||
std::fs::create_dir_all(install_dir.join("granola-render/dist"))
|
||||
.expect("create render entrypoint dir");
|
||||
std::fs::write(
|
||||
install_dir.join("manifest.json"),
|
||||
serde_json::json!({
|
||||
"schema_version": 1,
|
||||
"runtime_version": PINNED_ARTIFACT_RUNTIME_VERSION,
|
||||
"node": { "relative_path": "node/bin/node" },
|
||||
"entrypoints": {
|
||||
"build_js": { "relative_path": "artifact-tool/dist/artifact_tool.mjs" },
|
||||
"render_cli": { "relative_path": "granola-render/dist/render_cli.mjs" }
|
||||
}
|
||||
})
|
||||
.to_string(),
|
||||
)
|
||||
.expect("write manifest");
|
||||
std::fs::write(install_dir.join("node/bin/node"), "#!/bin/sh\n").expect("write node");
|
||||
std::fs::write(
|
||||
install_dir.join("artifact-tool/dist/artifact_tool.mjs"),
|
||||
"export const ok = true;\n",
|
||||
)
|
||||
.expect("write build entrypoint");
|
||||
std::fs::write(
|
||||
install_dir.join("granola-render/dist/render_cli.mjs"),
|
||||
"export const ok = true;\n",
|
||||
)
|
||||
.expect("write render entrypoint");
|
||||
config.codex_home = runtime_root.path().to_path_buf();
|
||||
let model_info =
|
||||
ModelsManager::construct_model_info_offline_for_tests("gpt-5-codex", &config);
|
||||
let mut features = Features::with_defaults();
|
||||
features.enable(Feature::Artifact);
|
||||
let tools_config = ToolsConfig::new(&ToolsConfigParams {
|
||||
codex_home: &config.codex_home,
|
||||
model_info: &model_info,
|
||||
features: &features,
|
||||
web_search_mode: Some(WebSearchMode::Cached),
|
||||
|
|
@ -2324,7 +2276,6 @@ mod tests {
|
|||
features.enable(Feature::Collab);
|
||||
features.enable(Feature::Sqlite);
|
||||
let tools_config = ToolsConfig::new(&ToolsConfigParams {
|
||||
codex_home: &config.codex_home,
|
||||
model_info: &model_info,
|
||||
features: &features,
|
||||
web_search_mode: Some(WebSearchMode::Cached),
|
||||
|
|
@ -2355,7 +2306,6 @@ mod tests {
|
|||
ModelsManager::construct_model_info_offline_for_tests("gpt-5-codex", &config);
|
||||
let mut features = Features::with_defaults();
|
||||
let tools_config = ToolsConfig::new(&ToolsConfigParams {
|
||||
codex_home: &config.codex_home,
|
||||
model_info: &model_info,
|
||||
features: &features,
|
||||
web_search_mode: Some(WebSearchMode::Cached),
|
||||
|
|
@ -2370,7 +2320,6 @@ mod tests {
|
|||
|
||||
features.enable(Feature::DefaultModeRequestUserInput);
|
||||
let tools_config = ToolsConfig::new(&ToolsConfigParams {
|
||||
codex_home: &config.codex_home,
|
||||
model_info: &model_info,
|
||||
features: &features,
|
||||
web_search_mode: Some(WebSearchMode::Cached),
|
||||
|
|
@ -2394,7 +2343,6 @@ mod tests {
|
|||
let mut features = Features::with_defaults();
|
||||
features.disable(Feature::MemoryTool);
|
||||
let tools_config = ToolsConfig::new(&ToolsConfigParams {
|
||||
codex_home: &config.codex_home,
|
||||
model_info: &model_info,
|
||||
features: &features,
|
||||
web_search_mode: Some(WebSearchMode::Cached),
|
||||
|
|
@ -2415,7 +2363,6 @@ mod tests {
|
|||
let features = Features::with_defaults();
|
||||
|
||||
let tools_config = ToolsConfig::new(&ToolsConfigParams {
|
||||
codex_home: &config.codex_home,
|
||||
model_info: &model_info,
|
||||
features: &features,
|
||||
web_search_mode: Some(WebSearchMode::Cached),
|
||||
|
|
@ -2442,7 +2389,6 @@ mod tests {
|
|||
features.enable(Feature::JsRepl);
|
||||
|
||||
let tools_config = ToolsConfig::new(&ToolsConfigParams {
|
||||
codex_home: &config.codex_home,
|
||||
model_info: &model_info,
|
||||
features: &features,
|
||||
web_search_mode: Some(WebSearchMode::Cached),
|
||||
|
|
@ -2465,7 +2411,6 @@ mod tests {
|
|||
image_generation_features.enable(Feature::ImageGeneration);
|
||||
|
||||
let default_tools_config = ToolsConfig::new(&ToolsConfigParams {
|
||||
codex_home: &config.codex_home,
|
||||
model_info: &supported_model_info,
|
||||
features: &default_features,
|
||||
web_search_mode: Some(WebSearchMode::Cached),
|
||||
|
|
@ -2480,7 +2425,6 @@ mod tests {
|
|||
);
|
||||
|
||||
let supported_tools_config = ToolsConfig::new(&ToolsConfigParams {
|
||||
codex_home: &config.codex_home,
|
||||
model_info: &supported_model_info,
|
||||
features: &image_generation_features,
|
||||
web_search_mode: Some(WebSearchMode::Cached),
|
||||
|
|
@ -2490,7 +2434,6 @@ mod tests {
|
|||
assert_contains_tool_names(&supported_tools, &["image_generation"]);
|
||||
|
||||
let tools_config = ToolsConfig::new(&ToolsConfigParams {
|
||||
codex_home: &config.codex_home,
|
||||
model_info: &unsupported_model_info,
|
||||
features: &image_generation_features,
|
||||
web_search_mode: Some(WebSearchMode::Cached),
|
||||
|
|
@ -2526,10 +2469,9 @@ mod tests {
|
|||
web_search_mode: Option<WebSearchMode>,
|
||||
expected_tools: &[&str],
|
||||
) {
|
||||
let config = test_config();
|
||||
let _config = test_config();
|
||||
let model_info = model_info_from_models_json(model_slug);
|
||||
let tools_config = ToolsConfig::new(&ToolsConfigParams {
|
||||
codex_home: &config.codex_home,
|
||||
model_info: &model_info,
|
||||
features,
|
||||
web_search_mode,
|
||||
|
|
@ -2564,7 +2506,6 @@ mod tests {
|
|||
let features = Features::with_defaults();
|
||||
|
||||
let tools_config = ToolsConfig::new(&ToolsConfigParams {
|
||||
codex_home: &config.codex_home,
|
||||
model_info: &model_info,
|
||||
features: &features,
|
||||
web_search_mode: Some(WebSearchMode::Cached),
|
||||
|
|
@ -2590,7 +2531,6 @@ mod tests {
|
|||
let features = Features::with_defaults();
|
||||
|
||||
let tools_config = ToolsConfig::new(&ToolsConfigParams {
|
||||
codex_home: &config.codex_home,
|
||||
model_info: &model_info,
|
||||
features: &features,
|
||||
web_search_mode: Some(WebSearchMode::Live),
|
||||
|
|
@ -2617,7 +2557,6 @@ mod tests {
|
|||
let features = Features::with_defaults();
|
||||
|
||||
let tools_config = ToolsConfig::new(&ToolsConfigParams {
|
||||
codex_home: &config.codex_home,
|
||||
model_info: &model_info,
|
||||
features: &features,
|
||||
web_search_mode: Some(WebSearchMode::Live),
|
||||
|
|
@ -2647,7 +2586,6 @@ mod tests {
|
|||
ModelsManager::construct_model_info_offline_for_tests("gpt-5-codex", &config);
|
||||
let features = Features::with_defaults();
|
||||
let tools_config = ToolsConfig::new(&ToolsConfigParams {
|
||||
codex_home: &config.codex_home,
|
||||
model_info: &model_info,
|
||||
features: &features,
|
||||
web_search_mode: Some(WebSearchMode::Cached),
|
||||
|
|
@ -2671,7 +2609,6 @@ mod tests {
|
|||
ModelsManager::construct_model_info_offline_for_tests("gpt-5-codex", &config);
|
||||
let features = Features::with_defaults();
|
||||
let tools_config = ToolsConfig::new(&ToolsConfigParams {
|
||||
codex_home: &config.codex_home,
|
||||
model_info: &model_info,
|
||||
features: &features,
|
||||
web_search_mode: Some(WebSearchMode::Cached),
|
||||
|
|
@ -2863,7 +2800,6 @@ mod tests {
|
|||
let mut features = Features::with_defaults();
|
||||
features.enable(Feature::UnifiedExec);
|
||||
let tools_config = ToolsConfig::new(&ToolsConfigParams {
|
||||
codex_home: &config.codex_home,
|
||||
model_info: &model_info,
|
||||
features: &features,
|
||||
web_search_mode: Some(WebSearchMode::Live),
|
||||
|
|
@ -2888,7 +2824,6 @@ mod tests {
|
|||
features.enable(Feature::ShellZshFork);
|
||||
|
||||
let tools_config = ToolsConfig::new(&ToolsConfigParams {
|
||||
codex_home: &config.codex_home,
|
||||
model_info: &model_info,
|
||||
features: &features,
|
||||
web_search_mode: Some(WebSearchMode::Live),
|
||||
|
|
@ -2915,7 +2850,6 @@ mod tests {
|
|||
let mut features = Features::with_defaults();
|
||||
features.enable(Feature::UnifiedExec);
|
||||
let tools_config = ToolsConfig::new(&ToolsConfigParams {
|
||||
codex_home: &config.codex_home,
|
||||
model_info: &model_info,
|
||||
features: &features,
|
||||
web_search_mode: Some(WebSearchMode::Cached),
|
||||
|
|
@ -2932,7 +2866,7 @@ mod tests {
|
|||
|
||||
#[test]
|
||||
fn test_test_model_info_includes_sync_tool() {
|
||||
let config = test_config();
|
||||
let _config = test_config();
|
||||
let mut model_info = model_info_from_models_json("gpt-5-codex");
|
||||
model_info.experimental_supported_tools = vec![
|
||||
"test_sync_tool".to_string(),
|
||||
|
|
@ -2942,7 +2876,6 @@ mod tests {
|
|||
];
|
||||
let features = Features::with_defaults();
|
||||
let tools_config = ToolsConfig::new(&ToolsConfigParams {
|
||||
codex_home: &config.codex_home,
|
||||
model_info: &model_info,
|
||||
features: &features,
|
||||
web_search_mode: Some(WebSearchMode::Cached),
|
||||
|
|
@ -2975,7 +2908,6 @@ mod tests {
|
|||
let mut features = Features::with_defaults();
|
||||
features.enable(Feature::UnifiedExec);
|
||||
let tools_config = ToolsConfig::new(&ToolsConfigParams {
|
||||
codex_home: &config.codex_home,
|
||||
model_info: &model_info,
|
||||
features: &features,
|
||||
web_search_mode: Some(WebSearchMode::Live),
|
||||
|
|
@ -3063,7 +2995,6 @@ mod tests {
|
|||
let mut features = Features::with_defaults();
|
||||
features.enable(Feature::UnifiedExec);
|
||||
let tools_config = ToolsConfig::new(&ToolsConfigParams {
|
||||
codex_home: &config.codex_home,
|
||||
model_info: &model_info,
|
||||
features: &features,
|
||||
web_search_mode: Some(WebSearchMode::Cached),
|
||||
|
|
@ -3110,7 +3041,6 @@ mod tests {
|
|||
let mut features = Features::with_defaults();
|
||||
features.enable(Feature::Apps);
|
||||
let tools_config = ToolsConfig::new(&ToolsConfigParams {
|
||||
codex_home: &config.codex_home,
|
||||
model_info: &model_info,
|
||||
features: &features,
|
||||
web_search_mode: Some(WebSearchMode::Cached),
|
||||
|
|
@ -3179,7 +3109,6 @@ mod tests {
|
|||
let mut features = Features::with_defaults();
|
||||
features.enable(Feature::UnifiedExec);
|
||||
let tools_config = ToolsConfig::new(&ToolsConfigParams {
|
||||
codex_home: &config.codex_home,
|
||||
model_info: &model_info,
|
||||
features: &features,
|
||||
web_search_mode: Some(WebSearchMode::Cached),
|
||||
|
|
@ -3235,7 +3164,6 @@ mod tests {
|
|||
let mut features = Features::with_defaults();
|
||||
features.enable(Feature::UnifiedExec);
|
||||
let tools_config = ToolsConfig::new(&ToolsConfigParams {
|
||||
codex_home: &config.codex_home,
|
||||
model_info: &model_info,
|
||||
features: &features,
|
||||
web_search_mode: Some(WebSearchMode::Cached),
|
||||
|
|
@ -3288,7 +3216,6 @@ mod tests {
|
|||
features.enable(Feature::UnifiedExec);
|
||||
features.enable(Feature::ApplyPatchFreeform);
|
||||
let tools_config = ToolsConfig::new(&ToolsConfigParams {
|
||||
codex_home: &config.codex_home,
|
||||
model_info: &model_info,
|
||||
features: &features,
|
||||
web_search_mode: Some(WebSearchMode::Cached),
|
||||
|
|
@ -3343,7 +3270,6 @@ mod tests {
|
|||
let mut features = Features::with_defaults();
|
||||
features.enable(Feature::UnifiedExec);
|
||||
let tools_config = ToolsConfig::new(&ToolsConfigParams {
|
||||
codex_home: &config.codex_home,
|
||||
model_info: &model_info,
|
||||
features: &features,
|
||||
web_search_mode: Some(WebSearchMode::Cached),
|
||||
|
|
@ -3477,7 +3403,6 @@ Examples of valid command strings:
|
|||
let mut features = Features::with_defaults();
|
||||
features.enable(Feature::UnifiedExec);
|
||||
let tools_config = ToolsConfig::new(&ToolsConfigParams {
|
||||
codex_home: &config.codex_home,
|
||||
model_info: &model_info,
|
||||
features: &features,
|
||||
web_search_mode: Some(WebSearchMode::Cached),
|
||||
|
|
|
|||
64
codex-rs/package-manager/README.md
Normal file
64
codex-rs/package-manager/README.md
Normal file
|
|
@ -0,0 +1,64 @@
|
|||
# codex-package-manager
|
||||
|
||||
`codex-package-manager` is the shared installer used for versioned runtime bundles and other cached artifacts in `codex-rs`.
|
||||
|
||||
It owns the generic parts of package installation:
|
||||
|
||||
- current-platform detection
|
||||
- manifest and archive fetches
|
||||
- checksum and archive-size validation
|
||||
- archive extraction for `.zip` and `.tar.gz`
|
||||
- staging and promotion into a versioned cache directory
|
||||
- cross-process install locking
|
||||
|
||||
Package-specific code stays behind the `ManagedPackage` trait.
|
||||
|
||||
## Model
|
||||
|
||||
The package manager is intentionally small:
|
||||
|
||||
1. A `ManagedPackage` implementation describes how to fetch a manifest, choose an archive for a `PackagePlatform`, and load a validated installed package from disk.
|
||||
2. `PackageManager::resolve_cached()` returns a cached install for the current platform if `load_installed()` succeeds and the version matches.
|
||||
3. `PackageManager::ensure_installed()` acquires a per-install lock, downloads the archive into a staging directory, extracts it, validates the staged package, and promotes it into the cache.
|
||||
|
||||
The default cache root is:
|
||||
|
||||
```text
|
||||
<codex_home>/<default_cache_root_relative>
|
||||
```
|
||||
|
||||
Callers can override that root with `PackageManagerConfig::with_cache_root(...)`.
|
||||
|
||||
## ManagedPackage Contract
|
||||
|
||||
The trait is small, but the invariants matter:
|
||||
|
||||
- `install_dir()` should be unique per package version and platform. If two versions or two platforms share a directory, promotion and cleanup become unsafe.
|
||||
- `load_installed()` must fully validate the installed package, not just deserialize a manifest. `resolve_cached()` trusts a successful load as a valid cache hit.
|
||||
- The default `detect_extracted_root()` looks for `manifest.json` at the extraction root or inside a single top-level directory. Override it if your package layout differs.
|
||||
- `archive_url()` should be derived from manifest data, not recomputed from unrelated caller state, so manifest selection and download stay aligned.
|
||||
|
||||
## Consumer Guidance
|
||||
|
||||
- If your feature can install on demand, do not gate feature registration on a preinstalled-cache check alone. `resolve_cached()` only answers "is it already present?" while `ensure_installed()` is the bootstrap path.
|
||||
- Keep cache-root overrides inside your manager/config surface. Separate helpers that reconstruct install paths can drift from `PackageManagerConfig`.
|
||||
- Prefer surfacing package-specific validation failures from `load_installed()` when debugging. The generic manager treats failed cache loads as cache misses today.
|
||||
|
||||
## Security and Extraction Rules
|
||||
|
||||
- `.zip` extraction rejects entries that escape the extraction root and preserves Unix executable bits when the archive carries them.
|
||||
- `.tar.gz` extraction rejects symlinks, hard links, sparse files, device files, and FIFOs. Only regular files and directories are promoted.
|
||||
- The archive SHA-256 is always verified, and `size_bytes` is enforced when present in the manifest.
|
||||
|
||||
## Extending It
|
||||
|
||||
Typical usage looks like this:
|
||||
|
||||
```rust,ignore
|
||||
let config = PackageManagerConfig::new(codex_home, MyPackage::new(...));
|
||||
let manager = PackageManager::new(config);
|
||||
|
||||
let package = manager.ensure_installed().await?;
|
||||
```
|
||||
|
||||
In practice, most packages should expose their own small wrapper config/manager types over the generic crate so the rest of the codebase does not depend on `ManagedPackage` details directly.
|
||||
270
codex-rs/package-manager/src/archive.rs
Normal file
270
codex-rs/package-manager/src/archive.rs
Normal file
|
|
@ -0,0 +1,270 @@
|
|||
use crate::PackageManagerError;
|
||||
use flate2::read::GzDecoder;
|
||||
use sha2::Digest;
|
||||
use sha2::Sha256;
|
||||
use std::fs::File;
|
||||
#[cfg(unix)]
|
||||
use std::os::unix::fs::PermissionsExt;
|
||||
use std::path::Component;
|
||||
use std::path::Path;
|
||||
use std::path::PathBuf;
|
||||
use tar::Archive;
|
||||
use zip::ZipArchive;
|
||||
|
||||
/// Archive metadata for a platform entry in a release manifest.
///
/// One instance describes the downloadable archive for a single platform
/// entry; all fields are deserialized straight from the manifest JSON.
#[derive(Clone, Debug, serde::Deserialize, serde::Serialize, PartialEq, Eq)]
pub struct PackageReleaseArchive {
    /// Archive file name relative to the package release location.
    pub archive: String,
    /// Expected SHA-256 of the downloaded archive body.
    pub sha256: String,
    /// Archive format used by the download.
    pub format: ArchiveFormat,
    /// Expected archive length in bytes, when the manifest provides it.
    /// `None` disables the size check (see `verify_archive_size`).
    pub size_bytes: Option<u64>,
}
|
||||
|
||||
/// Archive formats supported by the generic extractor.
///
/// Variants carry explicit `serde` renames so manifest JSON uses the
/// conventional file-extension spellings ("zip", "tar.gz").
#[derive(Clone, Copy, Debug, serde::Deserialize, serde::Serialize, PartialEq, Eq)]
pub enum ArchiveFormat {
    /// A `.zip` archive.
    #[serde(rename = "zip")]
    Zip,
    /// A `.tar.gz` archive.
    #[serde(rename = "tar.gz")]
    TarGz,
}
|
||||
|
||||
/// Detects a package root with a `manifest.json` in an extraction directory.
|
||||
pub(crate) fn detect_single_package_root(
|
||||
extraction_root: &Path,
|
||||
) -> Result<PathBuf, PackageManagerError> {
|
||||
let direct_manifest = extraction_root.join("manifest.json");
|
||||
if direct_manifest.exists() {
|
||||
return Ok(extraction_root.to_path_buf());
|
||||
}
|
||||
|
||||
let mut directory_candidates = Vec::new();
|
||||
for entry in std::fs::read_dir(extraction_root).map_err(|source| PackageManagerError::Io {
|
||||
context: format!("failed to read {}", extraction_root.display()),
|
||||
source,
|
||||
})? {
|
||||
let entry = entry.map_err(|source| PackageManagerError::Io {
|
||||
context: format!("failed to read entry in {}", extraction_root.display()),
|
||||
source,
|
||||
})?;
|
||||
let path = entry.path();
|
||||
if path.is_dir() {
|
||||
directory_candidates.push(path);
|
||||
}
|
||||
}
|
||||
|
||||
if directory_candidates.len() == 1 {
|
||||
let candidate = &directory_candidates[0];
|
||||
if candidate.join("manifest.json").exists() {
|
||||
return Ok(candidate.clone());
|
||||
}
|
||||
}
|
||||
|
||||
Err(PackageManagerError::MissingPackageRoot(
|
||||
extraction_root.to_path_buf(),
|
||||
))
|
||||
}
|
||||
|
||||
pub(crate) fn verify_archive_size(
|
||||
bytes: &[u8],
|
||||
expected: Option<u64>,
|
||||
) -> Result<(), PackageManagerError> {
|
||||
let Some(expected) = expected else {
|
||||
return Ok(());
|
||||
};
|
||||
let actual = bytes.len() as u64;
|
||||
if actual == expected {
|
||||
return Ok(());
|
||||
}
|
||||
Err(PackageManagerError::UnexpectedArchiveSize { expected, actual })
|
||||
}
|
||||
|
||||
pub(crate) fn verify_sha256(bytes: &[u8], expected: &str) -> Result<(), PackageManagerError> {
|
||||
let actual = format!("{:x}", Sha256::digest(bytes));
|
||||
if actual == expected.to_ascii_lowercase() {
|
||||
return Ok(());
|
||||
}
|
||||
Err(PackageManagerError::ChecksumMismatch {
|
||||
expected: expected.to_string(),
|
||||
actual,
|
||||
})
|
||||
}
|
||||
|
||||
/// Extracts `archive_path` into `destination` using the given format.
///
/// Dispatches to the format-specific extractor; both reject entries that
/// would escape the extraction root.
pub(crate) fn extract_archive(
    archive_path: &Path,
    destination: &Path,
    format: ArchiveFormat,
) -> Result<(), PackageManagerError> {
    match format {
        ArchiveFormat::Zip => extract_zip_archive(archive_path, destination),
        ArchiveFormat::TarGz => extract_tar_gz_archive(archive_path, destination),
    }
}
|
||||
|
||||
fn extract_zip_archive(archive_path: &Path, destination: &Path) -> Result<(), PackageManagerError> {
|
||||
let file = File::open(archive_path).map_err(|source| PackageManagerError::Io {
|
||||
context: format!("failed to open {}", archive_path.display()),
|
||||
source,
|
||||
})?;
|
||||
let mut archive = ZipArchive::new(file)
|
||||
.map_err(|error| PackageManagerError::ArchiveExtraction(error.to_string()))?;
|
||||
for index in 0..archive.len() {
|
||||
let mut entry = archive
|
||||
.by_index(index)
|
||||
.map_err(|error| PackageManagerError::ArchiveExtraction(error.to_string()))?;
|
||||
let Some(relative_path) = entry.enclosed_name() else {
|
||||
return Err(PackageManagerError::ArchiveExtraction(format!(
|
||||
"zip entry `{}` escapes extraction root",
|
||||
entry.name()
|
||||
)));
|
||||
};
|
||||
let output_path = destination.join(relative_path);
|
||||
if entry.is_dir() {
|
||||
std::fs::create_dir_all(&output_path).map_err(|source| PackageManagerError::Io {
|
||||
context: format!("failed to create {}", output_path.display()),
|
||||
source,
|
||||
})?;
|
||||
continue;
|
||||
}
|
||||
if let Some(parent) = output_path.parent() {
|
||||
std::fs::create_dir_all(parent).map_err(|source| PackageManagerError::Io {
|
||||
context: format!("failed to create {}", parent.display()),
|
||||
source,
|
||||
})?;
|
||||
}
|
||||
let mut output = File::create(&output_path).map_err(|source| PackageManagerError::Io {
|
||||
context: format!("failed to create {}", output_path.display()),
|
||||
source,
|
||||
})?;
|
||||
std::io::copy(&mut entry, &mut output).map_err(|source| PackageManagerError::Io {
|
||||
context: format!("failed to write {}", output_path.display()),
|
||||
source,
|
||||
})?;
|
||||
apply_zip_permissions(&entry, &output_path)?;
|
||||
}
|
||||
Ok(())
|
||||
}
|
||||
|
||||
#[cfg(unix)]
|
||||
fn apply_zip_permissions(
|
||||
entry: &zip::read::ZipFile<'_>,
|
||||
output_path: &Path,
|
||||
) -> Result<(), PackageManagerError> {
|
||||
let Some(mode) = entry.unix_mode() else {
|
||||
return Ok(());
|
||||
};
|
||||
std::fs::set_permissions(output_path, std::fs::Permissions::from_mode(mode)).map_err(|source| {
|
||||
PackageManagerError::Io {
|
||||
context: format!("failed to set permissions on {}", output_path.display()),
|
||||
source,
|
||||
}
|
||||
})
|
||||
}
|
||||
|
||||
/// Non-Unix fallback: zip Unix mode bits have no meaning here, so this is a
/// no-op that keeps call sites platform-agnostic.
#[cfg(not(unix))]
fn apply_zip_permissions(
    _entry: &zip::read::ZipFile<'_>,
    _output_path: &Path,
) -> Result<(), PackageManagerError> {
    Ok(())
}
|
||||
|
||||
/// Extracts a `.tar.gz` archive into `destination`.
///
/// Only regular files, contiguous files, and directories are materialized.
/// Symlinks, hard links, device nodes, FIFOs, and GNU sparse entries are
/// rejected outright; PAX/GNU metadata entries are silently skipped.
fn extract_tar_gz_archive(
    archive_path: &Path,
    destination: &Path,
) -> Result<(), PackageManagerError> {
    let file = File::open(archive_path).map_err(|source| PackageManagerError::Io {
        context: format!("failed to open {}", archive_path.display()),
        source,
    })?;
    let decoder = GzDecoder::new(file);
    let mut archive = Archive::new(decoder);
    for entry in archive
        .entries()
        .map_err(|error| PackageManagerError::ArchiveExtraction(error.to_string()))?
    {
        let mut entry =
            entry.map_err(|error| PackageManagerError::ArchiveExtraction(error.to_string()))?;
        let path = entry
            .path()
            .map_err(|error| PackageManagerError::ArchiveExtraction(error.to_string()))?;
        // Reject path traversal before touching the filesystem.
        let output_path = safe_extract_path(destination, path.as_ref())?;
        let entry_type = entry.header().entry_type();

        // Hard-reject entry types that could alias or escape the cache
        // (links), create special files, or need sparse-file handling.
        if entry_type.is_symlink()
            || entry_type.is_hard_link()
            || entry_type.is_block_special()
            || entry_type.is_character_special()
            || entry_type.is_fifo()
            || entry_type.is_gnu_sparse()
        {
            return Err(PackageManagerError::ArchiveExtraction(format!(
                "tar entry `{}` has unsupported type",
                path.display()
            )));
        }

        // Metadata-only records (PAX extensions, GNU long name/link entries)
        // carry no file content to materialize.
        if entry_type.is_pax_global_extensions()
            || entry_type.is_pax_local_extensions()
            || entry_type.is_gnu_longname()
            || entry_type.is_gnu_longlink()
        {
            continue;
        }

        if entry_type.is_dir() {
            std::fs::create_dir_all(&output_path).map_err(|source| PackageManagerError::Io {
                context: format!("failed to create {}", output_path.display()),
                source,
            })?;
            continue;
        }

        // Anything remaining must be file-like; fail closed on unknown types.
        if !entry_type.is_file() && !entry_type.is_contiguous() {
            return Err(PackageManagerError::ArchiveExtraction(format!(
                "tar entry `{}` has unsupported type",
                path.display()
            )));
        }

        // Files can appear without an explicit parent directory entry first.
        if let Some(parent) = output_path.parent() {
            std::fs::create_dir_all(parent).map_err(|source| PackageManagerError::Io {
                context: format!("failed to create {}", parent.display()),
                source,
            })?;
        }
        entry
            .unpack(&output_path)
            .map_err(|error| PackageManagerError::ArchiveExtraction(error.to_string()))?;
    }
    Ok(())
}
|
||||
|
||||
fn safe_extract_path(root: &Path, relative_path: &Path) -> Result<PathBuf, PackageManagerError> {
|
||||
let mut clean_relative = PathBuf::new();
|
||||
for component in relative_path.components() {
|
||||
match component {
|
||||
Component::Normal(segment) => clean_relative.push(segment),
|
||||
Component::CurDir => {}
|
||||
Component::ParentDir | Component::RootDir | Component::Prefix(_) => {
|
||||
return Err(PackageManagerError::ArchiveExtraction(format!(
|
||||
"entry `{}` escapes extraction root",
|
||||
relative_path.display()
|
||||
)));
|
||||
}
|
||||
}
|
||||
}
|
||||
|
||||
if clean_relative.as_os_str().is_empty() {
|
||||
return Err(PackageManagerError::ArchiveExtraction(
|
||||
"archive entry had an empty path".to_string(),
|
||||
));
|
||||
}
|
||||
Ok(root.join(clean_relative))
|
||||
}
|
||||
40
codex-rs/package-manager/src/config.rs
Normal file
40
codex-rs/package-manager/src/config.rs
Normal file
|
|
@ -0,0 +1,40 @@
|
|||
use crate::ManagedPackage;
|
||||
use std::path::PathBuf;
|
||||
|
||||
/// Immutable configuration for a [`crate::PackageManager`] instance.
///
/// Holds the Codex home directory, the package description `P`, and an
/// optional cache-root override (set via `with_cache_root`).
#[derive(Clone, Debug, PartialEq, Eq)]
pub struct PackageManagerConfig<P> {
    // Base directory the default cache root is derived from.
    pub(crate) codex_home: PathBuf,
    // Package-specific behavior supplied by the caller.
    pub(crate) package: P,
    // When `Some`, used verbatim instead of deriving from `codex_home`.
    cache_root: Option<PathBuf>,
}
|
||||
|
||||
impl<P> PackageManagerConfig<P> {
|
||||
/// Creates a config rooted at the provided Codex home directory.
|
||||
pub fn new(codex_home: PathBuf, package: P) -> Self {
|
||||
Self {
|
||||
codex_home,
|
||||
package,
|
||||
cache_root: None,
|
||||
}
|
||||
}
|
||||
|
||||
/// Overrides the package cache root instead of deriving it from `codex_home`.
|
||||
pub fn with_cache_root(mut self, cache_root: PathBuf) -> Self {
|
||||
self.cache_root = Some(cache_root);
|
||||
self
|
||||
}
|
||||
}
|
||||
|
||||
impl<P: ManagedPackage> PackageManagerConfig<P> {
|
||||
/// Returns the effective cache root for the package.
|
||||
pub fn cache_root(&self) -> PathBuf {
|
||||
self.cache_root.clone().unwrap_or_else(|| {
|
||||
self.codex_home.join(
|
||||
self.package
|
||||
.default_cache_root_relative()
|
||||
.replace('/', std::path::MAIN_SEPARATOR_STR),
|
||||
)
|
||||
})
|
||||
}
|
||||
}
|
||||

// ===== codex-rs/package-manager/src/error.rs (new file in this commit) =====

use std::path::PathBuf;
|
||||
use thiserror::Error;
|
||||
|
||||
/// Errors returned by the generic package manager.
///
/// `Http` and `Io` carry a human-readable `context` string (used verbatim as
/// the display message) plus the underlying error as `source`.
#[derive(Debug, Error)]
pub enum PackageManagerError {
    /// The current machine OS/architecture pair is not supported by the package.
    #[error("unsupported platform: {os}-{arch}")]
    UnsupportedPlatform { os: String, arch: String },

    /// The configured release base URL could not be joined with a package-specific path.
    #[error("invalid release base url")]
    InvalidBaseUrl(#[source] url::ParseError),

    /// An HTTP request failed while fetching the manifest or archive.
    #[error("{context}")]
    Http {
        context: String,
        #[source]
        source: reqwest::Error,
    },

    /// A filesystem operation failed while reading, staging, or promoting a package.
    #[error("{context}")]
    Io {
        context: String,
        #[source]
        source: std::io::Error,
    },

    /// The release manifest did not contain an archive for the current platform.
    #[error("missing platform entry `{0}` in release manifest")]
    MissingPlatform(String),

    /// The release manifest or installed package reported a different version than requested.
    #[error("unexpected package version: expected `{expected}`, got `{actual}`")]
    UnexpectedPackageVersion { expected: String, actual: String },

    /// The downloaded archive length did not match the manifest metadata.
    #[error("unexpected archive size: expected `{expected}`, got `{actual}`")]
    UnexpectedArchiveSize { expected: u64, actual: u64 },

    /// The downloaded archive checksum did not match the manifest metadata.
    #[error("checksum mismatch: expected `{expected}`, got `{actual}`")]
    ChecksumMismatch { expected: String, actual: String },

    /// Archive extraction failed or the archive contents violated extraction rules.
    #[error("archive extraction failed: {0}")]
    ArchiveExtraction(String),

    /// The extracted archive layout did not contain a detectable package root.
    #[error("archive did not contain a package root with manifest.json under {0}")]
    MissingPackageRoot(PathBuf),
}
|
||||

// NOTE(review): the diff viewer suppressed one large file here ("Load diff");
// its contents are not part of this view.

// ===== codex-rs/package-manager/src/manager.rs (new file in this commit) =====

use crate::ManagedPackage;
|
||||
use crate::PackageManagerConfig;
|
||||
use crate::PackageManagerError;
|
||||
use crate::PackagePlatform;
|
||||
use crate::archive::extract_archive;
|
||||
use crate::archive::verify_archive_size;
|
||||
use crate::archive::verify_sha256;
|
||||
use fd_lock::RwLock as FileRwLock;
|
||||
use reqwest::Client;
|
||||
use std::fs::OpenOptions;
|
||||
use std::path::Path;
|
||||
use std::path::PathBuf;
|
||||
use std::time::Duration;
|
||||
use tempfile::tempdir_in;
|
||||
use tokio::fs;
|
||||
use tokio::time::sleep;
|
||||
use url::Url;
|
||||
|
||||
// How often a waiting process re-polls the advisory install lock (see
// `ensure_installed`).
const INSTALL_LOCK_POLL_INTERVAL: Duration = Duration::from_millis(50);

/// Fetches and installs a versioned package into a shared cache directory.
#[derive(Clone, Debug)]
pub struct PackageManager<P> {
    // HTTP client used for manifest and archive downloads.
    client: Client,
    // Immutable package/cache configuration.
    config: PackageManagerConfig<P>,
}
|
||||
|
||||
impl<P> PackageManager<P> {
|
||||
/// Creates a manager with a default `reqwest` client.
|
||||
pub fn new(config: PackageManagerConfig<P>) -> Self {
|
||||
Self {
|
||||
client: Client::new(),
|
||||
config,
|
||||
}
|
||||
}
|
||||
|
||||
/// Creates a manager with a caller-provided HTTP client.
|
||||
pub fn with_client(config: PackageManagerConfig<P>, client: Client) -> Self {
|
||||
Self { client, config }
|
||||
}
|
||||
}
|
||||
|
||||
impl<P: ManagedPackage> PackageManager<P> {
    /// Resolves a valid cached install for the current platform, if one exists.
    pub async fn resolve_cached(&self) -> Result<Option<P::Installed>, P::Error> {
        let platform = PackagePlatform::detect_current().map_err(P::Error::from)?;
        let install_dir = self
            .config
            .package
            .install_dir(&self.config.cache_root(), platform);
        self.resolve_cached_at(platform, install_dir).await
    }

    /// Ensures the requested package is installed for the current platform.
    ///
    /// Multi-process safe: an advisory file lock next to the install dir
    /// serializes installers, and the staged tree is promoted into place with
    /// a rename. On any failure after an existing install was moved aside,
    /// the previous install is restored.
    pub async fn ensure_installed(&self) -> Result<P::Installed, P::Error> {
        // Fast path: most calls should resolve an already validated cache entry
        // without touching the network or the install lock.
        if let Some(package) = self.resolve_cached().await? {
            return Ok(package);
        }

        let platform = PackagePlatform::detect_current().map_err(P::Error::from)?;
        let cache_root = self.config.cache_root();
        let install_dir = self.config.package.install_dir(&cache_root, platform);
        if let Some(package) = self
            .resolve_cached_at(platform, install_dir.clone())
            .await?
        {
            return Ok(package);
        }

        if let Some(parent) = install_dir.parent() {
            fs::create_dir_all(parent)
                .await
                .map_err(|source| PackageManagerError::Io {
                    context: format!("failed to create {}", parent.display()),
                    source,
                })
                .map_err(P::Error::from)?;
        }

        // Acquire an exclusive advisory lock on `<install_dir>.lock`. The
        // guard stays alive for the remainder of this function, covering
        // download, extraction, and promotion.
        let lock_path = install_dir.with_extension("lock");
        let lock_file = OpenOptions::new()
            .create(true)
            .read(true)
            .write(true)
            .truncate(false)
            .open(&lock_path)
            .map_err(|source| PackageManagerError::Io {
                context: format!("failed to open {}", lock_path.display()),
                source,
            })
            .map_err(P::Error::from)?;
        let mut install_lock = FileRwLock::new(lock_file);
        let _install_guard = loop {
            match install_lock.try_write() {
                Ok(guard) => break guard,
                // Lock held by another installer: poll rather than block the
                // async runtime on a synchronous lock call.
                Err(source) if source.kind() == std::io::ErrorKind::WouldBlock => {
                    sleep(INSTALL_LOCK_POLL_INTERVAL).await;
                }
                Err(source) => {
                    return Err(PackageManagerError::Io {
                        context: format!("failed to lock {}", lock_path.display()),
                        source,
                    }
                    .into());
                }
            }
        };

        // Another process may have finished the install while we were waiting
        // on the lock, so re-check before doing any download or extraction work.
        if let Some(package) = self
            .resolve_cached_at(platform, install_dir.clone())
            .await?
        {
            return Ok(package);
        }

        let manifest = self.fetch_release_manifest().await?;
        if self.config.package.release_version(&manifest) != self.config.package.version() {
            return Err(PackageManagerError::UnexpectedPackageVersion {
                expected: self.config.package.version().to_string(),
                actual: self.config.package.release_version(&manifest).to_string(),
            }
            .into());
        }

        fs::create_dir_all(&cache_root)
            .await
            .map_err(|source| PackageManagerError::Io {
                context: format!("failed to create {}", cache_root.display()),
                source,
            })
            .map_err(P::Error::from)?;
        // Staging lives under the cache root so the final promotion rename is
        // presumably on the same filesystem — TODO confirm this assumption
        // holds for overridden cache roots.
        let staging_root = cache_root.join(".staging");
        fs::create_dir_all(&staging_root)
            .await
            .map_err(|source| PackageManagerError::Io {
                context: format!("failed to create {}", staging_root.display()),
                source,
            })
            .map_err(P::Error::from)?;

        // Everything below happens in a disposable staging area until the
        // extracted package has passed package-specific validation.
        let platform_archive = self.config.package.platform_archive(&manifest, platform)?;
        let archive_url = self
            .config
            .package
            .archive_url(&platform_archive)
            .map_err(P::Error::from)?;
        let archive_bytes = self.download_bytes(&archive_url).await?;
        verify_archive_size(&archive_bytes, platform_archive.size_bytes).map_err(P::Error::from)?;
        verify_sha256(&archive_bytes, &platform_archive.sha256).map_err(P::Error::from)?;

        let staging_dir = tempdir_in(&staging_root)
            .map_err(|source| PackageManagerError::Io {
                context: format!(
                    "failed to create staging directory in {}",
                    staging_root.display()
                ),
                source,
            })
            .map_err(P::Error::from)?;
        let archive_path = staging_dir.path().join(&platform_archive.archive);
        fs::write(&archive_path, &archive_bytes)
            .await
            .map_err(|source| PackageManagerError::Io {
                context: format!("failed to write {}", archive_path.display()),
                source,
            })
            .map_err(P::Error::from)?;
        let extraction_root = staging_dir.path().join("extract");
        fs::create_dir_all(&extraction_root)
            .await
            .map_err(|source| PackageManagerError::Io {
                context: format!("failed to create {}", extraction_root.display()),
                source,
            })
            .map_err(P::Error::from)?;

        extract_archive(&archive_path, &extraction_root, platform_archive.format)
            .map_err(P::Error::from)?;
        let extracted_root = self
            .config
            .package
            .detect_extracted_root(&extraction_root)?;
        // Pre-promotion validation: load the staged tree before it is moved
        // into the real cache location.
        let package = self
            .config
            .package
            .load_installed(extracted_root.clone(), platform)?;
        if self.config.package.installed_version(&package) != self.config.package.version() {
            return Err(PackageManagerError::UnexpectedPackageVersion {
                expected: self.config.package.version().to_string(),
                actual: self.config.package.installed_version(&package).to_string(),
            }
            .into());
        }

        if let Some(parent) = install_dir.parent() {
            fs::create_dir_all(parent)
                .await
                .map_err(|source| PackageManagerError::Io {
                    context: format!("failed to create {}", parent.display()),
                    source,
                })
                .map_err(P::Error::from)?;
        }

        // Promotion is intentionally two-phase: move the old install aside,
        // then attempt an atomic rename of the staged tree into place.
        // If promotion fails, restore the previous install before returning.
        let replaced_install_dir = quarantine_existing_install(&install_dir)
            .await
            .map_err(P::Error::from)?;
        let promotion = promote_staged_install(&extracted_root, &install_dir).await;
        if let Err(error) = promotion {
            // If another process won the race after we staged our copy, prefer
            // the now-installed cache entry and clean up our quarantined copy.
            if matches!(
                &error,
                PackageManagerError::Io { source, .. }
                    if matches!(
                        source.kind(),
                        std::io::ErrorKind::AlreadyExists
                            | std::io::ErrorKind::DirectoryNotEmpty
                    )
            ) && let Some(package) = self
                .resolve_cached_at(platform, install_dir.clone())
                .await?
            {
                if let Some(replaced_install_dir) = replaced_install_dir {
                    let _ = fs::remove_dir_all(replaced_install_dir).await;
                }
                return Ok(package);
            }

            restore_quarantined_install(&install_dir, replaced_install_dir.as_deref(), &error)
                .await
                .map_err(P::Error::from)?;
            return Err(error.into());
        }

        // Validate from the final install path before deleting the quarantined
        // previous install. Some packages may only fully validate once the
        // promoted tree is in place at its real cache location.
        let package = match self
            .config
            .package
            .load_installed(install_dir.clone(), platform)
        {
            Ok(package) => package,
            Err(error) => {
                if let Some(replaced_install_dir) = replaced_install_dir.as_deref() {
                    // Final validation failed after promotion, so discard the
                    // broken install and restore the last known-good copy.
                    if fs::try_exists(&install_dir)
                        .await
                        .map_err(|source| PackageManagerError::Io {
                            context: format!("failed to read {}", install_dir.display()),
                            source,
                        })
                        .map_err(P::Error::from)?
                    {
                        fs::remove_dir_all(&install_dir)
                            .await
                            .map_err(|source| PackageManagerError::Io {
                                context: format!(
                                    "failed to remove invalid install {} after final validation failed",
                                    install_dir.display()
                                ),
                                source,
                            })
                            .map_err(P::Error::from)?;
                    }
                    fs::rename(replaced_install_dir, &install_dir)
                        .await
                        .map_err(|source| PackageManagerError::Io {
                            context: format!(
                                "failed to restore {} from {} after final validation failed",
                                install_dir.display(),
                                replaced_install_dir.display()
                            ),
                            source,
                        })
                        .map_err(P::Error::from)?;
                }
                return Err(error);
            }
        };

        // Success: the quarantined previous install is no longer needed.
        // Removal is best-effort; a leftover dotted directory is harmless.
        if let Some(replaced_install_dir) = replaced_install_dir {
            let _ = fs::remove_dir_all(replaced_install_dir).await;
        }

        Ok(package)
    }

    /// Loads and validates an install at `install_dir`, returning `None` when
    /// the directory is absent, fails package validation, or holds a
    /// different version than requested.
    async fn resolve_cached_at(
        &self,
        platform: PackagePlatform,
        install_dir: PathBuf,
    ) -> Result<Option<P::Installed>, P::Error> {
        if !fs::try_exists(&install_dir)
            .await
            .map_err(|source| PackageManagerError::Io {
                context: format!("failed to read {}", install_dir.display()),
                source,
            })
            .map_err(P::Error::from)?
        {
            return Ok(None);
        }

        // A load failure here means "not a usable cache entry", not a hard
        // error — the caller falls back to installing.
        let package = match self.config.package.load_installed(install_dir, platform) {
            Ok(package) => package,
            Err(_) => return Ok(None),
        };
        if self.config.package.installed_version(&package) != self.config.package.version() {
            return Ok(None);
        }
        Ok(Some(package))
    }

    /// Fetches and JSON-decodes the release manifest for the requested
    /// version from the package's manifest URL.
    async fn fetch_release_manifest(&self) -> Result<P::ReleaseManifest, P::Error> {
        let manifest_url = self.config.package.manifest_url().map_err(P::Error::from)?;
        let response = self
            .client
            .get(manifest_url.clone())
            .send()
            .await
            .map_err(|source| PackageManagerError::Http {
                context: format!("failed to fetch {manifest_url}"),
                source,
            })
            .map_err(P::Error::from)?
            .error_for_status()
            .map_err(|source| PackageManagerError::Http {
                context: format!("manifest request failed for {manifest_url}"),
                source,
            })
            .map_err(P::Error::from)?;

        response
            .json::<P::ReleaseManifest>()
            .await
            .map_err(|source| PackageManagerError::Http {
                context: format!("failed to decode manifest from {manifest_url}"),
                source,
            })
            .map_err(P::Error::from)
    }

    /// Downloads `url` fully into memory. Archives are buffered rather than
    /// streamed so size and checksum can be verified before extraction.
    async fn download_bytes(&self, url: &Url) -> Result<Vec<u8>, P::Error> {
        let response = self
            .client
            .get(url.clone())
            .send()
            .await
            .map_err(|source| PackageManagerError::Http {
                context: format!("failed to download {url}"),
                source,
            })
            .map_err(P::Error::from)?
            .error_for_status()
            .map_err(|source| PackageManagerError::Http {
                context: format!("archive request failed for {url}"),
                source,
            })
            .map_err(P::Error::from)?;
        let bytes = response
            .bytes()
            .await
            .map_err(|source| PackageManagerError::Http {
                context: format!("failed to read response body for {url}"),
                source,
            })
            .map_err(P::Error::from)?;
        Ok(bytes.to_vec())
    }
}
|
||||
|
||||
pub(crate) async fn quarantine_existing_install(
|
||||
install_dir: &Path,
|
||||
) -> Result<Option<PathBuf>, PackageManagerError> {
|
||||
if !fs::try_exists(install_dir)
|
||||
.await
|
||||
.map_err(|source| PackageManagerError::Io {
|
||||
context: format!("failed to read {}", install_dir.display()),
|
||||
source,
|
||||
})?
|
||||
{
|
||||
return Ok(None);
|
||||
}
|
||||
|
||||
let install_name = install_dir.file_name().ok_or_else(|| {
|
||||
PackageManagerError::ArchiveExtraction(format!(
|
||||
"install path `{}` has no terminal component",
|
||||
install_dir.display()
|
||||
))
|
||||
})?;
|
||||
let install_name = install_name.to_string_lossy();
|
||||
let mut suffix = 0u32;
|
||||
loop {
|
||||
let quarantined_path = install_dir.with_file_name(format!(
|
||||
".{install_name}.replaced-{}-{suffix}",
|
||||
std::process::id()
|
||||
));
|
||||
match fs::rename(install_dir, &quarantined_path).await {
|
||||
Ok(()) => return Ok(Some(quarantined_path)),
|
||||
Err(source) if source.kind() == std::io::ErrorKind::AlreadyExists => {
|
||||
suffix += 1;
|
||||
}
|
||||
Err(source) => {
|
||||
return Err(PackageManagerError::Io {
|
||||
context: format!(
|
||||
"failed to quarantine {} to {}",
|
||||
install_dir.display(),
|
||||
quarantined_path.display()
|
||||
),
|
||||
source,
|
||||
});
|
||||
}
|
||||
}
|
||||
}
|
||||
}
|
||||
|
||||
pub(crate) async fn promote_staged_install(
|
||||
extracted_root: &Path,
|
||||
install_dir: &Path,
|
||||
) -> Result<(), PackageManagerError> {
|
||||
fs::rename(extracted_root, install_dir)
|
||||
.await
|
||||
.map_err(|source| PackageManagerError::Io {
|
||||
context: format!(
|
||||
"failed to move {} to {}",
|
||||
extracted_root.display(),
|
||||
install_dir.display()
|
||||
),
|
||||
source,
|
||||
})
|
||||
}
|
||||
|
||||
pub(crate) async fn restore_quarantined_install(
|
||||
install_dir: &Path,
|
||||
quarantined_install_dir: Option<&Path>,
|
||||
promotion_error: &PackageManagerError,
|
||||
) -> Result<(), PackageManagerError> {
|
||||
let Some(quarantined_install_dir) = quarantined_install_dir else {
|
||||
return Ok(());
|
||||
};
|
||||
|
||||
fs::rename(quarantined_install_dir, install_dir)
|
||||
.await
|
||||
.map_err(|source| PackageManagerError::Io {
|
||||
context: format!(
|
||||
"{promotion_error}; failed to restore {} from {}",
|
||||
install_dir.display(),
|
||||
quarantined_install_dir.display()
|
||||
),
|
||||
source,
|
||||
})
|
||||
}
|
||||

// ===== codex-rs/package-manager/src/package.rs (new file in this commit) =====

use crate::PackageManagerError;
|
||||
use crate::PackagePlatform;
|
||||
use crate::PackageReleaseArchive;
|
||||
use crate::archive::detect_single_package_root;
|
||||
use serde::de::DeserializeOwned;
|
||||
use std::path::Path;
|
||||
use std::path::PathBuf;
|
||||
use url::Url;
|
||||
|
||||
/// Describes how a specific package is located, validated, and loaded.
///
/// Implementations should treat this trait as the package manager contract:
///
/// - [`Self::install_dir`] should resolve to a directory unique to the package version and
///   platform so concurrent versions never overwrite each other.
/// - [`Self::load_installed`] should fully validate whatever "installed" means for the package,
///   because cache resolution trusts a successful load as a valid install.
/// - The default [`Self::detect_extracted_root`] implementation expects the extracted archive to
///   contain a `manifest.json` at the package root or a single top-level directory that does.
pub trait ManagedPackage: Clone {
    /// Error type surfaced by package-specific loading and validation.
    ///
    /// Must be constructible from [`PackageManagerError`] so the manager can
    /// propagate its own failures through `?`.
    type Error: From<PackageManagerError>;

    /// The fully loaded package instance returned to callers.
    type Installed: Clone;

    /// The decoded release manifest fetched from the remote source.
    type ReleaseManifest: DeserializeOwned;

    /// Returns the default cache root relative to Codex home.
    ///
    /// Declared with `/` separators; the config layer converts to the
    /// platform separator.
    fn default_cache_root_relative(&self) -> &str;

    /// Returns the requested package version.
    fn version(&self) -> &str;

    /// Returns the manifest URL for the requested version.
    fn manifest_url(&self) -> Result<Url, PackageManagerError>;

    /// Returns the archive download URL for a platform-specific manifest entry.
    fn archive_url(&self, archive: &PackageReleaseArchive) -> Result<Url, PackageManagerError>;

    /// Returns the version string stored in the fetched release manifest.
    fn release_version<'a>(&self, manifest: &'a Self::ReleaseManifest) -> &'a str;

    /// Selects the archive to download for the current platform.
    fn platform_archive(
        &self,
        manifest: &Self::ReleaseManifest,
        platform: PackagePlatform,
    ) -> Result<PackageReleaseArchive, Self::Error>;

    /// Returns the final install directory for the package version and platform.
    fn install_dir(&self, cache_root: &Path, platform: PackagePlatform) -> PathBuf;

    /// Returns the version string encoded in a fully loaded installed package.
    fn installed_version<'a>(&self, package: &'a Self::Installed) -> &'a str;

    /// Loads and validates an installed package from disk.
    ///
    /// Called both on staged extractions and on final install directories;
    /// a success is treated as proof of a valid install.
    fn load_installed(
        &self,
        root_dir: PathBuf,
        platform: PackagePlatform,
    ) -> Result<Self::Installed, Self::Error>;

    /// Resolves the extracted package root before the staged install is promoted.
    ///
    /// Override only when the archive layout differs from the default
    /// manifest-at-root / single-top-level-directory convention.
    fn detect_extracted_root(&self, extraction_root: &Path) -> Result<PathBuf, Self::Error> {
        detect_single_package_root(extraction_root).map_err(Self::Error::from)
    }
}
|
||||

// ===== codex-rs/package-manager/src/platform.rs (new file in this commit) =====

use crate::PackageManagerError;
|
||||
|
||||
/// Supported OS and CPU combinations for managed packages.
///
/// Variant names mirror the `os-arch` strings used in release manifests
/// (see [`PackagePlatform::as_str`]).
#[derive(Clone, Copy, Debug, Eq, PartialEq)]
pub enum PackagePlatform {
    /// macOS on Apple Silicon.
    DarwinArm64,
    /// macOS on x86_64.
    DarwinX64,
    /// Linux on AArch64.
    LinuxArm64,
    /// Linux on x86_64.
    LinuxX64,
    /// Windows on AArch64.
    WindowsArm64,
    /// Windows on x86_64.
    WindowsX64,
}
|
||||
|
||||
impl PackagePlatform {
|
||||
/// Detects the current process platform.
|
||||
pub fn detect_current() -> Result<Self, PackageManagerError> {
|
||||
match (std::env::consts::OS, std::env::consts::ARCH) {
|
||||
("macos", "aarch64") | ("macos", "arm64") => Ok(Self::DarwinArm64),
|
||||
("macos", "x86_64") => Ok(Self::DarwinX64),
|
||||
("linux", "aarch64") | ("linux", "arm64") => Ok(Self::LinuxArm64),
|
||||
("linux", "x86_64") => Ok(Self::LinuxX64),
|
||||
("windows", "aarch64") | ("windows", "arm64") => Ok(Self::WindowsArm64),
|
||||
("windows", "x86_64") => Ok(Self::WindowsX64),
|
||||
(os, arch) => Err(PackageManagerError::UnsupportedPlatform {
|
||||
os: os.to_string(),
|
||||
arch: arch.to_string(),
|
||||
}),
|
||||
}
|
||||
}
|
||||
|
||||
/// Returns the manifest/cache string for this platform.
|
||||
pub fn as_str(self) -> &'static str {
|
||||
match self {
|
||||
Self::DarwinArm64 => "darwin-arm64",
|
||||
Self::DarwinX64 => "darwin-x64",
|
||||
Self::LinuxArm64 => "linux-arm64",
|
||||
Self::LinuxX64 => "linux-x64",
|
||||
Self::WindowsArm64 => "windows-arm64",
|
||||
Self::WindowsX64 => "windows-x64",
|
||||
}
|
||||
}
|
||||
}
|
||||

// ===== codex-rs/package-manager/src/tests.rs (new file in this commit) =====

use crate::ArchiveFormat;
|
||||
use crate::ManagedPackage;
|
||||
use crate::PackageManager;
|
||||
use crate::PackageManagerConfig;
|
||||
use crate::PackageManagerError;
|
||||
use crate::PackagePlatform;
|
||||
use crate::PackageReleaseArchive;
|
||||
use crate::archive::detect_single_package_root;
|
||||
use crate::archive::extract_archive;
|
||||
use crate::manager::promote_staged_install;
|
||||
use crate::manager::quarantine_existing_install;
|
||||
use pretty_assertions::assert_eq;
|
||||
use serde::Deserialize;
|
||||
use sha2::Digest;
|
||||
use sha2::Sha256;
|
||||
use std::collections::BTreeMap;
|
||||
use std::fs::File;
|
||||
use std::io::Cursor;
|
||||
use std::io::Write;
|
||||
#[cfg(unix)]
|
||||
use std::os::unix::fs::PermissionsExt;
|
||||
use std::path::Path;
|
||||
use std::path::PathBuf;
|
||||
use std::sync::Arc;
|
||||
use tar::Builder;
|
||||
use tar::EntryType;
|
||||
use tempfile::TempDir;
|
||||
use tokio::sync::Barrier;
|
||||
use url::Url;
|
||||
use wiremock::Mock;
|
||||
use wiremock::MockServer;
|
||||
use wiremock::ResponseTemplate;
|
||||
use wiremock::matchers::method;
|
||||
use wiremock::matchers::path;
|
||||
use zip::ZipWriter;
|
||||
use zip::write::SimpleFileOptions;
|
||||
|
||||
/// Minimal [`ManagedPackage`] implementation used by the unit tests.
#[derive(Clone, Debug)]
struct TestPackage {
    // Base URL the manifest and archive URLs are joined onto.
    base_url: Url,
    // Version the tests request and expect back.
    version: String,
    // When true, `load_installed` rejects the final install directory
    // (whose last component is the platform string), exercising the
    // post-promotion validation-failure path.
    fail_on_final_install_dir: bool,
}
|
||||
|
||||
/// Release manifest shape served by the mock server in these tests.
#[derive(Clone, Debug, Deserialize)]
struct TestReleaseManifest {
    // Version advertised by the release.
    package_version: String,
    // Platform string (e.g. "linux-x64") -> archive metadata.
    platforms: BTreeMap<String, PackageReleaseArchive>,
}
|
||||
|
||||
/// Result of a successful `load_installed` in the test package.
#[derive(Clone, Debug, PartialEq, Eq)]
struct TestInstalledPackage {
    // Version read from the installed manifest.json.
    version: String,
    // Platform the install was loaded for.
    platform: PackagePlatform,
    // Directory the package was loaded from.
    root_dir: PathBuf,
}
|
||||
|
||||
impl ManagedPackage for TestPackage {
    type Error = PackageManagerError;
    type Installed = TestInstalledPackage;
    type ReleaseManifest = TestReleaseManifest;

    fn default_cache_root_relative(&self) -> &str {
        "packages/test-package"
    }

    fn version(&self) -> &str {
        &self.version
    }

    // Manifest lives at `<base>/test-package-v<version>-manifest.json`.
    fn manifest_url(&self) -> Result<Url, PackageManagerError> {
        self.base_url
            .join(&format!("test-package-v{}-manifest.json", self.version))
            .map_err(PackageManagerError::InvalidBaseUrl)
    }

    // Archives are served directly under the base URL by their manifest name.
    fn archive_url(&self, archive: &PackageReleaseArchive) -> Result<Url, PackageManagerError> {
        self.base_url
            .join(&archive.archive)
            .map_err(PackageManagerError::InvalidBaseUrl)
    }

    fn release_version<'a>(&self, manifest: &'a Self::ReleaseManifest) -> &'a str {
        &manifest.package_version
    }

    fn platform_archive(
        &self,
        manifest: &Self::ReleaseManifest,
        platform: PackagePlatform,
    ) -> Result<PackageReleaseArchive, Self::Error> {
        manifest
            .platforms
            .get(platform.as_str())
            .cloned()
            .ok_or_else(|| PackageManagerError::MissingPlatform(platform.as_str().to_string()))
    }

    // Layout: `<cache_root>/<version>/<platform>`.
    fn install_dir(&self, cache_root: &Path, platform: PackagePlatform) -> PathBuf {
        cache_root.join(self.version()).join(platform.as_str())
    }

    fn installed_version<'a>(&self, package: &'a Self::Installed) -> &'a str {
        &package.version
    }

    // "Valid install" here means: manifest.json exists and its trimmed
    // contents are the version string.
    fn load_installed(
        &self,
        root_dir: PathBuf,
        platform: PackagePlatform,
    ) -> Result<Self::Installed, Self::Error> {
        // Optionally fail only at the final install location (last path
        // component equals the platform string) so staged validation still
        // succeeds — this drives the post-promotion rollback tests.
        if self.fail_on_final_install_dir
            && root_dir
                .file_name()
                .is_some_and(|name| name == platform.as_str())
        {
            return Err(PackageManagerError::ArchiveExtraction(format!(
                "refusing final install dir {}",
                root_dir.display()
            )));
        }
        let manifest_path = root_dir.join("manifest.json");
        let version =
            std::fs::read_to_string(&manifest_path).map_err(|source| PackageManagerError::Io {
                context: format!("failed to read {}", manifest_path.display()),
                source,
            })?;
        Ok(TestInstalledPackage {
            version: version.trim().to_string(),
            platform,
            root_dir,
        })
    }
}
|
||||
|
||||
// Happy path: manifest + zip archive served by a mock server are downloaded,
// verified, extracted, and promoted into the default cache layout
// `<codex_home>/packages/test-package/<version>/<platform>`.
#[tokio::test]
async fn ensure_installed_downloads_and_extracts_zip_package() {
    let server = MockServer::start().await;
    let version = "0.1.0";
    let platform = PackagePlatform::detect_current().unwrap_or_else(|error| panic!("{error}"));
    let archive_name = format!("test-package-v{version}-{}.zip", platform.as_str());
    let archive_bytes = build_zip_archive(version);
    let archive_sha = format!("{:x}", Sha256::digest(&archive_bytes));
    let manifest = serde_json::json!({
        "package_version": version,
        "platforms": {
            platform.as_str(): {
                "archive": archive_name,
                "sha256": archive_sha,
                "format": "zip",
                "size_bytes": archive_bytes.len(),
            }
        }
    });
    Mock::given(method("GET"))
        .and(path(format!("/test-package-v{version}-manifest.json")))
        .respond_with(ResponseTemplate::new(200).set_body_json(&manifest))
        .mount(&server)
        .await;
    Mock::given(method("GET"))
        .and(path(format!("/{archive_name}")))
        .respond_with(ResponseTemplate::new(200).set_body_bytes(archive_bytes))
        .mount(&server)
        .await;

    let codex_home = TempDir::new().unwrap_or_else(|error| panic!("{error}"));
    let package = TestPackage {
        base_url: Url::parse(&format!("{}/", server.uri()))
            .unwrap_or_else(|error| panic!("{error}")),
        version: version.to_string(),
        fail_on_final_install_dir: false,
    };
    let manager = PackageManager::new(PackageManagerConfig::new(
        codex_home.path().to_path_buf(),
        package,
    ));

    let installed = manager
        .ensure_installed()
        .await
        .unwrap_or_else(|error| panic!("{error}"));

    assert_eq!(
        installed,
        TestInstalledPackage {
            version: version.to_string(),
            platform,
            root_dir: codex_home
                .path()
                .join("packages")
                .join("test-package")
                .join(version)
                .join(platform.as_str()),
        }
    );

    // Extraction must preserve the executable bit on bin/tool (unix only).
    #[cfg(unix)]
    {
        let executable_mode = std::fs::metadata(installed.root_dir.join("bin/tool"))
            .unwrap_or_else(|error| panic!("{error}"))
            .permissions()
            .mode();
        assert_eq!(executable_mode & 0o111, 0o111);
    }
}
|
||||
|
||||
// A config with `with_cache_root` must resolve cached installs from the
// override directory (no network: the base URL is never contacted).
#[tokio::test]
async fn resolve_cached_uses_custom_cache_root() {
    let platform = PackagePlatform::detect_current().unwrap_or_else(|error| panic!("{error}"));
    let codex_home = TempDir::new().unwrap_or_else(|error| panic!("{error}"));
    let cache_root = codex_home.path().join("custom-cache");
    // Pre-seed a valid install: manifest.json containing the version string.
    let install_dir = cache_root.join("0.1.0").join(platform.as_str());
    std::fs::create_dir_all(&install_dir).unwrap_or_else(|error| panic!("{error}"));
    std::fs::write(install_dir.join("manifest.json"), "0.1.0")
        .unwrap_or_else(|error| panic!("{error}"));

    let manager = PackageManager::new(
        PackageManagerConfig::new(
            codex_home.path().to_path_buf(),
            TestPackage {
                base_url: Url::parse("https://example.test/")
                    .unwrap_or_else(|error| panic!("{error}")),
                version: "0.1.0".to_string(),
                fail_on_final_install_dir: false,
            },
        )
        .with_cache_root(cache_root.clone()),
    );

    let installed = manager
        .resolve_cached()
        .await
        .unwrap_or_else(|error| panic!("{error}"));

    assert_eq!(
        installed,
        Some(TestInstalledPackage {
            version: "0.1.0".to_string(),
            platform,
            root_dir: cache_root.join("0.1.0").join(platform.as_str()),
        })
    );
}
|
||||
|
||||
#[tokio::test]
// Verifies that `ensure_installed` discards a pre-existing install directory
// whose contents are invalid (here: a stray `broken.txt`, no valid manifest)
// and replaces it with a fresh download from the mock server.
async fn ensure_installed_replaces_invalid_cached_install() {
    let server = MockServer::start().await;
    let version = "0.1.0";
    let platform = PackagePlatform::detect_current().unwrap_or_else(|error| panic!("{error}"));
    let archive_name = format!("test-package-v{version}-{}.zip", platform.as_str());
    let archive_bytes = build_zip_archive(version);
    let archive_sha = format!("{:x}", Sha256::digest(&archive_bytes));
    // Manifest advertising the archive with its true sha256 and size, so the
    // download path succeeds end to end.
    let manifest = serde_json::json!({
        "package_version": version,
        "platforms": {
            platform.as_str(): {
                "archive": archive_name,
                "sha256": archive_sha,
                "format": "zip",
                "size_bytes": archive_bytes.len(),
            }
        }
    });
    Mock::given(method("GET"))
        .and(path(format!("/test-package-v{version}-manifest.json")))
        .respond_with(ResponseTemplate::new(200).set_body_json(&manifest))
        .mount(&server)
        .await;
    Mock::given(method("GET"))
        .and(path(format!("/{archive_name}")))
        .respond_with(ResponseTemplate::new(200).set_body_bytes(archive_bytes))
        .mount(&server)
        .await;

    let codex_home = TempDir::new().unwrap_or_else(|error| panic!("{error}"));
    // Pre-create the final install dir with stale content that a valid
    // install would not contain.
    let install_dir = codex_home
        .path()
        .join("packages")
        .join("test-package")
        .join(version)
        .join(platform.as_str());
    std::fs::create_dir_all(&install_dir).unwrap_or_else(|error| panic!("{error}"));
    std::fs::write(install_dir.join("broken.txt"), "stale")
        .unwrap_or_else(|error| panic!("{error}"));

    let manager = PackageManager::new(PackageManagerConfig::new(
        codex_home.path().to_path_buf(),
        TestPackage {
            base_url: Url::parse(&format!("{}/", server.uri()))
                .unwrap_or_else(|error| panic!("{error}")),
            version: version.to_string(),
            fail_on_final_install_dir: false,
        },
    ));

    let installed = manager
        .ensure_installed()
        .await
        .unwrap_or_else(|error| panic!("{error}"));

    assert_eq!(installed.version, version);
    // The fresh archive contents replaced the stale directory wholesale.
    assert!(installed.root_dir.join("manifest.json").exists());
    assert!(!installed.root_dir.join("broken.txt").exists());
}
|
||||
|
||||
#[tokio::test]
// Verifies that `ensure_installed` fails with `UnexpectedPackageVersion` when
// the downloaded manifest advertises a different `package_version` ("0.2.0")
// than the version the manager was configured for ("0.1.0").
async fn ensure_installed_rejects_manifest_version_mismatch() {
    let server = MockServer::start().await;
    let version = "0.1.0";
    let platform = PackagePlatform::detect_current().unwrap_or_else(|error| panic!("{error}"));
    let archive_name = format!("test-package-v{version}-{}.zip", platform.as_str());
    // Deliberately mismatched `package_version`; sha/size are placeholders
    // because validation must fail before the archive is ever fetched
    // (no archive mock is mounted).
    let manifest = serde_json::json!({
        "package_version": "0.2.0",
        "platforms": {
            platform.as_str(): {
                "archive": archive_name,
                "sha256": "deadbeef",
                "format": "zip",
                "size_bytes": 1,
            }
        }
    });
    Mock::given(method("GET"))
        .and(path(format!("/test-package-v{version}-manifest.json")))
        .respond_with(ResponseTemplate::new(200).set_body_json(&manifest))
        .mount(&server)
        .await;

    let codex_home = TempDir::new().unwrap_or_else(|error| panic!("{error}"));
    let manager = PackageManager::new(PackageManagerConfig::new(
        codex_home.path().to_path_buf(),
        TestPackage {
            base_url: Url::parse(&format!("{}/", server.uri()))
                .unwrap_or_else(|error| panic!("{error}")),
            version: version.to_string(),
            fail_on_final_install_dir: false,
        },
    ));

    let error = manager
        .ensure_installed()
        .await
        .expect_err("manifest version mismatch should fail");
    // Error must carry both the expected and the advertised version.
    assert!(matches!(
        error,
        PackageManagerError::UnexpectedPackageVersion { expected, actual }
        if expected == "0.1.0" && actual == "0.2.0"
    ));
}
|
||||
|
||||
#[tokio::test]
// Verifies that two managers sharing the same config/codex_home serialize
// their installs: both `ensure_installed` calls succeed with the same result,
// while the manifest and archive are each downloaded exactly once
// (enforced by the wiremock `.expect(1)` on both mocks).
async fn ensure_installed_serializes_concurrent_installs() {
    let server = MockServer::start().await;
    let version = "0.1.0";
    let platform = PackagePlatform::detect_current().unwrap_or_else(|error| panic!("{error}"));
    let archive_name = format!("test-package-v{version}-{}.zip", platform.as_str());
    let archive_bytes = build_zip_archive(version);
    let archive_sha = format!("{:x}", Sha256::digest(&archive_bytes));
    let manifest = serde_json::json!({
        "package_version": version,
        "platforms": {
            platform.as_str(): {
                "archive": archive_name,
                "sha256": archive_sha,
                "format": "zip",
                "size_bytes": archive_bytes.len(),
            }
        }
    });
    // `.expect(1)` makes wiremock fail the test if either endpoint is hit
    // more than once — i.e. if the two installs are not serialized.
    Mock::given(method("GET"))
        .and(path(format!("/test-package-v{version}-manifest.json")))
        .respond_with(ResponseTemplate::new(200).set_body_json(&manifest))
        .expect(1)
        .mount(&server)
        .await;
    Mock::given(method("GET"))
        .and(path(format!("/{archive_name}")))
        .respond_with(ResponseTemplate::new(200).set_body_bytes(archive_bytes))
        .expect(1)
        .mount(&server)
        .await;

    let codex_home = TempDir::new().unwrap_or_else(|error| panic!("{error}"));
    let config = PackageManagerConfig::new(
        codex_home.path().to_path_buf(),
        TestPackage {
            base_url: Url::parse(&format!("{}/", server.uri()))
                .unwrap_or_else(|error| panic!("{error}")),
            version: version.to_string(),
            fail_on_final_install_dir: false,
        },
    );
    let manager_one = PackageManager::new(config.clone());
    let manager_two = PackageManager::new(config);
    // Barrier ensures both tasks start their install attempt at the same
    // time, maximizing the chance of overlap.
    let barrier = Arc::new(Barrier::new(2));
    let barrier_one = Arc::clone(&barrier);
    let barrier_two = Arc::clone(&barrier);

    let (first, second) = tokio::join!(
        async {
            barrier_one.wait().await;
            manager_one.ensure_installed().await
        },
        async {
            barrier_two.wait().await;
            manager_two.ensure_installed().await
        }
    );

    let first = first.unwrap_or_else(|error| panic!("{error}"));
    let second = second.unwrap_or_else(|error| panic!("{error}"));
    // Both callers observe the same installed package.
    assert_eq!(first, second);
}
|
||||
|
||||
#[tokio::test]
// Verifies that `ensure_installed` fails with `UnexpectedArchiveSize` when
// the downloaded archive's byte count differs from the manifest's
// `size_bytes`, even though the sha256 in the manifest is correct.
async fn ensure_installed_rejects_unexpected_archive_size() {
    let server = MockServer::start().await;
    let version = "0.1.0";
    let platform = PackagePlatform::detect_current().unwrap_or_else(|error| panic!("{error}"));
    let archive_name = format!("test-package-v{version}-{}.zip", platform.as_str());
    let archive_bytes = build_zip_archive(version);
    let actual_size = archive_bytes.len() as u64;
    // Advertise one byte more than the real archive to trigger the mismatch.
    let expected_size = (archive_bytes.len() + 1) as u64;
    let archive_sha = format!("{:x}", Sha256::digest(&archive_bytes));
    let manifest = serde_json::json!({
        "package_version": version,
        "platforms": {
            platform.as_str(): {
                "archive": archive_name,
                "sha256": archive_sha,
                "format": "zip",
                "size_bytes": expected_size,
            }
        }
    });
    Mock::given(method("GET"))
        .and(path(format!("/test-package-v{version}-manifest.json")))
        .respond_with(ResponseTemplate::new(200).set_body_json(&manifest))
        .mount(&server)
        .await;
    Mock::given(method("GET"))
        .and(path(format!("/{archive_name}")))
        .respond_with(ResponseTemplate::new(200).set_body_bytes(archive_bytes))
        .mount(&server)
        .await;

    let codex_home = TempDir::new().unwrap_or_else(|error| panic!("{error}"));
    let manager = PackageManager::new(PackageManagerConfig::new(
        codex_home.path().to_path_buf(),
        TestPackage {
            base_url: Url::parse(&format!("{}/", server.uri()))
                .unwrap_or_else(|error| panic!("{error}")),
            version: version.to_string(),
            fail_on_final_install_dir: false,
        },
    ));

    let error = manager
        .ensure_installed()
        .await
        .expect_err("archive size mismatch should fail");
    // Error must report both the manifest's size and the downloaded size.
    assert!(matches!(
        error,
        PackageManagerError::UnexpectedArchiveSize { expected, actual }
        if expected == expected_size && actual == actual_size
    ));
}
|
||||
|
||||
#[tokio::test]
// Exercises the quarantine/promote/restore primitives directly: when
// promoting a staged dir fails (the staged dir does not exist), restoring
// the quarantined previous install must put the original contents back.
async fn staged_install_restore_keeps_previous_install_on_failed_promotion() {
    let temp = TempDir::new().unwrap_or_else(|error| panic!("{error}"));
    let install_dir = temp.path().join("install");
    // Intentionally never created, so promotion is guaranteed to fail.
    let staged_dir = temp.path().join("missing-staged");
    std::fs::create_dir_all(&install_dir).unwrap_or_else(|error| panic!("{error}"));
    std::fs::write(install_dir.join("manifest.json"), "0.1.0")
        .unwrap_or_else(|error| panic!("{error}"));

    // Move the current install aside, attempt (and fail) the promotion,
    // then restore the quarantined copy.
    let quarantined = quarantine_existing_install(&install_dir)
        .await
        .unwrap_or_else(|error| panic!("{error}"));
    let promotion_error = promote_staged_install(&staged_dir, &install_dir)
        .await
        .expect_err("promotion should fail");
    crate::manager::restore_quarantined_install(
        &install_dir,
        quarantined.as_deref(),
        &promotion_error,
    )
    .await
    .unwrap_or_else(|error| panic!("{error}"));

    // The original install survived the failed promotion intact.
    assert!(install_dir.join("manifest.json").exists());
    assert_eq!(
        std::fs::read_to_string(install_dir.join("manifest.json"))
            .unwrap_or_else(|error| panic!("{error}")),
        "0.1.0"
    );
}
|
||||
|
||||
#[tokio::test]
// End-to-end rollback test: download and extraction succeed, but the
// package's final validation fails (`fail_on_final_install_dir: true`).
// The previously installed content ("0.0.9") must be restored, and no
// leftover `.replaced-*` quarantine directory may remain on disk.
async fn ensure_installed_restores_previous_install_when_final_validation_fails() {
    let server = MockServer::start().await;
    let version = "0.1.0";
    let platform = PackagePlatform::detect_current().unwrap_or_else(|error| panic!("{error}"));
    let archive_name = format!("test-package-v{version}-{}.zip", platform.as_str());
    let archive_bytes = build_zip_archive(version);
    let archive_sha = format!("{:x}", Sha256::digest(&archive_bytes));
    // Fully valid manifest/archive pair — the failure is injected later,
    // at final validation time, not during download.
    let manifest = serde_json::json!({
        "package_version": version,
        "platforms": {
            platform.as_str(): {
                "archive": archive_name,
                "sha256": archive_sha,
                "format": "zip",
                "size_bytes": archive_bytes.len(),
            }
        }
    });
    Mock::given(method("GET"))
        .and(path(format!("/test-package-v{version}-manifest.json")))
        .respond_with(ResponseTemplate::new(200).set_body_json(&manifest))
        .mount(&server)
        .await;
    Mock::given(method("GET"))
        .and(path(format!("/{archive_name}")))
        .respond_with(ResponseTemplate::new(200).set_body_bytes(archive_bytes))
        .mount(&server)
        .await;

    let codex_home = TempDir::new().unwrap_or_else(|error| panic!("{error}"));
    // Seed a previous install with an older manifest content.
    let install_dir = codex_home
        .path()
        .join("packages")
        .join("test-package")
        .join(version)
        .join(platform.as_str());
    std::fs::create_dir_all(&install_dir).unwrap_or_else(|error| panic!("{error}"));
    std::fs::write(install_dir.join("manifest.json"), "0.0.9")
        .unwrap_or_else(|error| panic!("{error}"));

    let error = PackageManager::new(PackageManagerConfig::new(
        codex_home.path().to_path_buf(),
        TestPackage {
            base_url: Url::parse(&format!("{}/", server.uri()))
                .unwrap_or_else(|error| panic!("{error}")),
            version: version.to_string(),
            // Injects a failure when the final install dir is validated.
            fail_on_final_install_dir: true,
        },
    ))
    .ensure_installed()
    .await
    .expect_err("final validation should fail");

    assert!(
        matches!(error, PackageManagerError::ArchiveExtraction(message) if message.contains("refusing final install dir"))
    );
    // Rollback restored the previous install's content...
    assert_eq!(
        std::fs::read_to_string(install_dir.join("manifest.json"))
            .unwrap_or_else(|error| panic!("{error}")),
        "0.0.9"
    );
    // ...and cleaned up the temporary `.replaced-*` quarantine directory.
    assert!(
        !install_dir
            .parent()
            .unwrap_or_else(|| panic!("install dir should have a parent"))
            .read_dir()
            .unwrap_or_else(|error| panic!("{error}"))
            .any(|entry| {
                entry
                    .unwrap_or_else(|error| panic!("{error}"))
                    .file_name()
                    .to_string_lossy()
                    .contains(".replaced-")
            })
    );
}
|
||||
|
||||
#[test]
// Verifies that a `.tar.gz` archive extracts successfully and that
// `detect_single_package_root` finds the single top-level package directory
// (containing `manifest.json`) inside the extraction root.
fn tar_gz_extraction_supports_default_package_root_detection() {
    let temp = TempDir::new().unwrap_or_else(|error| panic!("{error}"));
    let archive_path = temp.path().join("package.tar.gz");
    let extraction_root = temp.path().join("extract");
    std::fs::create_dir_all(&extraction_root).unwrap_or_else(|error| panic!("{error}"));
    write_tar_gz_archive(&archive_path, "0.2.0");

    extract_archive(&archive_path, &extraction_root, ArchiveFormat::TarGz)
        .unwrap_or_else(|error| panic!("{error}"));
    let package_root =
        detect_single_package_root(&extraction_root).unwrap_or_else(|error| panic!("{error}"));

    // The detected root is the archive's `test-package/` directory.
    assert!(package_root.join("manifest.json").exists());
}
|
||||
|
||||
#[test]
// Security check: tar extraction must refuse symlink entries (which could
// point outside the extraction root) with an "unsupported type" error.
fn tar_gz_extraction_rejects_symlinks() {
    let temp = TempDir::new().unwrap_or_else(|error| panic!("{error}"));
    let archive_path = temp.path().join("package.tar.gz");
    let extraction_root = temp.path().join("extract");
    std::fs::create_dir_all(&extraction_root).unwrap_or_else(|error| panic!("{error}"));
    // Archive contains a symlink entry targeting /tmp/escape.
    write_tar_gz_archive_with_symlink(&archive_path);

    let error = extract_archive(&archive_path, &extraction_root, ArchiveFormat::TarGz)
        .expect_err("symlink entry should fail");
    assert!(
        matches!(error, PackageManagerError::ArchiveExtraction(message) if message.contains("unsupported type"))
    );
}
|
||||
|
||||
#[test]
// Security check (zip-slip): zip extraction must reject entries whose path
// contains `..` components that would escape the extraction root.
fn zip_extraction_rejects_parent_paths() {
    let temp = TempDir::new().unwrap_or_else(|error| panic!("{error}"));
    let archive_path = temp.path().join("package.zip");
    let extraction_root = temp.path().join("extract");
    std::fs::create_dir_all(&extraction_root).unwrap_or_else(|error| panic!("{error}"));
    // Archive contains a single "../escape.txt" entry.
    write_zip_archive_with_parent_path(&archive_path);

    let error = extract_archive(&archive_path, &extraction_root, ArchiveFormat::Zip)
        .expect_err("parent path entry should fail");
    assert!(
        matches!(error, PackageManagerError::ArchiveExtraction(message) if message.contains("escapes extraction root"))
    );
}
|
||||
|
||||
fn build_zip_archive(version: &str) -> Vec<u8> {
|
||||
let mut bytes = Cursor::new(Vec::new());
|
||||
{
|
||||
let mut zip = ZipWriter::new(&mut bytes);
|
||||
let options = SimpleFileOptions::default();
|
||||
zip.start_file("test-package/manifest.json", options)
|
||||
.unwrap_or_else(|error| panic!("{error}"));
|
||||
zip.write_all(version.as_bytes())
|
||||
.unwrap_or_else(|error| panic!("{error}"));
|
||||
zip.start_file("test-package/bin/tool", options.unix_permissions(0o755))
|
||||
.unwrap_or_else(|error| panic!("{error}"));
|
||||
zip.write_all(b"#!/bin/sh\n")
|
||||
.unwrap_or_else(|error| panic!("{error}"));
|
||||
zip.finish().unwrap_or_else(|error| panic!("{error}"));
|
||||
}
|
||||
bytes.into_inner()
|
||||
}
|
||||
|
||||
fn write_zip_archive_with_parent_path(archive_path: &Path) {
|
||||
let file = File::create(archive_path).unwrap_or_else(|error| panic!("{error}"));
|
||||
let mut zip = ZipWriter::new(file);
|
||||
let options = SimpleFileOptions::default();
|
||||
zip.start_file("../escape.txt", options)
|
||||
.unwrap_or_else(|error| panic!("{error}"));
|
||||
zip.write_all(b"escape")
|
||||
.unwrap_or_else(|error| panic!("{error}"));
|
||||
zip.finish().unwrap_or_else(|error| panic!("{error}"));
|
||||
}
|
||||
|
||||
fn write_tar_gz_archive(archive_path: &Path, version: &str) {
|
||||
let file = File::create(archive_path).unwrap_or_else(|error| panic!("{error}"));
|
||||
let encoder = flate2::write::GzEncoder::new(file, flate2::Compression::default());
|
||||
let mut builder = Builder::new(encoder);
|
||||
|
||||
append_tar_file(
|
||||
&mut builder,
|
||||
"test-package/manifest.json",
|
||||
version.as_bytes(),
|
||||
);
|
||||
builder.finish().unwrap_or_else(|error| panic!("{error}"));
|
||||
}
|
||||
|
||||
fn write_tar_gz_archive_with_symlink(archive_path: &Path) {
|
||||
let file = File::create(archive_path).unwrap_or_else(|error| panic!("{error}"));
|
||||
let encoder = flate2::write::GzEncoder::new(file, flate2::Compression::default());
|
||||
let mut builder = Builder::new(encoder);
|
||||
|
||||
append_tar_file(&mut builder, "test-package/manifest.json", b"0.2.0");
|
||||
|
||||
let mut header = tar::Header::new_gnu();
|
||||
header.set_entry_type(EntryType::Symlink);
|
||||
header.set_size(0);
|
||||
header.set_mode(0o777);
|
||||
header
|
||||
.set_link_name("/tmp/escape")
|
||||
.unwrap_or_else(|error| panic!("{error}"));
|
||||
header.set_cksum();
|
||||
builder
|
||||
.append_data(&mut header, "test-package/link", std::io::empty())
|
||||
.unwrap_or_else(|error| panic!("{error}"));
|
||||
|
||||
builder.finish().unwrap_or_else(|error| panic!("{error}"));
|
||||
}
|
||||
|
||||
fn append_tar_file(
|
||||
builder: &mut Builder<flate2::write::GzEncoder<File>>,
|
||||
path: &str,
|
||||
contents: &[u8],
|
||||
) {
|
||||
let mut header = tar::Header::new_gnu();
|
||||
header.set_size(contents.len() as u64);
|
||||
header.set_mode(0o755);
|
||||
header.set_cksum();
|
||||
builder
|
||||
.append_data(&mut header, path, contents)
|
||||
.unwrap_or_else(|error| panic!("{error}"));
|
||||
}
|
||||
Loading…
Add table
Reference in a new issue