- Introduce `openai_models` in `/core`
- Move `PRESETS` under it
- Move `ModelPreset`, `ModelUpgrade`, and `ReasoningEffortPreset` to `protocol`
- Introduce `Op::ListModels` and `EventMsg::AvailableModels`

Next steps:
- migrate `app-server` and `tui` to use the introduced Operation
23 lines · 610 B · TOML
[package]
name = "codex-common"
version.workspace = true
edition.workspace = true
license.workspace = true

[lints]
workspace = true

[dependencies]
clap = { workspace = true, features = ["derive", "wrap_help"], optional = true }
codex-core = { workspace = true }
codex-lmstudio = { workspace = true }
codex-ollama = { workspace = true }
codex-protocol = { workspace = true }
serde = { workspace = true, optional = true }
toml = { workspace = true, optional = true }

[features]
# Separate feature so that `clap` is not a mandatory dependency.
cli = ["clap", "serde", "toml"]
elapsed = []
sandbox_summary = []