## Overview Adds LM Studio OSS support. Closes #1883 ### Changes This PR enhances the behavior of the `--oss` flag to support LM Studio as a provider. Additionally, it introduces a new flag, `--local-provider`, which can take `lmstudio` or `ollama` as values if the user wants to explicitly choose which one to use. If no provider is specified, `codex --oss` will auto-select the provider based on whichever is running. #### Additional enhancements The default can be set using `oss_provider` in config like: ``` oss_provider = "lmstudio" ``` For non-interactive users, they will need to either provide the provider as an arg or have it in their `config.toml`. ### Notes For best performance, [set the default context length](https://lmstudio.ai/docs/app/advanced/per-model) for gpt-oss to the maximum your machine can support. --------- Co-authored-by: Matt Clayton <matt@lmstudio.ai> Co-authored-by: Eric Traut <etraut@openai.com>
[package]
name = "codex-common"
version = { workspace = true }
edition = "2024"

[lints]
workspace = true

[dependencies]
# `clap`, `serde`, and `toml` are optional so that consumers which don't need
# the CLI surface (see the `cli` feature below) avoid pulling them in.
clap = { workspace = true, features = ["derive", "wrap_help"], optional = true }
codex-app-server-protocol = { workspace = true }
codex-core = { workspace = true }
codex-lmstudio = { workspace = true }
codex-ollama = { workspace = true }
codex-protocol = { workspace = true }
once_cell = { workspace = true }
serde = { workspace = true, optional = true }
toml = { workspace = true, optional = true }

[features]
# Separate feature so that `clap` is not a mandatory dependency.
cli = ["clap", "serde", "toml"]
elapsed = []
sandbox_summary = []