## Overview

Adds LM Studio OSS support. Closes #1883

### Changes

This PR enhances the behavior of the `--oss` flag to support LM Studio as a provider. It also introduces a new flag, `--local-provider`, which accepts `lmstudio` or `ollama` as values if the user wants to choose a provider explicitly (e.g. `codex --oss --local-provider lmstudio`). If no provider is specified, `codex --oss` will auto-select the provider based on whichever is running.

#### Additional enhancements

The default can be set using `oss_provider` in config, like:

```toml
oss_provider = "lmstudio"
```

Non-interactive users will need to either pass the provider as an arg or have it in their `config.toml`.

### Notes

For best performance, [set the default context length](https://lmstudio.ai/docs/app/advanced/per-model) for gpt-oss to the maximum your machine can support.

---------

Co-authored-by: Matt Clayton <matt@lmstudio.ai>
Co-authored-by: Eric Traut <etraut@openai.com>
The PR adds a new `codex-lmstudio` crate; its `Cargo.toml`:
```toml
[package]
name = "codex-lmstudio"
version.workspace = true
edition.workspace = true

[lib]
name = "codex_lmstudio"
path = "src/lib.rs"

[dependencies]
codex-core = { path = "../core" }
reqwest = { version = "0.12", features = ["json", "stream"] }
serde_json = "1"
tokio = { version = "1", features = ["rt"] }
tracing = { version = "0.1.41", features = ["log"] }
which = "6.0"

[dev-dependencies]
wiremock = "0.6"
tokio = { version = "1", features = ["full"] }

[lints]
workspace = true
```
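For context on the auto-selection described in the Changes section, here is a minimal sketch of one way provider detection could work, assuming it probes the providers' default local ports (LM Studio's server defaults to `1234`, Ollama's to `11434`). The `OssProvider` enum, `auto_select_provider` function, endpoint paths, and precedence order are all illustrative assumptions, not the crate's actual implementation (which, judging by the `which` dependency in the manifest, may also look for provider binaries on `PATH`).

```rust
// Hedged sketch of `--oss` provider auto-selection: probe each provider's
// default local port and pick whichever responds. Endpoints and precedence
// here are illustrative assumptions, not the crate's actual logic.
use std::time::Duration;

#[derive(Debug, Clone, Copy, PartialEq, Eq)]
enum OssProvider {
    LmStudio,
    Ollama,
}

/// Returns true if an HTTP server answers on the given URL.
async fn is_up(client: &reqwest::Client, url: &str) -> bool {
    client.get(url).send().await.is_ok()
}

/// Probe the default local ports and pick whichever provider is running.
async fn auto_select_provider() -> Option<OssProvider> {
    let client = reqwest::Client::builder()
        .timeout(Duration::from_millis(500)) // fail fast if nothing is listening
        .build()
        .ok()?;

    // LM Studio's local server defaults to port 1234 (OpenAI-compatible API).
    if is_up(&client, "http://localhost:1234/v1/models").await {
        return Some(OssProvider::LmStudio);
    }
    // Ollama's server defaults to port 11434.
    if is_up(&client, "http://localhost:11434/").await {
        return Some(OssProvider::Ollama);
    }
    None
}

#[tokio::main]
async fn main() {
    match auto_select_provider().await {
        Some(p) => println!("auto-selected provider: {p:?}"),
        None => eprintln!(
            "no local provider running; pass --local-provider or set oss_provider in config.toml"
        ),
    }
}
```

If neither probe succeeds, this matches the behavior described above: non-interactive runs would need the provider passed as an arg or set via `oss_provider` in `config.toml`.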