cli/cmd/ml/cmd_expand.go
Claude a43cc099cd
feat(cli): migrate imports to split repos + wire go-agentic registry
Virgil split go-ai into standalone modules (go-agentic, go-ml, go-mlx,
go-rag). This commit migrates all CLI imports to the new module paths and
fixes the API mismatches introduced by the split.

Key changes:
- go-ai/agentic → go-agentic (cmd/ai, cmd/dev)
- go-ai/ml → go-ml (31 files in cmd/ml)
- go-ai/rag → go-rag (3 files in cmd/rag)
- go-ai/mlx → go-mlx (1 file)
- Fix go.work path (../core → ../go)
- Add all split repos to go.work and go.mod (see the go.work sketch after this list)
- Simplify the daemon to goroutine-based MCP (drop the now-missing supervisor dependency)
- Wire go-agentic SQLiteRegistry into dispatch watch (--agent-id flag)
- Add `core ai agent fleet` command for local registry status
- Fix rag collections API (PointCount, Status string)
- Fix ml live/expand-status to use available go-ml API
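
A plausible shape for the updated go.work, assuming the split repos are checked out as siblings of this CLI module (the Go version and all directory names other than ../go are illustrative):

go 1.24

use (
	.
	../go
	../go-agentic
	../go-ml
	../go-mlx
	../go-rag
)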

Co-Authored-By: Charon <charon@lethean.io>
2026-02-20 12:47:02 +00:00


package ml

import (
	"context"
	"fmt"
	"os"

	"forge.lthn.ai/core/go/pkg/cli"
	"forge.lthn.ai/core/go-ml"
)
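
// Note: forge.lthn.ai/core/go-ml replaces the pre-split go-ai/ml import
// path (see the commit message); the package it provides is still named
// ml, so no import alias is needed.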
// Flag values local to the expand subcommand.
var (
	expandWorker string
	expandOutput string
	expandLimit  int
	expandDryRun bool
)
// expandCmd drives the batch-expansion pipeline: pending prompts in,
// JSONL responses out.
var expandCmd = &cli.Command{
	Use:   "expand",
	Short: "Generate expansion responses from pending prompts",
	Long:  "Reads pending expansion prompts from DuckDB and generates responses via an OpenAI-compatible API.",
	RunE:  runExpand,
}
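
// Example invocation, assuming the root command is `core` (as the commit
// message's `core ai agent fleet` suggests); the model name and DB path
// are placeholders, and --model/--db are taken to be shared flags
// registered by a sibling file in this package, matching the
// modelName/dbPath references in runExpand below:
//
//	core ml expand --model my-model --db lem.duckdb --worker gpu-01 --limit 50 --dry-run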
func init() {
	expandCmd.Flags().StringVar(&expandWorker, "worker", "", "Worker hostname (defaults to os.Hostname())")
	expandCmd.Flags().StringVar(&expandOutput, "output", ".", "Output directory for JSONL files")
	expandCmd.Flags().IntVar(&expandLimit, "limit", 0, "Max prompts to process (0 = all)")
	expandCmd.Flags().BoolVar(&expandDryRun, "dry-run", false, "Print plan and exit without generating")
}
func runExpand(cmd *cli.Command, args []string) error {
	// modelName, dbPath, apiURL, influxURL and influxDB are shared
	// package-level flags defined elsewhere in package ml.
	if modelName == "" {
		return fmt.Errorf("--model is required")
	}

	// Resolve the DuckDB path: --db flag first, then the LEM_DB env var.
	path := dbPath
	if path == "" {
		path = os.Getenv("LEM_DB")
	}
	if path == "" {
		return fmt.Errorf("--db or LEM_DB env is required")
	}

	// Default the worker label to the local hostname.
	if expandWorker == "" {
		h, _ := os.Hostname()
		expandWorker = h
	}

	db, err := ml.OpenDBReadWrite(path)
	if err != nil {
		return fmt.Errorf("open db: %w", err)
	}
	defer db.Close()

	rows, err := db.QueryExpansionPrompts("pending", expandLimit)
	if err != nil {
		return fmt.Errorf("query expansion_prompts: %w", err)
	}
	fmt.Printf("Loaded %d pending prompts from %s\n", len(rows), path)

	// Map DB rows onto generation requests, preferring r.Prompt and
	// falling back to r.PromptEn when it is empty.
	var prompts []ml.Response
	for _, r := range rows {
		prompt := r.Prompt
		if prompt == "" && r.PromptEn != "" {
			prompt = r.PromptEn
		}
		prompts = append(prompts, ml.Response{
			ID:     r.SeedID,
			Domain: r.Domain,
			Prompt: prompt,
		})
	}

	ctx := context.Background()
	backend := ml.NewHTTPBackend(apiURL, modelName)
	influx := ml.NewInfluxClient(influxURL, influxDB)
	return ml.ExpandPrompts(ctx, backend, influx, prompts, modelName, expandWorker, expandOutput, expandDryRun, expandLimit)
}
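
For callers embedding go-ml rather than shelling out to the CLI, the same pipeline can be driven directly. This is a minimal sketch using only the go-ml calls and signatures visible in the file above; the DB path, model name, URLs, worker label, and batch size are placeholders.

package main

import (
	"context"
	"log"

	"forge.lthn.ai/core/go-ml"
)

func main() {
	// Open the DuckDB dataset read-write, as runExpand does.
	db, err := ml.OpenDBReadWrite("lem.duckdb") // placeholder path
	if err != nil {
		log.Fatal(err)
	}
	defer db.Close()

	// Pull a small batch of pending expansion prompts.
	rows, err := db.QueryExpansionPrompts("pending", 10)
	if err != nil {
		log.Fatal(err)
	}

	// Map DB rows onto generation requests.
	prompts := make([]ml.Response, 0, len(rows))
	for _, r := range rows {
		prompts = append(prompts, ml.Response{ID: r.SeedID, Domain: r.Domain, Prompt: r.Prompt})
	}

	// OpenAI-compatible backend plus Influx metrics sink, then generate.
	backend := ml.NewHTTPBackend("http://localhost:8080/v1", "my-model") // placeholder URL and model
	influx := ml.NewInfluxClient("http://localhost:8086", "lem")         // placeholder URL and database
	err = ml.ExpandPrompts(context.Background(), backend, influx, prompts,
		"my-model", "worker-01", ".", false, 10)
	if err != nil {
		log.Fatal(err)
	}
}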