cli/internal/cmd/ml/cmd_gguf.go
Claude 5ff4b8a2eb feat: add ML inference, scoring, and training pipeline (pkg/ml)
Port LEM scoring/training pipeline into CoreGo as pkg/ml with:
- Inference abstraction with HTTP, llama-server, and Ollama backends
- 3-tier scoring engine (heuristic, exact, LLM judge)
- Capability and content probes for model evaluation
- GGUF/safetensors format converters, MLX to PEFT adapter conversion
- DuckDB integration for training data pipeline
- InfluxDB metrics for lab dashboard
- Training data export (JSONL + Parquet)
- Expansion generation pipeline with distributed workers
- 10 CLI commands under 'core ml' (incl. score, probe, export, expand, status, gguf, convert, agent, worker)
- 5 MCP tools (ml_generate, ml_score, ml_probe, ml_status, ml_backends)

All 37 ML tests passing. Binary builds at 138MB with all commands.

Co-Authored-By: Claude Opus 4.6 <noreply@anthropic.com>
2026-02-16 05:53:52 +00:00

40 lines
1.1 KiB
Go

package ml
import (
"fmt"
"forge.lthn.ai/core/cli/pkg/cli"
"forge.lthn.ai/core/cli/pkg/ml"
)
// Flag values for the "gguf" subcommand, bound in init below.
var (
ggufInput string // path to the input MLX safetensors adapter file
ggufConfig string // path to the adapter config JSON
ggufOutput string // destination path for the generated GGUF file
ggufArch string // GGUF architecture name written into the output (default "gemma3")
)
// ggufCmd is the "gguf" subcommand: it converts an MLX safetensors LoRA
// adapter into GGUF v3 format so llama.cpp can load it. Execution is
// delegated to runGGUF; inputs come from the package-level gguf* flags.
var ggufCmd = &cli.Command{
Use: "gguf",
Short: "Convert MLX LoRA adapter to GGUF format",
Long: "Converts an MLX safetensors LoRA adapter to GGUF v3 format for use with llama.cpp.",
RunE: runGGUF,
}
// init wires the gguf subcommand's flags: three required path flags
// (input, config, output) and an optional architecture override.
func init() {
ggufCmd.Flags().StringVar(&ggufInput, "input", "", "Input safetensors file (required)")
ggufCmd.Flags().StringVar(&ggufConfig, "config", "", "Adapter config JSON (required)")
ggufCmd.Flags().StringVar(&ggufOutput, "output", "", "Output GGUF file (required)")
ggufCmd.Flags().StringVar(&ggufArch, "arch", "gemma3", "GGUF architecture name")
// NOTE(review): if MarkFlagRequired follows the cobra convention it returns
// an error (non-nil only when the named flag does not exist); those returns
// are discarded here — confirm the cli wrapper's signature and consider
// failing loudly in init if it can error.
ggufCmd.MarkFlagRequired("input")
ggufCmd.MarkFlagRequired("config")
ggufCmd.MarkFlagRequired("output")
}
// runGGUF executes the gguf subcommand: it converts the MLX safetensors
// LoRA adapter named by the --input/--config flags into a GGUF v3 file at
// --output, using the --arch architecture name. On success it prints the
// output path to stdout; on failure it returns the wrapped conversion error.
func runGGUF(cmd *cli.Command, args []string) error {
	input, config, output, arch := ggufInput, ggufConfig, ggufOutput, ggufArch
	err := ml.ConvertMLXtoGGUFLoRA(input, config, output, arch)
	if err != nil {
		return fmt.Errorf("convert to GGUF: %w", err)
	}
	fmt.Printf("GGUF LoRA adapter written to %s\n", output)
	return nil
}